forked from clan/clan-core
Compare commits
188 Commits
main
...
init/mumbl
Author | SHA1 | Date | |
---|---|---|---|
67dcca47d1 | |||
9bbf7f668a | |||
afdfa6181b | |||
6c11e0ced7 | |||
399ce2e35c | |||
e575c2e769 | |||
56b2347a30 | |||
70954acf3d | |||
13aa60529f | |||
7474f01193 | |||
bd9883baaf | |||
313db5643f | |||
93a6d7a476 | |||
d221d90972 | |||
30fd5dcfb8 | |||
c79680344d | |||
ad544a7d24 | |||
1cd606b879 | |||
39f74c0f52 | |||
8feea28a19 | |||
b73246bdfd | |||
36a418b6ac | |||
43e8804eb4 | |||
8790e5a0eb | |||
5e39514251 | |||
b28950f310 | |||
3ebee252aa | |||
720fb4af63 | |||
af19950dfa | |||
149be249fa | |||
0cf86806b2 | |||
cb847cab82 | |||
a89fd31844 | |||
870948306d | |||
ec49d1f844 | |||
e3d84a5daf | |||
79b5ad0754 | |||
24b0d72d96 | |||
084cd8751f | |||
3d77e0a3a9 | |||
06bbae6d14 | |||
5f22493361 | |||
56a4caf39b | |||
83056f743d | |||
6743ff96a9 | |||
1f3c4f4ac3 | |||
7766829fb1 | |||
175b219246 | |||
48aee84547 | |||
d587b326b5 | |||
ac099d9e6f | |||
913ab4627c | |||
be868ee107 | |||
36b1bb65af | |||
4a752bb951 | |||
3dabb4e89a | |||
e2474f4e66 | |||
f4ee0b0387 | |||
5df1f9f9d2 | |||
3368255473 | |||
1cbb2d6aa4 | |||
bc0e0088a0 | |||
a6a9f763db | |||
8dcb009e5b | |||
9f0f44b470 | |||
67aa84760d | |||
b05c937151 | |||
3322bbd681 | |||
a1acf0b05d | |||
66bdc61e3d | |||
dd2bd2f989 | |||
6f18a5de92 | |||
1d542d4396 | |||
07fb01d9db | |||
8a5d4a0f8f | |||
|
48069f99cd | ||
1eaf6cec39 | |||
f0c9de9e50 | |||
ef42bcc525 | |||
e7995ad344 | |||
6e3c2506c9 | |||
5473e2733c | |||
006a7044f1 | |||
c647197b8c | |||
62735ebfe2 | |||
8ff00fd8fe | |||
bd586575b3 | |||
f14f7368d7 | |||
6adcd1fdf2 | |||
6e99beb335 | |||
6689d45a4f | |||
6d82a5851b | |||
337ba1f8f6 | |||
bf7b148592 | |||
a7f724a804 | |||
7c06b65def | |||
7286c7250c | |||
4e841d3087 | |||
2ce704dd40 | |||
6279610691 | |||
297d53dac8 | |||
6f1300f819 | |||
02a015a1b6 | |||
5c11a30b46 | |||
0dc3b9f056 | |||
c0d8aaf73a | |||
2a0019457d | |||
6dec2a9222 | |||
f71295e640 | |||
c1aedc5bb8 | |||
d6a9f6d3f9 | |||
ba6840d978 | |||
86b08258dd | |||
9ccff4ab2e | |||
cf310be1c8 | |||
d8e80bb0c8 | |||
9206182e15 | |||
d25eaa48d0 | |||
5a2c91959a | |||
193d54153d | |||
510634bc04 | |||
954f1fe605 | |||
764b53275f | |||
44fc1be270 | |||
5ef170020d | |||
5f7099fc89 | |||
fe08fef015 | |||
edb744f654 | |||
5ff5b46896 | |||
49e67ac46c | |||
5024973896 | |||
7dce6ad6c4 | |||
779229a907 | |||
af23ed027a | |||
06412865bb | |||
fab311b53a | |||
bc602dbf3c | |||
0fb207bb59 | |||
c751bc78d8 | |||
c9038ad0b3 | |||
b4699cd8a3 | |||
d0a87d8e3c | |||
78dbabf901 | |||
ad771ae6a0 | |||
92bc2962b8 | |||
836754d7ad | |||
6576290160 | |||
db88e63148 | |||
f2d2102127 | |||
b9bf453731 | |||
fb98247a8d | |||
4bd927cbcf | |||
3725d5703e | |||
bf0cc19c8f | |||
8af137545f | |||
3d71ebcc5f | |||
c6fcb833b3 | |||
c926f23c09 | |||
21ac1f7204 | |||
05ff7bd261 | |||
b2109351ff | |||
0bd13727de | |||
e1d6d04b48 | |||
9dbbb6f2f6 | |||
836170e5b6 | |||
d4fabff7f4 | |||
b21bef0b98 | |||
533ed97fc1 | |||
e7e5a1ded8 | |||
4e95030e55 | |||
b331a8c730 | |||
2923051a12 | |||
fe96137c56 | |||
addc4de735 | |||
2460ba9b67 | |||
62be27ec62 | |||
8515d41fe3 | |||
d4d69d6990 | |||
0027c46313 | |||
ca2001040b | |||
d6725100ac | |||
503ce29c84 | |||
87444cd2b8 | |||
31eca9e8bc | |||
822afe08b5 | |||
cfb78b0edb | |||
65fd7d3efe | |||
e8241fb7c9 |
@ -40,6 +40,6 @@ Clan is more than a tool; it's a movement towards a better digital future. By co
|
||||
|
||||
Connect with us and the Clan community for support and discussion:
|
||||
|
||||
- [Matrix channel](https://matrix.to/#/#clan:lassul.us) for live discussions.
|
||||
- [Matrix channel](https://matrix.to/#/#clan:clan.lol) for live discussions.
|
||||
- IRC bridges (coming soon) for real-time chat support.
|
||||
|
||||
|
@ -68,7 +68,7 @@
|
||||
};
|
||||
};
|
||||
};
|
||||
clanCore.facts.secretStore = "vm";
|
||||
clan.core.facts.secretStore = "vm";
|
||||
|
||||
environment.systemPackages = [
|
||||
self.packages.${pkgs.system}.clan-cli
|
||||
@ -87,9 +87,12 @@
|
||||
flake-registry = pkgs.writeText "flake-registry" ''{"flakes":[],"version":2}'';
|
||||
};
|
||||
system.extraDependencies = dependencies;
|
||||
clanCore.state.test-backups.folders = [ "/var/test-backups" ];
|
||||
clan.core.state.test-backups.folders = [ "/var/test-backups" ];
|
||||
|
||||
clanCore.state.test-service = {
|
||||
clan.core.state.test-service = {
|
||||
preBackupCommand = ''
|
||||
touch /var/test-service/pre-backup-command
|
||||
'';
|
||||
preRestoreCommand = "pre-restore-command";
|
||||
postRestoreCommand = "post-restore-command";
|
||||
folders = [ "/var/test-service" ];
|
||||
@ -164,13 +167,15 @@
|
||||
assert machine.succeed("cat /var/test-backups/somefile").strip() == "testing", "restore failed"
|
||||
machine.succeed("test -f /var/test-service/pre-restore-command")
|
||||
machine.succeed("test -f /var/test-service/post-restore-command")
|
||||
machine.succeed("test -f /var/test-service/pre-backup-command")
|
||||
|
||||
## localbackup restore
|
||||
machine.succeed("rm -f /var/test-backups/somefile /var/test-service/{pre,post}-restore-command")
|
||||
machine.succeed("rm -rf /var/test-backups/somefile /var/test-service/ && mkdir -p /var/test-service")
|
||||
machine.succeed(f"clan backups restore --debug --flake ${self} test-backup localbackup '{localbackup_id}' >&2")
|
||||
assert machine.succeed("cat /var/test-backups/somefile").strip() == "testing", "restore failed"
|
||||
machine.succeed("test -f /var/test-service/pre-restore-command")
|
||||
machine.succeed("test -f /var/test-service/post-restore-command")
|
||||
machine.succeed("test -f /var/test-service/pre-backup-command")
|
||||
'';
|
||||
} { inherit pkgs self; };
|
||||
};
|
||||
|
@ -16,9 +16,9 @@
|
||||
};
|
||||
}
|
||||
{
|
||||
clanCore.machineName = "machine";
|
||||
clanCore.clanDir = ./.;
|
||||
clanCore.state.testState.folders = [ "/etc/state" ];
|
||||
clan.core.machineName = "machine";
|
||||
clan.core.clanDir = ./.;
|
||||
clan.core.state.testState.folders = [ "/etc/state" ];
|
||||
environment.etc.state.text = "hello world";
|
||||
systemd.tmpfiles.settings."vmsecrets" = {
|
||||
"/etc/secrets/borgbackup.ssh" = {
|
||||
@ -36,7 +36,7 @@
|
||||
};
|
||||
};
|
||||
};
|
||||
clanCore.facts.secretStore = "vm";
|
||||
clan.core.facts.secretStore = "vm";
|
||||
|
||||
clan.borgbackup.destinations.test.repo = "borg@localhost:.";
|
||||
}
|
||||
|
@ -10,8 +10,8 @@
|
||||
self.clanModules.deltachat
|
||||
self.nixosModules.clanCore
|
||||
{
|
||||
clanCore.machineName = "machine";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.machineName = "machine";
|
||||
clan.core.clanDir = ./.;
|
||||
}
|
||||
];
|
||||
};
|
||||
|
@ -23,7 +23,7 @@
|
||||
options =
|
||||
(pkgs.nixos {
|
||||
imports = [ self.nixosModules.clanCore ];
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.clanDir = ./.;
|
||||
}).options;
|
||||
warningsAreErrors = false;
|
||||
};
|
||||
@ -40,10 +40,12 @@
|
||||
secrets = import ./secrets nixosTestArgs;
|
||||
container = import ./container nixosTestArgs;
|
||||
deltachat = import ./deltachat nixosTestArgs;
|
||||
matrix-synapse = import ./matrix-synapse nixosTestArgs;
|
||||
zt-tcp-relay = import ./zt-tcp-relay nixosTestArgs;
|
||||
borgbackup = import ./borgbackup nixosTestArgs;
|
||||
matrix-synapse = import ./matrix-synapse nixosTestArgs;
|
||||
mumble = import ./mumble nixosTestArgs;
|
||||
syncthing = import ./syncthing nixosTestArgs;
|
||||
zt-tcp-relay = import ./zt-tcp-relay nixosTestArgs;
|
||||
postgresql = import ./postgresql nixosTestArgs;
|
||||
wayland-proxy-virtwl = import ./wayland-proxy-virtwl nixosTestArgs;
|
||||
};
|
||||
|
||||
|
@ -151,7 +151,7 @@ class Machine:
|
||||
"""
|
||||
|
||||
# Always run command with shell opts
|
||||
command = f"set -euo pipefail; {command}"
|
||||
command = f"set -eo pipefail; source /etc/profile; set -u; {command}"
|
||||
|
||||
proc = subprocess.run(
|
||||
[
|
||||
|
@ -4,26 +4,61 @@
|
||||
name = "matrix-synapse";
|
||||
|
||||
nodes.machine =
|
||||
{ self, lib, ... }:
|
||||
{
|
||||
config,
|
||||
self,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
{
|
||||
imports = [
|
||||
self.clanModules.matrix-synapse
|
||||
self.nixosModules.clanCore
|
||||
{
|
||||
clanCore.machineName = "machine";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.matrix-synapse = {
|
||||
enable = true;
|
||||
domain = "clan.test";
|
||||
};
|
||||
}
|
||||
{
|
||||
# secret override
|
||||
clanCore.facts.services.matrix-synapse.secret.synapse-registration_shared_secret.path = "${./synapse-registration_shared_secret}";
|
||||
clan.core.machineName = "machine";
|
||||
clan.core.clanDir = ./.;
|
||||
|
||||
services.nginx.virtualHosts."matrix.clan.test" = {
|
||||
enableACME = lib.mkForce false;
|
||||
forceSSL = lib.mkForce false;
|
||||
};
|
||||
clan.matrix-synapse.domain = "clan.test";
|
||||
clan.matrix-synapse.users.admin.admin = true;
|
||||
clan.matrix-synapse.users.someuser = { };
|
||||
|
||||
clan.core.facts.secretStore = "vm";
|
||||
|
||||
# because we use systemd-tmpfiles to copy the secrets, we need to a seperate systemd-tmpfiles call to provison them.
|
||||
boot.postBootCommands = "${config.systemd.package}/bin/systemd-tmpfiles --create /etc/tmpfiles.d/00-vmsecrets.conf";
|
||||
|
||||
systemd.tmpfiles.settings."00-vmsecrets" = {
|
||||
# run before 00-nixos.conf
|
||||
"/etc/secrets" = {
|
||||
d.mode = "0700";
|
||||
z.mode = "0700";
|
||||
};
|
||||
"/etc/secrets/synapse-registration_shared_secret" = {
|
||||
f.argument = "supersecret";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/matrix-password-admin" = {
|
||||
f.argument = "matrix-password1";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/matrix-password-someuser" = {
|
||||
f.argument = "matrix-password2";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
@ -32,6 +67,12 @@
|
||||
machine.wait_for_unit("matrix-synapse")
|
||||
machine.succeed("${pkgs.netcat}/bin/nc -z -v ::1 8008")
|
||||
machine.succeed("${pkgs.curl}/bin/curl -Ssf -L http://localhost/_matrix/static/ -H 'Host: matrix.clan.test'")
|
||||
|
||||
machine.systemctl("restart matrix-synapse >&2") # check if user creation is idempotent
|
||||
machine.execute("journalctl -u matrix-synapse --no-pager >&2")
|
||||
machine.wait_for_unit("matrix-synapse")
|
||||
machine.succeed("${pkgs.netcat}/bin/nc -z -v ::1 8008")
|
||||
machine.succeed("${pkgs.curl}/bin/curl -Ssf -L http://localhost/_matrix/static/ -H 'Host: matrix.clan.test'")
|
||||
'';
|
||||
}
|
||||
)
|
||||
|
150
checks/mumble/default.nix
Normal file
150
checks/mumble/default.nix
Normal file
@ -0,0 +1,150 @@
|
||||
(import ../lib/test-base.nix) (
|
||||
{ ... }:
|
||||
let
|
||||
common =
|
||||
{ self, pkgs, ... }:
|
||||
{
|
||||
imports = [
|
||||
self.clanModules.mumble
|
||||
self.nixosModules.clanCore
|
||||
(self.inputs.nixpkgs + "/nixos/tests/common/x11.nix")
|
||||
{
|
||||
clan.core.clanDir = ./.;
|
||||
environment.systemPackages = [
|
||||
pkgs.litecli
|
||||
pkgs.xdotool
|
||||
pkgs.killall
|
||||
];
|
||||
services.murmur.sslKey = "/etc/mumble-key";
|
||||
services.murmur.sslCert = "/etc/mumble-cert";
|
||||
clan.core.facts.services.mumble.secret."mumble-key".path = "/etc/mumble-key";
|
||||
clan.core.facts.services.mumble.public."mumble-cert".path = "/etc/mumble-cert";
|
||||
}
|
||||
];
|
||||
|
||||
};
|
||||
in
|
||||
{
|
||||
name = "mumble";
|
||||
|
||||
enableOCR = true;
|
||||
|
||||
nodes.peer1 =
|
||||
{ ... }:
|
||||
{
|
||||
imports = [
|
||||
common
|
||||
{
|
||||
clan.core.machineName = "peer1";
|
||||
environment.etc = {
|
||||
"mumble-key".source = ./peer_1/peer_1_test_key;
|
||||
"mumble-cert".source = ./peer_1/peer_1_test_cert;
|
||||
};
|
||||
systemd.tmpfiles.settings."vmsecrets" = {
|
||||
"/etc/secrets/mumble-key" = {
|
||||
C.argument = "${./peer_1/peer_1_test_key}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "murmur";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/mumble-cert" = {
|
||||
C.argument = "${./peer_1/peer_1_test_cert}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "murmur";
|
||||
};
|
||||
};
|
||||
};
|
||||
services.murmur.sslKey = "/etc/mumble-key";
|
||||
services.murmur.sslCert = "/etc/mumble-cert";
|
||||
clan.core.facts.services.mumble.secret."mumble-key".path = "/etc/mumble-key";
|
||||
clan.core.facts.services.mumble.public."mumble-cert".path = "/etc/mumble-cert";
|
||||
}
|
||||
];
|
||||
};
|
||||
nodes.peer2 =
|
||||
{ ... }:
|
||||
{
|
||||
imports = [
|
||||
common
|
||||
{
|
||||
clan.core.machineName = "peer2";
|
||||
environment.etc = {
|
||||
"mumble-key".source = ./peer_2/peer_2_test_key;
|
||||
"mumble-cert".source = ./peer_2/peer_2_test_cert;
|
||||
};
|
||||
systemd.tmpfiles.settings."vmsecrets" = {
|
||||
"/etc/secrets/mumble-key" = {
|
||||
C.argument = "${./peer_2/peer_2_test_key}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "murmur";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/mumble-cert" = {
|
||||
C.argument = "${./peer_2/peer_2_test_cert}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "murmur";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
testScript = ''
|
||||
start_all()
|
||||
|
||||
with subtest("Waiting for x"):
|
||||
peer1.wait_for_x()
|
||||
peer2.wait_for_x()
|
||||
|
||||
with subtest("Waiting for murmur"):
|
||||
peer1.wait_for_unit("murmur.service")
|
||||
peer2.wait_for_unit("murmur.service")
|
||||
|
||||
with subtest("Starting Mumble"):
|
||||
# starting mumble is blocking
|
||||
peer1.execute("mumble >&2 &")
|
||||
peer2.execute("mumble >&2 &")
|
||||
|
||||
with subtest("Wait for Mumble"):
|
||||
peer1.wait_for_window(r"^Mumble$")
|
||||
peer2.wait_for_window(r"^Mumble$")
|
||||
|
||||
with subtest("Wait for certificate creation"):
|
||||
peer1.wait_for_window(r"^Mumble$")
|
||||
peer1.sleep(3) # mumble is slow to register handlers
|
||||
peer1.send_chars("\n")
|
||||
peer1.send_chars("\n")
|
||||
peer2.wait_for_window(r"^Mumble$")
|
||||
peer2.sleep(3) # mumble is slow to register handlers
|
||||
peer2.send_chars("\n")
|
||||
peer2.send_chars("\n")
|
||||
|
||||
with subtest("Wait for server connect"):
|
||||
peer1.wait_for_window(r"^Mumble Server Connect$")
|
||||
peer2.wait_for_window(r"^Mumble Server Connect$")
|
||||
|
||||
with subtest("Check validity of server certificates"):
|
||||
peer1.execute("killall .mumble-wrapped")
|
||||
peer1.sleep(1)
|
||||
peer1.execute("mumble mumble://peer2 >&2 &")
|
||||
peer1.wait_for_window(r"^Mumble$")
|
||||
peer1.sleep(3) # mumble is slow to register handlers
|
||||
peer1.send_chars("\n")
|
||||
peer1.send_chars("\n")
|
||||
peer1.wait_for_text("Connected.")
|
||||
|
||||
peer2.execute("killall .mumble-wrapped")
|
||||
peer2.sleep(1)
|
||||
peer2.execute("mumble mumble://peer1 >&2 &")
|
||||
peer2.wait_for_window(r"^Mumble$")
|
||||
peer2.sleep(3) # mumble is slow to register handlers
|
||||
peer2.send_chars("\n")
|
||||
peer2.send_chars("\n")
|
||||
peer2.wait_for_text("Connected.")
|
||||
'';
|
||||
}
|
||||
)
|
22
checks/mumble/machines/peer1/facts/mumble-cert
Normal file
22
checks/mumble/machines/peer1/facts/mumble-cert
Normal file
@ -0,0 +1,22 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDazCCAlOgAwIBAgIUCUjfNkF0CDhTKbO3nNczcsCW4qEwDQYJKoZIhvcNAQEL
|
||||
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDA2MjcwOTM2NDZaFw0yNDA3
|
||||
MjcwOTM2NDZaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQDCcdZEJvXJIeOKO5pF5XUFvUeJtCCiwfWvWS662bxc
|
||||
R/5MZucRLqfTNYo9aBv4NITw5kxZsTaaubmS4zSGQoTEAVzqzVdi3a/gNvsdVLb+
|
||||
7CivpmweLllX/OGsTL0kHPEI+74AYiTBjXfdWV1Y5T1tuwc3G8ATrguQ33Uo5vvF
|
||||
vcqsbTKcRZC0pB9O/nn4q03GsRdvlpaKakIhjMpRG/uZ3u7wtbyZ+WqjsjxZNfnY
|
||||
aMyPoaipFqX1v+L7GKlOj2NpyEZFVVwa2ZqhVSYXyDfpAWQFznwKGzD5mjtcyKym
|
||||
gnv/5LwrpH4Xj+JMt48hN+rPnu5vfXT8Y4KnID30OQW7AgMBAAGjUzBRMB0GA1Ud
|
||||
DgQWBBQBBO8Wp975pAGioMjkaxANAVInfzAfBgNVHSMEGDAWgBQBBO8Wp975pAGi
|
||||
oMjkaxANAVInfzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAg
|
||||
F40MszTZXpR/A1z9B1CcXH47tNK67f8bCMR2dhvXODbpatwSihyxhQjtLb5R6kYH
|
||||
5Yq/B4yrh303j0CXaobCQ4nQH7zI7fhViww+TzW7vDhgM7ueEyyXrqCXt6JY8avg
|
||||
TuvIRtJSeWSQJ5aLNaYqmiwMf/tj9W3BMDpctGyLqu1WTSrbpYa9mA5Vudud70Yz
|
||||
DgZ/aqHilB07cVNqzVYZzRZ56WJlTjGzVevRgnHZqPiZNVrU13H6gtWa3r8aV4Gj
|
||||
i4F663eRAttj166cRgfl1QqpSG2IprNyV9UfuS2LlUaVNT3y0idawiJ4HhaA8pGB
|
||||
ZqMUUkA4DSucb6xxEcTK
|
||||
-----END CERTIFICATE-----
|
||||
|
1
checks/mumble/machines/peer1/key.age
Normal file
1
checks/mumble/machines/peer1/key.age
Normal file
@ -0,0 +1 @@
|
||||
AGE-SECRET-KEY-1UCXEUJH6JXF8LFKWFHDM4N9AQE2CCGQZGXLUNV4TKR5KY0KC8FDQ2TY4NX
|
14
checks/mumble/machines/peer1/peer_1_test_cert
Normal file
14
checks/mumble/machines/peer1/peer_1_test_cert
Normal file
@ -0,0 +1,14 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICHTCCAaKgAwIBAgIIT2gZuvqVFP0wCgYIKoZIzj0EAwIwSjESMBAGA1UEChMJ
|
||||
U3luY3RoaW5nMSAwHgYDVQQLExdBdXRvbWF0aWNhbGx5IEdlbmVyYXRlZDESMBAG
|
||||
A1UEAxMJc3luY3RoaW5nMB4XDTIzMTIwNjAwMDAwMFoXDTQzMTIwMTAwMDAwMFow
|
||||
SjESMBAGA1UEChMJU3luY3RoaW5nMSAwHgYDVQQLExdBdXRvbWF0aWNhbGx5IEdl
|
||||
bmVyYXRlZDESMBAGA1UEAxMJc3luY3RoaW5nMHYwEAYHKoZIzj0CAQYFK4EEACID
|
||||
YgAEBAr1CsciwCa0vi7eC6xxuSGijY3txbjtsyFanec/fge4oJBD3rVpaLKFETb3
|
||||
TvHHsuvblzElcP483MEVq6FMUoxwuL9CzTtpJrRhtwSmAs8AHLFu8irVn8sZjgkL
|
||||
sXMho1UwUzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsG
|
||||
AQUFBwMCMAwGA1UdEwEB/wQCMAAwFAYDVR0RBA0wC4IJc3luY3RoaW5nMAoGCCqG
|
||||
SM49BAMCA2kAMGYCMQDbrtLgfcyMMIkNQn+PJe9DHYAqj8C47LQcWuIY/nekhOu0
|
||||
aUfKctEAwyBtI60Y5zcCMQCEdgD/6CNBh7Qqq3z3CKPhlrpxHtCO5tNw17k0jfdH
|
||||
haCwJInHZvZgclHk4EtFpTw=
|
||||
-----END CERTIFICATE-----
|
6
checks/mumble/machines/peer1/peer_1_test_key
Normal file
6
checks/mumble/machines/peer1/peer_1_test_key
Normal file
@ -0,0 +1,6 @@
|
||||
-----BEGIN EC PRIVATE KEY-----
|
||||
MIGkAgEBBDA14Nqo17Xs/xRLGH2KLuyzjKp4eW9iWFobVNM93RZZbECT++W3XcQc
|
||||
cEc5WVtiPmWgBwYFK4EEACKhZANiAAQECvUKxyLAJrS+Lt4LrHG5IaKNje3FuO2z
|
||||
IVqd5z9+B7igkEPetWlosoURNvdO8cey69uXMSVw/jzcwRWroUxSjHC4v0LNO2km
|
||||
tGG3BKYCzwAcsW7yKtWfyxmOCQuxcyE=
|
||||
-----END EC PRIVATE KEY-----
|
22
checks/mumble/machines/peer2/facts/mumble-cert
Normal file
22
checks/mumble/machines/peer2/facts/mumble-cert
Normal file
@ -0,0 +1,22 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDazCCAlOgAwIBAgIUfENbTtH5nr7giuawwQpDYqUpWJswDQYJKoZIhvcNAQEL
|
||||
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDA2MjcwOTQxNDNaFw0yNDA3
|
||||
MjcwOTQxNDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQCfP6cZhCs9jOnWqyQP12vrOOxlBrWofYZFf9amUA24
|
||||
AfE7oGcSfkylanmkxzvGqQkhgLAvkHZj/GEvHujKyy8PgcEGP+pwmsfWNQMvU0Dz
|
||||
j3syjWOTi3eIC/3DoUnHlWCT2qCil/bjqxgU1l7fO/OXUlq5kyvIjln7Za4sUHun
|
||||
ixe/m96Er6l8a4Mh2pxh2C5pkLCvulkQhjjGG+R6MccH8wwQwmLg5oVBkFEZrnRE
|
||||
pnRKBI0DvA+wk1aJFAPOI4d8Q5T7o/MyxH3f8TYGHqbeMQFCKwusnlWPRtrNdaIc
|
||||
gaLvSpR0LVlroXGu8tYmRpvHPByoKGDbgVvO0Bwx8fmRAgMBAAGjUzBRMB0GA1Ud
|
||||
DgQWBBR7r+mQWNUZ0TpQNwrwjgxgngvOjTAfBgNVHSMEGDAWgBR7r+mQWNUZ0TpQ
|
||||
NwrwjgxgngvOjTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCO
|
||||
7B4s6uQEGE8jg3CQgy76oU/D8sazGcP8+/E4JLHSc0Nj49w4ztSpkOVk2HyEtzbm
|
||||
uR3TreIw+SfqpbiOI/ivVNDbEBsb/vEeq7qPzDH1Bi72plHZNRVhNGGV5rd7ibga
|
||||
TkfXHKPM9yt8ffffHHiu1ROvb8gg2B6JbQwboU4hvvmmorW7onyTFSYEzZVdNSpv
|
||||
pUtKPldxYjTnLlbsJdXC4xyCC4PrJt2CC0n0jsWfICJ77LMxIxTODh8oZNjbPg6r
|
||||
RdI7U/DsD+R072DjbIcrivvigotJM+jihzz5inZwbO8o0WQOHAbJLIG3C3BnRW3A
|
||||
Ek4u3+HXZMl5a0LGJ76u
|
||||
-----END CERTIFICATE-----
|
||||
|
14
checks/mumble/machines/peer2/peer_2_test_cert
Normal file
14
checks/mumble/machines/peer2/peer_2_test_cert
Normal file
@ -0,0 +1,14 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIICHjCCAaOgAwIBAgIJAKbMWefkf1rVMAoGCCqGSM49BAMCMEoxEjAQBgNVBAoT
|
||||
CVN5bmN0aGluZzEgMB4GA1UECxMXQXV0b21hdGljYWxseSBHZW5lcmF0ZWQxEjAQ
|
||||
BgNVBAMTCXN5bmN0aGluZzAeFw0yMzEyMDYwMDAwMDBaFw00MzEyMDEwMDAwMDBa
|
||||
MEoxEjAQBgNVBAoTCVN5bmN0aGluZzEgMB4GA1UECxMXQXV0b21hdGljYWxseSBH
|
||||
ZW5lcmF0ZWQxEjAQBgNVBAMTCXN5bmN0aGluZzB2MBAGByqGSM49AgEGBSuBBAAi
|
||||
A2IABFZTMt4RfsfBue0va7QuNdjfXMI4HfZzJCEcG+b9MtV7FlDmwMKX5fgGykD9
|
||||
FBbC7yiza3+xCobdMb5bakz1qYJ7nUFCv1mwSDo2eNM+/XE+rJmlre8NwkwGmvzl
|
||||
h1uhyqNVMFMwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggr
|
||||
BgEFBQcDAjAMBgNVHRMBAf8EAjAAMBQGA1UdEQQNMAuCCXN5bmN0aGluZzAKBggq
|
||||
hkjOPQQDAgNpADBmAjEAwzhsroN6R4/quWeXj6dO5gt5CfSTLkLee6vrcuIP5i1U
|
||||
rZvJ3OKQVmmGG6IWYe7iAjEAyuq3X2wznaqiw2YK3IDI4qVeYWpCUap0fwRNq7/x
|
||||
4dC4k+BOzHcuJOwNBIY/bEuK
|
||||
-----END CERTIFICATE-----
|
6
checks/mumble/machines/peer2/peer_2_test_key
Normal file
6
checks/mumble/machines/peer2/peer_2_test_key
Normal file
@ -0,0 +1,6 @@
|
||||
-----BEGIN EC PRIVATE KEY-----
|
||||
MIGkAgEBBDCXHGpvumKjjDRxB6SsjZOb7duw3w+rdlGQCJTIvRThLjD6zwjnyImi
|
||||
7c3PD5nWtLqgBwYFK4EEACKhZANiAARWUzLeEX7HwbntL2u0LjXY31zCOB32cyQh
|
||||
HBvm/TLVexZQ5sDCl+X4BspA/RQWwu8os2t/sQqG3TG+W2pM9amCe51BQr9ZsEg6
|
||||
NnjTPv1xPqyZpa3vDcJMBpr85Ydboco=
|
||||
-----END EC PRIVATE KEY-----
|
1
checks/mumble/peer_1/key.age
Normal file
1
checks/mumble/peer_1/key.age
Normal file
@ -0,0 +1 @@
|
||||
AGE-SECRET-KEY-1UCXEUJH6JXF8LFKWFHDM4N9AQE2CCGQZGXLUNV4TKR5KY0KC8FDQ2TY4NX
|
22
checks/mumble/peer_1/peer_1_test_cert
Normal file
22
checks/mumble/peer_1/peer_1_test_cert
Normal file
@ -0,0 +1,22 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDazCCAlOgAwIBAgIUCUjfNkF0CDhTKbO3nNczcsCW4qEwDQYJKoZIhvcNAQEL
|
||||
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDA2MjcwOTM2NDZaFw0yNDA3
|
||||
MjcwOTM2NDZaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQDCcdZEJvXJIeOKO5pF5XUFvUeJtCCiwfWvWS662bxc
|
||||
R/5MZucRLqfTNYo9aBv4NITw5kxZsTaaubmS4zSGQoTEAVzqzVdi3a/gNvsdVLb+
|
||||
7CivpmweLllX/OGsTL0kHPEI+74AYiTBjXfdWV1Y5T1tuwc3G8ATrguQ33Uo5vvF
|
||||
vcqsbTKcRZC0pB9O/nn4q03GsRdvlpaKakIhjMpRG/uZ3u7wtbyZ+WqjsjxZNfnY
|
||||
aMyPoaipFqX1v+L7GKlOj2NpyEZFVVwa2ZqhVSYXyDfpAWQFznwKGzD5mjtcyKym
|
||||
gnv/5LwrpH4Xj+JMt48hN+rPnu5vfXT8Y4KnID30OQW7AgMBAAGjUzBRMB0GA1Ud
|
||||
DgQWBBQBBO8Wp975pAGioMjkaxANAVInfzAfBgNVHSMEGDAWgBQBBO8Wp975pAGi
|
||||
oMjkaxANAVInfzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAg
|
||||
F40MszTZXpR/A1z9B1CcXH47tNK67f8bCMR2dhvXODbpatwSihyxhQjtLb5R6kYH
|
||||
5Yq/B4yrh303j0CXaobCQ4nQH7zI7fhViww+TzW7vDhgM7ueEyyXrqCXt6JY8avg
|
||||
TuvIRtJSeWSQJ5aLNaYqmiwMf/tj9W3BMDpctGyLqu1WTSrbpYa9mA5Vudud70Yz
|
||||
DgZ/aqHilB07cVNqzVYZzRZ56WJlTjGzVevRgnHZqPiZNVrU13H6gtWa3r8aV4Gj
|
||||
i4F663eRAttj166cRgfl1QqpSG2IprNyV9UfuS2LlUaVNT3y0idawiJ4HhaA8pGB
|
||||
ZqMUUkA4DSucb6xxEcTK
|
||||
-----END CERTIFICATE-----
|
||||
|
28
checks/mumble/peer_1/peer_1_test_key
Normal file
28
checks/mumble/peer_1/peer_1_test_key
Normal file
@ -0,0 +1,28 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDCcdZEJvXJIeOK
|
||||
O5pF5XUFvUeJtCCiwfWvWS662bxcR/5MZucRLqfTNYo9aBv4NITw5kxZsTaaubmS
|
||||
4zSGQoTEAVzqzVdi3a/gNvsdVLb+7CivpmweLllX/OGsTL0kHPEI+74AYiTBjXfd
|
||||
WV1Y5T1tuwc3G8ATrguQ33Uo5vvFvcqsbTKcRZC0pB9O/nn4q03GsRdvlpaKakIh
|
||||
jMpRG/uZ3u7wtbyZ+WqjsjxZNfnYaMyPoaipFqX1v+L7GKlOj2NpyEZFVVwa2Zqh
|
||||
VSYXyDfpAWQFznwKGzD5mjtcyKymgnv/5LwrpH4Xj+JMt48hN+rPnu5vfXT8Y4Kn
|
||||
ID30OQW7AgMBAAECggEAGVKn+/Iy+kG+l2cRvV6XseqnoWhjA69M5swviMgIfuAl
|
||||
Xx/boeI4mwoS+dJQKi/0zEbB1MB+gwIDB/0s/vs0vS4MQswBQG/skr+2TmiU+Hgb
|
||||
CF0dIYUZv5rAbScFTumx/mCCqxwc+1QIMzyLKqOYL203EFc92ZJGEVT4th321haZ
|
||||
8Wd+dllcYAb7BbEeBhCrTqRe9T3zt5reZgtZTquTF5hGm8EAyBp6rLjZK7dyZ9dd
|
||||
gyIsDbWgPC9vkRc6x/eANn70hgDbYOuoXwAP/qIFnWLL1Zzy8LKUyOsSgQ91S3S3
|
||||
Il4Lt6lEyU3+61MsCYss7jDoP/7REEjz5h6gfxlFSQKBgQD9u8nhHuwte4/d9VNU
|
||||
rhSBW9h8IJzwPif/eS8vh9VaS2SjR2dDCcHg6rGYKnexeEzUcx56aQMA+p3nRJwy
|
||||
Uwnx5BfEWs9FO6yPR8VEI0a2sBp+hoWKJX/Lvat+QCs6IFuGmlQpczD7/RYAkhG4
|
||||
mwyt/ymqzjukb9mFaeYIltOfPwKBgQDELnkH1ChTUH5u3HgDoelFbzR18okz6dxH
|
||||
urMbfZMAl8W5h2zAvHsAX5qxyHHankOUsiH2y3BrAgqQtTuIA2a5W7j+yHBkYiEZ
|
||||
EUNeI9YNA0KU+wwZpVVvRGUsRB5SUBo5LlcSYmX/V32f0oU5Np44i0vjl3Ju8esx
|
||||
2MLfj1A2hQKBgQDCxtZZZ0h8Pb8Z7wpSFfQNvXi5CLwQvFYuClQLk6VXVErkAJsn
|
||||
XiUjyGYeXnNVm/i2mcyKwXQZ20k90HBrPU2ED8mi5Ob5ya5Uqw6mmMHe2d7sw81d
|
||||
WB37RBWSrCXC0DYSZQQ4cYHn3sd2Fqtd4EBijV7qDLjCKU582OdKLqYzNwKBgH31
|
||||
UKQkJZgIkIThbPT4GewI0GgCRvFb76DmUGUQJTg2Oi86siq1WUwOFiabie5RuxZX
|
||||
oNLyH8W008/BbO2RMX1FVOvRCciJ8LJFkTl6TM6iDzfUUBqPOuFryoG3Yrh60btw
|
||||
81rMbqyZIgFhi0QGu2OWnC0Oadyt2tJwV/5t55R5AoGBAPspZttDmOzVkAJDSn9Z
|
||||
iByYt1KmwBQ6l7LpFg33a7ds9zWqW4+i6r0PzXvSewf/z69L0cAywSk5CaJJjDso
|
||||
dTlNMqwux01wd6V+nQGR871xnsOg+qzgJ565TJZelWgRmNRUooi4DMp5POJA33xp
|
||||
rqAISUfW0w2S+q7/5Lm0QiJE
|
||||
-----END PRIVATE KEY-----
|
22
checks/mumble/peer_2/peer_2_test_cert
Normal file
22
checks/mumble/peer_2/peer_2_test_cert
Normal file
@ -0,0 +1,22 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDazCCAlOgAwIBAgIUfENbTtH5nr7giuawwQpDYqUpWJswDQYJKoZIhvcNAQEL
|
||||
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDA2MjcwOTQxNDNaFw0yNDA3
|
||||
MjcwOTQxNDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQCfP6cZhCs9jOnWqyQP12vrOOxlBrWofYZFf9amUA24
|
||||
AfE7oGcSfkylanmkxzvGqQkhgLAvkHZj/GEvHujKyy8PgcEGP+pwmsfWNQMvU0Dz
|
||||
j3syjWOTi3eIC/3DoUnHlWCT2qCil/bjqxgU1l7fO/OXUlq5kyvIjln7Za4sUHun
|
||||
ixe/m96Er6l8a4Mh2pxh2C5pkLCvulkQhjjGG+R6MccH8wwQwmLg5oVBkFEZrnRE
|
||||
pnRKBI0DvA+wk1aJFAPOI4d8Q5T7o/MyxH3f8TYGHqbeMQFCKwusnlWPRtrNdaIc
|
||||
gaLvSpR0LVlroXGu8tYmRpvHPByoKGDbgVvO0Bwx8fmRAgMBAAGjUzBRMB0GA1Ud
|
||||
DgQWBBR7r+mQWNUZ0TpQNwrwjgxgngvOjTAfBgNVHSMEGDAWgBR7r+mQWNUZ0TpQ
|
||||
NwrwjgxgngvOjTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCO
|
||||
7B4s6uQEGE8jg3CQgy76oU/D8sazGcP8+/E4JLHSc0Nj49w4ztSpkOVk2HyEtzbm
|
||||
uR3TreIw+SfqpbiOI/ivVNDbEBsb/vEeq7qPzDH1Bi72plHZNRVhNGGV5rd7ibga
|
||||
TkfXHKPM9yt8ffffHHiu1ROvb8gg2B6JbQwboU4hvvmmorW7onyTFSYEzZVdNSpv
|
||||
pUtKPldxYjTnLlbsJdXC4xyCC4PrJt2CC0n0jsWfICJ77LMxIxTODh8oZNjbPg6r
|
||||
RdI7U/DsD+R072DjbIcrivvigotJM+jihzz5inZwbO8o0WQOHAbJLIG3C3BnRW3A
|
||||
Ek4u3+HXZMl5a0LGJ76u
|
||||
-----END CERTIFICATE-----
|
||||
|
28
checks/mumble/peer_2/peer_2_test_key
Normal file
28
checks/mumble/peer_2/peer_2_test_key
Normal file
@ -0,0 +1,28 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCfP6cZhCs9jOnW
|
||||
qyQP12vrOOxlBrWofYZFf9amUA24AfE7oGcSfkylanmkxzvGqQkhgLAvkHZj/GEv
|
||||
HujKyy8PgcEGP+pwmsfWNQMvU0Dzj3syjWOTi3eIC/3DoUnHlWCT2qCil/bjqxgU
|
||||
1l7fO/OXUlq5kyvIjln7Za4sUHunixe/m96Er6l8a4Mh2pxh2C5pkLCvulkQhjjG
|
||||
G+R6MccH8wwQwmLg5oVBkFEZrnREpnRKBI0DvA+wk1aJFAPOI4d8Q5T7o/MyxH3f
|
||||
8TYGHqbeMQFCKwusnlWPRtrNdaIcgaLvSpR0LVlroXGu8tYmRpvHPByoKGDbgVvO
|
||||
0Bwx8fmRAgMBAAECggEACAkjOnNj5zA0IIP0RuRc6rqtmw9ynTTwUJN51lyVxKI8
|
||||
dQDMEq/S2En+J2VyS7z92/XtbgkBIFx83u7VWl5UWpj2j4UsJFB7IwD7zyiJT4D+
|
||||
+3cM/kX8Wx4XyQZbfbm47N0MXAgFCkn45hxHH0acLReXwmN9wxoDyl7AIjZRdwvG
|
||||
Qq0rnOnIc8kkkew7L6AiFwQS8b77eyzua3d6moKXN9hU/kfiJ6YUFG/WLe0pmQA1
|
||||
HbF27YghfeLnYUt50oDuX6jF6CzQhflchWVq/wn8/cxEpg/RMicWE8ulrTk7o27l
|
||||
JwCrHrhYEBsPuZO4mxX/DHrAMmhTeFjLaV5bQlz0PQKBgQDgRPSOEixYnKz9iPs/
|
||||
EDTlji5LA3Rm6TytRCNsjYY6Trw60KcvYqwyDUCiEjruvOQ9mqgBiQm1VHSalrG3
|
||||
RcbVfpEMouyZbEwmTjS8KdOi5x4Z6AX+4yWDN31jX3b8sktgbxV/HRdg3sA3q7MJ
|
||||
vExTUuoXg57W+FepIZ+XlhSoQwKBgQC1x6UMAlAeW45/yUUm/LFRcCgb/bdCQx+e
|
||||
hSb8w3jdvVoNWgx1j7RsjjFKaZUnseK3qQvVfCm4Qjvlz6MpKDxslaUYuR162Ku0
|
||||
e153z/xc7XRoXyPyPLdGZFlWii30jirB7ZqPdyz6mwlWwqdImNerbUqdFt9R8bId
|
||||
pYsyHB5zmwKBgBjYCq9iW/9E+/TqI8sMpI95fK9app5v4AThs3rnAqOa7Ucmrh6V
|
||||
s7Wnui06D8U6r54Tb+EbqTOpM3Gcl/tRg4FLEA5yTfuA/76Ok1D04Tj+mVsNVPyz
|
||||
dQhgMUe835WGusroA12df2V/x5NjNeYyMdJZMQ2ByyrNQAjAbMmCGq+5AoGBAIj8
|
||||
ERFysMOfxUvg9b7CkDFJrsAhOzew86P2vYGfIHchGTqUkG0LRTDFGrnzxNXsBGjY
|
||||
+DUB40Kajx7IkTETxC0jvA1ceq23l/VjPrZVQt0YiC+a+rCyNn7SYkyHxsfTVr9b
|
||||
ea0BZyDXMntyJrPbkjL6Ik8tDE9pLwuOU84ISJ5fAoGAZ2+Ams/VhdZj/wpRpMky
|
||||
K4jtS4nzbCmJzzTa6vdVV7Kjer5kFxSFFqMrS/FtJ/RxHeHvxdze9dfGu9jIdTKK
|
||||
vSzbyQdHFfZgRkmAKfcoN9u567z7Oc74AQ9UgFEGdEVFQUbfWOevmr8KIPt8nDQK
|
||||
J9HuVfILi1kH0jzDd/64TvA=
|
||||
-----END PRIVATE KEY-----
|
72
checks/postgresql/default.nix
Normal file
72
checks/postgresql/default.nix
Normal file
@ -0,0 +1,72 @@
|
||||
(import ../lib/container-test.nix) ({
|
||||
name = "postgresql";
|
||||
|
||||
nodes.machine =
|
||||
{ self, config, ... }:
|
||||
{
|
||||
imports = [
|
||||
self.nixosModules.clanCore
|
||||
self.clanModules.postgresql
|
||||
self.clanModules.localbackup
|
||||
];
|
||||
clan.postgresql.users.test = { };
|
||||
clan.postgresql.databases.test.create.options.OWNER = "test";
|
||||
clan.postgresql.databases.test.restore.stopOnRestore = [ "sample-service" ];
|
||||
clan.localbackup.targets.hdd.directory = "/mnt/external-disk";
|
||||
|
||||
systemd.services.sample-service = {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
script = ''
|
||||
while true; do
|
||||
echo "Hello, world!"
|
||||
sleep 5
|
||||
done
|
||||
'';
|
||||
};
|
||||
|
||||
environment.systemPackages = [ config.services.postgresql.package ];
|
||||
};
|
||||
testScript =
|
||||
{ nodes, ... }:
|
||||
''
|
||||
start_all()
|
||||
machine.wait_for_unit("postgresql")
|
||||
machine.wait_for_unit("sample-service")
|
||||
# Create a test table
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -c 'CREATE TABLE test (id serial PRIMARY KEY);' test")
|
||||
|
||||
machine.succeed("/run/current-system/sw/bin/localbackup-create >&2")
|
||||
timestamp_before = int(machine.succeed("systemctl show --property=ExecMainStartTimestampMonotonic sample-service | cut -d= -f2").strip())
|
||||
|
||||
machine.succeed("test -e /mnt/external-disk/snapshot.0/machine/var/backup/postgres/test/pg-dump || { echo 'pg-dump not found'; exit 1; }")
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -d test -c 'INSERT INTO test DEFAULT VALUES;'")
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -d test -c 'DROP TABLE test;'")
|
||||
machine.succeed("test -e /var/backup/postgres/test/pg-dump || { echo 'pg-dump not found'; exit 1; }")
|
||||
|
||||
machine.succeed("rm -rf /var/backup/postgres")
|
||||
|
||||
machine.succeed("NAME=/mnt/external-disk/snapshot.0 FOLDERS=/var/backup/postgres/test /run/current-system/sw/bin/localbackup-restore >&2")
|
||||
machine.succeed("test -e /var/backup/postgres/test/pg-dump || { echo 'pg-dump not found'; exit 1; }")
|
||||
|
||||
machine.succeed("""
|
||||
set -x
|
||||
${nodes.machine.clan.core.state.postgresql-test.postRestoreCommand}
|
||||
""")
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -l >&2")
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -d test -c '\dt' >&2")
|
||||
|
||||
timestamp_after = int(machine.succeed("systemctl show --property=ExecMainStartTimestampMonotonic sample-service | cut -d= -f2").strip())
|
||||
assert timestamp_before < timestamp_after, f"{timestamp_before} >= {timestamp_after}: expected sample-service to be restarted after restore"
|
||||
|
||||
# Check that the table is still there
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -d test -c 'SELECT * FROM test;'")
|
||||
output = machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql --csv -c \"SELECT datdba::regrole FROM pg_database WHERE datname = 'test'\"")
|
||||
owner = output.split("\n")[1]
|
||||
assert owner == "test", f"Expected database owner to be 'test', got '{owner}'"
|
||||
|
||||
# check if restore works if the database does not exist
|
||||
machine.succeed("runuser -u postgres -- dropdb test")
|
||||
machine.succeed("${nodes.machine.clanCore.state.postgresql-test.postRestoreCommand}")
|
||||
machine.succeed("runuser -u postgres -- /run/current-system/sw/bin/psql -d test -c '\dt' >&2")
|
||||
'';
|
||||
})
|
@ -10,8 +10,8 @@
|
||||
environment.etc."group-secret".source = config.sops.secrets.group-secret.path;
|
||||
sops.age.keyFile = "/etc/privkey.age";
|
||||
|
||||
clanCore.clanDir = "${./.}";
|
||||
clanCore.machineName = "machine";
|
||||
clan.core.clanDir = "${./.}";
|
||||
clan.core.machineName = "machine";
|
||||
|
||||
networking.hostName = "machine";
|
||||
};
|
||||
|
@ -12,14 +12,14 @@
|
||||
self.clanModules.syncthing
|
||||
self.nixosModules.clanCore
|
||||
{
|
||||
clanCore.machineName = "introducer";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.machineName = "introducer";
|
||||
clan.core.clanDir = ./.;
|
||||
environment.etc = {
|
||||
"syncthing.pam".source = ./introducer/introducer_test_cert;
|
||||
"syncthing.key".source = ./introducer/introducer_test_key;
|
||||
"syncthing.api".source = ./introducer/introducer_test_api;
|
||||
};
|
||||
clanCore.facts.services.syncthing.secret."syncthing.api".path = "/etc/syncthing.api";
|
||||
clan.core.facts.services.syncthing.secret."syncthing.api".path = "/etc/syncthing.api";
|
||||
services.syncthing.cert = "/etc/syncthing.pam";
|
||||
services.syncthing.key = "/etc/syncthing.key";
|
||||
# Doesn't test zerotier!
|
||||
@ -53,8 +53,8 @@
|
||||
self.clanModules.syncthing
|
||||
self.nixosModules.clanCore
|
||||
{
|
||||
clanCore.machineName = "peer1";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.machineName = "peer1";
|
||||
clan.core.clanDir = ./.;
|
||||
clan.syncthing.introducer = lib.strings.removeSuffix "\n" (
|
||||
builtins.readFile ./introducer/introducer_device_id
|
||||
);
|
||||
@ -75,8 +75,8 @@
|
||||
self.clanModules.syncthing
|
||||
self.nixosModules.clanCore
|
||||
{
|
||||
clanCore.machineName = "peer2";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.machineName = "peer2";
|
||||
clan.core.clanDir = ./.;
|
||||
clan.syncthing.introducer = lib.strings.removeSuffix "\n" (
|
||||
builtins.readFile ./introducer/introducer_device_id
|
||||
);
|
||||
|
@ -14,8 +14,8 @@ import ../lib/test-base.nix (
|
||||
imports = [
|
||||
self.nixosModules.clanCore
|
||||
{
|
||||
clanCore.machineName = "machine";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.machineName = "machine";
|
||||
clan.core.clanDir = ./.;
|
||||
}
|
||||
];
|
||||
services.wayland-proxy-virtwl.enable = true;
|
||||
|
@ -10,8 +10,8 @@
|
||||
self.nixosModules.clanCore
|
||||
self.clanModules.zt-tcp-relay
|
||||
{
|
||||
clanCore.machineName = "machine";
|
||||
clanCore.clanDir = ./.;
|
||||
clan.core.machineName = "machine";
|
||||
clan.core.clanDir = ./.;
|
||||
}
|
||||
];
|
||||
};
|
||||
|
10
clanModules/borgbackup-static/README.md
Normal file
10
clanModules/borgbackup-static/README.md
Normal file
@ -0,0 +1,10 @@
|
||||
Statically configure borgbackup with sane defaults.
|
||||
---
|
||||
This module implements the `borgbackup` backend and implements sane defaults
|
||||
for backup management through `borgbackup` for members of the clan.
|
||||
|
||||
Configure target machines where the backups should be sent to through `targets`.
|
||||
|
||||
Configure machines that should be backuped either through `includeMachines`
|
||||
which will exclusively add the included machines to be backuped, or through
|
||||
`excludeMachines`, which will add every machine except the excluded machine to the backup.
|
101
clanModules/borgbackup-static/default.nix
Normal file
101
clanModules/borgbackup-static/default.nix
Normal file
@ -0,0 +1,101 @@
|
||||
{ lib, config, ... }:
|
||||
let
|
||||
clanDir = config.clan.core.clanDir;
|
||||
machineDir = clanDir + "/machines/";
|
||||
in
|
||||
{
|
||||
imports = [ ../borgbackup ];
|
||||
|
||||
options.clan.borgbackup-static = {
|
||||
excludeMachines = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
example = [ config.clan.core.machineName ];
|
||||
default = [ ];
|
||||
description = ''
|
||||
Machines that should not be backuped.
|
||||
Mutually exclusive with includeMachines.
|
||||
If this is not empty, every other machine except the targets in the clan will be backuped by this module.
|
||||
If includeMachines is set, only the included machines will be backuped.
|
||||
'';
|
||||
};
|
||||
includeMachines = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
example = [ config.clan.core.machineName ];
|
||||
default = [ ];
|
||||
description = ''
|
||||
Machines that should be backuped.
|
||||
Mutually exclusive with excludeMachines.
|
||||
'';
|
||||
};
|
||||
targets = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [ ];
|
||||
description = ''
|
||||
Machines that should act as target machines for backups.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config.services.borgbackup.repos =
|
||||
let
|
||||
machines = builtins.readDir machineDir;
|
||||
borgbackupIpMachinePath = machines: machineDir + machines + "/facts/borgbackup.ssh.pub";
|
||||
filteredMachines =
|
||||
if ((builtins.length config.clan.borgbackup-static.includeMachines) != 0) then
|
||||
lib.filterAttrs (name: _: (lib.elem name config.clan.borgbackup-static.includeMachines)) machines
|
||||
else
|
||||
lib.filterAttrs (name: _: !(lib.elem name config.clan.borgbackup-static.excludeMachines)) machines;
|
||||
machinesMaybeKey = lib.mapAttrsToList (
|
||||
machine: _:
|
||||
let
|
||||
fullPath = borgbackupIpMachinePath machine;
|
||||
in
|
||||
if builtins.pathExists fullPath then machine else null
|
||||
) filteredMachines;
|
||||
machinesWithKey = lib.filter (x: x != null) machinesMaybeKey;
|
||||
hosts = builtins.map (machine: {
|
||||
name = machine;
|
||||
value = {
|
||||
path = "/var/lib/borgbackup/${machine}";
|
||||
authorizedKeys = [ (builtins.readFile (borgbackupIpMachinePath machine)) ];
|
||||
};
|
||||
}) machinesWithKey;
|
||||
in
|
||||
lib.mkIf
|
||||
(builtins.any (
|
||||
target: target == config.clan.core.machineName
|
||||
) config.clan.borgbackup-static.targets)
|
||||
(if (builtins.listToAttrs hosts) != null then builtins.listToAttrs hosts else { });
|
||||
|
||||
config.clan.borgbackup.destinations =
|
||||
let
|
||||
destinations = builtins.map (d: {
|
||||
name = d;
|
||||
value = {
|
||||
repo = "borg@${d}:/var/lib/borgbackup/${config.clan.core.machineName}";
|
||||
};
|
||||
}) config.clan.borgbackup-static.targets;
|
||||
in
|
||||
lib.mkIf (builtins.any (
|
||||
target: target == config.clan.core.machineName
|
||||
) config.clan.borgbackup-static.includeMachines) (builtins.listToAttrs destinations);
|
||||
|
||||
config.assertions = [
|
||||
{
|
||||
assertion =
|
||||
!(
|
||||
((builtins.length config.clan.borgbackup-static.excludeMachines) != 0)
|
||||
&& ((builtins.length config.clan.borgbackup-static.includeMachines) != 0)
|
||||
);
|
||||
message = ''
|
||||
The options:
|
||||
config.clan.borgbackup-static.excludeMachines = [${builtins.toString config.clan.borgbackup-static.excludeMachines}]
|
||||
and
|
||||
config.clan.borgbackup-static.includeMachines = [${builtins.toString config.clan.borgbackup-static.includeMachines}]
|
||||
are mutually exclusive.
|
||||
Use excludeMachines to exclude certain machines and backup the other clan machines.
|
||||
Use include machines to only backup certain machines.
|
||||
'';
|
||||
}
|
||||
];
|
||||
}
|
@ -6,6 +6,27 @@
|
||||
}:
|
||||
let
|
||||
cfg = config.clan.borgbackup;
|
||||
preBackupScript = ''
|
||||
declare -A preCommandErrors
|
||||
|
||||
${lib.concatMapStringsSep "\n" (
|
||||
state:
|
||||
lib.optionalString (state.preBackupCommand != null) ''
|
||||
echo "Running pre-backup command for ${state.name}"
|
||||
if ! ( ${state.preBackupCommand} ) then
|
||||
preCommandErrors["${state.name}"]=1
|
||||
fi
|
||||
''
|
||||
) (lib.attrValues config.clan.core.state)}
|
||||
|
||||
if [[ ''${#preCommandErrors[@]} -gt 0 ]]; then
|
||||
echo "PreBackupCommand failed for the following services:"
|
||||
for state in "''${!preCommandErrors[@]}"; do
|
||||
echo " $state"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
'';
|
||||
in
|
||||
{
|
||||
options.clan.borgbackup.destinations = lib.mkOption {
|
||||
@ -26,9 +47,9 @@ in
|
||||
rsh = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "ssh -i ${
|
||||
config.clanCore.facts.services.borgbackup.secret."borgbackup.ssh".path
|
||||
} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null";
|
||||
defaultText = "ssh -i \${config.clanCore.facts.services.borgbackup.secret.\"borgbackup.ssh\".path} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null";
|
||||
config.clan.core.facts.services.borgbackup.secret."borgbackup.ssh".path
|
||||
} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o IdentitiesOnly=Yes";
|
||||
defaultText = "ssh -i \${config.clan.core.facts.services.borgbackup.secret.\"borgbackup.ssh\".path} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null";
|
||||
description = "the rsh to use for the backup";
|
||||
};
|
||||
};
|
||||
@ -50,21 +71,30 @@ in
|
||||
];
|
||||
|
||||
config = lib.mkIf (cfg.destinations != { }) {
|
||||
systemd.services = lib.mapAttrs' (
|
||||
_: dest:
|
||||
lib.nameValuePair "borgbackup-job-${dest.name}" {
|
||||
# since borgbackup mounts the system read-only, we need to run in a ExecStartPre script, so we can generate additional files.
|
||||
serviceConfig.ExecStartPre = [
|
||||
(''+${pkgs.writeShellScript "borgbackup-job-${dest.name}-pre-backup-commands" preBackupScript}'')
|
||||
];
|
||||
}
|
||||
) cfg.destinations;
|
||||
|
||||
services.borgbackup.jobs = lib.mapAttrs (_: dest: {
|
||||
paths = lib.flatten (map (state: state.folders) (lib.attrValues config.clanCore.state));
|
||||
paths = lib.unique (
|
||||
lib.flatten (map (state: state.folders) (lib.attrValues config.clan.core.state))
|
||||
);
|
||||
exclude = [ "*.pyc" ];
|
||||
repo = dest.repo;
|
||||
environment.BORG_RSH = dest.rsh;
|
||||
compression = "auto,zstd";
|
||||
startAt = "*-*-* 01:00:00";
|
||||
persistentTimer = true;
|
||||
preHook = ''
|
||||
set -x
|
||||
'';
|
||||
|
||||
encryption = {
|
||||
mode = "repokey";
|
||||
passCommand = "cat ${config.clanCore.facts.services.borgbackup.secret."borgbackup.repokey".path}";
|
||||
passCommand = "cat ${config.clan.core.facts.services.borgbackup.secret."borgbackup.repokey".path}";
|
||||
};
|
||||
|
||||
prune.keep = {
|
||||
@ -75,7 +105,7 @@ in
|
||||
};
|
||||
}) cfg.destinations;
|
||||
|
||||
clanCore.facts.services.borgbackup = {
|
||||
clan.core.facts.services.borgbackup = {
|
||||
public."borgbackup.ssh.pub" = { };
|
||||
secret."borgbackup.ssh" = { };
|
||||
secret."borgbackup.repokey" = { };
|
||||
@ -111,7 +141,7 @@ in
|
||||
(pkgs.writeShellScriptBin "borgbackup-restore" ''
|
||||
set -efux
|
||||
cd /
|
||||
IFS=';' read -ra FOLDER <<< "$FOLDERS"
|
||||
IFS=':' read -ra FOLDER <<< "$FOLDERS"
|
||||
job_name=$(echo "$NAME" | ${pkgs.gawk}/bin/awk -F'::' '{print $1}')
|
||||
backup_name=''${NAME#"$job_name"::}
|
||||
if ! command -v borg-job-"$job_name" &> /dev/null; then
|
||||
@ -122,7 +152,7 @@ in
|
||||
'')
|
||||
];
|
||||
|
||||
clanCore.backups.providers.borgbackup = {
|
||||
clan.core.backups.providers.borgbackup = {
|
||||
list = "borgbackup-list";
|
||||
create = "borgbackup-create";
|
||||
restore = "borgbackup-restore";
|
||||
|
@ -5,7 +5,7 @@
|
||||
|
||||
services.maddy =
|
||||
let
|
||||
domain = "${config.clanCore.machineName}.local";
|
||||
domain = "${config.clan.core.machineName}.local";
|
||||
in
|
||||
{
|
||||
enable = true;
|
||||
|
@ -10,5 +10,5 @@ _: {
|
||||
};
|
||||
};
|
||||
|
||||
clanCore.state.ergochat.folders = [ "/var/lib/ergo" ];
|
||||
clan.core.state.ergochat.folders = [ "/var/lib/ergo" ];
|
||||
}
|
||||
|
@ -5,17 +5,21 @@
|
||||
imports = [ ./disk-layouts ];
|
||||
};
|
||||
borgbackup = ./borgbackup;
|
||||
borgbackup-static = ./borgbackup-static;
|
||||
deltachat = ./deltachat;
|
||||
ergochat = ./ergochat;
|
||||
localbackup = ./localbackup;
|
||||
localsend = ./localsend;
|
||||
matrix-synapse = ./matrix-synapse;
|
||||
moonlight = ./moonlight;
|
||||
mumble = ./mumble;
|
||||
postgresql = ./postgresql;
|
||||
root-password = ./root-password;
|
||||
sshd = ./sshd;
|
||||
sunshine = ./sunshine;
|
||||
static-hosts = ./static-hosts;
|
||||
syncthing = ./syncthing;
|
||||
syncthing-static-peers = ./syncthing-static-peers;
|
||||
thelounge = ./thelounge;
|
||||
trusted-nix-caches = ./trusted-nix-caches;
|
||||
user-password = ./user-password;
|
||||
|
@ -6,7 +6,10 @@
|
||||
}:
|
||||
let
|
||||
cfg = config.clan.localbackup;
|
||||
rsnapshotConfig = target: states: ''
|
||||
uniqueFolders = lib.unique (
|
||||
lib.flatten (lib.mapAttrsToList (_name: state: state.folders) config.clan.core.state)
|
||||
);
|
||||
rsnapshotConfig = target: ''
|
||||
config_version 1.2
|
||||
snapshot_root ${target.directory}
|
||||
sync_first 1
|
||||
@ -17,12 +20,6 @@ let
|
||||
cmd_logger ${pkgs.inetutils}/bin/logger
|
||||
cmd_du ${pkgs.coreutils}/bin/du
|
||||
cmd_rsnapshot_diff ${pkgs.rsnapshot}/bin/rsnapshot-diff
|
||||
${lib.optionalString (target.preBackupHook != null) ''
|
||||
cmd_preexec ${pkgs.writeShellScript "preexec.sh" ''
|
||||
set -efu -o pipefail
|
||||
${target.preBackupHook}
|
||||
''}
|
||||
''}
|
||||
|
||||
${lib.optionalString (target.postBackupHook != null) ''
|
||||
cmd_postexec ${pkgs.writeShellScript "postexec.sh" ''
|
||||
@ -31,11 +28,9 @@ let
|
||||
''}
|
||||
''}
|
||||
retain snapshot ${builtins.toString config.clan.localbackup.snapshots}
|
||||
${lib.concatMapStringsSep "\n" (state: ''
|
||||
${lib.concatMapStringsSep "\n" (folder: ''
|
||||
backup ${folder} ${config.networking.hostName}/
|
||||
'') state.folders}
|
||||
'') states}
|
||||
${lib.concatMapStringsSep "\n" (folder: ''
|
||||
backup ${folder} ${config.networking.hostName}/
|
||||
'') uniqueFolders}
|
||||
'';
|
||||
in
|
||||
{
|
||||
@ -129,14 +124,30 @@ in
|
||||
]
|
||||
}
|
||||
${lib.concatMapStringsSep "\n" (target: ''
|
||||
(
|
||||
${mountHook target}
|
||||
echo "Creating backup '${target.name}'"
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target (lib.attrValues config.clanCore.state))}" sync
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target (lib.attrValues config.clanCore.state))}" snapshot
|
||||
)
|
||||
'') (builtins.attrValues cfg.targets)}
|
||||
'')
|
||||
${mountHook target}
|
||||
set -x
|
||||
echo "Creating backup '${target.name}'"
|
||||
|
||||
${lib.optionalString (target.preBackupHook != null) ''
|
||||
(
|
||||
${target.preBackupHook}
|
||||
)
|
||||
''}
|
||||
|
||||
declare -A preCommandErrors
|
||||
${lib.concatMapStringsSep "\n" (
|
||||
state:
|
||||
lib.optionalString (state.preBackupCommand != null) ''
|
||||
echo "Running pre-backup command for ${state.name}"
|
||||
if ! ( ${state.preBackupCommand} ) then
|
||||
preCommandErrors["${state.name}"]=1
|
||||
fi
|
||||
''
|
||||
) (builtins.attrValues config.clan.core.state)}
|
||||
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" sync
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" snapshot
|
||||
'') (builtins.attrValues cfg.targets)}'')
|
||||
(pkgs.writeShellScriptBin "localbackup-list" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
@ -167,6 +178,14 @@ in
|
||||
pkgs.gawk
|
||||
]
|
||||
}
|
||||
if [[ "''${NAME:-}" == "" ]]; then
|
||||
echo "No backup name given via NAME environment variable"
|
||||
exit 1
|
||||
fi
|
||||
if [[ "''${FOLDERS:-}" == "" ]]; then
|
||||
echo "No folders given via FOLDERS environment variable"
|
||||
exit 1
|
||||
fi
|
||||
name=$(awk -F'::' '{print $1}' <<< $NAME)
|
||||
backupname=''${NAME#$name::}
|
||||
|
||||
@ -182,8 +201,9 @@ in
|
||||
exit 1
|
||||
fi
|
||||
|
||||
IFS=';' read -ra FOLDER <<< "$FOLDERS"
|
||||
IFS=':' read -ra FOLDER <<< "''$FOLDERS"
|
||||
for folder in "''${FOLDER[@]}"; do
|
||||
mkdir -p "$folder"
|
||||
rsync -a "$backupname/${config.networking.hostName}$folder/" "$folder"
|
||||
done
|
||||
'')
|
||||
@ -213,7 +233,7 @@ in
|
||||
''
|
||||
) cfg.targets;
|
||||
|
||||
clanCore.backups.providers.localbackup = {
|
||||
clan.core.backups.providers.localbackup = {
|
||||
# TODO list needs to run locally or on the remote machine
|
||||
list = "localbackup-list";
|
||||
create = "localbackup-create";
|
||||
|
@ -18,7 +18,7 @@
|
||||
};
|
||||
|
||||
config = lib.mkIf config.clan.localsend.enable {
|
||||
clanCore.state.localsend.folders = [
|
||||
clan.core.state.localsend.folders = [
|
||||
"/var/localsend"
|
||||
config.clan.localsend.defaultLocation
|
||||
];
|
||||
|
@ -0,0 +1,100 @@
|
||||
From bc199a27f23b0fcf175b116f7cf606c0d22b422a Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= <joerg@thalheim.io>
|
||||
Date: Tue, 11 Jun 2024 11:40:47 +0200
|
||||
Subject: [PATCH 1/2] register_new_matrix_user: add password-file flag
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
getpass in python expects stdin to be a tty, hence we cannot just pipe
|
||||
into register_new_matrix_user. --password-file instead works better and
|
||||
it would also allow the use of stdin if /dev/stdin is passed.
|
||||
|
||||
Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
|
||||
Signed-off-by: Jörg Thalheim <joerg@thalheim.io>
|
||||
---
|
||||
changelog.d/17294.feature | 2 ++
|
||||
debian/register_new_matrix_user.ronn | 9 +++++++--
|
||||
synapse/_scripts/register_new_matrix_user.py | 20 +++++++++++++++-----
|
||||
3 files changed, 24 insertions(+), 7 deletions(-)
|
||||
create mode 100644 changelog.d/17294.feature
|
||||
|
||||
diff --git a/changelog.d/17294.feature b/changelog.d/17294.feature
|
||||
new file mode 100644
|
||||
index 000000000..33aac7b0b
|
||||
--- /dev/null
|
||||
+++ b/changelog.d/17294.feature
|
||||
@@ -0,0 +1,2 @@
|
||||
+`register_new_matrix_user` now supports a --password-file flag, which
|
||||
+is useful for scripting.
|
||||
diff --git a/debian/register_new_matrix_user.ronn b/debian/register_new_matrix_user.ronn
|
||||
index 0410b1f4c..d99e9215a 100644
|
||||
--- a/debian/register_new_matrix_user.ronn
|
||||
+++ b/debian/register_new_matrix_user.ronn
|
||||
@@ -31,8 +31,13 @@ A sample YAML file accepted by `register_new_matrix_user` is described below:
|
||||
Local part of the new user. Will prompt if omitted.
|
||||
|
||||
* `-p`, `--password`:
|
||||
- New password for user. Will prompt if omitted. Supplying the password
|
||||
- on the command line is not recommended. Use the STDIN instead.
|
||||
+ New password for user. Will prompt if this option and `--password-file` are omitted.
|
||||
+ Supplying the password on the command line is not recommended.
|
||||
+ Use `--password-file` if possible.
|
||||
+
|
||||
+ * `--password-file`:
|
||||
+ File containing the new password for user. If set, overrides `--password`.
|
||||
+ This is a more secure alternative to specifying the password on the command line.
|
||||
|
||||
* `-a`, `--admin`:
|
||||
Register new user as an admin. Will prompt if omitted.
|
||||
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
|
||||
index 77a7129ee..972b35e2d 100644
|
||||
--- a/synapse/_scripts/register_new_matrix_user.py
|
||||
+++ b/synapse/_scripts/register_new_matrix_user.py
|
||||
@@ -173,11 +173,18 @@ def main() -> None:
|
||||
default=None,
|
||||
help="Local part of the new user. Will prompt if omitted.",
|
||||
)
|
||||
- parser.add_argument(
|
||||
+ password_group = parser.add_mutually_exclusive_group()
|
||||
+ password_group.add_argument(
|
||||
"-p",
|
||||
"--password",
|
||||
default=None,
|
||||
- help="New password for user. Will prompt if omitted.",
|
||||
+ help="New password for user. Will prompt for a password if "
|
||||
+ "this flag and `--password-file` are both omitted.",
|
||||
+ )
|
||||
+ password_group.add_argument(
|
||||
+ "--password-file",
|
||||
+ default=None,
|
||||
+ help="File containing the new password for user. If set, will override `--password`.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-t",
|
||||
@@ -247,6 +254,11 @@ def main() -> None:
|
||||
print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
+ if args.password_file:
|
||||
+ password = _read_file(args.password_file, "password-file").strip()
|
||||
+ else:
|
||||
+ password = args.password
|
||||
+
|
||||
if args.server_url:
|
||||
server_url = args.server_url
|
||||
elif config is not None:
|
||||
@@ -269,9 +281,7 @@ def main() -> None:
|
||||
if args.admin or args.no_admin:
|
||||
admin = args.admin
|
||||
|
||||
- register_new_user(
|
||||
- args.user, args.password, server_url, secret, admin, args.user_type
|
||||
- )
|
||||
+ register_new_user(args.user, password, server_url, secret, admin, args.user_type)
|
||||
|
||||
|
||||
def _read_file(file_path: Any, config_path: str) -> str:
|
||||
--
|
||||
2.44.1
|
||||
|
@ -0,0 +1,94 @@
|
||||
From 1789416df425d22693b0055a6688d8686e0ee4a1 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= <joerg@thalheim.io>
|
||||
Date: Thu, 13 Jun 2024 14:38:19 +0200
|
||||
Subject: [PATCH 2/2] register-new-matrix-user: add a flag to ignore already
|
||||
existing users
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
This allows to register users in a more declarative and stateless way.
|
||||
|
||||
Signed-off-by: Jörg Thalheim <joerg@thalheim.io>
|
||||
---
|
||||
synapse/_scripts/register_new_matrix_user.py | 22 ++++++++++++++++++--
|
||||
1 file changed, 20 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
|
||||
index 972b35e2d..233e7267d 100644
|
||||
--- a/synapse/_scripts/register_new_matrix_user.py
|
||||
+++ b/synapse/_scripts/register_new_matrix_user.py
|
||||
@@ -52,6 +52,7 @@ def request_registration(
|
||||
user_type: Optional[str] = None,
|
||||
_print: Callable[[str], None] = print,
|
||||
exit: Callable[[int], None] = sys.exit,
|
||||
+ exists_ok: bool = False,
|
||||
) -> None:
|
||||
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
|
||||
|
||||
@@ -97,6 +98,10 @@ def request_registration(
|
||||
r = requests.post(url, json=data)
|
||||
|
||||
if r.status_code != 200:
|
||||
+ response = r.json()
|
||||
+ if exists_ok and response["errcode"] == "M_USER_IN_USE":
|
||||
+ _print("User already exists. Skipping.")
|
||||
+ return
|
||||
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
|
||||
if 400 <= r.status_code < 500:
|
||||
try:
|
||||
@@ -115,6 +120,7 @@ def register_new_user(
|
||||
shared_secret: str,
|
||||
admin: Optional[bool],
|
||||
user_type: Optional[str],
|
||||
+ exists_ok: bool = False,
|
||||
) -> None:
|
||||
if not user:
|
||||
try:
|
||||
@@ -154,7 +160,13 @@ def register_new_user(
|
||||
admin = False
|
||||
|
||||
request_registration(
|
||||
- user, password, server_location, shared_secret, bool(admin), user_type
|
||||
+ user,
|
||||
+ password,
|
||||
+ server_location,
|
||||
+ shared_secret,
|
||||
+ bool(admin),
|
||||
+ user_type,
|
||||
+ exists_ok=exists_ok,
|
||||
)
|
||||
|
||||
|
||||
@@ -173,6 +185,11 @@ def main() -> None:
|
||||
default=None,
|
||||
help="Local part of the new user. Will prompt if omitted.",
|
||||
)
|
||||
+ parser.add_argument(
|
||||
+ "--exists-ok",
|
||||
+ action="store_true",
|
||||
+ help="Do not fail if user already exists.",
|
||||
+ )
|
||||
password_group = parser.add_mutually_exclusive_group()
|
||||
password_group.add_argument(
|
||||
"-p",
|
||||
@@ -192,6 +209,7 @@ def main() -> None:
|
||||
default=None,
|
||||
help="User type as specified in synapse.api.constants.UserTypes",
|
||||
)
|
||||
+
|
||||
admin_group = parser.add_mutually_exclusive_group()
|
||||
admin_group.add_argument(
|
||||
"-a",
|
||||
@@ -281,7 +299,7 @@ def main() -> None:
|
||||
if args.admin or args.no_admin:
|
||||
admin = args.admin
|
||||
|
||||
- register_new_user(args.user, password, server_url, secret, admin, args.user_type)
|
||||
+ register_new_user(args.user, password, server_url, secret, admin, args.user_type, exists_ok=args.exists_ok)
|
||||
|
||||
|
||||
def _read_file(file_path: Any, config_path: str) -> str:
|
||||
--
|
||||
2.44.1
|
||||
|
@ -6,17 +6,93 @@
|
||||
}:
|
||||
let
|
||||
cfg = config.clan.matrix-synapse;
|
||||
nginx-vhost = "matrix.${config.clan.matrix-synapse.domain}";
|
||||
element-web =
|
||||
pkgs.runCommand "element-web-with-config" { nativeBuildInputs = [ pkgs.buildPackages.jq ]; }
|
||||
''
|
||||
cp -r ${pkgs.element-web} $out
|
||||
chmod -R u+w $out
|
||||
jq '."default_server_config"."m.homeserver" = { "base_url": "https://${nginx-vhost}:443", "server_name": "${config.clan.matrix-synapse.domain}" }' \
|
||||
> $out/config.json < ${pkgs.element-web}/config.json
|
||||
ln -s $out/config.json $out/config.${nginx-vhost}.json
|
||||
'';
|
||||
|
||||
# FIXME: This was taken from upstream. Drop this when our patch is upstream
|
||||
synapseCfg = config.services.matrix-synapse;
|
||||
wantedExtras =
|
||||
synapseCfg.extras
|
||||
++ lib.optional (synapseCfg.settings ? oidc_providers) "oidc"
|
||||
++ lib.optional (synapseCfg.settings ? jwt_config) "jwt"
|
||||
++ lib.optional (synapseCfg.settings ? saml2_config) "saml2"
|
||||
++ lib.optional (synapseCfg.settings ? redis) "redis"
|
||||
++ lib.optional (synapseCfg.settings ? sentry) "sentry"
|
||||
++ lib.optional (synapseCfg.settings ? user_directory) "user-search"
|
||||
++ lib.optional (synapseCfg.settings.url_preview_enabled) "url-preview"
|
||||
++ lib.optional (synapseCfg.settings.database.name == "psycopg2") "postgres";
|
||||
in
|
||||
{
|
||||
options.services.matrix-synapse.package = lib.mkOption { readOnly = false; };
|
||||
options.clan.matrix-synapse = {
|
||||
enable = lib.mkEnableOption "Enable matrix-synapse";
|
||||
domain = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = "The domain name of the matrix server";
|
||||
example = "example.com";
|
||||
};
|
||||
users = lib.mkOption {
|
||||
default = { };
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule (
|
||||
{ name, ... }:
|
||||
{
|
||||
options = {
|
||||
name = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = name;
|
||||
description = "The name of the user";
|
||||
};
|
||||
|
||||
admin = lib.mkOption {
|
||||
type = lib.types.bool;
|
||||
default = false;
|
||||
description = "Whether the user should be an admin";
|
||||
};
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
description = "A list of users. Not that only new users will be created and existing ones are not modified.";
|
||||
example.alice = {
|
||||
admin = true;
|
||||
};
|
||||
};
|
||||
};
|
||||
config = lib.mkIf cfg.enable {
|
||||
imports = [
|
||||
../postgresql
|
||||
(lib.mkRemovedOptionModule [
|
||||
"clan"
|
||||
"matrix-synapse"
|
||||
"enable"
|
||||
] "Importing the module will already enable the service.")
|
||||
|
||||
../postgresql
|
||||
];
|
||||
config = {
|
||||
services.matrix-synapse = {
|
||||
package = lib.mkForce (
|
||||
pkgs.matrix-synapse.override {
|
||||
matrix-synapse-unwrapped = pkgs.matrix-synapse.unwrapped.overrideAttrs (_old: {
|
||||
doInstallCheck = false; # too slow, nixpkgs maintainer already run this.
|
||||
patches = [
|
||||
# see: https://github.com/element-hq/synapse/pull/17304
|
||||
./0001-register_new_matrix_user-add-password-file-flag.patch
|
||||
./0002-register-new-matrix-user-add-a-flag-to-ignore-alread.patch
|
||||
];
|
||||
});
|
||||
extras = wantedExtras;
|
||||
plugins = synapseCfg.plugins;
|
||||
}
|
||||
);
|
||||
|
||||
enable = true;
|
||||
settings = {
|
||||
server_name = cfg.domain;
|
||||
@ -29,6 +105,7 @@ in
|
||||
"turn:turn.matrix.org?transport=udp"
|
||||
"turn:turn.matrix.org?transport=tcp"
|
||||
];
|
||||
registration_shared_secret_path = "/run/synapse-registration-shared-secret";
|
||||
listeners = [
|
||||
{
|
||||
port = 8008;
|
||||
@ -49,45 +126,76 @@ in
|
||||
}
|
||||
];
|
||||
};
|
||||
extraConfigFiles = [ "/var/lib/matrix-synapse/registration_shared_secret.yaml" ];
|
||||
};
|
||||
systemd.services.matrix-synapse.serviceConfig.ExecStartPre = [
|
||||
"+${pkgs.writeScript "copy_registration_shared_secret" ''
|
||||
#!/bin/sh
|
||||
cp ${config.clanCore.facts.services.matrix-synapse.secret.synapse-registration_shared_secret.path} /var/lib/matrix-synapse/registration_shared_secret.yaml
|
||||
chown matrix-synapse:matrix-synapse /var/lib/matrix-synapse/registration_shared_secret.yaml
|
||||
chmod 600 /var/lib/matrix-synapse/registration_shared_secret.yaml
|
||||
''}"
|
||||
];
|
||||
|
||||
clanCore.facts.services."matrix-synapse" = {
|
||||
secret."synapse-registration_shared_secret" = { };
|
||||
generator.path = with pkgs; [
|
||||
coreutils
|
||||
pwgen
|
||||
];
|
||||
generator.script = ''
|
||||
echo "registration_shared_secret: $(pwgen -s 32 1)" > "$secrets"/synapse-registration_shared_secret
|
||||
'';
|
||||
};
|
||||
|
||||
services.postgresql.enable = true;
|
||||
# we need to use both ensusureDatabases and initialScript, because the former runs everytime but with the wrong collation
|
||||
services.postgresql = {
|
||||
ensureDatabases = [ "matrix-synapse" ];
|
||||
ensureUsers = [
|
||||
{
|
||||
name = "matrix-synapse";
|
||||
ensureDBOwnership = true;
|
||||
systemd.tmpfiles.settings."01-matrix" = {
|
||||
"/run/synapse-registration-shared-secret" = {
|
||||
C.argument =
|
||||
config.clan.core.facts.services.matrix-synapse.secret.synapse-registration_shared_secret.path;
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "matrix-synapse";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
clan.postgresql.users.matrix-synapse = { };
|
||||
clan.postgresql.databases.matrix-synapse.create.options = {
|
||||
TEMPLATE = "template0";
|
||||
LC_COLLATE = "C";
|
||||
LC_CTYPE = "C";
|
||||
ENCODING = "UTF8";
|
||||
OWNER = "matrix-synapse";
|
||||
};
|
||||
clan.postgresql.databases.matrix-synapse.restore.stopOnRestore = [ "matrix-synapse" ];
|
||||
|
||||
clan.core.facts.services =
|
||||
{
|
||||
"matrix-synapse" = {
|
||||
secret."synapse-registration_shared_secret" = { };
|
||||
generator.path = with pkgs; [
|
||||
coreutils
|
||||
pwgen
|
||||
];
|
||||
generator.script = ''
|
||||
echo -n "$(pwgen -s 32 1)" > "$secrets"/synapse-registration_shared_secret
|
||||
'';
|
||||
};
|
||||
}
|
||||
// lib.mapAttrs' (
|
||||
name: user:
|
||||
lib.nameValuePair "matrix-password-${user.name}" {
|
||||
secret."matrix-password-${user.name}" = { };
|
||||
generator.path = with pkgs; [ xkcdpass ];
|
||||
generator.script = ''
|
||||
xkcdpass -n 4 -d - > "$secrets"/${lib.escapeShellArg "matrix-password-${user.name}"}
|
||||
'';
|
||||
}
|
||||
];
|
||||
initialScript = pkgs.writeText "synapse-init.sql" ''
|
||||
CREATE DATABASE "matrix-synapse"
|
||||
TEMPLATE template0
|
||||
LC_COLLATE = "C"
|
||||
LC_CTYPE = "C";
|
||||
'';
|
||||
};
|
||||
) cfg.users;
|
||||
|
||||
systemd.services.matrix-synapse =
|
||||
let
|
||||
usersScript =
|
||||
''
|
||||
while ! ${pkgs.netcat}/bin/nc -z -v ::1 8008; do
|
||||
if ! kill -0 "$MAINPID"; then exit 1; fi
|
||||
sleep 1;
|
||||
done
|
||||
''
|
||||
+ lib.concatMapStringsSep "\n" (user: ''
|
||||
# only create user if it doesn't exist
|
||||
/run/current-system/sw/bin/matrix-synapse-register_new_matrix_user --exists-ok --password-file ${
|
||||
config.clanCore.facts.services."matrix-password-${user.name}".secret."matrix-password-${user.name}".path
|
||||
} --user "${user.name}" ${if user.admin then "--admin" else "--no-admin"}
|
||||
'') (lib.attrValues cfg.users);
|
||||
in
|
||||
{
|
||||
path = [ pkgs.curl ];
|
||||
serviceConfig.ExecStartPost = [
|
||||
(''+${pkgs.writeShellScript "matrix-synapse-create-users" usersScript}'')
|
||||
];
|
||||
};
|
||||
|
||||
services.nginx = {
|
||||
enable = true;
|
||||
virtualHosts = {
|
||||
@ -102,7 +210,7 @@ in
|
||||
return 200 '${
|
||||
builtins.toJSON {
|
||||
"m.homeserver" = {
|
||||
"base_url" = "https://matrix.${cfg.domain}";
|
||||
"base_url" = "https://${nginx-vhost}";
|
||||
};
|
||||
"m.identity_server" = {
|
||||
"base_url" = "https://vector.im";
|
||||
@ -111,15 +219,12 @@ in
|
||||
}';
|
||||
'';
|
||||
};
|
||||
"matrix.${cfg.domain}" = {
|
||||
${nginx-vhost} = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
locations."/_matrix" = {
|
||||
proxyPass = "http://localhost:8008";
|
||||
};
|
||||
locations."/test".extraConfig = ''
|
||||
return 200 "Hello, world!";
|
||||
'';
|
||||
locations."/_matrix".proxyPass = "http://localhost:8008";
|
||||
locations."/_synapse".proxyPass = "http://localhost:8008";
|
||||
locations."/".root = element-web;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
@ -13,10 +13,10 @@ in
|
||||
systemd.tmpfiles.rules = [
|
||||
"d '/var/lib/moonlight' 0770 'user' 'users' - -"
|
||||
"C '/var/lib/moonlight/moonlight.cert' 0644 'user' 'users' - ${
|
||||
config.clanCore.facts.services.moonlight.secret."moonlight.cert".path or ""
|
||||
config.clan.core.facts.services.moonlight.secret."moonlight.cert".path or ""
|
||||
}"
|
||||
"C '/var/lib/moonlight/moonlight.key' 0644 'user' 'users' - ${
|
||||
config.clanCore.facts.services.moonlight.secret."moonlight.key".path or ""
|
||||
config.clan.core.facts.services.moonlight.secret."moonlight.key".path or ""
|
||||
}"
|
||||
];
|
||||
|
||||
@ -45,7 +45,7 @@ in
|
||||
systemd.user.services.moonlight-join = {
|
||||
description = "Join sunshine hosts";
|
||||
script = ''${ms-accept}/bin/moonlight-sunshine-accept moonlight join --port ${builtins.toString defaultPort} --cert '${
|
||||
config.clanCore.facts.services.moonlight.public."moonlight.cert".value or ""
|
||||
config.clan.core.facts.services.moonlight.public."moonlight.cert".value or ""
|
||||
}' --host fd2e:25da:6035:c98f:cd99:93e0:b9b8:9ca1'';
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
@ -68,7 +68,7 @@ in
|
||||
};
|
||||
};
|
||||
|
||||
clanCore.facts.services.moonlight = {
|
||||
clan.core.facts.services.moonlight = {
|
||||
secret."moonlight.key" = { };
|
||||
secret."moonlight.cert" = { };
|
||||
public."moonlight.cert" = { };
|
||||
|
105
clanModules/mumble/default.nix
Normal file
105
clanModules/mumble/default.nix
Normal file
@ -0,0 +1,105 @@
|
||||
# Clan service module that sets up a murmur (Mumble) server and a wrapped
# mumble client pre-populated with all clan machines as known servers.
{
  lib,
  config,
  pkgs,
  ...
}:
let
  clanDir = config.clan.core.clanDir;
  machineDir = clanDir + "/machines/";
  machinesFileSet = builtins.readDir machineDir;
  machines = lib.mapAttrsToList (name: _: name) machinesFileSet;
  machineJson = builtins.toJSON machines;
  certificateMachinePath = machines: machineDir + "/${machines}" + "/facts/mumble-cert";
  # Machines that have published a mumble-cert fact; others map to null.
  certificatesUnchecked = builtins.map (
    machine:
    let
      fullPath = certificateMachinePath machine;
    in
    if builtins.pathExists fullPath then machine else null
  ) machines;
  certificate = lib.filter (machine: machine != null) certificatesUnchecked;
  machineCert = builtins.map (
    machine: lib.nameValuePair machine (builtins.readFile (certificateMachinePath machine))
  ) certificate;
  machineCertJson = builtins.toJSON machineCert;
in
{
  options.clan.services.mumble = {
    user = lib.mkOption {
      # FIX: lib.types.string is deprecated; lib.types.str is the supported type.
      type = lib.types.str;
      default = "alice";
      description = "The user mumble should be set up for.";
    };
  };

  config = {
    services.murmur = {
      enable = true;
      logDays = -1;
      registerName = config.clan.core.machineName;
      openFirewall = true;
      bonjour = true;
      sslKey = config.clan.core.facts.services.mumble.secret.mumble-key.path;
      sslCert = config.clan.core.facts.services.mumble.public.mumble-cert.path;
    };

    clan.core.state.mumble.folders = [
      "/var/lib/mumble"
      "/var/lib/murmur"
    ];

    systemd.tmpfiles.rules = [
      "d '/var/lib/mumble' 0770 '${config.clan.services.mumble.user}' 'users' - -"
    ];

    environment.systemPackages =
      let
        mumbleCfgDir = "/var/lib/mumble";
        mumbleDatabasePath = "${mumbleCfgDir}/mumble.sqlite";
        mumbleCfgPath = "/var/lib/mumble/mumble_settings.json";
        populate-channels = pkgs.writers.writePython3 "mumble-populate-channels" {
          libraries = [
            pkgs.python3Packages.cryptography
            pkgs.python3Packages.pyopenssl
          ];
          flakeIgnore = [
            # We don't live in the dark ages anymore.
            # Languages like Python that are whitespace heavy will overrun
            # 79 characters..
            "E501"
          ];
        } (builtins.readFile ./mumble-populate-channels.py);
        mumble = pkgs.writeShellScriptBin "mumble" ''
          set -xeu
          mkdir -p ${mumbleCfgDir}
          pushd "${mumbleCfgDir}"
          XDG_DATA_HOME=${mumbleCfgDir}
          XDG_DATA_DIR=${mumbleCfgDir}
          ${populate-channels} --ensure-config '${mumbleCfgPath}' --db-location ${mumbleDatabasePath}
          echo '${machineCertJson}'
          ${populate-channels} --machines '${machineJson}' --username ${config.clan.core.machineName} --db-location ${mumbleDatabasePath}
          ${populate-channels} --servers '${machineCertJson}' --username ${config.clan.core.machineName} --db-location ${mumbleDatabasePath} --cert True
          ${pkgs.mumble}/bin/mumble --config ${mumbleCfgPath} "$@"
          popd
        '';
      in
      [ mumble ];

    clan.core.facts.services.mumble = {
      secret.mumble-key = { };
      public.mumble-cert = { };
      generator.path = [
        pkgs.coreutils
        pkgs.openssl
      ];
      generator.script = ''
        openssl genrsa -out $secrets/mumble-key 2048
        # -subj keeps the request non-interactive (no subject prompt inside the
        # generator); -days extends validity past openssl's 30-day default.
        openssl req -new -x509 -days 3650 -subj '/CN=mumble' -key $secrets/mumble-key -out $facts/mumble-cert
      '';
    };
  };
}
|
249
clanModules/mumble/mumble-populate-channels.py
Normal file
249
clanModules/mumble/mumble-populate-channels.py
Normal file
@ -0,0 +1,249 @@
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
|
||||
def ensure_config(path: str, db_path: str) -> None:
    """Create or update the mumble settings JSON file at *path*.

    Guarantees the file exists and contains at least the default keys,
    pointing the client at the database *db_path*.  Keys already present
    in an existing file win over the defaults.

    NOTE(review): the merge is shallow — an existing "misc" section
    replaces the default one entirely, so "database_location" is NOT
    refreshed for pre-existing configs (see TODO below).

    :param path: location of the mumble settings JSON file
    :param db_path: location of the mumble SQLite database
    """
    # Default JSON structure if the file doesn't exist
    default_json = {
        "misc": {
            "audio_wizard_has_been_shown": True,
            "database_location": db_path,
            "viewed_server_ping_consent_message": True,
        },
        "settings_version": 1,
    }

    # Load the existing config if there is one; otherwise start from the
    # defaults.  (Previously the defaults were written out here and then
    # immediately rewritten with the merged result — one write suffices.)
    if os.path.exists(path):
        with open(path) as file:
            data = json.load(file)
    else:
        data = default_json

    # TODO: make sure to only update the diff
    updated_data = {**default_json, **data}

    # Write the (possibly merged) JSON object back to the file
    with open(path, "w") as file:
        json.dump(updated_data, file, indent=4)
|
||||
|
||||
|
||||
def initialize_database(db_location: str) -> None:
    """Initialize the SQLite database at *db_location*.

    Creates the ``servers`` table if the database or table does not exist
    yet.  Any SQLite error is reported on stdout rather than raised.

    :param db_location: The path to the SQLite database
    """
    connection = sqlite3.connect(db_location)
    try:
        # Connection.execute opens an implicit cursor for us.
        connection.execute(
            """
            CREATE TABLE IF NOT EXISTS servers (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                hostname TEXT NOT NULL,
                port INTEGER NOT NULL,
                username TEXT NOT NULL,
                password TEXT NOT NULL,
                url TEXT
            )
            """
        )
        connection.commit()
    except sqlite3.Error as err:
        print(f"An error occurred while initializing the database: {err}")
    finally:
        connection.close()
|
||||
|
||||
|
||||
def initialize_certificates(
    db_location: str, hostname: str, port: int, digest: str
) -> None:
    """Record a known server certificate digest in the ``cert`` table.

    FIX: the ``cert`` table was never created anywhere (initialize_database
    only creates ``servers``), so every insert failed with "no such table"
    on a fresh database.  We now create it on demand.

    :param db_location: The path to the SQLite database
    :param hostname: hostname (machine name) the certificate belongs to
    :param port: server port the certificate is associated with
    :param digest: hex fingerprint of the certificate
    """
    # Connect to the SQLite database
    conn = sqlite3.connect(db_location)

    try:
        # Create a cursor object
        cursor = conn.cursor()

        # Schema mirrors the columns we insert below; created lazily since
        # initialize_database does not manage this table.
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS cert (
                hostname TEXT NOT NULL,
                port INTEGER NOT NULL,
                digest TEXT
            )
            """
        )

        # TODO: skip the insert if this (hostname, port) digest is already
        # present, analogous to server_check for the servers table.

        # SQL command to insert data into the cert table
        insert_query = """
        INSERT INTO cert (hostname, port, digest)
        VALUES (?, ?, ?)
        """

        # Execute the insert command with the provided data
        cursor.execute(insert_query, (hostname, port, digest))

        # Commit the changes
        conn.commit()

        print("Data has been successfully inserted.")
    except sqlite3.Error as e:
        print(f"An error occurred: {e}")
    finally:
        # Close the connection
        conn.close()
|
||||
|
||||
|
||||
def calculate_digest(cert: str) -> str:
    """Return the hex SHA-1 fingerprint of a PEM-encoded certificate.

    NOTE(review): SHA-1 appears to be the fingerprint scheme the cert
    table expects — confirm against the mumble client before changing.

    :param cert: the certificate as PEM text
    :return: the fingerprint as a lowercase hex string
    """
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes

    pem_bytes = cert.strip().encode("utf-8")
    certificate = x509.load_pem_x509_certificate(pem_bytes, default_backend())
    return certificate.fingerprint(hashes.SHA1()).hex()
|
||||
|
||||
|
||||
def server_check(cursor: sqlite3.Cursor, name: str, hostname: str) -> bool:
    """
    Check if a server with the given name and hostname already exists.

    FIX: the cursor parameter was mis-annotated as ``str``; it is an
    sqlite3 cursor.

    :param cursor: The database cursor
    :param name: The name of the server
    :param hostname: The hostname of the server
    :return: True if the server exists, False otherwise
    """
    check_query = """
    SELECT 1 FROM servers WHERE name = ? AND hostname = ?
    """
    cursor.execute(check_query, (name, hostname))
    return cursor.fetchone() is not None
|
||||
|
||||
|
||||
def insert_server(
    name: str,
    hostname: str,
    port: int,
    username: str,
    password: str,
    url: str,
    db_location: str,
) -> None:
    """
    Inserts a new server record into the servers table.

    Skips the insert (with a message) if a server with the same name and
    hostname is already present.  SQLite errors are reported on stdout
    rather than raised.

    :param name: The name of the server
    :param hostname: The hostname of the server
    :param port: The port number
    :param username: The username
    :param password: The password (stored as given — plain text)
    :param url: The URL (may be None)
    :param db_location: The path to the SQLite database
    """
    # Connect to the SQLite database
    conn = sqlite3.connect(db_location)

    try:
        # Create a cursor object
        cursor = conn.cursor()

        if server_check(cursor, name, hostname):
            print(
                f"Server with name '{name}' and hostname '{hostname}' already exists."
            )
            return

        # SQL command to insert data into the servers table
        insert_query = """
        INSERT INTO servers (name, hostname, port, username, password, url)
        VALUES (?, ?, ?, ?, ?, ?)
        """

        # Data to be inserted
        data = (name, hostname, port, username, password, url)

        # Execute the insert command with the provided data
        cursor.execute(insert_query, data)

        # Commit the changes
        conn.commit()

        print("Data has been successfully inserted.")
    except sqlite3.Error as e:
        print(f"An error occurred: {e}")
    finally:
        # Close the connection
        conn.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Defaults applied to every server entry we create.
    port = 64738  # value passed for every server; presumably the murmur port — confirm against module config
    password = ""
    url = None

    parser = argparse.ArgumentParser(
        prog="initialize_mumble",
    )

    parser.add_argument("--cert")
    parser.add_argument("--digest")
    parser.add_argument("--machines")
    parser.add_argument("--servers")
    parser.add_argument("--username")
    parser.add_argument("--db-location")
    parser.add_argument("--ensure-config")
    args = parser.parse_args()

    print(args)

    if args.ensure_config:
        ensure_config(args.ensure_config, args.db_location)
        print("Initialized config")
        raise SystemExit(0)

    if args.servers:
        print(args.servers)
        servers = json.loads(args.servers)
        db_location = args.db_location
        for server in servers:
            digest = calculate_digest(server.get("value"))
            name = server.get("name")
            initialize_certificates(db_location, name, port, digest)
        print("Initialized certificates")
        raise SystemExit(0)

    initialize_database(args.db_location)

    # FIX: json.loads(f"{args.machines}") crashed with a JSONDecodeError
    # when --machines was omitted (it parsed the string "None").
    if not args.machines:
        parser.error("--machines is required unless --ensure-config or --servers is given")

    # Insert the server into the database
    print(args.machines)
    machines = json.loads(args.machines)
    print(machines)
    print(list(machines))

    for machine in list(machines):
        print(f"Inserting {machine}.")
        insert_server(
            machine,
            machine,
            port,
            args.username,
            password,
            url,
            args.db_location,
        )
|
42
clanModules/mumble/test.nix
Normal file
42
clanModules/mumble/test.nix
Normal file
@ -0,0 +1,42 @@
|
||||
# VM test for the mumble clan module: two peers importing the module.
{ pkgs, self, ... }:
pkgs.nixosTest {
  name = "mumble";
  nodes.peer1 =
    { ... }:
    {
      imports = [
        self.nixosModules.mumble
        self.inputs.clan-core.nixosModules.clanCore
        {
          config = {
            clan.core.machineName = "peer1";
            clan.core.clanDir = ./.;

            documentation.enable = false;
          };
        }
      ];
    };
  nodes.peer2 =
    { ... }:
    {
      imports = [
        self.nixosModules.mumble
        self.inputs.clan-core.nixosModules.clanCore
        {
          config = {
            clan.core.machineName = "peer2";
            clan.core.clanDir = ./.;

            documentation.enable = false;
          };
        }
      ];
    };

  testScript = ''
    start_all()
    # FIX: start_all() only launches the VMs without waiting; wait for both
    # nodes to reach multi-user.target so the test fails if boot breaks.
    peer1.wait_for_unit("multi-user.target")
    peer2.wait_for_unit("multi-user.target")
  '';
}
|
2
clanModules/postgresql/README.md
Normal file
2
clanModules/postgresql/README.md
Normal file
@ -0,0 +1,2 @@
|
||||
A free and open-source relational database management system (RDBMS) emphasizing extensibility and SQL compliance.
|
||||
---
|
182
clanModules/postgresql/default.nix
Normal file
182
clanModules/postgresql/default.nix
Normal file
@ -0,0 +1,182 @@
|
||||
# Clan postgresql module: declarative databases/users with create options,
# plus per-database backup (pg_dump) and restore (pg_restore) wiring.
{
  pkgs,
  lib,
  config,
  ...
}:
let
  # Backup/restore state description for one database: the dump folder,
  # the command producing the dump, and the restore command's name.
  # FIX: renamed from the typo'd "createDatatbaseState".
  createDatabaseState =
    db:
    let
      folder = "/var/backup/postgres/${db.name}";
      current = "${folder}/pg-dump";
      # pg_dump only supports zstd compression from postgres 16 on.
      compression = lib.optionalString (lib.versionAtLeast config.services.postgresql.package.version "16") "--compress=zstd";
    in
    {
      folders = [ folder ];
      preBackupCommand = ''
        export PATH=${
          lib.makeBinPath [
            config.services.postgresql.package
            config.systemd.package
            pkgs.coreutils
            pkgs.util-linux
            pkgs.zstd
          ]
        }
        while [[ "$(systemctl is-active postgresql)" == activating ]]; do
          sleep 1
        done

        mkdir -p "${folder}"
        runuser -u postgres -- pg_dump ${compression} --dbname=${db.name} -Fc -c > "${current}.tmp"
        mv "${current}.tmp" ${current}
      '';
      postRestoreCommand = "postgres-db-restore-command-${db.name}";
    };

  createDatabase = db: ''
    CREATE DATABASE "${db.name}" ${
      lib.concatStringsSep " " (
        lib.mapAttrsToList (name: value: "${name} = '${value}'") db.create.options
      )
    }
  '';
  cfg = config.clan.postgresql;

  userClauses = lib.mapAttrsToList (
    _: user:
    ''$PSQL -tAc "SELECT 1 FROM pg_roles WHERE rolname='${user.name}'" | grep -q 1 || $PSQL -tAc 'CREATE USER "${user.name}"' ''
  ) cfg.users;
  databaseClauses = lib.mapAttrsToList (
    name: db:
    lib.optionalString db.create.enable ''$PSQL -d postgres -c "SELECT 1 FROM pg_database WHERE datname = '${name}'" | grep -q 1 || $PSQL -d postgres -c ${lib.escapeShellArg (createDatabase db)} ''
  ) cfg.databases;
in
{
  options.clan.postgresql = {
    # we are reimplementing ensureDatabase and ensureUser options here to allow to create databases with options
    databases = lib.mkOption {
      default = { };
      type = lib.types.attrsOf (
        lib.types.submodule (
          { name, ... }:
          {
            options = {
              name = lib.mkOption {
                type = lib.types.str;
                default = name;
              };
              # set to false, in case the upstream module uses ensureDatabase option
              create.enable = lib.mkOption {
                type = lib.types.bool;
                default = true;
                description = "Create the database if it does not exist.";
              };
              create.options = lib.mkOption {
                type = lib.types.lazyAttrsOf lib.types.str;
                default = { };
                example = {
                  TEMPLATE = "template0";
                  LC_COLLATE = "C";
                  LC_CTYPE = "C";
                  ENCODING = "UTF8";
                  OWNER = "foo";
                };
              };
              restore.stopOnRestore = lib.mkOption {
                type = lib.types.listOf lib.types.str;
                default = [ ];
                description = "List of services to stop before restoring the database.";
              };
            };
          }
        )
      );
    };
    users = lib.mkOption {
      default = { };
      type = lib.types.attrsOf (
        lib.types.submodule (
          { name, ... }:
          {
            options.name = lib.mkOption {
              type = lib.types.str;
              default = name;
            };
          }
        )
      );
    };
  };
  config = {
    services.postgresql.settings = {
      wal_level = "replica";
      max_wal_senders = 3;
    };

    services.postgresql.enable = true;
    # We are duplicating a bit the upstream module but allow to create databases with options
    systemd.services.postgresql.postStart = ''
      PSQL="psql --port=${builtins.toString config.services.postgresql.settings.port}"

      while ! $PSQL -d postgres -c "" 2> /dev/null; do
        if ! kill -0 "$MAINPID"; then exit 1; fi
        sleep 0.1
      done
      ${lib.concatStringsSep "\n" userClauses}
      ${lib.concatStringsSep "\n" databaseClauses}
    '';

    clan.core.state = lib.mapAttrs' (
      _: db: lib.nameValuePair "postgresql-${db.name}" (createDatabaseState db)
    ) config.clan.postgresql.databases;

    environment.systemPackages = builtins.map (
      db:
      let
        folder = "/var/backup/postgres/${db.name}";
        current = "${folder}/pg-dump";
      in
      pkgs.writeShellScriptBin "postgres-db-restore-command-${db.name}" ''
        export PATH=${
          lib.makeBinPath [
            config.services.postgresql.package
            config.systemd.package
            pkgs.coreutils
            pkgs.util-linux
            pkgs.zstd
            pkgs.gnugrep
          ]
        }
        while [[ "$(systemctl is-active postgresql)" == activating ]]; do
          sleep 1
        done
        echo "Waiting for postgres to be ready..."
        while ! runuser -u postgres -- psql --port=${builtins.toString config.services.postgresql.settings.port} -d postgres -c "" ; do
          if ! systemctl is-active postgresql; then exit 1; fi
          sleep 0.1
        done

        if [[ -e "${current}" ]]; then
          (
            ${
              lib.optionalString (db.restore.stopOnRestore != [ ]) ''
                systemctl stop ${builtins.toString db.restore.stopOnRestore}
                trap "systemctl start ${builtins.toString db.restore.stopOnRestore}" EXIT
              ''
            }

            mkdir -p "${folder}"
            if runuser -u postgres -- psql -d postgres -c "SELECT 1 FROM pg_database WHERE datname = '${db.name}'" | grep -q 1; then
              runuser -u postgres -- dropdb "${db.name}"
            fi
            runuser -u postgres -- pg_restore -C -d postgres "${current}"
          )
        else
          echo No database backup found, skipping restore
        fi
      ''
    ) (builtins.attrValues config.clan.postgresql.databases);
  };
}
|
@ -2,9 +2,9 @@
|
||||
{
|
||||
users.mutableUsers = false;
|
||||
users.users.root.hashedPasswordFile =
|
||||
config.clanCore.facts.services.root-password.secret.password-hash.path;
|
||||
sops.secrets."${config.clanCore.machineName}-password-hash".neededForUsers = true;
|
||||
clanCore.facts.services.root-password = {
|
||||
config.clan.core.facts.services.root-password.secret.password-hash.path;
|
||||
sops.secrets."${config.clan.core.machineName}-password-hash".neededForUsers = true;
|
||||
clan.core.facts.services.root-password = {
|
||||
secret.password = { };
|
||||
secret.password-hash = { };
|
||||
generator.path = with pkgs; [
|
||||
|
@ -5,12 +5,12 @@
|
||||
|
||||
services.openssh.hostKeys = [
|
||||
{
|
||||
path = config.clanCore.facts.services.openssh.secret."ssh.id_ed25519".path;
|
||||
path = config.clan.core.facts.services.openssh.secret."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
];
|
||||
|
||||
clanCore.facts.services.openssh = {
|
||||
clan.core.facts.services.openssh = {
|
||||
secret."ssh.id_ed25519" = { };
|
||||
public."ssh.id_ed25519.pub" = { };
|
||||
generator.path = [
|
||||
|
@ -4,7 +4,7 @@
|
||||
excludeHosts = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default =
|
||||
if config.clan.static-hosts.topLevelDomain != "" then [ ] else [ config.clanCore.machineName ];
|
||||
if config.clan.static-hosts.topLevelDomain != "" then [ ] else [ config.clan.core.machineName ];
|
||||
description = "Hosts that should be excluded";
|
||||
};
|
||||
topLevelDomain = lib.mkOption {
|
||||
@ -16,13 +16,23 @@
|
||||
|
||||
config.networking.hosts =
|
||||
let
|
||||
clanDir = config.clanCore.clanDir;
|
||||
clanDir = config.clan.core.clanDir;
|
||||
machineDir = clanDir + "/machines/";
|
||||
zerotierIpMachinePath = machines: machineDir + machines + "/facts/zerotier-ip";
|
||||
machines = builtins.readDir machineDir;
|
||||
machinesFileSet = builtins.readDir machineDir;
|
||||
machines = lib.mapAttrsToList (name: _: name) machinesFileSet;
|
||||
networkIpsUnchecked = builtins.map (
|
||||
machine:
|
||||
let
|
||||
fullPath = zerotierIpMachinePath machine;
|
||||
in
|
||||
if builtins.pathExists fullPath then machine else null
|
||||
) machines;
|
||||
networkIps = lib.filter (machine: machine != null) networkIpsUnchecked;
|
||||
machinesWithIp = lib.filterAttrs (name: _: (lib.elem name networkIps)) machinesFileSet;
|
||||
filteredMachines = lib.filterAttrs (
|
||||
name: _: !(lib.elem name config.clan.static-hosts.excludeHosts)
|
||||
) machines;
|
||||
) machinesWithIp;
|
||||
in
|
||||
lib.filterAttrs (_: value: value != null) (
|
||||
lib.mapAttrs' (
|
||||
@ -38,7 +48,7 @@
|
||||
[ "${machine}.${config.clan.static-hosts.topLevelDomain}" ]
|
||||
)
|
||||
else
|
||||
null
|
||||
{ }
|
||||
) filteredMachines
|
||||
);
|
||||
}
|
||||
|
@ -97,10 +97,10 @@ in
|
||||
systemd.tmpfiles.rules = [
|
||||
"d '/var/lib/sunshine' 0770 'user' 'users' - -"
|
||||
"C '/var/lib/sunshine/sunshine.cert' 0644 'user' 'users' - ${
|
||||
config.clanCore.facts.services.sunshine.secret."sunshine.cert".path or ""
|
||||
config.clan.core.facts.services.sunshine.secret."sunshine.cert".path or ""
|
||||
}"
|
||||
"C '/var/lib/sunshine/sunshine.key' 0644 'user' 'users' - ${
|
||||
config.clanCore.facts.services.sunshine.secret."sunshine.key".path or ""
|
||||
config.clan.core.facts.services.sunshine.secret."sunshine.key".path or ""
|
||||
}"
|
||||
];
|
||||
|
||||
@ -117,8 +117,8 @@ in
|
||||
RestartSec = "5s";
|
||||
ReadWritePaths = [ "/var/lib/sunshine" ];
|
||||
ReadOnlyPaths = [
|
||||
(config.clanCore.facts.services.sunshine.secret."sunshine.key".path or "")
|
||||
(config.clanCore.facts.services.sunshine.secret."sunshine.cert".path or "")
|
||||
(config.clan.core.facts.services.sunshine.secret."sunshine.key".path or "")
|
||||
(config.clan.core.facts.services.sunshine.secret."sunshine.cert".path or "")
|
||||
];
|
||||
};
|
||||
wantedBy = [ "graphical-session.target" ];
|
||||
@ -137,7 +137,7 @@ in
|
||||
startLimitIntervalSec = 500;
|
||||
script = ''
|
||||
${ms-accept}/bin/moonlight-sunshine-accept sunshine init-state --uuid ${
|
||||
config.clanCore.facts.services.sunshine.public.sunshine-uuid.value or null
|
||||
config.clan.core.facts.services.sunshine.public.sunshine-uuid.value or null
|
||||
} --state-file /var/lib/sunshine/state.json
|
||||
'';
|
||||
serviceConfig = {
|
||||
@ -173,9 +173,9 @@ in
|
||||
startLimitIntervalSec = 500;
|
||||
script = ''
|
||||
${ms-accept}/bin/moonlight-sunshine-accept sunshine listen --port ${builtins.toString listenPort} --uuid ${
|
||||
config.clanCore.facts.services.sunshine.public.sunshine-uuid.value or null
|
||||
config.clan.core.facts.services.sunshine.public.sunshine-uuid.value or null
|
||||
} --state /var/lib/sunshine/state.json --cert '${
|
||||
config.clanCore.facts.services.sunshine.public."sunshine.cert".value or null
|
||||
config.clan.core.facts.services.sunshine.public."sunshine.cert".value or null
|
||||
}'
|
||||
'';
|
||||
serviceConfig = {
|
||||
@ -187,7 +187,7 @@ in
|
||||
wantedBy = [ "graphical-session.target" ];
|
||||
};
|
||||
|
||||
clanCore.facts.services.ergochat = {
|
||||
clan.core.facts.services.ergochat = {
|
||||
secret."sunshine.key" = { };
|
||||
secret."sunshine.cert" = { };
|
||||
public."sunshine-uuid" = { };
|
||||
|
2
clanModules/syncthing-static-peers/README.md
Normal file
2
clanModules/syncthing-static-peers/README.md
Normal file
@ -0,0 +1,2 @@
|
||||
Statically configure syncthing peers through clan
|
||||
---
|
108
clanModules/syncthing-static-peers/default.nix
Normal file
108
clanModules/syncthing-static-peers/default.nix
Normal file
@ -0,0 +1,108 @@
|
||||
# Statically configure syncthing peers from the clan's published facts.
{
  lib,
  config,
  pkgs,
  ...
}:
let
  clanDir = config.clan.core.clanDir;
  machineDir = clanDir + "/machines/";
  syncthingPublicKeyPath = machines: machineDir + machines + "/facts/syncthing.pub";
  machinesFileSet = builtins.readDir machineDir;
  machines = lib.mapAttrsToList (name: _: name) machinesFileSet;
  # Machines that have published a syncthing.pub fact; others map to null.
  syncthingPublicKeysUnchecked = builtins.map (
    machine:
    let
      fullPath = syncthingPublicKeyPath machine;
    in
    if builtins.pathExists fullPath then machine else null
  ) machines;
  syncthingPublicKeyMachines = lib.filter (machine: machine != null) syncthingPublicKeysUnchecked;
  zerotierIpMachinePath = machines: machineDir + machines + "/facts/zerotier-ip";
  networkIpsUnchecked = builtins.map (
    machine:
    let
      fullPath = zerotierIpMachinePath machine;
    in
    if builtins.pathExists fullPath then machine else null
  ) machines;
  networkIpMachines = lib.filter (machine: machine != null) networkIpsUnchecked;
  # FIX: the excludeMachines option previously had no effect; honor it here.
  includedMachines = lib.filter (
    machine: !(lib.elem machine config.clan.syncthing-static-peers.excludeMachines)
  ) syncthingPublicKeyMachines;
  devices = builtins.map (machine: {
    name = machine;
    value = {
      name = machine;
      id = (lib.removeSuffix "\n" (builtins.readFile (syncthingPublicKeyPath machine)));
      # Always allow dynamic discovery; add a static zerotier address when
      # the machine has published a zerotier-ip fact.
      addresses =
        [ "dynamic" ]
        ++ (
          if (lib.elem machine networkIpMachines) then
            [ "tcp://[${(lib.removeSuffix "\n" (builtins.readFile (zerotierIpMachinePath machine)))}]:22000" ]
          else
            [ ]
        );
    };
  }) includedMachines;
in
{
  options.clan.syncthing-static-peers = {
    excludeMachines = lib.mkOption {
      type = lib.types.listOf lib.types.str;
      example = [ config.clan.core.machineName ];
      default = [ ];
      description = ''
        Machines that should not be added.
      '';
    };
  };

  config.services.syncthing.settings.devices = (builtins.listToAttrs devices);

  imports = [
    {
      # Syncthing ports: 8384 for remote access to GUI
      # 22000 TCP and/or UDP for sync traffic
      # 21027/UDP for discovery
      # source: https://docs.syncthing.net/users/firewall.html
      networking.firewall.interfaces."zt+".allowedTCPPorts = [
        8384
        22000
      ];
      networking.firewall.allowedTCPPorts = [ 8384 ];
      networking.firewall.interfaces."zt+".allowedUDPPorts = [
        22000
        21027
      ];

      # Activates inotify compatibility on syncthing
      # use mkOverride 900 here as it otherwise would collide with the default of the
      # upstream nixos xserver.nix
      boot.kernel.sysctl."fs.inotify.max_user_watches" = lib.mkOverride 900 524288;

      services.syncthing = {
        enable = true;
        configDir = "/var/lib/syncthing";
        group = "syncthing";

        key = lib.mkDefault config.clan.core.facts.services.syncthing.secret."syncthing.key".path or null;
        cert = lib.mkDefault config.clan.core.facts.services.syncthing.secret."syncthing.cert".path or null;
      };

      clan.core.facts.services.syncthing = {
        secret."syncthing.key" = { };
        secret."syncthing.cert" = { };
        public."syncthing.pub" = { };
        generator.path = [
          pkgs.coreutils
          pkgs.gnugrep
          pkgs.syncthing
        ];
        generator.script = ''
          syncthing generate --config "$secrets"
          mv "$secrets"/key.pem "$secrets"/syncthing.key
          mv "$secrets"/cert.pem "$secrets"/syncthing.cert
          cat "$secrets"/config.xml | grep -oP '(?<=<device id=")[^"]+' | uniq > "$facts"/syncthing.pub
        '';
      };
    }
  ];
}
|
@ -9,8 +9,8 @@
|
||||
id = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
example = "BABNJY4-G2ICDLF-QQEG7DD-N3OBNGF-BCCOFK6-MV3K7QJ-2WUZHXS-7DTW4AS";
|
||||
default = config.clanCore.facts.services.syncthing.public."syncthing.pub".value or null;
|
||||
defaultText = "config.clanCore.facts.services.syncthing.public.\"syncthing.pub\".value";
|
||||
default = config.clan.core.facts.services.syncthing.public."syncthing.pub".value or null;
|
||||
defaultText = "config.clan.core.facts.services.syncthing.public.\"syncthing.pub\".value";
|
||||
};
|
||||
introducer = lib.mkOption {
|
||||
description = ''
|
||||
@ -119,7 +119,7 @@
|
||||
getPendingDevices = "/rest/cluster/pending/devices";
|
||||
postNewDevice = "/rest/config/devices";
|
||||
SharedFolderById = "/rest/config/folders/";
|
||||
apiKey = config.clanCore.facts.services.syncthing.secret."syncthing.api".path or null;
|
||||
apiKey = config.clan.core.facts.services.syncthing.secret."syncthing.api".path or null;
|
||||
in
|
||||
lib.mkIf config.clan.syncthing.autoAcceptDevices {
|
||||
description = "Syncthing auto accept devices";
|
||||
@ -161,7 +161,7 @@
|
||||
|
||||
systemd.services.syncthing-init-api-key =
|
||||
let
|
||||
apiKey = config.clanCore.facts.services.syncthing.secret."syncthing.api".path or null;
|
||||
apiKey = config.clan.core.facts.services.syncthing.secret."syncthing.api".path or null;
|
||||
in
|
||||
lib.mkIf config.clan.syncthing.autoAcceptDevices {
|
||||
description = "Set the api key";
|
||||
@ -183,7 +183,7 @@
|
||||
};
|
||||
};
|
||||
|
||||
clanCore.facts.services.syncthing = {
|
||||
clan.core.facts.services.syncthing = {
|
||||
secret."syncthing.key" = { };
|
||||
secret."syncthing.cert" = { };
|
||||
secret."syncthing.api" = { };
|
||||
|
@ -11,5 +11,5 @@ _: {
|
||||
};
|
||||
};
|
||||
|
||||
clanCore.state.thelounde.folders = [ "/var/lib/thelounge" ];
|
||||
clan.core.state.thelounde.folders = [ "/var/lib/thelounge" ];
|
||||
}
|
||||
|
@ -22,9 +22,9 @@
|
||||
config = {
|
||||
users.mutableUsers = false;
|
||||
users.users.${config.clan.user-password.user}.hashedPasswordFile =
|
||||
config.clanCore.facts.services.user-password.secret.user-password-hash.path;
|
||||
sops.secrets."${config.clanCore.machineName}-user-password-hash".neededForUsers = true;
|
||||
clanCore.facts.services.user-password = {
|
||||
config.clan.core.facts.services.user-password.secret.user-password-hash.path;
|
||||
sops.secrets."${config.clan.core.machineName}-user-password-hash".neededForUsers = true;
|
||||
clan.core.facts.services.user-password = {
|
||||
secret.user-password = { };
|
||||
secret.user-password-hash = { };
|
||||
generator.prompt = (
|
||||
|
@ -2,11 +2,11 @@
|
||||
lib,
|
||||
config,
|
||||
pkgs,
|
||||
inputs,
|
||||
self,
|
||||
...
|
||||
}:
|
||||
let
|
||||
clanDir = config.clanCore.clanDir;
|
||||
clanDir = config.clan.core.clanDir;
|
||||
machineDir = clanDir + "/machines/";
|
||||
machinesFileSet = builtins.readDir machineDir;
|
||||
machines = lib.mapAttrsToList (name: _: name) machinesFileSet;
|
||||
@ -20,7 +20,7 @@ let
|
||||
if builtins.pathExists fullPath then builtins.readFile fullPath else null
|
||||
) machines;
|
||||
networkIds = lib.filter (machine: machine != null) networkIdsUnchecked;
|
||||
networkId = builtins.elemAt networkIds 0;
|
||||
networkId = if builtins.length networkIds == 0 then null else builtins.elemAt networkIds 0;
|
||||
in
|
||||
#TODO:trace on multiple found network-ids
|
||||
#TODO:trace on no single found networkId
|
||||
@ -28,41 +28,48 @@ in
|
||||
options.clan.zerotier-static-peers = {
|
||||
excludeHosts = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [ config.clanCore.machineName ];
|
||||
default = [ config.clan.core.machineName ];
|
||||
description = "Hosts that should be excluded";
|
||||
};
|
||||
};
|
||||
|
||||
config.systemd.services.zerotier-static-peers-autoaccept =
|
||||
let
|
||||
machines = builtins.readDir machineDir;
|
||||
zerotierIpMachinePath = machines: machineDir + machines + "/facts/zerotier-ip";
|
||||
filteredMachines = lib.filterAttrs (
|
||||
name: _: !(lib.elem name config.clan.static-hosts.excludeHosts)
|
||||
networkIpsUnchecked = builtins.map (
|
||||
machine:
|
||||
let
|
||||
fullPath = zerotierIpMachinePath machine;
|
||||
in
|
||||
if builtins.pathExists fullPath then machine else null
|
||||
) machines;
|
||||
networkIps = lib.filter (machine: machine != null) networkIpsUnchecked;
|
||||
machinesWithIp = lib.filterAttrs (name: _: (lib.elem name networkIps)) machinesFileSet;
|
||||
filteredMachines = lib.filterAttrs (
|
||||
name: _: !(lib.elem name config.clan.zerotier-static-peers.excludeHosts)
|
||||
) machinesWithIp;
|
||||
hosts = lib.mapAttrsToList (host: _: host) (
|
||||
lib.mapAttrs' (
|
||||
machine: _:
|
||||
let
|
||||
fullPath = zerotierIpMachinePath machine;
|
||||
in
|
||||
if builtins.pathExists fullPath then
|
||||
lib.nameValuePair (builtins.readFile fullPath) [ machine ]
|
||||
else
|
||||
null
|
||||
lib.nameValuePair (builtins.readFile fullPath) [ machine ]
|
||||
) filteredMachines
|
||||
);
|
||||
in
|
||||
lib.mkIf (config.clan.networking.zerotier.controller.enable) {
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "zerotierone.service" ];
|
||||
path = [ pkgs.zerotierone ];
|
||||
path = [
|
||||
# pkgs.zerotierone
|
||||
# TODO: debug why inputs is missing in path
|
||||
self.packages.${pkgs.system}.zerotierone
|
||||
];
|
||||
serviceConfig.ExecStart = pkgs.writeScript "static-zerotier-peers-autoaccept" ''
|
||||
#!/bin/sh
|
||||
${lib.concatMapStringsSep "\n" (host: ''
|
||||
${
|
||||
inputs.clan-core.packages.${pkgs.system}.zerotier-members
|
||||
}/bin/zerotier-members allow --member-ip ${host}
|
||||
${self.packages.${pkgs.system}.zerotier-members}/bin/zerotier-members allow --member-ip ${host}
|
||||
'') hosts}
|
||||
'';
|
||||
};
|
||||
|
@ -121,7 +121,7 @@ git+file:///home/lhebendanz/Projects/clan-core
|
||||
│ ├───clan-cli omitted (use '--all-systems' to show)
|
||||
│ ├───clan-cli-docs omitted (use '--all-systems' to show)
|
||||
│ ├───clan-ts-api omitted (use '--all-systems' to show)
|
||||
│ ├───clan-vm-manager omitted (use '--all-systems' to show)
|
||||
│ ├───clan-app omitted (use '--all-systems' to show)
|
||||
│ ├───default omitted (use '--all-systems' to show)
|
||||
│ ├───deploy-docs omitted (use '--all-systems' to show)
|
||||
│ ├───docs omitted (use '--all-systems' to show)
|
||||
|
@ -51,6 +51,7 @@ nav:
|
||||
- Flake-parts: getting-started/flake-parts.md
|
||||
- Modules:
|
||||
- Clan Modules:
|
||||
- reference/clanModules/borgbackup-static.md
|
||||
- reference/clanModules/borgbackup.md
|
||||
- reference/clanModules/deltachat.md
|
||||
- reference/clanModules/disk-layouts.md
|
||||
@ -59,12 +60,15 @@ nav:
|
||||
- reference/clanModules/localsend.md
|
||||
- reference/clanModules/matrix-synapse.md
|
||||
- reference/clanModules/moonlight.md
|
||||
- reference/clanModules/postgresql.md
|
||||
- reference/clanModules/root-password.md
|
||||
- reference/clanModules/sshd.md
|
||||
- reference/clanModules/sunshine.md
|
||||
- reference/clanModules/syncthing.md
|
||||
- reference/clanModules/static-hosts.md
|
||||
- reference/clanModules/sunshine.md
|
||||
- reference/clanModules/syncthing-static-peers.md
|
||||
- reference/clanModules/syncthing.md
|
||||
- reference/clanModules/thelounge.md
|
||||
- reference/clanModules/mumble.md
|
||||
- reference/clanModules/trusted-nix-caches.md
|
||||
- reference/clanModules/user-password.md
|
||||
- reference/clanModules/xfce.md
|
||||
@ -80,6 +84,7 @@ nav:
|
||||
- reference/cli/history.md
|
||||
- reference/cli/machines.md
|
||||
- reference/cli/secrets.md
|
||||
- reference/cli/show.md
|
||||
- reference/cli/ssh.md
|
||||
- reference/cli/vms.md
|
||||
- Clan Core:
|
||||
|
@ -50,7 +50,7 @@
|
||||
};
|
||||
|
||||
module-docs = pkgs.runCommand "rendered" { nativeBuildInputs = [ pkgs.python3 ]; } ''
|
||||
export CLAN_CORE=${jsonDocs.clanCore}/share/doc/nixos/options.json
|
||||
export CLAN_CORE=${jsonDocs.clanCore}/share/doc/nixos/options.json
|
||||
# A file that contains the links to all clanModule docs
|
||||
export CLAN_MODULES=${clanModulesFileInfo}
|
||||
export CLAN_MODULES_READMES=${clanModulesReadmes}
|
||||
|
@ -13,7 +13,7 @@ let
|
||||
|
||||
clanCoreNixosModules = [
|
||||
clanCore
|
||||
{ clanCore.clanDir = ./.; }
|
||||
{ clan.core.clanDir = ./.; }
|
||||
] ++ allNixosModules;
|
||||
|
||||
# TODO: optimally we would not have to evaluate all nixos modules for every page
|
||||
@ -25,6 +25,8 @@ let
|
||||
# improves eval performance slightly (10%)
|
||||
getOptions = modules: (clanCoreNixos.extendModules { inherit modules; }).options;
|
||||
|
||||
getOptionsWithoutCore = modules: builtins.removeAttrs (getOptions modules) [ "core" ];
|
||||
|
||||
evalDocs =
|
||||
options:
|
||||
pkgs.nixosOptionsDoc {
|
||||
@ -34,7 +36,7 @@ let
|
||||
|
||||
# clanModules docs
|
||||
clanModulesDocs = builtins.mapAttrs (
|
||||
name: module: (evalDocs ((getOptions [ module ]).clan.${name} or { })).optionsJSON
|
||||
name: module: (evalDocs ((getOptionsWithoutCore [ module ]).clan.${name} or { })).optionsJSON
|
||||
) clanModules;
|
||||
|
||||
clanModulesReadmes = builtins.mapAttrs (
|
||||
@ -42,7 +44,7 @@ let
|
||||
) clanModules;
|
||||
|
||||
# clanCore docs
|
||||
clanCoreDocs = (evalDocs (getOptions [ ]).clanCore).optionsJSON;
|
||||
clanCoreDocs = (evalDocs (getOptions [ ]).clan.core).optionsJSON;
|
||||
in
|
||||
{
|
||||
inherit clanModulesReadmes;
|
||||
|
@ -137,7 +137,7 @@ To use this module, import it like this:
|
||||
"""
|
||||
|
||||
|
||||
clan_core_descr = """ClanCore delivers all the essential features for every clan.
|
||||
clan_core_descr = """ClanCore delivers all the essential features for every clan.
|
||||
It's always included in your setup, and you can customize your clan's behavior with the configuration [options](#module-options) provided below.
|
||||
|
||||
"""
|
||||
@ -163,7 +163,7 @@ def produce_clan_core_docs() -> None:
|
||||
outfile = f"{module_name}/index.md"
|
||||
|
||||
# Create separate files for nested options
|
||||
if len(option_name.split(".")) <= 2:
|
||||
if len(option_name.split(".")) <= 3:
|
||||
# i.e. clan-core.clanDir
|
||||
output = core_outputs.get(
|
||||
outfile,
|
||||
@ -174,7 +174,7 @@ def produce_clan_core_docs() -> None:
|
||||
core_outputs[outfile] = output
|
||||
else:
|
||||
# Clan sub-options
|
||||
[_, sub] = option_name.split(".")[0:2]
|
||||
[_, sub] = option_name.split(".")[1:3]
|
||||
outfile = f"{module_name}/{sub}.md"
|
||||
# Get the content or write the header
|
||||
output = core_outputs.get(outfile, render_option_header(sub))
|
||||
|
@ -61,7 +61,7 @@ Clan is for anyone and everyone who believes in the power of open source technol
|
||||
|
||||
Ready to control your digital world? Clan is more than a tool—it's a movement. Secure your data, manage your systems easily, or connect with others how you like. Start with Clan for a better digital future.
|
||||
|
||||
Connect with us on our [Matrix channel at clan.lol](https://matrix.to/#/#clan:lassul.us) or through our IRC bridges (coming soon).
|
||||
Connect with us on our [Matrix channel at clan.lol](https://matrix.to/#/#clan:clan.lol) or through our IRC bridges (coming soon).
|
||||
|
||||
Want to see the code? Check it out [on our Gitea](https://git.clan.lol/clan/clan-core) or [on GitHub](https://github.com/clan-lol/clan-core).
|
||||
|
||||
|
@ -190,5 +190,5 @@ We hope these experiments inspire the community, encourage contributions and fur
|
||||
- [Comments on NixOS Discourse](https://discourse.nixos.org/t/introducing-the-nixos-to-json-schema-converter/45948)
|
||||
- [Source Code of the JSON Schema Library](https://git.clan.lol/clan/clan-core/src/branch/main/lib/jsonschema)
|
||||
- [Our Issue Tracker](https://git.clan.lol/clan/clan-core/issues)
|
||||
- [Our Matrix Channel](https://matrix.to/#/#clan:lassul.us)
|
||||
- [Our Matrix Channel](https://matrix.to/#/#clan:clan.lol)
|
||||
- [react-jsonschema-form Playground](https://rjsf-team.github.io/react-jsonschema-form/)
|
||||
|
@ -10,4 +10,4 @@ Last week, we added a new documentation hub for clan at [docs.clan.lol](https://
|
||||
We are still working on improving the installation procedures, so stay tuned.
|
||||
We now have weekly office hours where people are invited to hangout and ask questions.
|
||||
They are every Wednesday 15:30 UTC (17:30 CEST) in our [jitsi](https://jitsi.lassul.us/clan.lol).
|
||||
Otherwise drop by in our [matrix channel](https://matrix.to/#/#clan:lassul.us).
|
||||
Otherwise drop by in our [matrix channel](https://matrix.to/#/#clan:clan.lol).
|
||||
|
@ -98,7 +98,7 @@ Start by indicating where your backup data should be sent. Replace `hostname` wi
|
||||
Decide which folders you want to back up. For example, to backup your home and root directories:
|
||||
|
||||
```nix
|
||||
{ clanCore.state.userdata.folders = [ "/home" "/root" ]; }
|
||||
{ clan.core.state.userdata.folders = [ "/home" "/root" ]; }
|
||||
```
|
||||
|
||||
3. **Generate Backup Credentials:**
|
||||
@ -116,7 +116,7 @@ On the server where backups will be stored, enable the SSH daemon and set up a r
|
||||
services.borgbackup.repos.myhostname = {
|
||||
path = "/var/lib/borgbackup/myhostname";
|
||||
authorizedKeys = [
|
||||
(builtins.readFile ./machines/myhostname/facts/borgbackup.ssh.pub)
|
||||
(builtins.readFile (config.clan.core.clanDir + "/machines/myhostname/facts/borgbackup.ssh.pub"))
|
||||
];
|
||||
};
|
||||
}
|
||||
|
@ -10,7 +10,7 @@ In the `flake.nix` file:
|
||||
|
||||
```nix title="clan-core.lib.buildClan"
|
||||
buildClan {
|
||||
# Set a unique name
|
||||
# Set a unique name
|
||||
clanName = "Lobsters";
|
||||
# Should usually point to the directory of flake.nix
|
||||
directory = ./.;
|
||||
@ -30,7 +30,7 @@ In the `flake.nix` file:
|
||||
|
||||
```nix title="clan-core.flakeModules.default"
|
||||
clan = {
|
||||
# Set a unique name
|
||||
# Set a unique name
|
||||
clanName = "Lobsters";
|
||||
|
||||
machines = {
|
||||
@ -61,13 +61,13 @@ Adding or configuring a new machine requires two simple steps:
|
||||
|
||||
```{.shellSession hl_lines="6" .no-copy}
|
||||
NAME ID-LINK FSTYPE SIZE MOUNTPOINT
|
||||
sda usb-ST_16GB_AA6271026J1000000509-0:0 14.9G
|
||||
├─sda1 usb-ST_16GB_AA6271026J1000000509-0:0-part1 1M
|
||||
sda usb-ST_16GB_AA6271026J1000000509-0:0 14.9G
|
||||
├─sda1 usb-ST_16GB_AA6271026J1000000509-0:0-part1 1M
|
||||
├─sda2 usb-ST_16GB_AA6271026J1000000509-0:0-part2 vfat 100M /boot
|
||||
└─sda3 usb-ST_16GB_AA6271026J1000000509-0:0-part3 ext4 2.9G /
|
||||
nvme0n1 nvme-eui.e8238fa6bf530001001b448b4aec2929 476.9G
|
||||
├─nvme0n1p1 nvme-eui.e8238fa6bf530001001b448b4aec2929-part1 vfat 512M
|
||||
├─nvme0n1p2 nvme-eui.e8238fa6bf530001001b448b4aec2929-part2 ext4 459.6G
|
||||
nvme0n1 nvme-eui.e8238fa6bf530001001b448b4aec2929 476.9G
|
||||
├─nvme0n1p1 nvme-eui.e8238fa6bf530001001b448b4aec2929-part1 vfat 512M
|
||||
├─nvme0n1p2 nvme-eui.e8238fa6bf530001001b448b4aec2929-part2 ext4 459.6G
|
||||
└─nvme0n1p3 nvme-eui.e8238fa6bf530001001b448b4aec2929-part3 swap 16.8G
|
||||
```
|
||||
|
||||
@ -150,10 +150,17 @@ These steps will allow you to update your machine later.
|
||||
Generate the `hardware-configuration.nix` file for your machine by executing the following command:
|
||||
|
||||
```bash
|
||||
ssh root@flash-installer.local nixos-generate-config --no-filesystems --show-hardware-config > machines/jon/hardware-configuration.nix
|
||||
clan machines hw-generate [MACHINE_NAME] [HOSTNAME]
|
||||
```
|
||||
|
||||
This command connects to `flash-installer.local` as `root`, runs `nixos-generate-config` to detect hardware configurations (excluding filesystems), and writes them to `machines/jon/hardware-configuration.nix`.
|
||||
replace `[MACHINE_NAME]` with the name of the machine i.e. `jon` and `[HOSTNAME]` with the `ip_adress` or `hostname` of the machine within the network. i.e. `flash-installer.local`
|
||||
|
||||
!!! Example
|
||||
```bash
|
||||
clan machines hw-generate jon flash-installer.local
|
||||
```
|
||||
|
||||
This command connects to `flash-installer.local` as `root`, runs `nixos-generate-config` to detect hardware configurations (excluding filesystems), and writes them to `machines/jon/hardware-configuration.nix`.
|
||||
|
||||
### Step 3: Custom Disk Formatting
|
||||
|
||||
|
@ -48,7 +48,7 @@ To introduce a new machine to the VPN, adhere to the following steps:
|
||||
configuration, substituting `<CONTROLLER>` with the controller machine name:
|
||||
```nix
|
||||
{ config, ... }: {
|
||||
clan.networking.zerotier.networkId = builtins.readFile (config.clanCore.clanDir + "/machines/<CONTROLLER>/facts/zerotier-network-id");
|
||||
clan.networking.zerotier.networkId = builtins.readFile (config.clan.core.clanDir + "/machines/<CONTROLLER>/facts/zerotier-network-id");
|
||||
}
|
||||
```
|
||||
1. **Update the New Machine**: Execute:
|
||||
|
50
flake.lock
50
flake.lock
@ -7,11 +7,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1717177033,
|
||||
"narHash": "sha256-G3CZJafCO8WDy3dyA2EhpUJEmzd5gMJ2IdItAg0Hijw=",
|
||||
"lastModified": 1717915259,
|
||||
"narHash": "sha256-VsGPboaleIlPELHY5cNTrXK4jHVmgUra8uC6h7KVC5c=",
|
||||
"owner": "nix-community",
|
||||
"repo": "disko",
|
||||
"rev": "0274af4c92531ebfba4a5bd493251a143bc51f3c",
|
||||
"rev": "1bbdb06f14e2621290b250e631cf3d8948e4d19b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -55,22 +55,6 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixos-2311": {
|
||||
"locked": {
|
||||
"lastModified": 1717017538,
|
||||
"narHash": "sha256-S5kltvDDfNQM3xx9XcvzKEOyN2qk8Sa+aSOLqZ+1Ujc=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "64e468fd2652105710d86cd2ae3e65a5a6d58dec",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "release-23.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixos-generators": {
|
||||
"inputs": {
|
||||
"nixlib": "nixlib",
|
||||
@ -94,17 +78,17 @@
|
||||
},
|
||||
"nixos-images": {
|
||||
"inputs": {
|
||||
"nixos-2311": "nixos-2311",
|
||||
"nixos-stable": [],
|
||||
"nixos-unstable": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1717040312,
|
||||
"narHash": "sha256-yI/en4IxuCEClIUpIs3QTyYCCtmSPLOhwLJclfNwdeg=",
|
||||
"lastModified": 1717770332,
|
||||
"narHash": "sha256-NQmFHj0hTCUgnMAsaNTu6sNTRyo0rFQEe+/lVgV5yxU=",
|
||||
"owner": "nix-community",
|
||||
"repo": "nixos-images",
|
||||
"rev": "47bfb55316e105390dd761e0b6e8e0be09462b67",
|
||||
"rev": "72771bd35f4e19e32d6f652528483b5e07fc317b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -115,16 +99,16 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1717298511,
|
||||
"narHash": "sha256-9sXuJn/nL+9ImeYtlspTvjt83z1wIgU+9AwfNbnq+tI=",
|
||||
"lastModified": 1718530797,
|
||||
"narHash": "sha256-pup6cYwtgvzDpvpSCFh1TEUjw2zkNpk8iolbKnyFmmU=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "6634a0509e9e81e980b129435fbbec518ab246d0",
|
||||
"rev": "b60ebf54c15553b393d144357375ea956f89e9a9",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable-small",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@ -148,11 +132,11 @@
|
||||
"nixpkgs-stable": []
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1717297459,
|
||||
"narHash": "sha256-cZC2f68w5UrJ1f+2NWGV9Gx0dEYmxwomWN2B0lx0QRA=",
|
||||
"lastModified": 1717902109,
|
||||
"narHash": "sha256-OQTjaEZcByyVmHwJlKp/8SE9ikC4w+mFd3X0jJs6wiA=",
|
||||
"owner": "Mic92",
|
||||
"repo": "sops-nix",
|
||||
"rev": "ab2a43b0d21d1d37d4d5726a892f714eaeb4b075",
|
||||
"rev": "f0922ad001829b400f0160ba85b47d252fa3d925",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -168,11 +152,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1717278143,
|
||||
"narHash": "sha256-u10aDdYrpiGOLoxzY/mJ9llST9yO8Q7K/UlROoNxzDw=",
|
||||
"lastModified": 1717850719,
|
||||
"narHash": "sha256-npYqVg+Wk4oxnWrnVG7416fpfrlRhp/lQ6wQ4DHI8YE=",
|
||||
"owner": "numtide",
|
||||
"repo": "treefmt-nix",
|
||||
"rev": "3eb96ca1ae9edf792a8e0963cc92fddfa5a87706",
|
||||
"rev": "4fc1c45a5f50169f9f29f6a98a438fb910b834ed",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
@ -7,7 +7,7 @@
|
||||
];
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable-small";
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
disko.url = "github:nix-community/disko";
|
||||
disko.inputs.nixpkgs.follows = "nixpkgs";
|
||||
sops-nix.url = "github:Mic92/sops-nix";
|
||||
@ -17,6 +17,8 @@
|
||||
nixos-generators.inputs.nixpkgs.follows = "nixpkgs";
|
||||
nixos-images.url = "github:nix-community/nixos-images";
|
||||
nixos-images.inputs.nixos-unstable.follows = "nixpkgs";
|
||||
# unused input
|
||||
nixos-images.inputs.nixos-stable.follows = "";
|
||||
flake-parts.url = "github:hercules-ci/flake-parts";
|
||||
flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs";
|
||||
treefmt-nix.url = "github:numtide/treefmt-nix";
|
||||
@ -51,6 +53,8 @@
|
||||
./nixosModules/flake-module.nix
|
||||
./pkgs/flake-module.nix
|
||||
./templates/flake-module.nix
|
||||
|
||||
./inventory/flake-module.nix
|
||||
];
|
||||
}
|
||||
);
|
||||
|
@ -10,10 +10,9 @@
|
||||
treefmt.programs.mypy.enable = true;
|
||||
treefmt.programs.mypy.directories = {
|
||||
"pkgs/clan-cli".extraPythonPackages = self'.packages.clan-cli.testDependencies;
|
||||
"pkgs/clan-vm-manager".extraPythonPackages =
|
||||
# clan-vm-manager currently only exists on linux
|
||||
(self'.packages.clan-vm-manager.externalTestDeps or [ ])
|
||||
++ self'.packages.clan-cli.testDependencies;
|
||||
"pkgs/clan-app".extraPythonPackages =
|
||||
# clan-app currently only exists on linux
|
||||
(self'.packages.clan-app.externalTestDeps or [ ]) ++ self'.packages.clan-cli.testDependencies;
|
||||
};
|
||||
|
||||
treefmt.settings.formatter.nix = {
|
||||
|
5
inventory/.envrc
Normal file
5
inventory/.envrc
Normal file
@ -0,0 +1,5 @@
|
||||
source_up
|
||||
|
||||
watch_file flake-module.nix
|
||||
|
||||
use flake .#inventory-schema --builders ''
|
57
inventory/README.md
Normal file
57
inventory/README.md
Normal file
@ -0,0 +1,57 @@
|
||||
# Inventory
|
||||
|
||||
This part provides a specification for the inventory.
|
||||
|
||||
It is used for design phase and as validation helper.
|
||||
|
||||
> Cue is less verbose and easier to understand and maintain than json-schema.
|
||||
> Json-schema, if needed can be easily generated on-the fly.
|
||||
|
||||
## Checking validity
|
||||
|
||||
Directly check a json against the schema
|
||||
|
||||
`cue vet inventory.json root.cue -d '#Root'`
|
||||
|
||||
## Json schema
|
||||
|
||||
Export the json-schema i.e. for usage in python / javascript / nix
|
||||
|
||||
`cue export --out openapi root.cue`
|
||||
|
||||
## Usage
|
||||
|
||||
Comments are rendered as descriptions in the json schema.
|
||||
|
||||
```cue
|
||||
// A name of the clan (primarily shown by the UI)
|
||||
name: string
|
||||
```
|
||||
|
||||
Cue open sets. In the following `foo = {...}` means that the key `foo` can contain any arbitrary json object.
|
||||
|
||||
```cue
|
||||
foo: { ... }
|
||||
```
|
||||
|
||||
Cue dynamic keys.
|
||||
|
||||
```cue
|
||||
[string]: {
|
||||
attr: string
|
||||
}
|
||||
```
|
||||
|
||||
This is the schema of
|
||||
|
||||
```json
|
||||
{
|
||||
"a": {
|
||||
"attr": "foo"
|
||||
},
|
||||
"b": {
|
||||
"attr": "bar"
|
||||
}
|
||||
// ... Indefinitely more dynamic keys of type "string"
|
||||
}
|
||||
```
|
137
inventory/example_flake.nix
Normal file
137
inventory/example_flake.nix
Normal file
@ -0,0 +1,137 @@
|
||||
{
|
||||
description = "<Put your description here>";
|
||||
|
||||
inputs.clan-core.url = "https://git.clan.lol/clan/clan-core/archive/main.tar.gz";
|
||||
|
||||
outputs =
|
||||
{ clan-core, ... }:
|
||||
let
|
||||
pkgs = clan-core.inputs.nixpkgs.legacyPackages.${system};
|
||||
system = "x86_64-linux";
|
||||
in
|
||||
# Usage see: https://docs.clan.lol
|
||||
# nice_flake_interface -> buildInventory() -> Inventory -> buildClanFromInventory() -> nixosConfigurations
|
||||
# buildClanFromInventory = inventory: evalModules {
|
||||
# extraAttrs = { inherit inventory; };
|
||||
# # (attrNames inventory.machines)
|
||||
# };
|
||||
# clan =
|
||||
# clan-core.lib.buildClanFromInventory [
|
||||
# # Inventory 0 (loads the json file managed by the Python API)
|
||||
# (builtins.fromJSON (builtins.readFile ./inventory.json))
|
||||
# # ->
|
||||
# # {
|
||||
# # services."backups_1".autoIncludeMachines = true;
|
||||
# # services."backups_1".module = "borgbackup";
|
||||
# # ... etc.
|
||||
# # }
|
||||
# ]
|
||||
# ++ (buildInventory {
|
||||
# clanName = "nice_flake_interface";
|
||||
# description = "A nice flake interface";
|
||||
# icon = "assets/icon.png";
|
||||
# machines = {
|
||||
# jon = {
|
||||
# # Just regular nixos/clan configuration ?
|
||||
# # config = {
|
||||
# # imports = [
|
||||
# # ./modules/shared.nix
|
||||
# # ./machines/jon/configuration.nix
|
||||
# # ];
|
||||
# # nixpkgs.hostPlatform = system;
|
||||
# # # Set this for clan commands use ssh i.e. `clan machines update`
|
||||
# # # If you change the hostname, you need to update this line to root@<new-hostname>
|
||||
# # # This only works however if you have avahi running on your admin machine else use IP
|
||||
# # clan.networking.targetHost = pkgs.lib.mkDefault "root@jon";
|
||||
# # # ssh root@flash-installer.local lsblk --output NAME,ID-LINK,FSTYPE,SIZE,MOUNTPOINT
|
||||
# # disko.devices.disk.main = {
|
||||
# # device = "/dev/disk/by-id/__CHANGE_ME__";
|
||||
# # };
|
||||
# # # IMPORTANT! Add your SSH key here
|
||||
# # # e.g. > cat ~/.ssh/id_ed25519.pub
|
||||
# # users.users.root.openssh.authorizedKeys.keys = throw ''
|
||||
# # Don't forget to add your SSH key here!
|
||||
# # users.users.root.openssh.authorizedKeys.keys = [ "<YOUR SSH_KEY>" ]
|
||||
# # '';
|
||||
# # # Zerotier needs one controller to accept new nodes. Once accepted
|
||||
# # # the controller can be offline and routing still works.
|
||||
# # clan.networking.zerotier.controller.enable = true;
|
||||
# # };
|
||||
# };
|
||||
# };
|
||||
# })
|
||||
# ++ [
|
||||
# # Low level inventory overrides (comes at the end)
|
||||
# {
|
||||
# services."backups_2".autoIncludeMachines = true;
|
||||
# services."backups_2".module = "borgbackup";
|
||||
# }
|
||||
# ];
|
||||
# # buildClan :: [ Partial<Inventory> ] -> Inventory
|
||||
# # foldl' (acc: v: lib.recursiveUpdate acc v) {} []
|
||||
# inventory = [
|
||||
# # import json
|
||||
# {...}
|
||||
# # power user flake
|
||||
# {...}
|
||||
# ]
|
||||
# # With Module system
|
||||
# # Pros: Easy to understand,
|
||||
# # Cons: Verbose, hard to maintain
|
||||
# # buildClan :: { modules = [ { config = Partial<Inventory>; options :: InventoryOptions; } } ]; } -> Inventory
|
||||
# eval = lib.evalModules {
|
||||
# modules = [
|
||||
# {
|
||||
# # Inventory Schema
|
||||
# # Python validation
|
||||
# options = {...}
|
||||
# }
|
||||
# {
|
||||
# config = map lib.mkDefault
|
||||
# (builtins.fromJSON (builtins.readFile ./inventory.json))
|
||||
# }
|
||||
# {
|
||||
# # User provided
|
||||
# config = {...}
|
||||
# }
|
||||
# # Later overrides.
|
||||
# {
|
||||
# lib.mkForce ...
|
||||
# }
|
||||
# ];
|
||||
# }
|
||||
# nixosConfigurations = lib.evalModules inventory;
|
||||
# eval.config.inventory
|
||||
# #
|
||||
# eval.config.machines.jon#nixosConfig
|
||||
# eval.config.machines.sara#nixosConfig
|
||||
#
|
||||
# {inventory, config, ...}:{
|
||||
# hostname = config.machines.sara # Invalid
|
||||
# hostname = inventory.machines.sara.hostname # Valid
|
||||
# }
|
||||
/*
|
||||
# Type
|
||||
|
||||
buildInventory :: {
|
||||
clanName :: string
|
||||
machines :: {
|
||||
${name} :: {
|
||||
config :: {
|
||||
# NixOS configuration
|
||||
};
|
||||
};
|
||||
};
|
||||
# ... More mapped inventory options
|
||||
# i.e. shared config for all machines
|
||||
} -> Inventory
|
||||
*/
|
||||
{
|
||||
# all machines managed by Clan
|
||||
inherit (clan) nixosConfigurations clanInternals;
|
||||
# add the Clan cli tool to the dev shell
|
||||
devShells.${system}.default = pkgs.mkShell {
|
||||
packages = [ clan-core.packages.${system}.clan-cli ];
|
||||
};
|
||||
};
|
||||
}
|
46
inventory/flake-module.nix
Normal file
46
inventory/flake-module.nix
Normal file
@ -0,0 +1,46 @@
|
||||
{ ... }:
|
||||
{
|
||||
perSystem =
|
||||
{ pkgs, config, ... }:
|
||||
{
|
||||
packages.inventory-schema = pkgs.stdenv.mkDerivation {
|
||||
name = "inventory-schema";
|
||||
src = ./src;
|
||||
|
||||
buildInputs = [ pkgs.cue ];
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
'';
|
||||
};
|
||||
devShells.inventory-schema = pkgs.mkShell { inputsFrom = [ config.packages.inventory-schema ]; };
|
||||
|
||||
checks.inventory-schema-checks = pkgs.stdenv.mkDerivation {
|
||||
name = "inventory-schema-checks";
|
||||
src = ./src;
|
||||
buildInputs = [ pkgs.cue ];
|
||||
buildPhase = ''
|
||||
echo "Running inventory tests..."
|
||||
|
||||
echo "Export cue as json-schema..."
|
||||
cue export --out openapi root.cue
|
||||
|
||||
echo "Validate test/*.json against inventory-schema..."
|
||||
|
||||
test_dir="test"
|
||||
for file in "$test_dir"/*; do
|
||||
# Check if the item is a file
|
||||
if [ -f "$file" ]; then
|
||||
# Print the filename
|
||||
echo "Running test on: $file"
|
||||
|
||||
# Run the cue vet command
|
||||
cue vet "$file" root.cue -d "#Root"
|
||||
fi
|
||||
done
|
||||
|
||||
touch $out
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
2
inventory/src/cue.mod/module.cue
Normal file
2
inventory/src/cue.mod/module.cue
Normal file
@ -0,0 +1,2 @@
|
||||
module: "clan.lol/inventory"
|
||||
language: version: "v0.8.2"
|
8
inventory/src/machines/machines.cue
Normal file
8
inventory/src/machines/machines.cue
Normal file
@ -0,0 +1,8 @@
|
||||
package machines
|
||||
|
||||
|
||||
#machine: machines: [string]: {
|
||||
name: string,
|
||||
description?: string,
|
||||
icon?: string
|
||||
}
|
24
inventory/src/root.cue
Normal file
24
inventory/src/root.cue
Normal file
@ -0,0 +1,24 @@
|
||||
package inventory
|
||||
|
||||
import (
|
||||
"clan.lol/inventory/services"
|
||||
"clan.lol/inventory/machines"
|
||||
)
|
||||
|
||||
@jsonschema(schema="http://json-schema.org/schema#")
|
||||
#Root: {
|
||||
meta: {
|
||||
// A name of the clan (primarily shown by the UI)
|
||||
name: string
|
||||
// A description of the clan
|
||||
description?: string
|
||||
// The icon path
|
||||
icon?: string
|
||||
}
|
||||
|
||||
// A map of services
|
||||
services.#service
|
||||
|
||||
// A map of machines
|
||||
machines.#machine
|
||||
}
|
32
inventory/src/services/services.cue
Normal file
32
inventory/src/services/services.cue
Normal file
@ -0,0 +1,32 @@
|
||||
package services
|
||||
|
||||
#ServiceRole: "server" | "client" | "both"
|
||||
|
||||
#service: services: [string]: {
|
||||
// Required meta fields
|
||||
meta: {
|
||||
name: string,
|
||||
icon?: string
|
||||
description?: string,
|
||||
},
|
||||
// Required module specifies the behavior of the service.
|
||||
module: string,
|
||||
|
||||
// We moved the machine sepcific config to "machines".
|
||||
// It may be moved back depending on what makes more sense in the future.
|
||||
machineConfig: {
|
||||
[string]: {
|
||||
roles?: [ ...#ServiceRole ],
|
||||
config?: {
|
||||
...
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Configuration for the service
|
||||
config: {
|
||||
// Schema depends on the module.
|
||||
// It declares the interface how the service can be configured.
|
||||
...
|
||||
}
|
||||
}
|
38
inventory/src/tests/borgbackup.json
Normal file
38
inventory/src/tests/borgbackup.json
Normal file
@ -0,0 +1,38 @@
|
||||
{
|
||||
"machines": {
|
||||
"camina_machine": {
|
||||
"name": "camina"
|
||||
},
|
||||
"vyr_machine": {
|
||||
"name": "vyr"
|
||||
},
|
||||
"vi_machine": {
|
||||
"name": "vi"
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"name": "kenjis clan"
|
||||
},
|
||||
"services": {
|
||||
"backup": {
|
||||
"meta": {
|
||||
"name": "My backup"
|
||||
},
|
||||
"module": "borbackup-static",
|
||||
"machineConfig": {
|
||||
"vyr": {
|
||||
"roles": ["server"]
|
||||
},
|
||||
"vi": {
|
||||
"roles": ["client"]
|
||||
},
|
||||
"camina_machine": {
|
||||
"roles": ["client"]
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"folders": ["/home", "/root", "/var", "/etc"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
45
inventory/src/tests/syncthing.json
Normal file
45
inventory/src/tests/syncthing.json
Normal file
@ -0,0 +1,45 @@
|
||||
{
|
||||
"machines": {
|
||||
"camina_machine": {
|
||||
"name": "camina"
|
||||
},
|
||||
"vyr": {
|
||||
"name": "vyr"
|
||||
},
|
||||
"vi": {
|
||||
"name": "vi"
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"name": "kenjis clan"
|
||||
},
|
||||
"services": {
|
||||
"sync_files": {
|
||||
"meta": {
|
||||
"name": "My sync"
|
||||
},
|
||||
"module": "syncthing-static-peers",
|
||||
"machineConfig": {
|
||||
"vyr": {},
|
||||
"vi": {},
|
||||
"camina_machine": {}
|
||||
},
|
||||
"config": {
|
||||
"folders": {
|
||||
"test": {
|
||||
"path": "~/data/docs",
|
||||
"devices": ["camina", "vyr", "vi"]
|
||||
},
|
||||
"videos": {
|
||||
"path": "~/data/videos",
|
||||
"devices": ["camina", "vyr", "ezra"]
|
||||
},
|
||||
"playlist": {
|
||||
"path": "~/data/playlist",
|
||||
"devices": ["camina", "vyr", "ezra"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
36
inventory/src/tests/zerotier.json
Normal file
36
inventory/src/tests/zerotier.json
Normal file
@ -0,0 +1,36 @@
|
||||
{
|
||||
"machines": {
|
||||
"camina_machine": {
|
||||
"name": "camina"
|
||||
},
|
||||
"vyr_machine": {
|
||||
"name": "vyr"
|
||||
},
|
||||
"vi_machine": {
|
||||
"name": "vi"
|
||||
}
|
||||
},
|
||||
"meta": {
|
||||
"name": "kenjis clan"
|
||||
},
|
||||
"services": {
|
||||
"backup": {
|
||||
"meta": {
|
||||
"name": "My backup"
|
||||
},
|
||||
"module": "borbackup-static",
|
||||
"machineConfig": {
|
||||
"vyr_machine": {
|
||||
"roles": ["server"]
|
||||
},
|
||||
"vi_machine": {
|
||||
"roles": ["peer"]
|
||||
},
|
||||
"camina_machine": {
|
||||
"roles": ["peer"]
|
||||
}
|
||||
},
|
||||
"config": {}
|
||||
}
|
||||
}
|
||||
}
|
@ -101,13 +101,13 @@ let
|
||||
(
|
||||
{
|
||||
# Settings
|
||||
clanCore.clanDir = directory;
|
||||
clan.core.clanDir = directory;
|
||||
# Inherited from clan wide settings
|
||||
clanCore.clanName = meta.name or clanName;
|
||||
clanCore.clanIcon = meta.icon or clanIcon;
|
||||
clan.core.clanName = meta.name or clanName;
|
||||
clan.core.clanIcon = meta.icon or clanIcon;
|
||||
|
||||
# Machine specific settings
|
||||
clanCore.machineName = name;
|
||||
clan.core.machineName = name;
|
||||
networking.hostName = lib.mkDefault name;
|
||||
nixpkgs.hostPlatform = lib.mkDefault system;
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
{ lib, ... }:
|
||||
{
|
||||
imports = [ ./state.nix ];
|
||||
options.clanCore.backups = {
|
||||
options.clan.core.backups = {
|
||||
providers = lib.mkOption {
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule (
|
||||
|
@ -2,37 +2,43 @@
|
||||
{
|
||||
imports = [
|
||||
(lib.mkRemovedOptionModule [
|
||||
"clanCore"
|
||||
"clan"
|
||||
"core"
|
||||
"secretsPrefix"
|
||||
] "secretsPrefix was only used by the sops module and the code is now integrated in there")
|
||||
(lib.mkRenamedOptionModule
|
||||
[
|
||||
"clanCore"
|
||||
"clan"
|
||||
"core"
|
||||
"secretStore"
|
||||
]
|
||||
[
|
||||
"clanCore"
|
||||
"clan"
|
||||
"core"
|
||||
"facts"
|
||||
"secretStore"
|
||||
]
|
||||
)
|
||||
(lib.mkRemovedOptionModule [
|
||||
"clanCore"
|
||||
"clan"
|
||||
"core"
|
||||
"secretsDirectory"
|
||||
] "clancore.secretsDirectory was removed. Use clanCore.facts.secretPathFunction instead")
|
||||
] "clan.core.secretsDirectory was removed. Use clan.core.facts.secretPathFunction instead")
|
||||
(lib.mkRenamedOptionModule
|
||||
[
|
||||
"clanCore"
|
||||
"clan"
|
||||
"core"
|
||||
"secretsUploadDirectory"
|
||||
]
|
||||
[
|
||||
"clanCore"
|
||||
"clan"
|
||||
"core"
|
||||
"facts"
|
||||
"secretUploadDirectory"
|
||||
]
|
||||
)
|
||||
];
|
||||
options.clanCore.secrets = lib.mkOption {
|
||||
options.clan.core.secrets = lib.mkOption {
|
||||
visible = false;
|
||||
default = { };
|
||||
type = lib.types.attrsOf (
|
||||
@ -97,14 +103,14 @@
|
||||
description = ''
|
||||
path to a secret which is generated by the generator
|
||||
'';
|
||||
default = config.clanCore.facts.secretPathFunction secret;
|
||||
defaultText = lib.literalExpression "config.clanCore.facts.secretPathFunction secret";
|
||||
default = config.clan.core.facts.secretPathFunction secret;
|
||||
defaultText = lib.literalExpression "config.clan.core.facts.secretPathFunction secret";
|
||||
};
|
||||
}
|
||||
// lib.optionalAttrs (config.clanCore.facts.secretStore == "sops") {
|
||||
// lib.optionalAttrs (config.clan.core.facts.secretStore == "sops") {
|
||||
groups = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = config.clanCore.sops.defaultGroups;
|
||||
default = config.clan.core.sops.defaultGroups;
|
||||
description = ''
|
||||
Groups to decrypt the secret for. By default we always use the user's key.
|
||||
'';
|
||||
@ -134,12 +140,12 @@
|
||||
path to a fact which is generated by the generator
|
||||
'';
|
||||
default =
|
||||
config.clanCore.clanDir
|
||||
+ "/machines/${config.clanCore.machineName}/facts/${fact.config._module.args.name}";
|
||||
defaultText = lib.literalExpression "\${config.clanCore.clanDir}/machines/\${config.clanCore.machineName}/facts/\${fact.config._module.args.name}";
|
||||
config.clan.core.clanDir
|
||||
+ "/machines/${config.clan.core.machineName}/facts/${fact.config._module.args.name}";
|
||||
defaultText = lib.literalExpression "\${config.clan.core.clanDir}/machines/\${config.clan.core.machineName}/facts/\${fact.config._module.args.name}";
|
||||
};
|
||||
value = lib.mkOption {
|
||||
defaultText = lib.literalExpression "\${config.clanCore.clanDir}/\${fact.config.path}";
|
||||
defaultText = lib.literalExpression "\${config.clan.core.clanDir}/\${fact.config.path}";
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default =
|
||||
if builtins.pathExists fact.config.path then lib.strings.fileContents fact.config.path else null;
|
||||
@ -152,16 +158,16 @@
|
||||
})
|
||||
);
|
||||
};
|
||||
config = lib.mkIf (config.clanCore.secrets != { }) {
|
||||
clanCore.facts.services = lib.mapAttrs' (
|
||||
config = lib.mkIf (config.clan.core.secrets != { }) {
|
||||
clan.core.facts.services = lib.mapAttrs' (
|
||||
name: service:
|
||||
lib.warn "clanCore.secrets.${name} is deprecated, use clanCore.facts.services.${name} instead" (
|
||||
lib.warn "clan.core.secrets.${name} is deprecated, use clan.core.facts.services.${name} instead" (
|
||||
lib.nameValuePair name ({
|
||||
secret = service.secrets;
|
||||
public = service.facts;
|
||||
generator = service.generator;
|
||||
})
|
||||
)
|
||||
) config.clanCore.secrets;
|
||||
) config.clan.core.secrets;
|
||||
};
|
||||
}
|
||||
|
@ -5,7 +5,7 @@
|
||||
...
|
||||
}:
|
||||
{
|
||||
options.clanCore.facts = {
|
||||
options.clan.core.facts = {
|
||||
secretStore = lib.mkOption {
|
||||
type = lib.types.enum [
|
||||
"sops"
|
||||
@ -115,6 +115,7 @@
|
||||
type = lib.types.str;
|
||||
readOnly = true;
|
||||
internal = true;
|
||||
defaultText = "read only script";
|
||||
default = ''
|
||||
set -eu -o pipefail
|
||||
|
||||
@ -155,13 +156,13 @@
|
||||
description = ''
|
||||
path to a secret which is generated by the generator
|
||||
'';
|
||||
default = config.clanCore.facts.secretPathFunction secret;
|
||||
default = config.clan.core.facts.secretPathFunction secret;
|
||||
};
|
||||
}
|
||||
// lib.optionalAttrs (config.clanCore.facts.secretModule == "clan_cli.facts.secret_modules.sops") {
|
||||
// lib.optionalAttrs (config.clan.core.facts.secretModule == "clan_cli.facts.secret_modules.sops") {
|
||||
groups = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = config.clanCore.sops.defaultGroups;
|
||||
default = config.clan.core.sops.defaultGroups;
|
||||
description = ''
|
||||
Groups to decrypt the secret for. By default we always use the user's key.
|
||||
'';
|
||||
@ -190,12 +191,12 @@
|
||||
description = ''
|
||||
path to a fact which is generated by the generator
|
||||
'';
|
||||
defaultText = lib.literalExpression "\${config.clanCore.clanDir}/machines/\${config.clanCore.machineName}/facts/\${fact.config.name}";
|
||||
defaultText = lib.literalExpression "\${config.clan.core.clanDir}/machines/\${config.clan.core.machineName}/facts/\${fact.config.name}";
|
||||
default =
|
||||
config.clanCore.clanDir + "/machines/${config.clanCore.machineName}/facts/${fact.config.name}";
|
||||
config.clan.core.clanDir + "/machines/${config.clan.core.machineName}/facts/${fact.config.name}";
|
||||
};
|
||||
value = lib.mkOption {
|
||||
defaultText = lib.literalExpression "\${config.clanCore.clanDir}/\${fact.config.path}";
|
||||
defaultText = lib.literalExpression "\${config.clan.core.clanDir}/\${fact.config.path}";
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default =
|
||||
if builtins.pathExists fact.config.path then lib.strings.fileContents fact.config.path else null;
|
||||
@ -218,5 +219,15 @@
|
||||
|
||||
./public/in_repo.nix
|
||||
./public/vm.nix
|
||||
|
||||
# (lib.mkRenamedOptionModule
|
||||
# [
|
||||
# "clanCore"
|
||||
# ]
|
||||
# [
|
||||
# "clan"
|
||||
# "core"
|
||||
# ]
|
||||
# )
|
||||
];
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
{ config, lib, ... }:
|
||||
{
|
||||
config = lib.mkIf (config.clanCore.facts.publicStore == "in_repo") {
|
||||
clanCore.facts.publicModule = "clan_cli.facts.public_modules.in_repo";
|
||||
config = lib.mkIf (config.clan.core.facts.publicStore == "in_repo") {
|
||||
clan.core.facts.publicModule = "clan_cli.facts.public_modules.in_repo";
|
||||
};
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
{ config, lib, ... }:
|
||||
{
|
||||
config = lib.mkIf (config.clanCore.facts.publicStore == "vm") {
|
||||
clanCore.facts.publicModule = "clan_cli.facts.public_modules.vm";
|
||||
config = lib.mkIf (config.clan.core.facts.publicStore == "vm") {
|
||||
clan.core.facts.publicModule = "clan_cli.facts.public_modules.vm";
|
||||
};
|
||||
}
|
||||
|
@ -8,10 +8,10 @@
|
||||
'';
|
||||
};
|
||||
|
||||
config = lib.mkIf (config.clanCore.facts.secretStore == "password-store") {
|
||||
clanCore.facts.secretPathFunction =
|
||||
config = lib.mkIf (config.clan.core.facts.secretStore == "password-store") {
|
||||
clan.core.facts.secretPathFunction =
|
||||
secret: "${config.clan.password-store.targetDirectory}/${secret.config.name}";
|
||||
clanCore.facts.secretUploadDirectory = config.clan.password-store.targetDirectory;
|
||||
clanCore.facts.secretModule = "clan_cli.facts.secret_modules.password_store";
|
||||
clan.core.facts.secretUploadDirectory = config.clan.password-store.targetDirectory;
|
||||
clan.core.facts.secretModule = "clan_cli.facts.secret_modules.password_store";
|
||||
};
|
||||
}
|
||||
|
@ -5,8 +5,8 @@
|
||||
...
|
||||
}:
|
||||
let
|
||||
secretsDir = config.clanCore.clanDir + "/sops/secrets";
|
||||
groupsDir = config.clanCore.clanDir + "/sops/groups";
|
||||
secretsDir = config.clan.core.clanDir + "/sops/secrets";
|
||||
groupsDir = config.clan.core.clanDir + "/sops/groups";
|
||||
|
||||
# My symlink is in the nixos module detected as a directory also it works in the repl. Is this because of pure evaluation?
|
||||
containsSymlink =
|
||||
@ -16,7 +16,7 @@ let
|
||||
|
||||
containsMachine =
|
||||
parent: name: type:
|
||||
type == "directory" && containsSymlink "${parent}/${name}/machines/${config.clanCore.machineName}";
|
||||
type == "directory" && containsSymlink "${parent}/${name}/machines/${config.clan.core.machineName}";
|
||||
|
||||
containsMachineOrGroups =
|
||||
name: type:
|
||||
@ -34,7 +34,7 @@ let
|
||||
in
|
||||
{
|
||||
options = {
|
||||
clanCore.sops.defaultGroups = lib.mkOption {
|
||||
clan.core.sops.defaultGroups = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [ ];
|
||||
example = [ "admins" ];
|
||||
@ -42,16 +42,16 @@ in
|
||||
};
|
||||
};
|
||||
|
||||
config = lib.mkIf (config.clanCore.facts.secretStore == "sops") {
|
||||
config = lib.mkIf (config.clan.core.facts.secretStore == "sops") {
|
||||
# Before we generate a secret we cannot know the path yet, so we need to set it to an empty string
|
||||
clanCore.facts.secretPathFunction =
|
||||
clan.core.facts.secretPathFunction =
|
||||
secret:
|
||||
config.sops.secrets.${"${config.clanCore.machineName}-${secret.config.name}"}.path
|
||||
config.sops.secrets.${"${config.clan.core.machineName}-${secret.config.name}"}.path
|
||||
or "/no-such-path";
|
||||
clanCore.facts.secretModule = "clan_cli.facts.secret_modules.sops";
|
||||
clanCore.facts.secretUploadDirectory = lib.mkDefault "/var/lib/sops-nix";
|
||||
clan.core.facts.secretModule = "clan_cli.facts.secret_modules.sops";
|
||||
clan.core.facts.secretUploadDirectory = lib.mkDefault "/var/lib/sops-nix";
|
||||
sops.secrets = builtins.mapAttrs (name: _: {
|
||||
sopsFile = config.clanCore.clanDir + "/sops/secrets/${name}/secret";
|
||||
sopsFile = config.clan.core.clanDir + "/sops/secrets/${name}/secret";
|
||||
format = "binary";
|
||||
}) secrets;
|
||||
# To get proper error messages about missing secrets we need a dummy secret file that is always present
|
||||
@ -60,7 +60,7 @@ in
|
||||
);
|
||||
|
||||
sops.age.keyFile = lib.mkIf (builtins.pathExists (
|
||||
config.clanCore.clanDir + "/sops/secrets/${config.clanCore.machineName}-age.key/secret"
|
||||
config.clan.core.clanDir + "/sops/secrets/${config.clan.core.machineName}-age.key/secret"
|
||||
)) (lib.mkDefault "/var/lib/sops-nix/key.txt");
|
||||
};
|
||||
}
|
||||
|
@ -1,8 +1,8 @@
|
||||
{ config, lib, ... }:
|
||||
{
|
||||
config = lib.mkIf (config.clanCore.facts.secretStore == "vm") {
|
||||
clanCore.facts.secretPathFunction = secret: "/etc/secrets/${secret.config.name}";
|
||||
clanCore.facts.secretUploadDirectory = "/etc/secrets";
|
||||
clanCore.facts.secretModule = "clan_cli.facts.secret_modules.vm";
|
||||
config = lib.mkIf (config.clan.core.facts.secretStore == "vm") {
|
||||
clan.core.facts.secretPathFunction = secret: "/etc/secrets/${secret.config.name}";
|
||||
clan.core.facts.secretUploadDirectory = "/etc/secrets";
|
||||
clan.core.facts.secretModule = "clan_cli.facts.secret_modules.vm";
|
||||
};
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
{ lib, pkgs, ... }:
|
||||
{
|
||||
options.clanCore = {
|
||||
options.clan.core = {
|
||||
clanName = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = ''
|
||||
|
@ -5,7 +5,13 @@
|
||||
...
|
||||
}:
|
||||
{
|
||||
options.clanCore.optionsNix = lib.mkOption {
|
||||
imports = [
|
||||
(lib.mkRenamedOptionModule [ "clanCore" ] [
|
||||
"clan"
|
||||
"core"
|
||||
])
|
||||
];
|
||||
options.clan.core.optionsNix = lib.mkOption {
|
||||
type = lib.types.raw;
|
||||
internal = true;
|
||||
readOnly = true;
|
||||
|
@ -66,7 +66,7 @@
|
||||
config = {
|
||||
system.clan.deployment.data = {
|
||||
facts = {
|
||||
inherit (config.clanCore.facts)
|
||||
inherit (config.clan.core.facts)
|
||||
secretUploadDirectory
|
||||
secretModule
|
||||
publicModule
|
||||
|
@ -1,22 +1,50 @@
|
||||
{ lib, ... }:
|
||||
{
|
||||
# defaults
|
||||
config.clanCore.state.HOME.folders = [ "/home" ];
|
||||
config.clan.core.state.HOME.folders = [ "/home" ];
|
||||
|
||||
# interface
|
||||
options.clanCore.state = lib.mkOption {
|
||||
options.clan.core.state = lib.mkOption {
|
||||
default = { };
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule (
|
||||
{ ... }:
|
||||
{ name, ... }:
|
||||
{
|
||||
options = {
|
||||
name = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = name;
|
||||
description = ''
|
||||
Name of the state
|
||||
'';
|
||||
};
|
||||
folders = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
description = ''
|
||||
Folder where state resides in
|
||||
'';
|
||||
};
|
||||
preBackupCommand = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default = null;
|
||||
description = ''
|
||||
script to run before backing up the state dir
|
||||
This is for example useful for services that require an export of their state
|
||||
e.g. a database dump
|
||||
'';
|
||||
};
|
||||
|
||||
# TODO: implement this
|
||||
#stopOnRestore = lib.mkOption {
|
||||
# type = lib.types.listOf lib.types.str;
|
||||
# default = [];
|
||||
# description = ''
|
||||
# List of services to stop before restoring the state dir from a backup
|
||||
|
||||
# Utilize this to stop services which currently access these folders or or other services affected by the restore
|
||||
# '';
|
||||
#};
|
||||
|
||||
preRestoreCommand = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default = null;
|
||||
@ -26,6 +54,7 @@
|
||||
Utilize this to stop services which currently access these folders
|
||||
'';
|
||||
};
|
||||
|
||||
postRestoreCommand = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default = null;
|
||||
|
@ -9,7 +9,9 @@
|
||||
}:
|
||||
let
|
||||
# Flatten the list of state folders into a single list
|
||||
stateFolders = lib.flatten (lib.mapAttrsToList (_item: attrs: attrs.folders) config.clanCore.state);
|
||||
stateFolders = lib.flatten (
|
||||
lib.mapAttrsToList (_item: attrs: attrs.folders) config.clan.core.state
|
||||
);
|
||||
|
||||
vmModule = {
|
||||
imports = [
|
||||
@ -22,7 +24,7 @@ let
|
||||
|
||||
# required to react to system_powerdown qmp command
|
||||
# Some desktop managers like xfce override the poweroff signal and therefore
|
||||
# make it impossible to handle it via 'logind' diretly.
|
||||
# make it impossible to handle it via 'logind' directly.
|
||||
services.acpid.enable = true;
|
||||
services.acpid.handlers.power.event = "button/power.*";
|
||||
services.acpid.handlers.power.action = "poweroff";
|
||||
@ -86,7 +88,7 @@ let
|
||||
fsType = "ext4";
|
||||
};
|
||||
|
||||
${config.clanCore.secretsUploadDirectory} = {
|
||||
${config.clan.core.secretsUploadDirectory} = {
|
||||
device = "secrets";
|
||||
fsType = "9p";
|
||||
neededForBoot = true;
|
||||
@ -158,7 +160,7 @@ in
|
||||
# All important VM config variables needed by the vm runner
|
||||
# this is really just a remapping of values defined elsewhere
|
||||
# and therefore not intended to be set by the user
|
||||
clanCore.vm.inspect = {
|
||||
clan.core.vm.inspect = {
|
||||
clan_name = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
internal = true;
|
||||
@ -228,11 +230,11 @@ in
|
||||
|
||||
config = {
|
||||
# for clan vm inspect
|
||||
clanCore.vm.inspect = {
|
||||
clan_name = config.clanCore.clanName;
|
||||
machine_icon = config.clanCore.machineIcon or config.clanCore.clanIcon;
|
||||
machine_name = config.clanCore.machineName;
|
||||
machine_description = config.clanCore.machineDescription;
|
||||
clan.core.vm.inspect = {
|
||||
clan_name = config.clan.core.clanName;
|
||||
machine_icon = config.clan.core.machineIcon or config.clan.core.clanIcon;
|
||||
machine_name = config.clan.core.machineName;
|
||||
machine_description = config.clan.core.machineDescription;
|
||||
memory_size = config.clan.virtualisation.memorySize;
|
||||
inherit (config.clan.virtualisation) cores graphics waypipe;
|
||||
};
|
||||
|
@ -6,7 +6,7 @@
|
||||
}:
|
||||
let
|
||||
cfg = config.clan.networking.zerotier;
|
||||
facts = config.clanCore.facts.services.zerotier.public or { };
|
||||
facts = config.clan.core.facts.services.zerotier.public or { };
|
||||
genMoonScript = pkgs.runCommand "genmoon" { nativeBuildInputs = [ pkgs.python3 ]; } ''
|
||||
install -Dm755 ${./genmoon.py} $out/bin/genmoon
|
||||
patchShebangs $out/bin/genmoon
|
||||
@ -23,8 +23,8 @@ in
|
||||
};
|
||||
name = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = config.clanCore.clanName;
|
||||
defaultText = "config.clanCore.clanName";
|
||||
default = config.clan.core.clanName;
|
||||
defaultText = "config.clan.core.clanName";
|
||||
description = ''
|
||||
zerotier network name
|
||||
'';
|
||||
@ -111,7 +111,7 @@ in
|
||||
|
||||
systemd.services.zerotierone.serviceConfig.ExecStartPre = [
|
||||
"+${pkgs.writeShellScript "init-zerotier" ''
|
||||
cp ${config.clanCore.facts.services.zerotier.secret.zerotier-identity-secret.path} /var/lib/zerotier-one/identity.secret
|
||||
cp ${config.clan.core.facts.services.zerotier.secret.zerotier-identity-secret.path} /var/lib/zerotier-one/identity.secret
|
||||
zerotier-idtool getpublic /var/lib/zerotier-one/identity.secret > /var/lib/zerotier-one/identity.public
|
||||
|
||||
${lib.optionalString (cfg.controller.enable) ''
|
||||
@ -176,7 +176,7 @@ in
|
||||
(lib.mkIf cfg.controller.enable {
|
||||
# only the controller needs to have the key in the repo, the other clients can be dynamic
|
||||
# we generate the zerotier code manually for the controller, since it's part of the bootstrap command
|
||||
clanCore.facts.services.zerotier = {
|
||||
clan.core.facts.services.zerotier = {
|
||||
public.zerotier-ip = { };
|
||||
public.zerotier-network-id = { };
|
||||
secret.zerotier-identity-secret = { };
|
||||
@ -192,12 +192,12 @@ in
|
||||
--network-id "$facts/zerotier-network-id"
|
||||
'';
|
||||
};
|
||||
clanCore.state.zerotier.folders = [ "/var/lib/zerotier-one" ];
|
||||
clan.core.state.zerotier.folders = [ "/var/lib/zerotier-one" ];
|
||||
|
||||
environment.systemPackages = [ config.clanCore.clanPkgs.zerotier-members ];
|
||||
environment.systemPackages = [ config.clan.core.clanPkgs.zerotier-members ];
|
||||
})
|
||||
(lib.mkIf (!cfg.controller.enable && cfg.networkId != null) {
|
||||
clanCore.facts.services.zerotier = {
|
||||
clan.core.facts.services.zerotier = {
|
||||
public.zerotier-ip = { };
|
||||
secret.zerotier-identity-secret = { };
|
||||
generator.path = [
|
||||
@ -255,7 +255,7 @@ in
|
||||
environment.etc."zerotier/network-id".text = facts.zerotier-network-id.value;
|
||||
systemd.services.zerotierone.serviceConfig.ExecStartPost = [
|
||||
"+${pkgs.writeShellScript "whitelist-controller" ''
|
||||
${config.clanCore.clanPkgs.zerotier-members}/bin/zerotier-members allow ${
|
||||
${config.clan.core.clanPkgs.zerotier-members}/bin/zerotier-members allow ${
|
||||
builtins.substring 0 10 cfg.networkId
|
||||
}
|
||||
''}"
|
||||
|
@ -16,7 +16,7 @@
|
||||
(
|
||||
{ pkgs, lib, ... }:
|
||||
{
|
||||
clanCore.clanPkgs = lib.mkDefault self.packages.${pkgs.hostPlatform.system};
|
||||
clan.core.clanPkgs = lib.mkDefault self.packages.${pkgs.hostPlatform.system};
|
||||
}
|
||||
)
|
||||
];
|
||||
|
@ -19,7 +19,9 @@ let
|
||||
};
|
||||
|
||||
# Flatten the list of state folders into a single list
|
||||
stateFolders = lib.flatten (lib.mapAttrsToList (_item: attrs: attrs.folders) config.clanCore.state);
|
||||
stateFolders = lib.flatten (
|
||||
lib.mapAttrsToList (_item: attrs: attrs.folders) config.clan.core.state
|
||||
);
|
||||
|
||||
# A module setting up bind mounts for all state folders
|
||||
stateMounts = {
|
||||
|
@ -3,4 +3,4 @@ source_up
|
||||
watch_file flake-module.nix shell.nix default.nix
|
||||
|
||||
# Because we depend on nixpkgs sources, uploading to builders takes a long time
|
||||
use flake .#clan-vm-manager --builders ''
|
||||
use flake .#clan-app --builders ''
|
@ -1,4 +1,4 @@
|
||||
# Clan VM Manager
|
||||
# clan app
|
||||
|
||||
Provides users with the simple functionality to manage their locally registered clans.
|
||||
|
||||
@ -9,19 +9,19 @@ Provides users with the simple functionality to manage their locally registered
|
||||
Run this application
|
||||
|
||||
```bash
|
||||
./bin/clan-vm-manager
|
||||
./bin/clan-app
|
||||
```
|
||||
|
||||
Join the default machine of a clan
|
||||
|
||||
```bash
|
||||
./bin/clan-vm-manager [clan-uri]
|
||||
./bin/clan-app [clan-uri]
|
||||
```
|
||||
|
||||
Join a specific machine of a clan
|
||||
|
||||
```bash
|
||||
./bin/clan-vm-manager [clan-uri]#[machine]
|
||||
./bin/clan-app [clan-uri]#[machine]
|
||||
```
|
||||
|
||||
For more available commands see the developer section below.
|
||||
@ -35,7 +35,7 @@ For more available commands see the developer section below.
|
||||
gsettings set org.gtk.Settings.Debug enable-inspector-keybinding true
|
||||
|
||||
# Start the application with the debugger attached
|
||||
GTK_DEBUG=interactive ./bin/clan-vm-manager --debug
|
||||
GTK_DEBUG=interactive ./bin/clan-app --debug
|
||||
```
|
||||
|
||||
Appending `--debug` flag enables debug logging printed into the console.
|
||||
@ -45,7 +45,7 @@ Appending `--debug` flag enables debug logging printed into the console.
|
||||
To activate profiling you can run
|
||||
|
||||
```bash
|
||||
PERF=1 ./bin/clan-vm-manager
|
||||
PERF=1 ./bin/clan-app
|
||||
```
|
||||
|
||||
### Library Components
|
||||
@ -75,20 +75,20 @@ gtk4-icon-browser
|
||||
|
||||
### Links
|
||||
|
||||
Here are some important documentation links related to the Clan VM Manager:
|
||||
Here are some important documentation links related to the Clan App:
|
||||
|
||||
- [Adw PyGobject Reference](http://lazka.github.io/pgi-docs/index.html#Adw-1): This link provides the PyGObject reference documentation for the Adw library, which is used in the Clan VM Manager. It contains detailed information about the Adw widgets and their usage.
|
||||
- [Adw PyGobject Reference](http://lazka.github.io/pgi-docs/index.html#Adw-1): This link provides the PyGObject reference documentation for the Adw library, which is used in the Clan App. It contains detailed information about the Adw widgets and their usage.
|
||||
|
||||
- [GTK4 PyGobject Reference](http://lazka.github.io/pgi-docs/index.html#Gtk-4.0): This link provides the PyGObject reference documentation for GTK4, the toolkit used for building the user interface of the Clan VM Manager. It includes information about GTK4 widgets, signals, and other features.
|
||||
- [GTK4 PyGobject Reference](http://lazka.github.io/pgi-docs/index.html#Gtk-4.0): This link provides the PyGObject reference documentation for GTK4, the toolkit used for building the user interface of the clan app. It includes information about GTK4 widgets, signals, and other features.
|
||||
|
||||
- [Adw Widget Gallery](https://gnome.pages.gitlab.gnome.org/libadwaita/doc/main/widget-gallery.html): This link showcases a widget gallery for Adw, allowing you to see the available widgets and their visual appearance. It can be helpful for designing the user interface of the Clan VM Manager.
|
||||
- [Adw Widget Gallery](https://gnome.pages.gitlab.gnome.org/libadwaita/doc/main/widget-gallery.html): This link showcases a widget gallery for Adw, allowing you to see the available widgets and their visual appearance. It can be helpful for designing the user interface of the clan app.
|
||||
|
||||
- [Python + GTK3 Tutorial](https://python-gtk-3-tutorial.readthedocs.io/en/latest/textview.html): Although the Clan VM Manager uses GTK4, this tutorial for GTK3 can still be useful as it covers the basics of building GTK-based applications with Python. It includes examples and explanations for various GTK widgets, including text views.
|
||||
- [Python + GTK3 Tutorial](https://python-gtk-3-tutorial.readthedocs.io/en/latest/textview.html): Although the clan app uses GTK4, this tutorial for GTK3 can still be useful as it covers the basics of building GTK-based applications with Python. It includes examples and explanations for various GTK widgets, including text views.
|
||||
|
||||
- [GNOME Human Interface Guidelines](https://developer.gnome.org/hig/): This link provides the GNOME Human Interface Guidelines, which offer design and usability recommendations for creating GNOME applications. It covers topics such as layout, navigation, and interaction patterns.
|
||||
|
||||
## Error handling
|
||||
|
||||
> Error dialogs should be avoided where possible, since they are disruptive.
|
||||
>
|
||||
> Error dialogs should be avoided where possible, since they are disruptive.
|
||||
>
|
||||
> For simple non-critical errors, toasts can be a good alternative.
|
@ -7,7 +7,7 @@ module_path = Path(__file__).parent.parent.absolute()
|
||||
sys.path.insert(0, str(module_path))
|
||||
sys.path.insert(0, str(module_path.parent / "clan_cli"))
|
||||
|
||||
from clan_vm_manager import main # NOQA
|
||||
from clan_app import main # NOQA
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -3,7 +3,7 @@ import sys
|
||||
|
||||
from clan_cli.profiler import profile
|
||||
|
||||
from clan_vm_manager.app import MainApplication
|
||||
from clan_app.app import MainApplication
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user