Merge pull request 're-add matrix server' (#190) from bump-buildbot into main
All checks were successful
buildbot/nix-build .#checks.x86_64-linux.package-clan-merge Build done.
buildbot/nix-build .#checks.x86_64-linux.clan-merge Build done.
buildbot/nix-build .#checks.x86_64-linux.devShell-clan-merge Build done.
buildbot/nix-build .#checks.x86_64-linux.devShell-default Build done.
buildbot/nix-build .#checks.x86_64-linux.package-action-flake-update Build done.
buildbot/nix-build .#checks.x86_64-linux.package-gitea Build done.
buildbot/nix-build .#checks.x86_64-linux.package-job-flake-update-clan-core Build done.
buildbot/nix-build .#checks.x86_64-linux.package-job-flake-update-clan-homepage Build done.
buildbot/nix-build .#checks.x86_64-linux.package-job-flake-update-clan-infra Build done.
buildbot/nix-build .#checks.x86_64-linux.package-action-ensure-tea-login Build done.
buildbot/nix-build .#checks.x86_64-linux.treefmt Build done.
buildbot/nix-build .#checks.x86_64-linux.package-action-flake-update-pr-clan Build done.
buildbot/nix-build .#checks.x86_64-linux.package-action-create-pr Build done.
buildbot/nix-build .#checks.x86_64-linux.nixos-web01 Build done.
buildbot/nix-eval Build done.

clan-bot 2024-06-10 13:27:34 +00:00
commit d3e9877bd7
47 changed files with 658 additions and 9593 deletions

View File

@@ -1,14 +1,14 @@
{
perSystem =
{ inputs'
, pkgs
, lib
, ...
}:
{ inputs', pkgs, ... }:
let
convert2Tofu = provider: provider.override (prev: {
homepage = builtins.replaceStrings [ "registry.terraform.io/providers" ] [ "registry.opentofu.org" ] prev.homepage;
});
convert2Tofu =
provider:
provider.override (prev: {
homepage = builtins.replaceStrings [ "registry.terraform.io/providers" ] [
"registry.opentofu.org"
] prev.homepage;
});
in
{
devShells.default = pkgs.mkShellNoCC {
@@ -18,17 +18,18 @@
inputs'.clan-core.packages.clan-cli
(pkgs.opentofu.withPlugins (p: builtins.map convert2Tofu [
p.hetznerdns
p.hcloud
p.null
p.external
p.local
]))
];
inputsFrom = [
inputs'.clan-core.devShells.default
(pkgs.opentofu.withPlugins (
p:
builtins.map convert2Tofu [
p.hetznerdns
p.hcloud
p.null
p.external
p.local
]
))
];
inputsFrom = [ inputs'.clan-core.devShells.default ];
};
};
}
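
The convert2Tofu helper above only rewrites a provider's homepage attribute from the Terraform registry to the OpenTofu one. A minimal sketch of the underlying builtins.replaceStrings call, evaluated in nix repl with a hypothetical provider homepage:

# hypothetical input URL, for illustration only
builtins.replaceStrings
  [ "registry.terraform.io/providers" ]
  [ "registry.opentofu.org" ]
  "https://registry.terraform.io/providers/hetznercloud/hcloud"
# => "https://registry.opentofu.org/hetznercloud/hcloud"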

View File

@@ -59,10 +59,11 @@
]
},
"locked": {
"lastModified": 1717997057,
"narHash": "sha256-SQtmiLGFuZTuRT+IhOD8K38PHmkhof3mHM4aKIP6pW8=",
"lastModified": 1718025848,
"narHash": "sha256-la9fj0/QSJyEaw1LXMLke9JdiKRdue7sS1bg4uQ5A/4=",
"rev": "a1acf0b05d201e12dcbebd2396d71b93e14b8982",
"type": "tarball",
"url": "https://git.clan.lol/clan/clan-core/archive/main.tar.gz"
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/a1acf0b05d201e12dcbebd2396d71b93e14b8982.tar.gz"
},
"original": {
"type": "tarball",
@@ -77,11 +78,11 @@
]
},
"locked": {
"lastModified": 1717915259,
"narHash": "sha256-VsGPboaleIlPELHY5cNTrXK4jHVmgUra8uC6h7KVC5c=",
"lastModified": 1717177033,
"narHash": "sha256-G3CZJafCO8WDy3dyA2EhpUJEmzd5gMJ2IdItAg0Hijw=",
"owner": "nix-community",
"repo": "disko",
"rev": "1bbdb06f14e2621290b250e631cf3d8948e4d19b",
"rev": "0274af4c92531ebfba4a5bd493251a143bc51f3c",
"type": "github"
},
"original": {
@@ -158,6 +159,22 @@
"type": "github"
}
},
"nixos-2311": {
"locked": {
"lastModified": 1717017538,
"narHash": "sha256-S5kltvDDfNQM3xx9XcvzKEOyN2qk8Sa+aSOLqZ+1Ujc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "64e468fd2652105710d86cd2ae3e65a5a6d58dec",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "release-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixos-generators": {
"inputs": {
"nixlib": "nixlib",
@@ -182,18 +199,18 @@
},
"nixos-images": {
"inputs": {
"nixos-stable": "nixos-stable",
"nixos-2311": "nixos-2311",
"nixos-unstable": [
"clan-core",
"nixpkgs"
]
},
"locked": {
"lastModified": 1717770332,
"narHash": "sha256-NQmFHj0hTCUgnMAsaNTu6sNTRyo0rFQEe+/lVgV5yxU=",
"lastModified": 1717040312,
"narHash": "sha256-yI/en4IxuCEClIUpIs3QTyYCCtmSPLOhwLJclfNwdeg=",
"owner": "nix-community",
"repo": "nixos-images",
"rev": "72771bd35f4e19e32d6f652528483b5e07fc317b",
"rev": "47bfb55316e105390dd761e0b6e8e0be09462b67",
"type": "github"
},
"original": {
@@ -229,22 +246,6 @@
"type": "gitlab"
}
},
"nixos-stable": {
"locked": {
"lastModified": 1717555607,
"narHash": "sha256-WZ1s48OODmRJ3DHC+I/DtM3tDRuRJlNqMvxvAPTD7ec=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "0b8e7a1ae5a94da2e1ee3f3030a32020f6254105",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-24.05",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1717868076,
@@ -285,11 +286,11 @@
]
},
"locked": {
"lastModified": 1717902109,
"narHash": "sha256-OQTjaEZcByyVmHwJlKp/8SE9ikC4w+mFd3X0jJs6wiA=",
"lastModified": 1717297459,
"narHash": "sha256-cZC2f68w5UrJ1f+2NWGV9Gx0dEYmxwomWN2B0lx0QRA=",
"owner": "Mic92",
"repo": "sops-nix",
"rev": "f0922ad001829b400f0160ba85b47d252fa3d925",
"rev": "ab2a43b0d21d1d37d4d5726a892f714eaeb4b075",
"type": "github"
},
"original": {

View File

@@ -37,39 +37,55 @@
buildbot-nix.inputs.treefmt-nix.follows = "treefmt-nix";
};
outputs = inputs@{ flake-parts, ... }:
flake-parts.lib.mkFlake { inherit inputs; } ({ self, ... }: {
systems = [
"x86_64-linux"
"aarch64-linux"
];
imports = [
inputs.treefmt-nix.flakeModule
./devShells/flake-module.nix
./targets/flake-module.nix
./modules/flake-module.nix
./pkgs/flake-module.nix
];
perSystem = ({ lib, self', system, ... }: {
treefmt = {
projectRootFile = ".git/config";
programs.hclfmt.enable = true;
programs.nixpkgs-fmt.enable = true;
settings.formatter.nixpkgs-fmt.excludes = [
# generated files
"node-env.nix"
"node-packages.nix"
"composition.nix"
];
};
checks =
let
nixosMachines = lib.mapAttrs' (name: config: lib.nameValuePair "nixos-${name}" config.config.system.build.toplevel) ((lib.filterAttrs (_: config: config.pkgs.system == system)) self.nixosConfigurations);
packages = lib.mapAttrs' (n: lib.nameValuePair "package-${n}") self'.packages;
devShells = lib.mapAttrs' (n: lib.nameValuePair "devShell-${n}") self'.devShells;
homeConfigurations = lib.mapAttrs' (name: config: lib.nameValuePair "home-manager-${name}" config.activation-script) (self'.legacyPackages.homeConfigurations or { });
in
nixosMachines // packages // devShells // homeConfigurations;
});
});
outputs =
inputs@{ flake-parts, ... }:
flake-parts.lib.mkFlake { inherit inputs; } (
{ self, ... }:
{
systems = [
"x86_64-linux"
"aarch64-linux"
];
imports = [
inputs.treefmt-nix.flakeModule
./devShells/flake-module.nix
./targets/flake-module.nix
./modules/flake-module.nix
./pkgs/flake-module.nix
];
perSystem = (
{
lib,
self',
system,
...
}:
{
treefmt = {
projectRootFile = ".git/config";
programs.hclfmt.enable = true;
programs.nixfmt-rfc-style.enable = true;
settings.formatter.nixfmt-rfc-style.excludes = [
# generated files
"node-env.nix"
"node-packages.nix"
"composition.nix"
];
};
checks =
let
nixosMachines = lib.mapAttrs' (
name: config: lib.nameValuePair "nixos-${name}" config.config.system.build.toplevel
) ((lib.filterAttrs (_: config: config.pkgs.system == system)) self.nixosConfigurations);
packages = lib.mapAttrs' (n: lib.nameValuePair "package-${n}") self'.packages;
devShells = lib.mapAttrs' (n: lib.nameValuePair "devShell-${n}") self'.devShells;
homeConfigurations = lib.mapAttrs' (
name: config: lib.nameValuePair "home-manager-${name}" config.activation-script
) (self'.legacyPackages.homeConfigurations or { });
in
nixosMachines // packages // devShells // homeConfigurations;
}
);
}
);
}
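
The renaming in checks is what produces the buildbot check names listed at the top of this PR (package-gitea, devShell-default, nixos-web01, and so on). A minimal sketch of the lib.mapAttrs' pattern, with someDrv standing in for a real derivation:

lib.mapAttrs' (n: lib.nameValuePair "package-${n}") { gitea = someDrv; }
# => { package-gitea = someDrv; }
lib.nameValuePair "devShell-default" someDrv
# => { name = "devShell-default"; value = someDrv; }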

View File

@@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHS2PvT2e04pqbt1EFFN2y1za9nNmr8rcfnXq9kG5RS2 nixbld@turingmachine

View File

@@ -41,7 +41,10 @@ in
extraGroups = [ "wheel" ];
shell = "/run/current-system/sw/bin/zsh";
uid = 1004;
openssh.authorizedKeys.keys = [ admins.kenji admins.kenji-remote ];
openssh.authorizedKeys.keys = [
admins.kenji
admins.kenji-remote
];
};
johannes = {
isNormalUser = true;

View File

@@ -1,4 +1,5 @@
{ self, inputs, ... }: {
{ self, inputs, ... }:
{
flake.nixosModules = {
server.imports = [
inputs.srvos.nixosModules.server

View File

@@ -1,31 +1,22 @@
{ config
, lib
, ...
}:
with lib; let
{ config, lib, ... }:
let
cfg = config.clan.networking;
in
{
options = {
clan.networking.ipv4.address = mkOption {
type = types.str;
};
clan.networking.ipv4.address = lib.mkOption { type = lib.types.str; };
clan.networking.ipv4.cidr = mkOption {
type = types.str;
clan.networking.ipv4.cidr = lib.mkOption {
type = lib.types.str;
default = "26";
};
clan.networking.ipv4.gateway = mkOption {
type = types.str;
};
clan.networking.ipv4.gateway = lib.mkOption { type = lib.types.str; };
clan.networking.ipv6.address = mkOption {
type = types.str;
};
clan.networking.ipv6.address = lib.mkOption { type = lib.types.str; };
clan.networking.ipv6.cidr = mkOption {
type = types.str;
clan.networking.ipv6.cidr = lib.mkOption {
type = lib.types.str;
default = "64";
};
};
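
For context, a machine configuration consuming this module would set these options roughly as follows (addresses drawn from the documentation ranges, purely illustrative):

{
  clan.networking.ipv4.address = "192.0.2.10";
  clan.networking.ipv4.gateway = "192.0.2.1";
  # ipv4.cidr defaults to "26"; ipv6.cidr defaults to "64"
  clan.networking.ipv6.address = "2001:db8::10";
}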

View File

@@ -22,6 +22,7 @@ in
fqdn = "mail.clan.lol";
domains = [ "clan.lol" ];
enablePop3 = true;
certificateScheme = "acme-nginx";
# kresd sucks unfortunately (fails when one NS server is not working, instead of trying the others)
localDnsResolver = false;

View File

@@ -1,26 +1,21 @@
{ config, ... }: {
{ config, self, ... }:
{
imports = [ self.inputs.clan-core.clanModules.borgbackup ];
# 100GB storagebox is under the nix-community hetzner account
systemd.services.borgbackup-job-clan-lol.serviceConfig.ReadWritePaths = [
"/var/log/telegraf"
];
# Run this from the hetzner network:
# ssh-keyscan -p 23 u359378.your-storagebox.de
programs.ssh.knownHosts = {
storagebox-ecdsa.hostNames = [ "[u359378.your-storagebox.de]:23" ];
storagebox-ecdsa.publicKey = "ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBAGK0po6usux4Qv2d8zKZN1dDvbWjxKkGsx7XwFdSUCnF19Q8psHEUWR7C/LtSQ5crU/g+tQVRBtSgoUcE8T+FWp5wBxKvWG2X9gD+s9/4zRmDeSJR77W6gSA/+hpOZoSE+4KgNdnbYSNtbZH/dN74EG7GLb/gcIpbUUzPNXpfKl7mQitw==";
storagebox-rsa.hostNames = [ "[u359378.your-storagebox.de]:23" ];
storagebox-rsa.publicKey = "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA5EB5p/5Hp3hGW1oHok+PIOH9Pbn7cnUiGmUEBrCVjnAw+HrKyN8bYVV0dIGllswYXwkG/+bgiBlE6IVIBAq+JwVWu1Sss3KarHY3OvFJUXZoZyRRg/Gc/+LRCE7lyKpwWQ70dbelGRyyJFH36eNv6ySXoUYtGkwlU5IVaHPApOxe4LHPZa/qhSRbPo2hwoh0orCtgejRebNtW5nlx00DNFgsvn8Svz2cIYLxsPVzKgUxs8Zxsxgn+Q/UvR7uq4AbAhyBMLxv7DjJ1pc7PJocuTno2Rw9uMZi1gkjbnmiOh6TTXIEWbnroyIhwc8555uto9melEUmWNQ+C+PwAK+MPw==";
clan.borgbackup.destinations.${config.networking.hostName} = {
repo = "u366395@u366395.your-storagebox.de:/./borgbackup";
rsh = "ssh -oPort=23 -i ${config.clanCore.facts.services.borgbackup.secret."borgbackup.ssh".path}";
};
services.borgbackup.jobs.clan-lol = {
paths = [
"/home"
"/var"
"/root"
];
clanCore.state.system.folders = [
"/home"
"/etc"
"/var"
"/root"
];
services.borgbackup.jobs.${config.networking.hostName} = {
exclude = [
"*.pyc"
"/home/*/.direnv"
@@ -41,32 +36,20 @@
"/var/tmp"
"/var/log"
];
# $ ssh-keygen -y -f /run/secrets/hetzner-borgbackup-ssh > /tmp/hetzner-borgbackup-ssh.pub
# $ cat /tmp/hetzner-borgbackup-ssh.pub | ssh -p23 u366395@u366395.your-storagebox.de install-ssh-key
repo = "u366395@u366395.your-storagebox.de:/./borgbackup";
# Disaster recovery:
# get the backup passphrase and ssh key from sops and store them in /tmp
# $ export BORG_PASSCOMMAND='cat /tmp/hetzner-borgbackup-passphrase'
# $ export BORG_REPO='u359378@u359378.your-storagebox.de:/./borgbackup'
# $ export BORG_RSH='ssh -oPort=23 -i /tmp/hetzner-borgbackup-ssh'
# $ borg list
# web01-clan-lol-2023-07-21T14:12:22 Fri, 2023-07-21 14:12:27 [539b1037669ffd0d3f50020f439bbe2881b7234910e405eafc333125383351bc]
# $ borg mount u359378@u359378.your-storagebox.de:/./borgbackup::web01-clan-lol-2023-07-21T14:12:22 /tmp/backup
doInit = true;
encryption = {
mode = "repokey-blake2";
# $ nix run nixpkgs#xkcdpass -- -d '-' -n 3 -C capitalize "$@"
passCommand = "cat ${config.sops.secrets.hetzner-borgbackup-passphrase.path}";
};
compression = "auto,zstd";
startAt = "daily";
# Also enable ssh support in the storagebox web interface.
# By default the storage box is only accessible from the hetzner network.
# $ ssh-keygen -t ed25519 -N "" -f /tmp/ssh_host_ed25519_key
# $ cat /tmp/ssh_host_ed25519_key.pub | ssh -p23 u359378@u359378.your-storagebox.de install-ssh-key
environment.BORG_RSH = "ssh -oPort=23 -i ${config.sops.secrets.hetzner-borgbackup-ssh.path}";
# $ clan facts generate
# $ clan facts list web01 | jq .borgbackup.ssh.pub | ssh -p23 u359378@u359378.your-storagebox.de install-ssh-key
preHook = ''
set -x
'';
@@ -76,12 +59,19 @@
task,frequency=daily last_run=$(date +%s)i,state="$([[ $exitStatus == 0 ]] && echo ok || echo fail)"
EOF
'';
};
prune.keep = {
within = "1d"; # Keep all archives from the last day
daily = 7;
weekly = 4;
monthly = 0;
};
systemd.services."borgbackup-job-${config.networking.hostName}".serviceConfig.ReadWritePaths = [
"/var/log/telegraf"
];
# Run this from the hetzner network:
# ssh-keyscan -p 23 u359378.your-storagebox.de
programs.ssh.knownHosts = {
storagebox-ecdsa.hostNames = [ "[u359378.your-storagebox.de]:23" ];
storagebox-ecdsa.publicKey = "ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBAGK0po6usux4Qv2d8zKZN1dDvbWjxKkGsx7XwFdSUCnF19Q8psHEUWR7C/LtSQ5crU/g+tQVRBtSgoUcE8T+FWp5wBxKvWG2X9gD+s9/4zRmDeSJR77W6gSA/+hpOZoSE+4KgNdnbYSNtbZH/dN74EG7GLb/gcIpbUUzPNXpfKl7mQitw==";
storagebox-rsa.hostNames = [ "[u359378.your-storagebox.de]:23" ];
storagebox-rsa.publicKey = "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA5EB5p/5Hp3hGW1oHok+PIOH9Pbn7cnUiGmUEBrCVjnAw+HrKyN8bYVV0dIGllswYXwkG/+bgiBlE6IVIBAq+JwVWu1Sss3KarHY3OvFJUXZoZyRRg/Gc/+LRCE7lyKpwWQ70dbelGRyyJFH36eNv6ySXoUYtGkwlU5IVaHPApOxe4LHPZa/qhSRbPo2hwoh0orCtgejRebNtW5nlx00DNFgsvn8Svz2cIYLxsPVzKgUxs8Zxsxgn+Q/UvR7uq4AbAhyBMLxv7DjJ1pc7PJocuTno2Rw9uMZi1gkjbnmiOh6TTXIEWbnroyIhwc8555uto9melEUmWNQ+C+PwAK+MPw==";
};
}

View File

@@ -1,10 +1,18 @@
{ config, self, pkgs, ... }: {
{
config,
self,
pkgs,
...
}:
{
# service for the automatic merge bot
systemd.services.clan-merge = {
description = "Merge clan.lol PRs automatically";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
environment = { GITEA_TOKEN_FILE = "%d/GITEA_TOKEN_FILE"; };
environment = {
GITEA_TOKEN_FILE = "%d/GITEA_TOKEN_FILE";
};
serviceConfig = {
LoadCredential = [ "GITEA_TOKEN_FILE:${config.sops.secrets.merge-bot-gitea-token.path}" ];
Restart = "on-failure";

View File

@@ -1,4 +1,5 @@
{ self, ... }: {
{ self, ... }:
{
imports = [
./borgbackup.nix
./clan-merge.nix
@@ -8,7 +9,7 @@
./homepage.nix
./postfix.nix
./jobs.nix
#./matrix-synapse.nix
./matrix-synapse.nix
../dev.nix
self.inputs.clan-core.clanModules.zt-tcp-relay
];

View File

@@ -1,8 +1,26 @@
{ config, self, pkgs, lib, ... }:
{
config,
self,
pkgs,
lib,
...
}:
let
storeDeps = pkgs.runCommand "store-deps" { } ''
mkdir -p $out/bin
for dir in ${toString [ pkgs.coreutils pkgs.findutils pkgs.gnugrep pkgs.gawk pkgs.git pkgs.nix pkgs.bash pkgs.jq pkgs.nodejs ]}; do
for dir in ${
toString [
pkgs.coreutils
pkgs.findutils
pkgs.gnugrep
pkgs.gawk
pkgs.git
pkgs.nix
pkgs.bash
pkgs.jq
pkgs.nodejs
]
}; do
for bin in "$dir"/bin/*; do
ln -s "$bin" "$out/bin/$(basename "$bin")"
done
@@ -14,87 +32,95 @@ let
'';
numInstances = 2;
in
lib.mkMerge [{
# everything here has no dependencies on the store
systemd.services.gitea-runner-nix-image = {
wantedBy = [ "multi-user.target" ];
after = [ "podman.service" ];
requires = [ "podman.service" ];
path = [ config.virtualisation.podman.package pkgs.gnutar pkgs.shadow pkgs.getent ];
# we also include etc here because the cleanup job also wants the nixuser to be present
script = ''
set -eux -o pipefail
mkdir -p etc/nix
# Create an unprivileged user that we can also use without the run-as-user.sh script
touch etc/passwd etc/group
groupid=$(cut -d: -f3 < <(getent group nixuser))
userid=$(cut -d: -f3 < <(getent passwd nixuser))
groupadd --prefix $(pwd) --gid "$groupid" nixuser
emptypassword='$6$1ero.LwbisiU.h3D$GGmnmECbPotJoPQ5eoSTD6tTjKnSWZcjHoVTkxFLZP17W9hRi/XkmCiAMOfWruUwy8gMjINrBMNODc7cYEo4K.'
useradd --prefix $(pwd) -p "$emptypassword" -m -d /tmp -u "$userid" -g "$groupid" -G nixuser nixuser
cat <<NIX_CONFIG > etc/nix/nix.conf
accept-flake-config = true
experimental-features = nix-command flakes
NIX_CONFIG
cat <<NSSWITCH > etc/nsswitch.conf
passwd: files mymachines systemd
group: files mymachines systemd
shadow: files
hosts: files mymachines dns myhostname
networks: files
ethers: files
services: files
protocols: files
rpc: files
NSSWITCH
# list the content as it will be imported into the container
tar -cv . | tar -tvf -
tar -cv . | podman import - gitea-runner-nix
'';
serviceConfig = {
RuntimeDirectory = "gitea-runner-nix-image";
WorkingDirectory = "/run/gitea-runner-nix-image";
Type = "oneshot";
RemainAfterExit = true;
};
};
users.users.nixuser = {
group = "nixuser";
description = "Used for running nix ci jobs";
home = "/var/empty";
isSystemUser = true;
};
users.groups.nixuser = { };
}
lib.mkMerge [
{
systemd.services = lib.genAttrs (builtins.genList (n: "gitea-runner-nix${builtins.toString n}-token") numInstances) (name: {
# everything here has no dependencies on the store
systemd.services.gitea-runner-nix-image = {
wantedBy = [ "multi-user.target" ];
after = [ "gitea.service" ];
environment = {
GITEA_CUSTOM = "/var/lib/gitea/custom";
GITEA_WORK_DIR = "/var/lib/gitea";
};
after = [ "podman.service" ];
requires = [ "podman.service" ];
path = [
config.virtualisation.podman.package
pkgs.gnutar
pkgs.shadow
pkgs.getent
];
# we also include etc here because the cleanup job also wants the nixuser to be present
script = ''
set -euo pipefail
token=$(${lib.getExe self.packages.${pkgs.hostPlatform.system}.gitea} actions generate-runner-token)
echo "TOKEN=$token" > /var/lib/gitea-registration/${name}
set -eux -o pipefail
mkdir -p etc/nix
# Create an unprivileged user that we can also use without the run-as-user.sh script
touch etc/passwd etc/group
groupid=$(cut -d: -f3 < <(getent group nixuser))
userid=$(cut -d: -f3 < <(getent passwd nixuser))
groupadd --prefix $(pwd) --gid "$groupid" nixuser
emptypassword='$6$1ero.LwbisiU.h3D$GGmnmECbPotJoPQ5eoSTD6tTjKnSWZcjHoVTkxFLZP17W9hRi/XkmCiAMOfWruUwy8gMjINrBMNODc7cYEo4K.'
useradd --prefix $(pwd) -p "$emptypassword" -m -d /tmp -u "$userid" -g "$groupid" -G nixuser nixuser
cat <<NIX_CONFIG > etc/nix/nix.conf
accept-flake-config = true
experimental-features = nix-command flakes
NIX_CONFIG
cat <<NSSWITCH > etc/nsswitch.conf
passwd: files mymachines systemd
group: files mymachines systemd
shadow: files
hosts: files mymachines dns myhostname
networks: files
ethers: files
services: files
protocols: files
rpc: files
NSSWITCH
# list the content as it will be imported into the container
tar -cv . | tar -tvf -
tar -cv . | podman import - gitea-runner-nix
'';
unitConfig.ConditionPathExists = [ "!/var/lib/gitea-registration/${name}" ];
serviceConfig = {
User = "gitea";
Group = "gitea";
StateDirectory = "gitea-registration";
RuntimeDirectory = "gitea-runner-nix-image";
WorkingDirectory = "/run/gitea-runner-nix-image";
Type = "oneshot";
RemainAfterExit = true;
};
});
};
users.users.nixuser = {
group = "nixuser";
description = "Used for running nix ci jobs";
home = "/var/empty";
isSystemUser = true;
};
users.groups.nixuser = { };
}
{
systemd.services =
lib.genAttrs (builtins.genList (n: "gitea-runner-nix${builtins.toString n}-token") numInstances)
(name: {
wantedBy = [ "multi-user.target" ];
after = [ "gitea.service" ];
environment = {
GITEA_CUSTOM = "/var/lib/gitea/custom";
GITEA_WORK_DIR = "/var/lib/gitea";
};
script = ''
set -euo pipefail
token=$(${lib.getExe self.packages.${pkgs.hostPlatform.system}.gitea} actions generate-runner-token)
echo "TOKEN=$token" > /var/lib/gitea-registration/${name}
'';
unitConfig.ConditionPathExists = [ "!/var/lib/gitea-registration/${name}" ];
serviceConfig = {
User = "gitea";
Group = "gitea";
StateDirectory = "gitea-registration";
Type = "oneshot";
RemainAfterExit = true;
};
});
# Format of the token file:
virtualisation = {
@@ -111,106 +137,119 @@ lib.mkMerge [{
virtualisation.containers.containersConf.settings = {
# podman seems to not work with systemd-resolved
containers.dns_servers = [ "8.8.8.8" "8.8.4.4" ];
containers.dns_servers = [
"8.8.8.8"
"8.8.4.4"
];
};
}
{
systemd.services = lib.genAttrs (builtins.genList (n: "gitea-runner-nix${builtins.toString n}") numInstances) (name: {
after = [
"${name}-token.service"
"gitea-runner-nix-image.service"
];
requires = [
"${name}-token.service"
"gitea-runner-nix-image.service"
];
systemd.services =
lib.genAttrs (builtins.genList (n: "gitea-runner-nix${builtins.toString n}") numInstances)
(name: {
after = [
"${name}-token.service"
"gitea-runner-nix-image.service"
];
requires = [
"${name}-token.service"
"gitea-runner-nix-image.service"
];
# TODO: systemd confinement
serviceConfig = {
# Hardening (may overlap with DynamicUser=)
# The following options are only for optimizing output of systemd-analyze
AmbientCapabilities = "";
CapabilityBoundingSet = "";
# ProtectClock= adds DeviceAllow=char-rtc r
DeviceAllow = "";
NoNewPrivileges = true;
PrivateDevices = true;
PrivateMounts = true;
PrivateTmp = true;
PrivateUsers = true;
ProtectClock = true;
ProtectControlGroups = true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = "strict";
RemoveIPC = true;
RestrictNamespaces = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
UMask = "0066";
ProtectProc = "invisible";
SystemCallFilter = [
"~@clock"
"~@cpu-emulation"
"~@module"
"~@mount"
"~@obsolete"
"~@raw-io"
"~@reboot"
"~@swap"
# needed by go?
#"~@resources"
"~@privileged"
"~capset"
"~setdomainname"
"~sethostname"
];
RestrictAddressFamilies = [ "AF_INET" "AF_INET6" "AF_UNIX" "AF_NETLINK" ];
# TODO: systemd confinement
serviceConfig = {
# Hardening (may overlap with DynamicUser=)
# The following options are only for optimizing output of systemd-analyze
AmbientCapabilities = "";
CapabilityBoundingSet = "";
# ProtectClock= adds DeviceAllow=char-rtc r
DeviceAllow = "";
NoNewPrivileges = true;
PrivateDevices = true;
PrivateMounts = true;
PrivateTmp = true;
PrivateUsers = true;
ProtectClock = true;
ProtectControlGroups = true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = "strict";
RemoveIPC = true;
RestrictNamespaces = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
UMask = "0066";
ProtectProc = "invisible";
SystemCallFilter = [
"~@clock"
"~@cpu-emulation"
"~@module"
"~@mount"
"~@obsolete"
"~@raw-io"
"~@reboot"
"~@swap"
# needed by go?
#"~@resources"
"~@privileged"
"~capset"
"~setdomainname"
"~sethostname"
];
RestrictAddressFamilies = [
"AF_INET"
"AF_INET6"
"AF_UNIX"
"AF_NETLINK"
];
# Needs network access
PrivateNetwork = false;
# Cannot be true due to Node
MemoryDenyWriteExecute = false;
# Needs network access
PrivateNetwork = false;
# Cannot be true due to Node
MemoryDenyWriteExecute = false;
# The more restrictive "pid" option makes `nix` commands in CI emit
# "GC Warning: Couldn't read /proc/stat"
# You may want to set this to "pid" if not using `nix` commands
ProcSubset = "all";
# Coverage programs for compiled code such as `cargo-tarpaulin` disable
# ASLR (address space layout randomization) which requires the
# `personality` syscall
# You may want to set this to `true` if not using coverage tooling on
# compiled code
LockPersonality = false;
# The more restrictive "pid" option makes `nix` commands in CI emit
# "GC Warning: Couldn't read /proc/stat"
# You may want to set this to "pid" if not using `nix` commands
ProcSubset = "all";
# Coverage programs for compiled code such as `cargo-tarpaulin` disable
# ASLR (address space layout randomization) which requires the
# `personality` syscall
# You may want to set this to `true` if not using coverage tooling on
# compiled code
LockPersonality = false;
# Note that this has some interactions with the User setting; so you may
# want to consult the systemd docs if using both.
DynamicUser = true;
};
});
# Note that this has some interactions with the User setting; so you may
# want to consult the systemd docs if using both.
DynamicUser = true;
};
});
services.gitea-actions-runner.instances = lib.genAttrs (builtins.genList (n: "nix${builtins.toString n}") numInstances) (name: {
enable = true;
name = "nix-runner";
# take the git root url from the gitea config
# only possible if you've also configured your gitea through the same nix config
# otherwise you need to set it manually
url = config.services.gitea.settings.server.ROOT_URL;
# use your favourite nix secret manager to get a path for this
tokenFile = "/var/lib/gitea-registration/gitea-runner-${name}-token";
labels = [ "nix:docker://gitea-runner-nix" ];
settings = {
container.options = "-e NIX_BUILD_SHELL=/bin/bash -e PAGER=cat -e PATH=/bin -e SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt --device /dev/kvm -v /nix:/nix -v ${storeDeps}/bin:/bin -v ${storeDeps}/etc/ssl:/etc/ssl --user nixuser --device=/dev/kvm";
# the default network that also respects our dns server settings
container.network = "host";
container.valid_volumes = [
"/nix"
"${storeDeps}/bin"
"${storeDeps}/etc/ssl"
];
};
});
}]
services.gitea-actions-runner.instances =
lib.genAttrs (builtins.genList (n: "nix${builtins.toString n}") numInstances)
(name: {
enable = true;
name = "nix-runner";
# take the git root url from the gitea config
# only possible if you've also configured your gitea through the same nix config
# otherwise you need to set it manually
url = config.services.gitea.settings.server.ROOT_URL;
# use your favourite nix secret manager to get a path for this
tokenFile = "/var/lib/gitea-registration/gitea-runner-${name}-token";
labels = [ "nix:docker://gitea-runner-nix" ];
settings = {
container.options = "-e NIX_BUILD_SHELL=/bin/bash -e PAGER=cat -e PATH=/bin -e SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt --device /dev/kvm -v /nix:/nix -v ${storeDeps}/bin:/bin -v ${storeDeps}/etc/ssl:/etc/ssl --user nixuser --device=/dev/kvm";
# the default network that also respects our dns server settings
container.network = "host";
container.valid_volumes = [
"/nix"
"${storeDeps}/bin"
"${storeDeps}/etc/ssl"
];
};
});
}
]
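
The per-instance units above are stamped out with lib.genAttrs over builtins.genList; a minimal worked example for numInstances = 2:

builtins.genList (n: "nix${builtins.toString n}") 2
# => [ "nix0" "nix1" ]
lib.genAttrs [ "nix0" "nix1" ] (name: { inherit name; })
# => { nix0 = { name = "nix0"; }; nix1 = { name = "nix1"; }; }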

View File

@@ -1,12 +1,22 @@
{ config, pkgs, lib, self, ... }:
{
pkgs,
lib,
self,
config,
...
}:
let
# make the logs for this host "public" so that they show up in e.g. metrics
publog = vhost: lib.attrsets.unionOfDisjoint vhost {
extraConfig = (vhost.extraConfig or "") + ''
access_log /var/log/nginx/public.log vcombined;
'';
};
publog =
vhost:
lib.attrsets.unionOfDisjoint vhost {
extraConfig =
(vhost.extraConfig or "")
+ ''
access_log /var/log/nginx/public.log vcombined;
'';
};
in
{

View File

@@ -1,4 +1,5 @@
{ pkgs, ... }: {
{ pkgs, ... }:
{
services.postgresql.enable = true;
services.postgresql.package = pkgs.postgresql_14;
services.postgresql.settings = {

View File

@@ -1,4 +1,4 @@
{ stdenv, lib, pkgs, ... }:
{ pkgs, ... }:
let
domain = "metrics.clan.lol";
@@ -38,14 +38,13 @@ in
"d ${pub_goaccess} 0755 goaccess nginx -"
];
# --browsers-file=/etc/goaccess/browsers.list
# https://raw.githubusercontent.com/allinurl/goaccess/master/config/browsers.list
systemd.services.goaccess = {
description = "GoAccess server monitoring";
preStart = ''
rm -f ${pub_goaccess}/index.html
'';
rm -f ${pub_goaccess}/index.html
'';
serviceConfig = {
User = "goaccess";
Group = "nginx";
@@ -83,7 +82,11 @@ in
ProtectSystem = "strict";
SystemCallFilter = "~@clock @cpu-emulation @debug @keyring @memlock @module @mount @obsolete @privileged @reboot @resources @setuid @swap @raw-io";
ReadOnlyPaths = "/";
ReadWritePaths = [ "/proc/self" "${pub_goaccess}" "${priv_goaccess}" ];
ReadWritePaths = [
"/proc/self"
"${pub_goaccess}"
"${priv_goaccess}"
];
PrivateDevices = "yes";
ProtectKernelModules = "yes";
ProtectKernelTunables = "yes";
@@ -92,7 +95,6 @@ in
wantedBy = [ "multi-user.target" ];
};
services.nginx.virtualHosts."${domain}" = {
addSSL = true;
enableACME = true;

View File

@@ -1,17 +1,18 @@
{ config, pkgs, ... }: {
{ config, pkgs, ... }:
{
services.harmonia.enable = true;
# $ nix-store --generate-binary-cache-key cache.yourdomain.tld-1 harmonia.secret harmonia.pub
services.harmonia.signKeyPath = config.sops.secrets.harmonia-secret.path;
services.nginx = {
package = pkgs.nginxStable.override {
modules = [ pkgs.nginxModules.zstd ];
};
package = pkgs.nginxStable.override { modules = [ pkgs.nginxModules.zstd ]; };
};
# trust our own cache
nix.settings.trusted-substituters = [ "https://cache.clan.lol" ];
nix.settings.trusted-public-keys = [ "cache.clan.lol-1:3KztgSAB5R1M+Dz7vzkBGzXdodizbgLXGXKXlcQLA28=" ];
nix.settings.trusted-public-keys = [
"cache.clan.lol-1:3KztgSAB5R1M+Dz7vzkBGzXdodizbgLXGXKXlcQLA28="
];
services.nginx.virtualHosts."cache.clan.lol" = {
forceSSL = true;

View File

@@ -1,4 +1,4 @@
{ config, lib, pkgs, self, ... }:
{ config, ... }:
{
security.acme.defaults.email = "admins@clan.lol";
@@ -6,13 +6,11 @@
# www user to push website artifacts via ssh
users.users.www = {
openssh.authorizedKeys.keys =
config.users.users.root.openssh.authorizedKeys.keys
++ [
# ssh-homepage-key
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMxZ3Av30M6Sh6NU1mnCskB16bYtNP8vskc/+ud0AU1C ssh-homepage-key"
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBuYyfSuETSrwqCsWHeeClqjcsFlMEmiJN6Rr8/DwrU0 gitea-ci"
];
openssh.authorizedKeys.keys = config.users.users.root.openssh.authorizedKeys.keys ++ [
# ssh-homepage-key
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMxZ3Av30M6Sh6NU1mnCskB16bYtNP8vskc/+ud0AU1C ssh-homepage-key"
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBuYyfSuETSrwqCsWHeeClqjcsFlMEmiJN6Rr8/DwrU0 gitea-ci"
];
isSystemUser = true;
shell = "/run/current-system/sw/bin/bash";
group = "www";
@@ -20,9 +18,7 @@
users.groups.www = { };
# ensure /var/www can be accessed by nginx and www user
systemd.tmpfiles.rules = [
"d /var/www 0755 www nginx"
];
systemd.tmpfiles.rules = [ "d /var/www 0755 www nginx" ];
services.nginx = {

View File

@@ -1,4 +1,10 @@
{ config, self, pkgs, lib, ... }:
{
config,
self,
pkgs,
lib,
...
}:
let
configForJob = name: {
systemd.timers.${name} = {
@@ -46,9 +52,11 @@ let
};
in
{
config = lib.mkMerge (map configForJob [
"job-flake-update-clan-core"
"job-flake-update-clan-homepage"
"job-flake-update-clan-infra"
]);
config = lib.mkMerge (
map configForJob [
"job-flake-update-clan-core"
"job-flake-update-clan-homepage"
"job-flake-update-clan-infra"
]
);
}
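
For intuition, lib.mkMerge defers merging to the module system, so the three attribute sets produced by configForJob end up as one config defining a timer and a service per job. A reduced sketch with hypothetical job names:

# inside a module, this merges to a config with both timers defined
config = lib.mkMerge [
  { systemd.timers.job-a.wantedBy = [ "timers.target" ]; }
  { systemd.timers.job-b.wantedBy = [ "timers.target" ]; }
];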

View File

@@ -1,6 +1,5 @@
{ self, ... }:
{
imports = [ self.inputs.clan-core.clanModules.matrix-synapse ];
clan.matrix-synapse.enable = true;
clan.matrix-synapse.domain = "clan.lol";
}

View File

@@ -1,4 +1,3 @@
{ self, ... }:
let
mirrorBoot = idx: {
type = "disk";
@@ -41,8 +40,14 @@ in
efiSupport = true;
efiInstallAsRemovable = true;
mirroredBoots = [
{ path = "/boot0"; devices = [ "nodev" ]; }
{ path = "/boot1"; devices = [ "nodev" ]; }
{
path = "/boot0";
devices = [ "nodev" ];
}
{
path = "/boot1";
devices = [ "nodev" ];
}
];
};

View File

@@ -1,10 +1,19 @@
{ bash
, coreutils
, git
, tea
, openssh
, writePureShellScriptBin
{
bash,
coreutils,
git,
tea,
openssh,
writePureShellScriptBin,
}:
writePureShellScriptBin "action-create-pr" [ bash coreutils git tea openssh ] ''
bash ${./script.sh} "$@"
''
writePureShellScriptBin "action-create-pr"
[
bash
coreutils
git
tea
openssh
]
''
bash ${./script.sh} "$@"
''

View File

@@ -1,8 +1,15 @@
{ bash
, coreutils
, tea
, writePureShellScriptBin
{
bash,
coreutils,
tea,
writePureShellScriptBin,
}:
writePureShellScriptBin "action-ensure-tea-login" [ bash coreutils tea ] ''
bash ${./script.sh}
''
writePureShellScriptBin "action-ensure-tea-login"
[
bash
coreutils
tea
]
''
bash ${./script.sh}
''

View File

@@ -1,20 +1,23 @@
{ bash
, coreutils
, git
, openssh
, action-ensure-tea-login
, action-create-pr
, action-flake-update
, writePureShellScriptBin
{
bash,
coreutils,
git,
openssh,
action-ensure-tea-login,
action-create-pr,
action-flake-update,
writePureShellScriptBin,
}:
writePureShellScriptBin "action-flake-update-pr-clan" [
bash
coreutils
git
openssh
action-ensure-tea-login
action-create-pr
action-flake-update
] ''
bash ${./script.sh}
''
writePureShellScriptBin "action-flake-update-pr-clan"
[
bash
coreutils
git
openssh
action-ensure-tea-login
action-create-pr
action-flake-update
]
''
bash ${./script.sh}
''

View File

@@ -1,9 +1,17 @@
{ bash
, coreutils
, git
, nix
, writePureShellScriptBin
{
bash,
coreutils,
git,
nix,
writePureShellScriptBin,
}:
writePureShellScriptBin "action-flake-update" [ bash coreutils git nix ] ''
bash ${./script.sh}
''
writePureShellScriptBin "action-flake-update"
[
bash
coreutils
git
nix
]
''
bash ${./script.sh}
''

View File

@@ -1,7 +1,7 @@
import argparse
import json
import urllib.request
import urllib.error
import urllib.request
from os import environ
from typing import Optional
@@ -38,6 +38,7 @@ def is_ci_green(pr: dict) -> bool:
return False
return True
def is_org_member(user: str, token: str) -> bool:
url = "https://git.clan.lol/api/v1/orgs/clan/members/" + user + f"?token={token}"
try:
@@ -50,7 +51,6 @@ def is_org_member(user: str, token: str) -> bool:
raise
def merge_allowed(pr: dict, bot_name: str, token: str) -> bool:
assignees = pr["assignees"] if pr["assignees"] else []
if (

View File

@@ -1,9 +1,9 @@
{ pkgs ? import <nixpkgs> { }
, lib ? pkgs.lib
, python3 ? pkgs.python3
, ruff ? pkgs.ruff
, runCommand ? pkgs.runCommand
,
{
pkgs ? import <nixpkgs> { },
lib ? pkgs.lib,
python3 ? pkgs.python3,
ruff ? pkgs.ruff,
runCommand ? pkgs.runCommand,
}:
let
pyproject = builtins.fromTOML (builtins.readFile ./pyproject.toml);
@@ -32,13 +32,11 @@ let
package = python3.pkgs.buildPythonPackage {
inherit name src;
format = "pyproject";
nativeBuildInputs = [
python3.pkgs.setuptools
];
propagatedBuildInputs =
dependencies
++ [ ];
passthru.tests = { inherit check; };
nativeBuildInputs = [ python3.pkgs.setuptools ];
propagatedBuildInputs = dependencies ++ [ ];
passthru.tests = {
inherit check;
};
passthru.devDependencies = devDependencies;
};

View File

@@ -1,5 +1,6 @@
{
perSystem = { pkgs, ... }:
perSystem =
{ pkgs, ... }:
let
package = pkgs.callPackage ./default.nix { inherit pkgs; };
in

View File

@@ -1,16 +1,11 @@
{ pkgs ? import <nixpkgs> { } }:
{
pkgs ? import <nixpkgs> { },
}:
let
inherit (pkgs) lib python3;
package = import ./default.nix {
inherit lib pkgs python3;
};
package = import ./default.nix { inherit lib pkgs python3; };
pythonWithDeps = python3.withPackages (
ps:
package.propagatedBuildInputs
++ package.devDependencies
++ [
ps.pip
]
ps: package.propagatedBuildInputs ++ package.devDependencies ++ [ ps.pip ]
);
checkScript = pkgs.writeScriptBin "check" ''
nix build -f . tests -L "$@"

View File

@@ -112,4 +112,6 @@ def test_list_prs_to_merge(monkeypatch: pytest.MonkeyPatch) -> None:
assignees=[dict(login=bot_name)],
),
]
assert clan_merge.list_prs_to_merge(prs, bot_name=bot_name, gitea_token="test") == [prs[0]]
assert clan_merge.list_prs_to_merge(prs, bot_name=bot_name, gitea_token="test") == [
prs[0]
]

View File

@@ -1,33 +1,37 @@
{
imports = [
./clan-merge/flake-module.nix
];
perSystem = { pkgs, config, ... }: {
packages =
let
writers = pkgs.callPackage ./writers.nix { };
in
{
inherit (pkgs.callPackage ./renovate { }) renovate;
gitea = pkgs.callPackage ./gitea { };
imports = [ ./clan-merge/flake-module.nix ];
perSystem =
{ pkgs, config, ... }:
{
packages =
let
writers = pkgs.callPackage ./writers.nix { };
in
{
gitea = pkgs.callPackage ./gitea { };
action-create-pr = pkgs.callPackage ./action-create-pr {
inherit (writers) writePureShellScriptBin;
action-create-pr = pkgs.callPackage ./action-create-pr {
inherit (writers) writePureShellScriptBin;
};
action-ensure-tea-login = pkgs.callPackage ./action-ensure-tea-login {
inherit (writers) writePureShellScriptBin;
};
action-flake-update = pkgs.callPackage ./action-flake-update {
inherit (writers) writePureShellScriptBin;
};
action-flake-update-pr-clan = pkgs.callPackage ./action-flake-update-pr-clan {
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-ensure-tea-login action-create-pr action-flake-update;
};
inherit
(pkgs.callPackages ./job-flake-updates {
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-flake-update-pr-clan;
})
job-flake-update-clan-core
job-flake-update-clan-homepage
job-flake-update-clan-infra
;
};
action-ensure-tea-login = pkgs.callPackage ./action-ensure-tea-login {
inherit (writers) writePureShellScriptBin;
};
action-flake-update = pkgs.callPackage ./action-flake-update {
inherit (writers) writePureShellScriptBin;
};
action-flake-update-pr-clan = pkgs.callPackage ./action-flake-update-pr-clan {
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-ensure-tea-login action-create-pr action-flake-update;
};
inherit (pkgs.callPackages ./job-flake-updates {
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-flake-update-pr-clan;
}) job-flake-update-clan-core job-flake-update-clan-homepage job-flake-update-clan-infra;
};
};
};
}
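
The inherit (…) form above destructures one pkgs.callPackages result into several package attributes at once; a minimal sketch of the equivalence, with a hypothetical attrset:

let jobs = { job-a = 1; job-b = 2; }; in
{
  inherit (jobs) job-a job-b;
  # same as: job-a = jobs.job-a; job-b = jobs.job-b;
}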

View File

@@ -1,13 +1,13 @@
{ action-flake-update-pr-clan
, writePureShellScriptBin
}:
{ action-flake-update-pr-clan, writePureShellScriptBin }:
let
job-flake-update = repo: writePureShellScriptBin "job-flake-update-${repo}" [ action-flake-update-pr-clan ] ''
export REPO="gitea@git.clan.lol:clan/${repo}.git"
export KEEP_VARS="REPO''${KEEP_VARS:+ $KEEP_VARS}"
job-flake-update =
repo:
writePureShellScriptBin "job-flake-update-${repo}" [ action-flake-update-pr-clan ] ''
export REPO="gitea@git.clan.lol:clan/${repo}.git"
export KEEP_VARS="REPO''${KEEP_VARS:+ $KEEP_VARS}"
action-flake-update-pr-clan
'';
action-flake-update-pr-clan
'';
in
{
job-flake-update-clan-core = job-flake-update "clan-core";

View File

@@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@@ -1,8 +0,0 @@
{ pkgs, system, nodejs-18_x, makeWrapper }:
let
nodePackages = import ./composition.nix {
inherit pkgs system;
nodejs = nodejs-18_x;
};
in
nodePackages

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env nix-shell
#! nix-shell -i bash -p nodePackages.node2nix
rm -f node-env.nix
node2nix -18 -i node-packages.json -o node-packages.nix -c composition.nix

View File

@@ -1,689 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
  # Builds a development shell
  buildNodeShell =
    { name
    , packageName
    , version ? null
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      nodeDependencies = buildNodeDependencies args;
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
    in
    stdenv.mkDerivation ({
      name = "node-shell-${name}${if version == null then "" else "-${version}"}";

      buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;

      buildCommand = ''
        mkdir -p $out/bin
        cat > $out/bin/shell <<EOF
        #! ${stdenv.shell} -e
        $shellHook
        exec ${stdenv.shell}
        EOF
        chmod +x $out/bin/shell
      '';

      # Provide the dependencies in a development shell through the NODE_PATH environment variable
      inherit nodeDependencies;

      shellHook = lib.optionalString (dependencies != []) ''
        export NODE_PATH=${nodeDependencies}/lib/node_modules
        export PATH="${nodeDependencies}/bin:$PATH"
      '';
    } // extraArgs);
in
{
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}
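
For orientation, here is a minimal sketch of how these builders are typically consumed from a node2nix-generated composition. Everything named here is illustrative: `my-app`, the `./node-env.nix` path, and the argument wiring follow common node2nix output but are not defined by this diff and may differ between node2nix versions.

{ pkgs ? import <nixpkgs> { } }:
let
  # Hypothetical wiring; the argument set mirrors what recent node2nix
  # versions emit in their generated default.nix (an assumption).
  nodeEnv = import ./node-env.nix {
    inherit pkgs;
    inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
    nodejs = pkgs.nodejs;
    libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
  };
in
{
  # Installs my-app into $out/lib/node_modules/my-app and symlinks its
  # executables into $out/bin (see the installPhase above).
  package = nodeEnv.buildNodePackage {
    name = "my-app"; # hypothetical package
    packageName = "my-app";
    version = "1.0.0";
    src = ./.;
    dependencies = [ ]; # normally generated from the lock file by node2nix
  };

  # A shell whose shellHook exposes the prebuilt node_modules via NODE_PATH.
  shell = nodeEnv.buildNodeShell {
    name = "my-app";
    packageName = "my-app";
    version = "1.0.0";
    src = ./.;
    dependencies = [ ];
  };
}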

View File

@@ -1,3 +0,0 @@
[
"renovate"
]

File diff suppressed because it is too large

View File

@@ -1,12 +1,13 @@
{ lib
, bash
, coreutils
, gawk
, path
, # nixpkgs path
writeScript
, writeScriptBin
, ...
{
lib,
bash,
coreutils,
gawk,
path,
# nixpkgs path
writeScript,
writeScriptBin,
...
}:
let
# Create a script that runs in a `pure` environment, in the sense that:
@@ -18,12 +19,12 @@ let
# - all environment variables are unset, except:
# - the ones listed in `keepVars` defined in ./default.nix
# - the ones listed via the `KEEP_VARS` variable
writePureShellScript = PATH: script:
writeScript "script.sh" (mkScript PATH script);
writePureShellScript = PATH: script: writeScript "script.sh" (mkScript PATH script);
# Creates a script in a `bin/` directory in the output; suitable for use with `lib.makeBinPath`, etc.
# See {option}`writers.writePureShellScript`
writePureShellScriptBin = binName: PATH: script:
writePureShellScriptBin =
binName: PATH: script:
writeScriptBin binName (mkScript PATH script);
mkScript = PATH: scriptText: ''
@@ -91,8 +92,5 @@
'';
in
{
inherit
writePureShellScript
writePureShellScriptBin
;
inherit writePureShellScript writePureShellScriptBin;
}
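
A usage sketch of the writer above: assuming this file is wired up via callPackage (the `./writers.nix` path and the `my-fetcher` name are hypothetical) and that, as in comparable writers, PATH is a list of packages resolved with lib.makeBinPath, a pure script package could look like:

{ pkgs }:
let
  # pkgs.path automatically satisfies the `path` (nixpkgs path) argument.
  writers = pkgs.callPackage ./writers.nix { };
in
# Produces a derivation with bin/my-fetcher on a minimal, scrubbed PATH.
writers.writePureShellScriptBin "my-fetcher"
  [ pkgs.coreutils pkgs.curl ]
  ''
    # Only coreutils and curl are reachable here; all other environment
    # variables were unset, except those whitelisted via KEEP_VARS.
    curl -sL https://example.com | head -n 1
  ''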

View File

@@ -0,0 +1 @@
../../../machines/web01

View File

@@ -0,0 +1,24 @@
{
  "data": "ENC[AES256_GCM,data:WW0RmSs3k81jSgYLt8dHEiJOxlncPWl3QWvRtmNgtIxvup7h,iv:nw7SP15EVWfS78dJE37msnxAZ/goYb7rGqAKNzhXFP4=,tag:yxVyGUMFczq8cGuU4V/FzA==,type:str]",
  "sops": {
    "kms": null,
    "gcp_kms": null,
    "azure_kv": null,
    "hc_vault": null,
    "age": [
      {
        "recipient": "age17n64ahe3wesh8l8lj0zylf4nljdmqn28hvqns2g7hgm9mdkhlsvsjuvkxz",
        "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBiM0FBVkhPc2luMjlpSW1R\nN0NlUU9ZQkxIOVAwa3hlMVg3bFluTjRlRUdRCmMxSkMvZjg2ckUyUThhSC9VOW1H\nZExFY2owcHQ5NzJtUW5pbDFjd2oyaEUKLS0tIG1Fd25acHdYWEdlQkMxajhRQXNw\nTGxJUDdPMlRrQ0t3SkVSaWdZZXJGT0EK7WfQ+6jVzOBToqO9wJby/qaF6kM00hMh\n+Y4A08X/ItLzyfCc5LQ97GQ2VlwXK5+HoD7jNnn//3xeH6YC1VBdkg==\n-----END AGE ENCRYPTED FILE-----\n"
      },
      {
        "recipient": "age17xuvz0fqtynzdmf8rfh4g3e46tx8w3mc6zgytrmuj5v9dhnldgxs7ue7ct",
        "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBhNlpOQ1QydEJuM3pCbEpK\nSDUzVlppNkFnSDJLSU1ITEdWWCtaUEE0THcwCmljSUl0amx2OTBVZXBPMFNGbjJP\nakNQcWlad1R3cDZYWWZpQkJkQmEvUEEKLS0tIFpJOU1GUnNaTnlaL25GRkdxZnhs\nUEhIVEpNWjNOV2FTSmVnRkVCWm90MDgKMvz6QdPRoYb2bPjS9oSOVA5gTfwrgn4q\nIyboQIMV3oAaAs9LSUcUMBvERzQ31JXnHRzrnqtdiNX0NLbIrN47yg==\n-----END AGE ENCRYPTED FILE-----\n"
      }
    ],
    "lastmodified": "2024-06-06T16:09:34Z",
    "mac": "ENC[AES256_GCM,data:7iKDT5577mLLeNyi46JHa4AUumqbQm65V3DXqNdNyLWccpIcML8n7jgFNxuK9gTqV2LM6bG18qS1orBJtPdawKnvxJwUaFb3Mo06C2+LVnWG4fT6MV+5eF8y6SM3IngT9BPk7IhTTGWe8lGJ6HTlg+9/f4/cq5NSKfeRgTkDEcE=,iv:T8wjeq2D1J8krhWeQJbVCOPY5sr05z/wMJqvr9onQK8=,tag:XgDTOTa2zv4NiBFN0b3rqA==,type:str]",
    "pgp": null,
    "unencrypted_suffix": "_unencrypted",
    "version": "3.8.1"
  }
}

View File

@@ -0,0 +1 @@
../../../users/joerg

View File

@@ -0,0 +1 @@
../../../machines/web01

View File

@@ -0,0 +1,24 @@
{
  "data": "ENC[AES256_GCM,data:ybX1/Uc+LqfgUoZQqCURgPfsTyzlsO+Xn7z8/0H9v5kyfJYX7PI1VlXVFBgR3Xh+2iuTF+v98PQyYeJOYLk3NTWggZayQQ4ivt0DLdhgG+DRFbeN8GMiqV5NWNhnL2tgLBu9DZViBSpgcbg9aHfI2cagboJnCSqyS2w1i/anvKaEgKa5YucrS4jywVxhBbvON6Oa2v8Hb0f/R8Ldl9HSqMM6o3pQEaYOsTNieNy63h9C4ERP/jIhKSajggpeHENdnuQC7Kavz54faL9xaz0jwRHb1fd+IGTM7fxqbyB5702nKEGytDwKzH0fh6q1HJNHbhWeWyCmGFKOkqywaQjcpJsczP2FIwkZmoui0juTEluNk1KzugP0gxtsuwjUiJlZeJxtZEgsnifLPpHyCaN99jzPjhd1TknT7MWZMVJT/R14bdD+QdwvR8rHK8IMctMGrNsqu3+Crdwu3WSfDH9jEM1zAZQNvLUT13azRABz1rpJvNFnvhDTBwDbUJlLpIQcPOPtVKO0IQeM5EUnCr+oCfBrP/To3mqo82s3,iv:jbY6WK0BcyLlU3Sbo7qNOHfCGU4TjUqTiww546Tyq20=,tag:VklHST51z5XI9+UiASBO9g==,type:str]",
  "sops": {
    "kms": null,
    "gcp_kms": null,
    "azure_kv": null,
    "hc_vault": null,
    "age": [
      {
        "recipient": "age17n64ahe3wesh8l8lj0zylf4nljdmqn28hvqns2g7hgm9mdkhlsvsjuvkxz",
        "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBlenFrWW45UXcyd3pwaTE0\nY3ROWWFPdzIwK1JuU2h2NCsyanF1VzhoM1NjCndscHZpTUNmcWc0TDd4V2xHM0lh\ndVNZQW1jMGFNeDdxbDhwdFB1Z3AwdVUKLS0tIGNmbVNMRUZGb1lPcnlrdkhnZ3JX\nM2szRHVydldGN3haV2lhZlFMeUgzcU0KDDwWVSjsua4DKXlqqk2Ns2e1zkzJK2Y2\n8+r8bXkBLJyXqQCQteXBrc5U+0n1KfHVkkvPmuBI3BmcAiVVmr/RxQ==\n-----END AGE ENCRYPTED FILE-----\n"
      },
      {
        "recipient": "age17xuvz0fqtynzdmf8rfh4g3e46tx8w3mc6zgytrmuj5v9dhnldgxs7ue7ct",
        "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJdHEyT3U0TzJQUW9wM2Z3\nYVdTYmRSeUwrZy9iNTVpcmZJMnhOeHlGRGp3CnVYbk15NWxadk9waFdJVFBKa0Vx\nYUtkYWZuYmhabk1xREtDdzFGdUcyaW8KLS0tIGYxcDVuQXZwdk1rVHJOOHljTmVl\nMXNXTHdSam12djE3Z0UyU2dSZDcrRGcKaQnrYuUpSTjOYYHH0EsqnTLHkU5Md4Ro\nUpeJX1GmAoIAUGruB/8jPbMaDQQXbjNLDCCalStlbqMgbgz/Ty4ukQ==\n-----END AGE ENCRYPTED FILE-----\n"
      }
    ],
    "lastmodified": "2024-06-06T16:09:35Z",
    "mac": "ENC[AES256_GCM,data:2RptHkE/k4JfqdybmnI3sbeEDaaD4bUtEPLuBcpltZjR5EHFYLsEB1Woxlzj2rLqq+8Wr6kWZtsG3uJSxsColUbazJd1CoVJxHpm6tAnM47Mv1YG5PdLwqpwJWji4AI5lAer4ZMfuGDpNbrwvbO3qB8R55r5SYay4b4Yc49wQXA=,iv:ESimFSybysRrgEj+27ECUi6kIklv1IunWVclTjX7C5g=,tag:LONP+cUm5NVcBvgVStZnwQ==,type:str]",
    "pgp": null,
    "unencrypted_suffix": "_unencrypted",
    "version": "3.8.1"
  }
}

View File

@@ -0,0 +1 @@
../../../users/joerg

View File

@@ -4,13 +4,11 @@
meta.name = "infra";
directory = self;
# Make flake available in modules
specialArgs = {
self = {
inherit (self) inputs nixosModules packages;
};
specialArgs.self = {
inherit (self) inputs nixosModules packages;
};
machines = {
web01 = { modulesPath, ... }: {
web01 = {
imports = [ (./web01/configuration.nix) ];
};
};
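
With specialArgs.self restricted to inputs, nixosModules and packages as above, any NixOS module in a machine configuration can take self as an argument. A hypothetical sketch (the module and package names are illustrative, not part of this repository):

{ self, pkgs, ... }:
{
  # `self` is the pruned flake handle defined above, not the full flake.
  imports = [ self.nixosModules.example ]; # hypothetical module
  environment.systemPackages = [ self.packages.${pkgs.system}.example ]; # hypothetical package
}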

File diff suppressed because one or more lines are too long

View File

@@ -1,34 +1,32 @@
locals {
hostnames = [
"@",
"git",
"mail",
"cache",
"matrix",
"www",
"docs",
"metrics",
"buildbot"
]
}
resource "hetznerdns_zone" "server" {
name = var.dns_zone
ttl = 3600
}
resource "hetznerdns_record" "server_a" {
for_each = toset(local.hostnames)
resource "hetznerdns_record" "root_a" {
zone_id = hetznerdns_zone.server.id
name = each.value
name = "@"
type = "A"
value = var.ipv4_address
}
resource "hetznerdns_record" "server_aaaa" {
for_each = toset(local.hostnames)
resource "hetznerdns_record" "root_aaaa" {
zone_id = hetznerdns_zone.server.id
name = each.value
name = "@"
type = "AAAA"
value = var.ipv6_address
}
resource "hetznerdns_record" "wildcard_a" {
zone_id = hetznerdns_zone.server.id
name = "*"
type = "A"
value = var.ipv4_address
}
resource "hetznerdns_record" "wildcard_aaaa" {
zone_id = hetznerdns_zone.server.id
name = "*"
type = "AAAA"
value = var.ipv6_address
}