Compare commits

..

No commits in common. "main" and "main" have entirely different histories.
main ... main

56 changed files with 430 additions and 1743 deletions

1
.envrc
View File

@ -1,4 +1,3 @@
# shellcheck shell=bash
use flake
watch_file .envrc.private

View File

@ -13,21 +13,21 @@ keys:
creation_rules:
- path_regex: targets/.*/(terraform.tfstate|secrets.auto.tfvars.sops.json)$
key_groups:
- age:
- *joerg
- *lassulus
- *dave
- age:
- *joerg
- *lassulus
- *dave
- path_regex: targets/web01/secrets.yaml$
key_groups:
- age:
- *joerg
- *lassulus
- *dave
- *web01
- age:
- *joerg
- *lassulus
- *dave
- *web01
- path_regex: targets/web01-new/secrets.yaml$
key_groups:
- age:
- *joerg
- *lassulus
- *dave
- *web01
- age:
- *joerg
- *lassulus
- *dave
- *web01

View File

@ -1,18 +1,7 @@
Copyright 2023 Clan contributers
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -4,10 +4,8 @@ This repository contains nixos modules and terraform code that powers clan.lol.
The website and git hosting is currently on [hetzner](https://www.hetzner.com/).
## Servers
- web01:
- Instance type:
[ex101](https://www.hetzner.com/de/dedicated-rootserver/ex101)
- Instance type: [ex101](https://www.hetzner.com/de/dedicated-rootserver/ex101)
- CPU: Intel Core i9-13900 (24 cores / 32 threads)
- RAM: 64GB DDR5
- Drives: 2 x 1.92 TB NVME
@ -28,7 +26,5 @@ $ ./tf.sh apply
## To add a new project to CI
1. Add the 'buildbot-clan' topic to the repository using the "Manage topics"
button below the project description
2. Go to https://buildbot.clan.lol/#/builders/2 and press "Update projects"
after you have logged in.
1. Add the 'buildbot-clan' topic to the repository using the "Manage topics" button below the project description
2. Go to https://buildbot.clan.lol/#/builders/2 and press "Update projects" after you have logged in.

View File

@ -1,11 +1,6 @@
{
perSystem =
{
config,
inputs',
pkgs,
...
}:
{ inputs', pkgs, ... }:
let
convert2Tofu =
provider:
@ -21,15 +16,8 @@
pkgs.bashInteractive
pkgs.sops
pkgs.nixVersions.latest
inputs'.clan-core.packages.tea-create-pr
inputs'.clan-core.packages.merge-after-ci
inputs'.clan-core.packages.clan-cli
# treefmt with config defined in ./flake.nix
config.treefmt.build.wrapper
(pkgs.opentofu.withPlugins (
p:
builtins.map convert2Tofu [
@ -41,6 +29,7 @@
]
))
];
inputsFrom = [ inputs'.clan-core.devShells.default ];
};
};
}

View File

@ -29,11 +29,11 @@
]
},
"locked": {
"lastModified": 1721526898,
"narHash": "sha256-HFaqhuClCWau5sbxDlNXFtElELSqn7oEgPEt2iW4nkA=",
"lastModified": 1718502800,
"narHash": "sha256-Arnuj2v9HCrmV9ZU5fln/MoKhQfICO6o9ia8xQ386CY=",
"owner": "Mic92",
"repo": "buildbot-nix",
"rev": "be581a532080db9f37a9ec8522eca351929fc846",
"rev": "c3b59dac3ee3b4c1dd9cabb2f850e2d8bcfaf417",
"type": "github"
},
"original": {
@ -48,6 +48,7 @@
"flake-parts": [
"flake-parts"
],
"nixos-generators": "nixos-generators",
"nixos-images": "nixos-images",
"nixpkgs": [
"nixpkgs"
@ -58,11 +59,11 @@
]
},
"locked": {
"lastModified": 1721578487,
"narHash": "sha256-xDmAPFSqeG1xzRPAIqQlONZqjsEsHTSHTA7V/vVwx8I=",
"rev": "6c7e9bafea382ae9dddc4d86b3e533c914080837",
"lastModified": 1718900431,
"narHash": "sha256-iEpESD8Hywek3lkGgvTjG5C25UTaAAjnqX9R0lIvhSI=",
"rev": "b3123b150ff7a287d36efd1cce29bd4d1e7e4d86",
"type": "tarball",
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/6c7e9bafea382ae9dddc4d86b3e533c914080837.tar.gz"
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/b3123b150ff7a287d36efd1cce29bd4d1e7e4d86.tar.gz"
},
"original": {
"type": "tarball",
@ -77,11 +78,11 @@
]
},
"locked": {
"lastModified": 1720661479,
"narHash": "sha256-nsGgA14vVn0GGiqEfomtVgviRJCuSR3UEopfP8ixW1I=",
"lastModified": 1717915259,
"narHash": "sha256-VsGPboaleIlPELHY5cNTrXK4jHVmgUra8uC6h7KVC5c=",
"owner": "nix-community",
"repo": "disko",
"rev": "786965e1b1ed3fd2018d78399984f461e2a44689",
"rev": "1bbdb06f14e2621290b250e631cf3d8948e4d19b",
"type": "github"
},
"original": {
@ -112,11 +113,11 @@
]
},
"locked": {
"lastModified": 1719994518,
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
"lastModified": 1717285511,
"narHash": "sha256-iKzJcpdXih14qYVcZ9QC9XuZYnPc6T8YImb6dX166kw=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
"rev": "2a55567fcf15b1b1c7ed712a2c6fadaec7412ea8",
"type": "github"
},
"original": {
@ -143,6 +144,43 @@
"type": "github"
}
},
"nixlib": {
"locked": {
"lastModified": 1712450863,
"narHash": "sha256-K6IkdtMtq9xktmYPj0uaYc8NsIqHuaAoRBaMgu9Fvrw=",
"owner": "nix-community",
"repo": "nixpkgs.lib",
"rev": "3c62b6a12571c9a7f65ab037173ee153d539905f",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nixpkgs.lib",
"type": "github"
}
},
"nixos-generators": {
"inputs": {
"nixlib": "nixlib",
"nixpkgs": [
"clan-core",
"nixpkgs"
]
},
"locked": {
"lastModified": 1716210724,
"narHash": "sha256-iqQa3omRcHGpWb1ds75jS9ruA5R39FTmAkeR3J+ve1w=",
"owner": "nix-community",
"repo": "nixos-generators",
"rev": "d14b286322c7f4f897ca4b1726ce38cb68596c94",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nixos-generators",
"type": "github"
}
},
"nixos-images": {
"inputs": {
"nixos-stable": [
@ -154,11 +192,11 @@
]
},
"locked": {
"lastModified": 1720659757,
"narHash": "sha256-ltzUuCsEfPA9CYM9BAnwObBGqDyQIs2OLkbVMeOOk00=",
"lastModified": 1717770332,
"narHash": "sha256-NQmFHj0hTCUgnMAsaNTu6sNTRyo0rFQEe+/lVgV5yxU=",
"owner": "nix-community",
"repo": "nixos-images",
"rev": "5eddae0afbcfd4283af5d6676d08ad059ca04b70",
"rev": "72771bd35f4e19e32d6f652528483b5e07fc317b",
"type": "github"
},
"original": {
@ -176,14 +214,17 @@
"nixpkgs": [
"nixpkgs"
],
"nixpkgs-24_05": []
"nixpkgs-24_05": "nixpkgs-24_05",
"utils": [
"flake-utils"
]
},
"locked": {
"lastModified": 1721121314,
"narHash": "sha256-zwc7YXga/1ppaZMWFreZykXtFwBgXodxUZiUx969r+g=",
"lastModified": 1718084203,
"narHash": "sha256-Cx1xoVfSMv1XDLgKg08CUd1EoTYWB45VmB9XIQzhmzI=",
"owner": "simple-nixos-mailserver",
"repo": "nixos-mailserver",
"rev": "059b50b2e729729ea00c6831124d3837c494f3d5",
"rev": "29916981e7b3b5782dc5085ad18490113f8ff63b",
"type": "gitlab"
},
"original": {
@ -194,11 +235,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1721559948,
"narHash": "sha256-cFgdjyK/VBM3hB1RfFHXcI/VOCBVAv813s1upHKX7bI=",
"lastModified": 1718396522,
"narHash": "sha256-C0re6ZtCqC1ndL7ib7vOqmgwvZDhOhJ1W0wQgX1tTIo=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "c19d62ad2265b16e2199c5feb4650fe459ca1c46",
"rev": "3e6b9369165397184774a4b7c5e8e5e46531b53f",
"type": "github"
},
"original": {
@ -208,6 +249,21 @@
"type": "github"
}
},
"nixpkgs-24_05": {
"locked": {
"lastModified": 1717144377,
"narHash": "sha256-F/TKWETwB5RaR8owkPPi+SPJh83AQsm6KrQAlJ8v/uA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "805a384895c696f802a9bf5bf4720f37385df547",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-24.05",
"type": "indirect"
}
},
"root": {
"inputs": {
"buildbot-nix": "buildbot-nix",
@ -232,11 +288,11 @@
]
},
"locked": {
"lastModified": 1720926522,
"narHash": "sha256-eTpnrT6yu1vp8C0B5fxHXhgKxHoYMoYTEikQx///jxY=",
"lastModified": 1717902109,
"narHash": "sha256-OQTjaEZcByyVmHwJlKp/8SE9ikC4w+mFd3X0jJs6wiA=",
"owner": "Mic92",
"repo": "sops-nix",
"rev": "0703ba03fd9c1665f8ab68cc3487302475164617",
"rev": "f0922ad001829b400f0160ba85b47d252fa3d925",
"type": "github"
},
"original": {
@ -252,11 +308,11 @@
]
},
"locked": {
"lastModified": 1721263500,
"narHash": "sha256-6l0+MciXkktANuZ+Rwc6BZJxtMi7jHZRiSnzG+xpwyk=",
"lastModified": 1718585173,
"narHash": "sha256-G5DB6D3p8ucyGfmWt3JmiWcVW55DeuUoiT230wQ9Am4=",
"owner": "numtide",
"repo": "srvos",
"rev": "ef4f2248e1bbd84a0dd269ab31b9927d9c0bf2e6",
"rev": "c607ffef7c234d88f37ed12d75b2c48de3f4b3fe",
"type": "github"
},
"original": {
@ -287,11 +343,11 @@
]
},
"locked": {
"lastModified": 1721458737,
"narHash": "sha256-wNXLQ/ATs1S4Opg1PmuNoJ+Wamqj93rgZYV3Di7kxkg=",
"lastModified": 1718522839,
"narHash": "sha256-ULzoKzEaBOiLRtjeY3YoGFJMwWSKRYOic6VNw2UyTls=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "888bfb10a9b091d9ed2f5f8064de8d488f7b7c97",
"rev": "68eb1dc333ce82d0ab0c0357363ea17c31ea1f81",
"type": "github"
},
"original": {

View File

@ -18,7 +18,7 @@
nixos-mailserver = {
url = "gitlab:simple-nixos-mailserver/nixos-mailserver";
inputs.nixpkgs.follows = "nixpkgs";
inputs.nixpkgs-24_05.follows = "";
inputs.utils.follows = "flake-utils";
inputs.flake-compat.follows = "flake-compat";
};
@ -47,7 +47,6 @@
"aarch64-linux"
];
imports = [
inputs.clan-core.flakeModules.default
inputs.treefmt-nix.flakeModule
./devShells/flake-module.nix
./targets/flake-module.nix
@ -59,33 +58,14 @@
lib,
self',
system,
pkgs,
...
}:
{
treefmt = {
projectRootFile = ".git/config";
programs.terraform.enable = true;
programs.shellcheck.enable = true;
programs.deno.enable = true;
programs.ruff.check = true;
programs.ruff.format = true;
programs.yamlfmt.enable = true;
settings.global.excludes = [
# generated files
"sops/*"
"terraform.tfstate"
"*.tfvars.sops.json"
"*nixos-vars.json"
"secrets.yaml"
];
programs.nixfmt.enable = true;
programs.nixfmt.package = pkgs.nixfmt-rfc-style;
settings.formatter.nixfmt.excludes = [
programs.hclfmt.enable = true;
programs.nixfmt-rfc-style.enable = true;
settings.formatter.nixfmt-rfc-style.excludes = [
# generated files
"node-env.nix"
"node-packages.nix"

View File

@ -54,18 +54,6 @@ in
uid = 1005;
openssh.authorizedKeys.keys = [ admins.johannes ];
};
flokli = {
isNormalUser = true;
home = "/home/flokli";
extraGroups = [ "wheel" ];
shell = "/run/current-system/sw/bin/zsh";
uid = 1006;
openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPTVTXOutUZZjXLB0lUSgeKcSY/8mxKkC0ingGK1whD2 flokli"
"sk-ssh-ed25519@openssh.com AAAAGnNrLXNzaC1lZDI1NTE5QG9wZW5zc2guY29tAAAAIP7rdJ1klzK8nx74QQA8jYdFwznM1klLS0C7M5lHiu+IAAAABHNzaDo= flokli 20240617 28772765"
"sk-ssh-ed25519@openssh.com AAAAGnNrLXNzaC1lZDI1NTE5QG9wZW5zc2guY29tAAAAIA34k0FVKDGNdJ8uk0Ytbvh6J8v+H86F4t6BXAIoW/7xAAAABHNzaDo= flokli 20240704 14321691"
];
};
root.openssh.authorizedKeys.keys = builtins.attrValues admins;
};

View File

@ -15,9 +15,6 @@
pkgs.tig
pkgs.tmux
pkgs.direnv
# for flokli
pkgs.kitty.terminfo
];
programs.bash = {

View File

@ -27,7 +27,6 @@
self.nixosModules.buildbot
inputs.srvos.nixosModules.mixins-nginx
inputs.srvos.nixosModules.mixins-nix-experimental
./matrix-bot.nix
./web01
inputs.nixos-mailserver.nixosModules.mailserver
./mailserver.nix

View File

@ -1,21 +1,21 @@
{ config, lib, ... }:
let
cfg = config.clan-infra.networking;
cfg = config.clan.networking;
in
{
options = {
clan-infra.networking.ipv4.address = lib.mkOption { type = lib.types.str; };
clan.networking.ipv4.address = lib.mkOption { type = lib.types.str; };
clan-infra.networking.ipv4.cidr = lib.mkOption {
clan.networking.ipv4.cidr = lib.mkOption {
type = lib.types.str;
default = "26";
};
clan-infra.networking.ipv4.gateway = lib.mkOption { type = lib.types.str; };
clan.networking.ipv4.gateway = lib.mkOption { type = lib.types.str; };
clan-infra.networking.ipv6.address = lib.mkOption { type = lib.types.str; };
clan.networking.ipv6.address = lib.mkOption { type = lib.types.str; };
clan-infra.networking.ipv6.cidr = lib.mkOption {
clan.networking.ipv6.cidr = lib.mkOption {
type = lib.types.str;
default = "64";
};

View File

@ -1,49 +0,0 @@
{
config,
pkgs,
self,
...
}:
let
name = "matrix-bot";
in
{
users.groups.matrix-bot-user = { };
users.users.matrix-bot-user = {
group = "matrix-bot-user";
isSystemUser = true;
description = "User for matrix-bot service";
home = "/var/lib/matrix-bot";
createHome = true;
};
systemd.services.${name} = {
path = [ self.packages.${pkgs.system}.matrix-bot ];
description = "Matrix bot for changelog and reviews";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment = {
MATRIX_PASSWORD_FILE = "%d/MATRIX_PASSWORD_FILE";
OPENAI_API_KEY_FILE = "%d/OPENAI_API_KEY_FILE";
HOME = "/var/lib/${name}";
};
serviceConfig = {
LoadCredential = [
"MATRIX_PASSWORD_FILE:${config.sops.secrets.web01-matrix-password-clan-bot.path}"
"OPENAI_API_KEY_FILE:${config.sops.secrets.qubasas-openai-api-key.path}"
];
User = "matrix-bot-user";
Group = "matrix-bot-user";
WorkingDirectory = "/var/lib/${name}";
RuntimeDirectory = "/var/lib/${name}";
};
script = ''
set -euxo pipefail
mbot --changelog-room "!FdCwyKsRlfooNYKYzx:matrix.org" --review-room "!tmSRJlbsVXFUKAddiM:gchq.icu"
'';
};
}

View File

@ -22,7 +22,7 @@
while sleep 10; do
${self.packages.${pkgs.system}.clan-merge}/bin/clan-merge \
--bot-name clan-bot \
--repos clan-infra clan-core clan-homepage data-mesher
--repos clan-infra clan-core clan-homepage
done
'';
};

View File

@ -71,9 +71,6 @@
locations."^~ /blog".extraConfig = ''
rewrite ^/blog(.*)$ https://docs.clan.lol/blog permanent;
'';
locations."^~ /wclan".extraConfig = ''
rewrite ^/wclan(.*)$ https://clan.lol/what-is-clan.html permanent;
'';
locations."/thaigersprint".return = "307 https://pad.lassul.us/s/clan-thaigersprint";
};

View File

@ -55,10 +55,8 @@ in
config = lib.mkMerge (
map configForJob [
"job-flake-update-clan-core"
"job-flake-update-clan-core-individual"
"job-flake-update-clan-homepage"
"job-flake-update-clan-infra"
"job-flake-update-data-mesher"
]
);
}

View File

@ -8,23 +8,4 @@
};
clan.matrix-synapse.users.monitoring = { };
clan.matrix-synapse.users.clan-bot = { };
# Rate limiting settings
# we need to up this to be able to support matrix bots
services.matrix-synapse.settings = {
rc_login = {
address = {
per_second = 20;
burst_count = 200;
};
account = {
per_second = 20;
burst_count = 200;
};
failed_attempts = {
per_second = 3;
burst_count = 15;
};
};
};
}

View File

@ -1,29 +0,0 @@
{
bash,
coreutils,
git,
gnugrep,
jq,
nix,
openssh,
action-ensure-tea-login,
action-create-pr,
action-flake-update,
writePureShellScriptBin,
}:
writePureShellScriptBin "action-flake-update-pr-clan-individual"
[
bash
coreutils
git
gnugrep
jq
nix
openssh
action-create-pr
action-ensure-tea-login
action-flake-update
]
''
bash ${./script.sh}
''

View File

@ -1,47 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# prevent these variables from being unset by writePureShellScript
export KEEP_VARS="GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_COMMITTER_NAME GIT_COMMITTER_EMAIL GITEA_URL GITEA_USER PR_TITLE REMOTE_BRANCH REPO_DIR${KEEP_VARS:+ $KEEP_VARS}"
# configure variables for actions
today=$(date --iso-8601)
today_minutes=$(date --iso-8601=minutes)
export REPO_DIR=$TMPDIR/repo
export GIT_AUTHOR_NAME="Clan Merge Bot"
export GIT_AUTHOR_EMAIL="clan-bot@git.clan.lol"
export GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"
export GIT_COMMITTER_EMAIL="$GIT_AUTHOR_NAME"
export GITEA_USER="clan-bot"
export GITEA_URL="https://git.clan.lol"
git clone --depth 1 --branch main "$REPO" "$REPO_DIR"
cd "$REPO_DIR"
inputs=$(nix flake metadata --json | jq '.locks.nodes | keys[]' --raw-output | grep -v "root")
for input in $inputs;
do
target_branch="update-${input}"
echo "updating input: ${input}"
echo "checking out: git checkout main"
git checkout main
git checkout -b "$target_branch"
echo "checking out: git checkout -b update-${input}"
export PR_TITLE="Automatic flake update - ${input} - ${today_minutes}"
export REMOTE_BRANCH="flake-update-${input}-${today}"
echo "action-ensure-tea-login"
action-ensure-tea-login
echo "action-flake-update: ${input}"
action-flake-update "$input"
echo "check diff"
if git diff --quiet main.."$target_branch" --;then
echo "No lockfile changes for input: ${input}"
else
echo "action-create-pr"
action-create-pr --assignees clan-bot
fi
done

View File

@ -5,10 +5,10 @@ set -euo pipefail
export KEEP_VARS="GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_COMMITTER_NAME GIT_COMMITTER_EMAIL GITEA_URL GITEA_USER PR_TITLE REMOTE_BRANCH REPO_DIR${KEEP_VARS:+ $KEEP_VARS}"
# configure variables for actions
today=$(date --iso-8601)
today_minutes=$(date --iso-8601=minutes)
export PR_TITLE="Automatic flake update - ${today_minutes}"
export REMOTE_BRANCH="flake-update-${today}"
PR_TITLE="Automatic flake update - $(date --iso-8601=minutes)"
export PR_TITLE
REMOTE_BRANCH="flake-update-$(date --iso-8601)"
export REMOTE_BRANCH
export REPO_DIR=$TMPDIR/repo
export GIT_AUTHOR_NAME="Clan Merge Bot"
export GIT_AUTHOR_EMAIL="clan-bot@git.clan.lol"

View File

@ -13,5 +13,5 @@ writePureShellScriptBin "action-flake-update"
nix
]
''
bash ${./script.sh} "$@"
bash ${./script.sh}
''

View File

@ -1,16 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
NIX_VERSION=$(nix --version)
echo "Nix version: $NIX_VERSION"
if [ -z "${*}" ];then
COMMIT_MSG="update flake lock - $(date --iso-8601=minutes)"
nix --experimental-features "nix-command flakes" \
flake update --commit-lock-file --commit-lockfile-summary "$COMMIT_MSG"
else
# Support for ancient nix versions
COMMIT_MSG="update flake lock - ${*} - $(date --iso-8601=minutes)"
nix --experimental-features "nix-command flakes" \
flake lock --commit-lock-file --commit-lockfile-summary "$COMMIT_MSG" --update-input "${@}"
fi

View File

@ -1,2 +1 @@
# shellcheck shell=bash
use flake .#clan-merge

View File

@ -1,8 +1,5 @@
{
imports = [
./clan-merge/flake-module.nix
./matrix-bot/flake-module.nix
];
imports = [ ./clan-merge/flake-module.nix ];
perSystem =
{ pkgs, config, ... }:
{
@ -26,20 +23,14 @@
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-ensure-tea-login action-create-pr action-flake-update;
};
action-flake-update-pr-clan-individual = pkgs.callPackage ./action-flake-update-pr-clan-individual {
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-ensure-tea-login action-create-pr action-flake-update;
};
inherit
(pkgs.callPackages ./job-flake-updates {
inherit (writers) writePureShellScriptBin;
inherit (config.packages) action-flake-update-pr-clan action-flake-update-pr-clan-individual;
inherit (config.packages) action-flake-update-pr-clan;
})
job-flake-update-clan-core
job-flake-update-clan-core-individual
job-flake-update-clan-homepage
job-flake-update-clan-infra
job-flake-update-data-mesher
;
};
};

View File

@ -0,0 +1,120 @@
From dd2ccf4ff923757b81088e27e362e3fdb222c9d3 Mon Sep 17 00:00:00 2001
From: Jade Lovelace <software@lfcode.ca>
Date: Tue, 28 May 2024 16:36:25 +0200
Subject: [PATCH] Add an immutable tarball link to archive download headers for
Nix
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This allows `nix flake metadata` and nix in general to lock a *branch*
tarball link in a manner that causes it to fetch the correct commit even
if the branch is updated with a newer version.
For further context, Nix flakes are a feature that, among other things,
allows for "inputs" that are "github:someuser/somerepo",
"https://some-tarball-service/some-tarball.tar.gz",
"sourcehut:~meow/nya" or similar. This feature allows our users to fetch
tarballs of git-based inputs to their builds rather than using git to
fetch them, saving significant download time.
There is presently no gitea or forgejo specific fetcher in Nix, and we
don't particularly wish to have one. Ideally (as a developer on a Nix
implementation myself) we could just use the generic tarball fetcher and
not add specific forgejo support, but to do so, we need additional
metadata to know which commit a given *branch* tarball represents, which
is the purpose of the Link header added here.
The result of this patch is that a Nix user can specify `inputs.something.url =
"https://forgejo-host/some/project/archive/main.tar.gz"` in flake.nix
and get a link to some concrete tarball for the actual commit in the
lock file, then when they run `nix flake update` in the future, they
will get the latest commit in that branch.
Example of it working locally:
» nix flake metadata --refresh 'http://localhost:3000/api/v1/repos/jade/cats/archive/main.tar.gz?dir=configs/nix'
Resolved URL: http://localhost:3000/api/v1/repos/jade/cats/archive/main.tar.gz?dir=configs/nix
Locked URL: http://localhost:3000/api/v1/repos/jade/cats/archive/804ede182b6b66469b23ea4d21eece52766b7a06.tar.gz?dir=configs
/nix&narHash=sha256-yP7KkDVfuixZzs0fsqhSETXFC0y8m6nmPLw2GrAMxKQ%3D
Description: Computers with the nixos
Path: /nix/store/s856c6yqghyan4v0zy6jj19ksv0q22nx-source
Revision: 804ede182b6b66469b23ea4d21eece52766b7a06
Last modified: 2024-05-02 00:48:32
For details on the header value, see:
https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
Signed-off-by: Jörg Thalheim <joerg@thalheim.io>
---
routers/api/v1/repo/file.go | 6 ++++++
routers/web/repo/repo.go | 6 ++++++
tests/integration/api_repo_archive_test.go | 11 +++++++++++
3 files changed, 23 insertions(+)
diff --git a/routers/api/v1/repo/file.go b/routers/api/v1/repo/file.go
index 156033f58a..b7ad63af08 100644
--- a/routers/api/v1/repo/file.go
+++ b/routers/api/v1/repo/file.go
@@ -319,6 +319,12 @@ func archiveDownload(ctx *context.APIContext) {
func download(ctx *context.APIContext, archiveName string, archiver *repo_model.RepoArchiver) {
downloadName := ctx.Repo.Repository.Name + "-" + archiveName
+ // Add nix format link header so tarballs lock correctly:
+ // https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
+ ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
+ ctx.Repo.Repository.APIURL(),
+ archiver.CommitID, archiver.CommitID))
+
rPath := archiver.RelativePath()
if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
// If we have a signed url (S3, object storage), redirect to this directly.
diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go
index 71c582b5f9..bb6349658f 100644
--- a/routers/web/repo/repo.go
+++ b/routers/web/repo/repo.go
@@ -484,6 +484,12 @@ func Download(ctx *context.Context) {
func download(ctx *context.Context, archiveName string, archiver *repo_model.RepoArchiver) {
downloadName := ctx.Repo.Repository.Name + "-" + archiveName
+ // Add nix format link header so tarballs lock correctly:
+ // https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
+ ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
+ ctx.Repo.Repository.APIURL(),
+ archiver.CommitID, archiver.CommitID))
+
rPath := archiver.RelativePath()
if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
// If we have a signed url (S3, object storage), redirect to this directly.
diff --git a/tests/integration/api_repo_archive_test.go b/tests/integration/api_repo_archive_test.go
index 57d3abfe84..340ff03961 100644
--- a/tests/integration/api_repo_archive_test.go
+++ b/tests/integration/api_repo_archive_test.go
@@ -8,6 +8,7 @@
"io"
"net/http"
"net/url"
+ "regexp"
"testing"
auth_model "code.gitea.io/gitea/models/auth"
@@ -39,6 +40,16 @@ func TestAPIDownloadArchive(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, bs, 266)
+ // Must return a link to a commit ID as the "immutable" archive link
+ linkHeaderRe := regexp.MustCompile(`<(?P<url>https?://.*/api/v1/repos/user2/repo1/archive/[a-f0-9]+\.tar\.gz.*)>; rel="immutable"`)
+ m := linkHeaderRe.FindStringSubmatch(resp.Header().Get("Link"))
+ assert.NotEmpty(t, m[1])
+ resp = MakeRequest(t, NewRequest(t, "GET", m[1]).AddTokenAuth(token), http.StatusOK)
+ bs2, err := io.ReadAll(resp.Body)
+ assert.NoError(t, err)
+ // The locked URL should give the same bytes as the non-locked one
+ assert.EqualValues(t, bs, bs2)
+
link, _ = url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/archive/master.bundle", user2.Name, repo.Name))
resp = MakeRequest(t, NewRequest(t, "GET", link.String()).AddTokenAuth(token), http.StatusOK)
bs, err = io.ReadAll(resp.Body)
--
2.44.1

View File

@ -1,5 +1,8 @@
{ gitea }:
gitea.overrideAttrs (old: {
patches = old.patches ++ [ ./0001-add-bot-check.patch ];
patches = old.patches ++ [
./0001-add-bot-check.patch
./0001-Add-an-immutable-tarball-link-to-archive-download-he.patch
];
})

View File

@ -1,8 +1,4 @@
{
action-flake-update-pr-clan,
action-flake-update-pr-clan-individual,
writePureShellScriptBin,
}:
{ action-flake-update-pr-clan, writePureShellScriptBin }:
let
job-flake-update =
repo:
@ -12,21 +8,9 @@ let
action-flake-update-pr-clan
'';
job-flake-update-individual =
repo:
writePureShellScriptBin "job-flake-update-${repo}-individual"
[ action-flake-update-pr-clan-individual ]
''
export REPO="gitea@git.clan.lol:clan/${repo}.git"
export KEEP_VARS="REPO''${KEEP_VARS:+ $KEEP_VARS}"
action-flake-update-pr-clan-individual
'';
in
{
job-flake-update-clan-core = job-flake-update "clan-core";
job-flake-update-clan-core-individual = job-flake-update-individual "clan-core";
job-flake-update-clan-homepage = job-flake-update "clan-homepage";
job-flake-update-clan-infra = job-flake-update "clan-infra";
job-flake-update-data-mesher = job-flake-update "data-mesher";
}

View File

@ -1,7 +0,0 @@
# shellcheck shell=bash
source_up
watch_file flake-module.nix shell.nix default.nix
# Because we depend on nixpkgs sources, uploading to builders takes a long time
use flake .#matrix-bot --builders ''

View File

@ -1,3 +0,0 @@
*.json
**/data
**/__pycache__

View File

@ -1,13 +0,0 @@
#!/usr/bin/env python3
import os
import sys
sys.path.insert(
0, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
)
from matrix_bot import main # NOQA
if __name__ == "__main__":
main()

View File

@ -1,44 +0,0 @@
{
python3,
setuptools,
matrix-nio,
aiofiles,
aiohttp,
markdown2,
git,
tiktoken,
...
}:
let
pythonDependencies = [
matrix-nio
aiofiles
aiohttp
markdown2
tiktoken
];
runtimeDependencies = [ git ];
testDependencies = pythonDependencies ++ runtimeDependencies ++ [ ];
in
python3.pkgs.buildPythonApplication {
name = "matrix-bot";
src = ./.;
format = "pyproject";
nativeBuildInputs = [ setuptools ];
propagatedBuildInputs = pythonDependencies ++ runtimeDependencies;
passthru.testDependencies = testDependencies;
# Clean up after the package to avoid leaking python packages into a devshell
postFixup = ''
rm $out/nix-support/propagated-build-inputs
'';
meta.mainProgram = "matrix-bot";
}

View File

@ -1,14 +0,0 @@
{ ... }:
{
perSystem =
{ self', pkgs, ... }:
{
devShells.matrix-bot = pkgs.callPackage ./shell.nix { inherit (self'.packages) matrix-bot; };
packages = {
matrix-bot = pkgs.python3.pkgs.callPackage ./default.nix { };
};
checks = { };
};
}

View File

@ -1,183 +0,0 @@
import argparse
import asyncio
import logging
import os
import sys
from os import environ
from pathlib import Path
from matrix_bot.custom_logger import setup_logging
from matrix_bot.gitea import GiteaData
from matrix_bot.main import bot_main
from matrix_bot.matrix import MatrixData
log = logging.getLogger(__name__)
curr_dir = Path(__file__).parent
data_dir = Path(os.getcwd()) / "data"
def create_parser(prog: str | None = None) -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
prog=prog,
description="A gitea bot for matrix",
formatter_class=argparse.RawTextHelpFormatter,
)
parser.add_argument(
"--debug",
help="Enable debug logging",
action="store_true",
default=False,
)
parser.add_argument(
"--server",
help="The matrix server to connect to",
default="https://matrix.clan.lol",
)
parser.add_argument(
"--admin",
help="The matrix user to ping on error",
default="@qubasa:gchq.icu",
)
parser.add_argument(
"--user",
help="The matrix user to connect as",
default="@clan-bot:clan.lol",
)
parser.add_argument(
"--avatar",
help="The path to the image to use as the avatar",
default=curr_dir / "avatar.png",
)
parser.add_argument(
"--repo-owner",
help="The owner of gitea the repository",
default="clan",
)
parser.add_argument(
"--repo-name",
help="The name of the repository",
default="clan-core",
)
parser.add_argument(
"--changelog-room",
help="The matrix room to join for the changelog bot",
default="#bot-test:gchq.icu",
)
parser.add_argument(
"--review-room",
help="The matrix room to join for the review bot",
default="#bot-test:gchq.icu",
)
parser.add_argument(
"--changelog-frequency",
help="The frequency to check for changelog updates in days",
default=7,
type=int,
)
def valid_weekday(value: str) -> str:
days = [
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday",
"Sunday",
]
if value not in days:
raise argparse.ArgumentTypeError(
f"{value} is not a valid weekday. Choose from {', '.join(days)}"
)
return value
parser.add_argument(
"--publish-day",
help="The day of the week to publish the changelog. Ignored if changelog-frequency is less than 7 days.",
default="Wednesday",
type=valid_weekday,
)
parser.add_argument(
"--gitea-url",
help="The gitea url to connect to",
default="https://git.clan.lol",
)
parser.add_argument(
"--data-dir",
help="The directory to store data",
default=data_dir,
type=Path,
)
parser.add_argument(
"--poll-frequency",
help="The frequency to poll for new reviews in minutes",
default=10,
type=float,
)
return parser
def matrix_password() -> str:
matrix_password = environ.get("MATRIX_PASSWORD")
if matrix_password is not None:
return matrix_password
matrix_password_file = environ.get("MATRIX_PASSWORD_FILE", default=None)
if matrix_password_file is None:
raise Exception("MATRIX_PASSWORD_FILE environment variable is not set")
with open(matrix_password_file) as f:
return f.read().strip()
def main() -> None:
parser = create_parser()
args = parser.parse_args()
if args.debug:
setup_logging(logging.DEBUG, root_log_name=__name__.split(".")[0])
log.debug("Debug log activated")
else:
setup_logging(logging.INFO, root_log_name=__name__.split(".")[0])
matrix = MatrixData(
server=args.server,
user=args.user,
avatar=args.avatar,
changelog_room=args.changelog_room,
changelog_frequency=args.changelog_frequency,
publish_day=args.publish_day,
review_room=args.review_room,
password=matrix_password(),
admin=args.admin,
)
gitea = GiteaData(
url=args.gitea_url,
owner=args.repo_owner,
repo=args.repo_name,
access_token=os.getenv("GITEA_ACCESS_TOKEN"),
poll_frequency=args.poll_frequency,
)
args.data_dir.mkdir(parents=True, exist_ok=True)
try:
asyncio.run(bot_main(matrix, gitea, args.data_dir))
except KeyboardInterrupt:
print("User Interrupt", file=sys.stderr)
if __name__ == "__main__":
main()

View File

@ -1,4 +0,0 @@
from . import main

# Entry point for `python -m <package>`: delegate to the package's main().
if __name__ == "__main__":
    main()

Binary file not shown.

Before

Width:  |  Height:  |  Size: 105 KiB

View File

@ -1,287 +0,0 @@
import asyncio
import datetime
import json
import logging
import shlex
import subprocess
from pathlib import Path
import aiohttp
from nio import (
AsyncClient,
JoinResponse,
)
from matrix_bot.gitea import (
GiteaData,
)
from .locked_open import read_locked_file, write_locked_file
from .matrix import MatrixData, send_message
from .openai import create_jsonl_data, upload_and_process_files
log = logging.getLogger(__name__)
def last_ndays_to_today(ndays: int) -> tuple[str, str]:
    """Return ("YYYY-MM-DD", "YYYY-MM-DD") spanning the last `ndays` days.

    The second element is today's date; the first is `ndays` days earlier.
    """
    now = datetime.datetime.now()
    start = now - datetime.timedelta(days=ndays)
    return (start.strftime("%Y-%m-%d"), now.strftime("%Y-%m-%d"))
def write_file_with_date_prefix(
    content: str, directory: Path, *, ndays: int, suffix: str
) -> Path:
    """
    Write content to a file named "<fromdate>__<todate>_<suffix>.txt".

    The date range covers the last `ndays` days up to today (see
    last_ndays_to_today). The directory is created if missing.

    :param content: The content to write to the file.
    :param directory: The directory where the file will be saved.
    :param ndays: Number of days the date range in the filename spans.
    :param suffix: Filename suffix inserted before the ".txt" extension.
    :return: The path to the created file.
    """
    # Ensure the directory exists
    directory.mkdir(parents=True, exist_ok=True)

    fromdate, todate = last_ndays_to_today(ndays)
    file_path = directory / f"{fromdate}__{todate}_{suffix}.txt"

    # Path.write_text replaces the manual open/write dance.
    file_path.write_text(content)
    return file_path
async def git_pull(repo_path: Path) -> None:
    """Run `git pull` in the given repository checkout.

    Raises Exception if the command exits non-zero. Previously a failed pull
    was silently ignored, which let the subsequent `git log` run on stale
    data; this now fails loudly, consistent with git_log's error handling.
    """
    cmd = ["git", "pull"]
    logging.getLogger(__name__).debug(f"Running command: {shlex.join(cmd)}")
    process = await asyncio.create_subprocess_exec(
        *cmd,
        cwd=str(repo_path),
    )
    returncode = await process.wait()
    if returncode != 0:
        raise Exception(
            f"Command '{shlex.join(cmd)}' failed with exit code {returncode}"
        )
async def git_log(repo_path: Path, ndays: int) -> str:
    """Return `git log` output (one-line summaries, stats and patches)
    covering the last `ndays` days of the repository at `repo_path`."""
    cmd = [
        "git",
        "log",
        f"--since={ndays} days ago",
        "--pretty=format:%h - %an, %ar : %s",
        "--stat",
        "--patch",
    ]
    log.debug(f"Running command: {shlex.join(cmd)}")
    proc = await asyncio.create_subprocess_exec(
        *cmd,
        cwd=str(repo_path),
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    out, _err = await proc.communicate()
    if proc.returncode != 0:
        raise Exception(
            f"Command '{' '.join(cmd)}' failed with exit code {proc.returncode}"
        )
    return out.decode()
async def changelog_bot(
    client: AsyncClient,
    http: aiohttp.ClientSession,
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """Generate and post a periodic changelog to the changelog room.

    On the configured publish weekday (and at most once per
    changelog_frequency days), clones/updates the repo, collects the recent
    git log, asks OpenAI's Batch API to summarize it, and posts the result.
    State about the last run is persisted in last_changelog_run.json.
    """
    last_run_path = data_dir / "last_changelog_run.json"
    last_run = read_locked_file(last_run_path)
    today = datetime.datetime.now()
    today_weekday = today.strftime("%A")
    # Gate 1: only publish on the configured weekday.
    if today_weekday != matrix.publish_day:
        log.debug(f"Changelog not due yet. Due on {matrix.publish_day}")
        return
    # Gate 2: only publish once per changelog_frequency days.
    if last_run == {}:
        log.debug(f"First run. Setting last_run to {last_run}")
    else:
        last_date = datetime.datetime.strptime(last_run["todate"], "%Y-%m-%d")
        upper_bound = datetime.timedelta(days=matrix.changelog_frequency)
        delta = today - last_date
        if delta <= upper_bound:
            log.debug(
                f"Changelog not due yet. Due in {upper_bound.days - delta.days} days"
            )
            return
    fromdate, todate = last_ndays_to_today(matrix.changelog_frequency)
    last_run = {
        "fromdate": fromdate,
        "todate": todate,
        "ndays": matrix.changelog_frequency,
    }
    # Join the changelog room (no-op if the bot is already a member).
    room: JoinResponse = await client.join(matrix.changelog_room)
    if not room.transport_response.ok:
        log.error("This can happen if the room doesn't exist or the bot isn't invited")
        raise Exception(f"Failed to join room {room}")
    # Clone the repository on first use; afterwards only pull.
    repo_path = data_dir / gitea.repo
    if not repo_path.exists():
        cmd = [
            "git",
            "clone",
            f"{gitea.url}/{gitea.owner}/{gitea.repo}.git",
            gitea.repo,
        ]
        subprocess.run(cmd, cwd=data_dir, check=True)
    # git pull
    await git_pull(repo_path)
    # git log
    diff = await git_log(repo_path, matrix.changelog_frequency)
    fromdate, todate = last_ndays_to_today(matrix.changelog_frequency)
    log.info(f"Generating changelog from {fromdate} to {todate}")
    # Write the last run to the file before processing the changelog
    # This ensures that the changelog is only generated once per period
    # even if openai fails
    write_locked_file(last_run_path, last_run)
    # Prompt for summarizing one batch of git history into a changelog.
    system_prompt = f"""
Follow these guidelines:
- Follow the pull request format: "scope: message (#number1, #number2)"
- Don't use the commit messages tied to a pull request as is and instead explain the change in a user-friendly way
- Link pull requests as: '{gitea.url}/{gitea.owner}/{gitea.repo}/pulls/<number>'
- Use markdown links to make the pull request number clickable
- Mention each pull request number at most once
- Group similar changes / pull requests together
- Explain changes in a user-friendly way (be detailed if necessary)
- Always use four '#' for headings never less than that. Example: `####New Features`
- WRITE IN THE STYLE OF THE NEW YORK TIMES, PLEASE!
---
Example Changelog:
#### Changelog:
For the last {matrix.changelog_frequency} days from {fromdate} to {todate}
#### New Features
- `secrets`: [#1679]({gitea.url}/{gitea.owner}/{gitea.repo}/pulls/1679)
> Users can now generate secrets and manage settings in the new submodules
> This feature is available to all users with the 'admin' role
- `sshd`: [#1674]({gitea.url}/{gitea.owner}/{gitea.repo}/pulls/1674)
> A workaround has been added to mitigate the security vulnerability in the sshd module
> This workaround is temporary and will be replaced with a permanent fix in the next release
...
#### Refactoring
...
#### Documentation
...
#### Bug Fixes
...
#### Additional Notes
...
---
#### Changelog:
For the last {matrix.changelog_frequency} days from {fromdate} to {todate}
#### New Features
"""
    # Step 1: Create the JSONL file
    jsonl_files = await create_jsonl_data(user_prompt=diff, system_prompt=system_prompt)
    # Step 2: Upload the JSONL file and process it
    results = await upload_and_process_files(session=http, jsonl_files=jsonl_files)
    # Join responses together
    all_changelogs = []
    for result in results:
        choices = result["response"]["body"]["choices"]
        changelog = "\n".join(choice["message"]["content"] for choice in choices)
        all_changelogs.append(changelog)
    full_changelog = "\n\n".join(all_changelogs)
    log.debug(f"Changelog generated:\n{full_changelog}")
    # Single batch: post directly, no second summarization pass needed.
    if len(results) == 1:
        # Write the results to a file in the changelogs directory
        new_result_file = write_file_with_date_prefix(
            json.dumps(results, indent=4),
            data_dir / "changelogs",
            ndays=matrix.changelog_frequency,
            suffix="result",
        )
        log.info(f"LLM result written to: {new_result_file}")
        await send_message(client, room, full_changelog)
        return
    # Multiple batches: run a second pass to merge the partial changelogs.
    combine_prompt = """
Please combine the following changelogs into a single markdown changelog.
- Merge duplicates sections.
- Make sure the changelog is concise and easy to read.
- Always use four '#' for headings never less than that. Example: `####New Features`
- WRITE IN THE STYLE OF THE NEW YORK TIMES, PLEASE!
---
Example Changelog:
#### Changelog:
For the last {matrix.changelog_frequency} days from {fromdate} to {todate}
#### New Features
...
#### Refactoring
...
#### Documentation
...
#### Bug Fixes
...
#### Additional Notes
...
---
#### Changelog:
For the last {matrix.changelog_frequency} days from {fromdate} to {todate}
#### New Features
"""
    new_jsonl_files = await create_jsonl_data(
        user_prompt=full_changelog, system_prompt=combine_prompt
    )
    new_results = await upload_and_process_files(
        session=http, jsonl_files=new_jsonl_files
    )
    new_all_changelogs = []
    for result in new_results:
        choices = result["response"]["body"]["choices"]
        changelog = "\n".join(choice["message"]["content"] for choice in choices)
        new_all_changelogs.append(changelog)
    new_full_changelog = "\n\n".join(new_all_changelogs)
    log.info(f"Changelog generated:\n{new_full_changelog}")
    # Write the results to a file in the changelogs directory
    new_result_file = write_file_with_date_prefix(
        json.dumps(new_results, indent=4),
        data_dir / "changelogs",
        ndays=matrix.changelog_frequency,
        suffix="result",
    )
    log.info(f"LLM result written to: {new_result_file}")
    await send_message(client, room, new_full_changelog)

View File

@ -1,97 +0,0 @@
import inspect
import logging
from collections.abc import Callable
from pathlib import Path
from typing import Any
# ANSI escape sequences used to color log output per level (see FORMATTER).
grey = "\x1b[38;20m"
yellow = "\x1b[33;20m"
red = "\x1b[31;20m"
bold_red = "\x1b[31;1m"
green = "\u001b[32m"
blue = "\u001b[34m"
def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.Formatter]:
    """Build a factory producing a level-colored Formatter for a record.

    The factory takes (record, with_location); when with_location is true the
    format string additionally appends the record's file:line::function,
    with the file path shortened relative to the user's home when possible.
    """

    def build(record: logging.LogRecord, with_location: bool) -> logging.Formatter:
        reset = "\x1b[0m"
        try:
            location = Path("~", Path(record.pathname).resolve().relative_to(Path.home()))
        except Exception:
            location = Path(record.pathname)
        base = f"{color}%(levelname)s{reset}: %(message)s"
        if with_location:
            return logging.Formatter(
                f"{base}\nLocation: {location}:%(lineno)d::%(funcName)s\n"
            )
        return logging.Formatter(base)

    return build
# Map each log level to its color-specific formatter factory.
FORMATTER = {
    logging.DEBUG: get_formatter(blue),
    logging.INFO: get_formatter(green),
    logging.WARNING: get_formatter(yellow),
    logging.ERROR: get_formatter(red),
    logging.CRITICAL: get_formatter(bold_red),
}
class CustomFormatter(logging.Formatter):
    """Color records by level; optionally append their source location."""

    def __init__(self, log_locations: bool) -> None:
        super().__init__()
        # Whether formatted messages include file:line::function of the record.
        self.log_locations = log_locations

    def format(self, record: logging.LogRecord) -> str:
        # Delegate to the per-level colored formatter from FORMATTER.
        return FORMATTER[record.levelno](record, self.log_locations).format(record)
class ThreadFormatter(logging.Formatter):
    """Color records by level, never including source locations."""

    def format(self, record: logging.LogRecord) -> str:
        return FORMATTER[record.levelno](record, False).format(record)
def get_caller() -> str:
    """Return "file:line::function" for the caller of this function's caller,
    or "unknown" when the stack is too shallow to resolve."""
    frame = inspect.currentframe()
    # Walk two frames up: skip get_caller itself and its direct caller.
    for _ in range(2):
        if frame is None:
            return "unknown"
        frame = frame.f_back
    if frame is None:
        return "unknown"
    info = inspect.getframeinfo(frame)
    try:
        path = Path("~", Path(info.filename).resolve().relative_to(Path.home()))
    except Exception:
        path = Path(info.filename)
    return f"{path}:{info.lineno}::{info.function}"
def setup_logging(level: Any, root_log_name: str = __name__.split(".")[0]) -> None:
    """Configure the application's root logger with a colored stream handler.

    Source locations are included in messages only when `level` equals
    logging.DEBUG. Also quiets noisy third-party loggers (asyncio, httpx).
    """
    root = logging.getLogger(root_log_name)
    root.setLevel(level)

    handler = logging.StreamHandler()
    handler.setLevel(level)
    # Only log source locations when running at DEBUG verbosity.
    handler.setFormatter(CustomFormatter(str(level) == str(logging.DEBUG)))
    root.addHandler(handler)

    # Tame chatty dependencies.
    logging.getLogger("asyncio").setLevel(logging.INFO)
    logging.getLogger("httpx").setLevel(level=logging.WARNING)

View File

@ -1,89 +0,0 @@
import logging
log = logging.getLogger(__name__)
from dataclasses import dataclass
from enum import Enum
import aiohttp
@dataclass
class GiteaData:
    """Connection settings for one Gitea repository."""

    url: str  # Base URL of the Gitea instance, e.g. "https://git.clan.lol"
    owner: str  # Repository owner (user or organization)
    repo: str  # Repository name
    poll_frequency: float  # Minutes between polls (CLI --poll-frequency)
    access_token: str | None  # Optional API token, sent as "token <...>"
def endpoint_url(gitea: GiteaData, endpoint: str) -> str:
    """Build the Gitea API v1 URL for `endpoint` on the configured repository."""
    base = f"{gitea.url}/api/v1/repos/{gitea.owner}/{gitea.repo}"
    return f"{base}/{endpoint}"
async def fetch_repo_labels(
    gitea: GiteaData,
    session: aiohttp.ClientSession,
) -> list[dict]:
    """
    Fetch labels from a Gitea repository.

    Returns:
        list: List of labels in the repository.
    """
    headers = {"Accept": "application/vnd.github.v3+json"}
    if gitea.access_token:
        headers["Authorization"] = f"token {gitea.access_token}"

    async with session.get(endpoint_url(gitea, "labels"), headers=headers) as response:
        if response.status != 200:
            # You may want to handle different statuses differently
            raise Exception(
                f"Failed to fetch labels: {response.status}, {await response.text()}"
            )
        return await response.json()
class PullState(Enum):
    """Pull-request state filter accepted by the Gitea `pulls` endpoint."""

    OPEN = "open"
    CLOSED = "closed"
    ALL = "all"
async def fetch_pull_requests(
    gitea: GiteaData,
    session: aiohttp.ClientSession,
    *,
    limit: int,
    state: PullState,
    label_ids: list[int] | None = None,
) -> list[dict]:
    """
    Fetch pull requests from a Gitea repository.

    :param limit: Maximum number of pull requests to return.
    :param state: Which pull-request states to include.
    :param label_ids: Optional label ids to filter by (default: no filter).
        The previous default of `[]` was a shared mutable default argument.
    Returns:
        list: List of pull requests, sorted by most recent update.
    """
    url = endpoint_url(gitea, "pulls")
    params = {
        "state": state.value,
        "sort": "recentupdate",
        "limit": limit,
        "labels": label_ids if label_ids is not None else [],
    }
    # NOTE(review): unlike fetch_repo_labels, no access token is sent here —
    # confirm the pulls endpoint is intentionally queried anonymously.
    headers = {"accept": "application/json"}
    async with session.get(url, params=params, headers=headers) as response:
        if response.status == 200:
            pulls = await response.json()
            return pulls
        else:
            # You may want to handle different statuses differently
            raise Exception(
                f"Failed to fetch pull requests: {response.status}, {await response.text()}"
            )

View File

@ -1,31 +0,0 @@
import fcntl
import json
from collections.abc import Generator
from contextlib import contextmanager
from pathlib import Path
from typing import Any
@contextmanager
def locked_open(filename: str | Path, mode: str = "r") -> Generator:
"""
This is a context manager that provides an advisory write lock on the file specified by `filename` when entering the context, and releases the lock when leaving the context. The lock is acquired using the `fcntl` module's `LOCK_EX` flag, which applies an exclusive write lock to the file.
"""
with open(filename, mode) as fd:
fcntl.flock(fd, fcntl.LOCK_EX)
yield fd
fcntl.flock(fd, fcntl.LOCK_UN)
def write_locked_file(path: Path, data: dict[str, Any]) -> None:
    """Serialize `data` as pretty-printed JSON into `path` under a file lock."""
    serialized = json.dumps(data, indent=4)
    with locked_open(path, "w+") as handle:
        handle.write(serialized)
def read_locked_file(path: Path) -> dict[str, Any]:
    """Read JSON from `path` under a file lock; {} if the file doesn't exist."""
    if not path.exists():
        return {}
    with locked_open(path, "r") as handle:
        data: dict[str, Any] = json.loads(handle.read())
    return data

View File

@ -1,67 +0,0 @@
import asyncio
import logging
from pathlib import Path
import aiohttp
log = logging.getLogger(__name__)
from nio import AsyncClient, ClientConfig, ProfileGetAvatarResponse, RoomMessageText
from .changelog_bot import changelog_bot
from .gitea import GiteaData
from .matrix import MatrixData, set_avatar, upload_image
from .review_bot import message_callback, review_requested_bot, send_error
async def bot_main(
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """Log in to Matrix, ensure an avatar is set, then poll Gitea forever.

    Each loop iteration runs the changelog bot and the review-request bot;
    exceptions from either are reported to the admin via send_error rather
    than stopping the loop.
    """
    # Setup client configuration to handle encryption
    client_config = ClientConfig(
        encryption_enabled=False,
    )
    log.info(f"Connecting to {matrix.server} as {matrix.user}")
    client = AsyncClient(matrix.server, matrix.user, config=client_config)
    # Log every incoming text message (debug aid; the bot never replies).
    client.add_event_callback(message_callback, RoomMessageText)
    result = await client.login(matrix.password)
    if not result.transport_response.ok:
        log.critical(f"Failed to login: {result}")
        exit(1)
    log.info(f"Logged in as {result}")
    # Upload and set the avatar once; subsequent runs detect the existing one.
    avatar: ProfileGetAvatarResponse = await client.get_avatar()
    if not avatar.avatar_url:
        mxc_url = await upload_image(client, matrix.avatar)
        log.info(f"Uploaded avatar to {mxc_url}")
        await set_avatar(client, mxc_url)
    else:
        log.info(f"Bot already has an avatar {avatar.avatar_url}")
    try:
        async with aiohttp.ClientSession() as session:
            while True:
                try:
                    await changelog_bot(client, session, matrix, gitea, data_dir)
                except Exception as e:
                    log.exception(e)
                    await send_error(client, matrix, f"Changelog bot failed: {e}")
                try:
                    await review_requested_bot(client, session, matrix, gitea, data_dir)
                except Exception as e:
                    log.exception(e)
                    await send_error(
                        client, matrix, f"Review requested bot failed: {e}"
                    )
                # NOTE(review): 60 * x / 60 == x, so this logs poll_frequency
                # minutes; the sleep below converts minutes to seconds.
                log.debug(f"Sleeping for {60 * gitea.poll_frequency / 60} minutes")
                await asyncio.sleep(60 * gitea.poll_frequency)
    except Exception as e:
        log.exception(e)
    finally:
        await client.close()

View File

@ -1,89 +0,0 @@
import logging
from pathlib import Path
log = logging.getLogger(__name__)
from dataclasses import dataclass
from markdown2 import markdown
from nio import (
AsyncClient,
JoinedMembersResponse,
JoinResponse,
ProfileSetAvatarResponse,
RoomMember,
RoomSendResponse,
UploadResponse,
)
async def upload_image(client: AsyncClient, image_path: Path) -> str:
    """Upload a PNG image to the homeserver and return its MXC URL."""
    with open(image_path, "rb") as fh:
        response: UploadResponse
        response, _ = await client.upload(fh, content_type="image/png")
    if not response.transport_response.ok:
        raise Exception(f"Failed to upload image {response}")
    # content_uri is the MXC URL referenced by profile/room APIs.
    return response.content_uri
async def set_avatar(client: AsyncClient, mxc_url: str) -> None:
    """Set the bot's profile avatar to the given MXC URL."""
    response: ProfileSetAvatarResponse = await client.set_avatar(mxc_url)
    if not response.transport_response.ok:
        raise Exception(f"Failed to set avatar {response}")
async def get_room_members(client: AsyncClient, room: JoinResponse) -> list[RoomMember]:
    """Return the members currently joined to `room`."""
    members_response: JoinedMembersResponse = await client.joined_members(room.room_id)
    if not members_response.transport_response.ok:
        raise Exception(f"Failed to get users {members_response}")
    return members_response.members
async def send_message(
    client: AsyncClient,
    room: JoinResponse,
    message: str,
    user_ids: list[str] | None = None,
) -> None:
    """
    Send a markdown message in a Matrix room, optionally mentioning users.

    Mentions are rendered as matrix.to links and prepended to the HTML body.
    """
    formatted = markdown(message)
    if user_ids:
        mentions = ", ".join(
            f"<a href='https://matrix.to/#/{user_id}'>{user_id}</a>"
            for user_id in user_ids
        )
        formatted = f"{mentions}: {formatted}"
    # Messages with mentions go out as m.text (notifies), plain
    # announcements as m.notice.
    content = {
        "msgtype": "m.text" if user_ids else "m.notice",
        "format": "org.matrix.custom.html",
        "body": message,
        "formatted_body": formatted,
    }
    res: RoomSendResponse = await client.room_send(
        room_id=room.room_id, message_type="m.room.message", content=content
    )
    if not res.transport_response.ok:
        raise Exception(f"Failed to send message {res}")
@dataclass
class MatrixData:
    """Matrix connection and scheduling settings for the bot."""

    server: str  # Homeserver URL
    user: str  # Bot's Matrix user id
    avatar: Path  # Path to the avatar image uploaded on first login
    password: str  # Login password (resolved by matrix_password())
    changelog_room: str  # Room the changelog is posted to
    review_room: str  # Room review pings and errors are posted to
    changelog_frequency: int  # Days covered by each changelog
    publish_day: str  # Weekday name on which the changelog is published
    admin: str  # Matrix user id mentioned on errors

View File

@ -1,194 +0,0 @@
import asyncio
import json
import logging
from os import environ
from typing import Any
import aiohttp
import tiktoken
log = logging.getLogger(__name__)
# The URL to which the request is sent
# NOTE(review): appears unused by the batch pipeline below, which builds its
# own files/batches URLs — confirm before removing.
url: str = "https://api.openai.com/v1/chat/completions"
def api_key() -> str:
    """Resolve the OpenAI API key.

    Prefers the OPENAI_API_KEY environment variable; otherwise reads and
    strips the contents of the file named by OPENAI_API_KEY_FILE.
    """
    key = environ.get("OPENAI_API_KEY")
    if key is not None:
        return key
    key_file = environ.get("OPENAI_API_KEY_FILE", default=None)
    if key_file is None:
        raise Exception("OPENAI_API_KEY_FILE environment variable is not set")
    with open(key_file) as handle:
        return handle.read().strip()
async def create_jsonl_data(
    *,
    user_prompt: str,
    system_prompt: str,
    model: str = "gpt-4o",
    max_response_tokens: int = 4096,
) -> list[bytes]:
    """Build OpenAI Batch-API request payloads for a (possibly huge) prompt.

    The user prompt is token-split into chunks that fit the context budget;
    each chunk becomes one JSON-encoded /v1/chat/completions request (as
    bytes) intended to be uploaded as its own single-line JSONL file.
    """

    def split_message(content: str, max_tokens: int) -> list[str]:
        # Split the content into chunks of max_tokens
        content_tokens = encoder.encode(content)
        chunks = []
        for i in range(0, len(content_tokens), max_tokens):
            chunk = content_tokens[i : i + max_tokens]
            chunks.append(encoder.decode(chunk))
            log.debug(f"Chunk {i/max_tokens}: {len(chunk)} tokens")
        return chunks

    encoder = tiktoken.encoding_for_model(model)
    # NOTE(review): 127_000 presumably approximates the model's context
    # window minus headroom — confirm against current gpt-4o limits.
    max_message_tokens = 127_000 - max_response_tokens
    # Split user_prompt into multiple user messages if it exceeds the max_message_tokens
    user_messages = []
    for message_chunk in split_message(user_prompt, max_message_tokens):
        if len(message_chunk) == 0:
            raise Exception("Empty message chunk")
        user_messages.append({"role": "user", "content": message_chunk})
    ## count number of tokens for every user message
    # Re-encoding guards against token-count drift from the decode round-trip.
    count_tokens: int = 0
    for i, message in enumerate(user_messages):
        count_tokens = len(encoder.encode(message["content"]))
        log.debug(f"Number of tokens in the user messages: {count_tokens}")
        if count_tokens > max_message_tokens:
            raise Exception(f"Too many tokens in the user message[{i}] {count_tokens}")
    batch_jobs: list[bytes] = []
    for message in user_messages:
        # NOTE(review): every request reuses custom_id "request-1"; this stays
        # unique per batch only because each chunk is uploaded as its own file.
        summary_request: dict[str, Any] = {
            "custom_id": "request-1",
            "method": "POST",
            "url": "/v1/chat/completions",
            "body": {
                "model": model,
                "messages": [
                    {"role": "system", "content": system_prompt},
                    message,
                ],
                "max_tokens": max_response_tokens,
            },
        }
        dumped = json.dumps(summary_request)
        batch_jobs.append(dumped.encode("utf-8"))
    return batch_jobs
async def upload_and_process_files(
    *,
    session: aiohttp.ClientSession,
    jsonl_files: list[bytes],
    api_key: str | None = None,
    completion_window: str = "24h",
) -> list[dict[str, Any]]:
    """
    Upload multiple JSONL files to OpenAI's Batch API and process them asynchronously.

    :param session: HTTP session used for every request.
    :param jsonl_files: One JSONL payload (bytes) per batch job.
    :param api_key: OpenAI API key. Defaults to resolving api_key() lazily at
        call time; the previous default of `api_key()` was evaluated at import
        time and made importing this module fail whenever no key was set.
    :param completion_window: Completion window passed to the Batch API.
    :return: Parsed result lines of all batches, flattened into one list.
    """
    if api_key is None:
        # The parameter shadows the module-level api_key() helper, so look
        # the helper up via globals() to resolve the key lazily.
        api_key = globals()["api_key"]()
    headers = {
        "Authorization": f"Bearer {api_key}",
    }
    log.debug(
        f"Uploading {len(jsonl_files)} files to OpenAI, completion window: {completion_window}"
    )

    async def upload_file(jsonl_data: bytes) -> str:
        # Upload one JSONL payload with purpose "batch"; returns the file id.
        upload_url = "https://api.openai.com/v1/files"
        data = aiohttp.FormData()
        data.add_field(
            "file",
            jsonl_data,
            filename="changelog.jsonl",
            content_type="application/jsonl",
        )
        data.add_field("purpose", "batch")
        async with session.post(upload_url, headers=headers, data=data) as response:
            if response.status != 200:
                raise Exception(
                    f"File upload failed with status code {response.status}"
                )
            upload_response = await response.json()
            file_id = upload_response.get("id")
            if not file_id:
                raise Exception("File ID not returned from upload")
            return file_id

    async def create_batch(file_id: str) -> str:
        # Start a batch job for an uploaded file; returns the batch id.
        batch_url = "https://api.openai.com/v1/batches"
        batch_data = {
            "input_file_id": file_id,
            "endpoint": "/v1/chat/completions",
            "completion_window": f"{completion_window}",
        }
        async with session.post(
            batch_url, headers=headers, json=batch_data
        ) as response:
            if response.status != 200:
                raise Exception(
                    f"Batch creation failed with status code {response.status}"
                )
            batch_response = await response.json()
            batch_id = batch_response.get("id")
            if not batch_id:
                raise Exception("Batch ID not returned from creation")
            return batch_id

    async def check_batch_status(batch_id: str) -> str:
        # Poll every 10s until a terminal state; returns the output file id
        # on success, raises on "failed"/"expired".
        status_url = f"https://api.openai.com/v1/batches/{batch_id}"
        while True:
            async with session.get(status_url, headers=headers) as response:
                if response.status != 200:
                    raise Exception(
                        f"Failed to check batch status with status code {response.status}"
                    )
                status_response = await response.json()
                status = status_response.get("status")
                if status in ["completed", "failed", "expired"]:
                    if status != "completed":
                        raise Exception(
                            f"Batch processing failed with status: {status}"
                        )
                    return status_response.get("output_file_id")
            await asyncio.sleep(10)

    async def retrieve_results(output_file_id: str) -> list[dict[str, Any]]:
        # Download and parse the JSONL result lines of one finished batch.
        output_url = f"https://api.openai.com/v1/files/{output_file_id}/content"
        async with session.get(output_url, headers=headers) as response:
            if response.status != 200:
                raise Exception(
                    f"Failed to retrieve batch results with status code {response.status} reason {response.reason}"
                )
            content = await response.text()
            results = [json.loads(line) for line in content.splitlines()]
            return results

    # Pipeline: upload all files, create all batches, wait for completion,
    # then download everything — each stage runs concurrently.
    file_ids = await asyncio.gather(
        *[upload_file(jsonl_data) for jsonl_data in jsonl_files]
    )
    batch_ids = await asyncio.gather(*[create_batch(file_id) for file_id in file_ids])
    output_file_ids = await asyncio.gather(
        *[check_batch_status(batch_id) for batch_id in batch_ids]
    )
    all_results = await asyncio.gather(
        *[retrieve_results(output_file_id) for output_file_id in output_file_ids]
    )
    # Flatten the list of results
    combined_results = [item for sublist in all_results for item in sublist]
    return combined_results

View File

@ -1,122 +0,0 @@
import logging
log = logging.getLogger(__name__)
import datetime
import time
from pathlib import Path
import aiohttp
from nio import (
AsyncClient,
JoinResponse,
MatrixRoom,
RoomMessageText,
)
from matrix_bot.gitea import (
GiteaData,
PullState,
fetch_pull_requests,
)
from .locked_open import read_locked_file, write_locked_file
from .matrix import MatrixData, get_room_members, send_message
async def message_callback(room: MatrixRoom, event: RoomMessageText) -> None:
    """Log every text message the bot sees; it never replies."""
    sender = room.user_name(event.sender)
    log.debug(
        f"Message received in room {room.display_name}\n"
        f"{sender} | {event.body}"
    )
async def send_error(client: AsyncClient, matrix: MatrixData, msg: str) -> None:
    """Post an error message to the review room, mentioning the admin."""
    # Join is a no-op if the bot is already a member of the room.
    room: JoinResponse = await client.join(matrix.review_room)
    if not room.transport_response.ok:
        log.error("This can happen if the room doesn't exist or the bot isn't invited")
        raise Exception(f"Failed to join room {room}")
    await send_message(client, room, msg, user_ids=[matrix.admin])
async def review_requested_bot(
    client: AsyncClient,
    http: aiohttp.ClientSession,
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """Ping room members when a pull request needs their review.

    Scans recent pull requests, matches assignees/requested reviewers against
    the review room's members, and mentions them once per PR update. Per-PR
    state is persisted in last_review_run.json between runs.
    """
    # If you made a new room and haven't joined as that user, you can use
    room: JoinResponse = await client.join(matrix.review_room)
    if not room.transport_response.ok:
        log.error("This can happen if the room doesn't exist or the bot isn't invited")
        raise Exception(f"Failed to join room {room}")
    # Get the members of the room
    room_users = await get_room_members(client, room)
    # Fetch the pull requests
    tstart = time.time()
    pulls = await fetch_pull_requests(gitea, http, limit=50, state=PullState.ALL)
    # Read the last updated pull request
    ping_hist_path = data_dir / "last_review_run.json"
    ping_hist = read_locked_file(ping_hist_path)
    # Check if the pull request is mergeable and needs review
    # and if the pull request is newer than the last updated pull request
    for pull in pulls:
        requested_reviewers = pull["requested_reviewers"]
        assigned_users = pull["assignees"]
        mentioned_users = []
        if assigned_users:
            mentioned_users.extend(assigned_users)
        if requested_reviewers:
            mentioned_users.extend(requested_reviewers)
        mentioned_users = list(map(lambda x: x["login"].lower(), mentioned_users))
        # NOTE(review): `name not in matrix.user` is a substring test against
        # the bot's user-id string, not list membership — presumably meant to
        # drop the bot itself from the mention list; confirm it's intentional.
        mentioned_users = list(
            filter(lambda name: name not in matrix.user, mentioned_users)
        )
        pull_id = str(pull["id"])
        needs_review_label = any(x["name"] == "needs-review" for x in pull["labels"])
        # Precedence: parses as (mentioned and mergeable) or (label and
        # mergeable), i.e. mergeable AND (someone mentioned OR label set).
        if (
            len(mentioned_users) > 0
            and pull["mergeable"]
            or needs_review_label
            and pull["mergeable"]
        ):
            last_time_updated = ping_hist.get(pull_id, {}).get(
                "updated_at", datetime.datetime.min.isoformat()
            )
            # Only ping when the PR changed since the last recorded run
            # (string comparison of ISO timestamps).
            if ping_hist == {} or pull["updated_at"] > last_time_updated:
                ping_hist[pull_id] = pull
            else:
                continue
            # Check if the requested reviewers are in the room
            ping_users = []
            for user in room_users:
                user_name = user.display_name.lower()
                if any(
                    user_name in mentioned_user or mentioned_user in user_name
                    for mentioned_user in mentioned_users
                ):
                    ping_users.append(user.user_id)
            # Send a message to the room and mention the users
            log.info(f"Pull request {pull['title']} needs review")
            log.debug(
                f"Mentioned users: {mentioned_users}, has needs-review label: {needs_review_label}"
            )
            message = f"Review Requested:\n[{pull['title']}]({pull['html_url']})"
            await send_message(client, room, message, user_ids=ping_users)
    # Write the new last updated pull request
    write_locked_file(ping_hist_path, ping_hist)
    # Time taken
    tend = time.time()
    tdiff = round(tend - tstart)
    log.debug(f"Time taken: {tdiff}s")

View File

@ -1,67 +0,0 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project]
name = "matrix-bot"
description = "matrix bot for release messages from git commits"
dynamic = ["version"]
scripts = { mbot = "matrix_bot:main" }
license = {text = "MIT"}
[project.urls]
Homepage = "https://clan.lol/"
Documentation = "https://docs.clan.lol/"
Repository = "https://git.clan.lol/clan/clan-core"
[tool.setuptools.packages.find]
exclude = ["result"]
[tool.setuptools.package-data]
matrix_bot = ["py.typed"]
[tool.pytest.ini_options]
testpaths = "tests"
faulthandler_timeout = 60
log_level = "DEBUG"
log_format = "%(levelname)s: %(message)s\n %(pathname)s:%(lineno)d::%(funcName)s"
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --durations 5 --color=yes --new-first" # Add --pdb for debugging
norecursedirs = "tests/helpers"
markers = ["impure", "with_core"]
[tool.mypy]
python_version = "3.11"
warn_redundant_casts = true
disallow_untyped_calls = true
disallow_untyped_defs = true
no_implicit_optional = true
[[tool.mypy.overrides]]
module = "argcomplete.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "ipdb.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "pytest.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "setuptools.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "nio.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "markdown2.*"
ignore_missing_imports = true
[tool.ruff]
target-version = "py311"
line-length = 88
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003"]

View File

@ -1,30 +0,0 @@
# Development shell for the matrix-bot package.
{
  matrix-bot,
  mkShell,
  ruff,
  python3,
}:
let
  # The package's test dependencies plus interactive tooling for development.
  devshellTestDeps =
    matrix-bot.passthru.testDependencies
    ++ (with python3.pkgs; [
      rope
      setuptools
      wheel
      ipdb
      pip
    ]);
in
mkShell {
  buildInputs = [ ruff ] ++ devshellTestDeps;

  # Use ipdb when Python hits breakpoint().
  PYTHONBREAKPOINT = "ipdb.set_trace";

  shellHook = ''
    export GIT_ROOT="$(git rev-parse --show-toplevel)"
    export PKG_ROOT="$GIT_ROOT/pkgs/matrix-bot"
    # Add clan command to PATH
    export PATH="$PKG_ROOT/bin":"$PATH"
  '';
}

View File

@ -1 +0,0 @@
../../../machines/web01

View File

@ -1,24 +0,0 @@
{
"data": "ENC[AES256_GCM,data:iJTjs8bG2GLGnGp/Hf4Egtorrk87rkgh9Yn+gPuWAJ61wIAtN3g9SU3vyYpvRrIqHVUyLObGbrWYi3Ol07M=,iv:YTOctq9aw4tc9xwoOO4UbR2cYPHV0ZmuE1FRWn13sgk=,tag:zU3HFqxwZcn/9S02bj3/fA==,type:str]",
"sops": {
"kms": null,
"gcp_kms": null,
"azure_kv": null,
"hc_vault": null,
"age": [
{
"recipient": "age17xuvz0fqtynzdmf8rfh4g3e46tx8w3mc6zgytrmuj5v9dhnldgxs7ue7ct",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBUa1FIbWt2aHduZVBlcGpq\nOUsybTRmR1I4M1JZY3A4ZUFpZEN3dlZCTkZRCmdVajFUcDMyeEdJMThVZElJdmlJ\naEhvSk9sYThXdkxoaXVLem15dlJMcm8KLS0tIEJsdFY4L0M3Q2cwdzFOdy9LN0k3\nOEdCM09PUWlZbE91U2ZYNmVHeU43bUUKC+z+6XZCiVfwGQQCAHoB+WGE5Mm3qJZq\nuyD5r3Ra6MAvvwIhnqbwadRoxVH1HcdIB6hJsNREE/x6YNLxi3T7nw==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1zwte859d9nvg6wy5dugjkf38dqe8w8qkt2as7xcc5pw3285833xs797uan",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFOHBub0J3U2FuOFNuS2xl\nRkExYVEvcmowWDlyUGlpYko3N1dIcmN5dmxJCkRtRm9qVVNQK3FQcTB1U3g3OHhS\ncU9XaHUxNWVlL2tpblpZUHF1UWt4am8KLS0tIGJOZkJ4eDJ6WWx1d2R1VG1qODZS\ncHhXMVhEUHdLZjIvNUYxRmduZkpjaU0KqZKUb9KYpSvwxaJRAbYhkuOdnzsU3p9Q\nU2WO5TIwS762yNqWTzyYdxb9YxvTOatW7uWTorRXZu1yqCTMTuq+1Q==\n-----END AGE ENCRYPTED FILE-----\n"
}
],
"lastmodified": "2024-07-03T09:47:40Z",
"mac": "ENC[AES256_GCM,data:dpdmkhedaqivzIlxhoWb+u77JmfWRo94iWDolAa9UKvnjBo1QE5sHbqWasCH81wjO0wPBPRUqnj9JQ7kG9AFp24Fad+gAp74Gwx5M/PSx1dsd6xkcxt6PJ8sFXGb0H3lYduCaNfDGgsJTVoDcbk8rgYzjo5+mxs2pqrrn10t4iU=,iv:MGMIq2rF4+hr89/dppi2JDVbpAShscYTMM9viHPepIY=,tag:Dj9B6qvAkmiUmgRvZ6B94Q==,type:str]",
"pgp": null,
"unencrypted_suffix": "_unencrypted",
"version": "3.8.1"
}
}

View File

@ -1 +0,0 @@
../../../users/qubasa

View File

@ -1,14 +1,16 @@
{ self, inputs, ... }:
{
clan = {
flake = inputs.clan-core.lib.buildClan {
meta.name = "infra";
directory = self;
# Make flake available in modules
specialArgs.self = {
inherit (self) inputs nixosModules packages;
};
directory = self;
machines.web01 = {
imports = [ ./web01/configuration.nix ];
machines = {
web01 = {
imports = [ (./web01/configuration.nix) ];
};
};
};
}

View File

@ -7,11 +7,10 @@
networking.hostName = "web01";
systemd.network.networks."10-uplink".networkConfig.Address = "2a01:4f9:3080:418b::1";
clan.core.networking.targetHost = "root@clan.lol";
clan-infra.networking.ipv4.address = "65.21.12.51";
clan-infra.networking.ipv4.gateway = "65.21.12.1";
clan-infra.networking.ipv6.address =
config.systemd.network.networks."10-uplink".networkConfig.Address;
clan.networking.targetHost = "root@clan.lol";
clan.networking.ipv4.address = "65.21.12.51";
clan.networking.ipv4.gateway = "65.21.12.1";
clan.networking.ipv6.address = config.systemd.network.networks."10-uplink".networkConfig.Address;
system.stateVersion = "23.05";
}

View File

@ -1,6 +1,5 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p coreutils sops openssh
# shellcheck shell=bash
# shellcheck disable=SC1008,SC1128

View File

@ -1,6 +1,5 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p nix jq bash rsync
# shellcheck shell=bash
# shellcheck disable=SC1008,SC1128

View File

@ -0,0 +1,64 @@
cryptsetup_key: ENC[AES256_GCM,data:79qOTOi4ftTmIWuc/7bFf3NXaa2Fs6mTUfji,iv:xq9HM2uB4rr75qeZEAh2pFvEDAtXdFhsrT/manI7RqM=,tag:iELo+UHSplsQWIK9aQ+uMw==,type:str]
hetzner-storagebox-password: ENC[AES256_GCM,data:vmH1NlKTuEDGb1F3Ni0PSDk=,iv:0q3vngK4SvjjPVHTGTBmpU+bdBc7IyY90EL3zJsf+BQ=,tag:iWqmuT6IJgVG8yPT6YZzUQ==,type:str]
hetzner-borgbackup-ssh: ENC[AES256_GCM,data:/x2bRdkv6Q7ymBmiedK0eV+FxKAS3R192KjReIvixLvPLOZFr9ajFy0mHSZiDjjzPn7vaP2/PI9/PWzGrkWeXv4MAbU9S8QvgUFPYbfUSQV2srSh1/UTl7rv0o2tw/LuCIbBivSu5d4xMWtHE4e/x50eWImG6e20q4+5ZF4hSXPGLmSayCGptbSq7JlgtcVNmTHFSdxPesGx7t50553nzU2ZoJxn9GDnhuVbSOmvhDpwaUYCDY/bGhmMpk32inzUxtPFacgz2JSygo3JCyWxervaE2OMd69fGI24F6cbqPV4ORWNymOnGGsFmMiTCyfKZjOVxhnBXAyeP444PE7MeNf6s2fFhAyV1M/mToa/ElYPHeJbY9t4bK0UPBXtral3vqMVNP6sYMzIPl0DBYsPeY71uEo5ctGJMum+AIhSulYzfTPq7IdPqo1NZGIXQbPb+8P+FZxUOEBm6imLdhG1DmL4Ji80yAPJ7w6Qgs7VoJPHdYTOOE+Z/s/1o52VUtGsSVergP8macRGHR432UIZjRvIxjdu7wvs7GLM,iv:af8J70mGekRpNCT15NjrYkgmoBQyTzBR866fRyrSmos=,tag:ZWLvsFQCFz72ih6UCDP2uA==,type:str]
hetzner-borgbackup-passphrase: ENC[AES256_GCM,data:Stu8kYR+jP9aOjWz16/DhUTpxf4xwK8e7kJo,iv:rU6Gi0yoe7EBxQJ4wczDEjZG4GrB2mPmB1dD143HyeA=,tag:sSR3Do4vepb0vaMRhkj1Vw==,type:str]
initrd_ssh_key: ENC[AES256_GCM,data:SpSX6RgnpgVkd3sL+mJx0Lk6RnagfxwO1cUKtbj4wxlJHpSsBnI6+tGJjssoCp38jHOPYZ4U0IE960ojjtXyBL/sF37Sw0E8uDGr0rL/wuuQmzhF3AC9VfuDOQNbe0pYTr7HldzIvbDRowIShxqbKfBVizkR1bxZkmHfDpMKE1gGivFLYeHC+gSVgTBtPEgDCx361+I103K2kCczu2VGnfmfc9ExrTO6/7ruj2DRjFLOaVmkXe896KjN+YpTTjT85gjEZJ75AGEUNKCNppQRkM0RpJBJyRunHmKqxh5VnFnlbiklsX2S5ev07G9oqIu0kZI6XduQjj/okB/4SeoY9QE6FOj6dRi2WSBNGpT9fBnV4i6bv2Z612ISXwO0GGfXQeWE4mA8QSaJ9oa/fnVFb7WolU9DISq8sYPc85VXVJGCFZ17DDVGK/capjveGErXnk6lJieBwArN5xEZfr/tPL15Q1DNdyYOJwiL1bODQwxYExpFu32XJ/ZMDiucWDXnEwJJf7WpThh0FiAZFzGAJ0b3SeJpuQvK6xXD,iv:w+YuoZMUswV9sw31PXFLKHbinRit9twPDqofeojVdZo=,tag:eCYSUX5EA/NTD3yIdTC7PA==,type:str]
ssh_host_ed25519_key: ENC[AES256_GCM,data:68nXUeyy7xh/KKdd4ajdrkuzc54ZpnXhMpPjaDYtwMLlHja/O/t7g4IlVgLTKWwgMbr5/lAj04cEI99dAuoARaE+p4ldQeQNzPb7ZOPyRmSnBgO/qgtZoKNLaIX7q+Mwl+vsa2d2ZSHG8Fu7hzNIELWHQoaIFi782U+yKt2LHhahdVyY/FUPcymi0EtrwCqBHKSlEu+SXiwDXT4f+PCBtyaCJT4T4Mo2+TbERur9r9YOnKG2GEg46lDwTrr6FMya5K2WBks7AQwQ+rpoHCEy05tTg3GTJd8DypLhemrHMD7HeYzRf+HnVCyTngxmoquCD5/g9OM+fu63GIsnbGItWxREfjfzvODKuPaVCOat4mWQr1pLch1lcIkxQhU4EXg4LgHUMXFnQFrR8rvRT++YK1nRLB3w/lyvU4PAoocYlNR3G9JEClRnu4GH615ILEjXhyUZyAHIGx1+W7M6j4aGFhm3NOJWCTctaFd5r6uUeTqDpV757UzgHIR5lhtlfjeL41r3mmN09os/HpKt9EZ0,iv:+T4xz2xvyerO/ffW/YAKUkf5B/UVL8cUOl/ifWKIIx4=,tag:NTJklV5yqMT7uq0TvclhIA==,type:str]
ssh_host_ed25519_key.pub: ENC[AES256_GCM,data:k5T5CX56wSm1DADOH47sGb1h65aPk3NSvQR6Rgu7ZzRrq4pF84ofaRMEJU5d9MHnb+Eg92jnibRNwKUH36e5c9PJXtU14aY2f7HzOCyVk7WXd8H0eOuOfzG5ICQ=,iv:CcqwTYnk1NkJpn9q1Rnz4ERxhhnn60h3sXqMd3ILTk4=,tag:LhAIzkeozvT4L7+vJ9ojnQ==,type:str]
ssh_host_rsa_key: ENC[AES256_GCM,data:/f0EkmRbX9vSlhUmuJIAEJLs5R4ryxrtTN0hqcJlxTU9B0woh3pFK0GrXBbL1l5uvEQo2Vw2p7yeCqFch7ypfhBTRxD8IhWoc2l7trawhKa0bhhiQ8oQU47AW96ZCQHQx4h329PZBmFwjK31gtHoY3nkj862vWoFLucJs0yRkCVQJEgkjehO1KQbPU7PlYgUECPnpOgQW0wtGLSND20QyJyEQFSg0Vk7fWY6ycSiu+Piejg5MZVgdVofqEw/fEWyp0Za0P2SEyf0lyO8Ob/amObS1oHmLrb++bcOXl/BBqLPow+zx79juwmuFvhk/vZfeO1P04J83eg3f40s0HPP3g7SKjJTf0pYgdu3k3uXN3WsDWX0Tg3YfAxhKzIScM8gyrkCY4ju8fORGCRSzCddA1p1DvtsMAK5RmT6rSob3yS9nEYIlVUXeDsYGMtspoOd9vts0nS2QQ3VHIg27OYieixevzOtpCtyfogMIqMYgvwLlEzHt9Xi4uzFPxb5MQh0vt4PTy97pooCx0HHKYueMGeByZk2EMIVZzIBq1t3GEtj2bIiFIuAxPdU5odBCM29eJwBbwDw8Mngo/dF/HnDvx/AJ89wlbv1AtAqaIPPfQDUwdUJBhKW2rnKmef9E3oDiikhmDOq1UEAaf31NgrDL9nDrZ9nhHbkuL6Piets+jQ0yBCXVzP1ymHGqPAshlnYDSukAuZ8ByCPJr2p5LT09zYLyhtMMO04Qqcu7x8AMJ7Ha2iVgKDzukNcWEtNF5k2XmZ/lUidb7GZvdUsymkRxqAyZeWmP8DzjP7L+8LQhJhY4WixYsXQkVD8eTQyo+hHwQpM/j2XYwIaEOM/fEBvSVwR91U9J4ONqOD0SxIV/i9BoS472q3vjGbzQ1PyaNznvqBuL0WFfWVoUHNw+62LuOF57x6dhzy+et17xk5A4qmsQ2hScDLg6Ha4ygsGKbkLbFz+HQt7IUpIGAmN31ybtrkGR7Z973866njx7yLMFW4gR6bLgz/uNO55FNDLJ20YPSGtt6xoS2dDFfvJD9CTWaw7Z/kfGwjI/IEJgnFDNIEm3mIiM3f4O19m9EJy/ySXTYpTyGnRaEjz2DFIQiStNB+CmDuGgG4fTymFArGt2ieF+aB10E0pLJsZrV/62fRE37BXBuZWhCUshu1HfO9wkkooPunjqVN+5OqN+zgmi6nHzadlbnATK/dPcuBF6nA1wCQd/Y/qpFiYhy2NY1G3lG80/fUdwlTCYP+FFY4x+fxw4qh9J6XhNf1FFTcRpmrmhzjyjb6OvWCgdSRybBxzifTGaAWUde3UEpLHyZzd8Ag+etLwCcLBLeKPfVtfceqWYgkMp62zIXPZ5KZsDDQ8BN9JTMOEk93lL/vGmdXQhxioZ1NmA+Pi1dkSgDEDoUqZLL/FVrbsJ4v9WbpAexjcmKdVxJLCuzxzZKNfc8YeGc/xPgFRAC0BfTxGNkvKONIt6FGc7IF+lJze9HwDR5OLx+cFRhnGJjiw5OoguxRmMl5nzwFggmBCB1IZT4m6ADL8DaCw9RDjErmyz4wU5kROnGBLNgX6tWvYNStUFdIgSoXDAZxr5ArX4kpj93ls8rI+azvRHOATb5kLCS+N8JlyBQRjs22OGf/rjP5Tb7u857MSEy8TWtLwbt8PpL0Zg4h9pAzSgdZHPdVFMc/fZFOT8psVZn03jcsDETeya6uzMUHyBdGnbvbHrJ2y6MCbcWn8suacfBSANoicuhHXW1Kj8dNSpNYrD3xfHfU0s8ajR4HuG92GEGqV4NAb/IXXU5h14OH3bkt46dDABg3vV0bcunmtO8ReWcL4FKrehJU1ycfcgFMVGzxhOQObqWhCbWk3qS2YvUbK7ODzBFCqHUAwfJFNd1wnI+Ml7AjHdG+boFI07C6D0NX3mu1y9Mnns+9Ghr0kh30qeBux2hyz3jGjAy1qrhgbkyR6rBGEQoFz2i
SueT867r4D5XPgIKeh8CR0ZkVMME2Z2DKpv3nfbKYZhXzAvQ8TYEueOvKT8f1zuDjcmo3x+GvVPYz1KntNKRrSN5pgPoQLiF1fHommEUAAXM90ysijUGw9K3w5FGrzd05ipcGRaUGb4bOglEezfzt3ODOoe5oW4P8LbnEu1I9GkoU7fDeNZJZ2ZrtL9zNpELo77daupcHt8Ytt6JwQaQcQXNmUcAgIg7sCet+063WES5jwDbR8UgZ5MYNC3Kca4zdI5Ki3ncWHecCtGKCNLeOTbSPb+iGzZI0Grb1pZXs2LfkU4Q2/GIPFF5lkDjPlXATuGh9SjR2xM8YQdYKBDjkMYITSdarjLPR51vGS+RgexJjFNNh3gY/qv8C0j1TUZ5C+FaT4H6A4KCJgo8Pb9hZsJ+hRM+yLDZeK3JUno1IckkLdBkB0E4s5l92ABg87AiTwHeYalQIJIb5mCaMOt4cu/0szWLvd32lmVHN08Xs9AapfG5StpAqSssx0/bbaPPdJW4D0ystMTit+SVvhwVqeGX+8LI0zVsJTzuJCUtfZosdOdThg9zjtYfQFChpogNUBu+4XD6ere02ihY4JvL8Souc2Rel9b/lB9bVAc9Vvq6YrqFsM0w1L1tkqG3TCZ2Mga3at04CP7wItDejmzkLPVEHm7OjuWtBEAl4vqUkV2bfkEzICQPwC6ugiTOQraJqXhlgWLPkBWsQBTxrMT5bnhPg5KvwjRbUXEHL5txqH8C/0EFsaU48Dg/y9CLq8ADIuTRE9R4tR63eOZ017ObkkQeNt76mq+S3DCTrvU+6qUNczLmi0BX5RWqNXJzD46KIEdjkQxr+NBCMC/QdVBdbt/orqNXpPqzVkgmLB4Uze6ppZ+64qWO/Ana7hVtb7cFv8MABIAFxII1y6DZAftA9kfZl1aL7JD4KLZVo9+PfKkH5Z4ZngRaNt2YAruqn0WmgwdPYoZTTCDgqxRfxsfh33LWYJQMV55Tc5UdDol1OpomUoKrNI+zOSeIvMO1C9tUupNjkWNlCEbwVSOqWh5tjMAtH1FSpZgAsqbxVvsYOsF8S/5TgdzMz7N9N4MoIdU2PEwLtOZAhMbTQR2iGIBGJQinFJvjPZGuYQF8+e29gDjRvEzOIuamwmuWtELfqmsFz346a8d/emZkMX8pyprt1afaX4xFfI8qFqm8Vj3Tx6Cd4ll7fQaWkNraPPetZBEKuuHnDfPAHzuYhgYKsIhxaMxkRfT+q7ihVdj6SgcLZRWxF2VyqwLKH6oKJljUso+FxQoY3SdF2bTSCGbIzKeLpej9NMq1rb8jLb1lI1weKY5Hg6cglt8UGSGvNY4qouh/9//Ab6dkEuEBwRvDgwC5a0y/wkGR8VQlsw0+17oudVhjbyv//8cPU8q+VSYrJ3QYJ63MjHAsu+5uukP0s2UuArzszROg2SdHGDdff5svZoG2hVptUgyGh8GlvTst7QDMIM+Wp/+Daer/7YUrqdIoOX+Por9u9++R+FLmnX/MGI3cV/TaeLEHVT6N/A8XtliicE1nWCcqvc+fwOg1WJIVkyyKRI1hPzVJudONOd54m5YKGxOIHNKNGSCu5i/K2Ft01tQMd7g3DcTGHiGys6cC+CkZvAFFXiOCvfhtAVqGKgbZxyJBzdUnkf6uqtP8TSP2+Ea8xUY2tq4Hg2FnC6sk6wuk9v5nE4e8XMyrYNWuZAiBeQ228ko6GOZyKaGxTqh2DQaFSvjNabtUbu5byiqdTySTvhTqV0HhruH1geb6VHNFiYAvV3dP5rIkCwIfRLyqHcNauOtgS+r+hKCg9vH7dFiQJugGzOvxM2yqi7762QCeMhaWAUxRR0m/x2dOymd8HKHUBY4fP7f6qExYE2l+yFbzms/KMaUyX9Ppspyhnw89b85fT1OsFhdphc4vzhN138YfXcOUQa4SjrCIO2D4/jrc46TlEh5byAucNSeX9da8PBE24vDl1vbMft9z/KK5dIRqbhb5TaSUgxUj
fimbSaim9dh1jL94XPYmACy6EqYBIcKFNj9Ghw/J1e2xAnDs8S9HCCMiJHUUOCGTHFjpjq585iqasQvxPzccZfteXxObmOVrL7CjiiCY/+IuPnSoBvynPAW86Z7YLCYzx1yEgKcVbK0Q5qXJBjCQdqZ9/ySN0R9HSDW3m2+I6Q58sAMiU4hhzkIm/64GWAoJHqwZFX3aoJd4kYx0sgbWwvItn00UEmU6Q0AW2ZugV+95J3GRMuusq2IKdYOHFR+b538HWN8sk4nMKIRKaJDdFChBWmIF/ASDXwd29tM+nTHU/UNYTQTLp8J4JNtbbUbTADQ5Fy7xCiO0/UVZQju5iReJi0aqFZAHR6gSqMfWFEfISGjliCdt1Eo93YbBL+A66GgSgKX7vGWw0Q46yFzc0uQhfAIcwvT+wa5ZI6L50wJSr/TY1NF4prV/sTKRbeUGtOcO0MOpC1ToRxC+LTfmLYsxFBTg7uYFPDWOkHxwGkUTBCpDD5gs1PgAPekdJ5fiWVt1TdYqIuFexoe9+V,iv:zW+4q1dRbz8WYtDWoHXZMrdyBS+lbmgc/kLvaxluOKU=,tag:lg7uOWcUPXK1BCl6jVV7dg==,type:str]
ssh_host_rsa_key.pub: ENC[AES256_GCM,data:Gqk5+cDBsYg84d5Y5vowhnPyGncW3bycpeZAsuclUbiET5z9nVzK2CT06ktQb+MHN8jytc7RfME0c2uk3lQxtFRqxmSYcE0fhM8Lg047eEIswRTYpW+54m2WQL4WYsfZorMRPiWEdt0m7l5dZAC7tOmplGo/ZJxtkhPmf8M142yYjoCgk5Fv24GQkuYh4tKwVYYfoVem1ALE64tl1PXT20uVzvq7kVUJ0Ge3UyEY1zLpg61O2N/3tQQ2FqlstulKHtXjwFkeERUhju7sIuL9VskHKwYbG2JJxlTY5XWMDFPU3Ey3VECsBwjxfcGspp8bP9KdNICsL8pSxkT/clKqkDY6AwW4/C8QXLsrijLbe4M9f9QK1KwunBBRj3nk0DXYySYrn++GyZJRPASyhALyBGf2cMNcbHhB+ARGqhxixUyCmXx+vDqvFe8SzuVGKXGm16uP02ZKgdUVwPByWFydqOISibtJiZxyEyWNCLKpYCiZMoZchZv5PZ/jdZe4kv5YBtK3SfNeyWxPijR5ye+JORPPhjBQJLtX5xtknWF/z2SmJu6iMIw4cjQvkNREVtGj/C2xsRPNhNeP+gpk0iy/VEWj6Mk/I2lI+23jbX1m/JElsj4ig/vnUOhZnI5FNvGjfavnTvVeKaXVAjnWk04rxF2zsxm5H91xSh6qUWe1fFBdgzG2KW4d69Kmi3I9zPo9p6GwqkKEHJdXd6KmLbSoPmfP22Gv6vYbFWl5NKKfPiimw+IXLAGyzz8gWHk7cLA4J9vGSiaxbdLkvbAbvdZJ9K97LmyYsFVzAuz75UbLqyGZysn18OceoSo3nTEiy/WCIjfXsKu95/lYOd6fycfDF+gpkp8ejw3C0JRiZ939CFpG/MrUpCQrgzz9xgKpcTy5w0yWt30naZGlVZ0kcAjv/Bd2X0ON/eEdBrq/MnJHaMDOuuVe9iA+lNVqhFMqEsemOUSJm7R1Ttug4PFdiaMYUg==,iv:9DD76j3rDz+KFw6BmC4mVfhfgadjCR1DXytfV6dKeHY=,tag:Z7Akx72UnALXIcdUIrYWpw==,type:str]
harmonia-key: ENC[AES256_GCM,data:pZObqfbLogp0DYs47Tg2STKT9HptPSiP4sgcf31FD68PKSWhkgJbdY3gO/pfa0zsnvZTrAiljR8Ugh/x9z70T/XhjgZ/dIKqtcrGw0or9WPDmVzD4UHYm6iWR30MZLa9EBK0GFInlcSa/g==,iv:9HRnOaqP1iKMyyRX7evl6woZgfw9h4t7mBD98v/iBng=,tag:MQDio//aEOAOTVWlgADYDQ==,type:str]
matrix-server-key: ENC[AES256_GCM,data:0148ezOFk8jX5KPQPCG0jQK9ajSfe/iOdUqlvys5/M8DrIwPXH9GzrkknwH+l8kF9ViTRDC/q5md8J2bj3/FBR/RW4rwjDrYx9cBEFm8wjHrywUlwON8kNKtj9ycJmXgtRyCrVGv7sBmODy0ZC5ZfWbhIQh6xWBkX2/rsSh4zwi/1PoHLpOO3u4=,iv:IwHPDi1E3R9LAY/seGpvx1U+N8mB9NMrUjLg4KMA1UA=,tag:pwRJ/CqkFN2eedrnMAaj2w==,type:str]
registration-secret: ENC[AES256_GCM,data:EvPearZAxxb2irZFYgvy/tFA72h+IABuzwCbvy94IYR0eoHjuYw6GBde8CNUWG4SUiwyXJr4v438o/YThDhehsZ/cZFjg2o=,iv:ogN4/Iia5Zl95a3HP1KZoy86K8LyBFYw50cZUpkDNQo=,tag:5wU2OrNi7b5gWPfFZcGLjg==,type:str]
gitea-buildbot-user: ENC[AES256_GCM,data:GsSP6YMfFoaYslLwceRh9OU6lNYUWQnpTi6Fazyxz/NF8bpy3wbYe+I8P1OlE50rpQ==,iv:ZFnFwXBXZc8c3Q60ZnG7WgcLXQNV9iUhjQxfu3w1lh0=,tag:6WlZkgwA4YY1C3VOEAx4Ww==,type:str]
gitea-actions-runner: ENC[AES256_GCM,data:JKXAa7J1V3GH8lp3UtHTBmiezJlqxX1ItHLE7UcaIeNFQH8We2imaOMVftMpVCeXTpRX,iv:W9+4wH4asw3+w28i5om0OcJFHrABC85bhjhbgGWEs8E=,tag:Rf9XBeiEoJ1Pt8Z1TDIyJA==,type:str]
merge-bot-gitea-token: ENC[AES256_GCM,data:ULHcaNSYJwMVeeEq4bSiRcVRuUkE9fFUV0AkWW1wM0yHQtD+dmo1GcQ=,iv:dujDWGZ+seoVN8Eez1w3tUuMpGeOHtNLMaa+f2hOpAo=,tag:WoDTsZegC6rrbh7ygWSk+A==,type:str]
clan-bot-gitea-token: ENC[AES256_GCM,data:J+8AuAT50Xh4lKUWmigZQ/QBfNuaNKJDVuPj6jAOx06XZDwLEFtE8R8=,iv:8OGDcHbGfv6SOxe6+UBU7rTNgzYJYNJtUysSLao6H50=,tag:LxzSogjPBlxIrPcsgRU2Zw==,type:str]
clan-bot-ssh-key: ENC[AES256_GCM,data:mQAzPbzFt/FIIEo5ThXINN2FXsRMrBs/+1x/p0jDbNGLYt2LdMuYXQFevlYAK59fObNn/U6y/dheqKjtkX6BdBTLJgUKXktsJSeudsZPZ1OMtvzL0xxgx5+8Q0R+Er7BZ4ZVMpc9rolNtUojU/9gWMCALcVXz7FqGTtAYe8SEWCYinO7oP4JmIjWYnJBfPKXwq79uRp3y6dUnRLOL1Q7hCo7qFttCSssTF6HT+NJBVeAQvQ5wnWlfu/T0b7n4HB3GKfkei8Sh1ydW9k3kjFA34balXfEp3C/HHmzZO2/RWdFf45NN7itVIZhw04nrmFIW6mXaqhgJk/us9aqWFeUTTPH+nIDQfRWzcDddxrb8ZJC+/znLzX06wP18fmCHfIffyYPKcMrNxS2BKFZabybombbTYGnRV0a4881bvQLD+ScOYavYpEUTDnOg7SRQCuyDm2TtRQ5+wSUQCosYg7vTKr3btM5sV8PgDti4ou0t7YSt9bbnXv0s8/jRT+x7X7gWtw1MspJ1dWhH+FpyoPwDC4fAqjfTdcCkBFc,iv:1i3W/KWJCVG4F5uFDBttRyG9Z3BdyRa6XlkrkPNWkXQ=,tag:FjqH1Sfo1+1ALuUWAvrjyg==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age17n64ahe3wesh8l8lj0zylf4nljdmqn28hvqns2g7hgm9mdkhlsvsjuvkxz
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqMjVzYUQ5N1R4d1ZNbnlu
RlRMT3BCZXdoT3FTRlE3MDFzUUxIcStidGk4ClhmeHRDVUJyM1ErNWxaZ3hUTWFz
RkFTK0dmNm4vZ0FXNnlXbzNCZHhkZFUKLS0tIFI5OWU0QytzdmRWSDBsV0xZZkFT
emdIWmVJQnFKeEpEZzBwcmU3TzNDd3MKtn0T52DL+q1LN7KNlBU0qnsh2Osjgwhh
dQn5njsoO0NZ5S/NHiSri7mWNrLji1eJAI9WxENy0yagpdgoT4L7gw==
-----END AGE ENCRYPTED FILE-----
- recipient: age1eq0e6uhjj2tja8v338tkdz8ema2aw5anpuyaq2uru7rt4lq7msyqqut6m2
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBNeHhZMlg5VlpCKzhuZkdD
WFpaMXVzeTV5TlM2NEE5KzB1VDZac3A3Y1NNCjJ4eFpxODltdTlqZmw1RE9DTVJU
ZmxMS3B2bGkyWUpkR3ptNXV5eEdSVTgKLS0tIHRuSHF0WU1OUFZjbUdWZHE2NUI0
cHo4eGdaQXdxQ0xOaVpKam5jZllHMXMK8ZDeRJjhrDur0ou1f5fbMJHOWjG2DqNi
UklTTKasabzT9X/wJCEpcm8inhQnJpX5F4mnLczBZyS1p3PmKZ6DgQ==
-----END AGE ENCRYPTED FILE-----
- recipient: age1vphy2sr6uw4ptsua3gh9khrm2cqyt65t46tusmt44z98qa7q6ymq6prrdl
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSArSXI1MHVWQmtPQksyaE9j
TGxJbzJKTHFwVXVybTFRTTdtOUp0SG9BOEdnCjUrOFRrQ2dYc2VCR2E3NEE5TmQ3
Y3h2VDNHcXlXSVJVWXZCcUFwK0dRMWcKLS0tIGJsNmtHaDhoNUhrL1o4OHNEYnhw
RGk2NjlMS3doaG85N0h6VHg4Y1R0cEkKqkkyARc0Q+E9I98gYUfdmCiyAwSb/D9P
VpFJNC9R3dHU1YR1O/4/qfsF9DbnvSPxxkgKsDiVjpClnHtLIzkiMg==
-----END AGE ENCRYPTED FILE-----
- recipient: age17xuvz0fqtynzdmf8rfh4g3e46tx8w3mc6zgytrmuj5v9dhnldgxs7ue7ct
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvTnIzWUJZVTNUSkVOT3Vo
TU1WMU5QYzJ6a0hlUlJqNjJRVmQ3ZGZsZkRBCk9HL2JSY0JCVkNNQkhYL1Y4WHdY
MWx3YjdmTGFlcVVLNWdhMldEc2kvWVkKLS0tIFAyRHR0NkNQaVJ0L21Tck5UcUU4
TGk4dUlwcE9XWWIzZE1nQXdXcWY0V0kKJi5yXdrsEOP4Z8K6k/sPA7yadNPKQtzo
Iyt//Y+Y7n55KwuO8Doogu42SiVTUhHDICM9lezQmcugFqCoh3Lk4A==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2024-05-01T09:44:24Z"
mac: ENC[AES256_GCM,data:jH1w5Xk9aAHQreykHiG9PMfljaWO5tm0rIWx1avLntbGVs7Ov1kIuAQ1U8otLMmjI3vA1QXGRMTJFoODqNEMxpBvER60dPPtkwkgnSYE1v9C88PFp3xBDeryrh4aLE9PKxZcY9kf9f7anZ8p1+FL7iYo25pDygD+bHvT/y+qM1k=,iv:L0oI5D5jq4n0x5KsveotGc91+M+Y7EVO6UIzLFfgW98=,tag:vTekW9SRjkdJkIJqcoXa5Q==,type:str]
pgp: []
unencrypted_suffix: _unencrypted
version: 3.8.1

View File

@ -4,31 +4,31 @@ resource "hetznerdns_zone" "server" {
}
resource "hetznerdns_record" "root_a" {
zone_id = hetznerdns_zone.server.id
name = "@"
type = "A"
value = var.ipv4_address
zone_id = hetznerdns_zone.server.id
name = "@"
type = "A"
value = var.ipv4_address
}
resource "hetznerdns_record" "root_aaaa" {
zone_id = hetznerdns_zone.server.id
name = "@"
type = "AAAA"
value = var.ipv6_address
zone_id = hetznerdns_zone.server.id
name = "@"
type = "AAAA"
value = var.ipv6_address
}
resource "hetznerdns_record" "wildcard_a" {
zone_id = hetznerdns_zone.server.id
name = "*"
type = "A"
value = var.ipv4_address
zone_id = hetznerdns_zone.server.id
name = "*"
type = "A"
value = var.ipv4_address
}
resource "hetznerdns_record" "wildcard_aaaa" {
zone_id = hetznerdns_zone.server.id
name = "*"
type = "AAAA"
value = var.ipv6_address
zone_id = hetznerdns_zone.server.id
name = "*"
type = "AAAA"
value = var.ipv6_address
}
# for sending emails

View File

@ -1,8 +1,8 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p coreutils sops openssh nix
# shellcheck shell=bash
# shellcheck disable=SC1008,SC1128
set -euox pipefail
if [[ -z "${HOST:-}" ]]; then
echo "HOST is not set"
@ -12,6 +12,10 @@ if [[ -z "${FLAKE_ATTR:-}" ]]; then
echo "FLAKE_ATTR is not set"
exit 1
fi
if [[ -z "${SOPS_SECRETS_FILE:-}" ]]; then
echo "SOPS_SECRETS_FILE is not set"
exit 1
fi
tmp=$(mktemp -d)
trap 'rm -rf $tmp' EXIT
@ -24,11 +28,11 @@ for keyname in ssh_host_rsa_key ssh_host_rsa_key.pub ssh_host_ed25519_key ssh_ho
else
umask 0177
fi
clan secrets get "$keyname" > "$tmp/etc/ssh/$keyname"
sops --extract '["'$keyname'"]' -d "$SOPS_SECRETS_FILE" > "$tmp/etc/ssh/$keyname"
done
umask 0177
clan secrets get "initrd_ssh_key" > "$tmp/var/lib/secrets/initrd_ssh_key"
sops --extract '["initrd_ssh_key"]' -d "$SOPS_SECRETS_FILE" > "$tmp/var/lib/secrets/initrd_ssh_key"
# restore umask
umask 0022
@ -36,7 +40,7 @@ ssh "root@$HOST" "modprobe dm-raid && modprobe dm-integrity"
nix run --refresh github:numtide/nixos-anywhere -- \
--debug \
--disk-encryption-keys /tmp/secret.key <(clan secrets get cryptsetup_key) \
--disk-encryption-keys /tmp/secret.key <(sops --extract '["cryptsetup_key"]' --decrypt "$SOPS_SECRETS_FILE") \
--extra-files "$tmp" \
--flake "$FLAKE_ATTR" \
"root@$HOST"

View File

@ -0,0 +1,78 @@
#!/bin/sh
# shellcheck disable=SC1091
set -eu
installNix() {
if ! command -v nix >/dev/null; then
echo "Installing Nix..."
trap 'rm -f /tmp/nix-install' EXIT
if command -v curl; then
curl -L https://nixos.org/nix/install >/tmp/nix-install
elif command -v wget; then
wget -O /tmp/nix-install https://nixos.org/nix/install
else
echo "Please install curl or wget"
exit 1
fi
sh /tmp/nix-install --daemon --yes
fi
set +u
. /etc/profile
set -u
}
patchOsRelease() {
cat >/etc/os-release <<EOF
ID=nixos
VARIANT_ID=installer
EOF
}
installTools() {
env=$(
cat <<EOF
with import <nixpkgs> {};
buildEnv {
name = "install-tools";
paths = [
nix
nixos-install-tools
parted
mdadm
xfsprogs
dosfstools
btrfs-progs
e2fsprogs
jq
util-linux
];
}
EOF
)
tools=$(nix-build --no-out-link -E "$env")
# check if /usr/local/bin is in PATH
if ! echo "$PATH" | grep -q /usr/local/bin; then
echo "WARNING: /usr/local/bin is not in PATH" >&2
fi
mkdir -p /usr/local/bin
for i in "$tools/bin/"*; do
ln -sf "$i" /usr/local/bin
done
}
applyHetznerZfsQuirk() {
if test -f /etc/hetzner-build; then
# Hetzner has dummy binaries here for zfs,
# however those won't work and even crashed the system.
rm -f /usr/local/sbin/zfs /usr/local/sbin/zpool /usr/local/sbin/zdb
fi
}
installNix
patchOsRelease
installTools
applyHetznerZfsQuirk

View File

@ -1,23 +0,0 @@
From 43b15f8757a7f8de0340cc977ff9619741a5d43f Mon Sep 17 00:00:00 2001
From: Brian McGee <brian@bmcgee.ie>
Date: Mon, 1 Jul 2024 14:20:22 +0100
Subject: [PATCH] fix: configure toml key for global excludes
Signed-off-by: Brian McGee <brian@bmcgee.ie>
---
config/config.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/config/config.go b/config/config.go
index d9e281f..be5c991 100644
--- a/config/config.go
+++ b/config/config.go
@@ -10,7 +10,7 @@ import (
type Config struct {
Global struct {
// Excludes is an optional list of glob patterns used to exclude certain files from all formatters.
- Excludes []string
+ Excludes []string `toml:"excludes"`
}
Formatters map[string]*Formatter `toml:"formatter"`
}