Compare commits
No commits in common. "main" and "Qubasa-main" have entirely different histories.
main
...
Qubasa-mai
17
LICENSE.md
17
LICENSE.md
|
@ -1,18 +1,7 @@
|
||||||
Copyright 2023 Clan contributers
|
Copyright 2023 Clan contributers
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
|
||||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
|
||||||
subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
|
||||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
|
||||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
|
||||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
||||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
|
|
10
README.md
10
README.md
|
@ -4,10 +4,8 @@ This repository contains nixos modules and terraform code that powers clan.lol.
|
||||||
The website and git hosting is currently on [hetzner](https://www.hetzner.com/).
|
The website and git hosting is currently on [hetzner](https://www.hetzner.com/).
|
||||||
|
|
||||||
## Servers
|
## Servers
|
||||||
|
|
||||||
- web01:
|
- web01:
|
||||||
- Instance type:
|
- Instance type: [ex101](https://www.hetzner.com/de/dedicated-rootserver/ex101)
|
||||||
[ex101](https://www.hetzner.com/de/dedicated-rootserver/ex101)
|
|
||||||
- CPU: Intel Core i9-13900 (24 cores / 32 threads)
|
- CPU: Intel Core i9-13900 (24 cores / 32 threads)
|
||||||
- RAM: 64GB DDR5
|
- RAM: 64GB DDR5
|
||||||
- Drives: 2 x 1.92 TB NVME
|
- Drives: 2 x 1.92 TB NVME
|
||||||
|
@ -28,7 +26,5 @@ $ ./tf.sh apply
|
||||||
|
|
||||||
## To add a new project to CI
|
## To add a new project to CI
|
||||||
|
|
||||||
1. Add the 'buildbot-clan' topic to the repository using the "Manage topics"
|
1. Add the 'buildbot-clan' topic to the repository using the "Manage topics" button below the project description
|
||||||
button below the project description
|
2. Go to https://buildbot.clan.lol/#/builders/2 and press "Update projects" after you have logged in.
|
||||||
2. Go to https://buildbot.clan.lol/#/builders/2 and press "Update projects"
|
|
||||||
after you have logged in.
|
|
||||||
|
|
108
flake.lock
108
flake.lock
|
@ -29,11 +29,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719797756,
|
"lastModified": 1718502800,
|
||||||
"narHash": "sha256-TGZthxgxLdT8boadFm6+MK7HZlIxN1u1V+x3hu+Fd8I=",
|
"narHash": "sha256-Arnuj2v9HCrmV9ZU5fln/MoKhQfICO6o9ia8xQ386CY=",
|
||||||
"owner": "Mic92",
|
"owner": "Mic92",
|
||||||
"repo": "buildbot-nix",
|
"repo": "buildbot-nix",
|
||||||
"rev": "0b56574a5c823097771487d1bac952c3549fe9fb",
|
"rev": "c3b59dac3ee3b4c1dd9cabb2f850e2d8bcfaf417",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -59,11 +59,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1720009622,
|
"lastModified": 1718900431,
|
||||||
"narHash": "sha256-uA0FvklUt4M3yrNaSecCFgxXjnQZY8mmafLTuGSdUmU=",
|
"narHash": "sha256-iEpESD8Hywek3lkGgvTjG5C25UTaAAjnqX9R0lIvhSI=",
|
||||||
"rev": "fa41f94ae751b654088bb8f268f5dc0f4bb323fe",
|
"rev": "b3123b150ff7a287d36efd1cce29bd4d1e7e4d86",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/fa41f94ae751b654088bb8f268f5dc0f4bb323fe.tar.gz"
|
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/b3123b150ff7a287d36efd1cce29bd4d1e7e4d86.tar.gz"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
|
@ -78,11 +78,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718846788,
|
"lastModified": 1717915259,
|
||||||
"narHash": "sha256-9dtXYtEkmXoUJV+PGLqscqF7qTn4AIhAKpFWRFU2NYs=",
|
"narHash": "sha256-VsGPboaleIlPELHY5cNTrXK4jHVmgUra8uC6h7KVC5c=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "disko",
|
"repo": "disko",
|
||||||
"rev": "e1174d991944a01eaaa04bc59c6281edca4c0e6e",
|
"rev": "1bbdb06f14e2621290b250e631cf3d8948e4d19b",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -113,11 +113,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719994518,
|
"lastModified": 1717285511,
|
||||||
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
|
"narHash": "sha256-iKzJcpdXih14qYVcZ9QC9XuZYnPc6T8YImb6dX166kw=",
|
||||||
"owner": "hercules-ci",
|
"owner": "hercules-ci",
|
||||||
"repo": "flake-parts",
|
"repo": "flake-parts",
|
||||||
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
|
"rev": "2a55567fcf15b1b1c7ed712a2c6fadaec7412ea8",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -144,23 +144,35 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"nixlib": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1712450863,
|
||||||
|
"narHash": "sha256-K6IkdtMtq9xktmYPj0uaYc8NsIqHuaAoRBaMgu9Fvrw=",
|
||||||
|
"owner": "nix-community",
|
||||||
|
"repo": "nixpkgs.lib",
|
||||||
|
"rev": "3c62b6a12571c9a7f65ab037173ee153d539905f",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-community",
|
||||||
|
"repo": "nixpkgs.lib",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"nixos-generators": {
|
"nixos-generators": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixlib": [
|
"nixlib": "nixlib",
|
||||||
"clan-core",
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"clan-core",
|
"clan-core",
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718025593,
|
"lastModified": 1716210724,
|
||||||
"narHash": "sha256-WZ1gdKq/9u1Ns/oXuNsDm+W0salonVA0VY1amw8urJ4=",
|
"narHash": "sha256-iqQa3omRcHGpWb1ds75jS9ruA5R39FTmAkeR3J+ve1w=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "nixos-generators",
|
"repo": "nixos-generators",
|
||||||
"rev": "35c20ba421dfa5059e20e0ef2343c875372bdcf3",
|
"rev": "d14b286322c7f4f897ca4b1726ce38cb68596c94",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -180,11 +192,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718845599,
|
"lastModified": 1717770332,
|
||||||
"narHash": "sha256-HbQ0iKohKJC5grC95HNjLxGPdgsc/BJgoENDYNbzkLo=",
|
"narHash": "sha256-NQmFHj0hTCUgnMAsaNTu6sNTRyo0rFQEe+/lVgV5yxU=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "nixos-images",
|
"repo": "nixos-images",
|
||||||
"rev": "c1e6a5f7b08f1c9993de1cfc5f15f838bf783b88",
|
"rev": "72771bd35f4e19e32d6f652528483b5e07fc317b",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -202,14 +214,17 @@
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
],
|
],
|
||||||
"nixpkgs-24_05": []
|
"nixpkgs-24_05": "nixpkgs-24_05",
|
||||||
|
"utils": [
|
||||||
|
"flake-utils"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718697807,
|
"lastModified": 1718084203,
|
||||||
"narHash": "sha256-Enla61WFisytTYbWygPynEbu8vozjeGc6Obkj2GRj7o=",
|
"narHash": "sha256-Cx1xoVfSMv1XDLgKg08CUd1EoTYWB45VmB9XIQzhmzI=",
|
||||||
"owner": "simple-nixos-mailserver",
|
"owner": "simple-nixos-mailserver",
|
||||||
"repo": "nixos-mailserver",
|
"repo": "nixos-mailserver",
|
||||||
"rev": "290a995de5c3d3f08468fa548f0d55ab2efc7b6b",
|
"rev": "29916981e7b3b5782dc5085ad18490113f8ff63b",
|
||||||
"type": "gitlab"
|
"type": "gitlab"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -220,11 +235,11 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719931832,
|
"lastModified": 1718396522,
|
||||||
"narHash": "sha256-0LD+KePCKKEb4CcPsTBOwf019wDtZJanjoKm1S8q3Do=",
|
"narHash": "sha256-C0re6ZtCqC1ndL7ib7vOqmgwvZDhOhJ1W0wQgX1tTIo=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "0aeab749216e4c073cece5d34bc01b79e717c3e0",
|
"rev": "3e6b9369165397184774a4b7c5e8e5e46531b53f",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -234,6 +249,21 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"nixpkgs-24_05": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1717144377,
|
||||||
|
"narHash": "sha256-F/TKWETwB5RaR8owkPPi+SPJh83AQsm6KrQAlJ8v/uA=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "805a384895c696f802a9bf5bf4720f37385df547",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "nixpkgs",
|
||||||
|
"ref": "nixos-24.05",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
},
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"buildbot-nix": "buildbot-nix",
|
"buildbot-nix": "buildbot-nix",
|
||||||
|
@ -258,11 +288,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719111739,
|
"lastModified": 1717902109,
|
||||||
"narHash": "sha256-kr2QzRrplzlCP87ddayCZQS+dhGW98kw2zy7+jUXtF4=",
|
"narHash": "sha256-OQTjaEZcByyVmHwJlKp/8SE9ikC4w+mFd3X0jJs6wiA=",
|
||||||
"owner": "Mic92",
|
"owner": "Mic92",
|
||||||
"repo": "sops-nix",
|
"repo": "sops-nix",
|
||||||
"rev": "5e2e9421e9ed2b918be0a441c4535cfa45e04811",
|
"rev": "f0922ad001829b400f0160ba85b47d252fa3d925",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -278,11 +308,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719965291,
|
"lastModified": 1718585173,
|
||||||
"narHash": "sha256-IQiO6VNESSmgxQkpI1q86pqxRw0SZ45iSeM1jsmBpSw=",
|
"narHash": "sha256-G5DB6D3p8ucyGfmWt3JmiWcVW55DeuUoiT230wQ9Am4=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "srvos",
|
"repo": "srvos",
|
||||||
"rev": "1844f1a15ef530c963bb07c3846172fccbfb9f74",
|
"rev": "c607ffef7c234d88f37ed12d75b2c48de3f4b3fe",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -313,11 +343,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719887753,
|
"lastModified": 1718522839,
|
||||||
"narHash": "sha256-p0B2r98UtZzRDM5miGRafL4h7TwGRC4DII+XXHDHqek=",
|
"narHash": "sha256-ULzoKzEaBOiLRtjeY3YoGFJMwWSKRYOic6VNw2UyTls=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "treefmt-nix",
|
"repo": "treefmt-nix",
|
||||||
"rev": "bdb6355009562d8f9313d9460c0d3860f525bc6c",
|
"rev": "68eb1dc333ce82d0ab0c0357363ea17c31ea1f81",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
|
21
flake.nix
21
flake.nix
|
@ -18,7 +18,7 @@
|
||||||
nixos-mailserver = {
|
nixos-mailserver = {
|
||||||
url = "gitlab:simple-nixos-mailserver/nixos-mailserver";
|
url = "gitlab:simple-nixos-mailserver/nixos-mailserver";
|
||||||
inputs.nixpkgs.follows = "nixpkgs";
|
inputs.nixpkgs.follows = "nixpkgs";
|
||||||
inputs.nixpkgs-24_05.follows = "";
|
inputs.utils.follows = "flake-utils";
|
||||||
inputs.flake-compat.follows = "flake-compat";
|
inputs.flake-compat.follows = "flake-compat";
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -58,29 +58,12 @@
|
||||||
lib,
|
lib,
|
||||||
self',
|
self',
|
||||||
system,
|
system,
|
||||||
pkgs,
|
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
{
|
{
|
||||||
treefmt = {
|
treefmt = {
|
||||||
package = pkgs.treefmt.overrideAttrs (_old: {
|
|
||||||
# https://github.com/numtide/treefmt/pull/325
|
|
||||||
patches = [ ./treefmt-config.patch ];
|
|
||||||
});
|
|
||||||
projectRootFile = ".git/config";
|
projectRootFile = ".git/config";
|
||||||
programs.terraform.enable = true;
|
programs.hclfmt.enable = true;
|
||||||
programs.shellcheck.enable = true;
|
|
||||||
|
|
||||||
programs.deno.enable = true;
|
|
||||||
settings.global.excludes = [
|
|
||||||
# generated files
|
|
||||||
"sops/*"
|
|
||||||
"terraform.tfstate"
|
|
||||||
"*.tfvars.sops.json"
|
|
||||||
"*nixos-vars.json"
|
|
||||||
"secrets.yaml"
|
|
||||||
];
|
|
||||||
|
|
||||||
programs.nixfmt-rfc-style.enable = true;
|
programs.nixfmt-rfc-style.enable = true;
|
||||||
settings.formatter.nixfmt-rfc-style.excludes = [
|
settings.formatter.nixfmt-rfc-style.excludes = [
|
||||||
# generated files
|
# generated files
|
||||||
|
|
|
@ -27,7 +27,6 @@
|
||||||
self.nixosModules.buildbot
|
self.nixosModules.buildbot
|
||||||
inputs.srvos.nixosModules.mixins-nginx
|
inputs.srvos.nixosModules.mixins-nginx
|
||||||
inputs.srvos.nixosModules.mixins-nix-experimental
|
inputs.srvos.nixosModules.mixins-nix-experimental
|
||||||
./matrix-bot.nix
|
|
||||||
./web01
|
./web01
|
||||||
inputs.nixos-mailserver.nixosModules.mailserver
|
inputs.nixos-mailserver.nixosModules.mailserver
|
||||||
./mailserver.nix
|
./mailserver.nix
|
||||||
|
|
|
@ -1,49 +0,0 @@
|
||||||
{
|
|
||||||
config,
|
|
||||||
pkgs,
|
|
||||||
self,
|
|
||||||
...
|
|
||||||
}:
|
|
||||||
|
|
||||||
let
|
|
||||||
name = "matrix-bot";
|
|
||||||
in
|
|
||||||
{
|
|
||||||
users.groups.matrix-bot-user = { };
|
|
||||||
users.users.matrix-bot-user = {
|
|
||||||
group = "matrix-bot-user";
|
|
||||||
isSystemUser = true;
|
|
||||||
description = "User for matrix-bot service";
|
|
||||||
home = "/var/lib/matrix-bot";
|
|
||||||
createHome = true;
|
|
||||||
};
|
|
||||||
|
|
||||||
systemd.services.${name} = {
|
|
||||||
path = [ self.packages.${pkgs.system}.matrix-bot ];
|
|
||||||
description = "Matrix bot for changelog and reviews";
|
|
||||||
after = [ "network.target" ];
|
|
||||||
wantedBy = [ "multi-user.target" ];
|
|
||||||
environment = {
|
|
||||||
MATRIX_PASSWORD_FILE = "%d/MATRIX_PASSWORD_FILE";
|
|
||||||
OPENAI_API_KEY_FILE = "%d/OPENAI_API_KEY_FILE";
|
|
||||||
HOME = "/var/lib/${name}";
|
|
||||||
};
|
|
||||||
|
|
||||||
serviceConfig = {
|
|
||||||
LoadCredential = [
|
|
||||||
"MATRIX_PASSWORD_FILE:${config.sops.secrets.web01-matrix-password-clan-bot.path}"
|
|
||||||
"OPENAI_API_KEY_FILE:${config.sops.secrets.qubasas-openai-api-key.path}"
|
|
||||||
];
|
|
||||||
User = "matrix-bot-user";
|
|
||||||
Group = "matrix-bot-user";
|
|
||||||
WorkingDirectory = "/var/lib/${name}";
|
|
||||||
RuntimeDirectory = "/var/lib/${name}";
|
|
||||||
};
|
|
||||||
|
|
||||||
script = ''
|
|
||||||
set -euxo pipefail
|
|
||||||
|
|
||||||
mbot --changelog-room "!FdCwyKsRlfooNYKYzx:matrix.org" --review-room "!tmSRJlbsVXFUKAddiM:gchq.icu"
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
}
|
|
|
@ -8,23 +8,4 @@
|
||||||
};
|
};
|
||||||
clan.matrix-synapse.users.monitoring = { };
|
clan.matrix-synapse.users.monitoring = { };
|
||||||
clan.matrix-synapse.users.clan-bot = { };
|
clan.matrix-synapse.users.clan-bot = { };
|
||||||
|
|
||||||
# Rate limiting settings
|
|
||||||
# we need to up this to be able to support matrix bots
|
|
||||||
services.matrix-synapse.settings = {
|
|
||||||
rc_login = {
|
|
||||||
address = {
|
|
||||||
per_second = 20;
|
|
||||||
burst_count = 200;
|
|
||||||
};
|
|
||||||
account = {
|
|
||||||
per_second = 20;
|
|
||||||
burst_count = 200;
|
|
||||||
};
|
|
||||||
failed_attempts = {
|
|
||||||
per_second = 3;
|
|
||||||
burst_count = 15;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,10 +5,10 @@ set -euo pipefail
|
||||||
export KEEP_VARS="GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_COMMITTER_NAME GIT_COMMITTER_EMAIL GITEA_URL GITEA_USER PR_TITLE REMOTE_BRANCH REPO_DIR${KEEP_VARS:+ $KEEP_VARS}"
|
export KEEP_VARS="GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_COMMITTER_NAME GIT_COMMITTER_EMAIL GITEA_URL GITEA_USER PR_TITLE REMOTE_BRANCH REPO_DIR${KEEP_VARS:+ $KEEP_VARS}"
|
||||||
|
|
||||||
# configure variables for actions
|
# configure variables for actions
|
||||||
today=$(date --iso-8601)
|
PR_TITLE="Automatic flake update - $(date --iso-8601=minutes)"
|
||||||
today_minutes=$(date --iso-8601=minutes)
|
export PR_TITLE
|
||||||
export PR_TITLE="Automatic flake update - ${today_minutes}"
|
REMOTE_BRANCH="flake-update-$(date --iso-8601)"
|
||||||
export REMOTE_BRANCH="flake-update-${today}"
|
export REMOTE_BRANCH
|
||||||
export REPO_DIR=$TMPDIR/repo
|
export REPO_DIR=$TMPDIR/repo
|
||||||
export GIT_AUTHOR_NAME="Clan Merge Bot"
|
export GIT_AUTHOR_NAME="Clan Merge Bot"
|
||||||
export GIT_AUTHOR_EMAIL="clan-bot@git.clan.lol"
|
export GIT_AUTHOR_EMAIL="clan-bot@git.clan.lol"
|
||||||
|
|
|
@ -1,8 +1,5 @@
|
||||||
{
|
{
|
||||||
imports = [
|
imports = [ ./clan-merge/flake-module.nix ];
|
||||||
./clan-merge/flake-module.nix
|
|
||||||
./matrix-bot/flake-module.nix
|
|
||||||
];
|
|
||||||
perSystem =
|
perSystem =
|
||||||
{ pkgs, config, ... }:
|
{ pkgs, config, ... }:
|
||||||
{
|
{
|
||||||
|
|
|
@ -1,6 +0,0 @@
|
||||||
source_up
|
|
||||||
|
|
||||||
watch_file flake-module.nix shell.nix default.nix
|
|
||||||
|
|
||||||
# Because we depend on nixpkgs sources, uploading to builders takes a long time
|
|
||||||
use flake .#matrix-bot --builders ''
|
|
3
pkgs/matrix-bot/.gitignore
vendored
3
pkgs/matrix-bot/.gitignore
vendored
|
@ -1,3 +0,0 @@
|
||||||
*.json
|
|
||||||
**/data
|
|
||||||
**/__pycache__
|
|
|
@ -1,13 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
sys.path.insert(
|
|
||||||
0, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
|
||||||
)
|
|
||||||
|
|
||||||
from matrix_bot import main # NOQA
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
|
|
|
@ -1,42 +0,0 @@
|
||||||
{
|
|
||||||
python3,
|
|
||||||
setuptools,
|
|
||||||
matrix-nio,
|
|
||||||
aiofiles,
|
|
||||||
aiohttp,
|
|
||||||
markdown2,
|
|
||||||
git,
|
|
||||||
...
|
|
||||||
}:
|
|
||||||
|
|
||||||
let
|
|
||||||
|
|
||||||
pythonDependencies = [
|
|
||||||
matrix-nio
|
|
||||||
aiofiles
|
|
||||||
aiohttp
|
|
||||||
markdown2
|
|
||||||
];
|
|
||||||
|
|
||||||
runtimeDependencies = [ git ];
|
|
||||||
|
|
||||||
testDependencies = pythonDependencies ++ runtimeDependencies ++ [ ];
|
|
||||||
in
|
|
||||||
python3.pkgs.buildPythonApplication {
|
|
||||||
name = "matrix-bot";
|
|
||||||
src = ./.;
|
|
||||||
format = "pyproject";
|
|
||||||
|
|
||||||
nativeBuildInputs = [ setuptools ];
|
|
||||||
|
|
||||||
propagatedBuildInputs = pythonDependencies ++ runtimeDependencies;
|
|
||||||
|
|
||||||
passthru.testDependencies = testDependencies;
|
|
||||||
|
|
||||||
# Clean up after the package to avoid leaking python packages into a devshell
|
|
||||||
postFixup = ''
|
|
||||||
rm $out/nix-support/propagated-build-inputs
|
|
||||||
'';
|
|
||||||
|
|
||||||
meta.mainProgram = "matrix-bot";
|
|
||||||
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
{ ... }:
|
|
||||||
{
|
|
||||||
perSystem =
|
|
||||||
{ self', pkgs, ... }:
|
|
||||||
{
|
|
||||||
|
|
||||||
devShells.matrix-bot = pkgs.callPackage ./shell.nix { inherit (self'.packages) matrix-bot; };
|
|
||||||
packages = {
|
|
||||||
matrix-bot = pkgs.python3.pkgs.callPackage ./default.nix { };
|
|
||||||
};
|
|
||||||
|
|
||||||
checks = { };
|
|
||||||
};
|
|
||||||
}
|
|
|
@ -1,169 +0,0 @@
|
||||||
import argparse
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from os import environ
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from matrix_bot.custom_logger import setup_logging
|
|
||||||
from matrix_bot.gitea import GiteaData
|
|
||||||
from matrix_bot.main import bot_main
|
|
||||||
from matrix_bot.matrix import MatrixData
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
curr_dir = Path(__file__).parent
|
|
||||||
data_dir = Path(os.getcwd()) / "data"
|
|
||||||
|
|
||||||
|
|
||||||
def create_parser(prog: str | None = None) -> argparse.ArgumentParser:
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
prog=prog,
|
|
||||||
description="A gitea bot for matrix",
|
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--debug",
|
|
||||||
help="Enable debug logging",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--server",
|
|
||||||
help="The matrix server to connect to",
|
|
||||||
default="https://matrix.clan.lol",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--user",
|
|
||||||
help="The matrix user to connect as",
|
|
||||||
default="@clan-bot:clan.lol",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--avatar",
|
|
||||||
help="The path to the image to use as the avatar",
|
|
||||||
default=curr_dir / "avatar.png",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--repo-owner",
|
|
||||||
help="The owner of gitea the repository",
|
|
||||||
default="clan",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--repo-name",
|
|
||||||
help="The name of the repository",
|
|
||||||
default="clan-core",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--changelog-room",
|
|
||||||
help="The matrix room to join for the changelog bot",
|
|
||||||
default="#bot-test:gchq.icu",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--review-room",
|
|
||||||
help="The matrix room to join for the review bot",
|
|
||||||
default="#bot-test:gchq.icu",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--changelog-frequency",
|
|
||||||
help="The frequency to check for changelog updates in days",
|
|
||||||
default=7,
|
|
||||||
type=int,
|
|
||||||
)
|
|
||||||
|
|
||||||
def valid_weekday(value: str) -> str:
|
|
||||||
days = [
|
|
||||||
"Monday",
|
|
||||||
"Tuesday",
|
|
||||||
"Wednesday",
|
|
||||||
"Thursday",
|
|
||||||
"Friday",
|
|
||||||
"Saturday",
|
|
||||||
"Sunday",
|
|
||||||
]
|
|
||||||
if value not in days:
|
|
||||||
raise argparse.ArgumentTypeError(
|
|
||||||
f"{value} is not a valid weekday. Choose from {', '.join(days)}"
|
|
||||||
)
|
|
||||||
return value
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--publish-day",
|
|
||||||
help="The day of the week to publish the changelog. Ignored if changelog-frequency is less than 7 days.",
|
|
||||||
default="Wednesday",
|
|
||||||
type=valid_weekday,
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--gitea-url",
|
|
||||||
help="The gitea url to connect to",
|
|
||||||
default="https://git.clan.lol",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--data-dir",
|
|
||||||
help="The directory to store data",
|
|
||||||
default=data_dir,
|
|
||||||
type=Path,
|
|
||||||
)
|
|
||||||
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
def matrix_password() -> str:
|
|
||||||
matrix_password = environ.get("MATRIX_PASSWORD")
|
|
||||||
if matrix_password is not None:
|
|
||||||
return matrix_password
|
|
||||||
matrix_password_file = environ.get("MATRIX_PASSWORD_FILE", default=None)
|
|
||||||
if matrix_password_file is None:
|
|
||||||
raise Exception("MATRIX_PASSWORD_FILE environment variable is not set")
|
|
||||||
with open(matrix_password_file) as f:
|
|
||||||
return f.read().strip()
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
|
||||||
parser = create_parser()
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
if args.debug:
|
|
||||||
setup_logging(logging.DEBUG, root_log_name=__name__.split(".")[0])
|
|
||||||
log.debug("Debug log activated")
|
|
||||||
else:
|
|
||||||
setup_logging(logging.INFO, root_log_name=__name__.split(".")[0])
|
|
||||||
|
|
||||||
matrix = MatrixData(
|
|
||||||
server=args.server,
|
|
||||||
user=args.user,
|
|
||||||
avatar=args.avatar,
|
|
||||||
changelog_room=args.changelog_room,
|
|
||||||
changelog_frequency=args.changelog_frequency,
|
|
||||||
publish_day=args.publish_day,
|
|
||||||
review_room=args.review_room,
|
|
||||||
password=matrix_password(),
|
|
||||||
)
|
|
||||||
|
|
||||||
gitea = GiteaData(
|
|
||||||
url=args.gitea_url,
|
|
||||||
owner=args.repo_owner,
|
|
||||||
repo=args.repo_name,
|
|
||||||
access_token=os.getenv("GITEA_ACCESS_TOKEN"),
|
|
||||||
)
|
|
||||||
|
|
||||||
args.data_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
try:
|
|
||||||
asyncio.run(bot_main(matrix, gitea, args.data_dir))
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
print("User Interrupt", file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
|
@ -1,4 +0,0 @@
|
||||||
from . import main
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
Binary file not shown.
Before Width: | Height: | Size: 105 KiB |
|
@ -1,214 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import shlex
|
|
||||||
import subprocess
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
from nio import (
|
|
||||||
AsyncClient,
|
|
||||||
JoinResponse,
|
|
||||||
)
|
|
||||||
|
|
||||||
from matrix_bot.gitea import (
|
|
||||||
GiteaData,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .locked_open import read_locked_file, write_locked_file
|
|
||||||
from .matrix import MatrixData, send_message
|
|
||||||
from .openai import create_jsonl_data, upload_and_process_file
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def last_ndays_to_today(ndays: int) -> (str, str):
|
|
||||||
# Get today's date
|
|
||||||
today = datetime.datetime.now()
|
|
||||||
|
|
||||||
# Calculate the date one week ago
|
|
||||||
last_week = today - datetime.timedelta(days=ndays)
|
|
||||||
|
|
||||||
# Format both dates to "YYYY-MM-DD"
|
|
||||||
todate = today.strftime("%Y-%m-%d")
|
|
||||||
fromdate = last_week.strftime("%Y-%m-%d")
|
|
||||||
|
|
||||||
return (fromdate, todate)
|
|
||||||
|
|
||||||
|
|
||||||
def write_file_with_date_prefix(
|
|
||||||
content: str, directory: Path, *, ndays: int, suffix: str
|
|
||||||
) -> Path:
|
|
||||||
"""
|
|
||||||
Write content to a file with the current date as filename prefix.
|
|
||||||
|
|
||||||
:param content: The content to write to the file.
|
|
||||||
:param directory: The directory where the file will be saved.
|
|
||||||
:return: The path to the created file.
|
|
||||||
"""
|
|
||||||
# Ensure the directory exists
|
|
||||||
directory.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
# Get the current date
|
|
||||||
fromdate, todate = last_ndays_to_today(ndays)
|
|
||||||
|
|
||||||
# Create the filename
|
|
||||||
filename = f"{fromdate}__{todate}_{suffix}.txt"
|
|
||||||
file_path = directory / filename
|
|
||||||
|
|
||||||
# Write the content to the file
|
|
||||||
with open(file_path, "w") as file:
|
|
||||||
file.write(content)
|
|
||||||
|
|
||||||
return file_path
|
|
||||||
|
|
||||||
|
|
||||||
async def git_pull(repo_path: Path) -> None:
|
|
||||||
cmd = ["git", "pull"]
|
|
||||||
log.debug(f"Running command: {shlex.join(cmd)}")
|
|
||||||
process = await asyncio.create_subprocess_exec(
|
|
||||||
*cmd,
|
|
||||||
cwd=str(repo_path),
|
|
||||||
)
|
|
||||||
await process.wait()
|
|
||||||
|
|
||||||
|
|
||||||
async def git_log(repo_path: str, ndays: int) -> str:
|
|
||||||
cmd = [
|
|
||||||
"git",
|
|
||||||
"log",
|
|
||||||
f"--since={ndays} days ago",
|
|
||||||
"--pretty=format:%h - %an, %ar : %s",
|
|
||||||
"--stat",
|
|
||||||
"--patch",
|
|
||||||
]
|
|
||||||
log.debug(f"Running command: {shlex.join(cmd)}")
|
|
||||||
process = await asyncio.create_subprocess_exec(
|
|
||||||
*cmd,
|
|
||||||
cwd=repo_path,
|
|
||||||
stdout=asyncio.subprocess.PIPE,
|
|
||||||
stderr=asyncio.subprocess.PIPE,
|
|
||||||
)
|
|
||||||
stdout, stderr = await process.communicate()
|
|
||||||
|
|
||||||
if process.returncode != 0:
|
|
||||||
raise Exception(
|
|
||||||
f"Command '{' '.join(cmd)}' failed with exit code {process.returncode}"
|
|
||||||
)
|
|
||||||
|
|
||||||
return stdout.decode()
|
|
||||||
|
|
||||||
|
|
||||||
async def changelog_bot(
    client: AsyncClient,
    http: aiohttp.ClientSession,
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """Generate and post a periodic changelog to the Matrix changelog room.

    Decides whether a changelog is due (configured publish weekday plus the
    changelog frequency window), clones/updates the target git repository,
    summarizes the recent history through the OpenAI batch API, and posts the
    result to the room.

    :param client: Logged-in Matrix client used to join the room and post.
    :param http: Shared aiohttp session used for the OpenAI requests.
    :param matrix: Matrix configuration (room, publish day, frequency).
    :param gitea: Gitea repository coordinates to clone and summarize.
    :param data_dir: Directory holding state files, the clone, and results.
    """
    # Persistent record of the last run; used to compute when we are next due.
    last_run_path = data_dir / "last_changelog_run.json"
    last_run = read_locked_file(last_run_path)

    if last_run == {}:
        # First run: initialize the date window, but still only publish on the
        # configured weekday.
        fromdate, todate = last_ndays_to_today(matrix.changelog_frequency)
        last_run = {
            "fromdate": fromdate,
            "todate": todate,
            "ndays": matrix.changelog_frequency,
        }
        log.debug(f"First run. Setting last_run to {last_run}")
        today = datetime.datetime.now()
        today_weekday = today.strftime("%A")
        if today_weekday != matrix.publish_day:
            log.debug(f"Changelog not due yet. Due on {matrix.publish_day}")
            return
    else:
        # Subsequent runs: require both that the frequency window has elapsed
        # since the recorded "todate" AND that today is the publish weekday.
        last_date = datetime.datetime.strptime(last_run["todate"], "%Y-%m-%d")
        today = datetime.datetime.now()
        today_weekday = today.strftime("%A")
        delta = datetime.timedelta(days=matrix.changelog_frequency)
        if today - last_date <= delta:
            log.debug(f"Changelog not due yet. Due in {delta.days} days")
            return
        elif today_weekday != matrix.publish_day:
            log.debug(f"Changelog not due yet. Due on {matrix.publish_day}")
            return

    # Joining is how the bot enters the room the first time; it also succeeds
    # if the bot is already a member.
    room: JoinResponse = await client.join(matrix.changelog_room)

    if not room.transport_response.ok:
        log.error("This can happen if the room doesn't exist or the bot isn't invited")
        raise Exception(f"Failed to join room {room}")

    repo_path = data_dir / gitea.repo

    # Clone the repository on first use; subsequent runs only pull.
    if not repo_path.exists():
        cmd = [
            "git",
            "clone",
            f"{gitea.url}/{gitea.owner}/{gitea.repo}.git",
            gitea.repo,
        ]
        subprocess.run(cmd, cwd=data_dir, check=True)

    # git pull
    await git_pull(repo_path)

    # git log (commits, stats and patches for the frequency window)
    diff = await git_log(repo_path, matrix.changelog_frequency)

    fromdate, todate = last_ndays_to_today(matrix.changelog_frequency)
    log.info(f"Generating changelog from {fromdate} to {todate}")

    # System prompt steering the LLM toward a compact changelog with clickable
    # pull-request links.
    system_prompt = f"""
Create a concise changelog for the {matrix.changelog_frequency}.
Follow these guidelines:

- The header should include the date range from {fromdate} to {todate}
- Use present tense
- Keep the summary brief
- Follow commit message format: "scope: message (#number)"
- Link pull requests as: '{gitea.url}/{gitea.owner}/{gitea.repo}/pulls/<number>'
- Use markdown links to make the pull request number clickable
- Mention each scope and pull request number only once
- Have these headers in the changelog if applicable:
- New Features
- Documentation
- Refactoring
- Bug Fixes
- Other Changes

Changelog:
---
"""

    # Step 1: Create the JSONL file
    jsonl_data = await create_jsonl_data(user_prompt=diff, system_prompt=system_prompt)

    # Step 2: Upload the JSONL file and process it
    results = await upload_and_process_file(session=http, jsonl_data=jsonl_data)

    # Write the results to a file in the changelogs directory
    result_file = write_file_with_date_prefix(
        json.dumps(results, indent=4),
        data_dir / "changelogs",
        ndays=matrix.changelog_frequency,
        suffix="result",
    )
    log.info(f"LLM result written to: {result_file}")

    # Join responses together: one changelog chunk per batch result, with the
    # chunks separated by blank lines.
    all_changelogs = []
    for result in results:
        choices = result["response"]["body"]["choices"]
        changelog = "\n".join(choice["message"]["content"] for choice in choices)
        all_changelogs.append(changelog)
    full_changelog = "\n\n".join(all_changelogs)

    # Record this run so the next invocation can compute the due date.
    write_locked_file(last_run_path, last_run)
    log.info(f"Changelog generated:\n{full_changelog}")

    await send_message(client, room, full_changelog)
@ -1,97 +0,0 @@
|
||||||
import inspect
|
|
||||||
import logging
|
|
||||||
from collections.abc import Callable
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
grey = "\x1b[38;20m"
|
|
||||||
yellow = "\x1b[33;20m"
|
|
||||||
red = "\x1b[31;20m"
|
|
||||||
bold_red = "\x1b[31;1m"
|
|
||||||
green = "\u001b[32m"
|
|
||||||
blue = "\u001b[34m"
|
|
||||||
|
|
||||||
|
|
||||||
def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.Formatter]:
    """Return a formatter factory that colors the level name with `color`.

    The returned callable builds a logging.Formatter for a single record; when
    `with_location` is true the format also appends the record's source
    location, abbreviating paths under the home directory with "~".
    """

    def build(record: logging.LogRecord, with_location: bool) -> logging.Formatter:
        reset = "\x1b[0m"
        base = f"{color}%(levelname)s{reset}: %(message)s"

        if not with_location:
            return logging.Formatter(base)

        # Prefer a "~"-relative path; fall back to the raw pathname when the
        # record does not point inside the home directory.
        try:
            filepath = Path(record.pathname).resolve()
            filepath = Path("~", filepath.relative_to(Path.home()))
        except Exception:
            filepath = Path(record.pathname)

        return logging.Formatter(
            f"{base}\nLocation: {filepath}:%(lineno)d::%(funcName)s\n"
        )

    return build
# Per-level formatter factories: DEBUG is blue, INFO green, WARNING yellow,
# ERROR red, CRITICAL bold red. Keyed by the numeric logging level.
FORMATTER = {
    logging.DEBUG: get_formatter(blue),
    logging.INFO: get_formatter(green),
    logging.WARNING: get_formatter(yellow),
    logging.ERROR: get_formatter(red),
    logging.CRITICAL: get_formatter(bold_red),
}
class CustomFormatter(logging.Formatter):
    """Dispatch formatting to the per-level colored formatter in FORMATTER."""

    def __init__(self, log_locations: bool) -> None:
        super().__init__()
        # Whether to append "file:line::function" to each formatted record.
        self.log_locations = log_locations

    def format(self, record: logging.LogRecord) -> str:
        formatter = FORMATTER[record.levelno](record, self.log_locations)
        return formatter.format(record)
class ThreadFormatter(logging.Formatter):
    """Per-level colored formatter that never appends source locations."""

    def format(self, record: logging.LogRecord) -> str:
        formatter = FORMATTER[record.levelno](record, False)
        return formatter.format(record)
def get_caller() -> str:
    """Return "file:line::function" describing the caller of this function's caller.

    Paths under the home directory are abbreviated with "~". Returns
    "unknown" when the interpreter cannot provide the needed stack frames.
    """
    frame = inspect.currentframe()
    # Walk two frames up: past get_caller itself and past its direct caller.
    for _ in range(2):
        if frame is None:
            return "unknown"
        frame = frame.f_back
    if frame is None:
        return "unknown"
    frame_info = inspect.getframeinfo(frame)

    try:
        filepath = Path(frame_info.filename).resolve()
        filepath = Path("~", filepath.relative_to(Path.home()))
    except Exception:
        filepath = Path(frame_info.filename)

    return f"{filepath}:{frame_info.lineno}::{frame_info.function}"
def setup_logging(level: Any, root_log_name: str = __name__.split(".")[0]) -> None:
    """Configure the application's root logger with the colored formatter.

    :param level: Level applied to both the logger and its stream handler.
    :param root_log_name: Logger name to configure; defaults to this package.
    """
    root = logging.getLogger(root_log_name)
    root.setLevel(level)

    # Stream handler using the colored formatter; source locations are only
    # included when running at DEBUG level.
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(CustomFormatter(str(level) == str(logging.DEBUG)))
    root.addHandler(handler)

    # Quieten chatty loggers used by this module's dependencies.
    logging.getLogger("asyncio").setLevel(logging.INFO)
    logging.getLogger("httpx").setLevel(level=logging.WARNING)
|
@ -1,88 +0,0 @@
|
||||||
import logging
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class GiteaData:
    # Base URL of the Gitea instance (no trailing slash).
    url: str
    # Owner (user or organization) of the repository.
    owner: str
    # Repository name.
    repo: str
    # Optional API token; sent as "Authorization: token <...>" when set.
    access_token: str | None = None
def endpoint_url(gitea: GiteaData, endpoint: str) -> str:
    """Build the Gitea v1 API URL for `endpoint` on the configured repository."""
    base = f"{gitea.url}/api/v1/repos/{gitea.owner}/{gitea.repo}"
    return f"{base}/{endpoint}"
async def fetch_repo_labels(
    gitea: GiteaData,
    session: aiohttp.ClientSession,
) -> list[dict]:
    """
    Fetch labels from a Gitea repository.

    Returns:
        list: List of labels in the repository.
    """
    url = endpoint_url(gitea, "labels")
    headers = {"Accept": "application/vnd.github.v3+json"}
    if gitea.access_token:
        headers["Authorization"] = f"token {gitea.access_token}"

    async with session.get(url, headers=headers) as response:
        if response.status == 200:
            return await response.json()
        # You may want to handle different statuses differently
        raise Exception(
            f"Failed to fetch labels: {response.status}, {await response.text()}"
        )
class PullState(Enum):
    """Filter values accepted by Gitea's pull-request listing endpoint."""

    OPEN = "open"
    CLOSED = "closed"
    ALL = "all"
async def fetch_pull_requests(
    gitea: GiteaData,
    session: aiohttp.ClientSession,
    *,
    limit: int,
    state: PullState,
    label_ids: list[int] | None = None,
) -> list[dict]:
    """
    Fetch pull requests from a Gitea repository, most recently updated first.

    :param limit: Maximum number of pull requests to return.
    :param state: Which pull-request states to include.
    :param label_ids: Optional label ids to filter by (empty filter when None).
        The default used to be a shared mutable list (`= []`); use None to
        avoid the mutable-default pitfall.
    :return: List of pull-request objects as returned by the API.
    :raises Exception: On a non-200 response.
    """
    url = endpoint_url(gitea, "pulls")
    params = {
        "state": state.value,
        "sort": "recentupdate",
        "limit": limit,
        "labels": label_ids if label_ids is not None else [],
    }
    headers = {"accept": "application/json"}

    async with session.get(url, params=params, headers=headers) as response:
        if response.status != 200:
            # You may want to handle different statuses differently
            raise Exception(
                f"Failed to fetch pull requests: {response.status}, {await response.text()}"
            )
        # Previously this result was misleadingly named "labels".
        pulls: list[dict] = await response.json()
        return pulls
@ -1,31 +0,0 @@
|
||||||
import fcntl
|
|
||||||
import json
|
|
||||||
from collections.abc import Generator
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def locked_open(filename: str | Path, mode: str = "r") -> Generator:
|
|
||||||
"""
|
|
||||||
This is a context manager that provides an advisory write lock on the file specified by `filename` when entering the context, and releases the lock when leaving the context. The lock is acquired using the `fcntl` module's `LOCK_EX` flag, which applies an exclusive write lock to the file.
|
|
||||||
"""
|
|
||||||
with open(filename, mode) as fd:
|
|
||||||
fcntl.flock(fd, fcntl.LOCK_EX)
|
|
||||||
yield fd
|
|
||||||
fcntl.flock(fd, fcntl.LOCK_UN)
|
|
||||||
|
|
||||||
|
|
||||||
def write_locked_file(path: Path, data: dict[str, Any]) -> None:
    """Serialize `data` as pretty-printed JSON to `path` under an exclusive lock."""
    serialized = json.dumps(data, indent=4)
    with locked_open(path, "w+") as f:
        f.write(serialized)
def read_locked_file(path: Path) -> dict[str, Any]:
    """Read and parse the JSON file at `path` under an exclusive lock.

    Returns an empty dict when the file does not exist.
    """
    if not path.exists():
        return {}
    with locked_open(path, "r") as f:
        raw = f.read()
    data: dict[str, Any] = json.loads(raw)
    return data
@ -1,54 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
from nio import AsyncClient, ClientConfig, ProfileGetAvatarResponse, RoomMessageText
|
|
||||||
|
|
||||||
from .changelog_bot import changelog_bot
|
|
||||||
from .gitea import GiteaData
|
|
||||||
from .matrix import MatrixData, set_avatar, upload_image
|
|
||||||
from .review_bot import message_callback, review_requested_bot
|
|
||||||
|
|
||||||
|
|
||||||
async def bot_main(
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """Log into Matrix, ensure the bot has an avatar, and run the bot loop.

    Runs the changelog bot and the review-request bot every five minutes
    until an exception escapes; the Matrix client is always closed on exit.

    :param matrix: Matrix server/account configuration.
    :param gitea: Gitea repository configuration handed to the bots.
    :param data_dir: Directory for persistent bot state.
    """
    # Setup client configuration to handle encryption
    client_config = ClientConfig(
        encryption_enabled=False,
    )

    log.info(f"Connecting to {matrix.server} as {matrix.user}")
    client = AsyncClient(matrix.server, matrix.user, config=client_config)
    client.add_event_callback(message_callback, RoomMessageText)

    result = await client.login(matrix.password)
    if not result.transport_response.ok:
        log.critical(f"Failed to login: {result}")
        # NOTE(review): exit(1) terminates without awaiting client.close();
        # consider raising instead so cleanup runs.
        exit(1)
    log.info(f"Logged in as {result}")

    # Upload and set an avatar once; skip when the account already has one.
    avatar: ProfileGetAvatarResponse = await client.get_avatar()
    if not avatar.avatar_url:
        mxc_url = await upload_image(client, matrix.avatar)
        log.info(f"Uploaded avatar to {mxc_url}")
        await set_avatar(client, mxc_url)
    else:
        log.info(f"Bot already has an avatar {avatar.avatar_url}")

    try:
        async with aiohttp.ClientSession() as session:
            # Main loop: run both bots, then sleep five minutes.
            while True:
                await changelog_bot(client, session, matrix, gitea, data_dir)
                await review_requested_bot(client, session, matrix, gitea, data_dir)
                await asyncio.sleep(60 * 5)
    except Exception as e:
        log.exception(e)
    finally:
        await client.close()
@ -1,88 +0,0 @@
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
from dataclasses import dataclass
|
|
||||||
|
|
||||||
from markdown2 import markdown
|
|
||||||
from nio import (
|
|
||||||
AsyncClient,
|
|
||||||
JoinedMembersResponse,
|
|
||||||
JoinResponse,
|
|
||||||
ProfileSetAvatarResponse,
|
|
||||||
RoomMember,
|
|
||||||
RoomSendResponse,
|
|
||||||
UploadResponse,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def upload_image(client: AsyncClient, image_path: str) -> str:
    """Upload the PNG at `image_path` to the homeserver and return its MXC URL.

    :raises Exception: If the upload response is not OK.
    """
    with open(image_path, "rb") as fh:
        response: UploadResponse
        response, _ = await client.upload(fh, content_type="image/png")
        if not response.transport_response.ok:
            raise Exception(f"Failed to upload image {response}")
        # content_uri is the mxc:// URL referencing the uploaded media.
        return response.content_uri
async def set_avatar(client: AsyncClient, mxc_url: str) -> None:
    """Set the bot account's avatar to the given MXC URL.

    :raises Exception: If the profile update response is not OK.
    """
    response: ProfileSetAvatarResponse = await client.set_avatar(mxc_url)
    if not response.transport_response.ok:
        raise Exception(f"Failed to set avatar {response}")
async def get_room_members(client: AsyncClient, room: JoinResponse) -> list[RoomMember]:
    """Return the joined members of `room`.

    :raises Exception: If the membership query response is not OK.
    """
    members: JoinedMembersResponse = await client.joined_members(room.room_id)
    if not members.transport_response.ok:
        raise Exception(f"Failed to get users {members}")
    return members.members
async def send_message(
    client: AsyncClient,
    room: JoinResponse,
    message: str,
    user_ids: list[str] | None = None,
) -> None:
    """
    Send a message in a Matrix room, optionally mentioning users.

    The markdown `message` is rendered to HTML for the formatted body; when
    `user_ids` is given, matrix.to mention links are prepended to it.
    """
    html_body = markdown(message)
    if user_ids:
        links = (
            f"<a href='https://matrix.to/#/{user_id}'>{user_id}</a>"
            for user_id in user_ids
        )
        html_body = f"{', '.join(links)}: {html_body}"

    # Mentions go out as m.text (triggers notifications); plain posts as m.notice.
    content = {
        "msgtype": "m.text" if user_ids else "m.notice",
        "format": "org.matrix.custom.html",
        "body": message,
        "formatted_body": html_body,
    }

    res: RoomSendResponse = await client.room_send(
        room_id=room.room_id, message_type="m.room.message", content=content
    )

    if not res.transport_response.ok:
        raise Exception(f"Failed to send message {res}")
@dataclass
class MatrixData:
    # Homeserver URL, e.g. "https://matrix.example.org".
    server: str
    # Fully qualified Matrix user id the bot logs in as.
    user: str
    # Path to the PNG used as the bot's avatar.
    avatar: Path
    # Login password for the bot account.
    password: str
    # Room id/alias where generated changelogs are posted.
    changelog_room: str
    # Room id/alias where review-request pings are posted.
    review_room: str
    # Changelog window length in days.
    changelog_frequency: int
    # Weekday name (e.g. "Monday") on which the changelog is published.
    publish_day: str
@ -1,129 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from os import environ
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# The URL to which the request is sent
|
|
||||||
url: str = "https://api.openai.com/v1/chat/completions"
|
|
||||||
|
|
||||||
|
|
||||||
def api_key() -> str:
    """Return the OpenAI API key.

    Reads OPENAI_API_KEY first; otherwise reads (and strips) the contents of
    the file named by OPENAI_API_KEY_FILE.

    :raises Exception: If neither environment variable is set.
    """
    key = environ.get("OPENAI_API_KEY")
    if key is not None:
        return key

    key_file = environ.get("OPENAI_API_KEY_FILE", default=None)
    if key_file is None:
        raise Exception("OPENAI_API_KEY_FILE environment variable is not set")
    with open(key_file) as f:
        return f.read().strip()
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
|
|
||||||
async def create_jsonl_data(
    *,
    user_prompt: str,
    system_prompt: str,
    model: str = "gpt-4o",
    max_tokens: int = 1000,
) -> bytes:
    """Build one OpenAI Batch-API request line as UTF-8 encoded JSON.

    :param user_prompt: The user-role message content.
    :param system_prompt: The system-role message content.
    :param model: Chat model to request.
    :param max_tokens: Completion token limit for the request.
    :return: The serialized request object (a single JSONL line, no newline).
    """
    # Key order matches the Batch API examples: custom_id, method, url, body.
    body = {
        "model": model,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
        "max_tokens": max_tokens,
    }
    request = {
        "custom_id": "request-1",
        "method": "POST",
        "url": "/v1/chat/completions",
        "body": body,
    }
    return json.dumps(request).encode("utf-8")
|
|
||||||
# Keep a module-level handle; the parameter below shadows the name `api_key`.
_get_api_key = api_key


async def upload_and_process_file(
    *, session: aiohttp.ClientSession, jsonl_data: bytes, api_key: str | None = None
) -> list[dict[str, Any]]:
    """
    Upload a JSONL file to OpenAI's Batch API and process it asynchronously.

    :param session: Shared aiohttp session used for all requests.
    :param jsonl_data: Request payload in JSONL form (one JSON object per line).
    :param api_key: OpenAI API key; resolved lazily from the environment when
        omitted. (The old default `= api_key()` ran at import time and crashed
        the whole module import when no key was configured.)
    :return: Parsed per-request result objects from the batch output file.
        (The old annotation claimed a dict, but a list is returned.)
    :raises Exception: On any failed HTTP call or a non-completed batch.
    """
    if api_key is None:
        api_key = _get_api_key()

    # Step 1: Upload the JSONL payload as a file with purpose "batch".
    upload_url = "https://api.openai.com/v1/files"
    headers = {
        "Authorization": f"Bearer {api_key}",
    }
    data = aiohttp.FormData()
    data.add_field(
        "file", jsonl_data, filename="changelog.jsonl", content_type="application/jsonl"
    )
    data.add_field("purpose", "batch")

    async with session.post(upload_url, headers=headers, data=data) as response:
        if response.status != 200:
            raise Exception(f"File upload failed with status code {response.status}")
        upload_response = await response.json()
        file_id = upload_response.get("id")

    if not file_id:
        raise Exception("File ID not returned from upload")

    # Step 2: Create a batch using the uploaded file ID
    batch_url = "https://api.openai.com/v1/batches"
    batch_data = {
        "input_file_id": file_id,
        "endpoint": "/v1/chat/completions",
        "completion_window": "24h",
    }

    async with session.post(batch_url, headers=headers, json=batch_data) as response:
        if response.status != 200:
            raise Exception(f"Batch creation failed with status code {response.status}")
        batch_response = await response.json()
        batch_id = batch_response.get("id")

    if not batch_id:
        raise Exception("Batch ID not returned from creation")

    # Step 3: Poll the batch status until it reaches a terminal state.
    # NOTE(review): there is no overall timeout; a stuck batch blocks the
    # caller indefinitely.
    status_url = f"https://api.openai.com/v1/batches/{batch_id}"

    while True:
        async with session.get(status_url, headers=headers) as response:
            if response.status != 200:
                raise Exception(
                    f"Failed to check batch status with status code {response.status}"
                )
            status_response = await response.json()
            status = status_response.get("status")
            if status in ["completed", "failed", "expired"]:
                break
        await asyncio.sleep(10)  # Wait before checking again

    if status != "completed":
        raise Exception(f"Batch processing failed with status: {status}")

    # Step 4: Retrieve the results from the batch output file.
    output_file_id = status_response.get("output_file_id")
    output_url = f"https://api.openai.com/v1/files/{output_file_id}/content"

    async with session.get(output_url, headers=headers) as response:
        if response.status != 200:
            raise Exception(
                f"Failed to retrieve batch results with status code {response.status}"
            )

        # Read content as text
        content = await response.text()

    # Parse the content as JSONL: one result object per line.
    results = [json.loads(line) for line in content.splitlines()]
    return results
|
@ -1,90 +0,0 @@
|
||||||
import logging
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
import datetime
|
|
||||||
import time
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
from nio import (
|
|
||||||
AsyncClient,
|
|
||||||
JoinResponse,
|
|
||||||
MatrixRoom,
|
|
||||||
RoomMessageText,
|
|
||||||
)
|
|
||||||
|
|
||||||
from matrix_bot.gitea import (
|
|
||||||
GiteaData,
|
|
||||||
PullState,
|
|
||||||
fetch_pull_requests,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .locked_open import read_locked_file, write_locked_file
|
|
||||||
from .matrix import MatrixData, get_room_members, send_message
|
|
||||||
|
|
||||||
|
|
||||||
async def message_callback(room: MatrixRoom, event: RoomMessageText) -> None:
    """Log every incoming room message at debug level."""
    sender = room.user_name(event.sender)
    log.debug(f"Message received in room {room.display_name}\n{sender} | {event.body}")
|
|
||||||
async def review_requested_bot(
    client: AsyncClient,
    http: aiohttp.ClientSession,
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """Ping room members when a Gitea pull request requests their review.

    Fetches recent pull requests, matches each requested reviewer against the
    review room's members by lowercase display name, and posts a mention.
    Already-pinged pull requests are tracked in a state file (keyed by pull
    id with its last `updated_at`) so each update is announced only once.

    :param client: Logged-in Matrix client.
    :param http: aiohttp session for the Gitea API calls.
    :param matrix: Matrix configuration (provides the review room).
    :param gitea: Gitea repository to watch.
    :param data_dir: Directory holding the ping-history state file.
    """
    # If you made a new room and haven't joined as that user, you can use
    room: JoinResponse = await client.join(matrix.review_room)

    if not room.transport_response.ok:
        log.error("This can happen if the room doesn't exist or the bot isn't invited")
        raise Exception(f"Failed to join room {room}")

    # Get the members of the room
    users = await get_room_members(client, room)

    # Fetch the pull requests
    tstart = time.time()
    pulls = await fetch_pull_requests(gitea, http, limit=50, state=PullState.ALL)

    # Read the last updated pull request
    ping_hist_path = data_dir / "last_review_run.json"
    ping_hist = read_locked_file(ping_hist_path)

    # Check if the pull request is mergeable and needs review
    # and if the pull request is newer than the last updated pull request
    for pull in pulls:
        requested_reviewers = pull["requested_reviewers"]
        pid = str(pull["id"])
        if requested_reviewers and pull["mergeable"]:
            # Default to datetime.min so never-seen pulls always qualify.
            last_time_updated = ping_hist.get(pid, {}).get(
                "updated_at", datetime.datetime.min.isoformat()
            )
            if ping_hist == {} or pull["updated_at"] > last_time_updated:
                ping_hist[pid] = pull
            else:
                continue

            # Check if the requested reviewers are in the room
            # (matching is by lowercase display name vs. Gitea login).
            requested_reviewers = [r["login"].lower() for r in requested_reviewers]
            ping_users = []
            for user in users:
                if user.display_name.lower() in requested_reviewers:
                    ping_users.append(user.user_id)

            # Send a message to the room and mention the users
            log.info(f"Pull request {pull['title']} needs review")
            message = f"Review Requested:\n[{pull['title']}]({pull['html_url']})"
            await send_message(client, room, message, user_ids=ping_users)

    # Write the new last updated pull request
    write_locked_file(ping_hist_path, ping_hist)

    # Time taken
    tend = time.time()
    tdiff = round(tend - tstart)
    log.debug(f"Time taken: {tdiff}s")
|
@ -1,59 +0,0 @@
|
||||||
[build-system]
|
|
||||||
requires = ["setuptools"]
|
|
||||||
build-backend = "setuptools.build_meta"
|
|
||||||
|
|
||||||
[project]
|
|
||||||
name = "matrix-bot"
|
|
||||||
description = "matrix bot for release messages from git commits"
|
|
||||||
dynamic = ["version"]
|
|
||||||
scripts = { mbot = "matrix_bot:main" }
|
|
||||||
license = {text = "MIT"}
|
|
||||||
|
|
||||||
[project.urls]
|
|
||||||
Homepage = "https://clan.lol/"
|
|
||||||
Documentation = "https://docs.clan.lol/"
|
|
||||||
Repository = "https://git.clan.lol/clan/clan-core"
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
|
||||||
exclude = ["result"]
|
|
||||||
|
|
||||||
[tool.setuptools.package-data]
|
|
||||||
matrix_bot = ["py.typed"]
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
|
||||||
testpaths = "tests"
|
|
||||||
faulthandler_timeout = 60
|
|
||||||
log_level = "DEBUG"
|
|
||||||
log_format = "%(levelname)s: %(message)s\n %(pathname)s:%(lineno)d::%(funcName)s"
|
|
||||||
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --durations 5 --color=yes --new-first" # Add --pdb for debugging
|
|
||||||
norecursedirs = "tests/helpers"
|
|
||||||
markers = ["impure", "with_core"]
|
|
||||||
|
|
||||||
[tool.mypy]
|
|
||||||
python_version = "3.11"
|
|
||||||
warn_redundant_casts = true
|
|
||||||
disallow_untyped_calls = true
|
|
||||||
disallow_untyped_defs = true
|
|
||||||
no_implicit_optional = true
|
|
||||||
|
|
||||||
[[tool.mypy.overrides]]
|
|
||||||
module = "argcomplete.*"
|
|
||||||
ignore_missing_imports = true
|
|
||||||
|
|
||||||
[[tool.mypy.overrides]]
|
|
||||||
module = "ipdb.*"
|
|
||||||
ignore_missing_imports = true
|
|
||||||
|
|
||||||
[[tool.mypy.overrides]]
|
|
||||||
module = "pytest.*"
|
|
||||||
ignore_missing_imports = true
|
|
||||||
|
|
||||||
[[tool.mypy.overrides]]
|
|
||||||
module = "setuptools.*"
|
|
||||||
ignore_missing_imports = true
|
|
||||||
|
|
||||||
[tool.ruff]
|
|
||||||
target-version = "py311"
|
|
||||||
line-length = 88
|
|
||||||
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
|
|
||||||
lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003"]
|
|
|
@ -1,30 +0,0 @@
|
||||||
{
|
|
||||||
matrix-bot,
|
|
||||||
mkShell,
|
|
||||||
ruff,
|
|
||||||
python3,
|
|
||||||
}:
|
|
||||||
let
|
|
||||||
devshellTestDeps =
|
|
||||||
matrix-bot.passthru.testDependencies
|
|
||||||
++ (with python3.pkgs; [
|
|
||||||
rope
|
|
||||||
setuptools
|
|
||||||
wheel
|
|
||||||
ipdb
|
|
||||||
pip
|
|
||||||
]);
|
|
||||||
in
|
|
||||||
mkShell {
|
|
||||||
buildInputs = [ ruff ] ++ devshellTestDeps;
|
|
||||||
|
|
||||||
PYTHONBREAKPOINT = "ipdb.set_trace";
|
|
||||||
|
|
||||||
shellHook = ''
|
|
||||||
export GIT_ROOT="$(git rev-parse --show-toplevel)"
|
|
||||||
export PKG_ROOT="$GIT_ROOT/pkgs/matrix-bot"
|
|
||||||
|
|
||||||
# Add clan command to PATH
|
|
||||||
export PATH="$PKG_ROOT/bin":"$PATH"
|
|
||||||
'';
|
|
||||||
}
|
|
|
@ -1 +0,0 @@
|
||||||
../../../machines/web01
|
|
|
@ -1,24 +0,0 @@
|
||||||
{
|
|
||||||
"data": "ENC[AES256_GCM,data:iJTjs8bG2GLGnGp/Hf4Egtorrk87rkgh9Yn+gPuWAJ61wIAtN3g9SU3vyYpvRrIqHVUyLObGbrWYi3Ol07M=,iv:YTOctq9aw4tc9xwoOO4UbR2cYPHV0ZmuE1FRWn13sgk=,tag:zU3HFqxwZcn/9S02bj3/fA==,type:str]",
|
|
||||||
"sops": {
|
|
||||||
"kms": null,
|
|
||||||
"gcp_kms": null,
|
|
||||||
"azure_kv": null,
|
|
||||||
"hc_vault": null,
|
|
||||||
"age": [
|
|
||||||
{
|
|
||||||
"recipient": "age17xuvz0fqtynzdmf8rfh4g3e46tx8w3mc6zgytrmuj5v9dhnldgxs7ue7ct",
|
|
||||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBUa1FIbWt2aHduZVBlcGpq\nOUsybTRmR1I4M1JZY3A4ZUFpZEN3dlZCTkZRCmdVajFUcDMyeEdJMThVZElJdmlJ\naEhvSk9sYThXdkxoaXVLem15dlJMcm8KLS0tIEJsdFY4L0M3Q2cwdzFOdy9LN0k3\nOEdCM09PUWlZbE91U2ZYNmVHeU43bUUKC+z+6XZCiVfwGQQCAHoB+WGE5Mm3qJZq\nuyD5r3Ra6MAvvwIhnqbwadRoxVH1HcdIB6hJsNREE/x6YNLxi3T7nw==\n-----END AGE ENCRYPTED FILE-----\n"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"recipient": "age1zwte859d9nvg6wy5dugjkf38dqe8w8qkt2as7xcc5pw3285833xs797uan",
|
|
||||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFOHBub0J3U2FuOFNuS2xl\nRkExYVEvcmowWDlyUGlpYko3N1dIcmN5dmxJCkRtRm9qVVNQK3FQcTB1U3g3OHhS\ncU9XaHUxNWVlL2tpblpZUHF1UWt4am8KLS0tIGJOZkJ4eDJ6WWx1d2R1VG1qODZS\ncHhXMVhEUHdLZjIvNUYxRmduZkpjaU0KqZKUb9KYpSvwxaJRAbYhkuOdnzsU3p9Q\nU2WO5TIwS762yNqWTzyYdxb9YxvTOatW7uWTorRXZu1yqCTMTuq+1Q==\n-----END AGE ENCRYPTED FILE-----\n"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"lastmodified": "2024-07-03T09:47:40Z",
|
|
||||||
"mac": "ENC[AES256_GCM,data:dpdmkhedaqivzIlxhoWb+u77JmfWRo94iWDolAa9UKvnjBo1QE5sHbqWasCH81wjO0wPBPRUqnj9JQ7kG9AFp24Fad+gAp74Gwx5M/PSx1dsd6xkcxt6PJ8sFXGb0H3lYduCaNfDGgsJTVoDcbk8rgYzjo5+mxs2pqrrn10t4iU=,iv:MGMIq2rF4+hr89/dppi2JDVbpAShscYTMM9viHPepIY=,tag:Dj9B6qvAkmiUmgRvZ6B94Q==,type:str]",
|
|
||||||
"pgp": null,
|
|
||||||
"unencrypted_suffix": "_unencrypted",
|
|
||||||
"version": "3.8.1"
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1 +0,0 @@
|
||||||
../../../users/qubasa
|
|
|
@ -1,6 +1,5 @@
|
||||||
#!/usr/bin/env nix-shell
|
#!/usr/bin/env nix-shell
|
||||||
#!nix-shell -i bash -p coreutils sops openssh
|
#!nix-shell -i bash -p coreutils sops openssh
|
||||||
# shellcheck shell=bash
|
|
||||||
|
|
||||||
# shellcheck disable=SC1008,SC1128
|
# shellcheck disable=SC1008,SC1128
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
#!/usr/bin/env nix-shell
|
#!/usr/bin/env nix-shell
|
||||||
#!nix-shell -i bash -p nix jq bash rsync
|
#!nix-shell -i bash -p nix jq bash rsync
|
||||||
# shellcheck shell=bash
|
|
||||||
|
|
||||||
# shellcheck disable=SC1008,SC1128
|
# shellcheck disable=SC1008,SC1128
|
||||||
|
|
||||||
|
|
|
@ -4,31 +4,31 @@ resource "hetznerdns_zone" "server" {
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "hetznerdns_record" "root_a" {
|
resource "hetznerdns_record" "root_a" {
|
||||||
zone_id = hetznerdns_zone.server.id
|
zone_id = hetznerdns_zone.server.id
|
||||||
name = "@"
|
name = "@"
|
||||||
type = "A"
|
type = "A"
|
||||||
value = var.ipv4_address
|
value = var.ipv4_address
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "hetznerdns_record" "root_aaaa" {
|
resource "hetznerdns_record" "root_aaaa" {
|
||||||
zone_id = hetznerdns_zone.server.id
|
zone_id = hetznerdns_zone.server.id
|
||||||
name = "@"
|
name = "@"
|
||||||
type = "AAAA"
|
type = "AAAA"
|
||||||
value = var.ipv6_address
|
value = var.ipv6_address
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "hetznerdns_record" "wildcard_a" {
|
resource "hetznerdns_record" "wildcard_a" {
|
||||||
zone_id = hetznerdns_zone.server.id
|
zone_id = hetznerdns_zone.server.id
|
||||||
name = "*"
|
name = "*"
|
||||||
type = "A"
|
type = "A"
|
||||||
value = var.ipv4_address
|
value = var.ipv4_address
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "hetznerdns_record" "wildcard_aaaa" {
|
resource "hetznerdns_record" "wildcard_aaaa" {
|
||||||
zone_id = hetznerdns_zone.server.id
|
zone_id = hetznerdns_zone.server.id
|
||||||
name = "*"
|
name = "*"
|
||||||
type = "AAAA"
|
type = "AAAA"
|
||||||
value = var.ipv6_address
|
value = var.ipv6_address
|
||||||
}
|
}
|
||||||
|
|
||||||
# for sending emails
|
# for sending emails
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
#!/usr/bin/env nix-shell
|
#!/usr/bin/env nix-shell
|
||||||
#!nix-shell -i bash -p coreutils sops openssh nix
|
#!nix-shell -i bash -p coreutils sops openssh nix
|
||||||
|
|
||||||
# shellcheck shell=bash
|
|
||||||
# shellcheck disable=SC1008,SC1128
|
# shellcheck disable=SC1008,SC1128
|
||||||
|
set -euox pipefail
|
||||||
|
|
||||||
if [[ -z "${HOST:-}" ]]; then
|
if [[ -z "${HOST:-}" ]]; then
|
||||||
echo "HOST is not set"
|
echo "HOST is not set"
|
||||||
|
|
78
terraform/web01/nixosify.sh
Normal file
78
terraform/web01/nixosify.sh
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
# shellcheck disable=SC1091
|
||||||
|
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
installNix() {
|
||||||
|
if ! command -v nix >/dev/null; then
|
||||||
|
echo "Installing Nix..."
|
||||||
|
trap 'rm -f /tmp/nix-install' EXIT
|
||||||
|
if command -v curl; then
|
||||||
|
curl -L https://nixos.org/nix/install >/tmp/nix-install
|
||||||
|
elif command -v wget; then
|
||||||
|
wget -O /tmp/nix-install https://nixos.org/nix/install
|
||||||
|
else
|
||||||
|
echo "Please install curl or wget"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
sh /tmp/nix-install --daemon --yes
|
||||||
|
fi
|
||||||
|
set +u
|
||||||
|
. /etc/profile
|
||||||
|
set -u
|
||||||
|
}
|
||||||
|
|
||||||
|
patchOsRelease() {
|
||||||
|
cat >/etc/os-release <<EOF
|
||||||
|
ID=nixos
|
||||||
|
VARIANT_ID=installer
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
installTools() {
|
||||||
|
env=$(
|
||||||
|
cat <<EOF
|
||||||
|
with import <nixpkgs> {};
|
||||||
|
buildEnv {
|
||||||
|
name = "install-tools";
|
||||||
|
paths = [
|
||||||
|
nix
|
||||||
|
nixos-install-tools
|
||||||
|
parted
|
||||||
|
mdadm
|
||||||
|
xfsprogs
|
||||||
|
dosfstools
|
||||||
|
btrfs-progs
|
||||||
|
e2fsprogs
|
||||||
|
jq
|
||||||
|
util-linux
|
||||||
|
];
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
tools=$(nix-build --no-out-link -E "$env")
|
||||||
|
|
||||||
|
# check if /usr/local/bin is in PATH
|
||||||
|
if ! echo "$PATH" | grep -q /usr/local/bin; then
|
||||||
|
echo "WARNING: /usr/local/bin is not in PATH" >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p /usr/local/bin
|
||||||
|
for i in "$tools/bin/"*; do
|
||||||
|
ln -sf "$i" /usr/local/bin
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
applyHetznerZfsQuirk() {
|
||||||
|
if test -f /etc/hetzner-build; then
|
||||||
|
# Hetzner has dummy binaries here for zfs,
|
||||||
|
# however those won't work and even crashed the system.
|
||||||
|
rm -f /usr/local/sbin/zfs /usr/local/sbin/zpool /usr/local/sbin/zdb
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
installNix
|
||||||
|
patchOsRelease
|
||||||
|
installTools
|
||||||
|
applyHetznerZfsQuirk
|
|
@ -1,23 +0,0 @@
|
||||||
From 43b15f8757a7f8de0340cc977ff9619741a5d43f Mon Sep 17 00:00:00 2001
|
|
||||||
From: Brian McGee <brian@bmcgee.ie>
|
|
||||||
Date: Mon, 1 Jul 2024 14:20:22 +0100
|
|
||||||
Subject: [PATCH] fix: configure toml key for global excludes
|
|
||||||
|
|
||||||
Signed-off-by: Brian McGee <brian@bmcgee.ie>
|
|
||||||
---
|
|
||||||
config/config.go | 2 +-
|
|
||||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
||||||
|
|
||||||
diff --git a/config/config.go b/config/config.go
|
|
||||||
index d9e281f..be5c991 100644
|
|
||||||
--- a/config/config.go
|
|
||||||
+++ b/config/config.go
|
|
||||||
@@ -10,7 +10,7 @@ import (
|
|
||||||
type Config struct {
|
|
||||||
Global struct {
|
|
||||||
// Excludes is an optional list of glob patterns used to exclude certain files from all formatters.
|
|
||||||
- Excludes []string
|
|
||||||
+ Excludes []string `toml:"excludes"`
|
|
||||||
}
|
|
||||||
Formatters map[string]*Formatter `toml:"formatter"`
|
|
||||||
}
|
|
Loading…
Reference in New Issue
Block a user