Compare commits
17 Commits
Mic92-main
...
main
Author | SHA1 | Date | |
---|---|---|---|
039e26c28f | |||
efe12541fe | |||
b99cbbaa4c | |||
813982b2d6 | |||
34a284d191 | |||
670a5c3e3f | |||
f4c8243e9c | |||
aa5e6c7c7c | |||
ad07fec05f | |||
b663547364 | |||
ed2d910832 | |||
8f79b31ac3 | |||
64ddd5e574 | |||
0075e822a0 | |||
7f17bb53ed | |||
2ca09ea549 | |||
ded75bf7ab |
39
flake.lock
39
flake.lock
|
@ -29,11 +29,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719712463,
|
"lastModified": 1719797756,
|
||||||
"narHash": "sha256-O2f16m1wnJtl3OldhucFuZpcF/cQ/xwtF7YQqVIoq0c=",
|
"narHash": "sha256-TGZthxgxLdT8boadFm6+MK7HZlIxN1u1V+x3hu+Fd8I=",
|
||||||
"owner": "Mic92",
|
"owner": "Mic92",
|
||||||
"repo": "buildbot-nix",
|
"repo": "buildbot-nix",
|
||||||
"rev": "8d972a1a7675ab7429c6378b0203dc9408995e74",
|
"rev": "0b56574a5c823097771487d1bac952c3549fe9fb",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -59,11 +59,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719728739,
|
"lastModified": 1720009622,
|
||||||
"narHash": "sha256-Gf46MC7uCK1YKlGfiYH3coAyAacoRsLRpu7ijW939mI=",
|
"narHash": "sha256-uA0FvklUt4M3yrNaSecCFgxXjnQZY8mmafLTuGSdUmU=",
|
||||||
"rev": "0f95bfd279b12865382f0ffd3459086090217fa1",
|
"rev": "fa41f94ae751b654088bb8f268f5dc0f4bb323fe",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/0f95bfd279b12865382f0ffd3459086090217fa1.tar.gz"
|
"url": "https://git.clan.lol/api/v1/repos/clan/clan-core/archive/fa41f94ae751b654088bb8f268f5dc0f4bb323fe.tar.gz"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
|
@ -113,11 +113,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719745305,
|
"lastModified": 1719994518,
|
||||||
"narHash": "sha256-xwgjVUpqSviudEkpQnioeez1Uo2wzrsMaJKJClh+Bls=",
|
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
|
||||||
"owner": "hercules-ci",
|
"owner": "hercules-ci",
|
||||||
"repo": "flake-parts",
|
"repo": "flake-parts",
|
||||||
"rev": "c3c5ecc05edc7dafba779c6c1a61cd08ac6583e9",
|
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -220,11 +220,11 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719760900,
|
"lastModified": 1719931832,
|
||||||
"narHash": "sha256-NkvFphHXKtQQ8F0XrqGlqkOhjHbE3671F8oLxwtTHhk=",
|
"narHash": "sha256-0LD+KePCKKEb4CcPsTBOwf019wDtZJanjoKm1S8q3Do=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "12a9c0004bc987afb1ff511ebb97b67497a68e22",
|
"rev": "0aeab749216e4c073cece5d34bc01b79e717c3e0",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -278,11 +278,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719753014,
|
"lastModified": 1719965291,
|
||||||
"narHash": "sha256-Lfv5qtltKuO5+HNqOKZPlEuEZo7WaLiZjAI+sTqpwws=",
|
"narHash": "sha256-IQiO6VNESSmgxQkpI1q86pqxRw0SZ45iSeM1jsmBpSw=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "srvos",
|
"repo": "srvos",
|
||||||
"rev": "22155bc76855f28a681b1d6987ea2420b899ad7e",
|
"rev": "1844f1a15ef530c963bb07c3846172fccbfb9f74",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -313,16 +313,15 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719836491,
|
"lastModified": 1719887753,
|
||||||
"narHash": "sha256-0kZeCwwYe51lN/9X2eCcBaAxFHeHTN1ieyuq/4UG8xg=",
|
"narHash": "sha256-p0B2r98UtZzRDM5miGRafL4h7TwGRC4DII+XXHDHqek=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "treefmt-nix",
|
"repo": "treefmt-nix",
|
||||||
"rev": "e78a4ce2041d5179f84b9a91001d9d35e72d3d21",
|
"rev": "bdb6355009562d8f9313d9460c0d3860f525bc6c",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"ref": "opentofu",
|
|
||||||
"repo": "treefmt-nix",
|
"repo": "treefmt-nix",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,7 +12,7 @@
|
||||||
flake-compat.url = "github:edolstra/flake-compat";
|
flake-compat.url = "github:edolstra/flake-compat";
|
||||||
flake-parts.url = "github:hercules-ci/flake-parts";
|
flake-parts.url = "github:hercules-ci/flake-parts";
|
||||||
flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs";
|
flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs";
|
||||||
treefmt-nix.url = "github:numtide/treefmt-nix/opentofu";
|
treefmt-nix.url = "github:numtide/treefmt-nix";
|
||||||
treefmt-nix.inputs.nixpkgs.follows = "nixpkgs";
|
treefmt-nix.inputs.nixpkgs.follows = "nixpkgs";
|
||||||
|
|
||||||
nixos-mailserver = {
|
nixos-mailserver = {
|
||||||
|
|
|
@ -27,6 +27,7 @@
|
||||||
self.nixosModules.buildbot
|
self.nixosModules.buildbot
|
||||||
inputs.srvos.nixosModules.mixins-nginx
|
inputs.srvos.nixosModules.mixins-nginx
|
||||||
inputs.srvos.nixosModules.mixins-nix-experimental
|
inputs.srvos.nixosModules.mixins-nix-experimental
|
||||||
|
./matrix-bot.nix
|
||||||
./web01
|
./web01
|
||||||
inputs.nixos-mailserver.nixosModules.mailserver
|
inputs.nixos-mailserver.nixosModules.mailserver
|
||||||
./mailserver.nix
|
./mailserver.nix
|
||||||
|
|
49
modules/matrix-bot.nix
Normal file
49
modules/matrix-bot.nix
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
{
|
||||||
|
config,
|
||||||
|
pkgs,
|
||||||
|
self,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
name = "matrix-bot";
|
||||||
|
in
|
||||||
|
{
|
||||||
|
users.groups.matrix-bot-user = { };
|
||||||
|
users.users.matrix-bot-user = {
|
||||||
|
group = "matrix-bot-user";
|
||||||
|
isSystemUser = true;
|
||||||
|
description = "User for matrix-bot service";
|
||||||
|
home = "/var/lib/matrix-bot";
|
||||||
|
createHome = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
systemd.services.${name} = {
|
||||||
|
path = [ self.packages.${pkgs.system}.matrix-bot ];
|
||||||
|
description = "Matrix bot for changelog and reviews";
|
||||||
|
after = [ "network.target" ];
|
||||||
|
wantedBy = [ "multi-user.target" ];
|
||||||
|
environment = {
|
||||||
|
MATRIX_PASSWORD_FILE = "%d/MATRIX_PASSWORD_FILE";
|
||||||
|
OPENAI_API_KEY_FILE = "%d/OPENAI_API_KEY_FILE";
|
||||||
|
HOME = "/var/lib/${name}";
|
||||||
|
};
|
||||||
|
|
||||||
|
serviceConfig = {
|
||||||
|
LoadCredential = [
|
||||||
|
"MATRIX_PASSWORD_FILE:${config.sops.secrets.web01-matrix-password-clan-bot.path}"
|
||||||
|
"OPENAI_API_KEY_FILE:${config.sops.secrets.qubasas-openai-api-key.path}"
|
||||||
|
];
|
||||||
|
User = "matrix-bot-user";
|
||||||
|
Group = "matrix-bot-user";
|
||||||
|
WorkingDirectory = "/var/lib/${name}";
|
||||||
|
RuntimeDirectory = "/var/lib/${name}";
|
||||||
|
};
|
||||||
|
|
||||||
|
script = ''
|
||||||
|
set -euxo pipefail
|
||||||
|
|
||||||
|
mbot --changelog-room "!FdCwyKsRlfooNYKYzx:matrix.org" --review-room "!tmSRJlbsVXFUKAddiM:gchq.icu"
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
}
|
|
@ -8,4 +8,23 @@
|
||||||
};
|
};
|
||||||
clan.matrix-synapse.users.monitoring = { };
|
clan.matrix-synapse.users.monitoring = { };
|
||||||
clan.matrix-synapse.users.clan-bot = { };
|
clan.matrix-synapse.users.clan-bot = { };
|
||||||
|
|
||||||
|
# Rate limiting settings
|
||||||
|
# we need to up this to be able to support matrix bots
|
||||||
|
services.matrix-synapse.settings = {
|
||||||
|
rc_login = {
|
||||||
|
address = {
|
||||||
|
per_second = 20;
|
||||||
|
burst_count = 200;
|
||||||
|
};
|
||||||
|
account = {
|
||||||
|
per_second = 20;
|
||||||
|
burst_count = 200;
|
||||||
|
};
|
||||||
|
failed_attempts = {
|
||||||
|
per_second = 3;
|
||||||
|
burst_count = 15;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,8 @@
|
||||||
{
|
{
|
||||||
imports = [ ./clan-merge/flake-module.nix ];
|
imports = [
|
||||||
|
./clan-merge/flake-module.nix
|
||||||
|
./matrix-bot/flake-module.nix
|
||||||
|
];
|
||||||
perSystem =
|
perSystem =
|
||||||
{ pkgs, config, ... }:
|
{ pkgs, config, ... }:
|
||||||
{
|
{
|
||||||
|
|
6
pkgs/matrix-bot/.envrc
Normal file
6
pkgs/matrix-bot/.envrc
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
source_up
|
||||||
|
|
||||||
|
watch_file flake-module.nix shell.nix default.nix
|
||||||
|
|
||||||
|
# Because we depend on nixpkgs sources, uploading to builders takes a long time
|
||||||
|
use flake .#matrix-bot --builders ''
|
3
pkgs/matrix-bot/.gitignore
vendored
Normal file
3
pkgs/matrix-bot/.gitignore
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
*.json
|
||||||
|
**/data
|
||||||
|
**/__pycache__
|
13
pkgs/matrix-bot/bin/mbot
Executable file
13
pkgs/matrix-bot/bin/mbot
Executable file
|
@ -0,0 +1,13 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(
|
||||||
|
0, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
||||||
|
)
|
||||||
|
|
||||||
|
from matrix_bot import main # NOQA
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
42
pkgs/matrix-bot/default.nix
Normal file
42
pkgs/matrix-bot/default.nix
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
{
|
||||||
|
python3,
|
||||||
|
setuptools,
|
||||||
|
matrix-nio,
|
||||||
|
aiofiles,
|
||||||
|
aiohttp,
|
||||||
|
markdown2,
|
||||||
|
git,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
|
||||||
|
pythonDependencies = [
|
||||||
|
matrix-nio
|
||||||
|
aiofiles
|
||||||
|
aiohttp
|
||||||
|
markdown2
|
||||||
|
];
|
||||||
|
|
||||||
|
runtimeDependencies = [ git ];
|
||||||
|
|
||||||
|
testDependencies = pythonDependencies ++ runtimeDependencies ++ [ ];
|
||||||
|
in
|
||||||
|
python3.pkgs.buildPythonApplication {
|
||||||
|
name = "matrix-bot";
|
||||||
|
src = ./.;
|
||||||
|
format = "pyproject";
|
||||||
|
|
||||||
|
nativeBuildInputs = [ setuptools ];
|
||||||
|
|
||||||
|
propagatedBuildInputs = pythonDependencies ++ runtimeDependencies;
|
||||||
|
|
||||||
|
passthru.testDependencies = testDependencies;
|
||||||
|
|
||||||
|
# Clean up after the package to avoid leaking python packages into a devshell
|
||||||
|
postFixup = ''
|
||||||
|
rm $out/nix-support/propagated-build-inputs
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta.mainProgram = "matrix-bot";
|
||||||
|
}
|
14
pkgs/matrix-bot/flake-module.nix
Normal file
14
pkgs/matrix-bot/flake-module.nix
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
{ ... }:
|
||||||
|
{
|
||||||
|
perSystem =
|
||||||
|
{ self', pkgs, ... }:
|
||||||
|
{
|
||||||
|
|
||||||
|
devShells.matrix-bot = pkgs.callPackage ./shell.nix { inherit (self'.packages) matrix-bot; };
|
||||||
|
packages = {
|
||||||
|
matrix-bot = pkgs.python3.pkgs.callPackage ./default.nix { };
|
||||||
|
};
|
||||||
|
|
||||||
|
checks = { };
|
||||||
|
};
|
||||||
|
}
|
169
pkgs/matrix-bot/matrix_bot/__init__.py
Normal file
169
pkgs/matrix-bot/matrix_bot/__init__.py
Normal file
|
@ -0,0 +1,169 @@
|
||||||
|
import argparse
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from os import environ
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from matrix_bot.custom_logger import setup_logging
|
||||||
|
from matrix_bot.gitea import GiteaData
|
||||||
|
from matrix_bot.main import bot_main
|
||||||
|
from matrix_bot.matrix import MatrixData
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
curr_dir = Path(__file__).parent
|
||||||
|
data_dir = Path(os.getcwd()) / "data"
|
||||||
|
|
||||||
|
|
||||||
|
def create_parser(prog: str | None = None) -> argparse.ArgumentParser:
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
prog=prog,
|
||||||
|
description="A gitea bot for matrix",
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--debug",
|
||||||
|
help="Enable debug logging",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--server",
|
||||||
|
help="The matrix server to connect to",
|
||||||
|
default="https://matrix.clan.lol",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--user",
|
||||||
|
help="The matrix user to connect as",
|
||||||
|
default="@clan-bot:clan.lol",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--avatar",
|
||||||
|
help="The path to the image to use as the avatar",
|
||||||
|
default=curr_dir / "avatar.png",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--repo-owner",
|
||||||
|
help="The owner of gitea the repository",
|
||||||
|
default="clan",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--repo-name",
|
||||||
|
help="The name of the repository",
|
||||||
|
default="clan-core",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--changelog-room",
|
||||||
|
help="The matrix room to join for the changelog bot",
|
||||||
|
default="#bot-test:gchq.icu",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--review-room",
|
||||||
|
help="The matrix room to join for the review bot",
|
||||||
|
default="#bot-test:gchq.icu",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--changelog-frequency",
|
||||||
|
help="The frequency to check for changelog updates in days",
|
||||||
|
default=7,
|
||||||
|
type=int,
|
||||||
|
)
|
||||||
|
|
||||||
|
def valid_weekday(value: str) -> str:
|
||||||
|
days = [
|
||||||
|
"Monday",
|
||||||
|
"Tuesday",
|
||||||
|
"Wednesday",
|
||||||
|
"Thursday",
|
||||||
|
"Friday",
|
||||||
|
"Saturday",
|
||||||
|
"Sunday",
|
||||||
|
]
|
||||||
|
if value not in days:
|
||||||
|
raise argparse.ArgumentTypeError(
|
||||||
|
f"{value} is not a valid weekday. Choose from {', '.join(days)}"
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--publish-day",
|
||||||
|
help="The day of the week to publish the changelog. Ignored if changelog-frequency is less than 7 days.",
|
||||||
|
default="Wednesday",
|
||||||
|
type=valid_weekday,
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--gitea-url",
|
||||||
|
help="The gitea url to connect to",
|
||||||
|
default="https://git.clan.lol",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--data-dir",
|
||||||
|
help="The directory to store data",
|
||||||
|
default=data_dir,
|
||||||
|
type=Path,
|
||||||
|
)
|
||||||
|
|
||||||
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
def matrix_password() -> str:
|
||||||
|
matrix_password = environ.get("MATRIX_PASSWORD")
|
||||||
|
if matrix_password is not None:
|
||||||
|
return matrix_password
|
||||||
|
matrix_password_file = environ.get("MATRIX_PASSWORD_FILE", default=None)
|
||||||
|
if matrix_password_file is None:
|
||||||
|
raise Exception("MATRIX_PASSWORD_FILE environment variable is not set")
|
||||||
|
with open(matrix_password_file) as f:
|
||||||
|
return f.read().strip()
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
parser = create_parser()
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
if args.debug:
|
||||||
|
setup_logging(logging.DEBUG, root_log_name=__name__.split(".")[0])
|
||||||
|
log.debug("Debug log activated")
|
||||||
|
else:
|
||||||
|
setup_logging(logging.INFO, root_log_name=__name__.split(".")[0])
|
||||||
|
|
||||||
|
matrix = MatrixData(
|
||||||
|
server=args.server,
|
||||||
|
user=args.user,
|
||||||
|
avatar=args.avatar,
|
||||||
|
changelog_room=args.changelog_room,
|
||||||
|
changelog_frequency=args.changelog_frequency,
|
||||||
|
publish_day=args.publish_day,
|
||||||
|
review_room=args.review_room,
|
||||||
|
password=matrix_password(),
|
||||||
|
)
|
||||||
|
|
||||||
|
gitea = GiteaData(
|
||||||
|
url=args.gitea_url,
|
||||||
|
owner=args.repo_owner,
|
||||||
|
repo=args.repo_name,
|
||||||
|
access_token=os.getenv("GITEA_ACCESS_TOKEN"),
|
||||||
|
)
|
||||||
|
|
||||||
|
args.data_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
asyncio.run(bot_main(matrix, gitea, args.data_dir))
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
print("User Interrupt", file=sys.stderr)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
4
pkgs/matrix-bot/matrix_bot/__main__.py
Normal file
4
pkgs/matrix-bot/matrix_bot/__main__.py
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
from . import main
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
BIN
pkgs/matrix-bot/matrix_bot/avatar.png
Normal file
BIN
pkgs/matrix-bot/matrix_bot/avatar.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 105 KiB |
214
pkgs/matrix-bot/matrix_bot/changelog_bot.py
Normal file
214
pkgs/matrix-bot/matrix_bot/changelog_bot.py
Normal file
|
@ -0,0 +1,214 @@
|
||||||
|
import asyncio
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import shlex
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
from nio import (
|
||||||
|
AsyncClient,
|
||||||
|
JoinResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
from matrix_bot.gitea import (
|
||||||
|
GiteaData,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .locked_open import read_locked_file, write_locked_file
|
||||||
|
from .matrix import MatrixData, send_message
|
||||||
|
from .openai import create_jsonl_data, upload_and_process_file
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def last_ndays_to_today(ndays: int) -> (str, str):
|
||||||
|
# Get today's date
|
||||||
|
today = datetime.datetime.now()
|
||||||
|
|
||||||
|
# Calculate the date one week ago
|
||||||
|
last_week = today - datetime.timedelta(days=ndays)
|
||||||
|
|
||||||
|
# Format both dates to "YYYY-MM-DD"
|
||||||
|
todate = today.strftime("%Y-%m-%d")
|
||||||
|
fromdate = last_week.strftime("%Y-%m-%d")
|
||||||
|
|
||||||
|
return (fromdate, todate)
|
||||||
|
|
||||||
|
|
||||||
|
def write_file_with_date_prefix(
|
||||||
|
content: str, directory: Path, *, ndays: int, suffix: str
|
||||||
|
) -> Path:
|
||||||
|
"""
|
||||||
|
Write content to a file with the current date as filename prefix.
|
||||||
|
|
||||||
|
:param content: The content to write to the file.
|
||||||
|
:param directory: The directory where the file will be saved.
|
||||||
|
:return: The path to the created file.
|
||||||
|
"""
|
||||||
|
# Ensure the directory exists
|
||||||
|
directory.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Get the current date
|
||||||
|
fromdate, todate = last_ndays_to_today(ndays)
|
||||||
|
|
||||||
|
# Create the filename
|
||||||
|
filename = f"{fromdate}__{todate}_{suffix}.txt"
|
||||||
|
file_path = directory / filename
|
||||||
|
|
||||||
|
# Write the content to the file
|
||||||
|
with open(file_path, "w") as file:
|
||||||
|
file.write(content)
|
||||||
|
|
||||||
|
return file_path
|
||||||
|
|
||||||
|
|
||||||
|
async def git_pull(repo_path: Path) -> None:
|
||||||
|
cmd = ["git", "pull"]
|
||||||
|
log.debug(f"Running command: {shlex.join(cmd)}")
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
cwd=str(repo_path),
|
||||||
|
)
|
||||||
|
await process.wait()
|
||||||
|
|
||||||
|
|
||||||
|
async def git_log(repo_path: str, ndays: int) -> str:
|
||||||
|
cmd = [
|
||||||
|
"git",
|
||||||
|
"log",
|
||||||
|
f"--since={ndays} days ago",
|
||||||
|
"--pretty=format:%h - %an, %ar : %s",
|
||||||
|
"--stat",
|
||||||
|
"--patch",
|
||||||
|
]
|
||||||
|
log.debug(f"Running command: {shlex.join(cmd)}")
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
cwd=repo_path,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode != 0:
|
||||||
|
raise Exception(
|
||||||
|
f"Command '{' '.join(cmd)}' failed with exit code {process.returncode}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return stdout.decode()
|
||||||
|
|
||||||
|
|
||||||
|
async def changelog_bot(
|
||||||
|
client: AsyncClient,
|
||||||
|
http: aiohttp.ClientSession,
|
||||||
|
matrix: MatrixData,
|
||||||
|
gitea: GiteaData,
|
||||||
|
data_dir: Path,
|
||||||
|
) -> None:
|
||||||
|
last_run_path = data_dir / "last_changelog_run.json"
|
||||||
|
last_run = read_locked_file(last_run_path)
|
||||||
|
|
||||||
|
if last_run == {}:
|
||||||
|
fromdate, todate = last_ndays_to_today(matrix.changelog_frequency)
|
||||||
|
last_run = {
|
||||||
|
"fromdate": fromdate,
|
||||||
|
"todate": todate,
|
||||||
|
"ndays": matrix.changelog_frequency,
|
||||||
|
}
|
||||||
|
log.debug(f"First run. Setting last_run to {last_run}")
|
||||||
|
today = datetime.datetime.now()
|
||||||
|
today_weekday = today.strftime("%A")
|
||||||
|
if today_weekday != matrix.publish_day:
|
||||||
|
log.debug(f"Changelog not due yet. Due on {matrix.publish_day}")
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
last_date = datetime.datetime.strptime(last_run["todate"], "%Y-%m-%d")
|
||||||
|
today = datetime.datetime.now()
|
||||||
|
today_weekday = today.strftime("%A")
|
||||||
|
delta = datetime.timedelta(days=matrix.changelog_frequency)
|
||||||
|
if today - last_date <= delta:
|
||||||
|
log.debug(f"Changelog not due yet. Due in {delta.days} days")
|
||||||
|
return
|
||||||
|
elif today_weekday != matrix.publish_day:
|
||||||
|
log.debug(f"Changelog not due yet. Due on {matrix.publish_day}")
|
||||||
|
return
|
||||||
|
|
||||||
|
# If you made a new room and haven't joined as that user, you can use
|
||||||
|
room: JoinResponse = await client.join(matrix.changelog_room)
|
||||||
|
|
||||||
|
if not room.transport_response.ok:
|
||||||
|
log.error("This can happen if the room doesn't exist or the bot isn't invited")
|
||||||
|
raise Exception(f"Failed to join room {room}")
|
||||||
|
|
||||||
|
repo_path = data_dir / gitea.repo
|
||||||
|
|
||||||
|
if not repo_path.exists():
|
||||||
|
cmd = [
|
||||||
|
"git",
|
||||||
|
"clone",
|
||||||
|
f"{gitea.url}/{gitea.owner}/{gitea.repo}.git",
|
||||||
|
gitea.repo,
|
||||||
|
]
|
||||||
|
subprocess.run(cmd, cwd=data_dir, check=True)
|
||||||
|
|
||||||
|
# git pull
|
||||||
|
await git_pull(repo_path)
|
||||||
|
|
||||||
|
# git log
|
||||||
|
diff = await git_log(repo_path, matrix.changelog_frequency)
|
||||||
|
|
||||||
|
fromdate, todate = last_ndays_to_today(matrix.changelog_frequency)
|
||||||
|
log.info(f"Generating changelog from {fromdate} to {todate}")
|
||||||
|
|
||||||
|
system_prompt = f"""
|
||||||
|
Create a concise changelog for the {matrix.changelog_frequency}.
|
||||||
|
Follow these guidelines:
|
||||||
|
|
||||||
|
- The header should include the date range from {fromdate} to {todate}
|
||||||
|
- Use present tense
|
||||||
|
- Keep the summary brief
|
||||||
|
- Follow commit message format: "scope: message (#number)"
|
||||||
|
- Link pull requests as: '{gitea.url}/{gitea.owner}/{gitea.repo}/pulls/<number>'
|
||||||
|
- Use markdown links to make the pull request number clickable
|
||||||
|
- Mention each scope and pull request number only once
|
||||||
|
- Have these headers in the changelog if applicable:
|
||||||
|
- New Features
|
||||||
|
- Documentation
|
||||||
|
- Refactoring
|
||||||
|
- Bug Fixes
|
||||||
|
- Other Changes
|
||||||
|
|
||||||
|
Changelog:
|
||||||
|
---
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Step 1: Create the JSONL file
|
||||||
|
jsonl_data = await create_jsonl_data(user_prompt=diff, system_prompt=system_prompt)
|
||||||
|
|
||||||
|
# Step 2: Upload the JSONL file and process it
|
||||||
|
results = await upload_and_process_file(session=http, jsonl_data=jsonl_data)
|
||||||
|
|
||||||
|
# Write the results to a file in the changelogs directory
|
||||||
|
result_file = write_file_with_date_prefix(
|
||||||
|
json.dumps(results, indent=4),
|
||||||
|
data_dir / "changelogs",
|
||||||
|
ndays=matrix.changelog_frequency,
|
||||||
|
suffix="result",
|
||||||
|
)
|
||||||
|
log.info(f"LLM result written to: {result_file}")
|
||||||
|
|
||||||
|
# Join responses together
|
||||||
|
all_changelogs = []
|
||||||
|
for result in results:
|
||||||
|
choices = result["response"]["body"]["choices"]
|
||||||
|
changelog = "\n".join(choice["message"]["content"] for choice in choices)
|
||||||
|
all_changelogs.append(changelog)
|
||||||
|
full_changelog = "\n\n".join(all_changelogs)
|
||||||
|
|
||||||
|
# Write the last run to the file
|
||||||
|
write_locked_file(last_run_path, last_run)
|
||||||
|
log.info(f"Changelog generated:\n{full_changelog}")
|
||||||
|
|
||||||
|
await send_message(client, room, full_changelog)
|
97
pkgs/matrix-bot/matrix_bot/custom_logger.py
Normal file
97
pkgs/matrix-bot/matrix_bot/custom_logger.py
Normal file
|
@ -0,0 +1,97 @@
|
||||||
|
import inspect
|
||||||
|
import logging
|
||||||
|
from collections.abc import Callable
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
grey = "\x1b[38;20m"
|
||||||
|
yellow = "\x1b[33;20m"
|
||||||
|
red = "\x1b[31;20m"
|
||||||
|
bold_red = "\x1b[31;1m"
|
||||||
|
green = "\u001b[32m"
|
||||||
|
blue = "\u001b[34m"
|
||||||
|
|
||||||
|
|
||||||
|
def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.Formatter]:
|
||||||
|
def myformatter(
|
||||||
|
record: logging.LogRecord, with_location: bool
|
||||||
|
) -> logging.Formatter:
|
||||||
|
reset = "\x1b[0m"
|
||||||
|
|
||||||
|
try:
|
||||||
|
filepath = Path(record.pathname).resolve()
|
||||||
|
filepath = Path("~", filepath.relative_to(Path.home()))
|
||||||
|
except Exception:
|
||||||
|
filepath = Path(record.pathname)
|
||||||
|
|
||||||
|
if not with_location:
|
||||||
|
return logging.Formatter(f"{color}%(levelname)s{reset}: %(message)s")
|
||||||
|
|
||||||
|
return logging.Formatter(
|
||||||
|
f"{color}%(levelname)s{reset}: %(message)s\nLocation: {filepath}:%(lineno)d::%(funcName)s\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
return myformatter
|
||||||
|
|
||||||
|
|
||||||
|
FORMATTER = {
|
||||||
|
logging.DEBUG: get_formatter(blue),
|
||||||
|
logging.INFO: get_formatter(green),
|
||||||
|
logging.WARNING: get_formatter(yellow),
|
||||||
|
logging.ERROR: get_formatter(red),
|
||||||
|
logging.CRITICAL: get_formatter(bold_red),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class CustomFormatter(logging.Formatter):
|
||||||
|
def __init__(self, log_locations: bool) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.log_locations = log_locations
|
||||||
|
|
||||||
|
def format(self, record: logging.LogRecord) -> str:
|
||||||
|
return FORMATTER[record.levelno](record, self.log_locations).format(record)
|
||||||
|
|
||||||
|
|
||||||
|
class ThreadFormatter(logging.Formatter):
|
||||||
|
def format(self, record: logging.LogRecord) -> str:
|
||||||
|
return FORMATTER[record.levelno](record, False).format(record)
|
||||||
|
|
||||||
|
|
||||||
|
def get_caller() -> str:
|
||||||
|
frame = inspect.currentframe()
|
||||||
|
if frame is None:
|
||||||
|
return "unknown"
|
||||||
|
caller_frame = frame.f_back
|
||||||
|
if caller_frame is None:
|
||||||
|
return "unknown"
|
||||||
|
caller_frame = caller_frame.f_back
|
||||||
|
if caller_frame is None:
|
||||||
|
return "unknown"
|
||||||
|
frame_info = inspect.getframeinfo(caller_frame)
|
||||||
|
|
||||||
|
try:
|
||||||
|
filepath = Path(frame_info.filename).resolve()
|
||||||
|
filepath = Path("~", filepath.relative_to(Path.home()))
|
||||||
|
except Exception:
|
||||||
|
filepath = Path(frame_info.filename)
|
||||||
|
|
||||||
|
ret = f"{filepath}:{frame_info.lineno}::{frame_info.function}"
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging(level: Any, root_log_name: str = __name__.split(".")[0]) -> None:
|
||||||
|
# Get the root logger and set its level
|
||||||
|
main_logger = logging.getLogger(root_log_name)
|
||||||
|
main_logger.setLevel(level)
|
||||||
|
|
||||||
|
# Create and add the default handler
|
||||||
|
default_handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
# Create and add your custom handler
|
||||||
|
default_handler.setLevel(level)
|
||||||
|
default_handler.setFormatter(CustomFormatter(str(level) == str(logging.DEBUG)))
|
||||||
|
main_logger.addHandler(default_handler)
|
||||||
|
|
||||||
|
# Set logging level for other modules used by this module
|
||||||
|
logging.getLogger("asyncio").setLevel(logging.INFO)
|
||||||
|
logging.getLogger("httpx").setLevel(level=logging.WARNING)
|
88
pkgs/matrix-bot/matrix_bot/gitea.py
Normal file
88
pkgs/matrix-bot/matrix_bot/gitea.py
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
import logging
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class GiteaData:
|
||||||
|
url: str
|
||||||
|
owner: str
|
||||||
|
repo: str
|
||||||
|
access_token: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def endpoint_url(gitea: GiteaData, endpoint: str) -> str:
|
||||||
|
return f"{gitea.url}/api/v1/repos/{gitea.owner}/{gitea.repo}/{endpoint}"
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_repo_labels(
|
||||||
|
gitea: GiteaData,
|
||||||
|
session: aiohttp.ClientSession,
|
||||||
|
) -> list[dict]:
|
||||||
|
"""
|
||||||
|
Fetch labels from a Gitea repository.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: List of labels in the repository.
|
||||||
|
"""
|
||||||
|
url = endpoint_url(gitea, "labels")
|
||||||
|
headers = {"Accept": "application/vnd.github.v3+json"}
|
||||||
|
if gitea.access_token:
|
||||||
|
headers["Authorization"] = f"token {gitea.access_token}"
|
||||||
|
|
||||||
|
async with session.get(url, headers=headers) as response:
|
||||||
|
if response.status == 200:
|
||||||
|
labels = await response.json()
|
||||||
|
return labels
|
||||||
|
else:
|
||||||
|
# You may want to handle different statuses differently
|
||||||
|
raise Exception(
|
||||||
|
f"Failed to fetch labels: {response.status}, {await response.text()}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PullState(Enum):
|
||||||
|
OPEN = "open"
|
||||||
|
CLOSED = "closed"
|
||||||
|
ALL = "all"
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_pull_requests(
|
||||||
|
gitea: GiteaData,
|
||||||
|
session: aiohttp.ClientSession,
|
||||||
|
*,
|
||||||
|
limit: int,
|
||||||
|
state: PullState,
|
||||||
|
label_ids: list[int] = [],
|
||||||
|
) -> list[dict]:
|
||||||
|
"""
|
||||||
|
Fetch pull requests from a Gitea repository.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: List of pull requests.
|
||||||
|
"""
|
||||||
|
# You can use the same pattern as fetch_repo_labels
|
||||||
|
url = endpoint_url(gitea, "pulls")
|
||||||
|
params = {
|
||||||
|
"state": state.value,
|
||||||
|
"sort": "recentupdate",
|
||||||
|
"limit": limit,
|
||||||
|
"labels": label_ids,
|
||||||
|
}
|
||||||
|
headers = {"accept": "application/json"}
|
||||||
|
|
||||||
|
async with session.get(url, params=params, headers=headers) as response:
|
||||||
|
if response.status == 200:
|
||||||
|
labels = await response.json()
|
||||||
|
return labels
|
||||||
|
else:
|
||||||
|
# You may want to handle different statuses differently
|
||||||
|
raise Exception(
|
||||||
|
f"Failed to fetch labels: {response.status}, {await response.text()}"
|
||||||
|
)
|
31
pkgs/matrix-bot/matrix_bot/locked_open.py
Normal file
31
pkgs/matrix-bot/matrix_bot/locked_open.py
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
import fcntl
|
||||||
|
import json
|
||||||
|
from collections.abc import Generator
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def locked_open(filename: str | Path, mode: str = "r") -> Generator:
|
||||||
|
"""
|
||||||
|
This is a context manager that provides an advisory write lock on the file specified by `filename` when entering the context, and releases the lock when leaving the context. The lock is acquired using the `fcntl` module's `LOCK_EX` flag, which applies an exclusive write lock to the file.
|
||||||
|
"""
|
||||||
|
with open(filename, mode) as fd:
|
||||||
|
fcntl.flock(fd, fcntl.LOCK_EX)
|
||||||
|
yield fd
|
||||||
|
fcntl.flock(fd, fcntl.LOCK_UN)
|
||||||
|
|
||||||
|
|
||||||
|
def write_locked_file(path: Path, data: dict[str, Any]) -> None:
    """Serialize `data` as indented JSON into `path` while holding an
    exclusive advisory lock on the file."""
    serialized = json.dumps(data, indent=4)
    with locked_open(path, "w+") as handle:
        handle.write(serialized)
|
||||||
|
|
||||||
|
|
||||||
|
def read_locked_file(path: Path) -> dict[str, Any]:
    """Read and parse JSON from `path` under a file lock.

    Returns an empty dict when the file does not exist yet."""
    if not path.exists():
        return {}
    with locked_open(path, "r") as handle:
        raw: str = handle.read()
        result: dict[str, Any] = json.loads(raw)
        return result
|
54
pkgs/matrix-bot/matrix_bot/main.py
Normal file
54
pkgs/matrix-bot/matrix_bot/main.py
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
from nio import AsyncClient, ClientConfig, ProfileGetAvatarResponse, RoomMessageText
|
||||||
|
|
||||||
|
from .changelog_bot import changelog_bot
|
||||||
|
from .gitea import GiteaData
|
||||||
|
from .matrix import MatrixData, set_avatar, upload_image
|
||||||
|
from .review_bot import message_callback, review_requested_bot
|
||||||
|
|
||||||
|
|
||||||
|
async def bot_main(
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """
    Log into Matrix, ensure the bot account has an avatar, then run the
    changelog and review bots in an endless polling loop (every 5 minutes).

    Args:
        matrix: Matrix connection data (server, user, password, avatar, ...).
        gitea: Gitea connection data passed through to the sub-bots.
        data_dir: Directory where the sub-bots persist their state files.
    """
    # Setup client configuration to handle encryption
    client_config = ClientConfig(
        encryption_enabled=False,
    )

    log.info(f"Connecting to {matrix.server} as {matrix.user}")
    client = AsyncClient(matrix.server, matrix.user, config=client_config)
    # Log every incoming text message at debug level.
    client.add_event_callback(message_callback, RoomMessageText)

    result = await client.login(matrix.password)
    if not result.transport_response.ok:
        # Login is fatal: without a session nothing else can run.
        log.critical(f"Failed to login: {result}")
        exit(1)
    log.info(f"Logged in as {result}")

    # Upload and set an avatar only if the account has none yet.
    avatar: ProfileGetAvatarResponse = await client.get_avatar()
    if not avatar.avatar_url:
        mxc_url = await upload_image(client, matrix.avatar)
        log.info(f"Uploaded avatar to {mxc_url}")
        await set_avatar(client, mxc_url)
    else:
        log.info(f"Bot already has an avatar {avatar.avatar_url}")

    try:
        async with aiohttp.ClientSession() as session:
            while True:
                await changelog_bot(client, session, matrix, gitea, data_dir)
                await review_requested_bot(client, session, matrix, gitea, data_dir)
                # Poll Gitea again in 5 minutes.
                await asyncio.sleep(60 * 5)
    except Exception as e:
        # Any sub-bot failure ends the loop; it is logged, not re-raised.
        log.exception(e)
    finally:
        # Always close the Matrix client, even on crash or cancellation.
        await client.close()
|
88
pkgs/matrix-bot/matrix_bot/matrix.py
Normal file
88
pkgs/matrix-bot/matrix_bot/matrix.py
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from markdown2 import markdown
|
||||||
|
from nio import (
|
||||||
|
AsyncClient,
|
||||||
|
JoinedMembersResponse,
|
||||||
|
JoinResponse,
|
||||||
|
ProfileSetAvatarResponse,
|
||||||
|
RoomMember,
|
||||||
|
RoomSendResponse,
|
||||||
|
UploadResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def upload_image(client: AsyncClient, image_path: str) -> str:
    """
    Upload the PNG at `image_path` to the Matrix content repository and
    return its MXC URL.

    Raises:
        Exception: If the upload transport did not succeed.
    """
    with open(image_path, "rb") as image_file:
        upload_result = await client.upload(image_file, content_type="image/png")
    response: UploadResponse = upload_result[0]
    if not response.transport_response.ok:
        raise Exception(f"Failed to upload image {response}")
    # This is the MXC URL
    return response.content_uri
|
||||||
|
|
||||||
|
|
||||||
|
async def set_avatar(client: AsyncClient, mxc_url: str) -> None:
    """Point the logged-in account's avatar at the content behind `mxc_url`.

    Raises:
        Exception: If the set-avatar transport did not succeed.
    """
    response: ProfileSetAvatarResponse = await client.set_avatar(mxc_url)
    if not response.transport_response.ok:
        raise Exception(f"Failed to set avatar {response}")
|
||||||
|
|
||||||
|
|
||||||
|
async def get_room_members(client: AsyncClient, room: JoinResponse) -> list[RoomMember]:
    """Return the member list of `room`.

    Raises:
        Exception: If the joined_members lookup transport failed.
    """
    users: JoinedMembersResponse = await client.joined_members(room.room_id)
    if users.transport_response.ok:
        return users.members
    raise Exception(f"Failed to get users {users}")
|
||||||
|
|
||||||
|
|
||||||
|
async def send_message(
    client: AsyncClient,
    room: JoinResponse,
    message: str,
    user_ids: list[str] | None = None,
) -> None:
    """
    Send a markdown-rendered message to a Matrix room, optionally
    mentioning users via matrix.to links prepended to the HTML body.

    Raises:
        Exception: If the room_send transport did not succeed.
    """
    # Render markdown to HTML; prepend mention pills when users are given.
    formatted_message = markdown(message)
    if user_ids:
        pills = (
            f"<a href='https://matrix.to/#/{user_id}'>{user_id}</a>"
            for user_id in user_ids
        )
        mention_list = ", ".join(pills)
        formatted_message = f"{mention_list}: {formatted_message}"

    content = {
        # Mentions go out as m.text so they notify; announcements as m.notice.
        "msgtype": "m.text" if user_ids else "m.notice",
        "format": "org.matrix.custom.html",
        "body": message,
        "formatted_body": formatted_message,
    }

    res: RoomSendResponse = await client.room_send(
        room_id=room.room_id, message_type="m.room.message", content=content
    )

    if not res.transport_response.ok:
        raise Exception(f"Failed to send message {res}")
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class MatrixData:
    """Static configuration for the Matrix side of the bot."""

    # Homeserver URL the bot connects to.
    server: str
    # User id the bot logs in as.
    user: str
    # Path to the image uploaded as the bot's avatar (PNG per upload_image).
    avatar: Path
    # Login password for `user`.
    password: str
    # Room id/alias where changelog posts are published.
    changelog_room: str
    # Room id/alias where review-request pings are sent.
    review_room: str
    # Changelog generation interval — presumably days; TODO confirm units.
    changelog_frequency: int
    # Day on which the changelog is published — TODO confirm expected format.
    publish_day: str
|
129
pkgs/matrix-bot/matrix_bot/openai.py
Normal file
129
pkgs/matrix-bot/matrix_bot/openai.py
Normal file
|
@ -0,0 +1,129 @@
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from os import environ
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# The URL to which the request is sent
|
||||||
|
url: str = "https://api.openai.com/v1/chat/completions"
|
||||||
|
|
||||||
|
|
||||||
|
def api_key() -> str:
    """
    Resolve the OpenAI API key.

    Checks the OPENAI_API_KEY environment variable first; if unset, reads
    the key from the file named by OPENAI_API_KEY_FILE (stripped of
    surrounding whitespace).

    Returns:
        The API key string.

    Raises:
        Exception: If neither environment variable is set.
    """
    openapi_key = environ.get("OPENAI_API_KEY")
    if openapi_key is not None:
        return openapi_key

    # `environ.get` already defaults to None; the explicit default=None was
    # redundant.
    openai_key_file = environ.get("OPENAI_API_KEY_FILE")
    if openai_key_file is None:
        raise Exception("OPENAI_API_KEY_FILE environment variable is not set")
    with open(openai_key_file) as f:
        return f.read().strip()
|
||||||
|
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
async def create_jsonl_data(
    *,
    user_prompt: str,
    system_prompt: str,
    model: str = "gpt-4o",
    max_tokens: int = 1000,
) -> bytes:
    """
    Build one OpenAI Batch-API chat-completion request line (JSONL) and
    return it as UTF-8 encoded bytes.
    """
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    request = {
        "custom_id": "request-1",
        "method": "POST",
        "url": "/v1/chat/completions",
        "body": {
            "model": model,
            "messages": messages,
            "max_tokens": max_tokens,
        },
    }
    return json.dumps(request).encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
async def upload_and_process_file(
    *, session: aiohttp.ClientSession, jsonl_data: bytes, api_key: str = api_key()
) -> list[dict[str, Any]]:
    """
    Upload a JSONL file to OpenAI's Batch API and process it asynchronously.

    Steps:
      1. Upload `jsonl_data` as a file with purpose "batch".
      2. Create a batch job against /v1/chat/completions from that file.
      3. Poll the batch status every 10s until completed/failed/expired.
      4. Download and parse the JSONL result file.

    NOTE(review): the `api_key()` default is evaluated once at module import
    time, so importing this module fails when no key is configured —
    consider resolving the key lazily.

    Returns:
        The parsed result objects, one per line of the output file.
        (Annotation previously said `dict[str, Any]`, but the function
        returns a list.)

    Raises:
        Exception: On any non-200 response, missing ids, or if the batch
            finishes in a non-"completed" state.
    """
    upload_url = "https://api.openai.com/v1/files"
    headers = {
        "Authorization": f"Bearer {api_key}",
    }
    data = aiohttp.FormData()
    data.add_field(
        "file", jsonl_data, filename="changelog.jsonl", content_type="application/jsonl"
    )
    data.add_field("purpose", "batch")

    async with session.post(upload_url, headers=headers, data=data) as response:
        if response.status != 200:
            raise Exception(f"File upload failed with status code {response.status}")
        upload_response = await response.json()
        file_id = upload_response.get("id")

    if not file_id:
        raise Exception("File ID not returned from upload")

    # Step 2: Create a batch using the uploaded file ID
    batch_url = "https://api.openai.com/v1/batches"
    batch_data = {
        "input_file_id": file_id,
        "endpoint": "/v1/chat/completions",
        "completion_window": "24h",
    }

    async with session.post(batch_url, headers=headers, json=batch_data) as response:
        if response.status != 200:
            raise Exception(f"Batch creation failed with status code {response.status}")
        batch_response = await response.json()
        batch_id = batch_response.get("id")

    if not batch_id:
        raise Exception("Batch ID not returned from creation")

    # Step 3: Check the status of the batch until completion
    status_url = f"https://api.openai.com/v1/batches/{batch_id}"

    while True:
        async with session.get(status_url, headers=headers) as response:
            if response.status != 200:
                raise Exception(
                    f"Failed to check batch status with status code {response.status}"
                )
            status_response = await response.json()
            status = status_response.get("status")
        if status in ["completed", "failed", "expired"]:
            break
        await asyncio.sleep(10)  # Wait before checking again

    if status != "completed":
        raise Exception(f"Batch processing failed with status: {status}")

    # Step 4: Retrieve the results
    output_file_id = status_response.get("output_file_id")
    output_url = f"https://api.openai.com/v1/files/{output_file_id}/content"

    async with session.get(output_url, headers=headers) as response:
        if response.status != 200:
            raise Exception(
                f"Failed to retrieve batch results with status code {response.status}"
            )
        # Read content as text, then parse one JSON object per JSONL line.
        content = await response.text()
        results = [json.loads(line) for line in content.splitlines()]
        return results
|
90
pkgs/matrix-bot/matrix_bot/review_bot.py
Normal file
90
pkgs/matrix-bot/matrix_bot/review_bot.py
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
import logging
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
import datetime
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
from nio import (
|
||||||
|
AsyncClient,
|
||||||
|
JoinResponse,
|
||||||
|
MatrixRoom,
|
||||||
|
RoomMessageText,
|
||||||
|
)
|
||||||
|
|
||||||
|
from matrix_bot.gitea import (
|
||||||
|
GiteaData,
|
||||||
|
PullState,
|
||||||
|
fetch_pull_requests,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .locked_open import read_locked_file, write_locked_file
|
||||||
|
from .matrix import MatrixData, get_room_members, send_message
|
||||||
|
|
||||||
|
|
||||||
|
async def message_callback(room: MatrixRoom, event: RoomMessageText) -> None:
    """Debug-log every text message the client receives (room, sender, body)."""
    sender_name = room.user_name(event.sender)
    log.debug(
        f"Message received in room {room.display_name}\n"
        f"{sender_name} | {event.body}"
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def review_requested_bot(
    client: AsyncClient,
    http: aiohttp.ClientSession,
    matrix: MatrixData,
    gitea: GiteaData,
    data_dir: Path,
) -> None:
    """
    Ping reviewers in the review room for pull requests that need review.

    Fetches recent pull requests from Gitea, compares each against the
    state persisted in `data_dir/last_review_run.json`, and for every
    mergeable PR with pending review requests that changed since the last
    run, sends a room message mentioning the matching room members. The
    updated state is written back at the end.
    """
    # If you made a new room and haven't joined as that user, you can use
    room: JoinResponse = await client.join(matrix.review_room)

    if not room.transport_response.ok:
        log.error("This can happen if the room doesn't exist or the bot isn't invited")
        raise Exception(f"Failed to join room {room}")

    # Get the members of the room
    users = await get_room_members(client, room)

    # Fetch the pull requests
    tstart = time.time()
    pulls = await fetch_pull_requests(gitea, http, limit=50, state=PullState.ALL)

    # Read the last updated pull request
    ping_hist_path = data_dir / "last_review_run.json"
    ping_hist = read_locked_file(ping_hist_path)

    # Check if the pull request is mergeable and needs review
    # and if the pull request is newer than the last updated pull request
    for pull in pulls:
        requested_reviewers = pull["requested_reviewers"]
        pid = str(pull["id"])
        if requested_reviewers and pull["mergeable"]:
            # Unseen PRs fall back to datetime.min so they always count
            # as updated and get recorded/pinged on first sight.
            last_time_updated = ping_hist.get(pid, {}).get(
                "updated_at", datetime.datetime.min.isoformat()
            )
            if ping_hist == {} or pull["updated_at"] > last_time_updated:
                ping_hist[pid] = pull
            else:
                continue

            # Check if the requested reviewers are in the room
            # NOTE(review): matches Matrix display names against Gitea
            # logins, lowercased — verify these actually coincide for all
            # reviewers, otherwise some pings are silently dropped.
            requested_reviewers = [r["login"].lower() for r in requested_reviewers]
            ping_users = []
            for user in users:
                if user.display_name.lower() in requested_reviewers:
                    ping_users.append(user.user_id)

            # Send a message to the room and mention the users
            log.info(f"Pull request {pull['title']} needs review")
            message = f"Review Requested:\n[{pull['title']}]({pull['html_url']})"
            await send_message(client, room, message, user_ids=ping_users)

    # Write the new last updated pull request
    write_locked_file(ping_hist_path, ping_hist)

    # Time taken
    tend = time.time()
    tdiff = round(tend - tstart)
    log.debug(f"Time taken: {tdiff}s")
|
59
pkgs/matrix-bot/pyproject.toml
Normal file
59
pkgs/matrix-bot/pyproject.toml
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "matrix-bot"
|
||||||
|
description = "matrix bot for release messages from git commits"
|
||||||
|
dynamic = ["version"]
|
||||||
|
scripts = { mbot = "matrix_bot:main" }
|
||||||
|
license = {text = "MIT"}
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://clan.lol/"
|
||||||
|
Documentation = "https://docs.clan.lol/"
|
||||||
|
Repository = "https://git.clan.lol/clan/clan-core"
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
exclude = ["result"]
|
||||||
|
|
||||||
|
[tool.setuptools.package-data]
|
||||||
|
matrix_bot = ["py.typed"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = "tests"
|
||||||
|
faulthandler_timeout = 60
|
||||||
|
log_level = "DEBUG"
|
||||||
|
log_format = "%(levelname)s: %(message)s\n %(pathname)s:%(lineno)d::%(funcName)s"
|
||||||
|
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --durations 5 --color=yes --new-first" # Add --pdb for debugging
|
||||||
|
norecursedirs = "tests/helpers"
|
||||||
|
markers = ["impure", "with_core"]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.11"
|
||||||
|
warn_redundant_casts = true
|
||||||
|
disallow_untyped_calls = true
|
||||||
|
disallow_untyped_defs = true
|
||||||
|
no_implicit_optional = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = "argcomplete.*"
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = "ipdb.*"
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = "pytest.*"
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = "setuptools.*"
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
target-version = "py311"
|
||||||
|
line-length = 88
|
||||||
|
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
|
||||||
|
lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003"]
|
30
pkgs/matrix-bot/shell.nix
Normal file
30
pkgs/matrix-bot/shell.nix
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
# Development shell for the matrix-bot package: the package's test
# dependencies plus common interactive Python tooling and ruff.
{
  matrix-bot,
  mkShell,
  ruff,
  python3,
}:
let
  # Test deps of the package, extended with dev-only Python extras.
  devshellTestDeps =
    matrix-bot.passthru.testDependencies
    ++ (with python3.pkgs; [
      rope
      setuptools
      wheel
      ipdb
      pip
    ]);
in
mkShell {
  buildInputs = [ ruff ] ++ devshellTestDeps;

  # Make `breakpoint()` drop into ipdb.
  PYTHONBREAKPOINT = "ipdb.set_trace";

  shellHook = ''
    export GIT_ROOT="$(git rev-parse --show-toplevel)"
    export PKG_ROOT="$GIT_ROOT/pkgs/matrix-bot"

    # Add clan command to PATH
    export PATH="$PKG_ROOT/bin":"$PATH"
  '';
}
|
1
sops/secrets/qubasas-openai-api-key/machines/web01
Symbolic link
1
sops/secrets/qubasas-openai-api-key/machines/web01
Symbolic link
|
@ -0,0 +1 @@
|
||||||
|
../../../machines/web01
|
24
sops/secrets/qubasas-openai-api-key/secret
Normal file
24
sops/secrets/qubasas-openai-api-key/secret
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
{
|
||||||
|
"data": "ENC[AES256_GCM,data:iJTjs8bG2GLGnGp/Hf4Egtorrk87rkgh9Yn+gPuWAJ61wIAtN3g9SU3vyYpvRrIqHVUyLObGbrWYi3Ol07M=,iv:YTOctq9aw4tc9xwoOO4UbR2cYPHV0ZmuE1FRWn13sgk=,tag:zU3HFqxwZcn/9S02bj3/fA==,type:str]",
|
||||||
|
"sops": {
|
||||||
|
"kms": null,
|
||||||
|
"gcp_kms": null,
|
||||||
|
"azure_kv": null,
|
||||||
|
"hc_vault": null,
|
||||||
|
"age": [
|
||||||
|
{
|
||||||
|
"recipient": "age17xuvz0fqtynzdmf8rfh4g3e46tx8w3mc6zgytrmuj5v9dhnldgxs7ue7ct",
|
||||||
|
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBUa1FIbWt2aHduZVBlcGpq\nOUsybTRmR1I4M1JZY3A4ZUFpZEN3dlZCTkZRCmdVajFUcDMyeEdJMThVZElJdmlJ\naEhvSk9sYThXdkxoaXVLem15dlJMcm8KLS0tIEJsdFY4L0M3Q2cwdzFOdy9LN0k3\nOEdCM09PUWlZbE91U2ZYNmVHeU43bUUKC+z+6XZCiVfwGQQCAHoB+WGE5Mm3qJZq\nuyD5r3Ra6MAvvwIhnqbwadRoxVH1HcdIB6hJsNREE/x6YNLxi3T7nw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"recipient": "age1zwte859d9nvg6wy5dugjkf38dqe8w8qkt2as7xcc5pw3285833xs797uan",
|
||||||
|
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFOHBub0J3U2FuOFNuS2xl\nRkExYVEvcmowWDlyUGlpYko3N1dIcmN5dmxJCkRtRm9qVVNQK3FQcTB1U3g3OHhS\ncU9XaHUxNWVlL2tpblpZUHF1UWt4am8KLS0tIGJOZkJ4eDJ6WWx1d2R1VG1qODZS\ncHhXMVhEUHdLZjIvNUYxRmduZkpjaU0KqZKUb9KYpSvwxaJRAbYhkuOdnzsU3p9Q\nU2WO5TIwS762yNqWTzyYdxb9YxvTOatW7uWTorRXZu1yqCTMTuq+1Q==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"lastmodified": "2024-07-03T09:47:40Z",
|
||||||
|
"mac": "ENC[AES256_GCM,data:dpdmkhedaqivzIlxhoWb+u77JmfWRo94iWDolAa9UKvnjBo1QE5sHbqWasCH81wjO0wPBPRUqnj9JQ7kG9AFp24Fad+gAp74Gwx5M/PSx1dsd6xkcxt6PJ8sFXGb0H3lYduCaNfDGgsJTVoDcbk8rgYzjo5+mxs2pqrrn10t4iU=,iv:MGMIq2rF4+hr89/dppi2JDVbpAShscYTMM9viHPepIY=,tag:Dj9B6qvAkmiUmgRvZ6B94Q==,type:str]",
|
||||||
|
"pgp": null,
|
||||||
|
"unencrypted_suffix": "_unencrypted",
|
||||||
|
"version": "3.8.1"
|
||||||
|
}
|
||||||
|
}
|
1
sops/secrets/qubasas-openai-api-key/users/qubasa
Symbolic link
1
sops/secrets/qubasas-openai-api-key/users/qubasa
Symbolic link
|
@ -0,0 +1 @@
|
||||||
|
../../../users/qubasa
|
Loading…
Reference in New Issue
Block a user