Merge pull request 'Adding state directory, rearchitecting API endpoints' (#425) from Qubasa-main into main
Some checks failed
checks / test (push) Successful in 33s
assets1 / test (push) Successful in 36s
checks-impure / test (push) Failing after 1m9s

Reviewed-on: #425
Luis Hebendanz 2023-10-29 18:54:34 +00:00
commit 845d743d99
96 changed files with 4897 additions and 3665 deletions

.gitignore (vendored), 2 changes
View File

@ -1,4 +1,6 @@
.direnv
.coverage.*
**/qubeclan
**/testdir
democlan
result*

View File

@ -11,9 +11,7 @@ $ nix shell git+https://git.clan.lol/clan/clan-core
2. Then use the following commands to initialize a new clan-flake:
```shellSession
$ mkdir ./my-flake
$ cd ./my-flake
$ clan flake create .
$ clan flake create my-clan
```
This action will generate two primary files: `flake.nix` and `.clan-flake`.
@ -93,6 +91,7 @@ Absolutely, let's break down the migration step by step, explaining each action
# this needs to point at the repository root
directory = self;
specialArgs = {};
clanName = "NEEDS_TO_BE_UNIQUE"; # TODO: Changeme
machines = {
example-desktop = {
nixpkgs.hostPlatform = "x86_64-linux";
@ -109,6 +108,7 @@ Absolutely, let's break down the migration step by step, explaining each action
- Inside `machines`, a new machine configuration is defined (in this case, `example-desktop`).
- Inside `example-desktop`, which is the target machine's hostname, `nixpkgs.hostPlatform` specifies the host platform as `x86_64-linux`.
- `clanInternals`: Is required to enable evaluation of the secret generation/upload script on every architecture
- `clanName`: Is required and needs to be globally unique; otherwise clan names will clash.
4. **Rebuild and Switch**: Rebuild your NixOS configuration using the updated flake:

View File

@ -2,6 +2,7 @@
{ directory # The directory containing the machines subdirectory
, specialArgs ? { } # Extra arguments to pass to nixosSystem i.e. useful to make self available
, machines ? { } # allows to include machine-specific modules i.e. machines.${name} = { ... }
, clanName # Needs to be (globally) unique, as this determines the folder name where the flake gets downloaded to.
}:
let
machinesDirs = lib.optionalAttrs (builtins.pathExists "${directory}/machines") (builtins.readDir (directory + /machines));
@ -73,6 +74,7 @@ in
clanInternals = {
machines = configsPerSystem;
clanName = clanName;
all-machines-json = lib.mapAttrs
(system: configs: nixpkgs.legacyPackages.${system}.writers.writeJSON "machines.json" (lib.mapAttrs (_: m: m.config.system.clan.deployment.data) configs))
configsPerSystem;

View File

@ -30,16 +30,20 @@ in
generateSecrets = pkgs.writeScript "generate-secrets" ''
#!${pkgs.python3}/bin/python
import json
import sys
from clan_cli.secrets.sops_generate import generate_secrets_from_nix
args = json.loads(${builtins.toJSON (builtins.toJSON { machine_name = config.clanCore.machineName; secret_submodules = config.clanCore.secrets; })})
args["flake_name"] = sys.argv[1]
generate_secrets_from_nix(**args)
'';
uploadSecrets = pkgs.writeScript "upload-secrets" ''
#!${pkgs.python3}/bin/python
import json
import sys
from clan_cli.secrets.sops_generate import upload_age_key_from_nix
# the second toJSON is needed to escape the string for the python
args = json.loads(${builtins.toJSON (builtins.toJSON { machine_name = config.clanCore.machineName; })})
args["flake_name"] = sys.argv[1]
upload_age_key_from_nix(**args)
'';
};

View File

@ -2,6 +2,7 @@
source_up
if type nix_direnv_watch_file &>/dev/null; then
nix_direnv_watch_file flake-module.nix
nix_direnv_watch_file default.nix

View File

@ -15,4 +15,8 @@
"search.exclude": {
"**/.direnv": true
},
"python.linting.mypyPath": "mypy",
"python.linting.mypyEnabled": true,
"python.linting.enabled": true,
"python.defaultInterpreterPath": "python"
}

View File

@ -60,11 +60,31 @@ By default tests run in parallel using pytest-parallel.
However, pytest-parallel breaks `breakpoint()`. To disable it, use this:
```console
pytest --workers "" -s
pytest -n0 -s
```
You can also run a single test like this:
```console
pytest --workers "" -s tests/test_secrets_cli.py::test_users
pytest -n0 -s tests/test_secrets_cli.py::test_users
```
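As an illustration (a hypothetical test, not part of the repository docs), `breakpoint()` only drops into the debugger when pytest runs single-process with output capturing disabled, i.e. with `-n0 -s`:
```python
# tests/test_example.py -- hypothetical file, shown only to illustrate -n0 -s.
def test_debug_me() -> None:
    value = {"user": "alice"}
    # Reachable only via: pytest -n0 -s tests/test_example.py::test_debug_me
    breakpoint()  # inspect `value` in pdb, then `c` to continue
    assert value["user"] == "alice"
```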
## Run tests in a nix container
Run all impure checks
```console
nix run .#impure-checks
```
Run all checks
```console
nix flake check
```
## Debugging functions
Debugging functions can be found under `src/debug.py`.
Particularly interesting is the function `breakpoint_shell()`, which drops you into a shell with the test environment loaded.
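A minimal usage sketch (the test is hypothetical, and the import path `clan_cli.debug` is an assumption based on the file location in this PR):
```python
# Hypothetical test using the new debugging helper; the import path
# `clan_cli.debug` is an assumption and may differ in the repository.
from pathlib import Path

from clan_cli.debug import breakpoint_shell


def test_something(tmp_path: Path) -> None:
    # ... arrange the scenario you want to inspect ...
    # Spawns an xterm running zsh in tmp_path with the current environment,
    # then drops this test process into ipdb; quit the debugger to resume.
    breakpoint_shell(work_dir=tmp_path)
```
Since it opens a graphical terminal and an interactive debugger, this presumably needs a reachable display and, like `breakpoint()`, a single-process pytest run with `-s`.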

View File

@ -1,11 +1,15 @@
import argparse
import logging
import sys
from types import ModuleType
from typing import Optional
from . import config, flake, join, machines, secrets, vms, webui
from . import config, flakes, join, machines, secrets, vms, webui
from .custom_logger import setup_logging
from .ssh import cli as ssh_cli
log = logging.getLogger(__name__)
argcomplete: Optional[ModuleType] = None
try:
import argcomplete # type: ignore[no-redef]
@ -25,9 +29,9 @@ def create_parser(prog: Optional[str] = None) -> argparse.ArgumentParser:
subparsers = parser.add_subparsers()
parser_flake = subparsers.add_parser(
"flake", help="create a clan flake inside the current directory"
"flakes", help="create a clan flake inside the current directory"
)
flake.register_parser(parser_flake)
flakes.register_parser(parser_flake)
parser_join = subparsers.add_parser("join", help="join a remote clan")
join.register_parser(parser_join)
@ -65,6 +69,10 @@ def main() -> None:
parser = create_parser()
args = parser.parse_args()
if args.debug:
setup_logging(logging.DEBUG)
log.debug("Debug log activated")
if not hasattr(args, "func"):
return

View File

@ -2,15 +2,21 @@ import asyncio
import logging
import shlex
from pathlib import Path
from typing import Optional, Tuple
from typing import Any, Callable, Coroutine, Dict, NamedTuple, Optional
from .custom_logger import get_caller
from .errors import ClanError
log = logging.getLogger(__name__)
async def run(cmd: list[str], cwd: Optional[Path] = None) -> Tuple[bytes, bytes]:
log.debug(f"$: {shlex.join(cmd)}")
class CmdOut(NamedTuple):
stdout: str
stderr: str
cwd: Optional[Path] = None
async def run(cmd: list[str], cwd: Optional[Path] = None) -> CmdOut:
cwd_res = None
if cwd is not None:
if not cwd.exists():
@ -18,7 +24,9 @@ async def run(cmd: list[str], cwd: Optional[Path] = None) -> Tuple[bytes, bytes]
if not cwd.is_dir():
raise ClanError(f"Working directory {cwd} is not a directory")
cwd_res = cwd.resolve()
log.debug(f"Working directory: {cwd_res}")
log.debug(
f"Command: {shlex.join(cmd)}\nWorking directory: {cwd_res}\nCaller : {get_caller()}"
)
proc = await asyncio.create_subprocess_exec(
*cmd,
stdout=asyncio.subprocess.PIPE,
@ -31,9 +39,30 @@ async def run(cmd: list[str], cwd: Optional[Path] = None) -> Tuple[bytes, bytes]
raise ClanError(
f"""
command: {shlex.join(cmd)}
working directory: {cwd_res}
exit code: {proc.returncode}
command output:
stderr:
{stderr.decode("utf-8")}
stdout:
{stdout.decode("utf-8")}
"""
)
return stdout, stderr
return CmdOut(stdout.decode("utf-8"), stderr.decode("utf-8"), cwd=cwd)
def runforcli(
func: Callable[..., Coroutine[Any, Any, Dict[str, CmdOut]]], *args: Any
) -> None:
try:
res = asyncio.run(func(*args))
for i in res.items():
name, out = i
if out.stderr:
print(f"{name}: {out.stderr}", end="")
if out.stdout:
print(f"{name}: {out.stdout}", end="")
except ClanError as e:
print(e)
exit(1)

View File

@ -1,20 +1,20 @@
import json
import subprocess
from pathlib import Path
from typing import Optional
from clan_cli.dirs import get_clan_flake_toplevel
from clan_cli.nix import nix_eval
from .dirs import specific_flake_dir
from .types import FlakeName
def get_clan_module_names(
flake: Optional[Path] = None,
flake_name: FlakeName,
) -> tuple[list[str], Optional[str]]:
"""
Get the list of clan modules from the clan-core flake input
"""
if flake is None:
flake = get_clan_flake_toplevel()
flake = specific_flake_dir(flake_name)
proc = subprocess.run(
nix_eval(
[

View File

@ -1,6 +1,7 @@
# !/usr/bin/env python3
import argparse
import json
import logging
import os
import re
import shlex
@ -9,14 +10,16 @@ import sys
from pathlib import Path
from typing import Any, Optional, Tuple, get_origin
from clan_cli.dirs import get_clan_flake_toplevel
from clan_cli.dirs import machine_settings_file, specific_flake_dir
from clan_cli.errors import ClanError
from clan_cli.git import commit_file
from clan_cli.machines.folders import machine_settings_file
from clan_cli.nix import nix_eval
from clan_cli.types import FlakeName
script_dir = Path(__file__).parent
log = logging.getLogger(__name__)
# nixos option type description to python type
def map_type(type: str) -> Any:
@ -104,8 +107,10 @@ def cast(value: Any, type: Any, opt_description: str) -> Any:
)
def options_for_machine(machine_name: str, show_trace: bool = False) -> dict:
clan_dir = get_clan_flake_toplevel()
def options_for_machine(
flake_name: FlakeName, machine_name: str, show_trace: bool = False
) -> dict:
clan_dir = specific_flake_dir(flake_name)
flags = []
if show_trace:
flags.append("--show-trace")
@ -126,9 +131,9 @@ def options_for_machine(machine_name: str, show_trace: bool = False) -> dict:
def read_machine_option_value(
machine_name: str, option: str, show_trace: bool = False
flake_name: FlakeName, machine_name: str, option: str, show_trace: bool = False
) -> str:
clan_dir = get_clan_flake_toplevel()
clan_dir = specific_flake_dir(flake_name)
# use nix eval to read from .#nixosConfigurations.default.config.{option}
# this will give us the evaluated config with the options attribute
cmd = nix_eval(
@ -154,6 +159,43 @@ def read_machine_option_value(
return out
def get_or_set_option(args: argparse.Namespace) -> None:
if args.value == []:
print(
read_machine_option_value(
args.flake, args.machine, args.option, args.show_trace
)
)
else:
# load options
if args.options_file is None:
options = options_for_machine(
args.flake, machine_name=args.machine, show_trace=args.show_trace
)
else:
with open(args.options_file) as f:
options = json.load(f)
# compute settings json file location
if args.settings_file is None:
settings_file = machine_settings_file(args.flake, args.machine)
else:
settings_file = args.settings_file
# set the option with the given value
set_option(
flake_name=args.flake,
option=args.option,
value=args.value,
options=options,
settings_file=settings_file,
option_description=args.option,
show_trace=args.show_trace,
)
if not args.quiet:
new_value = read_machine_option_value(args.flake, args.machine, args.option)
print(f"New Value for {args.option}:")
print(new_value)
def find_option(
option: str, value: Any, options: dict, option_description: Optional[str] = None
) -> Tuple[str, Any]:
@ -206,6 +248,7 @@ def find_option(
def set_option(
flake_name: FlakeName,
option: str,
value: Any,
options: dict,
@ -247,6 +290,7 @@ def set_option(
current_config = json.load(f)
else:
current_config = {}
# merge and save the new config file
new_config = merge(current_config, result)
settings_file.parent.mkdir(parents=True, exist_ok=True)
@ -254,41 +298,12 @@ def set_option(
json.dump(new_config, f, indent=2)
print(file=f) # add newline at the end of the file to make git happy
if settings_file.resolve().is_relative_to(get_clan_flake_toplevel()):
commit_file(settings_file, commit_message=f"Set option {option_description}")
def get_or_set_option(args: argparse.Namespace) -> None:
if args.value == []:
print(read_machine_option_value(args.machine, args.option, args.show_trace))
else:
# load options
if args.options_file is None:
options = options_for_machine(
machine_name=args.machine, show_trace=args.show_trace
)
else:
with open(args.options_file) as f:
options = json.load(f)
# compute settings json file location
if args.settings_file is None:
get_clan_flake_toplevel()
settings_file = machine_settings_file(args.machine)
else:
settings_file = args.settings_file
# set the option with the given value
set_option(
option=args.option,
value=args.value,
options=options,
settings_file=settings_file,
option_description=args.option,
show_trace=args.show_trace,
if settings_file.resolve().is_relative_to(specific_flake_dir(flake_name)):
commit_file(
settings_file,
repo_dir=specific_flake_dir(flake_name),
commit_message=f"Set option {option_description}",
)
if not args.quiet:
new_value = read_machine_option_value(args.machine, args.option)
print(f"New Value for {args.option}:")
print(new_value)
# takes a (sub)parser and configures it
@ -302,7 +317,6 @@ def register_parser(
# inject callback function to process the input later
parser.set_defaults(func=get_or_set_option)
parser.add_argument(
"--machine",
"-m",
@ -346,6 +360,11 @@ def register_parser(
nargs="*",
help="option value to set (if omitted, the current value is printed)",
)
parser.add_argument(
"flake",
type=str,
help="name of the flake to set machine options for",
)
def main(argv: Optional[list[str]] = None) -> None:

View File

@ -8,38 +8,48 @@ from typing import Optional
from fastapi import HTTPException
from clan_cli.dirs import get_clan_flake_toplevel, nixpkgs_source
from clan_cli.git import commit_file, find_git_repo_root
from clan_cli.machines.folders import machine_folder, machine_settings_file
from clan_cli.dirs import (
machine_settings_file,
nixpkgs_source,
specific_flake_dir,
specific_machine_dir,
)
from clan_cli.git import commit_file
from clan_cli.nix import nix_eval
from ..types import FlakeName
def verify_machine_config(
machine_name: str, config: Optional[dict] = None, flake: Optional[Path] = None
flake_name: FlakeName,
machine_name: str,
config: Optional[dict] = None,
flake: Optional[Path] = None,
) -> Optional[str]:
"""
Verify that the machine evaluates successfully
Returns a tuple of (success, error_message)
"""
if config is None:
config = config_for_machine(machine_name)
if flake is None:
flake = get_clan_flake_toplevel()
with NamedTemporaryFile(mode="w") as clan_machine_settings_file:
config = config_for_machine(flake_name, machine_name)
flake = specific_flake_dir(flake_name)
with NamedTemporaryFile(mode="w", dir=flake) as clan_machine_settings_file:
json.dump(config, clan_machine_settings_file, indent=2)
clan_machine_settings_file.seek(0)
env = os.environ.copy()
env["CLAN_MACHINE_SETTINGS_FILE"] = clan_machine_settings_file.name
cmd = nix_eval(
flags=[
"--impure",
"--show-trace",
"--show-trace",
"--impure", # needed to access CLAN_MACHINE_SETTINGS_FILE
f".#nixosConfigurations.{machine_name}.config.system.build.toplevel.outPath",
],
)
# repro_env_break(work_dir=flake, env=env, cmd=cmd)
proc = subprocess.run(
nix_eval(
flags=[
"--impure",
"--show-trace",
"--show-trace",
"--impure", # needed to access CLAN_MACHINE_SETTINGS_FILE
f".#nixosConfigurations.{machine_name}.config.system.build.toplevel.outPath",
],
),
cmd,
capture_output=True,
text=True,
cwd=flake,
@ -50,44 +60,45 @@ def verify_machine_config(
return None
def config_for_machine(machine_name: str) -> dict:
def config_for_machine(flake_name: FlakeName, machine_name: str) -> dict:
# read the config from a json file located at {flake}/machines/{machine_name}/settings.json
if not machine_folder(machine_name).exists():
if not specific_machine_dir(flake_name, machine_name).exists():
raise HTTPException(
status_code=404,
detail=f"Machine {machine_name} not found. Create the machine first`",
)
settings_path = machine_settings_file(machine_name)
settings_path = machine_settings_file(flake_name, machine_name)
if not settings_path.exists():
return {}
with open(settings_path) as f:
return json.load(f)
def set_config_for_machine(machine_name: str, config: dict) -> None:
def set_config_for_machine(
flake_name: FlakeName, machine_name: str, config: dict
) -> None:
# write the config to a json file located at {flake}/machines/{machine_name}/settings.json
if not machine_folder(machine_name).exists():
if not specific_machine_dir(flake_name, machine_name).exists():
raise HTTPException(
status_code=404,
detail=f"Machine {machine_name} not found. Create the machine first`",
)
settings_path = machine_settings_file(machine_name)
settings_path = machine_settings_file(flake_name, machine_name)
settings_path.parent.mkdir(parents=True, exist_ok=True)
with open(settings_path, "w") as f:
json.dump(config, f)
repo_dir = find_git_repo_root()
repo_dir = specific_flake_dir(flake_name)
if repo_dir is not None:
commit_file(settings_path, repo_dir)
def schema_for_machine(
machine_name: str, config: Optional[dict] = None, flake: Optional[Path] = None
flake_name: FlakeName, machine_name: str, config: Optional[dict] = None
) -> dict:
if flake is None:
flake = get_clan_flake_toplevel()
flake = specific_flake_dir(flake_name)
# use nix eval to lib.evalModules .#nixosConfigurations.<machine_name>.options.clan
with NamedTemporaryFile(mode="w") as clan_machine_settings_file:
with NamedTemporaryFile(mode="w", dir=flake) as clan_machine_settings_file:
env = os.environ.copy()
inject_config_flags = []
if config is not None:

View File

@ -1,5 +1,7 @@
import inspect
import logging
from typing import Any
from pathlib import Path
from typing import Any, Callable
grey = "\x1b[38;20m"
yellow = "\x1b[33;20m"
@ -9,11 +11,20 @@ green = "\u001b[32m"
blue = "\u001b[34m"
def get_formatter(color: str) -> logging.Formatter:
reset = "\x1b[0m"
return logging.Formatter(
f"{color}%(levelname)s{reset}:(%(filename)s:%(lineno)d): %(message)s"
)
def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.Formatter]:
def myformatter(
record: logging.LogRecord, with_location: bool
) -> logging.Formatter:
reset = "\x1b[0m"
filepath = Path(record.pathname).resolve()
if not with_location:
return logging.Formatter(f"{color}%(levelname)s{reset}: %(message)s")
return logging.Formatter(
f"{color}%(levelname)s{reset}: %(message)s\n {filepath}:%(lineno)d::%(funcName)s\n"
)
return myformatter
FORMATTER = {
@ -26,12 +37,36 @@ FORMATTER = {
class CustomFormatter(logging.Formatter):
def format(self, record: Any) -> str:
return FORMATTER[record.levelno].format(record)
def format(self, record: logging.LogRecord) -> str:
return FORMATTER[record.levelno](record, True).format(record)
def register(level: Any) -> None:
ch = logging.StreamHandler()
ch.setLevel(level)
ch.setFormatter(CustomFormatter())
logging.basicConfig(level=level, handlers=[ch])
class ThreadFormatter(logging.Formatter):
def format(self, record: logging.LogRecord) -> str:
return FORMATTER[record.levelno](record, False).format(record)
def get_caller() -> str:
frame = inspect.currentframe()
if frame is None:
return "unknown"
caller_frame = frame.f_back
if caller_frame is None:
return "unknown"
caller_frame = caller_frame.f_back
if caller_frame is None:
return "unknown"
frame_info = inspect.getframeinfo(caller_frame)
ret = f"{frame_info.filename}:{frame_info.lineno}::{frame_info.function}"
return ret
def setup_logging(level: Any) -> None:
handler = logging.StreamHandler()
handler.setLevel(level)
handler.setFormatter(CustomFormatter())
logger = logging.getLogger("registerHandler")
logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("httpx").setLevel(level=logging.WARNING)
logger.addHandler(handler)
# logging.basicConfig(level=level, handlers=[handler])

View File

@ -0,0 +1,104 @@
import logging
import multiprocessing as mp
import os
import shlex
import stat
import subprocess
import sys
import time
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional
import ipdb
log = logging.getLogger(__name__)
def command_exec(cmd: List[str], work_dir: Path, env: Dict[str, str]) -> None:
subprocess.run(cmd, check=True, env=env, cwd=work_dir.resolve())
def block_for_input() -> None:
log = logging.getLogger(__name__)
procid = os.getpid()
f"echo 'continue' > /sys/proc/{procid}/fd/{sys.stdin.fileno()}"
while True:
log.warning("Use sudo cntr attach <pid> to attach to the container.")
# log.warning("Resume execution by executing '%s' in cntr shell", command)
time.sleep(1)
log.info("Resuming execution.")
def breakpoint_container(
work_dir: Path,
env: Optional[Dict[str, str]] = None,
cmd: Optional[List[str]] = None,
) -> None:
if env is None:
env = os.environ.copy()
else:
env = env.copy()
dump_env(env, work_dir / "env.sh")
if cmd is not None:
log.debug("Command: %s", shlex.join(cmd))
mycommand = shlex.join(cmd)
write_command(mycommand, work_dir / "cmd.sh")
block_for_input()
def breakpoint_shell(
work_dir: Path,
env: Optional[Dict[str, str]] = None,
cmd: Optional[List[str]] = None,
) -> None:
if env is None:
env = os.environ.copy()
else:
env = env.copy()
# Cmd appending
args = ["xterm", "-e", "zsh", "-df"]
if cmd is not None:
mycommand = shlex.join(cmd)
write_command(mycommand, work_dir / "cmd.sh")
proc = spawn_process(func=command_exec, cmd=args, work_dir=work_dir, env=env)
try:
ipdb.set_trace()
finally:
proc.terminate()
def write_command(command: str, loc: Path) -> None:
log.info("Dumping command to %s", loc)
with open(loc, "w") as f:
f.write("#!/usr/bin/env bash\n")
f.write(command)
st = os.stat(loc)
os.chmod(loc, st.st_mode | stat.S_IEXEC)
def spawn_process(func: Callable, **kwargs: Any) -> mp.Process:
if mp.get_start_method(allow_none=True) is None:
mp.set_start_method(method="spawn")
proc = mp.Process(target=func, name="python-debug-process", kwargs=kwargs)
proc.start()
return proc
def dump_env(env: Dict[str, str], loc: Path) -> None:
cenv = env.copy()
log.info("Dumping environment variables to %s", loc)
with open(loc, "w") as f:
f.write("#!/usr/bin/env bash\n")
for k, v in cenv.items():
if v.count("\n") > 0 or v.count('"') > 0 or v.count("'") > 0:
continue
f.write(f"export {k}='{v}'\n")
st = os.stat(loc)
os.chmod(loc, st.st_mode | stat.S_IEXEC)

View File

@ -1,32 +1,35 @@
import logging
import os
import sys
from pathlib import Path
from typing import Optional
from .errors import ClanError
from .types import FlakeName
log = logging.getLogger(__name__)
def get_clan_flake_toplevel() -> Path:
return find_toplevel([".clan-flake", ".git", ".hg", ".svn", "flake.nix"])
# def _get_clan_flake_toplevel() -> Path:
# return find_toplevel([".clan-flake", ".git", ".hg", ".svn", "flake.nix"])
def find_git_repo_root() -> Optional[Path]:
try:
return find_toplevel([".git"])
except ClanError:
return None
# def find_git_repo_root() -> Optional[Path]:
# try:
# return find_toplevel([".git"])
# except ClanError:
# return None
def find_toplevel(top_level_files: list[str]) -> Path:
"""Returns the path to the toplevel of the clan flake"""
for project_file in top_level_files:
initial_path = Path(os.getcwd())
path = Path(initial_path)
while path.parent != path:
if (path / project_file).exists():
return path
path = path.parent
raise ClanError("Could not find clan flake toplevel directory")
# def find_toplevel(top_level_files: list[str]) -> Path:
# """Returns the path to the toplevel of the clan flake"""
# for project_file in top_level_files:
# initial_path = Path(os.getcwd())
# path = Path(initial_path)
# while path.parent != path:
# if (path / project_file).exists():
# return path
# path = path.parent
# raise ClanError("Could not find clan flake toplevel directory")
def user_config_dir() -> Path:
@ -38,6 +41,58 @@ def user_config_dir() -> Path:
return Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")))
def user_data_dir() -> Path:
if sys.platform == "win32":
return Path(os.getenv("APPDATA", os.path.expanduser("~\\AppData\\Roaming\\")))
elif sys.platform == "darwin":
return Path(os.path.expanduser("~/Library/Application Support/"))
else:
return Path(os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/state")))
def clan_data_dir() -> Path:
path = user_data_dir() / "clan"
if not path.exists():
log.debug(f"Creating path with parents {path}")
path.mkdir(parents=True)
return path.resolve()
def clan_config_dir() -> Path:
path = user_config_dir() / "clan"
if not path.exists():
log.debug(f"Creating path with parents {path}")
path.mkdir(parents=True)
return path.resolve()
def clan_flakes_dir() -> Path:
path = clan_data_dir() / "flake"
if not path.exists():
log.debug(f"Creating path with parents {path}")
path.mkdir(parents=True)
return path.resolve()
def specific_flake_dir(flake_name: FlakeName) -> Path:
flake_dir = clan_flakes_dir() / flake_name
if not flake_dir.exists():
raise ClanError(f"Flake '{flake_name}' does not exist")
return flake_dir
def machines_dir(flake_name: FlakeName) -> Path:
return specific_flake_dir(flake_name) / "machines"
def specific_machine_dir(flake_name: FlakeName, machine: str) -> Path:
return machines_dir(flake_name) / machine
def machine_settings_file(flake_name: FlakeName, machine: str) -> Path:
return specific_machine_dir(flake_name, machine) / "settings.json"
def module_root() -> Path:
return Path(__file__).parent

View File

@ -1,47 +0,0 @@
# !/usr/bin/env python3
import argparse
import asyncio
from pathlib import Path
from typing import Tuple
from ..async_cmd import run
from ..errors import ClanError
from ..nix import nix_command
DEFAULT_URL = "git+https://git.clan.lol/clan/clan-core#new-clan"
async def create_flake(directory: Path, url: str) -> Tuple[bytes, bytes]:
if not directory.exists():
directory.mkdir()
flake_command = nix_command(
[
"flake",
"init",
"-t",
url,
]
)
stdout, stderr = await run(flake_command, directory)
return stdout, stderr
def create_flake_command(args: argparse.Namespace) -> None:
try:
stdout, stderr = asyncio.run(create_flake(args.directory, DEFAULT_URL))
print(stderr.decode("utf-8"), end="")
print(stdout.decode("utf-8"), end="")
except ClanError as e:
print(e)
exit(1)
# takes a (sub)parser and configures it
def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"directory",
type=Path,
help="output directory for the flake",
)
# parser.add_argument("name", type=str, help="name of the flake")
parser.set_defaults(func=create_flake_command)

View File

@ -2,6 +2,7 @@
import argparse
from .create import register_create_parser
from .list import register_list_parser
# takes a (sub)parser and configures it
@ -12,5 +13,8 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
help="the command to run",
required=True,
)
update_parser = subparser.add_parser("create", help="Create a clan flake")
register_create_parser(update_parser)
create_parser = subparser.add_parser("create", help="Create a clan flake")
register_create_parser(create_parser)
list_parser = subparser.add_parser("list", help="List clan flakes")
register_list_parser(list_parser)

View File

@ -0,0 +1,84 @@
# !/usr/bin/env python3
import argparse
from pathlib import Path
from typing import Dict
from pydantic import AnyUrl
from pydantic.tools import parse_obj_as
from ..async_cmd import CmdOut, run, runforcli
from ..dirs import clan_flakes_dir
from ..errors import ClanError
from ..nix import nix_command, nix_shell
DEFAULT_URL: AnyUrl = parse_obj_as(
AnyUrl,
"git+https://git.clan.lol/clan/clan-core?ref=Qubasa-main#new-clan", # TODO: Change me back to main branch
)
async def create_flake(directory: Path, url: AnyUrl) -> Dict[str, CmdOut]:
if not directory.exists():
directory.mkdir()
else:
raise ClanError(f"Flake at '{directory}' already exists")
response = {}
command = nix_command(
[
"flake",
"init",
"-t",
url,
]
)
out = await run(command, cwd=directory)
response["flake init"] = out
command = nix_shell(["git"], ["git", "init"])
out = await run(command, cwd=directory)
response["git init"] = out
command = nix_shell(["git"], ["git", "add", "."])
out = await run(command, cwd=directory)
response["git add"] = out
# command = nix_shell(["git"], ["git", "config", "init.defaultBranch", "main"])
# out = await run(command, cwd=directory)
# response["git config"] = out
command = nix_shell(["git"], ["git", "config", "user.name", "clan-tool"])
out = await run(command, cwd=directory)
response["git config"] = out
command = nix_shell(["git"], ["git", "config", "user.email", "clan@example.com"])
out = await run(command, cwd=directory)
response["git config"] = out
# TODO: Find out why this fails on Johannes machine
# command = nix_shell(["git"], ["git", "commit", "-a", "-m", "Initial commit"])
# out = await run(command, cwd=directory)
# response["git commit"] = out
return response
def create_flake_command(args: argparse.Namespace) -> None:
flake_dir = clan_flakes_dir() / args.name
runforcli(create_flake, flake_dir, args.url)
# takes a (sub)parser and configures it
def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"name",
type=str,
help="name for the flake",
)
parser.add_argument(
"--url",
type=str,
help="url for the flake",
default=DEFAULT_URL,
)
# parser.add_argument("name", type=str, help="name of the flake")
parser.set_defaults(func=create_flake_command)

View File

@ -0,0 +1,27 @@
import argparse
import logging
import os
from ..dirs import clan_flakes_dir
log = logging.getLogger(__name__)
def list_flakes() -> list[str]:
path = clan_flakes_dir()
log.debug(f"Listing machines in {path}")
if not path.exists():
return []
objs: list[str] = []
for f in os.listdir(path):
objs.append(f)
return objs
def list_command(args: argparse.Namespace) -> None:
for flake in list_flakes():
print(flake)
def register_list_parser(parser: argparse.ArgumentParser) -> None:
parser.set_defaults(func=list_command)

View File

@ -3,7 +3,7 @@ import subprocess
from pathlib import Path
from typing import Optional
from clan_cli.dirs import find_git_repo_root
# from clan_cli.dirs import find_git_repo_root
from clan_cli.errors import ClanError
from clan_cli.nix import nix_shell
@ -11,13 +11,9 @@ from clan_cli.nix import nix_shell
# generic vcs agnostic commit function
def commit_file(
file_path: Path,
repo_dir: Optional[Path] = None,
repo_dir: Path,
commit_message: Optional[str] = None,
) -> None:
if repo_dir is None:
repo_dir = find_git_repo_root()
if repo_dir is None:
return
# check that the file is in the git repository and exists
if not Path(file_path).resolve().is_relative_to(repo_dir.resolve()):
raise ClanError(f"File {file_path} is not in the git repository {repo_dir}")

View File

@ -23,8 +23,8 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
create_parser = subparser.add_parser("create", help="Create a machine")
register_create_parser(create_parser)
remove_parser = subparser.add_parser("remove", help="Remove a machine")
register_delete_parser(remove_parser)
delete_parser = subparser.add_parser("delete", help="Delete a machine")
register_delete_parser(delete_parser)
list_parser = subparser.add_parser("list", help="List machines")
register_list_parser(list_parser)

View File

@ -1,20 +1,53 @@
import argparse
import logging
from typing import Dict
from .folders import machine_folder
from ..async_cmd import CmdOut, run, runforcli
from ..dirs import specific_flake_dir, specific_machine_dir
from ..errors import ClanError
from ..nix import nix_shell
from ..types import FlakeName
log = logging.getLogger(__name__)
def create_machine(name: str) -> None:
folder = machine_folder(name)
async def create_machine(flake_name: FlakeName, machine_name: str) -> Dict[str, CmdOut]:
folder = specific_machine_dir(flake_name, machine_name)
if folder.exists():
raise ClanError(f"Machine '{machine_name}' already exists")
folder.mkdir(parents=True, exist_ok=True)
# create empty settings.json file inside the folder
with open(folder / "settings.json", "w") as f:
f.write("{}")
response = {}
out = await run(nix_shell(["git"], ["git", "add", str(folder)]), cwd=folder)
response["git add"] = out
out = await run(
nix_shell(
["git"],
["git", "commit", "-m", f"Added machine {machine_name}", str(folder)],
),
cwd=folder,
)
response["git commit"] = out
return response
def create_command(args: argparse.Namespace) -> None:
create_machine(args.host)
try:
flake_dir = specific_flake_dir(args.flake)
runforcli(create_machine, flake_dir, args.machine)
except ClanError as e:
print(e)
def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("host", type=str)
parser.add_argument("machine", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=create_command)

View File

@ -1,12 +1,12 @@
import argparse
import shutil
from ..dirs import specific_machine_dir
from ..errors import ClanError
from .folders import machine_folder
def delete_command(args: argparse.Namespace) -> None:
folder = machine_folder(args.host)
folder = specific_machine_dir(args.flake, args.host)
if folder.exists():
shutil.rmtree(folder)
else:
@ -15,4 +15,9 @@ def delete_command(args: argparse.Namespace) -> None:
def register_delete_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("host", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=delete_command)

View File

@ -1,9 +1,10 @@
from .folders import machine_folder
from ..dirs import specific_machine_dir
from ..types import FlakeName
def machine_has_fact(machine: str, fact: str) -> bool:
return (machine_folder(machine) / "facts" / fact).exists()
def machine_has_fact(flake_name: FlakeName, machine: str, fact: str) -> bool:
return (specific_machine_dir(flake_name, machine) / "facts" / fact).exists()
def machine_get_fact(machine: str, fact: str) -> str:
return (machine_folder(machine) / "facts" / fact).read_text()
def machine_get_fact(flake_name: FlakeName, machine: str, fact: str) -> str:
return (specific_machine_dir(flake_name, machine) / "facts" / fact).read_text()

View File

@ -1,15 +0,0 @@
from pathlib import Path
from ..dirs import get_clan_flake_toplevel
def machines_folder() -> Path:
return get_clan_flake_toplevel() / "machines"
def machine_folder(machine: str) -> Path:
return machines_folder() / machine
def machine_settings_file(machine: str) -> Path:
return machine_folder(machine) / "settings.json"

View File

@ -3,18 +3,20 @@ import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_shell
from ..secrets.generate import generate_secrets
from ..types import FlakeName
def install_nixos(machine: Machine) -> None:
def install_nixos(machine: Machine, flake_name: FlakeName) -> None:
h = machine.host
target_host = f"{h.user or 'root'}@{h.host}"
flake_attr = h.meta.get("flake_attr", "")
generate_secrets(machine)
generate_secrets(machine, flake_name)
with TemporaryDirectory() as tmpdir_:
tmpdir = Path(tmpdir_)
@ -26,7 +28,7 @@ def install_nixos(machine: Machine) -> None:
[
"nixos-anywhere",
"-f",
f"{machine.clan_dir}#{flake_attr}",
f"{machine.flake_dir}#{flake_attr}",
"-t",
"--no-reboot",
"--extra-files",
@ -39,10 +41,10 @@ def install_nixos(machine: Machine) -> None:
def install_command(args: argparse.Namespace) -> None:
machine = Machine(args.machine)
machine = Machine(args.machine, flake_dir=specific_flake_dir(args.flake))
machine.deployment_address = args.target_host
install_nixos(machine)
install_nixos(machine, args.flake)
def register_install_parser(parser: argparse.ArgumentParser) -> None:
@ -56,5 +58,9 @@ def register_install_parser(parser: argparse.ArgumentParser) -> None:
type=str,
help="ssh address to install to in the form of user@host:2222",
)
parser.add_argument(
"flake",
type=str,
help="name of the flake to install machine from",
)
parser.set_defaults(func=install_command)

View File

@ -2,14 +2,15 @@ import argparse
import logging
import os
from .folders import machines_folder
from ..dirs import machines_dir
from ..types import FlakeName
from .types import validate_hostname
log = logging.getLogger(__name__)
def list_machines() -> list[str]:
path = machines_folder()
def list_machines(flake_name: FlakeName) -> list[str]:
path = machines_dir(flake_name)
log.debug(f"Listing machines in {path}")
if not path.exists():
return []
@ -21,9 +22,14 @@ def list_machines() -> list[str]:
def list_command(args: argparse.Namespace) -> None:
for machine in list_machines():
for machine in list_machines(args.flake):
print(machine)
def register_list_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=list_command)

View File

@ -5,7 +5,6 @@ import sys
from pathlib import Path
from typing import Optional
from ..dirs import get_clan_flake_toplevel
from ..nix import nix_build, nix_config, nix_eval
from ..ssh import Host, parse_deployment_address
@ -31,7 +30,7 @@ class Machine:
def __init__(
self,
name: str,
clan_dir: Optional[Path] = None,
flake_dir: Path,
machine_data: Optional[dict] = None,
) -> None:
"""
@ -41,13 +40,10 @@ class Machine:
@machine_json: can be optionally used to skip evaluation of the machine, location of the json file with machine data
"""
self.name = name
if clan_dir is None:
self.clan_dir = get_clan_flake_toplevel()
else:
self.clan_dir = clan_dir
self.flake_dir = flake_dir
if machine_data is None:
self.machine_data = build_machine_data(name, self.clan_dir)
self.machine_data = build_machine_data(name, self.flake_dir)
else:
self.machine_data = machine_data
@ -68,14 +64,14 @@ class Machine:
@secrets_dir: the directory to store the secrets in
"""
env = os.environ.copy()
env["CLAN_DIR"] = str(self.clan_dir)
env["CLAN_DIR"] = str(self.flake_dir)
env["PYTHONPATH"] = str(
":".join(sys.path)
) # TODO do this in the clanCore module
env["SECRETS_DIR"] = str(secrets_dir)
print(f"uploading secrets... {self.upload_secrets}")
proc = subprocess.run(
[self.upload_secrets],
[self.upload_secrets, self.flake_dir.name],
env=env,
stdout=subprocess.PIPE,
text=True,
@ -95,7 +91,7 @@ class Machine:
@attr: the attribute to get
"""
output = subprocess.run(
nix_eval([f"path:{self.clan_dir}#{attr}"]),
nix_eval([f"path:{self.flake_dir}#{attr}"]),
stdout=subprocess.PIPE,
check=True,
text=True,
@ -108,7 +104,7 @@ class Machine:
@attr: the attribute to get
"""
outpath = subprocess.run(
nix_build([f"path:{self.clan_dir}#{attr}"]),
nix_build([f"path:{self.flake_dir}#{attr}"]),
stdout=subprocess.PIPE,
check=True,
text=True,

View File

@ -4,12 +4,13 @@ import os
import subprocess
from pathlib import Path
from ..dirs import get_clan_flake_toplevel
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_build, nix_command, nix_config
from ..secrets.generate import generate_secrets
from ..secrets.upload import upload_secrets
from ..ssh import Host, HostGroup, HostKeyCheck, parse_deployment_address
from ..types import FlakeName
def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None:
@ -40,7 +41,7 @@ def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None:
flake_attr = h.meta.get("flake_attr", "")
generate_secrets(h.meta["machine"])
generate_secrets(h.meta["machine"], FlakeName(clan_dir.name))
upload_secrets(h.meta["machine"])
target_host = h.meta.get("target_host")
@ -95,25 +96,29 @@ def get_all_machines(clan_dir: Path) -> HostGroup:
host = parse_deployment_address(
name,
machine_data["deploymentAddress"],
meta={"machine": Machine(name=name, machine_data=machine_data)},
meta={
"machine": Machine(
name=name, flake_dir=clan_dir, machine_data=machine_data
)
},
)
hosts.append(host)
return HostGroup(hosts)
def get_selected_machines(machine_names: list[str], clan_dir: Path) -> HostGroup:
def get_selected_machines(machine_names: list[str], flake_dir: Path) -> HostGroup:
hosts = []
for name in machine_names:
machine = Machine(name=name, clan_dir=clan_dir)
machine = Machine(name=name, flake_dir=flake_dir)
hosts.append(machine.host)
return HostGroup(hosts)
# FIXME: we want some kind of inventory here.
def update(args: argparse.Namespace) -> None:
clan_dir = get_clan_flake_toplevel()
flake_dir = specific_flake_dir(args.flake)
if len(args.machines) == 1 and args.target_host is not None:
machine = Machine(name=args.machines[0], clan_dir=clan_dir)
machine = Machine(name=args.machines[0], flake_dir=flake_dir)
machine.deployment_address = args.target_host
host = parse_deployment_address(
args.machines[0],
@ -127,11 +132,11 @@ def update(args: argparse.Namespace) -> None:
exit(1)
else:
if len(args.machines) == 0:
machines = get_all_machines(clan_dir)
machines = get_all_machines(flake_dir)
else:
machines = get_selected_machines(args.machines, clan_dir)
machines = get_selected_machines(args.machines, flake_dir)
deploy_nixos(machines, clan_dir)
deploy_nixos(machines, flake_dir)
def register_update_parser(parser: argparse.ArgumentParser) -> None:
@ -142,6 +147,11 @@ def register_update_parser(parser: argparse.ArgumentParser) -> None:
nargs="*",
default=[],
)
parser.add_argument(
"flake",
type=str,
help="name of the flake to update machine for",
)
parser.add_argument(
"--target-host",
type=str,

View File

@ -2,8 +2,11 @@ import json
import os
import subprocess
import tempfile
from pathlib import Path
from typing import Any
from pydantic import AnyUrl
from .dirs import nixpkgs_flake, nixpkgs_source
@ -11,7 +14,7 @@ def nix_command(flags: list[str]) -> list[str]:
return ["nix", "--extra-experimental-features", "nix-command flakes"] + flags
def nix_flake_show(flake_url: str) -> list[str]:
def nix_flake_show(flake_url: AnyUrl | Path) -> list[str]:
return nix_command(
[
"flake",

View File

@ -3,17 +3,18 @@ import shutil
from pathlib import Path
from typing import Callable
from ..dirs import get_clan_flake_toplevel
from ..dirs import specific_flake_dir
from ..errors import ClanError
from ..types import FlakeName
def get_sops_folder() -> Path:
return get_clan_flake_toplevel() / "sops"
def get_sops_folder(flake_name: FlakeName) -> Path:
return specific_flake_dir(flake_name) / "sops"
def gen_sops_subfolder(subdir: str) -> Callable[[], Path]:
def folder() -> Path:
return get_clan_flake_toplevel() / "sops" / subdir
def gen_sops_subfolder(subdir: str) -> Callable[[FlakeName], Path]:
def folder(flake_name: FlakeName) -> Path:
return specific_flake_dir(flake_name) / "sops" / subdir
return folder

View File

@ -6,19 +6,21 @@ import sys
from clan_cli.errors import ClanError
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..types import FlakeName
log = logging.getLogger(__name__)
def generate_secrets(machine: Machine) -> None:
def generate_secrets(machine: Machine, flake_name: FlakeName) -> None:
env = os.environ.copy()
env["CLAN_DIR"] = str(machine.clan_dir)
env["CLAN_DIR"] = str(machine.flake_dir)
env["PYTHONPATH"] = ":".join(sys.path) # TODO do this in the clanCore module
print(f"generating secrets... {machine.generate_secrets}")
proc = subprocess.run(
[machine.generate_secrets],
[machine.generate_secrets, flake_name],
env=env,
)
@ -29,8 +31,8 @@ def generate_secrets(machine: Machine) -> None:
def generate_command(args: argparse.Namespace) -> None:
machine = Machine(args.machine)
generate_secrets(machine)
machine = Machine(name=args.machine, flake_dir=specific_flake_dir(args.flake))
generate_secrets(machine, args.flake)
def register_generate_parser(parser: argparse.ArgumentParser) -> None:
@ -38,4 +40,9 @@ def register_generate_parser(parser: argparse.ArgumentParser) -> None:
"machine",
help="The machine to generate secrets for",
)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=generate_command)

View File

@ -4,6 +4,7 @@ from pathlib import Path
from ..errors import ClanError
from ..machines.types import machine_name_type, validate_hostname
from ..types import FlakeName
from . import secrets
from .folders import (
sops_groups_folder,
@ -20,24 +21,27 @@ from .types import (
)
def machines_folder(group: str) -> Path:
return sops_groups_folder() / group / "machines"
def machines_folder(flake_name: FlakeName, group: str) -> Path:
return sops_groups_folder(flake_name) / group / "machines"
def users_folder(group: str) -> Path:
return sops_groups_folder() / group / "users"
def users_folder(flake_name: FlakeName, group: str) -> Path:
return sops_groups_folder(flake_name) / group / "users"
class Group:
def __init__(self, name: str, machines: list[str], users: list[str]) -> None:
def __init__(
self, flake_name: FlakeName, name: str, machines: list[str], users: list[str]
) -> None:
self.name = name
self.machines = machines
self.users = users
self.flake_name = flake_name
def list_groups() -> list[Group]:
def list_groups(flake_name: FlakeName) -> list[Group]:
groups: list[Group] = []
folder = sops_groups_folder()
folder = sops_groups_folder(flake_name)
if not folder.exists():
return groups
@ -45,24 +49,24 @@ def list_groups() -> list[Group]:
group_folder = folder / name
if not group_folder.is_dir():
continue
machines_path = machines_folder(name)
machines_path = machines_folder(flake_name, name)
machines = []
if machines_path.is_dir():
for f in machines_path.iterdir():
if validate_hostname(f.name):
machines.append(f.name)
users_path = users_folder(name)
users_path = users_folder(flake_name, name)
users = []
if users_path.is_dir():
for f in users_path.iterdir():
if VALID_USER_NAME.match(f.name):
users.append(f.name)
groups.append(Group(name, machines, users))
groups.append(Group(flake_name, name, machines, users))
return groups
def list_command(args: argparse.Namespace) -> None:
for group in list_groups():
for group in list_groups(args.flake):
print(group.name)
if group.machines:
print("machines:")
@ -84,9 +88,9 @@ def list_directory(directory: Path) -> str:
return msg
def update_group_keys(group: str) -> None:
for secret_ in secrets.list_secrets():
secret = sops_secrets_folder() / secret_
def update_group_keys(flake_name: FlakeName, group: str) -> None:
for secret_ in secrets.list_secrets(flake_name):
secret = sops_secrets_folder(flake_name) / secret_
if (secret / "groups" / group).is_symlink():
update_keys(
secret,
@ -94,7 +98,9 @@ def update_group_keys(group: str) -> None:
)
def add_member(group_folder: Path, source_folder: Path, name: str) -> None:
def add_member(
flake_name: FlakeName, group_folder: Path, source_folder: Path, name: str
) -> None:
source = source_folder / name
if not source.exists():
msg = f"{name} does not exist in {source_folder}: "
@ -109,10 +115,10 @@ def add_member(group_folder: Path, source_folder: Path, name: str) -> None:
)
os.remove(user_target)
user_target.symlink_to(os.path.relpath(source, user_target.parent))
update_group_keys(group_folder.parent.name)
update_group_keys(flake_name, group_folder.parent.name)
def remove_member(group_folder: Path, name: str) -> None:
def remove_member(flake_name: FlakeName, group_folder: Path, name: str) -> None:
target = group_folder / name
if not target.exists():
msg = f"{name} does not exist in group in {group_folder}: "
@ -121,7 +127,7 @@ def remove_member(group_folder: Path, name: str) -> None:
os.remove(target)
if len(os.listdir(group_folder)) > 0:
update_group_keys(group_folder.parent.name)
update_group_keys(flake_name, group_folder.parent.name)
if len(os.listdir(group_folder)) == 0:
os.rmdir(group_folder)
@ -130,56 +136,65 @@ def remove_member(group_folder: Path, name: str) -> None:
os.rmdir(group_folder.parent)
def add_user(group: str, name: str) -> None:
add_member(users_folder(group), sops_users_folder(), name)
def add_user(flake_name: FlakeName, group: str, name: str) -> None:
add_member(
flake_name, users_folder(flake_name, group), sops_users_folder(flake_name), name
)
def add_user_command(args: argparse.Namespace) -> None:
add_user(args.group, args.user)
add_user(args.flake, args.group, args.user)
def remove_user(group: str, name: str) -> None:
remove_member(users_folder(group), name)
def remove_user(flake_name: FlakeName, group: str, name: str) -> None:
remove_member(flake_name, users_folder(flake_name, group), name)
def remove_user_command(args: argparse.Namespace) -> None:
remove_user(args.group, args.user)
remove_user(args.flake, args.group, args.user)
def add_machine(group: str, name: str) -> None:
add_member(machines_folder(group), sops_machines_folder(), name)
def add_machine(flake_name: FlakeName, group: str, name: str) -> None:
add_member(
flake_name,
machines_folder(flake_name, group),
sops_machines_folder(flake_name),
name,
)
def add_machine_command(args: argparse.Namespace) -> None:
add_machine(args.group, args.machine)
add_machine(args.flake, args.group, args.machine)
def remove_machine(group: str, name: str) -> None:
remove_member(machines_folder(group), name)
def remove_machine(flake_name: FlakeName, group: str, name: str) -> None:
remove_member(flake_name, machines_folder(flake_name, group), name)
def remove_machine_command(args: argparse.Namespace) -> None:
remove_machine(args.group, args.machine)
remove_machine(args.flake, args.group, args.machine)
def add_group_argument(parser: argparse.ArgumentParser) -> None:
parser.add_argument("group", help="the name of the secret", type=group_name_type)
def add_secret(group: str, name: str) -> None:
secrets.allow_member(secrets.groups_folder(name), sops_groups_folder(), group)
def add_secret(flake_name: FlakeName, group: str, name: str) -> None:
secrets.allow_member(
secrets.groups_folder(flake_name, name), sops_groups_folder(flake_name), group
)
def add_secret_command(args: argparse.Namespace) -> None:
add_secret(args.group, args.secret)
add_secret(args.flake, args.group, args.secret)
def remove_secret(group: str, name: str) -> None:
secrets.disallow_member(secrets.groups_folder(name), group)
def remove_secret(flake_name: FlakeName, group: str, name: str) -> None:
secrets.disallow_member(secrets.groups_folder(flake_name, name), group)
def remove_secret_command(args: argparse.Namespace) -> None:
remove_secret(args.group, args.secret)
remove_secret(args.flake, args.group, args.secret)
def register_groups_parser(parser: argparse.ArgumentParser) -> None:
@ -189,9 +204,17 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
help="the command to run",
required=True,
)
# List groups
list_parser = subparser.add_parser("list", help="list groups")
list_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
list_parser.set_defaults(func=list_command)
# Add user
add_machine_parser = subparser.add_parser(
"add-machine", help="add a machine to group"
)
@ -199,8 +222,14 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
add_machine_parser.add_argument(
"machine", help="the name of the machines to add", type=machine_name_type
)
add_machine_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_machine_parser.set_defaults(func=add_machine_command)
# Remove machine
remove_machine_parser = subparser.add_parser(
"remove-machine", help="remove a machine from group"
)
@ -208,15 +237,27 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
remove_machine_parser.add_argument(
"machine", help="the name of the machines to remove", type=machine_name_type
)
remove_machine_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_machine_parser.set_defaults(func=remove_machine_command)
# Add user
add_user_parser = subparser.add_parser("add-user", help="add a user to group")
add_group_argument(add_user_parser)
add_user_parser.add_argument(
"user", help="the name of the user to add", type=user_name_type
)
add_user_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_user_parser.set_defaults(func=add_user_command)
# Remove user
remove_user_parser = subparser.add_parser(
"remove-user", help="remove a user from group"
)
@ -224,8 +265,14 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
remove_user_parser.add_argument(
"user", help="the name of the user to remove", type=user_name_type
)
remove_user_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_user_parser.set_defaults(func=remove_user_command)
# Add secret
add_secret_parser = subparser.add_parser(
"add-secret", help="allow a user to access a secret"
)
@ -235,8 +282,14 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
add_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type
)
add_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser.set_defaults(func=add_secret_command)
# Remove secret
remove_secret_parser = subparser.add_parser(
"remove-secret", help="remove a group's access to a secret"
)
@ -246,4 +299,9 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
remove_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type
)
remove_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@ -36,14 +36,15 @@ def import_sops(args: argparse.Namespace) -> None:
file=sys.stderr,
)
continue
if (sops_secrets_folder() / k / "secret").exists():
if (sops_secrets_folder(args.flake) / k / "secret").exists():
print(
f"WARNING: {k} already exists, skipping",
file=sys.stderr,
)
continue
encrypt_secret(
sops_secrets_folder() / k,
args.flake,
sops_secrets_folder(args.flake) / k,
v,
add_groups=args.group,
add_machines=args.machine,
@ -90,4 +91,10 @@ def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
type=str,
help="the sops file to import (- for stdin)",
)
parser.add_argument(
"flake",
type=str,
help="name of the flake",
)
parser.set_defaults(func=import_sops)

View File

@ -1,71 +1,74 @@
import argparse
from ..machines.types import machine_name_type, validate_hostname
from ..types import FlakeName
from . import secrets
from .folders import list_objects, remove_object, sops_machines_folder
from .sops import read_key, write_key
from .types import public_or_private_age_key_type, secret_name_type
def add_machine(name: str, key: str, force: bool) -> None:
write_key(sops_machines_folder() / name, key, force)
def add_machine(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
write_key(sops_machines_folder(flake_name) / name, key, force)
def remove_machine(name: str) -> None:
remove_object(sops_machines_folder(), name)
def remove_machine(flake_name: FlakeName, name: str) -> None:
remove_object(sops_machines_folder(flake_name), name)
def get_machine(name: str) -> str:
return read_key(sops_machines_folder() / name)
def get_machine(flake_name: FlakeName, name: str) -> str:
return read_key(sops_machines_folder(flake_name) / name)
def has_machine(name: str) -> bool:
return (sops_machines_folder() / name / "key.json").exists()
def has_machine(flake_name: FlakeName, name: str) -> bool:
return (sops_machines_folder(flake_name) / name / "key.json").exists()
def list_machines() -> list[str]:
path = sops_machines_folder()
def list_machines(flake_name: FlakeName) -> list[str]:
path = sops_machines_folder(flake_name)
def validate(name: str) -> bool:
return validate_hostname(name) and has_machine(name)
return validate_hostname(name) and has_machine(flake_name, name)
return list_objects(path, validate)
def add_secret(machine: str, secret: str) -> None:
def add_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
secrets.allow_member(
secrets.machines_folder(secret), sops_machines_folder(), machine
secrets.machines_folder(flake_name, secret),
sops_machines_folder(flake_name),
machine,
)
def remove_secret(machine: str, secret: str) -> None:
secrets.disallow_member(secrets.machines_folder(secret), machine)
def remove_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
secrets.disallow_member(secrets.machines_folder(flake_name, secret), machine)
def list_command(args: argparse.Namespace) -> None:
lst = list_machines()
lst = list_machines(args.flake)
if len(lst) > 0:
print("\n".join(lst))
def add_command(args: argparse.Namespace) -> None:
add_machine(args.machine, args.key, args.force)
add_machine(args.flake, args.machine, args.key, args.force)
def get_command(args: argparse.Namespace) -> None:
print(get_machine(args.machine))
print(get_machine(args.flake, args.machine))
def remove_command(args: argparse.Namespace) -> None:
remove_machine(args.machine)
remove_machine(args.flake, args.machine)
def add_secret_command(args: argparse.Namespace) -> None:
add_secret(args.machine, args.secret)
add_secret(args.flake, args.machine, args.secret)
def remove_secret_command(args: argparse.Namespace) -> None:
remove_secret(args.machine, args.secret)
remove_secret(args.flake, args.machine, args.secret)
def register_machines_parser(parser: argparse.ArgumentParser) -> None:
@ -75,9 +78,16 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
help="the command to run",
required=True,
)
# Parser
list_parser = subparser.add_parser("list", help="list machines")
list_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
list_parser.set_defaults(func=list_command)
# Parser
add_parser = subparser.add_parser("add", help="add a machine")
add_parser.add_argument(
"-f",
@ -94,20 +104,38 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
help="public key or private key of the user",
type=public_or_private_age_key_type,
)
add_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_parser.set_defaults(func=add_command)
# Parser
get_parser = subparser.add_parser("get", help="get a machine public key")
get_parser.add_argument(
"machine", help="the name of the machine", type=machine_name_type
)
get_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
get_parser.set_defaults(func=get_command)
# Parser
remove_parser = subparser.add_parser("remove", help="remove a machine")
remove_parser.add_argument(
"machine", help="the name of the machine", type=machine_name_type
)
remove_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_parser.set_defaults(func=remove_command)
# Parser
add_secret_parser = subparser.add_parser(
"add-secret", help="allow a machine to access a secret"
)
@ -117,8 +145,14 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
add_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type
)
add_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser.set_defaults(func=add_secret_command)
# Parser
remove_secret_parser = subparser.add_parser(
"remove-secret", help="remove a group's access to a secret"
)
@ -128,4 +162,9 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
remove_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type
)
remove_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_secret_parser.set_defaults(func=remove_secret_command)
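For illustration, a minimal sketch of how the reworked, flake-scoped helpers at the top of this file are now called; the flake name and age key below are placeholders (the key is the example recipient from the age documentation), not values from this change:

```python
# Sketch only: "my-clan" is a placeholder flake name; the age key is the
# example recipient from the age docs, not a real machine key.
from clan_cli.secrets.machines import add_machine, has_machine, list_machines
from clan_cli.types import FlakeName

flake = FlakeName("my-clan")
# The flake name now comes first and selects the per-flake sops store.
add_machine(
    flake,
    "machine1",
    "age1ql3z7hjy54pw3hyww5ayyfg7zqgvc7w3j2elw8zmrj2kg5sfn9aqmcac8p",
    force=False,
)
assert has_machine(flake, "machine1")
print(list_machines(flake))  # ["machine1"]
```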

View File

@ -8,6 +8,7 @@ from typing import IO
from .. import tty
from ..errors import ClanError
from ..types import FlakeName
from .folders import (
list_objects,
sops_groups_folder,
@ -53,62 +54,79 @@ def collect_keys_for_path(path: Path) -> set[str]:
def encrypt_secret(
flake_name: FlakeName,
secret: Path,
value: IO[str] | str | None,
add_users: list[str] = [],
add_machines: list[str] = [],
add_groups: list[str] = [],
) -> None:
key = ensure_sops_key()
key = ensure_sops_key(flake_name)
keys = set([])
for user in add_users:
allow_member(users_folder(secret.name), sops_users_folder(), user, False)
allow_member(
users_folder(flake_name, secret.name),
sops_users_folder(flake_name),
user,
False,
)
for machine in add_machines:
allow_member(
machines_folder(secret.name), sops_machines_folder(), machine, False
machines_folder(flake_name, secret.name),
sops_machines_folder(flake_name),
machine,
False,
)
for group in add_groups:
allow_member(groups_folder(secret.name), sops_groups_folder(), group, False)
allow_member(
groups_folder(flake_name, secret.name),
sops_groups_folder(flake_name),
group,
False,
)
keys = collect_keys_for_path(secret)
if key.pubkey not in keys:
keys.add(key.pubkey)
allow_member(
users_folder(secret.name), sops_users_folder(), key.username, False
users_folder(flake_name, secret.name),
sops_users_folder(flake_name),
key.username,
False,
)
encrypt_file(secret / "secret", value, list(sorted(keys)))
def remove_secret(secret: str) -> None:
path = sops_secrets_folder() / secret
def remove_secret(flake_name: FlakeName, secret: str) -> None:
path = sops_secrets_folder(flake_name) / secret
if not path.exists():
raise ClanError(f"Secret '{secret}' does not exist")
shutil.rmtree(path)
def remove_command(args: argparse.Namespace) -> None:
remove_secret(args.secret)
remove_secret(args.flake, args.secret)
def add_secret_argument(parser: argparse.ArgumentParser) -> None:
parser.add_argument("secret", help="the name of the secret", type=secret_name_type)
def machines_folder(group: str) -> Path:
return sops_secrets_folder() / group / "machines"
def machines_folder(flake_name: FlakeName, group: str) -> Path:
return sops_secrets_folder(flake_name) / group / "machines"
def users_folder(group: str) -> Path:
return sops_secrets_folder() / group / "users"
def users_folder(flake_name: FlakeName, group: str) -> Path:
return sops_secrets_folder(flake_name) / group / "users"
def groups_folder(group: str) -> Path:
return sops_secrets_folder() / group / "groups"
def groups_folder(flake_name: FlakeName, group: str) -> Path:
return sops_secrets_folder(flake_name) / group / "groups"
def list_directory(directory: Path) -> str:
@ -171,35 +189,37 @@ def disallow_member(group_folder: Path, name: str) -> None:
)
def has_secret(secret: str) -> bool:
return (sops_secrets_folder() / secret / "secret").exists()
def has_secret(flake_name: FlakeName, secret: str) -> bool:
return (sops_secrets_folder(flake_name) / secret / "secret").exists()
def list_secrets() -> list[str]:
path = sops_secrets_folder()
def list_secrets(flake_name: FlakeName) -> list[str]:
path = sops_secrets_folder(flake_name)
def validate(name: str) -> bool:
return VALID_SECRET_NAME.match(name) is not None and has_secret(name)
return VALID_SECRET_NAME.match(name) is not None and has_secret(
flake_name, name
)
return list_objects(path, validate)
def list_command(args: argparse.Namespace) -> None:
lst = list_secrets()
lst = list_secrets(args.flake)
if len(lst) > 0:
print("\n".join(lst))
def decrypt_secret(secret: str) -> str:
ensure_sops_key()
secret_path = sops_secrets_folder() / secret / "secret"
def decrypt_secret(flake_name: FlakeName, secret: str) -> str:
ensure_sops_key(flake_name)
secret_path = sops_secrets_folder(flake_name) / secret / "secret"
if not secret_path.exists():
raise ClanError(f"Secret '{secret}' does not exist")
return decrypt_file(secret_path)
def get_command(args: argparse.Namespace) -> None:
print(decrypt_secret(args.secret), end="")
print(decrypt_secret(args.flake, args.secret), end="")
def set_command(args: argparse.Namespace) -> None:
@ -212,7 +232,8 @@ def set_command(args: argparse.Namespace) -> None:
elif tty.is_interactive():
secret_value = getpass.getpass(prompt="Paste your secret: ")
encrypt_secret(
sops_secrets_folder() / args.secret,
args.flake,
sops_secrets_folder(args.flake) / args.secret,
secret_value,
args.user,
args.machine,
@ -221,8 +242,8 @@ def set_command(args: argparse.Namespace) -> None:
def rename_command(args: argparse.Namespace) -> None:
old_path = sops_secrets_folder() / args.secret
new_path = sops_secrets_folder() / args.new_name
old_path = sops_secrets_folder(args.flake) / args.secret
new_path = sops_secrets_folder(args.flake) / args.new_name
if not old_path.exists():
raise ClanError(f"Secret '{args.secret}' does not exist")
if new_path.exists():
@ -232,10 +253,20 @@ def rename_command(args: argparse.Namespace) -> None:
def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
parser_list = subparser.add_parser("list", help="list secrets")
parser_list.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_list.set_defaults(func=list_command)
parser_get = subparser.add_parser("get", help="get a secret")
add_secret_argument(parser_get)
parser_get.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_get.set_defaults(func=get_command)
parser_set = subparser.add_parser("set", help="set a secret")
@ -268,13 +299,28 @@ def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
default=False,
help="edit the secret with $EDITOR instead of pasting it",
)
parser_set.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_set.set_defaults(func=set_command)
parser_rename = subparser.add_parser("rename", help="rename a secret")
add_secret_argument(parser_rename)
parser_rename.add_argument("new_name", type=str, help="the new name of the secret")
parser_rename.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_rename.set_defaults(func=rename_command)
parser_remove = subparser.add_parser("remove", help="remove a secret")
add_secret_argument(parser_remove)
parser_remove.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_remove.set_defaults(func=remove_command)

View File

@ -10,6 +10,7 @@ from typing import IO, Iterator
from ..dirs import user_config_dir
from ..errors import ClanError
from ..nix import nix_shell
from ..types import FlakeName
from .folders import sops_machines_folder, sops_users_folder
@ -51,7 +52,7 @@ def generate_private_key() -> tuple[str, str]:
raise ClanError("Failed to generate private sops key") from e
def get_user_name(user: str) -> str:
def get_user_name(flake_name: FlakeName, user: str) -> str:
"""Ask the user for their name until a unique one is provided."""
while True:
name = input(
@ -59,14 +60,14 @@ def get_user_name(user: str) -> str:
)
if name:
user = name
if not (sops_users_folder() / user).exists():
if not (sops_users_folder(flake_name) / user).exists():
return user
print(f"{sops_users_folder() / user} already exists")
print(f"{sops_users_folder(flake_name) / user} already exists")
def ensure_user_or_machine(pub_key: str) -> SopsKey:
def ensure_user_or_machine(flake_name: FlakeName, pub_key: str) -> SopsKey:
key = SopsKey(pub_key, username="")
folders = [sops_users_folder(), sops_machines_folder()]
folders = [sops_users_folder(flake_name), sops_machines_folder(flake_name)]
for folder in folders:
if folder.exists():
for user in folder.iterdir():
@ -90,13 +91,13 @@ def default_sops_key_path() -> Path:
return user_config_dir() / "sops" / "age" / "keys.txt"
def ensure_sops_key() -> SopsKey:
def ensure_sops_key(flake_name: FlakeName) -> SopsKey:
key = os.environ.get("SOPS_AGE_KEY")
if key:
return ensure_user_or_machine(get_public_key(key))
return ensure_user_or_machine(flake_name, get_public_key(key))
path = default_sops_key_path()
if path.exists():
return ensure_user_or_machine(get_public_key(path.read_text()))
return ensure_user_or_machine(flake_name, get_public_key(path.read_text()))
else:
raise ClanError(
"No sops key found. Please generate one with 'clan secrets key generate'."

View File

@ -1,3 +1,4 @@
import logging
import os
import shlex
import shutil
@ -9,29 +10,40 @@ from typing import Any
from clan_cli.nix import nix_shell
from ..dirs import get_clan_flake_toplevel
from ..dirs import specific_flake_dir
from ..errors import ClanError
from ..types import FlakeName
from .folders import sops_secrets_folder
from .machines import add_machine, has_machine
from .secrets import decrypt_secret, encrypt_secret, has_secret
from .sops import generate_private_key
log = logging.getLogger(__name__)
def generate_host_key(machine_name: str) -> None:
if has_machine(machine_name):
def generate_host_key(flake_name: FlakeName, machine_name: str) -> None:
if has_machine(flake_name, machine_name):
return
priv_key, pub_key = generate_private_key()
encrypt_secret(sops_secrets_folder() / f"{machine_name}-age.key", priv_key)
add_machine(machine_name, pub_key, False)
encrypt_secret(
flake_name,
sops_secrets_folder(flake_name) / f"{machine_name}-age.key",
priv_key,
)
add_machine(flake_name, machine_name, pub_key, False)
def generate_secrets_group(
secret_group: str, machine_name: str, tempdir: Path, secret_options: dict[str, Any]
flake_name: FlakeName,
secret_group: str,
machine_name: str,
tempdir: Path,
secret_options: dict[str, Any],
) -> None:
clan_dir = get_clan_flake_toplevel()
clan_dir = specific_flake_dir(flake_name)
secrets = secret_options["secrets"]
needs_regeneration = any(
not has_secret(f"{machine_name}-{secret['name']}")
not has_secret(flake_name, f"{machine_name}-{secret['name']}")
for secret in secrets.values()
)
generator = secret_options["generator"]
@ -62,7 +74,8 @@ export secrets={shlex.quote(str(secrets_dir))}
msg += text
raise ClanError(msg)
encrypt_secret(
sops_secrets_folder() / f"{machine_name}-{secret['name']}",
flake_name,
sops_secrets_folder(flake_name) / f"{machine_name}-{secret['name']}",
secret_file.read_text(),
add_machines=[machine_name],
)
@ -79,17 +92,21 @@ export secrets={shlex.quote(str(secrets_dir))}
# this is called by the sops.nix clan core module
def generate_secrets_from_nix(
flake_name: FlakeName,
machine_name: str,
secret_submodules: dict[str, Any],
) -> None:
generate_host_key(machine_name)
generate_host_key(flake_name, machine_name)
errors = {}
log.debug(
"Generating secrets for machine %s and flake %s", machine_name, flake_name
)
with TemporaryDirectory() as d:
# if any of the secrets are missing, we regenerate all connected facts/secrets
for secret_group, secret_options in secret_submodules.items():
try:
generate_secrets_group(
secret_group, machine_name, Path(d), secret_options
flake_name, secret_group, machine_name, Path(d), secret_options
)
except ClanError as e:
errors[secret_group] = e
@ -102,12 +119,16 @@ def generate_secrets_from_nix(
# this is called by the sops.nix clan core module
def upload_age_key_from_nix(
flake_name: FlakeName,
machine_name: str,
) -> None:
log.debug("Uploading secrets for machine %s and flake %s", machine_name, flake_name)
secret_name = f"{machine_name}-age.key"
if not has_secret(secret_name): # skip uploading the secret, not managed by us
if not has_secret(
flake_name, secret_name
): # skip uploading the secret, not managed by us
return
secret = decrypt_secret(secret_name)
secret = decrypt_secret(flake_name, secret_name)
secrets_dir = Path(os.environ["SECRETS_DIR"])
(secrets_dir / "key.txt").write_text(secret)

View File

@ -4,6 +4,7 @@ import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_shell
@ -37,7 +38,7 @@ def upload_secrets(machine: Machine) -> None:
def upload_command(args: argparse.Namespace) -> None:
machine = Machine(args.machine)
machine = Machine(name=args.machine, flake_dir=specific_flake_dir(args.flake))
upload_secrets(machine)
@ -46,4 +47,9 @@ def register_upload_parser(parser: argparse.ArgumentParser) -> None:
"machine",
help="The machine to upload secrets to",
)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=upload_command)

View File

@ -1,5 +1,6 @@
import argparse
from ..types import FlakeName
from . import secrets
from .folders import list_objects, remove_object, sops_users_folder
from .sops import read_key, write_key
@ -11,20 +12,20 @@ from .types import (
)
def add_user(name: str, key: str, force: bool) -> None:
write_key(sops_users_folder() / name, key, force)
def add_user(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
write_key(sops_users_folder(flake_name) / name, key, force)
def remove_user(name: str) -> None:
remove_object(sops_users_folder(), name)
def remove_user(flake_name: FlakeName, name: str) -> None:
remove_object(sops_users_folder(flake_name), name)
def get_user(name: str) -> str:
return read_key(sops_users_folder() / name)
def get_user(flake_name: FlakeName, name: str) -> str:
return read_key(sops_users_folder(flake_name) / name)
def list_users() -> list[str]:
path = sops_users_folder()
def list_users(flake_name: FlakeName) -> list[str]:
path = sops_users_folder(flake_name)
def validate(name: str) -> bool:
return (
@ -35,38 +36,40 @@ def list_users() -> list[str]:
return list_objects(path, validate)
def add_secret(user: str, secret: str) -> None:
secrets.allow_member(secrets.users_folder(secret), sops_users_folder(), user)
def add_secret(flake_name: FlakeName, user: str, secret: str) -> None:
secrets.allow_member(
secrets.users_folder(flake_name, secret), sops_users_folder(flake_name), user
)
def remove_secret(user: str, secret: str) -> None:
secrets.disallow_member(secrets.users_folder(secret), user)
def remove_secret(flake_name: FlakeName, user: str, secret: str) -> None:
secrets.disallow_member(secrets.users_folder(flake_name, secret), user)
def list_command(args: argparse.Namespace) -> None:
lst = list_users()
lst = list_users(args.flake)
if len(lst) > 0:
print("\n".join(lst))
def add_command(args: argparse.Namespace) -> None:
add_user(args.user, args.key, args.force)
add_user(args.flake, args.user, args.key, args.force)
def get_command(args: argparse.Namespace) -> None:
print(get_user(args.user))
print(get_user(args.flake, args.user))
def remove_command(args: argparse.Namespace) -> None:
remove_user(args.user)
remove_user(args.flake, args.user)
def add_secret_command(args: argparse.Namespace) -> None:
add_secret(args.user, args.secret)
add_secret(args.flake, args.user, args.secret)
def remove_secret_command(args: argparse.Namespace) -> None:
remove_secret(args.user, args.secret)
remove_secret(args.flake, args.user, args.secret)
def register_users_parser(parser: argparse.ArgumentParser) -> None:
@ -77,6 +80,11 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
required=True,
)
list_parser = subparser.add_parser("list", help="list users")
list_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
list_parser.set_defaults(func=list_command)
add_parser = subparser.add_parser("add", help="add a user")
@ -90,14 +98,29 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
type=public_or_private_age_key_type,
)
add_parser.set_defaults(func=add_command)
add_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
get_parser = subparser.add_parser("get", help="get a user public key")
get_parser.add_argument("user", help="the name of the user", type=user_name_type)
get_parser.set_defaults(func=get_command)
get_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_parser = subparser.add_parser("remove", help="remove a user")
remove_parser.add_argument("user", help="the name of the user", type=user_name_type)
remove_parser.set_defaults(func=remove_command)
remove_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser = subparser.add_parser(
"add-secret", help="allow a user to access a secret"
@ -108,6 +131,11 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
add_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type
)
add_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser.set_defaults(func=add_secret_command)
remove_secret_parser = subparser.add_parser(
@ -119,4 +147,9 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
remove_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type
)
remove_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@ -12,6 +12,7 @@ from pathlib import Path
from typing import Any, Iterator, Optional, Type, TypeVar
from uuid import UUID, uuid4
from .custom_logger import ThreadFormatter, get_caller
from .errors import ClanError
@ -38,7 +39,8 @@ class Command:
cwd: Optional[Path] = None,
) -> None:
self.running = True
self.log.debug(f"Running command: {shlex.join(cmd)}")
self.log.debug(f"Command: {shlex.join(cmd)}")
self.log.debug(f"Caller: {get_caller()}")
cwd_res = None
if cwd is not None:
@ -68,10 +70,10 @@ class Command:
try:
for line in fd:
if fd == self.p.stderr:
print(f"[{cmd[0]}] stderr: {line.rstrip()}")
self.log.debug(f"[{cmd[0]}] stderr: {line}")
self.stderr.append(line)
else:
print(f"[{cmd[0]}] stdout: {line.rstrip()}")
self.log.debug(f"[{cmd[0]}] stdout: {line}")
self.stdout.append(line)
self._output.put(line)
except BlockingIOError:
@ -80,8 +82,6 @@ class Command:
if self.p.returncode != 0:
raise ClanError(f"Failed to run command: {shlex.join(cmd)}")
self.log.debug("Successfully ran command")
class TaskStatus(str, Enum):
NOTSTARTED = "NOTSTARTED"
@ -94,7 +94,13 @@ class BaseTask:
def __init__(self, uuid: UUID, num_cmds: int) -> None:
# constructor
self.uuid: UUID = uuid
self.log = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(ThreadFormatter())
logger = logging.getLogger(__name__)
logger.addHandler(handler)
self.log = logger
self.procs: list[Command] = []
self.status = TaskStatus.NOTSTARTED
self.logs_lock = threading.Lock()
@ -108,6 +114,10 @@ class BaseTask:
self.status = TaskStatus.RUNNING
try:
self.run()
# TODO: We need to check whether too many commands have been initialized
# but not run. This would deadlock the log_lines() function.
# Idea: Run next(cmds) and check if it raises StopIteration; if not,
# we have too many commands.
except Exception as e:
# FIXME: fix exception handling here
traceback.print_exception(*sys.exc_info())

View File

@ -0,0 +1,23 @@
import logging
from pathlib import Path
from typing import NewType
log = logging.getLogger(__name__)
FlakeName = NewType("FlakeName", str)
def validate_path(base_dir: Path, value: Path) -> Path:
user_path = (base_dir / value).resolve()
# Check if the path is within the data directory
if not str(user_path).startswith(str(base_dir)):
if not str(user_path).startswith("/tmp/pytest"):
raise ValueError(
f"Destination out of bounds. Expected {user_path} to start with {base_dir}"
)
else:
log.warning(
f"Detected pytest tmpdir. Skipping path validation for {user_path}"
)
return user_path
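A minimal usage sketch of `validate_path`, mirroring how `vms/create.py` guards the per-flake working directory; the flake name is a placeholder:

```python
# Sketch only: "my-clan" is a placeholder flake name.
from clan_cli.dirs import clan_flakes_dir
from clan_cli.types import validate_path

flake_dir = clan_flakes_dir() / "my-clan"
# Raises ValueError if the resolved path escapes the flakes directory,
# e.g. when a name smuggles in "../" components.
flake_dir = validate_path(clan_flakes_dir(), flake_dir)
flake_dir.mkdir(parents=True, exist_ok=True)
```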

View File

@ -2,22 +2,36 @@ import argparse
import asyncio
import json
import os
import re
import shlex
import sys
import tempfile
from pathlib import Path
from typing import Iterator
from uuid import UUID
from ..dirs import get_clan_flake_toplevel
from ..nix import nix_build, nix_config, nix_shell
from ..dirs import clan_flakes_dir, specific_flake_dir
from ..errors import ClanError
from ..nix import nix_build, nix_config, nix_eval, nix_shell
from ..task_manager import BaseTask, Command, create_task
from ..types import validate_path
from .inspect import VmConfig, inspect_vm
def is_path_or_url(s: str) -> str | None:
# check if s is a valid path
if os.path.exists(s):
return "path"
# check if s is a valid URL
elif re.match(r"^https?://[a-zA-Z0-9.-]+/[a-zA-Z0-9.-]+", s):
return "URL"
# otherwise, return None
else:
return None
class BuildVmTask(BaseTask):
def __init__(self, uuid: UUID, vm: VmConfig) -> None:
super().__init__(uuid, num_cmds=6)
super().__init__(uuid, num_cmds=7)
self.vm = vm
def get_vm_create_info(self, cmds: Iterator[Command]) -> dict:
@ -34,11 +48,18 @@ class BuildVmTask(BaseTask):
]
)
)
vm_json = "".join(cmd.stdout)
vm_json = "".join(cmd.stdout).strip()
self.log.debug(f"VM JSON path: {vm_json}")
with open(vm_json.strip()) as f:
with open(vm_json) as f:
return json.load(f)
def get_clan_name(self, cmds: Iterator[Command]) -> str:
clan_dir = self.vm.flake_url
cmd = next(cmds)
cmd.run(nix_eval([f"{clan_dir}#clanInternals.clanName"]))
clan_name = cmd.stdout[0].strip().strip('"')
return clan_name
def run(self) -> None:
cmds = self.commands()
@ -47,99 +68,111 @@ class BuildVmTask(BaseTask):
# TODO: We should get this from the vm argument
vm_config = self.get_vm_create_info(cmds)
clan_name = self.get_clan_name(cmds)
with tempfile.TemporaryDirectory() as tmpdir_:
tmpdir = Path(tmpdir_)
xchg_dir = tmpdir / "xchg"
xchg_dir.mkdir()
secrets_dir = tmpdir / "secrets"
secrets_dir.mkdir()
disk_img = f"{tmpdir_}/disk.img"
self.log.debug(f"Building VM for clan name: {clan_name}")
env = os.environ.copy()
env["CLAN_DIR"] = str(self.vm.flake_url)
env["PYTHONPATH"] = str(
":".join(sys.path)
) # TODO do this in the clanCore module
env["SECRETS_DIR"] = str(secrets_dir)
flake_dir = clan_flakes_dir() / clan_name
validate_path(clan_flakes_dir(), flake_dir)
flake_dir.mkdir(exist_ok=True)
xchg_dir = flake_dir / "xchg"
xchg_dir.mkdir()
secrets_dir = flake_dir / "secrets"
secrets_dir.mkdir()
disk_img = f"{flake_dir}/disk.img"
env = os.environ.copy()
env["CLAN_DIR"] = str(self.vm.flake_url)
env["PYTHONPATH"] = str(
":".join(sys.path)
) # TODO do this in the clanCore module
env["SECRETS_DIR"] = str(secrets_dir)
res = is_path_or_url(str(self.vm.flake_url))
if res is None:
raise ClanError(
f"flake_url must be a valid path or URL, got {self.vm.flake_url}"
)
elif res == "path": # Only generate secrets for local clans
cmd = next(cmds)
if Path(self.vm.flake_url).is_dir():
cmd.run(
[vm_config["generateSecrets"]],
[vm_config["generateSecrets"], clan_name],
env=env,
)
else:
cmd.run(["echo", "won't generate secrets for non local clan"])
self.log.warning("won't generate secrets for non local clan")
cmd = next(cmds)
cmd.run(
[vm_config["uploadSecrets"]],
env=env,
cmd = next(cmds)
cmd.run(
[vm_config["uploadSecrets"], clan_name],
env=env,
)
cmd = next(cmds)
cmd.run(
nix_shell(
["qemu"],
[
"qemu-img",
"create",
"-f",
"raw",
disk_img,
"1024M",
],
)
)
cmd = next(cmds)
cmd.run(
nix_shell(
["qemu"],
[
"qemu-img",
"create",
"-f",
"raw",
disk_img,
"1024M",
],
)
cmd = next(cmds)
cmd.run(
nix_shell(
["e2fsprogs"],
[
"mkfs.ext4",
"-L",
"nixos",
disk_img,
],
)
)
cmd = next(cmds)
cmd.run(
nix_shell(
["e2fsprogs"],
[
"mkfs.ext4",
"-L",
"nixos",
disk_img,
],
)
)
cmd = next(cmds)
cmdline = [
(Path(vm_config["toplevel"]) / "kernel-params").read_text(),
f'init={vm_config["toplevel"]}/init',
f'regInfo={vm_config["regInfo"]}/registration',
"console=ttyS0,115200n8",
"console=tty0",
]
qemu_command = [
# fmt: off
"qemu-kvm",
"-name", machine,
"-m", f'{vm_config["memorySize"]}M',
"-smp", str(vm_config["cores"]),
"-device", "virtio-rng-pci",
"-net", "nic,netdev=user.0,model=virtio", "-netdev", "user,id=user.0",
"-virtfs", "local,path=/nix/store,security_model=none,mount_tag=nix-store",
"-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=shared",
"-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=xchg",
"-virtfs", f"local,path={secrets_dir},security_model=none,mount_tag=secrets",
"-drive", f'cache=writeback,file={disk_img},format=raw,id=drive1,if=none,index=1,werror=report',
"-device", "virtio-blk-pci,bootindex=1,drive=drive1,serial=root",
"-device", "virtio-keyboard",
"-usb",
"-device", "usb-tablet,bus=usb-bus.0",
"-kernel", f'{vm_config["toplevel"]}/kernel',
"-initrd", vm_config["initrd"],
"-append", " ".join(cmdline),
# fmt: on
]
if not self.vm.graphics:
qemu_command.append("-nographic")
print("$ " + shlex.join(qemu_command))
cmd.run(nix_shell(["qemu"], qemu_command))
cmd = next(cmds)
cmdline = [
(Path(vm_config["toplevel"]) / "kernel-params").read_text(),
f'init={vm_config["toplevel"]}/init',
f'regInfo={vm_config["regInfo"]}/registration',
"console=ttyS0,115200n8",
"console=tty0",
]
qemu_command = [
# fmt: off
"qemu-kvm",
"-name", machine,
"-m", f'{vm_config["memorySize"]}M',
"-smp", str(vm_config["cores"]),
"-device", "virtio-rng-pci",
"-net", "nic,netdev=user.0,model=virtio", "-netdev", "user,id=user.0",
"-virtfs", "local,path=/nix/store,security_model=none,mount_tag=nix-store",
"-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=shared",
"-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=xchg",
"-virtfs", f"local,path={secrets_dir},security_model=none,mount_tag=secrets",
"-drive", f'cache=writeback,file={disk_img},format=raw,id=drive1,if=none,index=1,werror=report',
"-device", "virtio-blk-pci,bootindex=1,drive=drive1,serial=root",
"-device", "virtio-keyboard",
"-usb",
"-device", "usb-tablet,bus=usb-bus.0",
"-kernel", f'{vm_config["toplevel"]}/kernel',
"-initrd", vm_config["initrd"],
"-append", " ".join(cmdline),
# fmt: on
]
if not self.vm.graphics:
qemu_command.append("-nographic")
print("$ " + shlex.join(qemu_command))
cmd.run(nix_shell(["qemu"], qemu_command))
def create_vm(vm: VmConfig) -> BuildVmTask:
@ -147,7 +180,7 @@ def create_vm(vm: VmConfig) -> BuildVmTask:
def create_command(args: argparse.Namespace) -> None:
clan_dir = get_clan_flake_toplevel().as_posix()
clan_dir = specific_flake_dir(args.flake)
vm = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine))
task = create_vm(vm)
@ -157,4 +190,9 @@ def create_command(args: argparse.Namespace) -> None:
def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("machine", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=create_command)

View File

@ -1,16 +1,17 @@
import argparse
import asyncio
import json
from pathlib import Path
from pydantic import BaseModel
from pydantic import AnyUrl, BaseModel
from ..async_cmd import run
from ..dirs import get_clan_flake_toplevel
from ..dirs import specific_flake_dir
from ..nix import nix_config, nix_eval
class VmConfig(BaseModel):
flake_url: str
flake_url: AnyUrl | Path
flake_attr: str
cores: int
@ -18,21 +19,22 @@ class VmConfig(BaseModel):
graphics: bool
async def inspect_vm(flake_url: str, flake_attr: str) -> VmConfig:
async def inspect_vm(flake_url: AnyUrl | Path, flake_attr: str) -> VmConfig:
config = nix_config()
system = config["system"]
cmd = nix_eval(
[
f'{flake_url}#clanInternals.machines."{system}"."{flake_attr}".config.system.clan.vm.config'
]
)
stdout, stderr = await run(cmd)
data = json.loads(stdout)
out = await run(cmd)
data = json.loads(out.stdout)
return VmConfig(flake_url=flake_url, flake_attr=flake_attr, **data)
def inspect_command(args: argparse.Namespace) -> None:
clan_dir = get_clan_flake_toplevel().as_posix()
clan_dir = specific_flake_dir(args.flake)
res = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine))
print("Cores:", res.cores)
print("Memory size:", res.memory_size)
@ -41,4 +43,9 @@ def inspect_command(args: argparse.Namespace) -> None:
def register_inspect_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("machine", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=inspect_command)
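A minimal sketch of calling `inspect_vm` directly, mirroring `inspect_command`; it assumes a local clan flake named "my-clan" with a machine `vm1` already exists:

```python
# Sketch only: assumes a clan flake named "my-clan" with a machine "vm1"
# already exists in the local flake store.
import asyncio

from clan_cli.dirs import specific_flake_dir
from clan_cli.types import FlakeName
from clan_cli.vms.inspect import inspect_vm

vm = asyncio.run(
    inspect_vm(flake_url=specific_flake_dir(FlakeName("my-clan")), flake_attr="vm1")
)
print("Cores:", vm.cores)
print("Memory size:", vm.memory_size)
print("Graphics:", vm.graphics)
```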

View File

@ -0,0 +1,31 @@
import logging
from pathlib import Path
from typing import Any
from pydantic import AnyUrl, BaseModel, validator
from ..dirs import clan_data_dir, clan_flakes_dir
from ..flakes.create import DEFAULT_URL
from ..types import validate_path
log = logging.getLogger(__name__)
class ClanDataPath(BaseModel):
directory: Path
@validator("directory")
def check_directory(cls: Any, v: Path) -> Path: # noqa
return validate_path(clan_data_dir(), v)
class ClanFlakePath(BaseModel):
flake_name: Path
@validator("flake_name")
def check_flake_name(cls: Any, v: Path) -> Path: # noqa
return validate_path(clan_flakes_dir(), v)
class FlakeCreateInput(ClanFlakePath):
url: AnyUrl = DEFAULT_URL

View File

@ -1,8 +1,9 @@
from enum import Enum
from typing import List
from typing import Dict, List
from pydantic import BaseModel, Field
from ..async_cmd import CmdOut
from ..task_manager import TaskStatus
from ..vms.inspect import VmConfig
@ -70,7 +71,7 @@ class FlakeAction(BaseModel):
class FlakeCreateResponse(BaseModel):
uuid: str
cmd_out: Dict[str, CmdOut]
class FlakeResponse(BaseModel):

View File

@ -4,8 +4,9 @@ import logging
from fastapi import APIRouter, HTTPException
from clan_cli.clan_modules import get_clan_module_names
from clan_cli.types import FlakeName
from ..schemas import (
from ..api_outputs import (
ClanModulesResponse,
)
@ -13,9 +14,9 @@ log = logging.getLogger(__name__)
router = APIRouter()
@router.get("/api/clan_modules")
async def list_clan_modules() -> ClanModulesResponse:
module_names, error = get_clan_module_names()
@router.get("/api/{flake_name}/clan_modules")
async def list_clan_modules(flake_name: FlakeName) -> ClanModulesResponse:
module_names, error = get_clan_module_names(flake_name)
if error is not None:
raise HTTPException(status_code=400, detail=error)
return ClanModulesResponse(clan_modules=module_names)

View File

@ -3,28 +3,34 @@ from json.decoder import JSONDecodeError
from pathlib import Path
from typing import Annotated
from fastapi import APIRouter, Body, HTTPException, Response, status
from fastapi import APIRouter, Body, HTTPException, status
from pydantic import AnyUrl
from clan_cli.webui.schemas import (
from clan_cli.webui.api_inputs import (
FlakeCreateInput,
)
from clan_cli.webui.api_outputs import (
FlakeAction,
FlakeAttrResponse,
FlakeCreateResponse,
FlakeResponse,
)
from ...async_cmd import run
from ...flake import create
from ...flakes import create
from ...nix import nix_command, nix_flake_show
router = APIRouter()
async def get_attrs(url: str) -> list[str]:
# TODO: Check for directory traversal
async def get_attrs(url: AnyUrl | Path) -> list[str]:
cmd = nix_flake_show(url)
stdout, stderr = await run(cmd)
out = await run(cmd)
data: dict[str, dict] = {}
try:
data = json.loads(stdout)
data = json.loads(out.stdout)
except JSONDecodeError:
raise HTTPException(status_code=422, detail="Could not load flake.")
@ -38,21 +44,23 @@ async def get_attrs(url: str) -> list[str]:
return flake_attrs
# TODO: Check for directory traversal
@router.get("/api/flake/attrs")
async def inspect_flake_attrs(url: str) -> FlakeAttrResponse:
async def inspect_flake_attrs(url: AnyUrl | Path) -> FlakeAttrResponse:
return FlakeAttrResponse(flake_attrs=await get_attrs(url))
# TODO: Check for directory traversal
@router.get("/api/flake")
async def inspect_flake(
url: str,
url: AnyUrl | Path,
) -> FlakeResponse:
actions = []
# Extract the flake from the given URL
# We do this by running 'nix flake prefetch {url} --json'
cmd = nix_command(["flake", "prefetch", url, "--json", "--refresh"])
stdout, stderr = await run(cmd)
data: dict[str, str] = json.loads(stdout)
cmd = nix_command(["flake", "prefetch", str(url), "--json", "--refresh"])
out = await run(cmd)
data: dict[str, str] = json.loads(out.stdout)
if data.get("storePath") is None:
raise HTTPException(status_code=500, detail="Could not load flake")
@ -68,13 +76,15 @@ async def inspect_flake(
return FlakeResponse(content=content, actions=actions)
@router.post("/api/flake/create")
@router.post("/api/flake/create", status_code=status.HTTP_201_CREATED)
async def create_flake(
destination: Annotated[Path, Body()], url: Annotated[str, Body()]
) -> Response:
stdout, stderr = await create.create_flake(destination, url)
print(stderr.decode("utf-8"), end="")
print(stdout.decode("utf-8"), end="")
resp = Response()
resp.status_code = status.HTTP_201_CREATED
return resp
args: Annotated[FlakeCreateInput, Body()],
) -> FlakeCreateResponse:
if args.flake_name.exists():
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="Flake already exists",
)
cmd_out = await create.create_flake(args.flake_name, args.url)
return FlakeCreateResponse(cmd_out=cmd_out)
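A hedged sketch of exercising the new create endpoint with FastAPI's test client; the flake name is a placeholder and `DEFAULT_URL` is the template URL imported from `clan_cli.flakes.create`:

```python
# Sketch only: "my-clan" is a placeholder flake name.
from fastapi.testclient import TestClient

from clan_cli.flakes.create import DEFAULT_URL
from clan_cli.webui.app import app

client = TestClient(app)
response = client.post(
    "/api/flake/create",
    json={"flake_name": "my-clan", "url": str(DEFAULT_URL)},
)
assert response.status_code == 201  # 409 if a flake of that name already exists
```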

View File

@ -12,7 +12,8 @@ from ...config.machine import (
)
from ...machines.create import create_machine as _create_machine
from ...machines.list import list_machines as _list_machines
from ..schemas import (
from ...types import FlakeName
from ..api_outputs import (
ConfigResponse,
Machine,
MachineCreate,
@ -27,66 +28,72 @@ log = logging.getLogger(__name__)
router = APIRouter()
@router.get("/api/machines")
async def list_machines() -> MachinesResponse:
@router.get("/api/{flake_name}/machines")
async def list_machines(flake_name: FlakeName) -> MachinesResponse:
machines = []
for m in _list_machines():
for m in _list_machines(flake_name):
machines.append(Machine(name=m, status=Status.UNKNOWN))
return MachinesResponse(machines=machines)
@router.post("/api/machines", status_code=201)
async def create_machine(machine: Annotated[MachineCreate, Body()]) -> MachineResponse:
_create_machine(machine.name)
@router.post("/api/{flake_name}/machines", status_code=201)
async def create_machine(
flake_name: FlakeName, machine: Annotated[MachineCreate, Body()]
) -> MachineResponse:
await _create_machine(flake_name, machine.name)
return MachineResponse(machine=Machine(name=machine.name, status=Status.UNKNOWN))
@router.get("/api/machines/{name}")
async def get_machine(name: str) -> MachineResponse:
@router.get("/api/{flake_name}/machines/{name}")
async def get_machine(flake_name: FlakeName, name: str) -> MachineResponse:
log.error("TODO")
return MachineResponse(machine=Machine(name=name, status=Status.UNKNOWN))
@router.get("/api/machines/{name}/config")
async def get_machine_config(name: str) -> ConfigResponse:
config = config_for_machine(name)
@router.get("/api/{flake_name}/machines/{name}/config")
async def get_machine_config(flake_name: FlakeName, name: str) -> ConfigResponse:
config = config_for_machine(flake_name, name)
return ConfigResponse(config=config)
@router.put("/api/machines/{name}/config")
@router.put("/api/{flake_name}/machines/{name}/config")
async def set_machine_config(
name: str, config: Annotated[dict, Body()]
flake_name: FlakeName, name: str, config: Annotated[dict, Body()]
) -> ConfigResponse:
set_config_for_machine(name, config)
set_config_for_machine(flake_name, name, config)
return ConfigResponse(config=config)
@router.get("/api/machines/{name}/schema")
async def get_machine_schema(name: str) -> SchemaResponse:
schema = schema_for_machine(name)
@router.get("/api/{flake_name}/machines/{name}/schema")
async def get_machine_schema(flake_name: FlakeName, name: str) -> SchemaResponse:
schema = schema_for_machine(flake_name, name)
return SchemaResponse(schema=schema)
@router.put("/api/machines/{name}/schema")
@router.put("/api/{flake_name}/machines/{name}/schema")
async def set_machine_schema(
name: str, config: Annotated[dict, Body()]
flake_name: FlakeName, name: str, config: Annotated[dict, Body()]
) -> SchemaResponse:
schema = schema_for_machine(name, config)
schema = schema_for_machine(flake_name, name, config)
return SchemaResponse(schema=schema)
@router.get("/api/machines/{name}/verify")
async def get_verify_machine_config(name: str) -> VerifyMachineResponse:
error = verify_machine_config(name)
@router.get("/api/{flake_name}/machines/{name}/verify")
async def get_verify_machine_config(
flake_name: FlakeName, name: str
) -> VerifyMachineResponse:
error = verify_machine_config(flake_name, name)
success = error is None
return VerifyMachineResponse(success=success, error=error)
@router.put("/api/machines/{name}/verify")
@router.put("/api/{flake_name}/machines/{name}/verify")
async def put_verify_machine_config(
flake_name: FlakeName,
name: str,
config: Annotated[dict, Body()],
) -> VerifyMachineResponse:
error = verify_machine_config(name, config)
error = verify_machine_config(flake_name, name, config)
success = error is None
return VerifyMachineResponse(success=success, error=error)
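A hedged sketch of the flake-scoped machine routes, assuming a flake named "my-clan" was already created (for example via `/api/flake/create`):

```python
# Sketch only: assumes a flake named "my-clan" already exists, e.g. created
# via /api/flake/create.
from fastapi.testclient import TestClient

from clan_cli.webui.app import app

client = TestClient(app)

# Machines are now addressed per flake.
response = client.post("/api/my-clan/machines", json={"name": "machine1"})
assert response.status_code == 201

response = client.get("/api/my-clan/machines")
assert response.status_code == 200
print(response.json()["machines"])
```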

View File

@ -1,24 +1,32 @@
import logging
from pathlib import Path
from typing import Annotated, Iterator
from uuid import UUID
from fastapi import APIRouter, Body, status
from fastapi.exceptions import HTTPException
from fastapi.responses import StreamingResponse
from pydantic import AnyUrl
from clan_cli.webui.routers.flake import get_attrs
from ...task_manager import get_task
from ...vms import create, inspect
from ..schemas import VmConfig, VmCreateResponse, VmInspectResponse, VmStatusResponse
from ..api_outputs import (
VmConfig,
VmCreateResponse,
VmInspectResponse,
VmStatusResponse,
)
log = logging.getLogger(__name__)
router = APIRouter()
# TODO: Check for directory traversal
@router.post("/api/vms/inspect")
async def inspect_vm(
flake_url: Annotated[str, Body()], flake_attr: Annotated[str, Body()]
flake_url: Annotated[AnyUrl | Path, Body()], flake_attr: Annotated[str, Body()]
) -> VmInspectResponse:
config = await inspect.inspect_vm(flake_url, flake_attr)
return VmInspectResponse(config=config)
@ -46,6 +54,7 @@ async def get_vm_logs(uuid: UUID) -> StreamingResponse:
)
# TODO: Check for directory traversal
@router.post("/api/vms/create")
async def create_vm(vm: Annotated[VmConfig, Body()]) -> VmCreateResponse:
flake_attrs = await get_attrs(vm.flake_url)

View File

@ -11,24 +11,26 @@ from typing import Iterator
# XXX: can we dynamically load this using nix develop?
import uvicorn
from pydantic import AnyUrl, IPvAnyAddress
from pydantic.tools import parse_obj_as
from clan_cli.errors import ClanError
log = logging.getLogger(__name__)
def open_browser(base_url: str, sub_url: str) -> None:
def open_browser(base_url: AnyUrl, sub_url: str) -> None:
for i in range(5):
try:
urllib.request.urlopen(base_url + "/health")
break
except OSError:
time.sleep(i)
url = f"{base_url}/{sub_url.removeprefix('/')}"
url = parse_obj_as(AnyUrl, f"{base_url}/{sub_url.removeprefix('/')}")
_open_browser(url)
def _open_browser(url: str) -> subprocess.Popen:
def _open_browser(url: AnyUrl) -> subprocess.Popen:
for browser in ("firefox", "iceweasel", "iceape", "seamonkey"):
if shutil.which(browser):
# Do not add a new profile, as it will break in combination with
@ -48,7 +50,7 @@ def _open_browser(url: str) -> subprocess.Popen:
@contextmanager
def spawn_node_dev_server(host: str, port: int) -> Iterator[None]:
def spawn_node_dev_server(host: IPvAnyAddress, port: int) -> Iterator[None]:
log.info("Starting node dev server...")
path = Path(__file__).parent.parent.parent.parent / "ui"
with subprocess.Popen(
@ -61,7 +63,7 @@ def spawn_node_dev_server(host: str, port: int) -> Iterator[None]:
"dev",
"--",
"--hostname",
host,
str(host),
"--port",
str(port),
],

View File

@ -8,9 +8,11 @@
, openssh
, pytest
, pytest-cov
, pytest-xdist
, pytest-subprocess
, pytest-parallel
, pytest-timeout
, remote-pdb
, ipdb
, python3
, runCommand
, setuptools
@ -31,6 +33,8 @@
, qemu
, gnupg
, e2fsprogs
, mypy
, cntr
}:
let
@ -44,8 +48,10 @@ let
pytest
pytest-cov
pytest-subprocess
pytest-parallel
pytest-xdist
pytest-timeout
remote-pdb
ipdb
openssh
git
gnupg
@ -65,6 +71,7 @@ let
rsync
sops
git
mypy
qemu
e2fsprogs
];

View File

@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -xeuo pipefail
PID_NIX=$(pgrep --full "python -m pytest" | cut -d " " -f2 | head -n1)
sudo cntr attach "$PID_NIX"

View File

@ -14,9 +14,14 @@ exclude = ["clan_cli.nixpkgs*"]
[tool.setuptools.package-data]
clan_cli = [ "config/jsonschema/*", "webui/assets/**/*"]
[tool.pytest.ini_options]
testpaths = "tests"
faulthandler_timeout = 60
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --workers auto --durations 5"
log_level = "DEBUG"
log_format = "%(levelname)s: %(message)s\n %(pathname)s:%(lineno)d::%(funcName)s"
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --durations 5 --color=yes --new-first" # Add --pdb for debugging
norecursedirs = "tests/helpers"
markers = [ "impure" ]
@ -36,6 +41,10 @@ ignore_missing_imports = true
module = "jsonschema.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "ipdb.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "pytest.*"
ignore_missing_imports = true
@ -47,7 +56,7 @@ ignore_missing_imports = true
[tool.ruff]
line-length = 88
select = [ "E", "F", "I", "U", "N"]
select = [ "E", "F", "I", "N"]
ignore = [ "E501" ]
[tool.black]

View File

@ -23,7 +23,8 @@ mkShell {
shellHook = ''
tmp_path=$(realpath ./.direnv)
source=$(realpath .)
repo_root=$(realpath .)
mkdir -p "$tmp_path/python/${pythonWithDeps.sitePackages}"
# Install the package in editable mode
@ -35,14 +36,15 @@ mkShell {
--no-index \
--no-build-isolation \
--prefix "$tmp_path/python" \
--editable $source
--editable $repo_root
rm -f clan_cli/nixpkgs clan_cli/webui/assets
ln -sf ${clan-cli.nixpkgs} clan_cli/nixpkgs
ln -sf ${ui-assets} clan_cli/webui/assets
export PATH="$tmp_path/python/bin:${checkScript}/bin:$PATH"
export PYTHONPATH="$source:$tmp_path/python/${pythonWithDeps.sitePackages}:"
export PYTHONPATH="$repo_root:$tmp_path/python/${pythonWithDeps.sitePackages}:"
export XDG_DATA_DIRS="$tmp_path/share''${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}"
export fish_complete_path="$tmp_path/share/fish/vendor_completions.d''${fish_complete_path:+:$fish_complete_path}"
@ -53,6 +55,8 @@ mkShell {
register-python-argcomplete --shell fish clan > $tmp_path/share/fish/vendor_completions.d/clan.fish
register-python-argcomplete --shell bash clan > $tmp_path/share/bash-completion/completions/clan
./bin/clan machines create example
./bin/clan flakes create example_clan
./bin/clan machines create example_machine example_clan
'';
}

View File

@ -1,9 +1,14 @@
import logging
import pytest
from fastapi.testclient import TestClient
from clan_cli.webui.app import app
# TODO: Why stateful
@pytest.fixture(scope="session")
def api() -> TestClient:
# logging.getLogger("httpx").setLevel(level=logging.WARNING)
logging.getLogger("asyncio").setLevel(logging.INFO)
return TestClient(app)

View File

@ -1,7 +1,8 @@
import os
import signal
import subprocess
from typing import IO, Any, Dict, Iterator, List, Union
from pathlib import Path
from typing import IO, Any, Dict, Iterator, List, Optional, Union
import pytest
@ -19,6 +20,7 @@ class Command:
stdin: _FILE = None,
stdout: _FILE = None,
stderr: _FILE = None,
workdir: Optional[Path] = None,
) -> subprocess.Popen[str]:
env = os.environ.copy()
env.update(extra_env)
@ -31,6 +33,7 @@ class Command:
stderr=stderr,
stdin=stdin,
text=True,
cwd=workdir,
)
self.processes.append(p)
return p

View File

@ -1,13 +1,19 @@
import fileinput
import logging
import os
import shutil
import subprocess as sp
import tempfile
from pathlib import Path
from typing import Iterator
from typing import Iterator, NamedTuple
import pytest
from root import CLAN_CORE
from clan_cli.dirs import nixpkgs_source
from clan_cli.types import FlakeName
log = logging.getLogger(__name__)
# substitutes strings in a file.
@ -27,64 +33,101 @@ def substitute(
print(line, end="")
class FlakeForTest(NamedTuple):
name: FlakeName
path: Path
def create_flake(
monkeypatch: pytest.MonkeyPatch,
name: str,
temporary_home: Path,
flake_name: FlakeName,
clan_core_flake: Path | None = None,
machines: list[str] = [],
remote: bool = False,
) -> Iterator[Path]:
) -> Iterator[FlakeForTest]:
"""
Creates a flake with the given name and machines.
The machine names map to the machines in ./test_machines
"""
template = Path(__file__).parent / name
template = Path(__file__).parent / flake_name
# copy the template to a new temporary location
with tempfile.TemporaryDirectory() as tmpdir_:
home = Path(tmpdir_)
flake = home / name
shutil.copytree(template, flake)
# lookup the requested machines in ./test_machines and include them
if machines:
(flake / "machines").mkdir(parents=True, exist_ok=True)
for machine_name in machines:
machine_path = Path(__file__).parent / "machines" / machine_name
shutil.copytree(machine_path, flake / "machines" / machine_name)
substitute(flake / "machines" / machine_name / "default.nix", flake)
# in the flake.nix file replace the string __CLAN_URL__ with the clan flake
# provided by get_test_flake_toplevel
flake_nix = flake / "flake.nix"
# this is where we would install the sops key to, when updating
substitute(flake_nix, clan_core_flake, flake)
if remote:
with tempfile.TemporaryDirectory() as workdir:
monkeypatch.chdir(workdir)
monkeypatch.setenv("HOME", str(home))
yield flake
else:
monkeypatch.chdir(flake)
monkeypatch.setenv("HOME", str(home))
yield flake
flake = temporary_home / ".local/state/clan/flake" / flake_name
shutil.copytree(template, flake)
# lookup the requested machines in ./test_machines and include them
if machines:
(flake / "machines").mkdir(parents=True, exist_ok=True)
for machine_name in machines:
machine_path = Path(__file__).parent / "machines" / machine_name
shutil.copytree(machine_path, flake / "machines" / machine_name)
substitute(flake / "machines" / machine_name / "default.nix", flake)
# in the flake.nix file replace the string __CLAN_URL__ with the clan flake
# provided by get_test_flake_toplevel
flake_nix = flake / "flake.nix"
# this is where we would install the sops key to, when updating
substitute(flake_nix, clan_core_flake, flake)
if "/tmp" not in str(os.environ.get("HOME")):
log.warning(
f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}"
)
# TODO: Find out why test_vms_api.py fails in nix build
# but works in pytest when this bottom line is commented out
sp.run(
["git", "config", "--global", "init.defaultBranch", "main"],
cwd=flake,
check=True,
)
sp.run(["git", "init"], cwd=flake, check=True)
sp.run(["git", "add", "."], cwd=flake, check=True)
sp.run(["git", "config", "user.name", "clan-tool"], cwd=flake, check=True)
sp.run(["git", "config", "user.email", "clan@example.com"], cwd=flake, check=True)
sp.run(["git", "commit", "-a", "-m", "Initial commit"], cwd=flake, check=True)
if remote:
with tempfile.TemporaryDirectory():
yield FlakeForTest(flake_name, flake)
else:
yield FlakeForTest(flake_name, flake)
@pytest.fixture
def test_flake(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
yield from create_flake(monkeypatch, "test_flake")
def test_flake(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
yield from create_flake(monkeypatch, temporary_home, FlakeName("test_flake"))
@pytest.fixture
def test_flake_with_core(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
def test_flake_with_core(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
if not (CLAN_CORE / "flake.nix").exists():
raise Exception(
"clan-core flake not found. This test requires the clan-core flake to be present"
)
yield from create_flake(monkeypatch, "test_flake_with_core", CLAN_CORE)
yield from create_flake(
monkeypatch,
temporary_home,
FlakeName("test_flake_with_core"),
CLAN_CORE,
)
@pytest.fixture
def test_flake_with_core_and_pass(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
def test_flake_with_core_and_pass(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
if not (CLAN_CORE / "flake.nix").exists():
raise Exception(
"clan-core flake not found. This test requires the clan-core flake to be present"
)
yield from create_flake(monkeypatch, "test_flake_with_core_and_pass", CLAN_CORE)
yield from create_flake(
monkeypatch,
temporary_home,
FlakeName("test_flake_with_core_and_pass"),
CLAN_CORE,
)

View File

@ -1,6 +1,11 @@
import argparse
import logging
import shlex
from clan_cli import create_parser
from clan_cli.custom_logger import get_caller
log = logging.getLogger(__name__)
class Cli:
@ -8,6 +13,9 @@ class Cli:
self.parser = create_parser(prog="clan")
def run(self, args: list[str]) -> argparse.Namespace:
cmd = shlex.join(["clan"] + args)
log.debug(f"$ {cmd}")
log.debug(f"Caller {get_caller()}")
parsed = self.parser.parse_args(args)
if hasattr(parsed, "func"):
parsed.func(parsed)

View File

@ -1,11 +1,26 @@
import logging
import os
import tempfile
from pathlib import Path
from typing import Iterator
import pytest
log = logging.getLogger(__name__)
@pytest.fixture
def temporary_dir() -> Iterator[Path]:
with tempfile.TemporaryDirectory(prefix="pytest-") as dirpath:
yield Path(dirpath)
def temporary_home(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
env_dir = os.getenv("TEST_TEMPORARY_DIR")
if env_dir is not None:
path = Path(env_dir).resolve()
log.debug("Temp HOME directory: %s", str(path))
monkeypatch.setenv("HOME", str(path))
monkeypatch.chdir(str(path))
yield path
else:
with tempfile.TemporaryDirectory(prefix="pytest-") as dirpath:
monkeypatch.setenv("HOME", str(dirpath))
monkeypatch.chdir(str(dirpath))
log.debug("Temp HOME directory: %s", str(dirpath))
yield Path(dirpath)

View File

@ -1,13 +1,12 @@
from pathlib import Path
import pytest
from api import TestClient
from fixtures_flakes import FlakeForTest
@pytest.mark.impure()
def test_configure_machine(api: TestClient, test_flake_with_core: Path) -> None:
def test_configure_machine(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
# retrieve the list of available clanModules
response = api.get("/api/clan_modules")
response = api.get(f"/api/{test_flake_with_core.name}/clan_modules")
response_json = response.json()
assert response.status_code == 200
assert isinstance(response_json, dict)

View File

@ -5,6 +5,7 @@ from typing import Any, Optional
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli import config
from clan_cli.config import parsing
@ -29,7 +30,7 @@ example_options = f"{Path(config.__file__).parent}/jsonschema/options.json"
def test_set_some_option(
args: list[str],
expected: dict[str, Any],
test_flake: Path,
test_flake: FlakeForTest,
) -> None:
# create temporary file for out_file
with tempfile.NamedTemporaryFile() as out_file:
@ -46,24 +47,26 @@ def test_set_some_option(
out_file.name,
]
+ args
+ [test_flake.name]
)
json_out = json.loads(open(out_file.name).read())
assert json_out == expected
def test_configure_machine(
test_flake: Path,
temporary_dir: Path,
test_flake: FlakeForTest,
temporary_home: Path,
capsys: pytest.CaptureFixture,
monkeypatch: pytest.MonkeyPatch,
) -> None:
monkeypatch.setenv("HOME", str(temporary_dir))
cli = Cli()
cli.run(["config", "-m", "machine1", "clan.jitsi.enable", "true"])
cli.run(["config", "-m", "machine1", "clan.jitsi.enable", "true", test_flake.name])
# clear the output buffer
capsys.readouterr()
# read a option value
cli.run(["config", "-m", "machine1", "clan.jitsi.enable"])
cli.run(["config", "-m", "machine1", "clan.jitsi.enable", test_flake.name])
# read the output
assert capsys.readouterr().out == "true\n"

View File

@ -6,6 +6,9 @@ import pytest
from api import TestClient
from cli import Cli
from clan_cli.dirs import clan_flakes_dir
from clan_cli.flakes.create import DEFAULT_URL
@pytest.fixture
def cli() -> Cli:
@ -14,19 +17,20 @@ def cli() -> Cli:
@pytest.mark.impure
def test_create_flake_api(
monkeypatch: pytest.MonkeyPatch, api: TestClient, temporary_dir: Path
monkeypatch: pytest.MonkeyPatch, api: TestClient, temporary_home: Path
) -> None:
flake_dir = temporary_dir / "flake_dir"
flake_dir_str = str(flake_dir.resolve())
monkeypatch.chdir(clan_flakes_dir())
flake_name = "flake_dir"
flake_dir = clan_flakes_dir() / flake_name
response = api.post(
"/api/flake/create",
json=dict(
destination=flake_dir_str,
url="git+https://git.clan.lol/clan/clan-core#new-clan",
flake_name=str(flake_dir),
url=str(DEFAULT_URL),
),
)
assert response.status_code == 201, "Failed to create flake"
assert response.status_code == 201, f"Failed to create flake {response.text}"
assert (flake_dir / ".clan-flake").exists()
assert (flake_dir / "flake.nix").exists()
@ -34,19 +38,21 @@ def test_create_flake_api(
@pytest.mark.impure
def test_create_flake(
monkeypatch: pytest.MonkeyPatch,
temporary_dir: Path,
capsys: pytest.CaptureFixture,
temporary_home: Path,
cli: Cli,
) -> None:
monkeypatch.chdir(temporary_dir)
flake_dir = temporary_dir / "flake_dir"
flake_dir_str = str(flake_dir.resolve())
cli.run(["flake", "create", flake_dir_str])
monkeypatch.chdir(clan_flakes_dir())
flake_name = "flake_dir"
flake_dir = clan_flakes_dir() / flake_name
cli.run(["flakes", "create", flake_name])
assert (flake_dir / ".clan-flake").exists()
monkeypatch.chdir(flake_dir)
cli.run(["machines", "create", "machine1"])
cli.run(["machines", "create", "machine1", flake_name])
capsys.readouterr() # flush cache
cli.run(["machines", "list"])
cli.run(["machines", "list", flake_name])
assert "machine1" in capsys.readouterr().out
flake_show = subprocess.run(
["nix", "flake", "show", "--json"],
@ -61,6 +67,17 @@ def test_create_flake(
pytest.fail("nixosConfigurations.machine1 not found in flake outputs")
# configure machine1
capsys.readouterr()
cli.run(["config", "--machine", "machine1", "services.openssh.enable"])
cli.run(
["config", "--machine", "machine1", "services.openssh.enable", "", flake_name]
)
capsys.readouterr()
cli.run(["config", "--machine", "machine1", "services.openssh.enable", "true"])
cli.run(
[
"config",
"--machine",
"machine1",
"services.openssh.enable",
"true",
flake_name,
]
)

View File

@ -1,22 +1,17 @@
from pathlib import Path
# from clan_cli.dirs import _get_clan_flake_toplevel
import pytest
# TODO: Reimplement test?
# def test_get_clan_flake_toplevel(
# monkeypatch: pytest.MonkeyPatch, temporary_home: Path
# ) -> None:
# monkeypatch.chdir(temporary_home)
# with pytest.raises(ClanError):
# print(_get_clan_flake_toplevel())
# (temporary_home / ".git").touch()
# assert _get_clan_flake_toplevel() == temporary_home
from clan_cli.dirs import get_clan_flake_toplevel
from clan_cli.errors import ClanError
def test_get_clan_flake_toplevel(
monkeypatch: pytest.MonkeyPatch, temporary_dir: Path
) -> None:
monkeypatch.chdir(temporary_dir)
with pytest.raises(ClanError):
print(get_clan_flake_toplevel())
(temporary_dir / ".git").touch()
assert get_clan_flake_toplevel() == temporary_dir
subdir = temporary_dir / "subdir"
subdir.mkdir()
monkeypatch.chdir(subdir)
(subdir / ".clan-flake").touch()
assert get_clan_flake_toplevel() == subdir
# subdir = temporary_home / "subdir"
# subdir.mkdir()
# monkeypatch.chdir(subdir)
# (subdir / ".clan-flake").touch()
# assert _get_clan_flake_toplevel() == subdir

View File

@ -1,13 +1,16 @@
import json
from pathlib import Path
import logging
import pytest
from api import TestClient
from fixtures_flakes import FlakeForTest
log = logging.getLogger(__name__)
@pytest.mark.impure
def test_inspect_ok(api: TestClient, test_flake_with_core: Path) -> None:
params = {"url": str(test_flake_with_core)}
def test_inspect_ok(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
params = {"url": str(test_flake_with_core.path)}
response = api.get(
"/api/flake/attrs",
params=params,
@ -32,8 +35,8 @@ def test_inspect_err(api: TestClient) -> None:
@pytest.mark.impure
def test_inspect_flake(api: TestClient, test_flake_with_core: Path) -> None:
params = {"url": str(test_flake_with_core)}
def test_inspect_flake(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
params = {"url": str(test_flake_with_core.path)}
response = api.get(
"/api/flake",
params=params,

View File

@ -9,6 +9,7 @@
let
clan = clan-core.lib.buildClan {
directory = self;
clanName = "test_flake_with_core";
machines = {
vm1 = { lib, ... }: {
clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";

View File

@ -9,6 +9,7 @@
let
clan = clan-core.lib.buildClan {
directory = self;
clanName = "test_flake_with_core_and_pass";
machines = {
vm1 = { lib, ... }: {
clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";

View File

@ -9,6 +9,7 @@
let
clan = clan-core.lib.buildClan {
directory = self;
clanName = "test_flake_with_core_dynamic_machines";
machines =
let
machineModules = builtins.readDir (self + "/machines");

View File

@ -3,6 +3,7 @@ from typing import TYPE_CHECKING
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
if TYPE_CHECKING:
from age_keys import KeyPair
@ -10,7 +11,7 @@ if TYPE_CHECKING:
def test_import_sops(
test_root: Path,
test_flake: Path,
test_flake: FlakeForTest,
capsys: pytest.CaptureFixture,
monkeypatch: pytest.MonkeyPatch,
age_keys: list["KeyPair"],
@ -18,30 +19,33 @@ def test_import_sops(
cli = Cli()
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[1].privkey)
cli.run(["secrets", "machines", "add", "machine1", age_keys[0].pubkey])
cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey])
cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey])
cli.run(["secrets", "groups", "add-user", "group1", "user1"])
cli.run(["secrets", "groups", "add-user", "group1", "user2"])
cli.run(
["secrets", "machines", "add", "machine1", age_keys[0].pubkey, test_flake.name]
)
cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey, test_flake.name])
cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user1", test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user2", test_flake.name])
# To edit:
# SOPS_AGE_KEY=AGE-SECRET-KEY-1U5ENXZQAY62NC78Y2WC0SEGRRMAEEKH79EYY5TH4GPFWJKEAY0USZ6X7YQ sops --age age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62 ./data/secrets.yaml
cli.run(
[
"secrets",
"import-sops",
"--group",
"group1",
"--machine",
"machine1",
str(test_root.joinpath("data", "secrets.yaml")),
]
)
cmd = [
"secrets",
"import-sops",
"--group",
"group1",
"--machine",
"machine1",
str(test_root.joinpath("data", "secrets.yaml")),
test_flake.name,
]
cli.run(cmd)
capsys.readouterr()
cli.run(["secrets", "users", "list"])
cli.run(["secrets", "users", "list", test_flake.name])
users = sorted(capsys.readouterr().out.rstrip().split())
assert users == ["user1", "user2"]
capsys.readouterr()
cli.run(["secrets", "get", "secret-key"])
cli.run(["secrets", "get", "secret-key", test_flake.name])
assert capsys.readouterr().out == "secret-value"

View File

@ -1,46 +1,46 @@
from pathlib import Path
from api import TestClient
from fixtures_flakes import FlakeForTest
def test_machines(api: TestClient, test_flake: Path) -> None:
response = api.get("/api/machines")
def test_machines(api: TestClient, test_flake: FlakeForTest) -> None:
response = api.get(f"/api/{test_flake.name}/machines")
assert response.status_code == 200
assert response.json() == {"machines": []}
response = api.post("/api/machines", json={"name": "test"})
response = api.post(f"/api/{test_flake.name}/machines", json={"name": "test"})
assert response.status_code == 201
assert response.json() == {"machine": {"name": "test", "status": "unknown"}}
response = api.get("/api/machines/test")
response = api.get(f"/api/{test_flake.name}/machines/test")
assert response.status_code == 200
assert response.json() == {"machine": {"name": "test", "status": "unknown"}}
response = api.get("/api/machines")
response = api.get(f"/api/{test_flake.name}/machines")
assert response.status_code == 200
assert response.json() == {"machines": [{"name": "test", "status": "unknown"}]}
def test_configure_machine(api: TestClient, test_flake: Path) -> None:
def test_configure_machine(api: TestClient, test_flake: FlakeForTest) -> None:
# ensure error 404 if machine does not exist when accessing the config
response = api.get("/api/machines/machine1/config")
response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 404
# ensure error 404 if machine does not exist when writing to the config
response = api.put("/api/machines/machine1/config", json={})
response = api.put(f"/api/{test_flake.name}/machines/machine1/config", json={})
assert response.status_code == 404
# create the machine
response = api.post("/api/machines", json={"name": "machine1"})
response = api.post(f"/api/{test_flake.name}/machines", json={"name": "machine1"})
assert response.status_code == 201
# ensure an empty config is returned by default for a new machine
response = api.get("/api/machines/machine1/config")
response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 200
assert response.json() == {"config": {}}
# get jsonschema for machine
response = api.get("/api/machines/machine1/schema")
response = api.get(f"/api/{test_flake.name}/machines/machine1/schema")
assert response.status_code == 200
json_response = response.json()
assert "schema" in json_response and "properties" in json_response["schema"]
@ -56,7 +56,7 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# verify an invalid config (fileSystems missing) fails
response = api.put(
"/api/machines/machine1/verify",
f"/api/{test_flake.name}/machines/machine1/verify",
json=invalid_config,
)
assert response.status_code == 200
@ -67,13 +67,13 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# set some invalid config (fileSystems missing)
response = api.put(
"/api/machines/machine1/config",
f"/api/{test_flake.name}/machines/machine1/config",
json=invalid_config,
)
assert response.status_code == 200
# ensure the config has actually been updated
response = api.get("/api/machines/machine1/config")
response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 200
assert response.json() == {"config": invalid_config}
@ -103,15 +103,16 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
),
**fs_config,
)
response = api.put(
"/api/machines/machine1/config",
f"/api/{test_flake.name}/machines/machine1/config",
json=config2,
)
assert response.status_code == 200
assert response.json() == {"config": config2}
# ensure that the config has actually been updated
response = api.get("/api/machines/machine1/config")
# get the config again
response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 200
assert response.json() == {"config": config2}
@ -119,20 +120,21 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# For example, this should not result in the boot.loader.grub.devices being
# set twice (e.g. merged)
response = api.put(
"/api/machines/machine1/config",
f"/api/{test_flake.name}/machines/machine1/config",
json=config2,
)
assert response.status_code == 200
assert response.json() == {"config": config2}
# verify the machine config evaluates
response = api.get("/api/machines/machine1/verify")
response = api.get(f"/api/{test_flake.name}/machines/machine1/verify")
assert response.status_code == 200
assert response.json() == {"success": True, "error": None}
# get the schema with an extra module imported
response = api.put(
"/api/machines/machine1/schema",
f"/api/{test_flake.name}/machines/machine1/schema",
json={"clanImports": ["fake-module"]},
)
# expect the result schema to contain the fake-module.fake-flag option
@ -157,7 +159,7 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# set the fake-module.fake-flag option to true
response = api.put(
"/api/machines/machine1/config",
f"/api/{test_flake.name}/machines/machine1/config",
json=config_with_imports,
)
assert response.status_code == 200
@ -179,7 +181,7 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
**fs_config,
)
response = api.put(
"/api/machines/machine1/config",
f"/api/{test_flake.name}/machines/machine1/config",
json=config_with_empty_imports,
)
assert response.status_code == 200
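
Every machine endpoint in this test is now addressed as `/api/{flake_name}/machines...` instead of the previous global `/api/machines...`. The sketch below reproduces just that routing shape, matching the status codes and JSON bodies asserted above; it assumes a FastAPI backend (which the `TestClient` usage suggests) and is not the clan_cli implementation.

```python
# Minimal sketch only: flake-scoped machine routes with an in-memory store,
# returning the same shapes the assertions above check.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()
_machines: dict[str, set[str]] = {}  # flake name -> machine names


class MachineCreate(BaseModel):
    name: str


@app.get("/api/{flake_name}/machines")
def list_machines(flake_name: str) -> dict:
    names = sorted(_machines.get(flake_name, set()))
    return {"machines": [{"name": n, "status": "unknown"} for n in names]}


@app.post("/api/{flake_name}/machines", status_code=201)
def create_machine(flake_name: str, machine: MachineCreate) -> dict:
    _machines.setdefault(flake_name, set()).add(machine.name)
    return {"machine": {"name": machine.name, "status": "unknown"}}
```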

View File

@ -1,21 +1,22 @@
from pathlib import Path
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
def test_machine_subcommands(test_flake: Path, capsys: pytest.CaptureFixture) -> None:
def test_machine_subcommands(
test_flake: FlakeForTest, capsys: pytest.CaptureFixture
) -> None:
cli = Cli()
cli.run(["machines", "create", "machine1"])
cli.run(["machines", "create", "machine1", test_flake.name])
capsys.readouterr()
cli.run(["machines", "list"])
cli.run(["machines", "list", test_flake.name])
out = capsys.readouterr()
assert "machine1\n" == out.out
cli.run(["machines", "remove", "machine1"])
cli.run(["machines", "delete", "machine1", test_flake.name])
capsys.readouterr()
cli.run(["machines", "list"])
cli.run(["machines", "list", test_flake.name])
out = capsys.readouterr()
assert "" == out.out

View File

@ -1,8 +1,8 @@
from pathlib import Path
from fixtures_flakes import FlakeForTest
from clan_cli.config import machine
def test_schema_for_machine(test_flake: Path) -> None:
schema = machine.schema_for_machine("machine1", flake=test_flake)
def test_schema_for_machine(test_flake: FlakeForTest) -> None:
schema = machine.schema_for_machine(test_flake.name, "machine1")
assert "properties" in schema

View File

@ -1,30 +1,33 @@
import logging
import os
from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING, Iterator
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.errors import ClanError
if TYPE_CHECKING:
from age_keys import KeyPair
log = logging.getLogger(__name__)
def _test_identities(
what: str,
test_flake: Path,
test_flake: FlakeForTest,
capsys: pytest.CaptureFixture,
age_keys: list["KeyPair"],
) -> None:
cli = Cli()
sops_folder = test_flake / "sops"
sops_folder = test_flake.path / "sops"
cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey])
cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey, test_flake.name])
assert (sops_folder / what / "foo" / "key.json").exists()
with pytest.raises(ClanError):
cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey])
cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey, test_flake.name])
cli.run(
[
@ -34,73 +37,80 @@ def _test_identities(
"-f",
"foo",
age_keys[0].privkey,
test_flake.name,
]
)
capsys.readouterr() # empty the buffer
cli.run(["secrets", what, "get", "foo"])
cli.run(["secrets", what, "get", "foo", test_flake.name])
out = capsys.readouterr() # empty the buffer
assert age_keys[0].pubkey in out.out
capsys.readouterr() # empty the buffer
cli.run(["secrets", what, "list"])
cli.run(["secrets", what, "list", test_flake.name])
out = capsys.readouterr() # empty the buffer
assert "foo" in out.out
cli.run(["secrets", what, "remove", "foo"])
cli.run(["secrets", what, "remove", "foo", test_flake.name])
assert not (sops_folder / what / "foo" / "key.json").exists()
with pytest.raises(ClanError): # already removed
cli.run(["secrets", what, "remove", "foo"])
cli.run(["secrets", what, "remove", "foo", test_flake.name])
capsys.readouterr()
cli.run(["secrets", what, "list"])
cli.run(["secrets", what, "list", test_flake.name])
out = capsys.readouterr()
assert "foo" not in out.out
def test_users(
test_flake: Path, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
test_flake: FlakeForTest, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
) -> None:
_test_identities("users", test_flake, capsys, age_keys)
def test_machines(
test_flake: Path, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
test_flake: FlakeForTest, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
) -> None:
_test_identities("machines", test_flake, capsys, age_keys)
def test_groups(
test_flake: Path, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
test_flake: FlakeForTest, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
) -> None:
cli = Cli()
capsys.readouterr() # empty the buffer
cli.run(["secrets", "groups", "list"])
cli.run(["secrets", "groups", "list", test_flake.name])
assert capsys.readouterr().out == ""
with pytest.raises(ClanError): # machine does not exist yet
cli.run(["secrets", "groups", "add-machine", "group1", "machine1"])
cli.run(
["secrets", "groups", "add-machine", "group1", "machine1", test_flake.name]
)
with pytest.raises(ClanError): # user does not exist yet
cli.run(["secrets", "groups", "add-user", "groupb1", "user1"])
cli.run(["secrets", "machines", "add", "machine1", age_keys[0].pubkey])
cli.run(["secrets", "groups", "add-machine", "group1", "machine1"])
cli.run(["secrets", "groups", "add-user", "groupb1", "user1", test_flake.name])
cli.run(
["secrets", "machines", "add", "machine1", age_keys[0].pubkey, test_flake.name]
)
cli.run(["secrets", "groups", "add-machine", "group1", "machine1", test_flake.name])
# Should this fail?
cli.run(["secrets", "groups", "add-machine", "group1", "machine1"])
cli.run(["secrets", "groups", "add-machine", "group1", "machine1", test_flake.name])
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
cli.run(["secrets", "groups", "add-user", "group1", "user1"])
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user1", test_flake.name])
capsys.readouterr() # empty the buffer
cli.run(["secrets", "groups", "list"])
cli.run(["secrets", "groups", "list", test_flake.name])
out = capsys.readouterr().out
assert "user1" in out
assert "machine1" in out
cli.run(["secrets", "groups", "remove-user", "group1", "user1"])
cli.run(["secrets", "groups", "remove-machine", "group1", "machine1"])
groups = os.listdir(test_flake / "sops" / "groups")
cli.run(["secrets", "groups", "remove-user", "group1", "user1", test_flake.name])
cli.run(
["secrets", "groups", "remove-machine", "group1", "machine1", test_flake.name]
)
groups = os.listdir(test_flake.path / "sops" / "groups")
assert len(groups) == 0
@ -117,104 +127,114 @@ def use_key(key: str, monkeypatch: pytest.MonkeyPatch) -> Iterator[None]:
def test_secrets(
test_flake: Path,
test_flake: FlakeForTest,
capsys: pytest.CaptureFixture,
monkeypatch: pytest.MonkeyPatch,
age_keys: list["KeyPair"],
) -> None:
cli = Cli()
capsys.readouterr() # empty the buffer
cli.run(["secrets", "list"])
cli.run(["secrets", "list", test_flake.name])
assert capsys.readouterr().out == ""
monkeypatch.setenv("SOPS_NIX_SECRET", "foo")
monkeypatch.setenv("SOPS_AGE_KEY_FILE", str(test_flake / ".." / "age.key"))
monkeypatch.setenv("SOPS_AGE_KEY_FILE", str(test_flake.path / ".." / "age.key"))
cli.run(["secrets", "key", "generate"])
capsys.readouterr() # empty the buffer
cli.run(["secrets", "key", "show"])
key = capsys.readouterr().out
assert key.startswith("age1")
cli.run(["secrets", "users", "add", "testuser", key])
cli.run(["secrets", "users", "add", "testuser", key, test_flake.name])
with pytest.raises(ClanError): # does not exist yet
cli.run(["secrets", "get", "nonexisting"])
cli.run(["secrets", "set", "initialkey"])
cli.run(["secrets", "get", "nonexisting", test_flake.name])
cli.run(["secrets", "set", "initialkey", test_flake.name])
capsys.readouterr()
cli.run(["secrets", "get", "initialkey"])
cli.run(["secrets", "get", "initialkey", test_flake.name])
assert capsys.readouterr().out == "foo"
capsys.readouterr()
cli.run(["secrets", "users", "list"])
cli.run(["secrets", "users", "list", test_flake.name])
users = capsys.readouterr().out.rstrip().split("\n")
assert len(users) == 1, f"users: {users}"
owner = users[0]
monkeypatch.setenv("EDITOR", "cat")
cli.run(["secrets", "set", "--edit", "initialkey"])
cli.run(["secrets", "set", "--edit", "initialkey", test_flake.name])
monkeypatch.delenv("EDITOR")
cli.run(["secrets", "rename", "initialkey", "key"])
cli.run(["secrets", "rename", "initialkey", "key", test_flake.name])
capsys.readouterr() # empty the buffer
cli.run(["secrets", "list"])
cli.run(["secrets", "list", test_flake.name])
assert capsys.readouterr().out == "key\n"
cli.run(["secrets", "machines", "add", "machine1", age_keys[0].pubkey])
cli.run(["secrets", "machines", "add-secret", "machine1", "key"])
cli.run(
["secrets", "machines", "add", "machine1", age_keys[0].pubkey, test_flake.name]
)
cli.run(["secrets", "machines", "add-secret", "machine1", "key", test_flake.name])
capsys.readouterr()
cli.run(["secrets", "machines", "list"])
cli.run(["secrets", "machines", "list", test_flake.name])
assert capsys.readouterr().out == "machine1\n"
with use_key(age_keys[0].privkey, monkeypatch):
capsys.readouterr()
cli.run(["secrets", "get", "key"])
cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo"
cli.run(["secrets", "machines", "remove-secret", "machine1", "key"])
cli.run(
["secrets", "machines", "remove-secret", "machine1", "key", test_flake.name]
)
cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey])
cli.run(["secrets", "users", "add-secret", "user1", "key"])
cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey, test_flake.name])
cli.run(["secrets", "users", "add-secret", "user1", "key", test_flake.name])
capsys.readouterr()
with use_key(age_keys[1].privkey, monkeypatch):
cli.run(["secrets", "get", "key"])
cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo"
cli.run(["secrets", "users", "remove-secret", "user1", "key"])
cli.run(["secrets", "users", "remove-secret", "user1", "key", test_flake.name])
with pytest.raises(ClanError): # does not exist yet
cli.run(["secrets", "groups", "add-secret", "admin-group", "key"])
cli.run(["secrets", "groups", "add-user", "admin-group", "user1"])
cli.run(["secrets", "groups", "add-user", "admin-group", owner])
cli.run(["secrets", "groups", "add-secret", "admin-group", "key"])
cli.run(
["secrets", "groups", "add-secret", "admin-group", "key", test_flake.name]
)
cli.run(["secrets", "groups", "add-user", "admin-group", "user1", test_flake.name])
cli.run(["secrets", "groups", "add-user", "admin-group", owner, test_flake.name])
cli.run(["secrets", "groups", "add-secret", "admin-group", "key", test_flake.name])
capsys.readouterr() # empty the buffer
cli.run(["secrets", "set", "--group", "admin-group", "key2"])
cli.run(["secrets", "set", "--group", "admin-group", "key2", test_flake.name])
with use_key(age_keys[1].privkey, monkeypatch):
capsys.readouterr()
cli.run(["secrets", "get", "key"])
cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo"
# extend group will update secrets
cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey])
cli.run(["secrets", "groups", "add-user", "admin-group", "user2"])
cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "admin-group", "user2", test_flake.name])
with use_key(age_keys[2].privkey, monkeypatch): # user2
capsys.readouterr()
cli.run(["secrets", "get", "key"])
cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo"
cli.run(["secrets", "groups", "remove-user", "admin-group", "user2"])
cli.run(
["secrets", "groups", "remove-user", "admin-group", "user2", test_flake.name]
)
with pytest.raises(ClanError), use_key(age_keys[2].privkey, monkeypatch):
# user2 is not in the group anymore
capsys.readouterr()
cli.run(["secrets", "get", "key"])
cli.run(["secrets", "get", "key", test_flake.name])
print(capsys.readouterr().out)
cli.run(["secrets", "groups", "remove-secret", "admin-group", "key"])
cli.run(
["secrets", "groups", "remove-secret", "admin-group", "key", test_flake.name]
)
cli.run(["secrets", "remove", "key"])
cli.run(["secrets", "remove", "key2"])
cli.run(["secrets", "remove", "key", test_flake.name])
cli.run(["secrets", "remove", "key2", test_flake.name])
capsys.readouterr() # empty the buffer
cli.run(["secrets", "list"])
cli.run(["secrets", "list", test_flake.name])
assert capsys.readouterr().out == ""
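
The identity tests above locate keys under `<flake>/sops/<what>/<name>/key.json`, which is why every secrets subcommand now takes the flake name: the state lives inside the flake directory rather than in a single global store. A tiny illustrative helper for that path layout follows; the function name and signature are hypothetical, only the directory structure is taken from the assertions.

```python
# Hypothetical helper, for illustration only: mirrors the on-disk layout the
# assertions above check (<flake>/sops/<users|machines|groups>/<name>/key.json).
from pathlib import Path


def sops_key_file(flake_dir: Path, kind: str, name: str) -> Path:
    """Return where the key metadata for a user/machine/group lives in a flake."""
    return flake_dir / "sops" / kind / name / "key.json"


print(sops_key_file(Path("/tmp/test_flake"), "users", "foo"))
# -> /tmp/test_flake/sops/users/foo/key.json
```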

View File

@ -1,8 +1,8 @@
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.machines.facts import machine_get_fact
from clan_cli.secrets.folders import sops_secrets_folder
@ -15,21 +15,36 @@ if TYPE_CHECKING:
@pytest.mark.impure
def test_generate_secret(
monkeypatch: pytest.MonkeyPatch,
test_flake_with_core: Path,
test_flake_with_core: FlakeForTest,
age_keys: list["KeyPair"],
) -> None:
monkeypatch.chdir(test_flake_with_core)
monkeypatch.chdir(test_flake_with_core.path)
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
cli.run(["secrets", "generate", "vm1"])
has_secret("vm1-age.key")
has_secret("vm1-zerotier-identity-secret")
network_id = machine_get_fact("vm1", "zerotier-network-id")
cli.run(
[
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
test_flake_with_core.name,
]
)
cli.run(["secrets", "generate", "vm1", test_flake_with_core.name])
has_secret(test_flake_with_core.name, "vm1-age.key")
has_secret(test_flake_with_core.name, "vm1-zerotier-identity-secret")
network_id = machine_get_fact(
test_flake_with_core.name, "vm1", "zerotier-network-id"
)
assert len(network_id) == 16
age_key = sops_secrets_folder().joinpath("vm1-age.key").joinpath("secret")
age_key = (
sops_secrets_folder(test_flake_with_core.name)
.joinpath("vm1-age.key")
.joinpath("secret")
)
identity_secret = (
sops_secrets_folder()
sops_secrets_folder(test_flake_with_core.name)
.joinpath("vm1-zerotier-identity-secret")
.joinpath("secret")
)
@ -37,12 +52,12 @@ def test_generate_secret(
secret1_mtime = identity_secret.lstat().st_mtime_ns
# test idempotency
cli.run(["secrets", "generate", "vm1"])
cli.run(["secrets", "generate", "vm1", test_flake_with_core.name])
assert age_key.lstat().st_mtime_ns == age_key_mtime
assert identity_secret.lstat().st_mtime_ns == secret1_mtime
machine_path = (
sops_secrets_folder()
sops_secrets_folder(test_flake_with_core.name)
.joinpath("vm1-zerotier-identity-secret")
.joinpath("machines")
.joinpath("vm1")

View File

@ -3,6 +3,7 @@ from pathlib import Path
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.machines.facts import machine_get_fact
from clan_cli.nix import nix_shell
@ -12,16 +13,16 @@ from clan_cli.ssh import HostGroup
@pytest.mark.impure
def test_upload_secret(
monkeypatch: pytest.MonkeyPatch,
test_flake_with_core_and_pass: Path,
temporary_dir: Path,
test_flake_with_core_and_pass: FlakeForTest,
temporary_home: Path,
host_group: HostGroup,
) -> None:
monkeypatch.chdir(test_flake_with_core_and_pass)
gnupghome = temporary_dir / "gpg"
monkeypatch.chdir(test_flake_with_core_and_pass.path)
gnupghome = temporary_home / "gpg"
gnupghome.mkdir(mode=0o700)
monkeypatch.setenv("GNUPGHOME", str(gnupghome))
monkeypatch.setenv("PASSWORD_STORE_DIR", str(temporary_dir / "pass"))
gpg_key_spec = temporary_dir / "gpg_key_spec"
monkeypatch.setenv("PASSWORD_STORE_DIR", str(temporary_home / "pass"))
gpg_key_spec = temporary_home / "gpg_key_spec"
gpg_key_spec.write_text(
"""
Key-Type: 1
@ -38,25 +39,27 @@ def test_upload_secret(
check=True,
)
subprocess.run(nix_shell(["pass"], ["pass", "init", "test@local"]), check=True)
cli.run(["secrets", "generate", "vm1"])
network_id = machine_get_fact("vm1", "zerotier-network-id")
cli.run(["secrets", "generate", "vm1", test_flake_with_core_and_pass.name])
network_id = machine_get_fact(
test_flake_with_core_and_pass.name, "vm1", "zerotier-network-id"
)
assert len(network_id) == 16
identity_secret = (
temporary_dir / "pass" / "machines" / "vm1" / "zerotier-identity-secret.gpg"
temporary_home / "pass" / "machines" / "vm1" / "zerotier-identity-secret.gpg"
)
secret1_mtime = identity_secret.lstat().st_mtime_ns
# test idempotency
cli.run(["secrets", "generate", "vm1"])
cli.run(["secrets", "generate", "vm1", test_flake_with_core_and_pass.name])
assert identity_secret.lstat().st_mtime_ns == secret1_mtime
flake = test_flake_with_core_and_pass.joinpath("flake.nix")
flake = test_flake_with_core_and_pass.path.joinpath("flake.nix")
host = host_group.hosts[0]
addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}"
new_text = flake.read_text().replace("__CLAN_DEPLOYMENT_ADDRESS__", addr)
flake.write_text(new_text)
cli.run(["secrets", "upload", "vm1"])
cli.run(["secrets", "upload", "vm1", test_flake_with_core_and_pass.name])
zerotier_identity_secret = (
test_flake_with_core_and_pass / "secrets" / "zerotier-identity-secret"
test_flake_with_core_and_pass.path / "secrets" / "zerotier-identity-secret"
)
assert zerotier_identity_secret.exists()

View File

@ -1,8 +1,8 @@
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.ssh import HostGroup
@ -13,29 +13,47 @@ if TYPE_CHECKING:
@pytest.mark.impure
def test_secrets_upload(
monkeypatch: pytest.MonkeyPatch,
test_flake_with_core: Path,
test_flake_with_core: FlakeForTest,
host_group: HostGroup,
age_keys: list["KeyPair"],
) -> None:
monkeypatch.chdir(test_flake_with_core)
monkeypatch.chdir(test_flake_with_core.path)
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
cli.run(
[
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
test_flake_with_core.name,
]
)
cli.run(["secrets", "machines", "add", "vm1", age_keys[1].pubkey])
cli.run(
[
"secrets",
"machines",
"add",
"vm1",
age_keys[1].pubkey,
test_flake_with_core.name,
]
)
monkeypatch.setenv("SOPS_NIX_SECRET", age_keys[0].privkey)
cli.run(["secrets", "set", "vm1-age.key"])
cli.run(["secrets", "set", "vm1-age.key", test_flake_with_core.name])
flake = test_flake_with_core.joinpath("flake.nix")
flake = test_flake_with_core.path.joinpath("flake.nix")
host = host_group.hosts[0]
addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}"
new_text = flake.read_text().replace("__CLAN_DEPLOYMENT_ADDRESS__", addr)
flake.write_text(new_text)
cli.run(["secrets", "upload", "vm1"])
cli.run(["secrets", "upload", "vm1", test_flake_with_core.name])
# the flake defines this path as the location where the sops key should be installed
sops_key = test_flake_with_core.joinpath("key.txt")
sops_key = test_flake_with_core.path.joinpath("key.txt")
assert sops_key.exists()
assert sops_key.read_text() == age_keys[0].privkey

View File

@ -1,16 +1,19 @@
from pathlib import Path
import pytest
from api import TestClient
from fixtures_flakes import FlakeForTest
@pytest.mark.impure
def test_inspect(api: TestClient, test_flake_with_core: Path) -> None:
def test_inspect(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
response = api.post(
"/api/vms/inspect",
json=dict(flake_url=str(test_flake_with_core), flake_attr="vm1"),
json=dict(flake_url=str(test_flake_with_core.path), flake_attr="vm1"),
)
assert response.status_code == 200, "Failed to inspect vm"
# print(f"SLEEPING FOR EVER: {99999}", file=sys.stderr)
# time.sleep(99999)
assert response.status_code == 200, f"Failed to inspect vm: {response.text}"
config = response.json()["config"]
assert config.get("flake_attr") == "vm1"
assert config.get("cores") == 1
@ -26,4 +29,4 @@ def test_incorrect_uuid(api: TestClient) -> None:
for endpoint in uuid_endpoints:
response = api.get(endpoint.format("1234"))
assert response.status_code == 422, "Failed to get vm status"
assert response.status_code == 422, f"Failed to get vm status: {response.text}"
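
The VM inspect test posts a `flake_url`/`flake_attr` pair and reads `flake_attr` and `cores` back out of the returned `config`. Below is a minimal model of just those fields, assuming pydantic; any further fields of the real request or response are deliberately omitted rather than guessed.

```python
# Sketch of the request/response fields the assertions above touch; only
# flake_url, flake_attr and cores are modeled.
from pydantic import BaseModel


class VmInspectRequest(BaseModel):
    flake_url: str
    flake_attr: str


class VmConfig(BaseModel):
    flake_attr: str
    cores: int


req = VmInspectRequest(flake_url="/tmp/test_flake", flake_attr="vm1")
cfg = VmConfig(flake_attr=req.flake_attr, cores=1)
print(cfg.flake_attr, cfg.cores)  # -> vm1 1
```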

View File

@ -5,19 +5,24 @@ from typing import TYPE_CHECKING, Iterator
import pytest
from api import TestClient
from cli import Cli
from fixtures_flakes import create_flake
from fixtures_flakes import FlakeForTest, create_flake
from httpx import SyncByteStream
from root import CLAN_CORE
from clan_cli.types import FlakeName
if TYPE_CHECKING:
from age_keys import KeyPair
@pytest.fixture
def flake_with_vm_with_secrets(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
def flake_with_vm_with_secrets(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
yield from create_flake(
monkeypatch,
"test_flake_with_core_dynamic_machines",
temporary_home,
FlakeName("test_flake_with_core_dynamic_machines"),
CLAN_CORE,
machines=["vm_with_secrets"],
)
@ -25,27 +30,18 @@ def flake_with_vm_with_secrets(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path
@pytest.fixture
def remote_flake_with_vm_without_secrets(
monkeypatch: pytest.MonkeyPatch,
) -> Iterator[Path]:
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
yield from create_flake(
monkeypatch,
"test_flake_with_core_dynamic_machines",
temporary_home,
FlakeName("test_flake_with_core_dynamic_machines"),
CLAN_CORE,
machines=["vm_without_secrets"],
remote=True,
)
@pytest.fixture
def create_user_with_age_key(
monkeypatch: pytest.MonkeyPatch,
age_keys: list["KeyPair"],
) -> None:
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None:
print(f"flake_url: {flake} ")
response = api.post(
@ -74,8 +70,9 @@ def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None:
print(line.decode("utf-8"))
print("=========END LOGS==========")
assert response.status_code == 200, "Failed to get vm logs"
print("Get /api/vms/{uuid}/status")
response = api.get(f"/api/vms/{uuid}/status")
print("Finished Get /api/vms/{uuid}/status")
assert response.status_code == 200, "Failed to get vm status"
data = response.json()
assert (
@ -88,10 +85,22 @@ def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None:
def test_create_local(
api: TestClient,
monkeypatch: pytest.MonkeyPatch,
flake_with_vm_with_secrets: Path,
create_user_with_age_key: None,
flake_with_vm_with_secrets: FlakeForTest,
age_keys: list["KeyPair"],
) -> None:
generic_create_vm_test(api, flake_with_vm_with_secrets, "vm_with_secrets")
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cmd = [
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
flake_with_vm_with_secrets.name,
]
cli.run(cmd)
generic_create_vm_test(api, flake_with_vm_with_secrets.path, "vm_with_secrets")
@pytest.mark.skipif(not os.path.exists("/dev/kvm"), reason="Requires KVM")
@ -99,8 +108,8 @@ def test_create_local(
def test_create_remote(
api: TestClient,
monkeypatch: pytest.MonkeyPatch,
remote_flake_with_vm_without_secrets: Path,
remote_flake_with_vm_without_secrets: FlakeForTest,
) -> None:
generic_create_vm_test(
api, remote_flake_with_vm_without_secrets, "vm_without_secrets"
api, remote_flake_with_vm_without_secrets.path, "vm_without_secrets"
)

View File

@ -1,9 +1,9 @@
import os
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
if TYPE_CHECKING:
from age_keys import KeyPair
@ -12,9 +12,11 @@ no_kvm = not os.path.exists("/dev/kvm")
@pytest.mark.impure
def test_inspect(test_flake_with_core: Path, capsys: pytest.CaptureFixture) -> None:
def test_inspect(
test_flake_with_core: FlakeForTest, capsys: pytest.CaptureFixture
) -> None:
cli = Cli()
cli.run(["vms", "inspect", "vm1"])
cli.run(["vms", "inspect", "vm1", test_flake_with_core.name])
out = capsys.readouterr() # empty the buffer
assert "Cores" in out.out
@ -23,11 +25,20 @@ def test_inspect(test_flake_with_core: Path, capsys: pytest.CaptureFixture) -> N
@pytest.mark.impure
def test_create(
monkeypatch: pytest.MonkeyPatch,
test_flake_with_core: Path,
test_flake_with_core: FlakeForTest,
age_keys: list["KeyPair"],
) -> None:
monkeypatch.chdir(test_flake_with_core)
monkeypatch.chdir(test_flake_with_core.path)
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
cli.run(["vms", "create", "vm1"])
cli.run(
[
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
test_flake_with_core.name,
]
)
cli.run(["vms", "create", "vm1", test_flake_with_core.name])

View File

@ -6,16 +6,22 @@ import sys
from pathlib import Path
import pytest
from cli import Cli
from ports import PortFunction
@pytest.mark.timeout(10)
def test_start_server(unused_tcp_port: PortFunction, temporary_dir: Path) -> None:
def test_start_server(unused_tcp_port: PortFunction, temporary_home: Path) -> None:
Cli()
port = unused_tcp_port()
fifo = temporary_dir / "fifo"
fifo = temporary_home / "fifo"
os.mkfifo(fifo)
notify_script = temporary_dir / "firefox"
# Create a script called "firefox" in the temporary home directory that
# writes "1" to the fifo. This is used to notify the test that the firefox has been
# started.
notify_script = temporary_home / "firefox"
bash = shutil.which("bash")
assert bash is not None
notify_script.write_text(
@ -26,11 +32,28 @@ echo "1" > {fifo}
)
notify_script.chmod(0o700)
# Add the temporary home directory to the PATH so that the script is found
env = os.environ.copy()
print(str(temporary_dir.absolute()))
env["PATH"] = ":".join([str(temporary_dir.absolute())] + env["PATH"].split(":"))
env["PATH"] = f"{temporary_home}:{env['PATH']}"
# Add build/src to PYTHONPATH so that the webui module is found in nix sandbox
# TODO: We need a way to make sure things which work in the devshell also work in the sandbox
python_path = env.get("PYTHONPATH")
if python_path:
env["PYTHONPATH"] = f"/build/src:{python_path}"
# breakpoint_container(
# cmd=[sys.executable, "-m", "clan_cli.webui", "--port", str(port)],
# env=env,
# work_dir=temporary_home,
# )
with subprocess.Popen(
[sys.executable, "-m", "clan_cli.webui", "--port", str(port)], env=env
[sys.executable, "-m", "clan_cli.webui", "--port", str(port)],
env=env,
stdout=sys.stderr,
stderr=sys.stderr,
text=True,
) as p:
try:
with open(fifo) as f:

View File

@ -1,5 +1,10 @@
{
"root": true,
"extends": ["next/core-web-vitals", "plugin:tailwindcss/recommended"],
"ignorePatterns": ["**/src/api/*"]
"extends": ["next/core-web-vitals", "plugin:tailwindcss/recommended", "plugin:@typescript-eslint/recommended"],
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"ignorePatterns": ["**/src/api/*"],
"rules": {
"@typescript-eslint/no-explicit-any": "off"
}
}

File diff suppressed because it is too large

View File

@ -16,6 +16,7 @@
"@rjsf/mui": "^5.12.1",
"@rjsf/validator-ajv8": "^5.12.1",
"@types/json-schema": "^7.0.12",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"autoprefixer": "10.4.14",
"axios": "^1.4.0",
"classnames": "^2.3.2",
@ -50,7 +51,6 @@
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz",
"integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@ -550,7 +550,6 @@
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
"integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==",
"dev": true,
"dependencies": {
"eslint-visitor-keys": "^3.3.0"
},
@ -565,7 +564,6 @@
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.6.2.tgz",
"integrity": "sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw==",
"dev": true,
"engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
}
@ -574,7 +572,6 @@
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz",
"integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==",
"dev": true,
"dependencies": {
"ajv": "^6.12.4",
"debug": "^4.3.2",
@ -597,7 +594,6 @@
"version": "8.47.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.47.0.tgz",
"integrity": "sha512-P6omY1zv5MItm93kLM8s2vr1HICJH8v0dvddDhysbIuZ+vcjOHg5Zbkf1mTkcmi2JA9oBG2anOkRnW8WJTS8Og==",
"dev": true,
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
@ -612,7 +608,6 @@
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz",
"integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==",
"dev": true,
"dependencies": {
"@humanwhocodes/object-schema": "^1.2.1",
"debug": "^4.1.1",
@ -626,7 +621,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
"integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
"dev": true,
"engines": {
"node": ">=12.22"
},
@ -638,8 +632,7 @@
"node_modules/@humanwhocodes/object-schema": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
"dev": true
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA=="
},
"node_modules/@ibm-cloud/openapi-ruleset": {
"version": "0.45.5",
@ -2148,6 +2141,11 @@
"resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz",
"integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ=="
},
"node_modules/@types/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-MMzuxN3GdFwskAnb6fz0orFvhfqi752yjaXylr0Rp4oDg5H0Zn1IuyRhDVvYOwAXoJirx2xuS16I3WjxnAIHiQ=="
},
"node_modules/@types/urijs": {
"version": "1.19.19",
"resolved": "https://registry.npmjs.org/@types/urijs/-/urijs-1.19.19.tgz",
@ -2160,11 +2158,43 @@
"integrity": "sha512-cSjhgrr8g4KbPnnijAr/KJDNKa/bBa+ixYkywFRvrhvi9n1WEl7yYbtRyzE6jqNQiSxxJxoAW3STaOQwJHndaw==",
"dev": true
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz",
"integrity": "sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag==",
"dependencies": {
"@eslint-community/regexpp": "^4.4.0",
"@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/type-utils": "5.62.0",
"@typescript-eslint/utils": "5.62.0",
"debug": "^4.3.4",
"graphemer": "^1.4.0",
"ignore": "^5.2.0",
"natural-compare-lite": "^1.4.0",
"semver": "^7.3.7",
"tsutils": "^3.21.0"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
}
},
"node_modules/@typescript-eslint/parser": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz",
"integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==",
"dev": true,
"dependencies": {
"@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/types": "5.62.0",
@ -2191,7 +2221,6 @@
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz",
"integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "5.62.0",
"@typescript-eslint/visitor-keys": "5.62.0"
@ -2204,11 +2233,36 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz",
"integrity": "sha512-xsSQreu+VnfbqQpW5vnCJdq1Z3Q0U31qiWmRhr98ONQmcp/yhiPJFPq8MXiJVLiksmOKSjIldZzkebzHuCGzew==",
"dependencies": {
"@typescript-eslint/typescript-estree": "5.62.0",
"@typescript-eslint/utils": "5.62.0",
"debug": "^4.3.4",
"tsutils": "^3.21.0"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "*"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
}
},
"node_modules/@typescript-eslint/types": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz",
"integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==",
"dev": true,
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
@ -2221,7 +2275,6 @@
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz",
"integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "5.62.0",
"@typescript-eslint/visitor-keys": "5.62.0",
@ -2244,11 +2297,55 @@
}
}
},
"node_modules/@typescript-eslint/utils": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz",
"integrity": "sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@types/json-schema": "^7.0.9",
"@types/semver": "^7.3.12",
"@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/types": "5.62.0",
"@typescript-eslint/typescript-estree": "5.62.0",
"eslint-scope": "^5.1.1",
"semver": "^7.3.7"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
}
},
"node_modules/@typescript-eslint/utils/node_modules/eslint-scope": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
"integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
"dependencies": {
"esrecurse": "^4.3.0",
"estraverse": "^4.1.1"
},
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@typescript-eslint/utils/node_modules/estraverse": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
"integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
"engines": {
"node": ">=4.0"
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz",
"integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "5.62.0",
"eslint-visitor-keys": "^3.3.0"
@ -2277,7 +2374,6 @@
"version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
"integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==",
"dev": true,
"bin": {
"acorn": "bin/acorn"
},
@ -2289,7 +2385,6 @@
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
"dev": true,
"peerDependencies": {
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
}
@ -2298,7 +2393,6 @@
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"dev": true,
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
@ -2359,7 +2453,6 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -2368,7 +2461,6 @@
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"dependencies": {
"color-convert": "^2.0.1"
},
@ -2404,8 +2496,7 @@
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"dev": true
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
},
"node_modules/aria-query": {
"version": "5.3.0",
@ -2452,7 +2543,6 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
"integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -2826,7 +2916,6 @@
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dev": true,
"dependencies": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
@ -2917,7 +3006,6 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dev": true,
"dependencies": {
"color-name": "~1.1.4"
},
@ -2928,8 +3016,7 @@
"node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
@ -3014,7 +3101,6 @@
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"dev": true,
"dependencies": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
@ -3178,7 +3264,6 @@
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"dev": true,
"dependencies": {
"ms": "2.1.2"
},
@ -3199,8 +3284,7 @@
"node_modules/deep-is": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
"dev": true
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="
},
"node_modules/deepmerge": {
"version": "2.2.1",
@ -3274,7 +3358,6 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
"integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
"dev": true,
"dependencies": {
"path-type": "^4.0.0"
},
@ -3291,7 +3374,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
"integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
"dev": true,
"dependencies": {
"esutils": "^2.0.2"
},
@ -3853,7 +3935,6 @@
"version": "8.46.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.46.0.tgz",
"integrity": "sha512-cIO74PvbW0qU8e0mIvk5IV3ToWdCq5FYG6gWPHHkx6gNdjlbAYvtfHmlCMXxjcoVaIdwy/IAt3+mDkZkfvb2Dg==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
@ -4200,7 +4281,6 @@
"version": "7.2.2",
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
"integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
"dev": true,
"dependencies": {
"esrecurse": "^4.3.0",
"estraverse": "^5.2.0"
@ -4216,7 +4296,6 @@
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
"integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
"dev": true,
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
@ -4228,7 +4307,6 @@
"version": "9.6.1",
"resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
"integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
"dev": true,
"dependencies": {
"acorn": "^8.9.0",
"acorn-jsx": "^5.3.2",
@ -4258,7 +4336,6 @@
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz",
"integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==",
"dev": true,
"dependencies": {
"estraverse": "^5.1.0"
},
@ -4270,7 +4347,6 @@
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
"dev": true,
"dependencies": {
"estraverse": "^5.2.0"
},
@ -4282,7 +4358,6 @@
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
"integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
"dev": true,
"engines": {
"node": ">=4.0"
}
@ -4297,7 +4372,6 @@
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@ -4381,14 +4455,12 @@
"node_modules/fast-json-stable-stringify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
"dev": true
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
},
"node_modules/fast-levenshtein": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
"dev": true
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="
},
"node_modules/fast-memoize": {
"version": "2.5.2",
@ -4414,7 +4486,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
"integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
"dev": true,
"dependencies": {
"flat-cache": "^3.0.4"
},
@ -4442,7 +4513,6 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dev": true,
"dependencies": {
"locate-path": "^6.0.0",
"path-exists": "^4.0.0"
@ -4458,7 +4528,6 @@
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
"integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==",
"dev": true,
"dependencies": {
"flatted": "^3.1.0",
"rimraf": "^3.0.2"
@ -4470,8 +4539,7 @@
"node_modules/flatted": {
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
"dev": true
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ=="
},
"node_modules/follow-redirects": {
"version": "1.15.2",
@ -4683,7 +4751,6 @@
"version": "7.1.7",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz",
"integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==",
"dev": true,
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
@ -4719,7 +4786,6 @@
"version": "13.21.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz",
"integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==",
"dev": true,
"dependencies": {
"type-fest": "^0.20.2"
},
@ -4749,7 +4815,6 @@
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
"integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
"dev": true,
"dependencies": {
"array-union": "^2.1.0",
"dir-glob": "^3.0.1",
@ -4793,8 +4858,7 @@
"node_modules/graphemer": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
"dev": true
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="
},
"node_modules/has": {
"version": "1.0.3",
@ -4820,7 +4884,6 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -5059,7 +5122,6 @@
"version": "5.2.4",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz",
"integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==",
"dev": true,
"engines": {
"node": ">= 4"
}
@ -5093,7 +5155,6 @@
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
"dev": true,
"engines": {
"node": ">=0.8.19"
}
@ -5297,7 +5358,6 @@
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
"integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -5417,8 +5477,7 @@
"node_modules/isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
"dev": true
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
},
"node_modules/jiti": {
"version": "1.19.1",
@ -5437,7 +5496,6 @@
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dev": true,
"dependencies": {
"argparse": "^2.0.1"
},
@ -5538,14 +5596,12 @@
"node_modules/json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
"dev": true
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
},
"node_modules/json-stable-stringify-without-jsonify": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
"dev": true
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="
},
"node_modules/json5": {
"version": "1.0.2",
@ -5646,7 +5702,6 @@
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
"dev": true,
"dependencies": {
"prelude-ls": "^1.2.1",
"type-check": "~0.4.0"
@ -5672,7 +5727,6 @@
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dev": true,
"dependencies": {
"p-locate": "^5.0.0"
},
@ -5708,8 +5762,7 @@
"node_modules/lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
"node_modules/lodash.omit": {
"version": "4.5.0",
@ -5762,7 +5815,6 @@
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dev": true,
"dependencies": {
"yallist": "^4.0.0"
},
@ -5888,8 +5940,7 @@
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"dev": true
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/mz": {
"version": "2.7.0",
@ -5921,8 +5972,12 @@
"node_modules/natural-compare": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
"dev": true
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="
},
"node_modules/natural-compare-lite": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz",
"integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g=="
},
"node_modules/next": {
"version": "13.4.12",
@ -6356,7 +6411,6 @@
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
"integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==",
"dev": true,
"dependencies": {
"@aashutoshrathi/word-wrap": "^1.2.3",
"deep-is": "^0.1.3",
@ -6426,7 +6480,6 @@
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
"dev": true,
"dependencies": {
"yocto-queue": "^0.1.0"
},
@ -6441,7 +6494,6 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dev": true,
"dependencies": {
"p-limit": "^3.0.2"
},
@ -6505,7 +6557,6 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -6522,7 +6573,6 @@
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -6717,7 +6767,6 @@
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
"dev": true,
"engines": {
"node": ">= 0.8.0"
}
@ -7184,7 +7233,6 @@
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"dev": true,
"dependencies": {
"glob": "^7.1.3"
},
@ -7282,7 +7330,6 @@
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"dependencies": {
"lru-cache": "^6.0.0"
},
@ -7297,7 +7344,6 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dev": true,
"dependencies": {
"shebang-regex": "^3.0.0"
},
@ -7309,7 +7355,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -7404,7 +7449,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
"integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
"dev": true,
"engines": {
"node": ">=8"
}
@ -7553,7 +7597,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"dependencies": {
"ansi-regex": "^5.0.1"
},
@ -7583,7 +7626,6 @@
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
"integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
"dev": true,
"engines": {
"node": ">=8"
},
@ -7662,7 +7704,6 @@
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"dev": true,
"dependencies": {
"has-flag": "^4.0.0"
},
@ -7768,8 +7809,7 @@
"node_modules/text-table": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
"dev": true
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="
},
"node_modules/thenify": {
"version": "3.3.1",
@ -7861,7 +7901,6 @@
"version": "3.21.0",
"resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz",
"integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==",
"dev": true,
"dependencies": {
"tslib": "^1.8.1"
},
@ -7875,14 +7914,12 @@
"node_modules/tsutils/node_modules/tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
"dev": true
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
},
"node_modules/type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
"dev": true,
"dependencies": {
"prelude-ls": "^1.2.1"
},
@ -7894,7 +7931,6 @@
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
"integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
"dev": true,
"engines": {
"node": ">=10"
},
@ -7971,7 +8007,6 @@
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz",
"integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==",
"dev": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@ -8186,7 +8221,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"dependencies": {
"isexe": "^2.0.0"
},
@ -8266,8 +8300,7 @@
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/yaml": {
"version": "1.10.2",
@ -8314,7 +8347,6 @@
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
"dev": true,
"engines": {
"node": ">=10"
},

View File

@ -20,6 +20,7 @@
"@rjsf/mui": "^5.12.1",
"@rjsf/validator-ajv8": "^5.12.1",
"@types/json-schema": "^7.0.12",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"autoprefixer": "10.4.14",
"axios": "^1.4.0",
"classnames": "^2.3.2",

View File

@ -1,5 +1,10 @@
import { MachineContextProvider } from "@/components/hooks/useMachines";
export default function Layout({ children }: { children: React.ReactNode }) {
return <MachineContextProvider>{children}</MachineContextProvider>;
return (
// TODO: select flake?
<MachineContextProvider flakeName="defaultFlake">
{children}
</MachineContextProvider>
);
}

View File

@ -1,12 +1,7 @@
"use client";
import { NodeTable } from "@/components/table";
import { StrictMode } from "react";
export default function Page() {
return (
<StrictMode>
<NodeTable />
</StrictMode>
);
return <NodeTable />;
}

View File

@ -33,7 +33,10 @@ interface PureCustomConfigProps extends FormStepContentProps {
}
export function CustomConfig(props: FormStepContentProps) {
const { formHooks } = props;
const { data, isLoading, error } = useGetMachineSchema("mama");
const { data, isLoading, error } = useGetMachineSchema(
"defaultFlake",
"mama",
);
// const { data, isLoading, error } = { data: {data:{schema: {
// title: 'Test form',
// type: 'object',
@ -53,11 +56,11 @@ export function CustomConfig(props: FormStepContentProps) {
return {};
}, [data, isLoading, error]);
type ValueType = { default: any };
const initialValues = useMemo(
() =>
Object.entries(schema?.properties || {}).reduce((acc, [key, value]) => {
/*@ts-ignore*/
const init: any = value?.default;
const init: any = (value as ValueType)?.default;
if (init) {
return {
...acc,
@ -157,7 +160,7 @@ function PureCustomConfig(props: PureCustomConfigProps) {
// ObjectFieldTemplate:
ErrorListTemplate: ErrorList,
ButtonTemplates: {
SubmitButton: (props) => (
SubmitButton: () => (
<div className="flex w-full items-center justify-center">
<Button
onClick={validate}

View File

@ -113,7 +113,7 @@ export function CreateMachineForm() {
)}
{!isMobile && (
<Stepper activeStep={activeStep} color="secondary">
{steps.map(({ label }, index) => {
{steps.map(({ label }) => {
const stepProps: { completed?: boolean } = {};
const labelProps: {
optional?: React.ReactNode;

View File

@ -2,10 +2,10 @@ import { useListMachines } from "@/api/default/default";
import { MachinesResponse } from "@/api/model";
import { AxiosError, AxiosResponse } from "axios";
import React, {
createContext,
Dispatch,
ReactNode,
SetStateAction,
createContext,
useState,
} from "react";
import { KeyedMutator } from "swr";
@ -38,7 +38,7 @@ interface AppContextProviderProps {
}
export const WithAppState = (props: AppContextProviderProps) => {
const { children } = props;
const { isLoading, error, mutate, swrKey } = useListMachines();
const { isLoading, error, mutate, swrKey } = useListMachines("defaultFlake");
const [data, setAppState] = useState<AppState>({ isJoined: false });

View File

@ -1,7 +1,7 @@
import { useState, useEffect } from "react";
import { useEffect, useState } from "react";
export function useDebounce(value: any, delay: number) {
const [debouncedValue, setDebouncedValue] = useState(value);
export function useDebounce<T>(value: T, delay: number) {
const [debouncedValue, setDebouncedValue] = useState<T>(value);
useEffect(() => {
const handler = setTimeout(() => {
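
The hunk above narrows `useDebounce` from `any` to a generic parameter. A minimal usage sketch of the typed hook follows; the component name, import path, and 250 ms delay are illustrative assumptions, not part of this commit.

```tsx
import { useState } from "react";
import { useDebounce } from "@/components/hooks/useDebounce";

// Sketch only: the generic parameter keeps the debounced value typed as string,
// so downstream filtering code no longer has to deal with `any`.
export function DebouncedSearchExample() {
  const [query, setQuery] = useState<string>("");
  const debouncedQuery = useDebounce<string>(query, 250);

  return (
    <input
      value={query}
      onChange={(event) => setQuery(event.target.value)}
      placeholder={`Filtering for: ${debouncedQuery}`}
    />
  );
}
```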

View File

@ -24,6 +24,7 @@ type MachineContextType =
rawData: AxiosResponse<MachinesResponse, any> | undefined;
data: Machine[];
isLoading: boolean;
flakeName: string;
error: AxiosError<any> | undefined;
isValidating: boolean;
@ -33,6 +34,7 @@ type MachineContextType =
swrKey: string | false | Record<any, any>;
}
| {
flakeName: string;
isLoading: true;
data: readonly [];
};
@ -42,14 +44,22 @@ const initialState = {
data: [],
} as const;
export const MachineContext = createContext<MachineContextType>(initialState);
export function CreateMachineContext(flakeName: string) {
return useMemo(() => {
return createContext<MachineContextType>({
...initialState,
flakeName,
});
}, [flakeName]);
}
interface MachineContextProviderProps {
children: ReactNode;
flakeName: string;
}
export const MachineContextProvider = (props: MachineContextProviderProps) => {
const { children } = props;
const { children, flakeName } = props;
const {
data: rawData,
isLoading,
@ -57,7 +67,7 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
isValidating,
mutate,
swrKey,
} = useListMachines();
} = useListMachines(flakeName);
const [filters, setFilters] = useState<Filters>([]);
const data = useMemo(() => {
@ -70,6 +80,8 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
return [];
}, [isLoading, error, isValidating, rawData, filters]);
const MachineContext = CreateMachineContext(flakeName);
return (
<MachineContext.Provider
value={{
@ -77,6 +89,7 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
data,
isLoading,
flakeName,
error,
isValidating,
@ -92,4 +105,5 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
);
};
export const useMachines = () => React.useContext(MachineContext);
export const useMachines = (flakeName: string) =>
React.useContext(CreateMachineContext(flakeName));
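
For orientation, a rough sketch of the reshaped per-flake contract above — the component names and the JSON rendering are illustrative, and only the provider/hook pairing with `flakeName` is taken from this change:

```tsx
import {
  MachineContextProvider,
  useMachines,
} from "@/components/hooks/useMachines";

// Sketch only: the context is keyed by flakeName, so the argument passed to
// useMachines must match the flakeName given to the surrounding provider.
function MachineList() {
  const machines = useMachines("defaultFlake");
  if (machines.isLoading) return <p>Loading...</p>;
  return (
    <ul>
      {machines.data.map((machine, index) => (
        // Machine's field names are not shown in this hunk, so render the raw object.
        <li key={index}>{JSON.stringify(machine)}</li>
      ))}
    </ul>
  );
}

export default function MachinesPage() {
  return (
    <MachineContextProvider flakeName="defaultFlake">
      <MachineList />
    </MachineContextProvider>
  );
}
```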

View File

@ -16,7 +16,7 @@ import { SearchBar } from "./searchBar";
import { StickySpeedDial } from "./stickySpeedDial";
export function NodeTable() {
const machines = useMachines();
const machines = useMachines("defaultFlake");
const theme = useTheme();
const is_xs = useMediaQuery(theme.breakpoints.only("xs"));

View File

@ -1,11 +1,11 @@
"use client";
import { SetStateAction, Dispatch, useState, useEffect, useMemo } from "react";
import IconButton from "@mui/material/IconButton";
import SearchIcon from "@mui/icons-material/Search";
import { useDebounce } from "../hooks/useDebounce";
import { Autocomplete, InputAdornment, TextField } from "@mui/material";
import { Machine } from "@/api/model/machine";
import SearchIcon from "@mui/icons-material/Search";
import { Autocomplete, InputAdornment, TextField } from "@mui/material";
import IconButton from "@mui/material/IconButton";
import { Dispatch, SetStateAction, useEffect, useMemo, useState } from "react";
import { useDebounce } from "../hooks/useDebounce";
export interface SearchBarProps {
tableData: readonly Machine[];
@ -13,7 +13,7 @@ export interface SearchBarProps {
}
export function SearchBar(props: SearchBarProps) {
let { tableData, setFilteredList } = props;
const { tableData, setFilteredList } = props;
const [search, setSearch] = useState<string>("");
const debouncedSearch = useDebounce(search, 250);
const [open, setOpen] = useState(false);

View File

@ -28,11 +28,11 @@ function createData(
};
}
var nameNumber = 0;
let nameNumber = 0;
// A function to generate random names
function getRandomName(): string {
let names = [
const names = [
"Alice",
"Bob",
"Charlie",
@ -53,7 +53,7 @@ function getRandomName(): string {
"Wendy",
"Zoe",
];
let index = Math.floor(Math.random() * names.length);
const index = Math.floor(Math.random() * names.length);
return names[index] + nameNumber++;
}
@ -75,8 +75,12 @@ function getRandomName(): string {
// A function to generate random status keys
function getRandomStatus(): NodeStatusKeys {
let statusKeys = [NodeStatus.Online, NodeStatus.Offline, NodeStatus.Pending];
let index = Math.floor(Math.random() * statusKeys.length);
const statusKeys = [
NodeStatus.Online,
NodeStatus.Offline,
NodeStatus.Pending,
];
const index = Math.floor(Math.random() * statusKeys.length);
return statusKeys[index];
}
@ -85,8 +89,8 @@ function getRandomLastSeen(status: NodeStatusKeys): number {
if (status === "online") {
return 0;
} else {
let min = 1; // One day ago
let max = 360; // One year ago
const min = 1; // One day ago
const max = 360; // One year ago
return Math.floor(Math.random() * (max - min + 1) + min);
}
}
@ -164,12 +168,12 @@ export const tableData = [
// A function to execute the createData function with dummy data in a loop 100 times and return an array
export function executeCreateData(): TableData[] {
let result: TableData[] = [];
const result: TableData[] = [];
for (let i = 0; i < 100; i++) {
// Generate dummy data
let name = getRandomName();
let status = getRandomStatus();
let last_seen = getRandomLastSeen(status);
const name = getRandomName();
const status = getRandomStatus();
const last_seen = getRandomLastSeen(status);
// Call the createData function and push the result to the array
result.push(createData(name, status, last_seen));

View File

@ -16,15 +16,16 @@ export default function JoinPrequel() {
const queryParams = useSearchParams();
const flakeUrl = queryParams.get("flake") || "";
const flakeAttr = queryParams.get("attr") || "default";
const { handleSubmit, control, formState, getValues, reset } =
useForm<FormValues>({ defaultValues: { flakeUrl: "" } });
const { control, formState, getValues, reset } = useForm<FormValues>({
defaultValues: { flakeUrl: "" },
});
return (
<Layout>
<Suspense fallback="Loading">
{!formState.isSubmitted && !flakeUrl && (
<form
onSubmit={handleSubmit(() => {})}
// onSubmit={handleSubmit(() => {})}
className="w-full max-w-2xl justify-self-center"
>
<Controller

View File

@ -1,7 +1,7 @@
{
description = "<Put your description here>";
inputs.clan-core.url = "git+https://git.clan.lol/clan/clan-core";
inputs.clan-core.url = "git+https://git.clan.lol/clan/clan-core?ref=Qubasa-main";
outputs = { self, clan-core, ... }:
let
@ -9,6 +9,7 @@
pkgs = clan-core.inputs.nixpkgs.legacyPackages.${system};
clan = clan-core.lib.buildClan {
directory = self;
clanName = "__CHANGE_ME__";
};
in
{