
Merge pull request 'Moved history to own subcommand' (#638) from Qubasa-main into main

clan-bot 2023-12-14 17:48:32 +00:00
commit cf899cbee9
189 changed files with 362 additions and 57585 deletions

View File

@@ -10,15 +10,6 @@
treefmt.flakeCheck = true;
treefmt.flakeFormatter = true;
treefmt.programs.shellcheck.enable = true;
treefmt.programs.prettier.enable = true;
# TODO: add custom prettier package, that uses our ui/node_modules
# treefmt.programs.prettier.settings.plugins = [
# "${self'.packages.prettier-plugin-tailwindcss}/lib/node_modules/prettier-plugin-tailwindcss/dist/index.mjs"
# ];
treefmt.settings.formatter.prettier.excludes = [
"secrets.yaml"
"key.json"
];
treefmt.programs.mypy.enable = true;
treefmt.programs.mypy.directories = {
@@ -39,7 +30,6 @@
"--" # this argument is ignored by bash
];
includes = [ "*.nix" ];
excludes = [ "pkgs/node-packages/*.nix" ];
};
treefmt.settings.formatter.python = {
command = "sh";

View File

@@ -1,6 +1,6 @@
# clan-cli
The clan-cli contains the command line interface as well as the graphical webui through the `clan webui` command.
The clan-cli contains the command line interface.
## Hacking on the cli
@@ -17,43 +17,6 @@ Afterwards you can use the local bin wrapper to test things in the cli:
./bin/clan
```
## Hacking on the webui
By default the webui is built from a tarball available at https://git.clan.lol/clan/-/packages/generic/ui/.
To start a local development environment instead, use the `--dev` flag:
```
./bin/clan webui --dev
```
This will spawn two webservers: a python one for the api and a nodejs one that rebuilds the ui on the fly.
## Run webui directly
Useful for the vscode run and debug option.
```bash
python -m clan_cli.webui --reload --no-open
```
Add this `launch.json` to your .vscode directory to have working breakpoints in your vscode editor.
```json
{
"version": "0.2.0",
"configurations": [
{
"name": "Clan Webui",
"type": "python",
"request": "launch",
"module": "clan_cli.webui",
"justMyCode": true,
"args": ["--reload", "--no-open", "--log-level", "debug"]
}
]
}
```
## Run locally single-threaded for debugging
By default tests run in parallel using pytest-parallel.

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env python
import argparse
import json
import sys
from pathlib import Path
from uvicorn.importer import import_from_string
def main() -> None:
parser = argparse.ArgumentParser(prog="gen-openapi")
parser.add_argument(
"app", help='App import string. Eg. "main:app"', default="main:app"
)
parser.add_argument("--app-dir", help="Directory containing the app", default=None)
parser.add_argument(
"--out", help="Output file ending in .json", default="openapi.json"
)
args = parser.parse_args()
if args.app_dir is not None:
print(f"adding {args.app_dir} to sys.path")
sys.path.insert(0, args.app_dir)
print(f"importing app from {args.app}")
app = import_from_string(args.app)
openapi = app.openapi()
version = openapi.get("openapi", "unknown version")
print(f"writing openapi spec v{version}")
out = Path(args.out)
out.parent.mkdir(parents=True, exist_ok=True)
out.write_text(json.dumps(openapi, indent=2))
if __name__ == "__main__":
main()

View File

@@ -6,7 +6,7 @@ from pathlib import Path
from types import ModuleType
from typing import Any
from . import backups, config, flakes, machines, secrets, vms, webui
from . import backups, config, flakes, history, machines, secrets, vms
from .custom_logger import setup_logging
from .dirs import get_clan_flake_toplevel, is_clan_flake
from .ssh import cli as ssh_cli
@@ -105,12 +105,12 @@ def create_parser(prog: str | None = None) -> argparse.ArgumentParser:
)
machines.register_parser(parser_machine)
parser_webui = subparsers.add_parser("webui", help="start webui")
webui.register_parser(parser_webui)
parser_vms = subparsers.add_parser("vms", help="manage virtual machines")
vms.register_parser(parser_vms)
parser_history = subparsers.add_parser("history", help="manage history")
history.register_parser(parser_history)
if argcomplete:
argcomplete.autocomplete(parser)

View File

@@ -40,6 +40,7 @@ class ClanParameters:
class ClanURI:
# Initialize the class with a clan:// URI
def __init__(self, uri: str) -> None:
self._full_uri = uri
# Check if the URI starts with clan://
if uri.startswith("clan://"):
self._nested_uri = uri[7:]
@@ -53,13 +54,13 @@ class ClanURI:
# Parse the query string into a dictionary
query = urllib.parse.parse_qs(self._components.query)
params: dict[str, str] = {}
new_params: dict[str, str] = {}
for field in dataclasses.fields(ClanParameters):
if field.name in query:
values = query[field.name]
if len(values) > 1:
raise ClanError(f"Multiple values for parameter: {field.name}")
params[field.name] = values[0]
new_params[field.name] = values[0]
# Remove the field from the query dictionary
# clan uri and nested uri share one namespace for query parameters
@@ -68,7 +69,7 @@ class ClanURI:
new_query = urllib.parse.urlencode(query, doseq=True)
self._components = self._components._replace(query=new_query)
self.params = ClanParameters(**params)
self.params = ClanParameters(**new_params)
comb = (
self._components.scheme,
@@ -96,10 +97,29 @@ class ClanURI:
case _:
raise ClanError(f"Unsupported uri components: {self.scheme}")
def get_full_uri(self) -> str:
return self._full_uri
@classmethod
def from_path(cls, path: Path, params: ClanParameters) -> Self: # noqa
urlparams = urllib.parse.urlencode(params.__dict__)
return cls(f"clan://{path}?{urlparams}")
def from_path(cls, path: Path, params: ClanParameters | None = None) -> Self: # noqa
return cls.from_str(str(path), params)
@classmethod
def from_str(cls, url: str, params: ClanParameters | None = None) -> Self: # noqa
prefix = "clan://"
if url.startswith(prefix):
url = url[len(prefix) :]
if params is None:
return cls(f"clan://{url}")
comp = urllib.parse.urlparse(url)
query = urllib.parse.parse_qs(comp.query)
query.update(params.__dict__)
new_query = urllib.parse.urlencode(query, doseq=True)
comp = comp._replace(query=new_query)
new_url = urllib.parse.urlunparse(comp)
return cls(f"clan://{new_url}")
def __str__(self) -> str:
return f"ClanURI({self._components.geturl()})"
return self.get_full_uri()
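A short usage sketch of the new `from_str` constructor, mirroring the test cases further down in this diff (`ClanParameters` and `get_internal` behavior taken from those tests):

```python
from clan_cli.clan_uri import ClanParameters, ClanURI

# Explicit parameters are merged into the URI's query string.
uri = ClanURI.from_str("https://example.com?password=1234", params=ClanParameters(flake_attr="myVM"))
assert uri.params.flake_attr == "myVM"

# Plain paths work too; the clan:// prefix is added when missing.
uri = ClanURI.from_str("~/Downloads/democlan")
assert uri.get_internal() == "~/Downloads/democlan"
print(uri)  # __str__ now returns the full clan:// URI
```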

View File

@@ -1,8 +1,6 @@
#!/usr/bin/env python3
import argparse
from clan_cli.flakes.add import register_add_parser
from clan_cli.flakes.history import register_list_parser
from clan_cli.flakes.inspect import register_inspect_parser
from .create import register_create_parser
@@ -18,9 +16,5 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
)
create_parser = subparser.add_parser("create", help="Create a clan flake")
register_create_parser(create_parser)
add_parser = subparser.add_parser("add", help="Add a clan flake")
register_add_parser(add_parser)
list_parser = subparser.add_parser("list", help="List recently used flakes")
register_list_parser(list_parser)
inspect_parser = subparser.add_parser("inspect", help="Inspect a clan flake")
register_inspect_parser(inspect_parser)

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env python3
import argparse
from pathlib import Path
from clan_cli.flakes.history import push_history
from ..async_cmd import CmdOut, runforcli
async def add_flake(path: Path) -> dict[str, CmdOut]:
push_history(path)
return {}
def add_flake_command(args: argparse.Namespace) -> None:
runforcli(add_flake, args.path)
# takes a (sub)parser and configures it
def register_add_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("path", type=Path, help="Path to the flake", default=Path("."))
parser.set_defaults(func=add_flake_command)

View File

@@ -1,74 +0,0 @@
#!/usr/bin/env python3
import argparse
import dataclasses
import json
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Any
from clan_cli.dirs import user_history_file
from ..locked_open import locked_open
class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, o: Any) -> Any:
if dataclasses.is_dataclass(o):
return dataclasses.asdict(o)
return super().default(o)
@dataclass
class HistoryEntry:
path: str
last_used: str
def list_history() -> list[HistoryEntry]:
logs: list[HistoryEntry] = []
if not user_history_file().exists():
return []
with locked_open(user_history_file(), "r") as f:
try:
content: str = f.read()
parsed: list[dict] = json.loads(content)
logs = [HistoryEntry(**p) for p in parsed]
except json.JSONDecodeError as ex:
print("Failed to load history. Invalid JSON.")
print(f"{user_history_file()}: {ex}")
return logs
def push_history(path: Path) -> list[HistoryEntry]:
user_history_file().parent.mkdir(parents=True, exist_ok=True)
logs = list_history()
found = False
with locked_open(user_history_file(), "w+") as f:
for entry in logs:
if entry.path == str(path):
found = True
entry.last_used = datetime.now().isoformat()
if not found:
logs.append(
HistoryEntry(path=str(path), last_used=datetime.now().isoformat())
)
f.write(json.dumps(logs, cls=EnhancedJSONEncoder))
f.truncate()
return logs
def list_history_command(args: argparse.Namespace) -> None:
for history_entry in list_history():
print(history_entry.path)
# takes a (sub)parser and configures it
def register_list_parser(parser: argparse.ArgumentParser) -> None:
parser.set_defaults(func=list_history_command)
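`locked_open` is imported from `..locked_open` and is not shown in this diff; a minimal sketch of such a helper, assuming POSIX `fcntl` advisory locking (the real implementation may differ):

```python
import fcntl
from collections.abc import Iterator
from contextlib import contextmanager
from pathlib import Path
from typing import IO

@contextmanager
def locked_open(path: Path, mode: str = "r") -> Iterator[IO[str]]:
    # Hold an exclusive lock for the lifetime of the handle so that
    # concurrent clan invocations cannot interleave history writes.
    with open(path, mode) as fd:
        fcntl.flock(fd, fcntl.LOCK_EX)
        try:
            yield fd
        finally:
            fcntl.flock(fd, fcntl.LOCK_UN)
```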

View File

@@ -5,6 +5,7 @@ from dataclasses import dataclass
from pathlib import Path
from ..errors import ClanError
from ..machines.list import list_machines
from ..nix import nix_config, nix_eval, nix_metadata
@@ -13,6 +14,7 @@ class FlakeConfig:
flake_url: str | Path
flake_attr: str
nar_hash: str
icon: str | None
description: str | None
last_updated: str
@@ -23,13 +25,19 @@ def inspect_flake(flake_url: str | Path, flake_attr: str) -> FlakeConfig:
config = nix_config()
system = config["system"]
machines = list_machines(flake_url)
if flake_attr not in machines:
raise ClanError(
f"Machine {flake_attr} not found in {flake_url}. Available machines: {', '.join(machines)}"
)
cmd = nix_eval(
[
f'{flake_url}#clanInternals.machines."{system}"."{flake_attr}".config.clanCore.clanIcon'
]
)
proc = subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE)
proc = subprocess.run(cmd, text=True, capture_output=True)
assert proc.stdout is not None
if proc.returncode != 0:
raise ClanError(
@@ -38,6 +46,8 @@ command: {shlex.join(cmd)}
exit code: {proc.returncode}
stdout:
{proc.stdout}
stderr:
{proc.stderr}
"""
)
res = proc.stdout.strip()
@@ -51,6 +61,7 @@ stdout:
return FlakeConfig(
flake_url=flake_url,
flake_attr=flake_attr,
nar_hash=meta["locked"]["narHash"],
icon=icon_path,
description=meta.get("description"),
last_updated=meta["lastModified"],

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env python3
import argparse
from .add import register_add_parser
from .list import register_list_parser
from .update import register_update_parser
# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
subparser = parser.add_subparsers(
title="command",
description="the command to run",
help="the command to run",
required=True,
)
add_parser = subparser.add_parser("add", help="Add a clan flake")
register_add_parser(add_parser)
list_parser = subparser.add_parser("list", help="List recently used flakes")
register_list_parser(list_parser)
update_parser = subparser.add_parser("update", help="Update a clan flake")
register_update_parser(update_parser)

View File

@@ -0,0 +1,88 @@
#!/usr/bin/env python3
import argparse
import dataclasses
import datetime
import json
from typing import Any
from clan_cli.flakes.inspect import FlakeConfig, inspect_flake
from ..clan_uri import ClanURI
from ..dirs import user_history_file
from ..locked_open import locked_open
class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, o: Any) -> Any:
if dataclasses.is_dataclass(o):
return dataclasses.asdict(o)
return super().default(o)
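The encoder lets `json.dumps` serialize dataclasses directly, which is how the history file is written below; a tiny illustration (the `Example` class is hypothetical):

```python
import dataclasses
import json

@dataclasses.dataclass
class Example:  # hypothetical stand-in for HistoryEntry
    last_used: str

print(json.dumps([Example("2023-12-14T17:48:32")], cls=EnhancedJSONEncoder))
# -> [{"last_used": "2023-12-14T17:48:32"}]
```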
@dataclasses.dataclass
class HistoryEntry:
last_used: str
flake: FlakeConfig
def __post_init__(self) -> None:
if isinstance(self.flake, dict):
self.flake = FlakeConfig(**self.flake)
def list_history() -> list[HistoryEntry]:
logs: list[HistoryEntry] = []
if not user_history_file().exists():
return []
with locked_open(user_history_file(), "r") as f:
try:
content: str = f.read()
parsed: list[dict] = json.loads(content)
logs = [HistoryEntry(**p) for p in parsed]
except json.JSONDecodeError as ex:
print("Failed to load history. Invalid JSON.")
print(f"{user_history_file()}: {ex}")
return logs
def add_history(uri: ClanURI) -> list[HistoryEntry]:
user_history_file().parent.mkdir(parents=True, exist_ok=True)
logs = list_history()
found = False
path = uri.get_internal()
machine = uri.params.flake_attr
for entry in logs:
if entry.flake.flake_url == str(path):
found = True
entry.last_used = datetime.datetime.now().isoformat()
if found:
break
flake = inspect_flake(path, machine)
flake.flake_url = str(flake.flake_url)
history = HistoryEntry(
flake=flake,
last_used=datetime.datetime.now().isoformat(),
)
logs.append(history)
with locked_open(user_history_file(), "w+") as f:
f.write(json.dumps(logs, cls=EnhancedJSONEncoder, indent=4))
f.truncate()
return logs
def add_history_command(args: argparse.Namespace) -> None:
add_history(args.uri)
# takes a (sub)parser and configures it
def register_add_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"uri", type=ClanURI.from_str, help="Path to the flake", default="."
)
parser.set_defaults(func=add_history_command)
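A sketch of equivalent programmatic usage, mirroring the `history add` tests further down in this diff (the flake path is hypothetical):

```python
from clan_cli.clan_uri import ClanParameters, ClanURI
from clan_cli.history.add import add_history

# Equivalent of: clan history add "clan:///home/user/democlan?flake_attr=vm1"
uri = ClanURI.from_str("/home/user/democlan", params=ClanParameters(flake_attr="vm1"))
entries = add_history(uri)  # inspects the flake and appends a HistoryEntry
```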

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env python3
import argparse
from .add import list_history
def list_history_command(args: argparse.Namespace) -> None:
for history_entry in list_history():
print(history_entry.flake.flake_url)
# takes a (sub)parser and configures it
def register_list_parser(parser: argparse.ArgumentParser) -> None:
parser.set_defaults(func=list_history_command)

View File

@@ -0,0 +1,44 @@
#!/usr/bin/env python3
import argparse
import copy
import datetime
import json
from ..dirs import user_history_file
from ..locked_open import locked_open
from ..nix import nix_metadata
from .add import EnhancedJSONEncoder, HistoryEntry, list_history
def update_history() -> list[HistoryEntry]:
logs = list_history()
new_logs = []
for entry in logs:
new_entry = copy.deepcopy(entry)
meta = nix_metadata(entry.flake.flake_url)
new_hash = meta["locked"]["narHash"]
if new_hash != entry.flake.nar_hash:
print(
f"Updating {entry.flake.flake_url} from {entry.flake.nar_hash} to {new_hash}"
)
new_entry.last_used = datetime.datetime.now().isoformat()
new_entry.flake.nar_hash = new_hash
# TODO: Delete stale entries
new_logs.append(new_entry)
with locked_open(user_history_file(), "w+") as f:
f.write(json.dumps(new_logs, cls=EnhancedJSONEncoder, indent=4))
f.truncate()
return new_logs
def add_update_command(args: argparse.Namespace) -> None:
update_history()
# takes a (sub)parser and configures it
def register_update_parser(parser: argparse.ArgumentParser) -> None:
parser.set_defaults(func=add_update_command)
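A sketch of how this subcommand could be driven from the `Cli` test helper that appears in the tests later in this diff:

```python
from cli import Cli  # test helper used by the history tests below

cli = Cli()
cli.run(["history", "update"])  # refreshes nar_hash for every stored entry
```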

View File

@@ -1,24 +1,42 @@
import argparse
import json
import logging
import os
import shlex
import subprocess
from pathlib import Path
from ..dirs import machines_dir
from .types import validate_hostname
from ..errors import ClanError
from ..nix import nix_config, nix_eval
log = logging.getLogger(__name__)
def list_machines(flake_dir: Path) -> list[str]:
path = machines_dir(flake_dir)
log.debug(f"Listing machines in {path}")
if not path.exists():
return []
objs: list[str] = []
for f in os.listdir(path):
if validate_hostname(f):
objs.append(f)
return objs
def list_machines(flake_url: Path | str) -> list[str]:
config = nix_config()
system = config["system"]
cmd = nix_eval(
[
f"{flake_url}#clanInternals.machines.{system}",
"--apply",
"builtins.attrNames",
"--json",
]
)
proc = subprocess.run(cmd, text=True, capture_output=True)
assert proc.stdout is not None
if proc.returncode != 0:
raise ClanError(
f"""
command: {shlex.join(cmd)}
exit code: {proc.returncode}
stdout:
{proc.stdout}
stderr:
{proc.stderr}
"""
)
res = proc.stdout.strip()
return json.loads(res)
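For reference, the command this builds is roughly `nix eval <flake_url>#clanInternals.machines.<system> --apply builtins.attrNames --json`, so the output parses straight into a list of machine names; a usage sketch (flake path and result are assumptions based on the tests below):

```python
machines = list_machines("/home/user/democlan")  # hypothetical flake path
print(machines)  # e.g. ["vm1", "vm2"]
```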
def list_command(args: argparse.Namespace) -> None:

View File

@@ -1,64 +0,0 @@
import argparse
from collections.abc import Callable
from typing import NoReturn
start_server: Callable | None = None
ServerImportError: ImportError | None = None
try:
from .server import start_server
except ImportError as e:
ServerImportError = e
def fastapi_is_not_installed(_: argparse.Namespace) -> NoReturn:
assert ServerImportError is not None
print(
f"Dependencies for the webserver is not installed. The webui command has been disabled ({ServerImportError})"
)
exit(1)
def register_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("--port", type=int, default=2979, help="Port to listen on")
parser.add_argument(
"--host", type=str, default="localhost", help="Host to listen on"
)
parser.add_argument(
"--no-open", action="store_true", help="Don't open the browser", default=False
)
parser.add_argument(
"--dev", action="store_true", help="Run in development mode", default=False
)
parser.add_argument(
"--dev-port",
type=int,
default=3000,
help="Port to listen on for the dev server",
)
parser.add_argument(
"--dev-host", type=str, default="localhost", help="Host to listen on"
)
parser.add_argument(
"--reload", action="store_true", help="Don't reload on changes", default=False
)
parser.add_argument(
"--log-level",
type=str,
default="info",
help="Log level",
choices=["critical", "error", "warning", "info", "debug", "trace"],
)
parser.add_argument(
"sub_url",
type=str,
default="/",
nargs="?",
help="Sub url to open in the browser",
)
# Set the args.func variable in args
if start_server is None:
parser.set_defaults(func=fastapi_is_not_installed)
else:
parser.set_defaults(func=start_server)

View File

@@ -1,15 +0,0 @@
import argparse
from . import register_parser
if __name__ == "__main__":
# this is used in our integration test
parser = argparse.ArgumentParser()
# call the register_parser function, which adds arguments to the parser
register_parser(parser)
args = parser.parse_args()
# call the function that is stored
# in the func attribute of args, and pass args as the argument
# look into register_parser to see how this is done
args.func(args)

View File

@@ -1,10 +0,0 @@
import logging
from pydantic import BaseModel
log = logging.getLogger(__name__)
class MissingClanImports(BaseModel):
missing_clan_imports: list[str] = []
msg: str = "Some requested clan modules could not be found"

View File

@@ -1,20 +0,0 @@
import logging
from pydantic import AnyUrl, BaseModel, Extra, parse_obj_as
from ..flakes.create import DEFAULT_URL
log = logging.getLogger(__name__)
class FlakeCreateInput(BaseModel):
url: AnyUrl = parse_obj_as(AnyUrl, DEFAULT_URL)
class MachineConfig(BaseModel):
clanImports: list[str] = [] # noqa: N815
clan: dict = {}
# allow extra fields to cover the full spectrum of a nixos config
class Config:
extra = Extra.allow

View File

@@ -1,68 +0,0 @@
from enum import Enum
from pydantic import BaseModel, Extra, Field
from ..async_cmd import CmdOut
class Status(Enum):
ONLINE = "online"
OFFLINE = "offline"
UNKNOWN = "unknown"
class ClanModulesResponse(BaseModel):
clan_modules: list[str]
class Machine(BaseModel):
name: str
status: Status
class MachinesResponse(BaseModel):
machines: list[Machine]
class MachineResponse(BaseModel):
machine: Machine
class ConfigResponse(BaseModel):
clanImports: list[str] = [] # noqa: N815
clan: dict = {}
# allow extra fields to cover the full spectrum of a nixos config
class Config:
extra = Extra.allow
class SchemaResponse(BaseModel):
schema_: dict = Field(alias="schema")
class VerifyMachineResponse(BaseModel):
success: bool
error: str | None
class FlakeAttrResponse(BaseModel):
flake_attrs: list[str]
class FlakeAction(BaseModel):
id: str
uri: str
class FlakeListResponse(BaseModel):
flakes: list[str]
class FlakeCreateResponse(BaseModel):
cmd_out: dict[str, CmdOut]
class FlakeResponse(BaseModel):
content: str
actions: list[FlakeAction]

View File

@@ -1,56 +0,0 @@
import logging
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.routing import APIRoute
from fastapi.staticfiles import StaticFiles
from .assets import asset_path
from .error_handlers import clan_error_handler
from .routers import clan_modules, flake, health, machines, root
from .settings import settings
from .tags import tags_metadata
# Logging setup
log = logging.getLogger(__name__)
def setup_app() -> FastAPI:
app = FastAPI()
if settings.env.is_development():
# Allow CORS in development mode for nextjs dev server
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(clan_modules.router)
app.include_router(flake.router)
app.include_router(health.router)
app.include_router(machines.router)
# Needs to be last in register. Because of wildcard route
app.include_router(root.router)
app.add_exception_handler(Exception, clan_error_handler)
app.mount("/static", StaticFiles(directory=asset_path()), name="static")
# Add tag descriptions to the OpenAPI schema
app.openapi_tags = tags_metadata
for route in app.routes:
if isinstance(route, APIRoute):
route.operation_id = route.name # use the endpoint function name as the operation id
log.debug(f"Registered route: {route}")
for i in app.exception_handlers.items():
log.debug(f"Registered exception handler: {i}")
return app
app = setup_app()

View File

@@ -1,39 +0,0 @@
import functools
import logging
from pathlib import Path
log = logging.getLogger(__name__)
def get_hash(string: str) -> str:
"""
This function takes a string like '/nix/store/kkvk20b8zh8aafdnfjp6dnf062x19732-source'
and returns the hash part 'kkvk20b8zh8aafdnfjp6dnf062x19732' after '/nix/store/' and before '-source'.
"""
# Split the string by '/' and get the last element
last_element = string.split("/")[-1]
# Split the last element by '-' and get the first element
hash_part = last_element.split("-")[0]
# Return the hash part
return hash_part
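Following the docstring, a quick sanity check:

```python
store_path = "/nix/store/kkvk20b8zh8aafdnfjp6dnf062x19732-source"
assert get_hash(store_path) == "kkvk20b8zh8aafdnfjp6dnf062x19732"
```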
def check_divergence(path: Path) -> None:
p = path.resolve()
log.info("Absolute web asset path: %s", p)
if not p.is_dir():
raise FileNotFoundError(p)
# Get the hash part of the path
gh = get_hash(str(p))
log.debug(f"Serving webui asset with hash {gh}")
@functools.cache
def asset_path() -> Path:
path = Path(__file__).parent / "assets"
log.debug("Serving assets from: %s", path)
check_divergence(path)
return path

View File

@@ -1,54 +0,0 @@
import logging
from fastapi import Request, status
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from ..errors import ClanError, ClanHttpError
from .settings import settings
log = logging.getLogger(__name__)
def clan_error_handler(request: Request, exc: Exception) -> JSONResponse:
headers = {}
if settings.env.is_development():
headers["Access-Control-Allow-Origin"] = "*"
headers["Access-Control-Allow-Methods"] = "*"
if isinstance(exc, ClanHttpError):
return JSONResponse(
status_code=exc.status_code,
content=jsonable_encoder(dict(detail={"msg": exc.msg})),
headers=headers,
)
elif isinstance(exc, ClanError):
log.error(f"ClanError: {exc}")
detail = [
{
"loc": [],
"msg": str(exc),
}
]
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content=jsonable_encoder(dict(detail=detail)),
headers=headers,
)
else:
log.exception(f"Unhandled Exception: {exc}")
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content=jsonable_encoder(
dict(
detail=[
{
"loc": [],
"msg": str(exc),
}
]
)
),
headers=headers,
)

View File

@@ -1,952 +0,0 @@
{
"openapi": "3.1.0",
"info": {
"title": "FastAPI",
"version": "0.1.0"
},
"paths": {
"/api/clan_modules": {
"get": {
"tags": ["modules"],
"summary": "List Clan Modules",
"operationId": "list_clan_modules",
"parameters": [
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ClanModulesResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/flake/history": {
"post": {
"tags": ["flake"],
"summary": "Flake History Append",
"operationId": "flake_history_append",
"parameters": [
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
},
"get": {
"tags": ["flake"],
"summary": "Flake History List",
"operationId": "flake_history_list",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Flake History List Api Flake History Get",
"type": "array",
"items": {
"type": "string",
"format": "path"
}
}
}
}
}
}
}
},
"/api/flake/attrs": {
"get": {
"tags": ["flake"],
"summary": "Inspect Flake Attrs",
"operationId": "inspect_flake_attrs",
"parameters": [
{
"name": "url",
"in": "query",
"required": true,
"schema": {
"title": "Url",
"anyOf": [
{
"type": "string",
"minLength": 1,
"maxLength": 65536,
"format": "uri"
},
{
"type": "string",
"format": "path"
}
]
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FlakeAttrResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/flake/inspect": {
"get": {
"tags": ["flake"],
"summary": "Inspect Flake",
"operationId": "inspect_flake",
"parameters": [
{
"name": "url",
"in": "query",
"required": true,
"schema": {
"title": "Url",
"anyOf": [
{
"type": "string",
"minLength": 1,
"maxLength": 65536,
"format": "uri"
},
{
"type": "string",
"format": "path"
}
]
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FlakeResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/flake/create": {
"post": {
"tags": ["flake"],
"summary": "Create Flake",
"operationId": "create_flake",
"parameters": [
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FlakeCreateInput"
}
}
}
},
"responses": {
"201": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FlakeCreateResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/machines": {
"get": {
"tags": ["machine"],
"summary": "List Machines",
"operationId": "list_machines",
"parameters": [
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MachinesResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/machines/{name}": {
"get": {
"tags": ["machine"],
"summary": "Get Machine",
"operationId": "get_machine",
"parameters": [
{
"name": "name",
"in": "path",
"required": true,
"schema": {
"title": "Name",
"type": "string"
}
},
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MachineResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/machines/{name}/config": {
"get": {
"tags": ["machine"],
"summary": "Get Machine Config",
"operationId": "get_machine_config",
"parameters": [
{
"name": "name",
"in": "path",
"required": true,
"schema": {
"title": "Name",
"type": "string"
}
},
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ConfigResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
},
"put": {
"tags": ["machine"],
"summary": "Set Machine Config",
"operationId": "set_machine_config",
"parameters": [
{
"name": "name",
"in": "path",
"required": true,
"schema": {
"title": "Name",
"type": "string"
}
},
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MachineConfig"
}
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/schema": {
"put": {
"tags": ["machine"],
"summary": "Get Machine Schema",
"operationId": "get_machine_schema",
"parameters": [
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"title": "Config",
"type": "object"
}
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/SchemaResponse"
}
}
}
},
"400": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MissingClanImports"
}
}
},
"description": "Bad Request"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/api/machines/{name}/verify": {
"get": {
"tags": ["machine"],
"summary": "Get Verify Machine Config",
"operationId": "get_verify_machine_config",
"parameters": [
{
"name": "name",
"in": "path",
"required": true,
"schema": {
"title": "Name",
"type": "string"
}
},
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/VerifyMachineResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
},
"put": {
"tags": ["machine"],
"summary": "Put Verify Machine Config",
"operationId": "put_verify_machine_config",
"parameters": [
{
"name": "name",
"in": "path",
"required": true,
"schema": {
"title": "Name",
"type": "string"
}
},
{
"name": "flake_dir",
"in": "query",
"required": true,
"schema": {
"title": "Flake Dir",
"type": "string",
"format": "path"
}
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"title": "Config",
"type": "object"
}
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/VerifyMachineResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
}
},
"components": {
"schemas": {
"ClanModulesResponse": {
"properties": {
"clan_modules": {
"items": {
"type": "string"
},
"type": "array",
"title": "Clan Modules"
}
},
"type": "object",
"required": ["clan_modules"],
"title": "ClanModulesResponse"
},
"CmdOut": {
"properties": {
"stdout": {
"type": "string",
"title": "Stdout"
},
"stderr": {
"type": "string",
"title": "Stderr"
},
"cwd": {
"type": "string",
"format": "path",
"title": "Cwd"
}
},
"type": "object",
"required": ["stdout", "stderr", "cwd"],
"title": "CmdOut"
},
"ConfigResponse": {
"properties": {
"clanImports": {
"items": {
"type": "string"
},
"type": "array",
"title": "Clanimports",
"default": []
},
"clan": {
"type": "object",
"title": "Clan",
"default": {}
}
},
"type": "object",
"title": "ConfigResponse"
},
"FlakeAction": {
"properties": {
"id": {
"type": "string",
"title": "Id"
},
"uri": {
"type": "string",
"title": "Uri"
}
},
"type": "object",
"required": ["id", "uri"],
"title": "FlakeAction"
},
"FlakeAttrResponse": {
"properties": {
"flake_attrs": {
"items": {
"type": "string"
},
"type": "array",
"title": "Flake Attrs"
}
},
"type": "object",
"required": ["flake_attrs"],
"title": "FlakeAttrResponse"
},
"FlakeCreateInput": {
"properties": {
"url": {
"type": "string",
"maxLength": 65536,
"minLength": 1,
"format": "uri",
"title": "Url",
"default": "git+https://git.clan.lol/clan/clan-core?new-clan"
}
},
"type": "object",
"title": "FlakeCreateInput"
},
"FlakeCreateResponse": {
"properties": {
"cmd_out": {
"additionalProperties": {
"items": [
{
"type": "string",
"title": "Stdout"
},
{
"type": "string",
"title": "Stderr"
},
{
"type": "string",
"format": "path",
"title": "Cwd"
}
],
"type": "array",
"maxItems": 3,
"minItems": 3
},
"type": "object",
"title": "Cmd Out"
}
},
"type": "object",
"required": ["cmd_out"],
"title": "FlakeCreateResponse"
},
"FlakeResponse": {
"properties": {
"content": {
"type": "string",
"title": "Content"
},
"actions": {
"items": {
"$ref": "#/components/schemas/FlakeAction"
},
"type": "array",
"title": "Actions"
}
},
"type": "object",
"required": ["content", "actions"],
"title": "FlakeResponse"
},
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"type": "array",
"title": "Detail"
}
},
"type": "object",
"title": "HTTPValidationError"
},
"Machine": {
"properties": {
"name": {
"type": "string",
"title": "Name"
},
"status": {
"$ref": "#/components/schemas/Status"
}
},
"type": "object",
"required": ["name", "status"],
"title": "Machine"
},
"MachineConfig": {
"properties": {
"clanImports": {
"items": {
"type": "string"
},
"type": "array",
"title": "Clanimports",
"default": []
},
"clan": {
"type": "object",
"title": "Clan",
"default": {}
}
},
"type": "object",
"title": "MachineConfig"
},
"MachineResponse": {
"properties": {
"machine": {
"$ref": "#/components/schemas/Machine"
}
},
"type": "object",
"required": ["machine"],
"title": "MachineResponse"
},
"MachinesResponse": {
"properties": {
"machines": {
"items": {
"$ref": "#/components/schemas/Machine"
},
"type": "array",
"title": "Machines"
}
},
"type": "object",
"required": ["machines"],
"title": "MachinesResponse"
},
"MissingClanImports": {
"properties": {
"missing_clan_imports": {
"items": {
"type": "string"
},
"type": "array",
"title": "Missing Clan Imports",
"default": []
},
"msg": {
"type": "string",
"title": "Msg",
"default": "Some requested clan modules could not be found"
}
},
"type": "object",
"title": "MissingClanImports"
},
"SchemaResponse": {
"properties": {
"schema": {
"type": "object",
"title": "Schema"
}
},
"type": "object",
"required": ["schema"],
"title": "SchemaResponse"
},
"Status": {
"enum": ["online", "offline", "unknown"],
"title": "Status",
"description": "An enumeration."
},
"ValidationError": {
"properties": {
"loc": {
"items": {
"anyOf": [
{
"type": "string"
},
{
"type": "integer"
}
]
},
"type": "array",
"title": "Location"
},
"msg": {
"type": "string",
"title": "Message"
},
"type": {
"type": "string",
"title": "Error Type"
}
},
"type": "object",
"required": ["loc", "msg", "type"],
"title": "ValidationError"
},
"VerifyMachineResponse": {
"properties": {
"success": {
"type": "boolean",
"title": "Success"
},
"error": {
"type": "string",
"title": "Error"
}
},
"type": "object",
"required": ["success"],
"title": "VerifyMachineResponse"
}
}
},
"tags": [
{
"name": "flake",
"description": "Operations on a flake.",
"externalDocs": {
"description": "What is a flake?",
"url": "https://www.tweag.io/blog/2020-05-25-flakes/"
}
},
{
"name": "machine",
"description": "Manage physical machines. Instances of a flake"
},
{
"name": "vm",
"description": "Manage virtual machines. Instances of a flake"
},
{
"name": "modules",
"description": "Manage cLAN modules of a flake"
}
]
}

View File

@@ -1,23 +0,0 @@
# Logging setup
import logging
from pathlib import Path
from fastapi import APIRouter, HTTPException
from clan_cli.clan_modules import get_clan_module_names
from ..api_outputs import (
ClanModulesResponse,
)
from ..tags import Tags
log = logging.getLogger(__name__)
router = APIRouter()
@router.get("/api/clan_modules", tags=[Tags.modules])
async def list_clan_modules(flake_dir: Path) -> ClanModulesResponse:
module_names, error = get_clan_module_names(flake_dir)
if error is not None:
raise HTTPException(status_code=400, detail=error)
return ClanModulesResponse(clan_modules=module_names)

View File

@@ -1,103 +0,0 @@
import json
from json.decoder import JSONDecodeError
from pathlib import Path
from typing import Annotated
from fastapi import APIRouter, Body, HTTPException, status
from pydantic import AnyUrl
from clan_cli.webui.api_inputs import (
FlakeCreateInput,
)
from clan_cli.webui.api_outputs import (
FlakeAction,
FlakeAttrResponse,
FlakeCreateResponse,
FlakeResponse,
)
from ...async_cmd import run
from ...flakes import add, create
from ...nix import nix_command, nix_flake_show
from ..tags import Tags
router = APIRouter()
# TODO: Check for directory traversal
async def get_attrs(url: AnyUrl | Path) -> list[str]:
cmd = nix_flake_show(url)
out = await run(cmd)
data: dict[str, dict] = {}
try:
data = json.loads(out.stdout)
except JSONDecodeError:
raise HTTPException(status_code=422, detail="Could not load flake.")
nixos_configs = data.get("nixosConfigurations", {})
flake_attrs = list(nixos_configs.keys())
if not flake_attrs:
raise HTTPException(
status_code=422, detail="No entry or no attribute: nixosConfigurations"
)
return flake_attrs
@router.post("/api/flake/history", tags=[Tags.flake])
async def flake_history_append(flake_dir: Path) -> None:
await add.add_flake(flake_dir)
@router.get("/api/flake/history", tags=[Tags.flake])
async def flake_history_list() -> list[Path]:
return []
# TODO: Check for directory traversal
@router.get("/api/flake/attrs", tags=[Tags.flake])
async def inspect_flake_attrs(url: AnyUrl | Path) -> FlakeAttrResponse:
return FlakeAttrResponse(flake_attrs=await get_attrs(url))
# TODO: Check for directory traversal
@router.get("/api/flake/inspect", tags=[Tags.flake])
async def inspect_flake(
url: AnyUrl | Path,
) -> FlakeResponse:
actions = []
# Extract the flake from the given URL
# We do this by running 'nix flake prefetch {url} --json'
cmd = nix_command(["flake", "prefetch", str(url), "--json", "--refresh"])
out = await run(cmd)
data: dict[str, str] = json.loads(out.stdout)
if data.get("storePath") is None:
raise HTTPException(status_code=500, detail="Could not load flake")
content: str
with open(Path(data.get("storePath", "")) / Path("flake.nix")) as f:
content = f.read()
# TODO: Figure out some measure when it is insecure to inspect or create a VM
actions.append(FlakeAction(id="vms/inspect", uri="api/vms/inspect"))
actions.append(FlakeAction(id="vms/create", uri="api/vms/create"))
return FlakeResponse(content=content, actions=actions)
@router.post(
"/api/flake/create", tags=[Tags.flake], status_code=status.HTTP_201_CREATED
)
async def create_flake(
flake_dir: Path, args: Annotated[FlakeCreateInput, Body()]
) -> FlakeCreateResponse:
if flake_dir.exists():
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="Flake already exists",
)
cmd_out = await create.create_flake(flake_dir, args.url)
return FlakeCreateResponse(cmd_out=cmd_out)

View File

@@ -1,8 +0,0 @@
from fastapi import APIRouter
router = APIRouter()
@router.get("/health", include_in_schema=False)
async def health() -> str:
return "OK"

View File

@@ -1,92 +0,0 @@
# Logging setup
import logging
from pathlib import Path
from typing import Annotated
from fastapi import APIRouter, Body
from fastapi.encoders import jsonable_encoder
from clan_cli.webui.api_errors import MissingClanImports
from clan_cli.webui.api_inputs import MachineConfig
from ...config.machine import (
config_for_machine,
set_config_for_machine,
verify_machine_config,
)
from ...config.schema import machine_schema
from ...machines.list import list_machines as _list_machines
from ..api_outputs import (
ConfigResponse,
Machine,
MachineResponse,
MachinesResponse,
SchemaResponse,
Status,
VerifyMachineResponse,
)
from ..tags import Tags
log = logging.getLogger(__name__)
router = APIRouter()
@router.get("/api/machines", tags=[Tags.machine])
async def list_machines(flake_dir: Path) -> MachinesResponse:
machines = []
for m in _list_machines(flake_dir):
machines.append(Machine(name=m, status=Status.UNKNOWN))
return MachinesResponse(machines=machines)
@router.get("/api/machines/{name}", tags=[Tags.machine])
async def get_machine(flake_dir: Path, name: str) -> MachineResponse:
log.error("TODO")
return MachineResponse(machine=Machine(name=name, status=Status.UNKNOWN))
@router.get("/api/machines/{name}/config", tags=[Tags.machine])
async def get_machine_config(flake_dir: Path, name: str) -> ConfigResponse:
config = config_for_machine(flake_dir, name)
return ConfigResponse(**config)
@router.put("/api/machines/{name}/config", tags=[Tags.machine])
async def set_machine_config(
flake_dir: Path, name: str, config: Annotated[MachineConfig, Body()]
) -> None:
conf = jsonable_encoder(config)
set_config_for_machine(flake_dir, name, conf)
@router.put(
"/api/schema",
tags=[Tags.machine],
responses={400: {"model": MissingClanImports}},
)
async def get_machine_schema(
flake_dir: Path, config: Annotated[dict, Body()]
) -> SchemaResponse:
schema = machine_schema(flake_dir, config=config)
return SchemaResponse(schema=schema)
@router.get("/api/machines/{name}/verify", tags=[Tags.machine])
async def get_verify_machine_config(
flake_dir: Path, name: str
) -> VerifyMachineResponse:
error = verify_machine_config(flake_dir, name)
success = error is None
return VerifyMachineResponse(success=success, error=error)
@router.put("/api/machines/{name}/verify", tags=[Tags.machine])
async def put_verify_machine_config(
flake_dir: Path,
name: str,
config: Annotated[dict, Body()],
) -> VerifyMachineResponse:
error = verify_machine_config(flake_dir, name, config)
success = error is None
return VerifyMachineResponse(success=success, error=error)

View File

@@ -1,36 +0,0 @@
import logging
import os
from mimetypes import guess_type
from pathlib import Path
from fastapi import APIRouter, Response
from ..assets import asset_path
router = APIRouter()
log = logging.getLogger(__name__)
@router.get("/{path_name:path}", include_in_schema=False)
async def root(path_name: str) -> Response:
if path_name == "":
path_name = "index.html"
filename = Path(os.path.normpath(asset_path() / path_name))
if not filename.is_relative_to(asset_path()):
log.error("Prevented directory traversal: %s", filename)
# prevent directory traversal
return Response(status_code=403)
if not filename.is_file():
if filename.suffix == "":
filename = filename.with_suffix(".html")
if not filename.is_file():
log.error("File not found: %s", filename)
return Response(status_code=404)
else:
return Response(status_code=404)
content_type, _ = guess_type(filename)
return Response(filename.read_bytes(), media_type=content_type)

View File

@@ -1,105 +0,0 @@
import argparse
import logging
import os
import shutil
import subprocess
import time
import urllib.request
from collections.abc import Iterator
from contextlib import ExitStack, contextmanager
from pathlib import Path
from threading import Thread
# XXX: can we dynamically load this using nix develop?
import uvicorn
from pydantic import AnyUrl, IPvAnyAddress
from pydantic.tools import parse_obj_as
from clan_cli.errors import ClanError
log = logging.getLogger(__name__)
def open_browser(base_url: AnyUrl, sub_url: str) -> None:
for i in range(5):
try:
urllib.request.urlopen(base_url + "/health")
break
except OSError:
time.sleep(i)
url = parse_obj_as(AnyUrl, f"{base_url}/{sub_url.removeprefix('/')}")
_open_browser(url)
def _open_browser(url: AnyUrl) -> subprocess.Popen:
for browser in ("firefox", "iceweasel", "iceape", "seamonkey"):
if shutil.which(browser):
# Do not add a new profile, as it will break in combination with
# the -kiosk flag.
cmd = [
browser,
"-kiosk",
"-new-window",
url,
]
print(" ".join(cmd))
return subprocess.Popen(cmd)
for browser in ("chromium", "chromium-browser", "google-chrome", "chrome"):
if shutil.which(browser):
return subprocess.Popen([browser, f"--app={url}"])
raise ClanError("No browser found")
@contextmanager
def spawn_node_dev_server(host: IPvAnyAddress, port: int) -> Iterator[None]:
log.info("Starting node dev server...")
path = Path(__file__).parent.parent.parent.parent / "ui"
with subprocess.Popen(
[
"direnv",
"exec",
path,
"npm",
"run",
"dev",
"--",
"--hostname",
str(host),
"--port",
str(port),
],
cwd=path,
) as proc:
try:
yield
finally:
proc.terminate()
def start_server(args: argparse.Namespace) -> None:
os.environ["CLAN_WEBUI_ENV"] = "development" if args.dev else "production"
with ExitStack() as stack:
headers: list[tuple[str, str]] = []
if args.dev:
stack.enter_context(spawn_node_dev_server(args.dev_host, args.dev_port))
base_url = f"http://{args.dev_host}:{args.dev_port}"
host = args.dev_host
if ":" in host:
host = f"[{host}]"
else:
base_url = f"http://{args.host}:{args.port}"
if not args.no_open:
Thread(target=open_browser, args=(base_url, args.sub_url)).start()
uvicorn.run(
"clan_cli.webui.app:app",
host=args.host,
port=args.port,
log_level=args.log_level,
reload=args.reload,
access_log=args.log_level == "debug",
headers=headers,
)

View File

@@ -1,32 +0,0 @@
import logging
import os
from enum import Enum
log = logging.getLogger(__name__)
class EnvType(Enum):
production = "production"
development = "development"
@staticmethod
def from_environment() -> "EnvType":
t = os.environ.get("CLAN_WEBUI_ENV", "production")
try:
return EnvType[t]
except KeyError:
log.warning(f"Invalid environment type: {t}, fallback to production")
return EnvType.production
def is_production(self) -> bool:
return self == EnvType.production
def is_development(self) -> bool:
return self == EnvType.development
class Settings:
env = EnvType.from_environment()
settings = Settings()
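Since the member names equal their values, `EnvType[t]` resolves the environment variable directly; a small sketch:

```python
import os

os.environ["CLAN_WEBUI_ENV"] = "development"
assert EnvType.from_environment().is_development()

os.environ["CLAN_WEBUI_ENV"] = "bogus"  # unknown values fall back to production
assert EnvType.from_environment().is_production()
```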

View File

@@ -1,37 +0,0 @@
from enum import Enum
from typing import Any
class Tags(Enum):
flake = "flake"
machine = "machine"
vm = "vm"
modules = "modules"
root = "root"
def __str__(self) -> str:
return self.value
tags_metadata: list[dict[str, Any]] = [
{
"name": str(Tags.flake),
"description": "Operations on a flake.",
"externalDocs": {
"description": "What is a flake?",
"url": "https://www.tweag.io/blog/2020-05-25-flakes/",
},
},
{
"name": str(Tags.machine),
"description": "Manage physical machines. Instances of a flake",
},
{
"name": str(Tags.vm),
"description": "Manage virtual machines. Instances of a flake",
},
{
"name": str(Tags.modules),
"description": "Manage cLAN modules of a flake",
},
]

View File

@@ -1,8 +1,6 @@
{ age
, lib
, argcomplete
, fastapi
, uvicorn
, installShellFiles
, nix
, openssh
@@ -21,7 +19,6 @@
, wheel
, fakeroot
, rsync
, ui-assets
, bash
, sshpass
, zbar
@@ -36,7 +33,6 @@
, rope
, clan-core-path
, writeShellScriptBin
, nodePackages
}:
let
@@ -45,8 +41,6 @@ let
];
pytestDependencies = runtimeDependencies ++ dependencies ++ [
fastapi # optional dependency: if not enabled, the webui subcommand will not work
uvicorn # optional dependency: if not enabled, the webui subcommand will not work
#schemathesis # optional for http fuzzing
pytest
@@ -93,7 +87,6 @@ let
rm $out/clan_cli/config/jsonschema
ln -s ${nixpkgs'} $out/clan_cli/nixpkgs
cp -r ${../../lib/jsonschema} $out/clan_cli/config/jsonschema
ln -s ${ui-assets} $out/clan_cli/webui/assets
'';
nixpkgs' = runCommand "nixpkgs" { nativeBuildInputs = [ nix ]; } ''
mkdir $out
@@ -168,28 +161,8 @@ python3.pkgs.buildPythonApplication {
fi
touch $out
'';
check-clan-openapi = runCommand "check-clan-openapi" { } ''
export PATH=${checkPython}/bin:$PATH
${checkPython}/bin/python ${source}/bin/gen-openapi --out ./openapi.json --app-dir ${source} clan_cli.webui.app:app
${lib.getExe nodePackages.prettier} --write ./openapi.json
if ! diff -u ./openapi.json ${source}/clan_cli/webui/openapi.json; then
echo "nix run .#update-clan-openapi to update the openapi.json file."
exit 1
fi
touch $out
'';
};
passthru.update-clan-openapi = writeShellScriptBin "update-clan-openapi" ''
export PATH=${checkPython}/bin:$PATH
git_root=$(git rev-parse --show-toplevel)
cd "$git_root/pkgs/clan-cli"
${checkPython}/bin/python ./bin/gen-openapi --out clan_cli/webui/openapi.json --app-dir . clan_cli.webui.app:app
${lib.getExe nodePackages.prettier} --write clan_cli/webui/openapi.json
'';
passthru.nixpkgs = nixpkgs';
passthru.checkPython = checkPython;

View File

@@ -30,12 +30,11 @@
in
{
devShells.clan-cli = pkgs.callPackage ./shell.nix {
inherit (self'.packages) clan-cli ui-assets nix-unit;
inherit (self'.packages) clan-cli nix-unit;
# inherit (inputs) democlan;
};
packages = {
clan-cli = pkgs.python3.pkgs.callPackage ./default.nix {
inherit (self'.packages) ui-assets;
inherit (inputs) nixpkgs;
# inherit (inputs) democlan;
inherit (inputs.nixpkgs-for-deal.legacyPackages.${system}.python3Packages) deal;
@@ -45,11 +44,6 @@
default = self'.packages.clan-cli;
};
apps.update-clan-openapi = {
type = "app";
program = "${self'.packages.clan-cli.passthru.update-clan-openapi}/bin/update-clan-openapi";
};
checks = self'.packages.clan-cli.tests;
};

View File

@@ -1,4 +1,4 @@
{ nix-unit, clan-cli, ui-assets, system, mkShell, writeScriptBin, openssh, ruff, python3 }:
{ nix-unit, clan-cli, system, mkShell, writeScriptBin, openssh, ruff, python3 }:
let
checkScript = writeScriptBin "check" ''
nix build .#checks.${system}.{treefmt,clan-pytest} -L "$@"
@@ -39,7 +30,6 @@ mkShell {
--editable $repo_root
ln -sfT ${clan-cli.nixpkgs} clan_cli/nixpkgs
ln -sfT ${ui-assets} clan_cli/webui/assets
export PATH="$tmp_path/python/bin:${checkScript}/bin:$PATH"
export PYTHONPATH="$repo_root:$tmp_path/python/${pythonWithDeps.sitePackages}:"

View File

@@ -1,14 +0,0 @@
import logging
import pytest
from fastapi.testclient import TestClient
from clan_cli.webui.app import app
# TODO: Why stateful
@pytest.fixture(scope="session")
def api() -> TestClient:
# logging.getLogger("httpx").setLevel(level=logging.WARNING)
logging.getLogger("asyncio").setLevel(logging.INFO)
return TestClient(app, raise_server_exceptions=False)

View File

@@ -10,7 +10,6 @@ from clan_cli.nix import nix_shell
sys.path.append(os.path.join(os.path.dirname(__file__), "helpers"))
pytest_plugins = [
"api",
"temporary_dir",
"root",
"age_keys",

View File

@@ -9,8 +9,6 @@ from pathlib import Path
from typing import NamedTuple
import pytest
from pydantic import AnyUrl
from pydantic.tools import parse_obj_as
from root import CLAN_CORE
from clan_cli.dirs import nixpkgs_source
@@ -136,16 +134,6 @@ def test_local_democlan(
yield FlakeForTest(democlan_p)
@pytest.fixture
def test_democlan_url(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[AnyUrl]:
yield parse_obj_as(
AnyUrl,
"https://git.clan.lol/clan/democlan/archive/main.tar.gz",
)
@pytest.fixture
def test_flake_with_core_and_pass(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path

View File

@@ -1,16 +0,0 @@
import pytest
from api import TestClient
from fixtures_flakes import FlakeForTest
@pytest.mark.with_core
def test_configure_machine(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
# retrieve the list of available clanModules
response = api.get(f"/api/clan_modules?flake_dir={test_flake_with_core.path}")
assert response.status_code == 200, response.text
response_json = response.json()
assert isinstance(response_json, dict)
assert "clan_modules" in response_json
assert len(response_json["clan_modules"]) > 0
# ensure all entries are a string
assert all(isinstance(x, str) for x in response_json["clan_modules"])

View File

@@ -101,6 +101,36 @@ def test_from_path_with_default() -> None:
assert False
def test_from_str() -> None:
# Create a ClanURI object from a remote URI with parameters
uri_str = "https://example.com?password=asdasd&test=1234"
params = ClanParameters(flake_attr="myVM")
uri = ClanURI.from_str(url=uri_str, params=params)
assert uri.params.flake_attr == "myVM"
match uri.scheme:
case ClanScheme.HTTP.value(url):
assert url == "https://example.com?password=asdasd&test=1234" # type: ignore
case _:
assert False
uri_str = "~/Downloads/democlan"
params = ClanParameters(flake_attr="myVM")
uri = ClanURI.from_str(url=uri_str, params=params)
assert uri.params.flake_attr == "myVM"
assert uri.get_internal() == "~/Downloads/democlan"
uri_str = "~/Downloads/democlan"
uri = ClanURI.from_str(url=uri_str)
assert uri.params.flake_attr == "defaultVM"
assert uri.get_internal() == "~/Downloads/democlan"
uri_str = "clan://~/Downloads/democlan"
uri = ClanURI.from_str(url=uri_str)
assert uri.params.flake_attr == "defaultVM"
assert uri.get_internal() == "~/Downloads/democlan"
def test_remote_with_all_params() -> None:
# Create a ClanURI object from a remote URI with parameters
uri = ClanURI("clan://https://example.com?flake_attr=myVM&password=1234")

View File

@@ -3,35 +3,14 @@ import subprocess
from pathlib import Path
import pytest
from api import TestClient
from cli import Cli
from clan_cli.flakes.create import DEFAULT_URL
@pytest.fixture
def cli() -> Cli:
return Cli()
@pytest.mark.impure
def test_create_flake_api(
monkeypatch: pytest.MonkeyPatch, api: TestClient, temporary_home: Path
) -> None:
flake_dir = temporary_home / "test-flake"
response = api.post(
f"/api/flake/create?flake_dir={flake_dir}",
json=dict(
flake_dir=str(flake_dir),
url=str(DEFAULT_URL),
),
)
assert response.status_code == 201, f"Failed to create flake {response.text}"
assert (flake_dir / ".clan-flake").exists()
assert (flake_dir / "flake.nix").exists()
@pytest.mark.impure
def test_create_flake(
monkeypatch: pytest.MonkeyPatch,

View File

@@ -1,92 +0,0 @@
import json
import logging
import pytest
from api import TestClient
from fixtures_flakes import FlakeForTest
from pathlib import Path
from clan_cli.dirs import user_history_file
log = logging.getLogger(__name__)
def test_flake_history_append(
api: TestClient, test_flake: FlakeForTest, temporary_home: Path
) -> None:
response = api.post(
f"/api/flake/history?flake_dir={test_flake.path!s}",
json={},
)
assert response.status_code == 200, response.json()
assert user_history_file().exists()
# def test_flake_history_list(
# api: TestClient, test_flake: FlakeForTest, temporary_home: Path
# ) -> None:
# response = api.get(
# "/api/flake/history",
# )
# assert response.status_code == 200, response.text
# assert response.json() == []
# # add the test_flake
# response = api.post(
# f"/api/flake/history?flake_dir={test_flake.path!s}",
# json={},
# )
# assert response.status_code == 200, response.text
# # list the flakes again
# response = api.get(
# "/api/flake/history",
# )
# assert response.status_code == 200, response.text
# assert response.json() == [str(test_flake.path)]
@pytest.mark.impure
def test_inspect_ok(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
params = {"url": str(test_flake_with_core.path)}
response = api.get(
"/api/flake/attrs",
params=params,
)
assert response.status_code == 200, "Failed to inspect vm"
data = response.json()
print("Data: ", data)
assert data.get("flake_attrs") == ["vm1", "vm2"]
@pytest.mark.impure
def test_inspect_err(api: TestClient) -> None:
params = {"url": "flake-parts"}
response = api.get(
"/api/flake/attrs",
params=params,
)
assert response.status_code != 200, "Succeed to inspect vm but expected to fail"
data = response.json()
print("Data: ", data)
assert data.get("detail")
@pytest.mark.impure
def test_inspect_flake(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
params = {"url": str(test_flake_with_core.path)}
response = api.get(
"/api/flake/inspect",
params=params,
)
assert response.status_code == 200, "Failed to inspect vm"
data = response.json()
print("Data: ", json.dumps(data, indent=2))
assert data.get("content") is not None
actions = data.get("actions")
assert actions is not None
assert len(actions) == 2
assert actions[0].get("id") == "vms/inspect"
assert actions[0].get("uri") == "api/vms/inspect"
assert actions[1].get("id") == "vms/create"
assert actions[1].get("uri") == "api/vms/create"

View File

@ -1,54 +1,13 @@
import json
from typing import TYPE_CHECKING
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from pytest import CaptureFixture
from clan_cli.dirs import user_history_file
from clan_cli.flakes.history import HistoryEntry
if TYPE_CHECKING:
pass
def test_flakes_add(
test_flake: FlakeForTest,
) -> None:
cli = Cli()
cmd = [
"flakes",
"add",
str(test_flake.path),
]
cli.run(cmd)
history_file = user_history_file()
assert history_file.exists()
history = [HistoryEntry(**entry) for entry in json.loads(open(history_file).read())]
assert history[0].path == str(test_flake.path)
def test_flakes_list(
capsys: CaptureFixture,
test_flake: FlakeForTest,
) -> None:
cli = Cli()
cmd = [
"flakes",
"list",
]
cli.run(cmd)
assert str(test_flake.path) not in capsys.readouterr().out
cli.run(["flakes", "add", str(test_flake.path)])
cli.run(cmd)
assert str(test_flake.path) in capsys.readouterr().out
@pytest.mark.impure
def test_flakes_inspect(
test_flake_with_core: FlakeForTest, capsys: pytest.CaptureFixture

View File

@ -0,0 +1,56 @@
import json
from typing import TYPE_CHECKING
import pytest
from cli import Cli
from fixtures_flakes import FlakeForTest
from pytest import CaptureFixture
from clan_cli.clan_uri import ClanParameters, ClanURI
from clan_cli.dirs import user_history_file
from clan_cli.history.add import HistoryEntry
if TYPE_CHECKING:
pass
@pytest.mark.impure
def test_history_add(
test_flake_with_core: FlakeForTest,
) -> None:
cli = Cli()
params = ClanParameters(flake_attr="vm1")
uri = ClanURI.from_path(test_flake_with_core.path, params=params)
cmd = [
"history",
"add",
str(uri),
]
cli.run(cmd)
history_file = user_history_file()
assert history_file.exists()
history = [HistoryEntry(**entry) for entry in json.loads(open(history_file).read())]
assert history[0].flake.flake_url == str(test_flake_with_core.path)
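The history file is plain JSON on disk; a minimal sketch of reading it back outside the test harness, assuming `user_history_file()` returns a `pathlib.Path` as the tests suggest:

```python
import json

from clan_cli.dirs import user_history_file
from clan_cli.history.add import HistoryEntry

# Each JSON object in the history file deserializes into a HistoryEntry,
# mirroring the assertion above.
entries = [
    HistoryEntry(**entry) for entry in json.loads(user_history_file().read_text())
]
for entry in entries:
    print(entry.flake.flake_url)
```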
@pytest.mark.impure
def test_history_list(
capsys: CaptureFixture,
test_flake_with_core: FlakeForTest,
) -> None:
cli = Cli()
params = ClanParameters(flake_attr="vm1")
uri = ClanURI.from_path(test_flake_with_core.path, params=params)
cmd = [
"history",
"list",
]
cli.run(cmd)
assert str(test_flake_with_core.path) not in capsys.readouterr().out
cli.run(["history", "add", str(uri)])
cli.run(cmd)
assert str(test_flake_with_core.path) in capsys.readouterr().out
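Outside of pytest, the same flow maps one-to-one onto the new subcommand; a sketch using the test helper, with an illustrative URI that combines the local-path and query-parameter forms shown above:

```python
from cli import Cli

# Record a clan flake in the user history, then print every recorded flake.
cli = Cli()
cli.run(["history", "add", "clan://~/Downloads/democlan?flake_attr=vm1"])
cli.run(["history", "list"])
```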

View File

@ -1,279 +0,0 @@
import pytest
from api import TestClient
from fixtures_flakes import FlakeForTest
def test_machines(api: TestClient, test_flake: FlakeForTest) -> None:
response = api.get(f"/api/machines?flake_dir={test_flake.path}")
assert response.status_code == 200
assert response.json() == {"machines": []}
response = api.put(
f"/api/machines/test/config?flake_dir={test_flake.path}", json={}
)
assert response.status_code == 200
response = api.get(f"/api/machines/test?flake_dir={test_flake.path}")
assert response.status_code == 200
assert response.json() == {"machine": {"name": "test", "status": "unknown"}}
response = api.get(f"/api/machines?flake_dir={test_flake.path}")
assert response.status_code == 200
assert response.json() == {"machines": [{"name": "test", "status": "unknown"}]}
@pytest.mark.with_core
def test_schema_errors(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
# make sure that eval errors do not raise an internal server error
response = api.put(
f"/api/schema?flake_dir={test_flake_with_core.path}",
json={"imports": ["some-invalid-import"]},
)
assert response.status_code == 422
assert (
"error: string 'some-invalid-import' doesn't represent an absolute path"
in response.json()["detail"][0]["msg"]
)
@pytest.mark.with_core
def test_schema_invalid_clan_imports(
api: TestClient, test_flake_with_core: FlakeForTest
) -> None:
response = api.put(
f"/api/schema?flake_dir={test_flake_with_core.path}",
json={"clanImports": ["non-existing-clan-module"]},
)
assert response.status_code == 400
assert (
"Some requested clan modules could not be found"
in response.json()["detail"]["msg"]
)
def test_create_machine_invalid_hostname(
api: TestClient, test_flake: FlakeForTest
) -> None:
response = api.put(
f"/api/machines/-invalid-hostname/config?flake_dir={test_flake.path}",
json=dict(),
)
assert response.status_code == 422
assert (
"Machine name must be a valid hostname" in response.json()["detail"][0]["msg"]
)
@pytest.mark.with_core
def test_verify_config_without_machine(
api: TestClient, test_flake_with_core: FlakeForTest
) -> None:
response = api.put(
f"/api/machines/test/verify?flake_dir={test_flake_with_core.path}",
json=dict(),
)
assert response.status_code == 200
assert response.json() == {"success": True, "error": None}
@pytest.mark.with_core
def test_ensure_empty_config_is_valid(
api: TestClient, test_flake_with_core: FlakeForTest
) -> None:
response = api.put(
f"/api/machines/test/config?flake_dir={test_flake_with_core.path}",
json=dict(),
)
assert response.status_code == 200
response = api.get(
f"/api/machines/test/verify?flake_dir={test_flake_with_core.path}"
)
assert response.status_code == 200
assert response.json() == {"success": True, "error": None}
@pytest.mark.with_core
def test_configure_machine(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
# ensure error 404 if machine does not exist when accessing the config
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}"
)
assert response.status_code == 404
# create the machine
response = api.put(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}", json={}
)
assert response.status_code == 200
# ensure an empty config is returned by default for a new machine
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}"
)
assert response.status_code == 200
assert response.json() == {
"clanImports": [],
"clan": {},
}
# get jsonschema for without imports
response = api.put(
f"/api/schema?flake_dir={test_flake_with_core.path}",
json={"clanImports": []},
)
assert response.status_code == 200
json_response = response.json()
assert "schema" in json_response and "properties" in json_response["schema"]
# an invalid config setting some non-existent option
invalid_config = dict(
clan=dict(),
foo=dict(
bar=True,
),
services=dict(
nginx=dict(
enable=True,
),
),
)
# verify an invalid config (foo option does not exist)
response = api.put(
f"/api/machines/machine1/verify?flake_dir={test_flake_with_core.path}",
json=invalid_config,
)
assert response.status_code == 200
assert "error: The option `foo' does not exist" in response.json()["error"]
# set some invalid config (foo option does not exist)
response = api.put(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
json=invalid_config,
)
assert response.status_code == 200
# ensure the config has actually been updated
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}"
)
assert response.status_code == 200
assert response.json() == dict(clanImports=[], **invalid_config)
# set some valid config
config2 = dict(
clan=dict(),
services=dict(
nginx=dict(
enable=True,
),
),
)
response = api.put(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
json=config2,
)
assert response.status_code == 200
# ensure the config has been applied
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
)
assert response.status_code == 200
assert response.json() == dict(clanImports=[], **config2)
# get the config again
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}"
)
assert response.status_code == 200
assert response.json() == {"clanImports": [], **config2}
# ensure PUT on the config is idempotent by passing the config again
# For example, this should not result in the boot.loader.grub.devices being
# set twice (eg. merged)
response = api.put(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
json=config2,
)
assert response.status_code == 200
# ensure the config has been applied
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
)
assert response.status_code == 200
assert response.json() == dict(clanImports=[], **config2)
# verify the machine config evaluates
response = api.get(
f"/api/machines/machine1/verify?flake_dir={test_flake_with_core.path}"
)
assert response.status_code == 200
assert response.json() == {"success": True, "error": None}
# get the schema with an extra module imported
response = api.put(
f"/api/schema?flake_dir={test_flake_with_core.path}",
json={"clanImports": ["diskLayouts"]},
)
# expect the resulting schema to contain the diskLayouts option
assert response.status_code == 200
assert (
response.json()["schema"]["properties"]["diskLayouts"]["properties"][
"singleDiskExt4"
]["properties"]["device"]["type"]
== "string"
)
# new config importing an extra clanModule (clanModules.fake-module)
config_with_imports: dict = {
"clanImports": ["fake-module"],
"clan": {
"fake-module": {
"fake-flag": True,
},
},
}
# set the fake-module.fake-flag option to true
response = api.put(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
json=config_with_imports,
)
assert response.status_code == 200
# ensure the config has been applied
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
)
assert response.status_code == 200
assert response.json() == {
"clanImports": ["fake-module"],
"clan": {
"fake-module": {
"fake-flag": True,
},
},
}
# remove the import from the config
response = api.put(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
json=dict(
clanImports=[],
),
)
assert response.status_code == 200
# ensure the config has been applied
response = api.get(
f"/api/machines/machine1/config?flake_dir={test_flake_with_core.path}",
)
assert response.status_code == 200
assert response.json() == {
"clan": {},
"clanImports": [],
}

View File

@ -3,20 +3,25 @@ from cli import Cli
from fixtures_flakes import FlakeForTest
@pytest.mark.impure
def test_machine_subcommands(
test_flake: FlakeForTest, capsys: pytest.CaptureFixture
test_flake_with_core: FlakeForTest, capsys: pytest.CaptureFixture
) -> None:
cli = Cli()
cli.run(["--flake", str(test_flake.path), "machines", "create", "machine1"])
cli.run(
["--flake", str(test_flake_with_core.path), "machines", "create", "machine1"]
)
capsys.readouterr()
cli.run(["--flake", str(test_flake.path), "machines", "list"])
cli.run(["--flake", str(test_flake_with_core.path), "machines", "list"])
out = capsys.readouterr()
assert "machine1\n" == out.out
assert "machine1\nvm1\nvm2\n" == out.out
cli.run(["--flake", str(test_flake.path), "machines", "delete", "machine1"])
cli.run(
["--flake", str(test_flake_with_core.path), "machines", "delete", "machine1"]
)
capsys.readouterr()
cli.run(["--flake", str(test_flake.path), "machines", "list"])
cli.run(["--flake", str(test_flake_with_core.path), "machines", "list"])
out = capsys.readouterr()
assert "" == out.out
assert "vm1\nvm2\n" == out.out

View File

@ -1,10 +0,0 @@
import pytest
from api import TestClient
@pytest.mark.impure
def test_static_files(api: TestClient) -> None:
response = api.get("/")
assert response.headers["content-type"] == "text/html; charset=utf-8"
response = api.get("/does-no-exists.txt")
assert response.status_code == 404

View File

@ -1,64 +0,0 @@
import os
import select
import shutil
import subprocess
import sys
from pathlib import Path
import pytest
from cli import Cli
from ports import PortFunction
@pytest.mark.timeout(10)
def test_start_server(unused_tcp_port: PortFunction, temporary_home: Path) -> None:
Cli()
port = unused_tcp_port()
fifo = temporary_home / "fifo"
os.mkfifo(fifo)
# Create a script called "firefox" in the temporary home directory that
# writes "1" to the fifo. This is used to notify the test that the firefox has been
# started.
notify_script = temporary_home / "firefox"
bash = shutil.which("bash")
assert bash is not None
notify_script.write_text(
f"""#!{bash}
set -x
echo "1" > {fifo}
"""
)
notify_script.chmod(0o700)
# Add the temporary home directory to the PATH so that the script is found
env = os.environ.copy()
env["PATH"] = f"{temporary_home}:{env['PATH']}"
# Add build/src to PYTHONPATH so that the webui module is found in nix sandbox
# TODO: We need a way to make sure things which work in the devshell also work in the sandbox
python_path = env.get("PYTHONPATH")
if python_path:
env["PYTHONPATH"] = f"/build/src:{python_path}"
# breakpoint_container(
# cmd=[sys.executable, "-m", "clan_cli.webui", "--port", str(port)],
# env=env,
# work_dir=temporary_home,
# )
with subprocess.Popen(
[sys.executable, "-m", "clan_cli.webui", "--port", str(port)],
env=env,
stdout=sys.stderr,
stderr=sys.stderr,
text=True,
) as p:
try:
with open(fifo) as f:
r, _, _ = select.select([f], [], [], 10)
assert f in r
assert f.read().strip() == "1"
finally:
p.kill()

View File

@ -11,6 +11,22 @@
}
],
"settings": {
"python.linting.mypyEnabled": true
"python.linting.mypyEnabled": true,
"files.exclude": {
"**/.direnv": true,
"**/.mypy_cache": true,
"**/.ruff_cache": true,
"**/.hypothesis": true,
"**/__pycache__": true,
"**/.reports": true
},
"search.exclude": {
"**/.direnv": true,
"**/.mypy_cache": true,
"**/.ruff_cache": true,
"**/.hypothesis": true,
"**/__pycache__": true,
"**/.reports": true
}
}
}

View File

@ -4,7 +4,7 @@ from pathlib import Path
from typing import Any
import gi
from clan_cli import flakes, vms
from clan_cli import flakes, history, vms
gi.require_version("GdkPixbuf", "2.0")
from gi.repository import GdkPixbuf
@ -73,50 +73,11 @@ class VM:
# start/end indexes can be used optionally for pagination
def get_initial_vms(start: int = 0, end: int | None = None) -> list[VM]:
# vms = [
# VM(
# base=VMBase(
# icon=assets.loc / "cybernet.jpeg",
# name="Cybernet Clan",
# url="clan://cybernet.lol",
# _path=Path(__file__).parent.parent / "test_democlan",
# status=False,
# ),
# ),
# VM(
# base=VMBase(
# icon=assets.loc / "zenith.jpeg",
# name="Zenith Clan",
# url="clan://zenith.lol",
# _path=Path(__file__).parent.parent / "test_democlan",
# status=False,
# )
# ),
# VM(
# base=VMBase(
# icon=assets.loc / "firestorm.jpeg",
# name="Firestorm Clan",
# url="clan://firestorm.lol",
# _path=Path(__file__).parent.parent / "test_democlan",
# status=False,
# ),
# ),
# VM(
# base=VMBase(
# icon=assets.loc / "placeholder.jpeg",
# name="Placeholder Clan",
# url="clan://demo.lol",
# _path=Path(__file__).parent.parent / "test_democlan",
# status=True,
# ),
# ),
# ]
vm_list = []
# TODO: list_history() should return a list of dicts, not a list of paths
# Execute `clan flakes add <path>` to democlan for this to work
for entry in flakes.history.list_history():
for entry in history.list.list_history():
flake_config = flakes.inspect.inspect_flake(entry.path, "defaultVM")
vm_config = vms.inspect.inspect_vm(entry.path, "defaultVM")

View File

@ -3,8 +3,6 @@
./clan-cli/flake-module.nix
./clan-vm-manager/flake-module.nix
./installer/flake-module.nix
./ui/flake-module.nix
./theme/flake-module.nix
];
perSystem = { pkgs, config, lib, ... }: {
@ -17,7 +15,6 @@
pending-reviews = pkgs.callPackage ./pending-reviews { };
nix-unit = pkgs.callPackage ./nix-unit { };
meshname = pkgs.callPackage ./meshname { };
inherit (pkgs.callPackages ./node-packages { }) prettier-plugin-tailwindcss;
} // lib.optionalAttrs pkgs.stdenv.isLinux {
aemu = pkgs.callPackage ./aemu { };
gfxstream = pkgs.callPackage ./gfxstream {

View File

@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@ -1,8 +0,0 @@
{ pkgs, system, nodejs-18_x, makeWrapper }:
let
nodePackages = import ./composition.nix {
inherit pkgs system;
nodejs = nodejs-18_x;
};
in
nodePackages

View File

@ -1,6 +0,0 @@
#!/usr/bin/env nix-shell
#! nix-shell -i bash -p nodePackages.node2nix
# shellcheck shell=bash
rm -f node-env.nix
node2nix -18 -i node-packages.json -o node-packages.nix -c composition.nix

View File

@ -1,689 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View File

@ -1 +0,0 @@
["prettier-plugin-tailwindcss"]

File diff suppressed because it is too large

View File

@ -1,3 +0,0 @@
[*.{js,jsx,ts,tsx,json}]
indent_style = space
indent_size = 4

View File

@ -1,12 +0,0 @@
# Because we depend on nixpkgs sources, uploading to builders takes a long time
source_up
files=(flake-module.nix package.json package-lock.json)
if type nix_direnv_watch_file &>/dev/null; then
nix_direnv_watch_file "${files[@]}"
else
watch_file "${files[@]}"
fi
use flake .#theme --builders ''

pkgs/theme/.gitignore vendored
View File

@ -1,43 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# nix
.floco
src/fonts
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# Generated api code
openapi.json
api/

View File

@ -1,41 +0,0 @@
{ floco
, system
, pkgs
, clanPkgs
}:
let
lib = floco.lib;
pjs =
let
msg = "default.nix: Expected to find `package.json' to lookup " +
"package name/version, but no such file exists at: " +
(toString ./package.json);
in
if builtins.pathExists ./package.json then lib.importJSON ./package.json
else throw msg;
ident = pjs.name;
inherit (pjs) version;
fmod = lib.evalModules {
modules = [
floco.nixosModules.floco
{ config.floco.settings = { inherit system; basedir = ./.; }; }
./nix/floco-cfg.nix
];
specialArgs = {
inherit pkgs clanPkgs;
};
};
# This attrset holds a few derivations related to our package.
# We'll expose these below to the CLI.
pkg = fmod.config.floco.packages.${ident}.${version};
in
{
inherit pkg fmod;
}

View File

@ -1,18 +0,0 @@
{ self, ... }:
{
perSystem = { self', pkgs, ... }:
let
inherit (self.inputs) floco;
base = pkgs.callPackage ./default.nix { inherit floco; clanPkgs = self'.packages; };
in
{
packages = {
theme = base.pkg.global;
};
devShells.theme = pkgs.callPackage ./shell.nix {
inherit pkgs;
inherit (base) fmod pkg;
clanPkgs = self'.packages;
};
};
}

File diff suppressed because it is too large

View File

@ -1,26 +0,0 @@
# ============================================================================ #
#
# Aggregates configs making them available to `default.nix', `flake.nix',
# or other projects that want to consume this module/package as a dependency.
#
# ---------------------------------------------------------------------------- #
{
_file = "theme/nix/floco-cfg.nix";
imports =
let
ifExist = builtins.filter builtins.pathExists [
./pdefs.nix # Generated `pdefs.nix'
./foverrides.nix # Explicit config
];
in
ifExist
++ [
];
}
# ---------------------------------------------------------------------------- #
#
#
#
# ============================================================================ #

View File

@ -1,12 +0,0 @@
{ lib, config, ... }:
let
pjs = lib.importJSON ../package.json;
ident = pjs.name;
inherit (pjs) version;
in
{
config.floco.packages.${ident}.${version} =
{
source = lib.libfloco.cleanLocalSource ../.;
};
}

View File

@ -1,90 +0,0 @@
{
floco = {
pdefs = {
"@clan/colors" = {
"1.0.0" = {
depInfo = {
"@material/material-color-utilities" = {
descriptor = "^0.2.6";
pin = "0.2.7";
};
"@types/node" = {
descriptor = "^20.3.2";
pin = "20.8.2";
};
typescript = {
descriptor = "^5.1.5";
pin = "5.2.2";
};
};
fetchInfo = "path:..";
ident = "@clan/colors";
lifecycle = {
build = true;
};
ltype = "dir";
treeInfo = {
"node_modules/@material/material-color-utilities" = {
dev = true;
key = "@material/material-color-utilities/0.2.7";
};
"node_modules/@types/node" = {
dev = true;
key = "@types/node/20.8.2";
};
"node_modules/typescript" = {
dev = true;
key = "typescript/5.2.2";
};
};
version = "1.0.0";
};
};
"@material/material-color-utilities" = {
"0.2.7" = {
fetchInfo = {
narHash = "sha256-hRYXqtkoXHoB30v1hstWz7dO7dNeBb6EJqZG66hHi94=";
type = "tarball";
url = "https://registry.npmjs.org/@material/material-color-utilities/-/material-color-utilities-0.2.7.tgz";
};
ident = "@material/material-color-utilities";
ltype = "file";
treeInfo = { };
version = "0.2.7";
};
};
"@types/node" = {
"20.8.2" = {
fetchInfo = {
narHash = "sha256-o4hyob1kLnm0OE8Rngm0d6XJxobpMlYSoquusktmLPk=";
type = "tarball";
url = "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz";
};
ident = "@types/node";
ltype = "file";
treeInfo = { };
version = "20.8.2";
};
};
typescript = {
"5.2.2" = {
binInfo = {
binPairs = {
tsc = "bin/tsc";
tsserver = "bin/tsserver";
};
};
fetchInfo = {
narHash = "sha256-io9rXH9RLRLB0484ZdvcqblLQquLFUBGxDuwSixWxus=";
type = "tarball";
url = "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz";
};
ident = "typescript";
ltype = "file";
treeInfo = { };
version = "5.2.2";
};
};
};
};
}

View File

@ -1,63 +0,0 @@
{
"name": "@clan/colors",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@clan/colors",
"version": "1.0.0",
"license": "ISC",
"devDependencies": {
"@material/material-color-utilities": "^0.2.6",
"@types/node": "^20.3.2",
"typescript": "^5.1.5"
}
},
"node_modules/@material/material-color-utilities": {
"version": "0.2.7",
"resolved": "https://registry.npmjs.org/@material/material-color-utilities/-/material-color-utilities-0.2.7.tgz",
"integrity": "sha512-0FCeqG6WvK4/Cc06F/xXMd/pv4FeisI0c1tUpBbfhA2n9Y8eZEv4Karjbmf2ZqQCPUWMrGp8A571tCjizxoTiQ==",
"dev": true
},
"node_modules/@types/node": {
"version": "20.8.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz",
"integrity": "sha512-Vvycsc9FQdwhxE3y3DzeIxuEJbWGDsnrxvMADzTDF/lcdR9/K+AQIeAghTQsHtotg/q0j3WEOYS/jQgSdWue3w==",
"dev": true
},
"node_modules/typescript": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
"integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==",
"dev": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
}
},
"dependencies": {
"@material/material-color-utilities": {
"version": "0.2.7",
"resolved": "https://registry.npmjs.org/@material/material-color-utilities/-/material-color-utilities-0.2.7.tgz",
"integrity": "sha512-0FCeqG6WvK4/Cc06F/xXMd/pv4FeisI0c1tUpBbfhA2n9Y8eZEv4Karjbmf2ZqQCPUWMrGp8A571tCjizxoTiQ==",
"dev": true
},
"@types/node": {
"version": "20.8.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.2.tgz",
"integrity": "sha512-Vvycsc9FQdwhxE3y3DzeIxuEJbWGDsnrxvMADzTDF/lcdR9/K+AQIeAghTQsHtotg/q0j3WEOYS/jQgSdWue3w==",
"dev": true
},
"typescript": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
"integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==",
"dev": true
}
}
}

View File

@ -1,23 +0,0 @@
{
"name": "@clan/colors",
"version": "1.0.0",
"description": "",
"type": "module",
"files": [
"colors.json"
],
"scripts": {
"typecheck": "./node_modules/.bin/tsc -p ./tsconfig.json --noEmit",
"build": "tsc --build --clean && tsc && node ./build/main.js",
"html": "tsc --build --clean && tsc && node ./build/generate.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@material/material-color-utilities": "^0.2.6",
"typescript": "^5.1.5",
"@types/node": "^20.3.2"
}
}

View File

@ -1,24 +0,0 @@
{ fmod
, pkg
, pkgs
, clanPkgs
}:
pkgs.mkShell {
buildInputs = [
fmod.config.floco.settings.nodePackage
];
shellHook = ''
ID=${pkg.built.tree}
currID=$(cat .floco/.node_modules_id 2> /dev/null)
mkdir -p .floco
if [[ "$ID" != "$currID" || ! -d "node_modules" ]];
then
${pkgs.rsync}/bin/rsync -a --chmod=ug+w --delete ${pkg.built.tree}/node_modules/ ./node_modules/
echo -n $ID > .floco/.node_modules_id
echo "floco ok: node_modules updated"
fi
export PATH="$PATH:$(realpath ./node_modules)/.bin"
'';
}

File diff suppressed because it is too large

View File

@ -1,79 +0,0 @@
import { AliasMap, BaseColors, HexString } from "./types.js";
export type PaletteConfig = {
baseColors: BaseColors;
tones: number[];
aliases: AliasMap<"primary" | "secondary" | "error">;
common: {
// Black and white is always constant
// We declare this on the type level
white: "#ffffff";
black: "#000000";
// Some other color constants/reservation
[id: string]: HexString;
};
};
export const config: PaletteConfig = {
/** All color shades that are available
* These colors are used as "key colors" to generate a tonal palette from 0 to 100
* Steps are defined in 'tones'
*/
baseColors: {
neutral: {
keyColor: "#808080",
tones: [2, 5, 8, 92, 95, 98],
},
green: {
keyColor: "#7AC51B",
tones: [2, 98],
},
yellow: {
keyColor: "#E0E01F",
tones: [2, 98],
},
purple: {
keyColor: "#661bc5",
tones: [2, 98],
},
red: {
keyColor: "#e82439",
tones: [95],
},
blue: {
keyColor: "#1B7AC5",
tones: [1, 2, 3, 5, 95, 98],
},
},
/** Common tones to generate out of all the baseColors
* the number equals the amount of light present in the color (HCT Color Space)
*/
tones: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100],
/** create aliases from the color palette
*
* @example
*
* primary: "blue"
* ->
* ...
* primary40 -> blue40
* primary50 -> blue50
* ...
*/
aliases: {
primary: "purple",
secondary: "green",
error: "red",
},
/** some color names are reserved
* typically those colors do not change when switching theme
* or are other types of constant in the UI
*/
common: {
white: "#ffffff",
black: "#000000",
},
};

View File

@ -1,46 +0,0 @@
import { writeFile } from "fs";
import palette from "./colors.json" assert { type: "json" };
import { config } from "./config.js";
type PaletteFile = typeof palette;
const html = (palette: PaletteFile): string => {
const colors = Object.keys(config.baseColors).map((baseName) => {
const colors = Object.entries(palette.ref.palette)
.filter(([name, _]) => name.includes(baseName))
.sort((a, b) => {
return a[1].meta.color.shade - b[1].meta.color.shade;
})
.map(([key, color]) => {
console.log({ key, color });
return `<div style="background-color:${color.value}; color:${
color.meta.color.shade < 48 ? "#fff" : "#000"
}; height: 10rem; border:solid 1px grey; display:grid; place-items:end;">${key}</div>`;
});
return `<div style="display: grid; grid-template-columns: repeat(${13}, minmax(0, 1fr)); gap: 1rem; margin-bottom: 1rem">${colors.join(
"\n",
)}</div>`;
});
return `<!DOCTYPE html>
<html lang="en">
<meta charset="UTF-8">
<title>Page Title</title>
<style>
</style>
<body>
${colors.join("\n")}
</body>
</html>
`;
};
writeFile("index.html", html(palette), (err) => {
if (err) {
console.error({ err });
} else {
console.log("Exported colors to html");
}
});

View File

@ -1,182 +0,0 @@
#!/usr/bin/env node
import * as fs from "fs";
import {
argbFromHex,
Hct,
hexFromArgb,
} from "@material/material-color-utilities";
import {
AliasTokenMap,
ColorDesignToken,
ColorSet,
HexString,
RefTokenSystem,
TonalPalette,
TonalPaletteConfig,
TonalPaletteItem,
} from "./types.js";
import { config } from "./config.js";
const { baseColors, tones, aliases, common } = config;
/** Takes a color, tone and name
* If a tone is given, adjust the lightness level accordingly
*
* @returns TonalPaletteItem (meta wrapper around HCT)
*/
const getTonalPaletteItem = (
value: HexString,
name: string,
tone?: number,
): TonalPaletteItem => {
const aRGB = argbFromHex(value);
const color = Hct.fromInt(aRGB);
if (tone !== undefined) {
color.tone = tone;
}
return {
shade: color.tone,
name: `${name || color.chroma}${Math.round(color.tone)}`,
baseName: name,
value: color,
};
};
/** create a flat list of the cross product from all colors and all tones.
*
* every color is mapped in the range from 0 to 100
* with the steps configure in `config.tones'
* additionally the key color is added unmodified
* lightness levels are rounded to the nearest natural number to form the 'name'
* Example:
*
* "blue" x [20.1, 30.3]
* ->
* [blue20, blue30]
*/
const mkTonalPalette =
(config: TonalPaletteConfig) =>
(name: string) =>
(keyTone: HexString): TonalPalette => {
const { tones } = config;
const aRGB = argbFromHex(keyTone);
const HctColor = Hct.fromInt(aRGB);
const roundedTone = Math.round(HctColor.tone * 100) / 100;
const localTones = [...tones, roundedTone];
return localTones.map((t) => getTonalPaletteItem(keyTone, name, t));
};
/**
* Converts a PaletteItem into a hex color. (Wrapped)
* Adding meta attributes which avoids any information loss.
*/
const toDesignTokenContent = (color: TonalPaletteItem): ColorDesignToken => {
const { value } = color;
return {
type: "color",
value: hexFromArgb(value.toInt()),
meta: {
color,
date: new Date(),
},
};
};
const color: ColorSet = Object.entries(baseColors)
.map(([name, baseColor]) => ({
name,
baseColor,
tones: mkTonalPalette({
tones: [...tones, ...baseColor.tones].sort((a, b) => a - b),
})(name)(baseColor.keyColor),
}))
.reduce((acc, curr) => {
let currTones = curr.tones.reduce(
(o, v) => ({
...o,
[v.name]: toDesignTokenContent(v),
}),
{},
);
return {
...acc,
...currTones,
};
}, {});
/** Generate a set of tokens from a given alias mapping
*
* @param alias A string e.g. Primary -> Blue (Primary is the alias)
* @param name A string; Basename of the referenced value (e.g. Blue)
* @param colors A set of colors
* @returns All aliases from the given color set
*/
function resolveAlias(
alias: string,
name: string,
colors: ColorSet,
): AliasTokenMap {
// All colors from the color map belonging to that single alias
// Example:
// Primary -> "blue"
// =>
// [ (blue0) , (blue10) , ..., (blue100) ]
const all = Object.values(colors)
.filter((n) => n.meta.color.name.includes(name))
.filter((n) => !n.meta.color.name.includes("."));
const tokens = all
.map((shade) => {
const shadeNumber = shade.meta.color.shade;
return {
name: `${alias}${Math.round(shadeNumber)}`,
value: { value: `{ref.palette.${shade.meta.color.name}}` },
// propagate the meta attribute of the actual value
meta: shade.meta,
};
})
// sort by tone
.sort((a, b) => a.meta.color.value.tone - b.meta.color.value.tone)
.reduce((acc, { name, value }) => ({ ...acc, [name]: value }), {});
return tokens;
}
const aliasMap = Object.entries(aliases).reduce(
(prev, [key, value]) => ({
...prev,
...resolveAlias(key, value, color),
}),
{},
);
const commonColors = Object.entries(common)
.map(([name, value]) =>
toDesignTokenContent(getTonalPaletteItem(value, name)),
)
.reduce(
(acc, val) => ({ ...acc, [val.meta.color.baseName]: val }),
{},
) as ColorSet;
const toPaletteToken = (color: ColorSet): RefTokenSystem => ({
ref: {
palette: color,
alias: aliasMap,
common: commonColors,
},
});
// Dump tokens to json file
fs.writeFile(
"colors.json",
JSON.stringify(toPaletteToken(color), null, 2),
(err) => {
if (err) {
console.error({ err });
} else {
console.log("tokens successfully exported");
}
},
);

View File

@ -1,90 +0,0 @@
import { Hct } from "@material/material-color-utilities";
export type BaseColors = {
neutral: BaseColor;
red: BaseColor;
green: BaseColor;
yellow: BaseColor;
purple: BaseColor;
blue: BaseColor;
};
export type BaseColor = {
keyColor: HexString;
tones: number[];
follows?: string;
};
export type ColorSet = { [key: string]: ColorDesignToken };
/** The resolved alias tokens
*
* @example
* {
* primary: "blue"
* ...
* }
*
*/
export type AliasMap<T extends string> = {
[alias in T]: keyof BaseColors;
};
/** The resolved alias tokens
*
* @example
* {
* primary0: "blue40"
* primary10: "blue40"
* ...
* primary100: "blue100"
* }
*
* Unfortunately my TypeScript skills lack the ability to express this type any more narrowly :/
*/
export type AliasTokenMap = {
[alias: string]: { value: string };
};
export type TonalPaletteConfig = {
tones: number[];
};
export type HexString = string;
export type TonalPaletteItem = {
/**
* @example
* 20
*/
shade: number;
/**
* @example
* "blue20"
*/
name: string;
/**
* @example
* "blue"
*/
baseName: string;
value: Hct;
};
export type TonalPalette = TonalPaletteItem[];
export type ColorDesignToken = {
type: "color";
value: HexString;
meta: {
color: TonalPaletteItem;
date: Date;
};
};
export type RefTokenSystem = {
ref: {
palette: ColorSet;
common: ColorSet;
alias: AliasTokenMap;
};
};

View File

@ -1,41 +0,0 @@
{
"include": ["src"],
"compilerOptions": {
"target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
"lib": [
"ESNext",
"dom"
] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
"module": "NodeNext" /* Specify what module code is generated. */,
"rootDir": "src" /* Specify the root folder within your source files. */,
"moduleResolution": "nodenext" /* Specify how TypeScript looks up a file from a given module specifier. */,
"resolveJsonModule": true /* Enable importing .json files. */,
"outDir": "build",
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
/* Type Checking */
"strict": true /* Enable all strict type-checking options. */,
"noImplicitAny": true /* Enable error reporting for expressions and declarations with an implied 'any' type. */,
"strictNullChecks": true /* When type checking, take into account 'null' and 'undefined'. */,
"strictFunctionTypes": true /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */,
"strictBindCallApply": true /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */,
"strictPropertyInitialization": true /* Check for class properties that are declared but not set in the constructor. */,
"noImplicitThis": true /* Enable error reporting when 'this' is given the type 'any'. */,
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
"alwaysStrict": true /* Ensure 'use strict' is always emitted. */,
"noUnusedLocals": true /* Enable error reporting when local variables aren't read. */,
"noUnusedParameters": true /* Raise an error when a function parameter isn't read. */,
"exactOptionalPropertyTypes": true /* Interpret optional property types as written, rather than adding 'undefined'. */,
"noImplicitReturns": true /* Enable error reporting for codepaths that do not explicitly return in a function. */,
"noFallthroughCasesInSwitch": true /* Enable error reporting for fallthrough cases in switch statements. */,
"noUncheckedIndexedAccess": true /* Add 'undefined' to a type when accessed using an index. */,
"noImplicitOverride": true /* Ensure overriding members in derived classes are marked with an override modifier. */,
"noPropertyAccessFromIndexSignature": true /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
}
}

View File

@ -1,12 +0,0 @@
# Because we depend on nixpkgs sources, uploading to builders takes a long time
source_up
files=(../../flake.nix ../theme default.nix flake-module.nix package.json package-lock.json)
if type nix_direnv_watch_file &>/dev/null; then
nix_direnv_watch_file "${files[@]}"
else
watch_file "${files[@]}"
fi
use flake .#ui --builders ''

View File

@ -1,11 +0,0 @@
{
"root": true,
"extends": ["next/core-web-vitals", "plugin:tailwindcss/recommended", "plugin:@typescript-eslint/recommended"],
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"ignorePatterns": ["**/src/api/*"],
"rules": {
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-explicit-any": "off"
}
}

pkgs/ui/.gitignore vendored
View File

@ -1,43 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# nix
.floco
src/fonts
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# Generated api code
openapi.json
api/

View File

@ -1,27 +0,0 @@
# cLan - awesome UI
## Updating dependencies
After changing dependencies with
`npm i <dep> --package-lock-only`
Update floco dependencies:
`nix run github:aakropotkin/floco -- translate -pt -o ./nix/pdefs.nix`
The prettier tailwind class sorting is not yet working properly with our devShell integration.
To sort class names manually:
`cd /clan-core/pkgs/ui/`
## Upload ui to gitea
Create a gitea token here: https://git.clan.lol/user/settings/applications
Then run this command:
```
GITEA_TOKEN=<YOUR_TOKEN> nix run .#update-ui-assets
```

View File

@ -1,38 +0,0 @@
{ floco
, system
, pkgs
}:
let
lib = floco.lib;
pjs =
let
msg = "default.nix: Expected to find `package.json' to lookup " +
"package name/version, but no such file exists at: " +
(toString ./package.json);
in
if builtins.pathExists ./package.json then lib.importJSON ./package.json
else throw msg;
ident = pjs.name;
inherit (pjs) version;
fmod = lib.evalModules {
modules = [
floco.nixosModules.floco
{ config.floco.settings = { inherit system; basedir = ./.; }; }
./nix/floco-cfg.nix
];
specialArgs = { inherit pkgs; };
};
# This attrset holds a few derivations related to our package.
# We'll expose these below to the CLI.
pkg = fmod.config.floco.packages.${ident}.${version};
in
{
inherit pkg fmod;
}

View File

@ -1,20 +0,0 @@
{ self, ... }:
{
perSystem = { pkgs, ... }:
let
inherit (self.inputs) floco;
base = pkgs.callPackage ./default.nix { inherit floco; };
in
{
packages = {
ui = base.pkg.global;
ui-assets = pkgs.callPackage ./nix/ui-assets.nix { };
# EXAMPLE: GITEA_TOKEN=$(rbw get -f GITEA_TOKEN git.clan.lol) nix run .#update-ui-assets
update-ui-assets = pkgs.callPackage ./nix/update-ui-assets.nix { };
};
devShells.ui = pkgs.callPackage ./shell.nix {
inherit pkgs;
inherit (base) fmod pkg;
};
};
}
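Given the outputs declared above, the standard flake commands should be enough to exercise them; a quick sketch, using only attribute names that appear in the module:
```
# build the exported UI bundle
nix build .#ui

# enter the UI development shell
nix develop .#ui

# rebuild and upload the assets tarball (see update-ui-assets.sh below)
GITEA_TOKEN=<YOUR_TOKEN> nix run .#update-ui-assets
```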

View File

@ -1,10 +0,0 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
output: "export",
images: { unoptimized: true },
eslint: {
dirs: ["src"],
},
};
module.exports = nextConfig;

View File

@ -1,7 +0,0 @@
{
imports = [
./pdefs.nix
./foverrides.nix
../../theme/nix/floco-cfg.nix
];
}

View File

@ -1,114 +0,0 @@
{ lib, config, pkgs, ... }:
let
pjs =
let
msg = "foverrides.nix: Expected to find `package.json' to lookup " +
"package name/version, but no such file exists at: " +
(toString ../package.json);
in
if builtins.pathExists ../package.json then lib.importJSON ../package.json
else throw msg;
ident = pjs.name;
inherit (pjs) version;
# ---------------------------------------------------------------------------- #
# example: "13.4.2"
nextVersion = builtins.head (builtins.attrNames (lib.filterAttrs (name: _attrs: name == "next") config.floco.pdefs).next);
# we must change the precompiled swc binary depending on the current system.
# example: "@next/swc-linux-x64-gnu"
swcArch = {
"x86_64-linux" = "@next/swc-linux-x64-gnu";
"aarch64-linux" = "@next/swc-linux-arm64-gnu";
"x86_64-darwin" = "@next/swc-darwin-x64";
"aarch64-darwin" = "@next/swc-darwin-arm64";
}.${config.floco.settings.system};
esbuildVersions = lib.filterAttrs (name: _attrs: name == "esbuild") config.floco.pdefs;
highestEsbuildVersion = lib.last (builtins.attrNames esbuildVersions.esbuild);
esbuildArch = {
# esbuild-linux-64
"x86_64-linux" = "esbuild-linux-64";
"x86_64-darwin" = "esbuild-darwin-64";
"aarch64-darwin" = "esbuild-darwin-arm64";
"aarch64-linux" = "esbuild-linux-arm64";
}.${config.floco.settings.system};
in
{
config.floco.packages.esbuild =
builtins.mapAttrs
(
version: _attrs: {
installed.override.copyTree = true;
installed.tree = config.floco.packages.${esbuildArch}.${version}.global;
}
)
esbuildVersions.esbuild;
# ---------------------------------------------------------------------------- #
config.floco.packages.${ident}.${version} =
let
cfg = config.floco.packages.${ident}.${version};
in
{
# ---------------------------------------------------------------------------- #
# Removes any `*.nix' files as well as `node_modules/' and
# `package-lock.json' from the source tree before using them in builds.
source = lib.libfloco.cleanLocalSource ../.;
# nextjs writes in node_modules
built.override.copyTree = true;
# nextjs caches some stuff in $HOME
built.override.preBuild = ''
export HOME=./home
echo "----------- GENERATE API TS ------------"
cp ${../../clan-cli/clan_cli/webui/openapi.json} openapi.json
./node_modules/.bin/orval
ln -fs ${pkgs.roboto}/share/fonts ./src/
echo "----------- RUNNING LINT ------------"
next lint --max-warnings 0
'';
built.tree =
let
customOverrides = cfg.trees.dev.overrideAttrs (prev: {
treeInfo =
prev.treeInfo
// {
"node_modules/${swcArch}" = {
key = "${swcArch}/${nextVersion}";
link = false;
optional = false;
dev = true;
};
"node_modules/${esbuildArch}" = {
key = "${esbuildArch}/${highestEsbuildVersion}";
link = false;
optional = false;
dev = true;
};
"node_modules/@clan/colors" = {
key = "@clan/colors/1.0.0";
link = false;
optional = false;
dev = true;
};
};
});
in
lib.mkForce customOverrides;
};
}

File diff suppressed because it is too large

View File

@ -1,5 +0,0 @@
{ fetchzip }:
fetchzip {
url = "https://git.clan.lol/api/packages/clan/generic/ui/0w48mjn2gdd102p3r875hcd0lbm5hrzk1jy01r637sy733qzk32j/assets.tar.gz";
sha256 = "0w48mjn2gdd102p3r875hcd0lbm5hrzk1jy01r637sy733qzk32j";
}

View File

@ -1,18 +0,0 @@
{ writeShellApplication
, curl
, nix
, gnutar
, gitMinimal
, coreutils
}:
writeShellApplication {
name = "update-ui-assets";
runtimeInputs = [
curl
nix
gnutar
gitMinimal
coreutils
];
text = builtins.readFile ./update-ui-assets.sh;
}

View File

@ -1,45 +0,0 @@
# shellcheck shell=bash
set -xeuo pipefail
# GITEA_TOKEN
if [[ -z "${GITEA_TOKEN:-}" ]]; then
echo "GITEA_TOKEN is not set"
echo "Go to https://git.clan.lol/user/settings/applications and generate a token"
exit 1
fi
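# make tar, curl and gzip from nixpkgs available on PATH for the steps below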
DEPS=$(nix shell --inputs-from '.#' "nixpkgs#gnutar" "nixpkgs#curl" "nixpkgs#gzip" -c bash -c "echo \$PATH")
export PATH=$PATH:$DEPS
PROJECT_DIR=$(git rev-parse --show-toplevel)
tmpdir=$(mktemp -d)
cleanup() { rm -rf "$tmpdir"; }
trap cleanup EXIT
nix build '.#ui' --out-link "$tmpdir/result"
tar --transform 's,^\.,assets,' -czvf "$tmpdir/assets.tar.gz" -C "$tmpdir"/result/lib/node_modules/*/out .
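# nix-prefetch-url --unpack hashes the unpacked archive (the sha256 fetchzip expects); the file:// URL reads the tarball via process substitution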
NAR_HASH=$(nix-prefetch-url --unpack file://<(cat "$tmpdir/assets.tar.gz"))
url="https://git.clan.lol/api/packages/clan/generic/ui/$NAR_HASH/assets.tar.gz"
set +x
curl --upload-file "$tmpdir/assets.tar.gz" -X PUT "$url?token=$GITEA_TOKEN"
set -x
TEST_URL=$(nix-prefetch-url --unpack "$url")
if [[ $TEST_URL != "$NAR_HASH" ]]; then
echo "Prefetch failed. Expected $NAR_HASH, got $TEST_URL"
exit 1
fi
cat > "$PROJECT_DIR/pkgs/ui/nix/ui-assets.nix" <<EOF
{ fetchzip }:
fetchzip {
url = "$url";
sha256 = "$NAR_HASH";
}
EOF

View File

@ -1,16 +0,0 @@
const config = {
clan: {
output: {
mode: "tags-split",
target: "src/api",
schemas: "src/api/model",
client: "swr",
// mock: true,
},
input: {
target: "./openapi.json",
},
},
};
export default config;
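This config drives the typed API client generation: it reads the OpenAPI schema from `./openapi.json` and writes an swr-based client, split by tags, into `src/api` (the directory ignored by eslint and git above). The preBuild hook in foverrides.nix invokes the generator directly:
```
# regenerate the API client from openapi.json, as done in the nix build
./node_modules/.bin/orval
```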

8366 pkgs/ui/package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,53 +0,0 @@
{
"name": "clan-ui",
"version": "0.1.0",
"private": false,
"files": [
"./out"
],
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
},
"dependencies": {
"@emotion/react": "^11.11.1",
"@emotion/styled": "^11.11.0",
"@mui/icons-material": "^5.14.3",
"@mui/material": "^5.14.3",
"@rjsf/core": "^5.12.1",
"@rjsf/mui": "^5.12.1",
"@rjsf/validator-ajv8": "^5.12.1",
"@types/json-schema": "^7.0.12",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"autoprefixer": "10.4.14",
"axios": "^1.4.0",
"classnames": "^2.3.2",
"hex-rgb": "^5.0.0",
"next": "13.4.12",
"postcss": "8.4.27",
"pretty-bytes": "^6.1.1",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-hook-form": "^7.45.4",
"react-hot-toast": "^2.4.1",
"recharts": "^2.7.3",
"swr": "^2.2.1",
"tailwindcss": "3.3.3"
},
"devDependencies": {
"@types/node": "20.4.7",
"@types/react": "18.2.18",
"@types/react-dom": "18.2.7",
"@types/w3c-web-usb": "^1.0.6",
"esbuild": "^0.15.18",
"eslint": "^8.46.0",
"eslint-config-next": "13.4.12",
"eslint-plugin-tailwindcss": "^3.13.0",
"orval": "^6.17.0",
"prettier": "^3.0.1",
"prettier-plugin-tailwindcss": "^0.4.1",
"typescript": "5.1.6"
}
}

View File

@ -1,6 +0,0 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
};

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M255.9 120.9l9.1-15.7c5.6-9.8 18.1-13.1 27.9-7.5 9.8 5.6 13.1 18.1 7.5 27.9l-87.5 151.5h63.3c20.5 0 32 24.1 23.1 40.8H113.8c-11.3 0-20.4-9.1-20.4-20.4 0-11.3 9.1-20.4 20.4-20.4h52l66.6-115.4-20.8-36.1c-5.6-9.8-2.3-22.2 7.5-27.9 9.8-5.6 22.2-2.3 27.9 7.5l8.9 15.7zm-78.7 218l-19.6 34c-5.6 9.8-18.1 13.1-27.9 7.5-9.8-5.6-13.1-18.1-7.5-27.9l14.6-25.2c16.4-5.1 29.8-1.2 40.4 11.6zm168.9-61.7h53.1c11.3 0 20.4 9.1 20.4 20.4 0 11.3-9.1 20.4-20.4 20.4h-29.5l19.9 34.5c5.6 9.8 2.3 22.2-7.5 27.9-9.8 5.6-22.2 2.3-27.9-7.5-33.5-58.1-58.7-101.6-75.4-130.6-17.1-29.5-4.9-59.1 7.2-69.1 13.4 23 33.4 57.7 60.1 104zM256 8C119 8 8 119 8 256s111 248 248 248 248-111 248-248S393 8 256 8zm216 248c0 118.7-96.1 216-216 216-118.7 0-216-96.1-216-216 0-118.7 96.1-216 216-216 118.7 0 216 96.1 216 216z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M96 48L82.7 61.3C70.7 73.3 64 89.5 64 106.5V238.9c0 10.7 5.3 20.7 14.2 26.6l10.6 7c14.3 9.6 32.7 10.7 48.1 3l3.2-1.6c2.6-1.3 5-2.8 7.3-4.5l49.4-37c6.6-5 15.7-5 22.3 0c10.2 7.7 9.9 23.1-.7 30.3L90.4 350C73.9 361.3 64 380 64 400H384l28.9-159c2.1-11.3 3.1-22.8 3.1-34.3V192C416 86 330 0 224 0H83.8C72.9 0 64 8.9 64 19.8c0 7.5 4.2 14.3 10.9 17.7L96 48zm24 68a20 20 0 1 1 40 0 20 20 0 1 1 -40 0zM22.6 473.4c-4.2 4.2-6.6 10-6.6 16C16 501.9 26.1 512 38.6 512H409.4c12.5 0 22.6-10.1 22.6-22.6c0-6-2.4-11.8-6.6-16L384 432H64L22.6 473.4z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 640 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M524.531,69.836a1.5,1.5,0,0,0-.764-.7A485.065,485.065,0,0,0,404.081,32.03a1.816,1.816,0,0,0-1.923.91,337.461,337.461,0,0,0-14.9,30.6,447.848,447.848,0,0,0-134.426,0,309.541,309.541,0,0,0-15.135-30.6,1.89,1.89,0,0,0-1.924-.91A483.689,483.689,0,0,0,116.085,69.137a1.712,1.712,0,0,0-.788.676C39.068,183.651,18.186,294.69,28.43,404.354a2.016,2.016,0,0,0,.765,1.375A487.666,487.666,0,0,0,176.02,479.918a1.9,1.9,0,0,0,2.063-.676A348.2,348.2,0,0,0,208.12,430.4a1.86,1.86,0,0,0-1.019-2.588,321.173,321.173,0,0,1-45.868-21.853,1.885,1.885,0,0,1-.185-3.126c3.082-2.309,6.166-4.711,9.109-7.137a1.819,1.819,0,0,1,1.9-.256c96.229,43.917,200.41,43.917,295.5,0a1.812,1.812,0,0,1,1.924.233c2.944,2.426,6.027,4.851,9.132,7.16a1.884,1.884,0,0,1-.162,3.126,301.407,301.407,0,0,1-45.89,21.83,1.875,1.875,0,0,0-1,2.611,391.055,391.055,0,0,0,30.014,48.815,1.864,1.864,0,0,0,2.063.7A486.048,486.048,0,0,0,610.7,405.729a1.882,1.882,0,0,0,.765-1.352C623.729,277.594,590.933,167.465,524.531,69.836ZM222.491,337.58c-28.972,0-52.844-26.587-52.844-59.239S193.056,219.1,222.491,219.1c29.665,0,53.306,26.82,52.843,59.239C275.334,310.993,251.924,337.58,222.491,337.58Zm195.38,0c-28.971,0-52.843-26.587-52.843-59.239S388.437,219.1,417.871,219.1c29.667,0,53.307,26.82,52.844,59.239C470.715,310.993,447.538,337.58,417.871,337.58Z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 416 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M397.9 160H256V19.6L397.9 160zM304 192v130c0 66.8-36.5 100.1-113.3 100.1H96V84.8h94.7c12 0 23.1.8 33.1 2.5v-84C212.9 1.1 201.4 0 189.2 0H0v512h189.2C329.7 512 400 447.4 400 318.1V192h-96z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M503.52,241.48c-.12-1.56-.24-3.12-.24-4.68v-.12l-.36-4.68v-.12a245.86,245.86,0,0,0-7.32-41.15c0-.12,0-.12-.12-.24l-1.08-4c-.12-.24-.12-.48-.24-.6-.36-1.2-.72-2.52-1.08-3.72-.12-.24-.12-.6-.24-.84-.36-1.2-.72-2.4-1.08-3.48-.12-.36-.24-.6-.36-1-.36-1.2-.72-2.28-1.2-3.48l-.36-1.08c-.36-1.08-.84-2.28-1.2-3.36a8.27,8.27,0,0,0-.36-1c-.48-1.08-.84-2.28-1.32-3.36-.12-.24-.24-.6-.36-.84-.48-1.2-1-2.28-1.44-3.48,0-.12-.12-.24-.12-.36-1.56-3.84-3.24-7.68-5-11.4l-.36-.72c-.48-1-.84-1.8-1.32-2.64-.24-.48-.48-1.08-.72-1.56-.36-.84-.84-1.56-1.2-2.4-.36-.6-.6-1.2-1-1.8s-.84-1.44-1.2-2.28c-.36-.6-.72-1.32-1.08-1.92s-.84-1.44-1.2-2.16a18.07,18.07,0,0,0-1.2-2c-.36-.72-.84-1.32-1.2-2s-.84-1.32-1.2-2-.84-1.32-1.2-1.92-.84-1.44-1.32-2.16a15.63,15.63,0,0,0-1.2-1.8L463.2,119a15.63,15.63,0,0,0-1.2-1.8c-.48-.72-1.08-1.56-1.56-2.28-.36-.48-.72-1.08-1.08-1.56l-1.8-2.52c-.36-.48-.6-.84-1-1.32-1-1.32-1.8-2.52-2.76-3.72a248.76,248.76,0,0,0-23.51-26.64A186.82,186.82,0,0,0,412,62.46c-4-3.48-8.16-6.72-12.48-9.84a162.49,162.49,0,0,0-24.6-15.12c-2.4-1.32-4.8-2.52-7.2-3.72a254,254,0,0,0-55.43-19.56c-1.92-.36-3.84-.84-5.64-1.2h-.12c-1-.12-1.8-.36-2.76-.48a236.35,236.35,0,0,0-38-4H255.14a234.62,234.62,0,0,0-45.48,5c-33.59,7.08-63.23,21.24-82.91,39-1.08,1-1.92,1.68-2.4,2.16l-.48.48H124l-.12.12.12-.12a.12.12,0,0,0,.12-.12l-.12.12a.42.42,0,0,1,.24-.12c14.64-8.76,34.92-16,49.44-19.56l5.88-1.44c.36-.12.84-.12,1.2-.24,1.68-.36,3.36-.72,5.16-1.08.24,0,.6-.12.84-.12C250.94,20.94,319.34,40.14,367,85.61a171.49,171.49,0,0,1,26.88,32.76c30.36,49.2,27.48,111.11,3.84,147.59-34.44,53-111.35,71.27-159,24.84a84.19,84.19,0,0,1-25.56-59,74.05,74.05,0,0,1,6.24-31c1.68-3.84,13.08-25.67,18.24-24.59-13.08-2.76-37.55,2.64-54.71,28.19-15.36,22.92-14.52,58.2-5,83.28a132.85,132.85,0,0,1-12.12-39.24c-12.24-82.55,43.31-153,94.31-170.51-27.48-24-96.47-22.31-147.71,15.36-29.88,22-51.23,53.16-62.51,90.36,1.68-20.88,9.6-52.08,25.8-83.88-17.16,8.88-39,37-49.8,62.88-15.6,37.43-21,82.19-16.08,124.79.36,3.24.72,6.36,1.08,9.6,19.92,117.11,122,206.38,244.78,206.38C392.77,503.42,504,392.19,504,255,503.88,250.48,503.76,245.92,503.52,241.48Z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 640 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M274.9 34.3c-28.1-28.1-73.7-28.1-101.8 0L34.3 173.1c-28.1 28.1-28.1 73.7 0 101.8L173.1 413.7c28.1 28.1 73.7 28.1 101.8 0L413.7 274.9c28.1-28.1 28.1-73.7 0-101.8L274.9 34.3zM200 224a24 24 0 1 1 48 0 24 24 0 1 1 -48 0zM96 200a24 24 0 1 1 0 48 24 24 0 1 1 0-48zM224 376a24 24 0 1 1 0-48 24 24 0 1 1 0 48zM352 200a24 24 0 1 1 0 48 24 24 0 1 1 0-48zM224 120a24 24 0 1 1 0-48 24 24 0 1 1 0 48zm96 328c0 35.3 28.7 64 64 64H576c35.3 0 64-28.7 64-64V256c0-35.3-28.7-64-64-64H461.7c11.6 36 3.1 77-25.4 105.5L320 413.8V448zM480 328a24 24 0 1 1 0 48 24 24 0 1 1 0-48z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M64 112c-8.8 0-16 7.2-16 16v22.1L220.5 291.7c20.7 17 50.4 17 71.1 0L464 150.1V128c0-8.8-7.2-16-16-16H64zM48 212.2V384c0 8.8 7.2 16 16 16H448c8.8 0 16-7.2 16-16V212.2L322 328.8c-38.4 31.5-93.7 31.5-132 0L48 212.2zM0 128C0 92.7 28.7 64 64 64H448c35.3 0 64 28.7 64 64V384c0 35.3-28.7 64-64 64H64c-35.3 0-64-28.7-64-64V128z"/></svg>


View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><!--! Font Awesome Pro 6.4.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M96 0C43 0 0 43 0 96V352c0 48 35.2 87.7 81.1 94.9l-46 46C28.1 499.9 33.1 512 43 512H82.7c8.5 0 16.6-3.4 22.6-9.4L160 448H288l54.6 54.6c6 6 14.1 9.4 22.6 9.4H405c10 0 15-12.1 7.9-19.1l-46-46c46-7.1 81.1-46.9 81.1-94.9V96c0-53-43-96-96-96H96zM64 96c0-17.7 14.3-32 32-32H352c17.7 0 32 14.3 32 32v96c0 17.7-14.3 32-32 32H96c-17.7 0-32-14.3-32-32V96zM224 288a48 48 0 1 1 0 96 48 48 0 1 1 0-96z"/></svg>


Some files were not shown because too many files have changed in this diff