NEXT Bootstrapping on non-NixOS
Use nix to run the ingestfiles and generator commands to stand up a system. Core for Rebuild of The Complete Computer.
Running on The Wobserver, self-hosting Arcology with The Arroyo Generators
Package building is handled in the Arcology Project Scaffolding .
Deployment declaration is in the Arcology Project Configuration .
NixOS module
provide it in the flake.nix too...
We need two services — the watchsync server and a uvicorn web server — plus a virtual host.
nix source: :tangle ~/arroyo-nix/nixos/arcology2-module.nix
{ lib, config, pkgs, ... }:

with pkgs;
with lib;

let
  cfg = config.services.arcology-ng;

  # Process environment handed to every Arcology service and to the CLI
  # wrapper. Might want to generate a localsettings.py or so to configure the
  # application for deployment, rather than use process env.
  env = {
    ARCOLOGY_ENVIRONMENT = cfg.environment;
    ARCOLOGY_BASE_DIR = cfg.orgDir;
    ARCOLOGY_STATIC_ROOT = cfg.staticRoot;
    ARCOLOGY_DB_PATH = "${cfg.dataDir}/databases/arcology2.db";
    ARCOLOGY_DB_WRITE_PATH = "${cfg.dataDir}/databases/arcology2-writable.db";
    ARCOLOGY_ALLOWED_HOSTS = concatStringsSep "," cfg.domains;
    ARCOLOGY_LOG_LEVEL = cfg.logLevel;
    ARCOLOGY_CACHE_PATH = cfg.cacheDir;
    PROMETHEUS_MULTIPROC_DIR = cfg.multiProcDir;
    UVICORN_PORT = "${toString cfg.port}";
    UVICORN_HOST = cfg.address;
    WEB_CONCURRENCY = "${toString cfg.workerCount}";
    OLLAMA_HOST = cfg.ollamaHost;
  };

  # Python environment used to launch uvicorn/workers with the arcology
  # package importable.
  pyenv = cfg.packages.python3.withPackages (pp: [ cfg.packages.arcology ]);

  # CLI wrapper: exports the deployment env (each variable overridable from
  # the caller's environment), sources the secrets file, then execs the
  # arcology management command.
  # FIX: the exported values are quoted — ARCOLOGY_ALLOWED_HOSTS and friends
  # may contain shell metacharacters, and an unquoted expansion would break.
  wrapperScript = pkgs.writeScriptBin "arcology" ''
    set -eEuo pipefail
    ${lib.concatStrings (lib.mapAttrsToList (name: value: "export ${name}=\"\${${name}:-${value}}\"\n") env)}
    source ${cfg.environmentFile}
    export ARCOLOGY_SYNCTHING_KEY
    export ARCOLOGY_LOCALAPI_BEARER_TOKEN
    exec ${cfg.packages.arcology}/bin/arcology "$@"
  '';

  # serviceConfig shared by all three systemd units.
  commonSvcConfig = {
    Type = "simple";
    User = "arcology";
    Group = "arcology";
    WorkingDirectory = cfg.dataDir;
    EnvironmentFile = cfg.environmentFile;
    Restart = "on-failure";
    RestartSec = 5;
    RestartSteps = 10;
    RestartMaxDelaySec = "1min";
    # hardening...
  };

  # Systemd services, redis, and the activation script that prepares the
  # static root and state directories.
  svcConfig = {
    environment.systemPackages = [ wrapperScript ];

    system.activationScripts.arcology-collectfiles.text = ''
      echo "Setting up Arcology static files"
      ARCOLOGY_STATIC_ROOT=${cfg.staticRoot} ${cfg.packages.arcology}/bin/arcology collectstatic --no-input -c -v0
      echo "Ensuring Arcology directories exist"
      mkdir -p ${cfg.dataDir} ${cfg.multiProcDir} ${cfg.cacheDir}
      chown arcology:arcology ${cfg.dataDir} ${cfg.multiProcDir} ${cfg.cacheDir}
      chmod o+x ${cfg.dataDir}
    '';

    # Watches the Syncthing folder and ingests org changes; also owns the
    # migrate/seed steps so the schema exists before any service touches it.
    systemd.services.arcology2-watchsync = {
      description = "Arcology Django Syncthing Watcher";
      after = [ "network.target" ];
      wantedBy = [ "multi-user.target" ];
      environment = env;
      preStart = ''
        ${cfg.packages.arcology}/bin/arcology migrate --database default
        ${cfg.packages.arcology}/bin/arcology migrate --database writable
        ${cfg.packages.arcology}/bin/arcology seed || true
      '';
      script = ''
        ${cfg.packages.arcology}/bin/arcology watchsync -f ${cfg.folderId}
      '';
      serviceConfig = {} // commonSvcConfig;
    };

    # The ASGI web frontend. preStart clears stale prometheus multiproc
    # metric files left over from the previous generation of workers.
    systemd.services.arcology2-web = {
      description = "Arcology Django Uvicorn";
      after = [ "network.target" ];
      wantedBy = [ "multi-user.target" ];
      environment = env;
      preStart = ''
        find ${cfg.multiProcDir} -type f -delete
      '';
      script = ''
        ${pyenv}/bin/python -m uvicorn arcology.asgi:application
      '';
      serviceConfig = {} // commonSvcConfig;
    };

    # Background channel worker for the LLM bot queue.
    systemd.services.arcology2-llmbot = {
      description = "Arcology Django Bot Channel";
      after = [ "network.target" ];
      wantedBy = [ "multi-user.target" ];
      environment = env;
      script = ''
        ${pyenv}/bin/python -m arcology runworker llm-bot
      '';
      serviceConfig = {} // commonSvcConfig;
    };

    services.redis.servers.arcology.enable = true;
    services.redis.servers.arcology.port = 6379;
  };

  # nginx virtual host (first domain is primary, the rest are aliases) and a
  # prometheus scrape job pointed at the uvicorn port.
  domainVHosts = {
    services.nginx.virtualHosts."${head cfg.domains}" = mkIf (cfg.enable && cfg.generateVirtualHosts) {
      serverAliases = tail cfg.domains;
      locations."~ ^/~(.+?)(/.*)?$".extraConfig = ''
        index index.html index.htm;
        alias /home/$1/public_html$2;
        autoindex on;
      '';
      locations."/".proxyPass = "http://${cfg.address}:${toString cfg.port}";
      locations."/".extraConfig = ''
        limit_req zone=ip_based burst=10;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        # XXX in my case we want to hardcode this because it may already have SSL stripped and the response is through TS, for now...
        # proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Proto "https";
        proxy_set_header X-Forwarded-Host $http_host;
        proxy_set_header Host $host;
      '';
      locations."/static/".alias = cfg.staticRoot;
      locations."/media/".alias = "${cfg.cacheDir}/media/";
      addSSL = true;
      useACMEHost = "fontkeming.fail";
      # sslCertificate = "/var/lib/nginx/certs/fontkeming.fail_cert.pem";
      # sslCertificateKey = "/var/lib/nginx/certs/fontkeming.fail_key.pem";
    };
    services.prometheus.scrapeConfigs = [{
      job_name = "arcology";
      static_configs = [{
        targets = [ "localhost:${toString config.services.arcology-ng.port}" ];
      }];
    }];
  };

  # Service user/group with a fixed uid/gid of 900.
  userConfig = {
    ids.uids.arcology = 900;
    ids.gids.arcology = 900;
    users.users.arcology = {
      group = "arcology";
      home = cfg.dataDir;
      createHome = true;
      homeMode = "0711";
      shell = "${bash}/bin/bash";
      isSystemUser = true;
      uid = config.ids.uids.arcology;
    };
    users.groups.arcology = {
      gid = config.ids.gids.arcology;
    };
  };
in
{
  options = {
    services.arcology-ng = {
      enable = mkEnableOption "arcology-ng";
      packages.arcology = mkOption {
        type = types.package;
        description = mdDoc ''
          The Arcology application package providing the `arcology` management command.
        '';
      };
      packages.python3 = mkOption {
        type = types.package;
        description = mdDoc ''
          Python interpreter used to run uvicorn and the channel workers.
        '';
        default = pkgs.python312;
      };
      domains = mkOption {
        type = types.listOf types.str;
        description = mdDoc ''
          Domains to serve; the first is the primary nginx virtual host, the rest become server aliases.
        '';
      };
      address = mkOption {
        type = types.str;
        default = "localhost";
        description = lib.mdDoc "Web interface address.";
      };
      port = mkOption {
        type = types.port;
        default = 29543;
        description = lib.mdDoc "Web interface port.";
      };
      environment = mkOption {
        type = types.enum [ "production" "development" ];
        default = "production";
        description = lib.mdDoc "Deployment environment passed to the application as ARCOLOGY_ENVIRONMENT.";
      };
      workerCount = mkOption {
        type = types.number;
        default = 16;
        description = lib.mdDoc "uvicorn worker count; they recommend 2-4 workers per core.";
      };
      generateVirtualHosts = mkOption {
        type = types.bool;
        default = true;
        description = lib.mdDoc "control whether nginx virtual hosts should be created";
      };
      dataDir = mkOption {
        type = types.path;
        default = "/var/lib/arcology";
        description = mdDoc ''
          Directory to store Arcology cache files, database, etc. Service User's home directory.
        '';
      };
      ollamaHost = mkOption {
        type = types.str;
        default = config.services.ollama.listenAddress;
        # FIX: was `mkDoc`, which does not exist in lib and broke doc evaluation.
        description = mdDoc ''
          IP Address or host name of the Ollama host
        '';
      };
      logLevel = mkOption {
        type = types.enum [ "ERROR" "WARN" "INFO" "DEBUG" ];
        default = "INFO";
        description = mdDoc ''
          Set the Django root logging level
        '';
      };
      environmentFile = mkOption {
        type = types.path;
        default = "${cfg.dataDir}/env";
        description = mdDoc ''
          A file containing environment variables you may not want to put in
          the nix store. For example, you could put a syncthing key and a
          bearer token for the Local API in there:

          ARCOLOGY_SYNCTHING_KEY=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
          ARCOLOGY_LOCALAPI_BEARER_TOKEN=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
        '';
      };
      staticRoot = mkOption {
        type = types.path;
        default = "/var/lib/arcology/static/";
        description = ''
          Location where django-manage collectstatic will write the files to.
          If you let this module generate nginx virtualhosts it will be
          configured to use that for static files. Ensure this ends with a
          forward slash.
        '';
      };
      orgDir = mkOption {
        type = types.path;
        description = mdDoc ''
          Directory containing the org-mode documents. Arcology needs
          read-only access to this directory.
        '';
      };
      folderId = mkOption {
        type = types.str;
        description = mdDoc ''
          Syncthing folder ID containing the org files.
        '';
      };
      cacheDir = mkOption {
        type = types.path;
        default = "${cfg.dataDir}/cache/";
        description = mdDoc ''
          Location to cache HTML files and the like.
        '';
      };
      multiProcDir = mkOption {
        type = types.path;
        default = "${cfg.dataDir}/metrics/";
        description = mdDoc ''
          Location where prometheus will cache metrics to be coalesced on all
          workers. See
          https://github.com/korfuri/django-prometheus/blob/master/documentation/exports.md
        '';
      };
    };
  };

  config = mkIf cfg.enable (mkMerge [
    svcConfig
    domainVHosts
    # prevent double-definition of user entities from previous service's manifest
    # yanked directly from arcology-fastapi
    userConfig
  ]);
}
DONE finish this
DONE validate the environment variables are used
NEXT consider generating a local settings.py with our configuration overrides
NEXT figure out better way to call in to Arcology and Arroyo in the service definition
probably just getFlake but augh.
NEXT service hardening
DONE static files under gunicorn/nginx
DONE secret infrastructure for the syncthing key
or a way to load that in to the DB 🤔