# bosun — build a Nomad job deployment CLI as a Nix-generated bash script.
#
# Part of a flake-parts module that repos import to declare hosts, jobs, and
# secrets. The Nushell CLI (rigging) aggregates multiple repos and provides
# unified management: host deploy/build, job run/plan/stop, secret list/rekey.
#
# Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
{
  # Package set; provides writeShellScriptBin and the pinned tools
  # (${pkgs.nix}, ${pkgs.nomad}, ${pkgs.jq}) referenced in the script body.
  pkgs,
  # Nixpkgs library; defaults to the lib shipped with pkgs.
  lib ? pkgs.lib,
  # Job name -> job definition; the attr names become the CLI's job list.
  jobs ? {},
  # Job name -> module function; inspected with builtins.functionArgs to
  # decide (at build time) whether a job accepts runtime variables.
  jobModules ? {},
  # Job name -> pre-compiled JSON file (store path); served directly when
  # the user passes no variables.
  compiledJobs ? {},
  # NOTE(review): appears unused in this script — presumably kept for
  # interface compatibility with sibling modules; confirm before removing.
  nomadLib ? {},
  # Default Nomad API address baked into the script; the runtime
  # $NOMAD_ADDR environment variable takes precedence.
  nomadAddress ? "http://127.0.0.1:4646",
  # Optional fixed flake reference. When null, the script auto-detects the
  # flake by walking up from $PWD (overridable via $BOSUN_FLAKE).
  flakeRef ? null,
  # NOTE(review): appears unused in this script — TODO confirm against the
  # importing module before removing.
  defaultVars ? {},
}:
# Emit the `bosun` executable: a bash wrapper around nix/nomad/jq.
pkgs.writeShellScriptBin "bosun" ''
set -euo pipefail

# Colors (ANSI escape sequences; intended for use with `echo -e`).
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Runtime NOMAD_ADDR wins over the build-time default, and is exported so
# the pinned nomad binary picks it up.
NOMAD_ADDR="''${NOMAD_ADDR:-${nomadAddress}}"
export NOMAD_ADDR
|
# Locate the enclosing flake by walking parent directories from $PWD.
# Prints the directory containing flake.nix, or an empty line when no
# flake.nix exists on the path up to /.
detect_flake_ref() {
  local cur
  cur="$PWD"
  until [[ "$cur" == "/" ]]; do
    if [[ -f "$cur/flake.nix" ]]; then
      echo "$cur"
      return 0
    fi
    cur="$(dirname "$cur")"
  done
  echo ""
}
|
# Set flake reference.
# If a flakeRef was baked in at build time, use it (still overridable via
# $BOSUN_FLAKE). Otherwise fall back to $BOSUN_FLAKE, then auto-detection;
# fail hard when neither yields a flake.
${if flakeRef != null then ''
FLAKE_REF="''${BOSUN_FLAKE:-${flakeRef}}"
'' else ''
if [[ -n "''${BOSUN_FLAKE:-}" ]]; then
FLAKE_REF="$BOSUN_FLAKE"
else
FLAKE_REF="$(detect_flake_ref)"
if [[ -z "$FLAKE_REF" ]]; then
echo "Error: Could not find flake.nix. Set BOSUN_FLAKE or run from within a flake." >&2
exit 1
fi
fi
''}
|
# Available jobs: names baked in at build time from the `jobs` attr names.
# NOTE(review): names are splatted unquoted into a bash array — assumes job
# names contain no whitespace or glob characters; confirm upstream.
JOBS=(${lib.concatStringsSep " " (lib.attrNames jobs)})
|
# Check if a job accepts variables.
# job_accepts_vars NAME — prints "true" when the job's Nix module function
# declares a `vars` argument, "false" otherwise. The answer is computed at
# build time via builtins.functionArgs and baked in as case arms.
# NOTE(review): job names are interpolated directly as case patterns —
# assumes they are shell-safe; confirm upstream.
job_accepts_vars() {
  local job="$1"
  # Jobs that accept vars will have 'vars' in their function args
  # We check this at build time and encode it here
  case "$job" in
    ${lib.concatStringsSep "\n    " (lib.mapAttrsToList (name: mod:
      let
        args = builtins.functionArgs mod;
        hasVars = args ? vars;
      in "${name}) echo ${if hasVars then "true" else "false"} ;;"
    ) jobModules)}
    *) echo "false" ;;
  esac
}
|
# Print CLI help text, including the job list with parameterized markers.
# Reads the globals NOMAD_ADDR, FLAKE_REF, JOBS and the color variables.
usage() {
  echo "bosun - Nomad job deployment tool"
  echo ""
  echo "Usage: bosun <command> [options] [job-name]"
  echo ""
  echo "Commands:"
  echo "  list                 List all available jobs"
  echo "  compile <job>        Compile a job to JSON (stdout)"
  echo "  inspect <job>        Show compiled job with syntax highlighting"
  echo "  run <job>            Compile and deploy a job to Nomad"
  echo "  dispatch [job]       Dispatch a parameterized job (list if no job given)"
  echo "  plan <job>           Plan a job deployment (dry-run)"
  echo "  stop <job>           Stop a running job"
  echo "  status [job]         Show job status"
  echo "  logs <job> [task]    Show job logs"
  echo "  generate [dir]       Generate all job files to directory"
  echo ""
  echo "Options:"
  echo "  -v, --var KEY=VALUE  Set a variable (can be repeated)"
  echo "  -m, --meta KEY=VALUE Set dispatch metadata (for dispatch command)"
  echo "  --dry-run            Show what would be done without executing"
  echo "  --verbose            Verbose output"
  echo "  --help, -h           Show this help message"
  echo ""
  echo "Environment:"
  echo "  NOMAD_ADDR           Nomad server address (default: $NOMAD_ADDR)"
  echo "  BOSUN_FLAKE          Flake reference (default: $FLAKE_REF)"
  echo ""
  echo "Available jobs:"
  for job in "''${JOBS[@]}"; do
    if [[ "$(job_accepts_vars "$job")" == "true" ]]; then
      # BUGFIX: was plain `echo`, which printed the \033 color codes
      # literally; use -e like cmd_list does.
      echo -e "  - $job ''${CYAN}(parameterized)''${NC}"
    else
      echo "  - $job"
    fi
  done
}
|
# Colored log helpers. Each takes a single message argument; log_error
# writes to stderr, the rest to stdout.
log_info()    { echo -e "''${BLUE}→''${NC} $1"; }
log_success() { echo -e "''${GREEN}✓''${NC} $1"; }
log_warn()    { echo -e "''${YELLOW}⚠''${NC} $1"; }
log_error()   { echo -e "''${RED}✗''${NC} $1" >&2; }
|
# job_exists NAME — succeed (return 0) iff NAME is one of the baked-in JOBS.
job_exists() {
  local candidate="$1" entry
  for entry in "''${JOBS[@]}"; do
    if [[ "$entry" == "$candidate" ]]; then
      return 0
    fi
  done
  return 1
}
|
# Get pre-compiled job file path (for jobs without runtime vars).
# get_static_job_file NAME — prints the store path of NAME's pre-compiled
# JSON (case arms generated at build time from `compiledJobs`), or the
# empty string when the job must be evaluated dynamically.
get_static_job_file() {
  local job="$1"
  case "$job" in
    ${lib.concatStringsSep "\n    " (lib.mapAttrsToList (name: file: ''${name}) echo "${file}" ;;'') compiledJobs)}
    *) echo "" ;;
  esac
}
|
# Compile job with variables using nix eval.
# compile_job_with_vars JOB VARS_JSON — evaluates the flake's
# legacyPackages.<system>.bosun.evalJobWithVars with the decoded variables
# and prints the resulting job JSON on stdout.
# NOTE(review): $job and $vars_json are spliced by the shell into the Nix
# --expr string; a value containing Nix string delimiters (e.g. two
# consecutive single quotes) would break or inject into the expression —
# confirm inputs are trusted/sanitized upstream.
compile_job_with_vars() {
  local job="$1"
  shift
  local vars_json="$1"

  # Progress goes to stderr so stdout stays pure JSON for callers.
  log_info "Evaluating job with variables..." >&2

  # Use nix eval to get the job JSON with variables.
  # --impure is needed for builtins.currentSystem and getFlake on a path.
  ${pkgs.nix}/bin/nix eval \
  --json \
  --impure \
  --expr "
  let
  flake = builtins.getFlake \"$FLAKE_REF\";
  system = builtins.currentSystem;
  vars = builtins.fromJSON '''$vars_json''';
  in
  flake.legacyPackages.\''${system}.bosun.evalJobWithVars \"$job\" vars
  "
}
|
# get_job_json JOB VARS_JSON — print the JSON for a job. When no variables
# were supplied ("{}") and a pre-compiled file exists, serve it directly;
# otherwise fall through to a dynamic nix evaluation.
get_job_json() {
  local name="$1" vars="$2"

  if [[ "$vars" == "{}" ]]; then
    local prebuilt
    prebuilt="$(get_static_job_file "$name")"
    if [[ -n "$prebuilt" ]]; then
      cat "$prebuilt"
      return 0
    fi
  fi

  compile_job_with_vars "$name" "$vars"
}
|
# `bosun list` — print every job, marking parameterized ones in color.
cmd_list() {
  echo "Available jobs:"
  local j
  for j in "''${JOBS[@]}"; do
    if [[ "$(job_accepts_vars "$j")" == "true" ]]; then
      echo -e "  $j ''${CYAN}(parameterized)''${NC}"
    else
      echo "  $j"
    fi
  done
}
|
# `bosun compile <job>` — emit the compiled job JSON on stdout.
cmd_compile() {
  local name="$1" vars="$2"

  if ! job_exists "$name"; then
    log_error "Unknown job: $name"
    echo "Available jobs: ''${JOBS[*]}"
    exit 1
  fi

  get_job_json "$name" "$vars"
}
|
# `bosun inspect <job>` — pretty-print the compiled job JSON.
cmd_inspect() {
  local job="$1"
  local vars_json="$2"

  if ! job_exists "$job"; then
    log_error "Unknown job: $job"
    exit 1
  fi

  local job_json
  job_json="$(get_job_json "$job" "$vars_json")"

  # Use the store-pinned jq (as the rest of the script does) instead of
  # probing $PATH — jq is always available, so output is deterministic.
  echo "$job_json" | ${pkgs.jq}/bin/jq '.'
}
|
# `bosun plan <job>` — run `nomad job plan` (a server-side dry run).
# Note: nomad plan exits non-zero when allocations would change, which
# propagates to the script's exit status under `set -e`.
cmd_plan() {
  local name="$1" vars="$2" dry="$3"

  if ! job_exists "$name"; then
    log_error "Unknown job: $name"
    exit 1
  fi

  log_info "Planning job: $name"

  if [[ "$dry" == "true" ]]; then
    log_warn "DRY RUN - would execute: nomad job plan <job.json>"
    if [[ "$vars" != "{}" ]]; then
      log_info "Variables: $vars"
    fi
    return 0
  fi

  local payload
  payload="$(get_job_json "$name" "$vars")"

  echo "$payload" | ${pkgs.nomad}/bin/nomad job plan -
}
|
# `bosun run <job>` — compile a job and submit it to Nomad. In dry-run
# mode, show what would be submitted instead of submitting it.
cmd_run() {
  local name="$1" vars="$2" dry="$3"

  if ! job_exists "$name"; then
    log_error "Unknown job: $name"
    exit 1
  fi

  log_info "Deploying job: $name"
  log_info "Nomad address: $NOMAD_ADDR"
  if [[ "$vars" != "{}" ]]; then
    log_info "Variables: $vars"
  fi

  if [[ "$dry" == "true" ]]; then
    log_warn "DRY RUN - would execute: nomad job run <job.json>"
    echo ""
    log_info "Job definition:"
    cmd_inspect "$name" "$vars"
    return 0
  fi

  local payload
  payload="$(get_job_json "$name" "$vars")"

  echo "$payload" | ${pkgs.nomad}/bin/nomad job run -
  log_success "Job $name deployed successfully"
}
|
# `bosun stop <job>` — stop a running Nomad job.
cmd_stop() {
  local name="$1" dry="$2"

  log_info "Stopping job: $name"

  if [[ "$dry" == "true" ]]; then
    log_warn "DRY RUN - would execute: nomad job stop $name"
    return 0
  fi

  ${pkgs.nomad}/bin/nomad job stop "$name"
  log_success "Job $name stopped"
}
|
# `bosun status [job]` — show status for one job, or all jobs when no
# name is given.
cmd_status() {
  local name="''${1:-}"

  if [[ -n "$name" ]]; then
    ${pkgs.nomad}/bin/nomad job status "$name"
  else
    ${pkgs.nomad}/bin/nomad job status
  fi
}
|
# `bosun logs <job> [task]` — show allocation logs for a job, optionally
# narrowed to a single task.
cmd_logs() {
  local name="$1" task="''${2:-}"

  if [[ -n "$task" ]]; then
    ${pkgs.nomad}/bin/nomad alloc logs -job "$name" "$task"
  else
    ${pkgs.nomad}/bin/nomad alloc logs -job "$name"
  fi
}
|
# Get parameterized job info.
# get_parameterized_info JOB — prints "not_parameterized", or a single
# line of the form "required:a,b|optional:c,d" listing the dispatch
# metadata keys.
# NOTE(review): assumes the compiled JSON has the shape
# {job: {<name>: {parameterized: {meta_required, meta_optional}}}} —
# verify against the job compiler's output schema (Nomad's own API JSON
# uses capitalized keys like ParameterizedJob).
get_parameterized_info() {
  local job="$1"
  local job_json
  job_json="$(get_job_json "$job" "{}")"

  # Extract parameterized block if it exists
  echo "$job_json" | ${pkgs.jq}/bin/jq -r '
  .job | to_entries[0].value.parameterized // {} |
  if . == {} then
  "not_parameterized"
  else
  "required:" + (.meta_required // [] | join(",")) +
  "|optional:" + (.meta_optional // [] | join(","))
  end
  '
}
|
# `bosun dispatch [job]` — dispatch a parameterized job with -m metadata.
# With no job argument, lists all parameterized jobs and their required /
# optional metadata keys. Reads the global META associative array filled
# by the -m/--meta option parser.
cmd_dispatch() {
  local job="''${1:-}"
  local dry_run="$2"

  # If no job specified, list parameterized jobs
  if [[ -z "$job" ]]; then
    echo "Parameterized jobs available for dispatch:"
    echo ""
    local found_any=false
    for j in "''${JOBS[@]}"; do
      local info
      info="$(get_parameterized_info "$j")"
      if [[ "$info" != "not_parameterized" ]]; then
        found_any=true
        # info has the form "required:a,b|optional:c,d" (see
        # get_parameterized_info); split out each comma-joined list.
        local required=$(echo "$info" | cut -d'|' -f1 | cut -d':' -f2)
        local optional=$(echo "$info" | cut -d'|' -f2 | cut -d':' -f2)

        echo -e "  ''${GREEN}$j''${NC}"
        if [[ -n "$required" ]]; then
          echo -e "    Required: ''${YELLOW}$required''${NC}"
        fi
        if [[ -n "$optional" ]]; then
          echo -e "    Optional: ''${CYAN}$optional''${NC}"
        fi
        echo ""
      fi
    done

    if [[ "$found_any" == "false" ]]; then
      echo "  No parameterized jobs found."
      echo ""
      echo "  To create a parameterized job, add a 'parameterized' block:"
      echo "    job.myjob.parameterized = {"
      echo "      meta_required = [\"username\"];"
      echo "      meta_optional = [\"password\"];"
      echo "    };"
    fi
    echo ""
    echo "Usage: bosun dispatch <job> -m key=value [-m key2=value2 ...]"
    return 0
  fi

  if ! job_exists "$job"; then
    log_error "Unknown job: $job"
    exit 1
  fi

  # Check if job is parameterized; dispatching a non-parameterized job
  # would be rejected by Nomad anyway, so fail early with a hint.
  local info
  info="$(get_parameterized_info "$job")"
  if [[ "$info" == "not_parameterized" ]]; then
    log_error "Job '$job' is not parameterized. Use 'bosun run $job' instead."
    exit 1
  fi

  log_info "Dispatching job: $job"

  if [[ "$dry_run" == "true" ]]; then
    log_warn "DRY RUN - would execute: nomad job dispatch $job"
    if [[ ''${#META[@]} -gt 0 ]]; then
      for key in "''${!META[@]}"; do
        echo "  -meta $key=''${META[$key]}"
      done
    fi
    return 0
  fi

  # Build nomad dispatch command: one -meta KEY=VALUE pair per META entry.
  local dispatch_args=()
  for key in "''${!META[@]}"; do
    dispatch_args+=("-meta" "$key=''${META[$key]}")
  done

  ${pkgs.nomad}/bin/nomad job dispatch "''${dispatch_args[@]}" "$job"
  log_success "Job $job dispatched successfully"
}
|
# `bosun generate [dir]` — write every job's compiled JSON into a
# directory (default ./generated), one <job>.nomad.json per job.
cmd_generate() {
  local outdir="''${1:-./generated}" vars_json="$2"

  mkdir -p "$outdir"
  log_info "Generating jobs to $outdir"

  local name payload
  for name in "''${JOBS[@]}"; do
    payload="$(get_job_json "$name" "$vars_json")"
    printf '%s\n' "$payload" > "$outdir/$name.nomad.json"
    chmod 644 "$outdir/$name.nomad.json"
    log_success "$name.nomad.json"
  done

  echo ""
  log_success "Generated ''${#JOBS[@]} jobs in $outdir"
}
|
# Parse arguments.
#   DRY_RUN / VERBOSE - boolean option flags
#   COMMAND           - first bare word; later bare words collect in ARGS
#   VARS / META       - associative maps filled by -v/--var and -m/--meta
DRY_RUN=false
VERBOSE=false
COMMAND=""
ARGS=()
declare -A VARS
declare -A META

while [[ $# -gt 0 ]]; do
  case "$1" in
    -v|--var)
      if [[ $# -lt 2 ]]; then
        log_error "Missing value for $1"
        exit 1
      fi
      # Parse KEY=VALUE (key may not contain '='; value may be empty).
      if [[ "$2" =~ ^([^=]+)=(.*)$ ]]; then
        VARS["''${BASH_REMATCH[1]}"]="''${BASH_REMATCH[2]}"
      else
        log_error "Invalid variable format: $2 (expected KEY=VALUE)"
        exit 1
      fi
      shift 2
      ;;
    -m|--meta)
      if [[ $# -lt 2 ]]; then
        log_error "Missing value for $1"
        exit 1
      fi
      # Parse KEY=VALUE (same shape as -v above).
      if [[ "$2" =~ ^([^=]+)=(.*)$ ]]; then
        META["''${BASH_REMATCH[1]}"]="''${BASH_REMATCH[2]}"
      else
        log_error "Invalid metadata format: $2 (expected KEY=VALUE)"
        exit 1
      fi
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --verbose)
      VERBOSE=true
      shift
      ;;
    --help|-h)
      usage
      exit 0
      ;;
    -*)
      log_error "Unknown option: $1"
      usage
      exit 1
      ;;
    *)
      # First bare word is the command; the rest are its arguments.
      if [[ -z "$COMMAND" ]]; then
        COMMAND="$1"
      else
        ARGS+=("$1")
      fi
      shift
      ;;
  esac
done
|
# Convert VARS to JSON: serialize the associative array into a single-line
# JSON object consumed by compile_job_with_vars.
VARS_JSON="{"
first=true
for key in "''${!VARS[@]}"; do
  if [[ "$first" == "true" ]]; then
    first=false
  else
    VARS_JSON+=","
  fi
  # Escape for JSON. BUGFIX: previously only backslash and double-quote
  # were escaped, so values containing newlines/tabs/CRs produced invalid
  # JSON; escape those control characters too, and escape the key as well.
  value="''${VARS[$key]}"
  value="''${value//\\/\\\\}"
  value="''${value//\"/\\\"}"
  value="''${value//$'\n'/\\n}"
  value="''${value//$'\t'/\\t}"
  value="''${value//$'\r'/\\r}"
  k="$key"
  k="''${k//\\/\\\\}"
  k="''${k//\"/\\\"}"
  VARS_JSON+="\"$k\":\"$value\""
done
VARS_JSON+="}"
|
# A command is mandatory: with none given, print help and fail.
[[ -n "$COMMAND" ]] || { usage; exit 1; }
|
# Guard used by the commands below that require a job-name argument.
require_job_arg() {
  if [[ ''${#ARGS[@]} -lt 1 ]]; then
    log_error "Missing job name"
    exit 1
  fi
}

# Dispatch to the cmd_* implementation for the parsed COMMAND.
case "$COMMAND" in
  list)
    cmd_list
    ;;
  compile)
    require_job_arg
    cmd_compile "''${ARGS[0]}" "$VARS_JSON"
    ;;
  inspect)
    require_job_arg
    cmd_inspect "''${ARGS[0]}" "$VARS_JSON"
    ;;
  plan)
    require_job_arg
    cmd_plan "''${ARGS[0]}" "$VARS_JSON" "$DRY_RUN"
    ;;
  run)
    require_job_arg
    cmd_run "''${ARGS[0]}" "$VARS_JSON" "$DRY_RUN"
    ;;
  stop)
    require_job_arg
    cmd_stop "''${ARGS[0]}" "$DRY_RUN"
    ;;
  status)
    cmd_status "''${ARGS[0]:-}"
    ;;
  logs)
    require_job_arg
    cmd_logs "''${ARGS[0]}" "''${ARGS[1]:-}"
    ;;
  generate)
    cmd_generate "''${ARGS[0]:-./generated}" "$VARS_JSON"
    ;;
  dispatch)
    cmd_dispatch "''${ARGS[0]:-}" "$DRY_RUN"
    ;;
  *)
    log_error "Unknown command: $COMMAND"
    usage
    exit 1
    ;;
esac
''