--- /dev/null
+#!/usr/bin/env bash
+# developer/tool/compile — build gasket into developer/machine/<arch> (no sudo)
+set -euo pipefail
+SELF_DIR="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)"
+REPO_HOME="$(CDPATH= cd -- "$SELF_DIR/../.." && pwd)"
+
+raw="$(uname -m | tr '[:upper:]' '[:lower:]')"
+case "$raw" in
+ amd64|x64) arch="x86_64" ;; x86_64) arch="x86_64" ;;
+ i386|i486|i586|i686) arch="i686" ;;
+ arm64|aarch64) arch="aarch64" ;;
+ armv7l) arch="armv7l" ;; armv6l) arch="armv6l" ;;
+ riscv64) arch="riscv64" ;; ppc64le|powerpc64le) arch="ppc64le" ;;
+ s390x) arch="s390x" ;; *) arch="$raw" ;;
+esac
+
+SRC="${REPO_HOME}/developer/source/Man_In_Grey_apply.c"
+OUT_DIR="${REPO_HOME}/developer/machine/${arch}"
+OUT="${OUT_DIR}/man_in_grey_apply"
+
+mkdir -p "$OUT_DIR"
+cc -O2 -Wall -Wextra -Wl,-z,relro -Wl,-z,now -fstack-protector-strong -o "$OUT" "$SRC"
+echo "built: $OUT"
# developer/tool/release — stage the current build into <repo>/release (no privilege changes)
set -euo pipefail
-# Resolve repo root from this script’s location: $REPO_HOME/developer/tool/release
-SELF_DIR="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)"
-REPO_HOME="$(CDPATH= cd -- "$SELF_DIR/../.." && pwd)"
-REL_DIR="${REPO_HOME}/release"
-
-# Normalize arch (matches Man_In_Grey.py/Man_In_Grey wrapper)
-raw="$(uname -m | tr '[:upper:]' '[:lower:]')"
-case "$raw" in
- amd64|x64) arch="x86_64" ;;
- x86_64) arch="x86_64" ;;
- i386|i486|i586|i686) arch="i686" ;;
- arm64|aarch64) arch="aarch64" ;;
- armv7l) arch="armv7l" ;;
- armv6l) arch="armv6l" ;;
- riscv64) arch="riscv64" ;;
- ppc64le|powerpc64le) arch="ppc64le" ;;
- s390x) arch="s390x" ;;
- *) arch="$raw" ;;
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+REPO_HOME="$(cd "$(dirname "$script_afp")/../.." && pwd -P)"
+
+# --- arch normalize (same mapping as orchestrator/wrapper) ---
+arch_raw=$(uname -m | tr '[:upper:]' '[:lower:]')
+case "$arch_raw" in
+ amd64|x64) arch="x86_64" ;;
+ x86_64) arch="x86_64" ;;
+ i386|i486|i586|i686) arch="i686" ;;
+ arm64|aarch64) arch="aarch64" ;;
+ armv7l) arch="armv7l" ;;
+ armv6l) arch="armv6l" ;;
+ riscv64) arch="riscv64" ;;
+ ppc64le|powerpc64le) arch="ppc64le" ;;
+ s390x) arch="s390x" ;;
+ *) arch="$arch_raw" ;;
esac
-# Locations
-BUILD_DIR="${REPO_HOME}/developer/build/${arch}"
+# --- inputs/outputs ---
SRC_DIR="${REPO_HOME}/developer/source"
+MACHINE_DIR="${REPO_HOME}/developer/machine/${arch}"
-DEST_ARCH_DIR="${REL_DIR}/${arch}"
-DEST_PY_DIR="${REL_DIR}/python3"
-DEST_SH_DIR="${REL_DIR}/shell"
+REL_DIR="${REPO_HOME}/release"
+REL_ARCH="${REL_DIR}/${arch}"
+REL_PY="${REL_DIR}/python3"
+REL_SH="${REL_DIR}/shell"
-# Inputs
-GASKET_SRC="${BUILD_DIR}/man_in_grey_apply"
+GASKET_SRC="${MACHINE_DIR}/man_in_grey_apply"
PY_ORCH_SRC="${SRC_DIR}/Man_In_Grey.py"
PY_INNER_SRC="${SRC_DIR}/executor_inner.py"
PY_PLANNER_SRC="${SRC_DIR}/Planner.py"
-WRAP_SRC="${SRC_DIR}/Man_In_Grey" # shell wrapper
-
-# Sanity
-[[ -f "$PY_ORCH_SRC" ]] || { echo "error: missing $PY_ORCH_SRC" >&2; exit 2; }
-[[ -f "$PY_INNER_SRC" ]] || { echo "error: missing $PY_INNER_SRC" >&2; exit 2; }
-[[ -f "$PY_PLANNER_SRC" ]] || { echo "error: missing $PY_PLANNER_SRC" >&2; exit 2; }
-[[ -f "$WRAP_SRC" ]] || { echo "error: missing $WRAP_SRC (shell wrapper)" >&2; exit 2; }
+WRAP_SRC="${SRC_DIR}/Man_In_Grey" # shell entrypoint
-# Gasket is optional for unprivileged testing; warn if not present
-if [[ ! -x "$GASKET_SRC" ]]; then
- echo "warn: gasket not found for arch ${arch}: $GASKET_SRC"
- echo " (unprivileged apply will fall back to python inner)"
-fi
+# --- sanity checks ---
+[[ -x "$GASKET_SRC" ]] || { echo "error: missing gasket for ${arch}: $GASKET_SRC (run: compile)"; exit 2; }
+[[ -f "$PY_ORCH_SRC" ]] || { echo "error: missing $PY_ORCH_SRC"; exit 2; }
+[[ -f "$PY_INNER_SRC" ]] || { echo "error: missing $PY_INNER_SRC"; exit 2; }
+[[ -f "$PY_PLANNER_SRC" ]] || { echo "error: missing $PY_PLANNER_SRC"; exit 2; }
+[[ -f "$WRAP_SRC" ]] || { echo "error: missing $WRAP_SRC (shell wrapper)"; exit 2; }
-# Create dest dirs
-mkdir -p "$DEST_ARCH_DIR" "$DEST_PY_DIR" "$DEST_SH_DIR"
+# --- create destinations ---
+mkdir -p "$REL_ARCH" "$REL_PY" "$REL_SH"
-# Stage Python bits
-install -m 0755 "$PY_ORCH_SRC" "$DEST_PY_DIR/Man_In_Grey.py"
-install -m 0755 "$PY_INNER_SRC" "$DEST_PY_DIR/executor_inner.py"
-install -m 0644 "$PY_PLANNER_SRC" "$DEST_PY_DIR/Planner.py"
+# --- stage artifacts (no ownership or setuid flips here) ---
+install -m 0755 "$GASKET_SRC" "${REL_ARCH}/man_in_grey_apply"
-# Stage wrapper
-install -m 0755 "$WRAP_SRC" "$DEST_SH_DIR/Man_In_Grey"
+install -m 0755 "$PY_ORCH_SRC" "${REL_PY}/Man_In_Grey.py"
+install -m 0755 "$PY_INNER_SRC" "${REL_PY}/executor_inner.py"
+install -m 0644 "$PY_PLANNER_SRC" "${REL_PY}/Planner.py"
-# Stage gasket (no setuid/owner changes here)
-if [[ -x "$GASKET_SRC" ]]; then
- install -m 0755 "$GASKET_SRC" "$DEST_ARCH_DIR/man_in_grey_apply"
-fi
+install -m 0755 "$WRAP_SRC" "${REL_SH}/Man_In_Grey"
-echo "release staged to: $REL_DIR"
-echo " arch : $arch"
-echo " py : $(realpath "$DEST_PY_DIR")"
-echo " shell: $(realpath "$DEST_SH_DIR")"
-[[ -x "$GASKET_SRC" ]] && echo " gasket: $(realpath "$DEST_ARCH_DIR/man_in_grey_apply")"
+echo "released to: ${REL_DIR}"
+echo " arch : ${REL_ARCH}/man_in_grey_apply"
+echo " py : ${REL_PY}/"
+echo " shell: ${REL_SH}/Man_In_Grey"
#!/usr/bin/env bash
+# toolsmith-owned tester environment file
+
script_afp=$(realpath "${BASH_SOURCE[0]}")
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
echo "$script_afp:: This script must be sourced, not executed."
exit 1
fi
-export ROLE=tester
+# shared, project-wide
source tool_shared/bespoke/env
+export ROLE=tester
+export ENV=$ROLE
+
+# tester-local tool dir first (if any)
+if [[ -d "$REPO_HOME/$ROLE/tool" ]]; then
+ PATH="$REPO_HOME/$ROLE/tool:$PATH"
+fi
+
+# shared Python (from toolsmith-provided venv)
if [[ ":$PATH:" != *":$PYTHON_HOME/bin:"* ]]; then
- export PATH="$PYTHON_HOME/bin:$PATH"
+ PATH="$PYTHON_HOME/bin:$PATH"
fi
+export PATH
-cd $ROLE
-export ENV=$ROLE
+export RELEASE="$REPO_HOME/release"
+
+cd "$ROLE"
+
+# pull in tester customizations (optional file)
+[[ -f tool/env ]] && source tool/env "$@"
+echo "in environment: $ENV"
return cand if cand.is_file() else None
def _apply_via_gasket(cbor_bytes: bytes ,apply_cmd: Path ,args)-> int:
- cmd = [str(apply_cmd)]
+ cmd = [
+ str(apply_cmd)
+ ,"--plan" ,"-"
+ ]
if args.phase_2_print: cmd.append("--phase-2-print")
if args.phase_2_then_stop: cmd.append("--phase-2-then-stop")
- # fine-grained gates (optional pass-through if gasket proxies them)
if args.phase_2_wellformed_then_stop: cmd.append("--phase-2-wellformed-then-stop")
if args.phase_2_sanity1_then_stop: cmd.append("--phase-2-sanity1-then-stop")
if args.phase_2_validity_then_stop: cmd.append("--phase-2-validity-then-stop")
print(f"error: CBOR encode failed: {e}" ,file=sys.stderr)
return 2
- # Prefer gasket; else fall back to Python inner
+ # Always use the gasket under release/<arch>/man_in_grey_apply (or explicit --apply-cmd)
apply_cmd = Path(args.apply_cmd).resolve() if args.apply_cmd else (_find_apply_cmd(repo_root) or None)
- if apply_cmd:
- try:
- return _apply_via_gasket(cbor_bytes ,apply_cmd ,args)
- except Exception as e:
- print(f"error: apply-cmd failed: {e}" ,file=sys.stderr)
- return 2
+ if not apply_cmd:
+ print("error: gasket not found; build/release first (release/<arch>/man_in_grey_apply)", file=sys.stderr)
+ return 2
+
+ try:
+ return _apply_via_gasket(cbor_bytes ,apply_cmd ,args)
+ except Exception as e:
+ print(f"error: apply-cmd failed: {e}" ,file=sys.stderr)
+ return 2
inner_py = Path(args.inner_py).resolve() if args.inner_py else (_find_inner_py(repo_root) or None)
if inner_py:
--- /dev/null
+#!/usr/bin/env -S python3 -B
+"""
+Planner.py — plan builder for staged configuration (UNPRIVILEGED).
+
+Given: runner-side provenance (PlanProvenance) and optional defaults (WriteFileMeta).
+Does: expose Planner whose command methods (copy/displace/delete) build Command entries,
+ resolving arguments with precedence: kwarg > per-call WriteFileMeta > planner default
+ (and for filename, fallback to provenance-derived basename). On any argument error,
+ the Command is returned with errors and NOT appended to the Journal.
+Returns: Journal (model only; dict in/out) via planner.journal().
+"""
+
+from __future__ import annotations
+
+# no bytecode anywhere (works under sudo/root shells too)
+import sys ,os
+sys.dont_write_bytecode = True
+os.environ.setdefault("PYTHONDONTWRITEBYTECODE" ,"1")
+
+from pathlib import Path
+import getpass
+
+# ===== Utilities =====
+
+def norm_perm(value: int|str)-> tuple[int ,str]|None:
+ "Given int or 3/4-char octal string (optionally 0o-prefixed). Does validate/normalize. Returns (int ,'%04o') or None."
+ if isinstance(value ,int):
+ if 0 <= value <= 0o7777:
+ return value ,f"{value:04o}"
+ return None
+ if isinstance(value ,str):
+ s = value.strip().lower()
+ if s.startswith("0o"):
+ try:
+ v = int(s ,8)
+ return v ,f"{v:04o}"
+ except Exception:
+ return None
+ if len(s) in (3 ,4) and all(ch in "01234567" for ch in s):
+ try:
+ v = int(s ,8)
+ return v ,f"{v:04o}"
+ except Exception:
+ return None
+ return None
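+
+# Illustrative expectations for norm_perm (sketch, not an exhaustive test):
+#   norm_perm(0o644)   -> (420 ,"0644")
+#   norm_perm("0755")  -> (493 ,"0755")
+#   norm_perm("0o444") -> (292 ,"0444")
+#   norm_perm(0o10000) -> None   (outside 0..0o7777)
+#   norm_perm("rw-")   -> None   (not 3/4 octal digits)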
+
+def is_abs_dpath(dpath_str: str|None)-> bool:
+ "Given path string. Does quick abs dir check. Returns bool."
+ return isinstance(dpath_str ,str) and dpath_str.startswith("/") and "\x00" not in dpath_str
+
+def norm_abs_dpath_str(value: str|Path|None)-> str|None:
+ "Given str/Path/None. Does normalize absolute dir path string. Returns str or None."
+ if value is None: return None
+ s = value.as_posix() if isinstance(value ,Path) else str(value)
+ return s if is_abs_dpath(s) else None
+
+def norm_dpath_str(value: str|Path|None)-> str|None:
+ "Given str/Path/None. Does minimal sanitize; allows relative. Returns str or None."
+ if value is None: return None
+ s = value.as_posix() if isinstance(value ,Path) else str(value)
+ if not s or "\x00" in s: return None
+ return s
+
+def norm_fname_or_none(value: str|None)-> str|None:
+ "Given candidate filename or None. Does validate bare filename. Returns str or None."
+ if value is None: return None
+ s = str(value)
+ if not s: return None
+ if "/" in s or s in ("." ,"..") or "\x00" in s: return None
+ return s
+
+def norm_nonempty_owner(value: str|None)-> str|None:
+ "Given owner string or None. Does minimally validate (non-empty). Returns str or None."
+ if value is None: return None
+ s = str(value).strip()
+ return s if s else None
+
+def parse_mode(value: int|str|None)-> tuple[int|None ,str|None]:
+ "Given int/str/None. Does normalize via norm_perm. Returns (int ,'%04o') or (None ,None)."
+ if value is None: return None ,None
+ r = norm_perm(value)
+ return r if r is not None else (None ,None)
+
+def norm_content_bytes(value: bytes|str|None)-> bytes|None:
+ "Given bytes/str/None. Does normalize to UTF-8 bytes or None. Returns bytes|None."
+ if value is None: return None
+ if isinstance(value ,bytes): return value
+ return value.encode("utf-8")
+
+# ===== Wire-ready model types (no CBOR here) =====
+
+class Command:
+ """
+ Command — a single planned operation.
+
+ Given name_str ('copy'|'displace'|'delete'), optional arg_dict, optional errors_list.
+ Does hold op name, own a fresh arg_dict, collect per-entry errors.
+ Returns dictionary via as_dictionary().
+ """
+ __slots__ = (
+ "name_str"
+ ,"arg_dict"
+ ,"errors_list"
+ )
+
+ def __init__(self ,name_str: str ,arg_dict: dict|None=None ,errors_list: list[str]|None=None)-> None:
+ self.name_str = name_str
+ self.arg_dict = dict(arg_dict) if arg_dict is not None else {}
+ self.errors_list = list(errors_list) if errors_list is not None else []
+
+ def add_error(self ,msg_str: str)-> None:
+ self.errors_list.append(msg_str)
+
+ def as_dictionary(self)-> dict:
+ return {
+ "op": self.name_str
+ ,"arg_dict": dict(self.arg_dict)
+ ,"errors_list": list(self.errors_list)
+ }
+
+ def print(self ,* ,index: int|None=None ,file=None)-> None:
+ """
+ Given: optional index for numbering and optional file-like (defaults to stdout).
+ Does: print a compact, human-readable one-line summary of this command; prints any errors indented below.
+ Returns: None.
+ """
+ if file is None:
+ import sys as _sys
+ file = _sys.stdout
+
+ op = self.name_str
+ ad = self.arg_dict or {}
+
+ # Compose destination path for display (normalize to collapse '..')
+ d = ad.get("write_file_dpath_str") or ""
+ f = ad.get("write_file_fname") or ""
+ try:
+ from pathlib import Path as _Path
+ if d and f and "/" not in f:
+ dst = (_Path(d)/f).resolve().as_posix()
+ else:
+ dst = "?"
+ except Exception:
+ dst = "?"
+
+ prefix = f"{index:02d}. " if index is not None else ""
+
+ if op == "copy":
+            mode = ad.get("mode_int")
+            mode_str = f"{mode:04o}" if isinstance(mode ,int) else "?"
+            owner = ad.get("owner_name")
+            size = len(ad.get("content_bytes") or b"")
+            line = f"{prefix}copy -> {dst} mode {mode_str} owner {owner} bytes {size}"
+ elif op == "displace":
+ line = f"{prefix}displace -> {dst}"
+ elif op == "delete":
+ line = f"{prefix}delete -> {dst}"
+ else:
+ line = f"{prefix}?op? -> {dst}"
+
+ print(line ,file=file)
+
+ for err in self.errors_list:
+ print(f" ! {err}" ,file=file)
+
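+# Example one-line renderings from Command.print() (sketch; paths and values are hypothetical):
+#   01. copy -> /etc/unbound/unbound_conf mode 0444 owner root bytes 23
+#   02. displace -> /etc/unbound/unbound_conf
+#      ! write_file_dpath must be absolute    (any errors print indented beneath the line)
+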
+class Journal:
+ """
+ Journal — ordered list of Command plus provenance metadata (model only; no CBOR).
+
+ Given optional plan_dict in wire shape (for reconstruction).
+ Does manage meta, append commands, expose entries, and pack to dict.
+ Returns dict via as_dictionary().
+ """
+ __slots__ = (
+ "meta_dict"
+ ,"command_list"
+ )
+
+ def __init__(self ,plan_dict: dict|None=None)-> None:
+ self.meta_dict = {}
+ self.command_list = []
+ if plan_dict is not None:
+ self._init_from_dict(plan_dict)
+
+ def _init_from_dict(self ,plan_dict: dict)-> None:
+ if not isinstance(plan_dict ,dict):
+ raise ValueError("plan_dict must be a dict")
+ meta = dict(plan_dict.get("meta_dict") or {})
+ entries = plan_dict.get("entries_list") or []
+ self.meta_dict.update(meta)
+ for e in entries:
+ if not isinstance(e ,dict):
+ continue
+ op = e.get("op") or "?"
+ args = e.get("arg_dict") or {}
+ errs = e.get("errors_list") or []
+ self.command_list.append(Command(name_str=op ,arg_dict=dict(args) ,errors_list=list(errs)))
+
+ def set_meta(self ,**kv)-> None:
+ self.meta_dict.update(kv)
+
+ def append(self ,cmd: Command)-> None:
+ self.command_list.append(cmd)
+
+ def entries_list(self)-> list[dict]:
+ return [c.as_dictionary() for c in self.command_list]
+
+ def as_dictionary(self)-> dict:
+ return {
+ "version_int": 1
+ ,"meta_dict": dict(self.meta_dict)
+ ,"entries_list": self.entries_list()
+ }
+
+ def print(self ,* ,index_start: int=1 ,file=None)-> None:
+ """
+ Given: optional starting index and optional file-like (defaults to stdout).
+ Does: print each Command on a single line via Command.print(), numbered.
+ Returns: None.
+ """
+ if file is None:
+ import sys as _sys
+ file = _sys.stdout
+
+ if not self.command_list:
+ print("(plan is empty)" ,file=file)
+ return
+
+ for i ,cmd in enumerate(self.command_list ,start=index_start):
+ cmd.print(index=i ,file=file)
+
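+# Wire shape produced by Journal.as_dictionary() (sketch; meta keys as set by Planner below):
+#   {"version_int": 1
+#    ,"meta_dict": {"stage_root_dpath_str": "<abs stage root>" ,"config_rel_fpath_str": "<rel config path>"}
+#    ,"entries_list": [{"op": "copy" ,"arg_dict": {...} ,"errors_list": []} ,...]}
+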
+# ===== Runner-provided provenance =====
+
+class PlanProvenance:
+ """
+ Runner-provided, read-only provenance for a single config script.
+ """
+ __slots__ = (
+ "stage_root_dpath"
+ ,"config_abs_fpath"
+ ,"config_rel_fpath"
+ ,"read_dir_dpath"
+ ,"read_fname"
+ ,"process_user"
+ ,"cwd_dpath"
+ )
+
+ def __init__(self ,* ,stage_root: Path ,config_path: Path):
+ self.stage_root_dpath = stage_root.resolve()
+ self.config_abs_fpath = config_path.resolve()
+ try:
+ self.config_rel_fpath = self.config_abs_fpath.relative_to(self.stage_root_dpath)
+ except Exception:
+ self.config_rel_fpath = Path(self.config_abs_fpath.name)
+
+ self.read_dir_dpath = self.config_abs_fpath.parent
+
+ name = self.config_abs_fpath.name
+ if name.endswith(".stage.py"):
+ self.read_fname = name[:-len(".stage.py")]
+ elif name.endswith(".py"):
+ self.read_fname = name[:-3]
+ else:
+ self.read_fname = name
+
+ self.process_user = getpass.getuser()
+ self.cwd_dpath = Path.cwd().resolve()
+
+ def print(self ,* ,file=None)-> None:
+ if file is None:
+ import sys as _sys
+ file = _sys.stdout
+ print(f"Stage root: {self.stage_root_dpath}" ,file=file)
+ print(f"Config (rel): {self.config_rel_fpath.as_posix()}" ,file=file)
+ print(f"Config (abs): {self.config_abs_fpath}" ,file=file)
+ print(f"Read dir: {self.read_dir_dpath}" ,file=file)
+ print(f"Read fname: {self.read_fname}" ,file=file)
+ print(f"Process user: {self.process_user}" ,file=file)
+
+# ===== Admin-facing defaults carrier =====
+
+class WriteFileMeta:
+ """
+ WriteFileMeta — per-call or planner-default write-file attributes.
+
+ Given dpath (str/Path, may be relative) ,fname (bare name or None) ,owner (str)
+ ,mode (int|'0644') ,content (bytes|str|None).
+ Does normalize into fields (may remain None if absent/invalid).
+ Returns object suitable for providing defaults to Planner methods.
+ """
+ __slots__ = (
+ "dpath_str"
+ ,"fname"
+ ,"owner_name_str"
+ ,"mode_int"
+ ,"mode_octal_str"
+ ,"content_bytes"
+ )
+
+ def __init__(self
+ ,*
+ ,dpath="/"
+ ,fname=None
+ ,owner="root"
+ ,mode=0o444
+ ,content=None
+ ):
+ self.dpath_str = norm_dpath_str(dpath)
+ self.fname = norm_fname_or_none(fname)
+ self.owner_name_str = norm_nonempty_owner(owner)
+ self.mode_int ,self.mode_octal_str = parse_mode(mode)
+ self.content_bytes = norm_content_bytes(content)
+
+ def print(self ,* ,label: str|None=None ,file=None)-> None:
+ if file is None:
+ import sys as _sys
+ file = _sys.stdout
+ dpath = self.dpath_str or "?"
+ fname = self.fname or "?"
+ owner = self.owner_name_str or "?"
+ mode_str = f"{self.mode_int:04o}" if isinstance(self.mode_int ,int) else (self.mode_octal_str or "?")
+ size = len(self.content_bytes) if isinstance(self.content_bytes ,(bytes ,bytearray)) else 0
+ prefix = (label + ": ") if label else ""
+ print(f"{prefix}dpath={dpath} fname={fname} owner={owner} mode={mode_str} bytes={size}" ,file=file)
+
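+# Illustrative normalization by WriteFileMeta (sketch):
+#   WriteFileMeta(dpath="etc/app" ,fname="app.conf" ,owner="root" ,mode="0644" ,content="x=1\n")
+#   -> dpath_str="etc/app" fname="app.conf" owner_name_str="root"
+#      mode_int=0o644 mode_octal_str="0644" content_bytes=b"x=1\n"
+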
+# ===== Planner =====
+
+class Planner:
+ """
+ Planner — constructs a Journal of Commands from config scripts.
+
+ Given provenance (PlanProvenance) and optional default WriteFileMeta.
+ Does resolve command parameters by precedence: kwarg > per-call WriteFileMeta > planner default,
+ with a final filename fallback to provenance basename if still missing.
+ On any argument error, returns the Command with errors and DOES NOT append it to Journal.
+ Returns live Journal via journal().
+ """
+ __slots__ = (
+ "_prov"
+ ,"_defaults"
+ ,"_journal"
+ )
+
+ def __init__(self ,provenance: PlanProvenance ,defaults: WriteFileMeta|None=None)-> None:
+ self._prov = provenance
+ self._defaults = defaults if defaults is not None else WriteFileMeta(
+ dpath="/"
+ ,fname=provenance.read_fname
+ ,owner="root"
+ ,mode=0o444
+ ,content=None
+ )
+ self._journal = Journal()
+ self._journal.set_meta(
+ stage_root_dpath_str=str(self._prov.stage_root_dpath)
+ ,config_rel_fpath_str=self._prov.config_rel_fpath.as_posix()
+ )
+
+ # --- provenance/defaults/journal access ---
+
+ def set_provenance(self ,prov: PlanProvenance)-> None:
+ self._prov = prov
+
+ def set_defaults(self ,defaults: WriteFileMeta)-> None:
+ self._defaults = defaults
+
+ def defaults(self)-> WriteFileMeta:
+ return self._defaults
+
+ def journal(self)-> Journal:
+ return self._journal
+
+ # --- resolution helpers ---
+
+ def _pick(self ,kw ,meta_attr ,default_attr):
+ "Pick first non-None among kw ,meta_attr ,default_attr."
+ return kw if kw is not None else (meta_attr if meta_attr is not None else default_attr)
+
+ def _resolve_write_file(self ,wfm ,dpath ,fname)-> tuple[str|None ,str|None]:
+ # normalize explicit kwargs
+ dpath_str = norm_dpath_str(dpath) if dpath is not None else None
+ fname_str = norm_fname_or_none(fname) if fname is not None else None
+
+ # precedence: kwarg > per-call meta > planner default
+ dpath_val = self._pick(dpath_str ,(wfm.dpath_str if wfm else None) ,self._defaults.dpath_str)
+ fname_val = self._pick(fname_str ,(wfm.fname if wfm else None) ,self._defaults.fname)
+
+ # final fallback for filename: derive from config name
+ if fname_val is None:
+ fname_val = self._prov.read_fname
+
+ # anchor/normalize dpath
+ if dpath_val is not None:
+ p = Path(dpath_val)
+ if not p.is_absolute():
+ p = (self._prov.cwd_dpath/p)
+ dpath_val = p.resolve().as_posix()
+
+ return dpath_val ,fname_val
+
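+    # Illustrative resolution (sketch): for a config named "unbound_conf" run from
+    # /home/tester with the built-in defaults (dpath="/" ,fname=<config basename>):
+    #   _resolve_write_file(None ,"conf.d" ,None) -> ("/home/tester/conf.d" ,"unbound_conf")
+    #   _resolve_write_file(None ,None ,None)     -> ("/" ,"unbound_conf")
+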
+ def _resolve_owner_mode_content(self
+ ,wfm: WriteFileMeta|None
+ ,owner: str|None
+ ,mode: int|str|None
+ ,content: bytes|str|None
+ )-> tuple[str|None ,tuple[int|None ,str|None] ,bytes|None]:
+ owner_norm = norm_nonempty_owner(owner) if owner is not None else None
+ mode_norm = parse_mode(mode) if mode is not None else (None ,None)
+ content_b = norm_content_bytes(content) if content is not None else None
+
+ owner_v = self._pick(owner_norm ,(wfm.owner_name_str if wfm else None) ,self._defaults.owner_name_str)
+ mode_v = (mode_norm if mode_norm != (None ,None) else
+ ((wfm.mode_int ,wfm.mode_octal_str) if wfm else (self._defaults.mode_int ,self._defaults.mode_octal_str)))
+ content_v = self._pick(content_b ,(wfm.content_bytes if wfm else None) ,self._defaults.content_bytes)
+ return owner_v ,mode_v ,content_v
+
+ # --- printing ---
+
+ def print(self ,* ,show_journal: bool=True ,file=None)-> None:
+ if file is None:
+ import sys as _sys
+ file = _sys.stdout
+
+ print("== Provenance ==" ,file=file)
+ self._prov.print(file=file)
+
+ print("\n== Defaults ==" ,file=file)
+ self._defaults.print(label="defaults" ,file=file)
+
+ if show_journal:
+ entries = getattr(self._journal ,"command_list" ,[])
+ n_total = len(entries)
+ n_copy = sum(1 for c in entries if getattr(c ,"name_str" ,None) == "copy")
+ n_disp = sum(1 for c in entries if getattr(c ,"name_str" ,None) == "displace")
+ n_del = sum(1 for c in entries if getattr(c ,"name_str" ,None) == "delete")
+
+ print("\n== Journal ==" ,file=file)
+ print(f"entries: {n_total} copy:{n_copy} displace:{n_disp} delete:{n_del}" ,file=file)
+ if n_total:
+ self._journal.print(index_start=1 ,file=file)
+ else:
+ print("(plan is empty)" ,file=file)
+
+ # --- Command builders (first arg may be WriteFileMeta) ---
+
+ def copy(self
+ ,wfm: WriteFileMeta|None=None
+ ,*
+ ,write_file_dpath: str|Path|None=None
+ ,write_file_fname: str|None=None
+ ,owner: str|None=None
+ ,mode: int|str|None=None
+ ,content: bytes|str|None=None
+ )-> Command:
+ cmd = Command("copy")
+ dpath ,fname = self._resolve_write_file(wfm ,write_file_dpath ,write_file_fname)
+ owner_v ,(mode_int ,mode_oct) ,content_b = self._resolve_owner_mode_content(wfm ,owner ,mode ,content)
+
+ # well-formed checks
+ if not is_abs_dpath(dpath): cmd.add_error("write_file_dpath must be absolute")
+ if norm_fname_or_none(fname) is None: cmd.add_error("write_file_fname must be a bare filename")
+ if not owner_v: cmd.add_error("owner must be non-empty")
+ if (mode_int ,mode_oct) == (None ,None):
+ cmd.add_error("mode must be int <= 0o7777 or 3/4-digit octal string")
+ if content_b is None:
+ cmd.add_error("content is required for copy() (bytes or str)")
+
+ cmd.arg_dict.update({
+ "write_file_dpath_str": dpath
+ ,"write_file_fname": fname
+ ,"owner_name": owner_v
+ ,"mode_int": mode_int
+ ,"mode_octal_str": mode_oct
+ ,"content_bytes": content_b
+ ,"provenance_config_rel_fpath_str": self._prov.config_rel_fpath.as_posix()
+ })
+
+ if not cmd.errors_list:
+ self._journal.append(cmd)
+ return cmd
+
+ def displace(self
+ ,wfm: WriteFileMeta|None=None
+ ,*
+ ,write_file_dpath: str|Path|None=None
+ ,write_file_fname: str|None=None
+ )-> Command:
+ cmd = Command("displace")
+ dpath ,fname = self._resolve_write_file(wfm ,write_file_dpath ,write_file_fname)
+ if not is_abs_dpath(dpath): cmd.add_error("write_file_dpath must be absolute")
+ if norm_fname_or_none(fname) is None: cmd.add_error("write_file_fname must be a bare filename")
+ cmd.arg_dict.update({
+ "write_file_dpath_str": dpath
+ ,"write_file_fname": fname
+ })
+ if not cmd.errors_list:
+ self._journal.append(cmd)
+ return cmd
+
+ def delete(self
+ ,wfm: WriteFileMeta|None=None
+ ,*
+ ,write_file_dpath: str|Path|None=None
+ ,write_file_fname: str|None=None
+ )-> Command:
+ cmd = Command("delete")
+ dpath ,fname = self._resolve_write_file(wfm ,write_file_dpath ,write_file_fname)
+ if not is_abs_dpath(dpath): cmd.add_error("write_file_dpath must be absolute")
+ if norm_fname_or_none(fname) is None: cmd.add_error("write_file_fname must be a bare filename")
+ cmd.arg_dict.update({
+ "write_file_dpath_str": dpath
+ ,"write_file_fname": fname
+ })
+ if not cmd.errors_list:
+ self._journal.append(cmd)
+ return cmd
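+
+# Illustrative end-to-end use (sketch; paths and content are hypothetical):
+#   prov    = PlanProvenance(stage_root=Path("/stage") ,config_path=Path("/stage/dns/unbound_conf.py"))
+#   planner = Planner(prov)
+#   wfm     = WriteFileMeta(dpath="/etc/unbound" ,owner="root" ,mode="0644")
+#   planner.displace(wfm)
+#   planner.copy(wfm ,content="server:\n  do-ip6: no\n")  # fname falls back to "unbound_conf"
+#   plan_dict = planner.journal().as_dictionary()         # wire-ready dict; CBOR encoding happens elsewhere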
# --- main stays a thin arg wrapper ------------------------------------------
def main(argv: list[str]|None=None)-> int:
+
ap = argparse.ArgumentParser(
prog="executor_inner.py"
,description="Man_In_Gray inner executor (decode → validate → apply)"
ap.add_argument("--phase-2-validity-then-stop" ,action="store_true" ,help="stop after validity checks")
ap.add_argument("--phase-2-sanity2-then-stop" ,action="store_true" ,help="stop after sanity-2 checks")
+ ap.add_argument("--plan" ,default="" ,help="path to CBOR plan file or '-' for stdin")
+ ap.add_argument("--plan-fd" ,type=int ,default=-1 ,help=argparse.SUPPRESS)
+
args = ap.parse_args(argv)
# load plan
try:
- data = Path(args.plan).read_bytes()
+        if args.plan_fd >= 0:
+            import os as _os
+            # read to EOF; a single os.read() may return short on pipes
+            chunks = []
+            while (chunk := _os.read(args.plan_fd ,1<<20)):
+                chunks.append(chunk)
+            data = b"".join(chunks)
+ elif args.plan == "-":
+ import sys as _sys
+ data = _sys.stdin.buffer.read()
+ elif args.plan:
+ data = Path(args.plan).read_bytes()
+ else:
+ print("error: either --plan <file|-> or --plan-fd <n> is required" ,file=sys.stderr)
+ return 2
except Exception as e:
- print(f"error: failed to read plan file: {e}" ,file=sys.stderr)
+ print(f"error: failed to read plan: {e}" ,file=sys.stderr)
return 2
try:
+++ /dev/null
-# stage_test_0/DNS/unbound_conf.py
-
-def configure(prov, planner, WriteFileMeta):
- # dpath is relative; it will be anchored to prov.read_dir_dpath,
- # so this lands in .../stage_test_0/stage_test_0_out/dns
- wfm = WriteFileMeta(
- dpath="stage_test_0_out/net",
- fname=prov.read_fname, # "unbound_conf"
- owner=prov.process_user, # current process user
- mode=0o444
- )
- planner.delete(wfm)
- planner.copy(wfm, content="server:\n verbosity: 1\n")
+++ /dev/null
-# unbound.conf (example)
-
-def configure(prov, planner, WriteFileMeta):
- wfm = WriteFileMeta(
- dpath="stage_test_0_out"
- ,fname=prov.read_fname # write file name same as read file name
- ,owner=prov.process_user
- )
- planner.displace(wfm)
- planner.copy(wfm, content="server:\n do-ip6: no\n")
+++ /dev/null
-# stage_test_0/web/site_conf.py
-
-def configure(prov, planner, WriteFileMeta):
- # This writes a faux web config to .../stage_test_0/stage_test_0_out/web/nginx.conf
- wfm = WriteFileMeta(
- dpath="stage_test_0_out/web",
- fname="nginx.conf", # explicit override (not from prov)
- owner=prov.process_user,
- mode="0644"
- )
- planner.displace(wfm)
- planner.copy(wfm, content="events {}\nhttp { server { listen 8080; } }\n")
--- /dev/null
+#!/usr/bin/env bash
+set -euo pipefail
+
+# discover repo root from here
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+TESTER_DIR="$(cd "$(dirname "$script_afp")/.." && pwd -P)"
+REPO_HOME="$(cd "$TESTER_DIR/.." && pwd -P)"
+
+ENTRY="$REL_SHELL/Man_In_Grey"
+GASKET="$REL_ARCH/man_in_grey_apply"
+
+# tester has a path to `release/shell`
+ENTRY="Man_In_Grey"
+
+# sanity
+[[ -x "$ENTRY" ]] || { echo "❌ missing entrypoint: $ENTRY (did you run developer/release?)" >&2; exit 2; }
+
+# ensure tester won’t hit privileged gasket refusal:
+ARCH_RAW=$(uname -m | tr '[:upper:]' '[:lower:]')
+case "$ARCH_RAW" in
+ amd64|x64) ARCH="x86_64" ;;
+ x86_64) ARCH="x86_64" ;;
+ i386|i486|i586|i686) ARCH="i686" ;;
+ arm64|aarch64) ARCH="aarch64" ;;
+ armv7l) ARCH="armv7l" ;;
+ armv6l) ARCH="armv6l" ;;
+ riscv64) ARCH="riscv64" ;;
+ ppc64le|powerpc64le) ARCH="ppc64le" ;;
+ s390x) ARCH="s390x" ;;
+ *) ARCH="$ARCH_RAW" ;;
+esac
+GASKET="$REPO_HOME/release/$ARCH/man_in_grey_apply"
+
+if [[ -x "$GASKET" && -u "$GASKET" ]]; then
+ echo "⚠️ Gasket is blessed (setuid-root) but tester is sudo-less."
+ echo " Run: sudo ./tool/unbless"
+ exit 1
+fi
+
+# stage under test and its output dir (defaults are assumptions; adjust to your layout)
+STAGE="${STAGE:-stage_test_0}"
+OUT="${OUT:-$TESTER_DIR/$STAGE/${STAGE}_out}"
+
+# fresh output dir
+rm -rf -- "$OUT"
+mkdir -p "$OUT"
+
+echo "▶️ Running Man_In_Grey on tester/$STAGE → $OUT"
+# Run planner → CBOR → apply (unprivileged). Default filter will be emitted in CWD if missing.
+( cd "$TESTER_DIR" && \
+ "$ENTRY" \
+ --stage "$STAGE" \
+ --phase-1-print \
+ --phase-2-print )
+
+echo "✅ Apply finished. Verifying…"
+
+fail=0
+
+# expected artifacts (from your sample stage)
+chk() {
+ local path="$1" desc="$2"
+ if [[ -f "$path" ]]; then
+ echo " ✓ $desc ($path)"
+ else
+ echo " ✗ $desc missing ($path)"; fail=1
+ fi
+}
+
+# files to expect
+chk "$OUT/unbound_conf" "DNS base file"
+chk "$OUT/net/unbound.conf" "DNS net override"
+chk "$OUT/web/nginx.conf" "web nginx.conf"
+
+# content spot checks (adjust to your test content)
+if [[ -f "$OUT/net/unbound.conf" ]]; then
+ grep -q 'verbosity: 1' "$OUT/net/unbound.conf" \
+ && echo " ✓ verbosity content OK" \
+ || { echo " ✗ expected 'verbosity: 1' in net/unbound.conf"; fail=1; }
+fi
+
+if [[ -f "$OUT/web/nginx.conf" ]]; then
+ grep -q 'listen 8080' "$OUT/web/nginx.conf" \
+ && echo " ✓ nginx listen OK" \
+ || { echo " ✗ expected 'listen 8080' in web/nginx.conf"; fail=1; }
+fi
+
+# mode spot check (0444 example; GNU and BSD stat take different flags, so try both)
+if [[ -f "$OUT/unbound_conf" ]]; then
+ mode=$(stat -c '%a' "$OUT/unbound_conf" 2>/dev/null || stat -f '%Lp' "$OUT/unbound_conf")
+ [[ "$mode" == "444" ]] \
+ && echo " ✓ mode unbound_conf is 0444" \
+ || { echo " ⚠︎ mode unbound_conf is $mode (expected 444)"; :; }
+fi
+
+[[ $fail -eq 0 ]] && echo "🎉 test_0 PASS" || { echo "❌ test_0 FAIL"; exit 1; }
--- /dev/null
+# stage_test_0/DNS/unbound_conf.py
+
+def configure(prov, planner, WriteFileMeta):
+ # dpath is relative; it will be anchored to prov.read_dir_dpath,
+ # so this lands in .../stage_test_0/stage_test_0_out/dns
+ wfm = WriteFileMeta(
+ dpath="stage_test_0_out/net",
+ fname=prov.read_fname, # "unbound_conf"
+ owner=prov.process_user, # current process user
+ mode=0o444
+ )
+ planner.delete(wfm)
+ planner.copy(wfm, content="server:\n verbosity: 1\n")
--- /dev/null
+# unbound.conf (example)
+
+def configure(prov, planner, WriteFileMeta):
+ wfm = WriteFileMeta(
+ dpath="stage_test_0_out"
+ ,fname=prov.read_fname # write file name same as read file name
+ ,owner=prov.process_user
+ )
+ planner.displace(wfm)
+ planner.copy(wfm, content="server:\n do-ip6: no\n")
--- /dev/null
+# stage_test_0/web/site_conf.py
+
+def configure(prov, planner, WriteFileMeta):
+ # This writes a faux web config to .../stage_test_0/stage_test_0_out/web/nginx.conf
+ wfm = WriteFileMeta(
+ dpath="stage_test_0_out/web",
+ fname="nginx.conf", # explicit override (not from prov)
+ owner=prov.process_user,
+ mode="0644"
+ )
+ planner.displace(wfm)
+ planner.copy(wfm, content="events {}\nhttp { server { listen 8080; } }\n")
--- /dev/null
+#!/usr/bin/env bash
+# tester-authored custom environment (optional)
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# Normalize machine arch to our release dir names
+_arch_raw=$(uname -m | tr '[:upper:]' '[:lower:]')
+case "$_arch_raw" in
+ amd64|x64) _arch="x86_64" ;;
+ x86_64) _arch="x86_64" ;;
+ i386|i486|i586|i686) _arch="i686" ;;
+ arm64|aarch64) _arch="aarch64" ;;
+ armv7l) _arch="armv7l" ;;
+ armv6l) _arch="armv6l" ;;
+ riscv64) _arch="riscv64" ;;
+ ppc64le|powerpc64le) _arch="ppc64le" ;;
+ s390x) _arch="s390x" ;;
+ *) _arch="$_arch_raw" ;;
+esac
+export ARCH="$_arch"
+
+# Handy convenience paths (optional)
+export REL_SHELL="$RELEASE/shell"
+export REL_PY="$RELEASE/python3"
+export REL_ARCH="$RELEASE/$ARCH"
--- /dev/null
+#!/usr/bin/env bash
+set -euo pipefail
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+REPO_HOME="$(cd "$(dirname "$script_afp")/.." && pwd -P)"
+
+if [[ $EUID -ne 0 ]]; then
+ echo "must be run as root (sudo)"; exit 1
+fi
+
+arch_raw=$(uname -m | tr '[:upper:]' '[:lower:]')
+case "$arch_raw" in
+ amd64|x64) arch="x86_64" ;;
+ x86_64) arch="x86_64" ;;
+ i386|i486|i586|i686) arch="i686" ;;
+ arm64|aarch64) arch="aarch64" ;;
+ armv7l) arch="armv7l" ;;
+ armv6l) arch="armv6l" ;;
+ riscv64) arch="riscv64" ;;
+ ppc64le|powerpc64le) arch="ppc64le" ;;
+ s390x) arch="s390x" ;;
+ *) arch="$arch_raw" ;;
+esac
+
+GASKET="${REPO_HOME}/release/${arch}/man_in_grey_apply"
+if [[ ! -x "$GASKET" ]]; then
+ echo "gasket missing: $GASKET (run developer/tool/release first)"; exit 2
+fi
+
+chown root:root "$GASKET"
+chmod 4755 "$GASKET"
+
+echo "blessed: $GASKET"
+ls -l "$GASKET"
+echo "flags:"
+"$GASKET" --print-flags || true
+++ /dev/null
-#!/usr/bin/env bash
-set -euo pipefail
-
-# bless_release — elevate released artifacts so privileged apply is possible
-# usage: bless_release [arch]
-# default arch = normalized uname -m → {x86_64,i686,aarch64,armv7l,armv6l,riscv64,ppc64le,s390x}
-
-if [[ "${EUID:-$(id -u)}" -ne 0 ]]; then
- echo "error: bless_release must run as root" >&2
- exit 2
-fi
-
-SELF_DIR="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)"
-REPO_HOME="$(CDPATH= cd -- "$SELF_DIR/../.." && pwd)"
-REL_DIR="${REPO_HOME}/release"
-
-# normalize arch
-raw="$(uname -m | tr '[:upper:]' '[:lower:]')"
-case "$raw" in
- amd64|x64) arch="x86_64" ;;
- x86_64) arch="x86_64" ;;
- i386|i486|i586|i686) arch="i686" ;;
- arm64|aarch64) arch="aarch64" ;;
- armv7l) arch="armv7l" ;;
- armv6l) arch="armv6l" ;;
- riscv64) arch="riscv64" ;;
- ppc64le|powerpc64le) arch="ppc64le" ;;
- s390x) arch="s390x" ;;
- *) arch="$raw" ;;
-esac
-[[ $# -ge 1 ]] && arch="$1"
-
-GASKET="${REL_DIR}/${arch}/man_in_grey_apply"
-INNER_PY="${REL_DIR}/python3/executor_inner.py"
-
-# sanity checks
-[[ -x "$GASKET" ]] || { echo "error: gasket not found/executable: $GASKET" >&2; exit 2; }
-[[ -f "$INNER_PY" ]] || { echo "error: inner executor missing: $INNER_PY" >&2; exit 2; }
-
-# set ownership/mode
-chown root:root "$GASKET" "$INNER_PY"
-chmod 4755 "$GASKET" # setuid root
-chmod 0755 "$INNER_PY" # root-owned, not setuid
-
-echo "blessed:"
-echo " gasket: $GASKET (root:root, 4755)"
-echo " inner : $INNER_PY (root:root, 0755)"
--- /dev/null
+#!/usr/bin/env bash
+set -euo pipefail
+SELF_DIR="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)"
+REPO_HOME="$(CDPATH= cd -- "$SELF_DIR/.." && pwd)"
+
+if [[ $EUID -ne 0 ]]; then
+ echo "must be run as root (sudo)"; exit 1
+fi
+
+raw="$(uname -m | tr '[:upper:]' '[:lower:]')"
+case "$raw" in
+ amd64|x64) arch="x86_64" ;; x86_64) arch="x86_64" ;;
+ i386|i486|i586|i686) arch="i686" ;;
+ arm64|aarch64) arch="aarch64" ;;
+ armv7l) arch="armv7l" ;; armv6l) arch="armv6l" ;;
+ riscv64) arch="riscv64" ;; ppc64le|powerpc64le) arch="ppc64le" ;;
+ s390x) arch="s390x" ;; *) arch="$raw" ;;
+esac
+
+GASKET="${REPO_HOME}/release/${arch}/man_in_grey_apply"
+[[ -e "$GASKET" ]] || { echo "not found: $GASKET"; exit 2; }
+
+# default target owner to the user who owns REPO_HOME
+OWNER="$(stat -c '%U' "$REPO_HOME")"
+GROUP="$(stat -c '%G' "$REPO_HOME")"
+
+chmod 0755 "$GASKET"
+chown "$OWNER:$GROUP" "$GASKET"
+echo "unblessed: $GASKET"
+ls -l "$GASKET"
# without this bash takes non-matching globs literally
shopt -s nullglob
+umask 022
# --------------------------------------------------------------------------------
# project definition