--- /dev/null
+# Python
+__pycache__/
+*.py[cod]
+*.pyo
+*.pyd
+.ipynb_checkpoints/
+.pytest_cache/
+
+# editor backup files (optional)
+*~
+*.bak
+
--- /dev/null
+# Contributing
+
+By contributing, you agree your contributions are licensed under the repository
+SPDX license expression:
+
+ MIT
+
+We use "inbound = outbound": you retain copyright; you license your
+contribution under the same terms. Optionally sign off commits per the DCO:
+
+ Signed-off-by: Your Name <you@example.com>
+
--- /dev/null
+MIT License
+
+Copyright (c) 2019, 2024, 2025 Reasoning Technology
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
--- /dev/null
+# Epimetheus
+
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+Identity
+
+An abstract identity used as the subject key for property attachment.
+
+Kinds (strings) determine storage and resolution behavior.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any ,Optional ,Tuple
+
+from .ProcessLocalId import ProcessLocalId
+
+
+IDENTITY_KIND_PY_OBJECT = "py_object"
+IDENTITY_KIND_SYNTAX = "syntax"
+IDENTITY_KIND_PROPERTY = "property"
+IDENTITY_KIND_SET = "semantic_set"
+
+
+@dataclass(frozen=True ,slots=True)
+class Identity:
+  """
+  `id` is always a ProcessLocalId.
+
+  `kind` partitions lookup behavior.
+
+  `payload` is kind-specific metadata (kept small; do not put giant graphs here).
+  """
+  # Process-local token naming this identity; not meant to be serialized.
+  id: ProcessLocalId
+  # Presumably one of the IDENTITY_KIND_* strings above — nothing here enforces it; TODO confirm.
+  kind: str
+  # Kind-specific metadata; None when the kind needs none.
+  payload: Any = None
+
+  def __repr__(self) -> str:
+    # Do not reveal id token.
+    return f"<Identity kind={self.kind!r}>"
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+ObjectRegistry
+
+Maps Python runtime objects to ProcessLocalId using weak identity.
+
+Constraints:
+ - Only weakref-able Python objects can be registered.
+ - This is intentional: RT properties attach to identity-bearing runtime instances,
+ not to value-like primitives (ints/strings/lists/dicts).
+"""
+
+from __future__ import annotations
+
+import weakref
+from typing import Any ,Callable ,Dict ,Optional
+
+from .ProcessLocalId import ProcessLocalIdGenerator ,ProcessLocalId
+
+
+class ObjectRegistry:
+ def __init__(self ,id_gen: ProcessLocalIdGenerator):
+ self._id_gen = id_gen
+ self._obj_to_id_wkd: "weakref.WeakKeyDictionary[Any ,ProcessLocalId]" = weakref.WeakKeyDictionary()
+ self._id_to_obj_ref: Dict[ProcessLocalId ,weakref.ref] = {}
+ self._finalizers: Dict[ProcessLocalId ,Callable[[ProcessLocalId],None]] = {}
+
+ def register_finalizer(self ,fn: Callable[[ProcessLocalId],None]):
+ """
+ Registers a finalizer callback invoked when any registered object is GC'd.
+ """
+ self._global_finalizer = fn
+
+ def _on_collect(self ,obj_id: ProcessLocalId):
+ self._obj_to_id_wkd.pop(self._id_to_obj_ref[obj_id]() ,None)
+ self._id_to_obj_ref.pop(obj_id ,None)
+ fn = getattr(self ,"_global_finalizer" ,None)
+ if fn is not None: fn(obj_id)
+
+ def get_id(self ,obj: Any) -> ProcessLocalId:
+ """
+ Returns the ProcessLocalId for `obj`, registering it if needed.
+
+ Raises TypeError if `obj` is not weakref-able.
+ """
+ try:
+ existing = self._obj_to_id_wkd.get(obj)
+ except TypeError:
+ raise TypeError("ObjectRegistry: object is not weakref-able; RT properties do not attach to value-like primitives.")
+ if existing is not None: return existing
+
+ obj_id = self._id_gen.next_id()
+ try:
+ self._obj_to_id_wkd[obj] = obj_id
+ except TypeError:
+ raise TypeError("ObjectRegistry: object is not weakref-able; RT properties do not attach to value-like primitives.")
+ self._id_to_obj_ref[obj_id] = weakref.ref(obj ,lambda _ref ,oid=obj_id: self._on_collect(oid))
+ return obj_id
+
+ def try_get_object(self ,obj_id: ProcessLocalId) -> Optional[Any]:
+ """
+ Best-effort: returns the live object, or None if it has been collected or never registered.
+ """
+ ref = self._id_to_obj_ref.get(obj_id)
+ if ref is None: return None
+ return ref()
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+ProcessLocalId
+
+A process-local identifier used as an internal key.
+
+Design constraint:
+ - NOT intended to be serialized or persisted.
+ - `repr()` intentionally does not reveal the numeric token, to discourage logging/persistence.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+
+@dataclass(frozen=True ,slots=True)
+class ProcessLocalId:
+  """
+  Opaque process-local identifier; hashable and comparable by value.
+
+  repr/str deliberately hide the token to discourage logging/persistence.
+  """
+  # The raw token; private by convention, exposed only via as_int_UNSAFE().
+  _n: int
+
+  def __repr__(self) -> str:
+    return "<ProcessLocalId>"
+
+  def __str__(self) -> str:
+    return "<ProcessLocalId>"
+
+  def as_int_UNSAFE(self) -> int:
+    """
+    Returns the raw integer token.
+
+    UNSAFE because:
+      - tokens are process-local
+      - do not write these into files/databases/logs as stable identifiers
+    """
+    return self._n
+
+
+class ProcessLocalIdGenerator:
+ """
+ Monotonic generator; ids are never recycled.
+ """
+ def __init__(self ,start: int = 1):
+ if start < 1: raise ValueError("start must be >= 1")
+ self._next_n: int = start
+
+ def next_id(self) -> ProcessLocalId:
+ n = self._next_n
+ self._next_n += 1
+ return ProcessLocalId(n)
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+Property
+
+A Property is itself an entity (it has an Identity id) so that:
+ - properties can have properties
+ - properties can be members of semantic sets
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional ,Tuple
+
+from .ProcessLocalId import ProcessLocalId
+
+
+@dataclass(frozen=True ,slots=True)
+class Property:
+  """
+  A declared property; itself an entity so properties can carry properties.
+  """
+  # Identity of this property (assigned by PropertyManager.declare_property).
+  id: ProcessLocalId
+  # Hierarchical name, e.g. ("semantic" ,"printer"); unique per manager.
+  name_path: Tuple[str ,...]
+  # Free-text human documentation.
+  doc: str = ""
+
+  def __repr__(self) -> str:
+    # name_path is safe to reveal; id token is not.
+    return f"<Property {'.'.join(self.name_path)!r}>"
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+PropertyManager
+
+Core RT property system.
+
+Key decisions vs the earlier `property_manager.py`:
+  - do NOT key by `object_path()` strings (avoids collisions)
+ - runtime objects are keyed by weak identity (ProcessLocalId assigned by ObjectRegistry)
+ - properties are first-class entities (Property has an id), so properties can have properties
+
+This remains process-local and in-memory.
+"""
+
+from __future__ import annotations
+
+from typing import Any ,Dict ,Iterable ,List ,Optional ,Tuple ,Union
+
+from .ProcessLocalId import ProcessLocalIdGenerator ,ProcessLocalId
+from .ObjectRegistry import ObjectRegistry
+from .PropertyStore import PropertyStore
+from .Property import Property
+from .SemanticSets import SemanticSet ,SemanticSetStore
+from .Syntax import SyntaxInstance
+
+
+NamePathLike = Union[str ,List[str] ,Tuple[str ,...]]
+
+
+class PropertyManager:
+  """
+  Facade over identity acquisition, property declaration/attachment, and
+  semantic-set membership.  Entirely process-local and in-memory.
+  """
+  def __init__(self):
+    self._id_gen = ProcessLocalIdGenerator()
+    self._obj_reg = ObjectRegistry(self._id_gen)
+    self._store = PropertyStore()
+    self._sets = SemanticSetStore()
+
+    # Declare-by-name registry
+    self._name_path_to_property: Dict[Tuple[str ,...],Property] = {}
+    self._property_id_to_property: Dict[ProcessLocalId ,Property] = {}
+
+    self._name_path_to_set: Dict[Tuple[str ,...],SemanticSet] = {}
+    self._set_id_to_set: Dict[ProcessLocalId ,SemanticSet] = {}
+
+    # Optional syntax instances (if user chooses to model them)
+    self._syntax_id_to_instance: Dict[ProcessLocalId ,SyntaxInstance] = {}
+
+    # Finalization cleanup
+    self._obj_reg.register_finalizer(self._on_subject_finalized)
+
+  def _on_subject_finalized(self ,subject_id: ProcessLocalId):
+    # Called by ObjectRegistry when a registered object is GC'd: drop its
+    # stored properties and set memberships so nothing leaks.
+    self._store.remove_subject(subject_id)
+    self._sets.remove_subject(subject_id)
+
+  def _normalize_name_path(self ,name_path: NamePathLike) -> Tuple[str ,...]:
+    # A dotted string splits on "."; lists and tuples pass through as tuples.
+    if isinstance(name_path ,tuple): return name_path
+    if isinstance(name_path ,list): return tuple(name_path)
+    if isinstance(name_path ,str): return tuple(name_path.split("."))
+    raise TypeError("name_path must be str ,list[str] ,or tuple[str ,...]")
+
+  # -------------------------
+  # Identity acquisition
+  # -------------------------
+  def id_of_py_object(self ,obj: Any) -> ProcessLocalId:
+    """Return (registering if needed) the id of a weakref-able runtime object."""
+    return self._obj_reg.get_id(obj)
+
+  def create_syntax_identity(self ,syntax: SyntaxInstance) -> ProcessLocalId:
+    """Mint a fresh id for a syntax instance and remember the instance."""
+    sid = self._id_gen.next_id()
+    self._syntax_id_to_instance[sid] = syntax
+    return sid
+
+  def try_get_syntax(self ,syntax_id: ProcessLocalId) -> Optional[SyntaxInstance]:
+    """Return the SyntaxInstance for `syntax_id`, or None if unknown."""
+    return self._syntax_id_to_instance.get(syntax_id)
+
+  # -------------------------
+  # Property declaration
+  # -------------------------
+  def declare_property(self ,name_path: NamePathLike ,doc: str = "") -> ProcessLocalId:
+    """Declare a property by name; idempotent (re-declaring returns the same id).
+
+    NOTE: on re-declaration a differing `doc` is silently ignored.
+    """
+    np = self._normalize_name_path(name_path)
+    existing = self._name_path_to_property.get(np)
+    if existing is not None: return existing.id
+    pid = self._id_gen.next_id()
+    p = Property(pid ,np ,doc)
+    self._name_path_to_property[np] = p
+    self._property_id_to_property[pid] = p
+    return pid
+
+  def property_id(self ,name_path: NamePathLike) -> ProcessLocalId:
+    """Look up a declared property's id; raises KeyError if never declared."""
+    np = self._normalize_name_path(name_path)
+    p = self._name_path_to_property.get(np)
+    if p is None: raise KeyError(f"Property not declared: {np!r}")
+    return p.id
+
+  def try_get_property(self ,prop_id: ProcessLocalId) -> Optional[Property]:
+    """Return the Property record for `prop_id`, or None if unknown."""
+    return self._property_id_to_property.get(prop_id)
+
+  # -------------------------
+  # Semantic sets
+  # -------------------------
+  def declare_set(self ,name_path: NamePathLike ,doc: str = "") -> ProcessLocalId:
+    """Declare a semantic set by name; idempotent, like declare_property."""
+    np = self._normalize_name_path(name_path)
+    existing = self._name_path_to_set.get(np)
+    if existing is not None: return existing.id
+    sid = self._id_gen.next_id()
+    s = SemanticSet(sid ,np ,doc)
+    self._name_path_to_set[np] = s
+    self._set_id_to_set[sid] = s
+    return sid
+
+  def add_to_set(self ,subject: Any ,set_id: ProcessLocalId):
+    """Add `subject` (object or ProcessLocalId) to the set `set_id`."""
+    subject_id = self._coerce_subject_id(subject)
+    self._sets.add_member(set_id ,subject_id)
+
+  def is_in_set(self ,subject: Any ,set_id: ProcessLocalId) -> bool:
+    """True iff `subject` is a member of set `set_id`."""
+    subject_id = self._coerce_subject_id(subject)
+    return self._sets.has_member(set_id ,subject_id)
+
+  def members(self ,set_id: ProcessLocalId) -> List[ProcessLocalId]:
+    """Snapshot of member ids of set `set_id` (order unspecified)."""
+    return list(self._sets.members(set_id))
+
+  # -------------------------
+  # Set/get properties
+  # -------------------------
+  def set(self ,subject: Any ,prop: Union[ProcessLocalId ,NamePathLike] ,value: Any):
+    """Attach `value` under property `prop` to `subject` (overwrites)."""
+    subject_id = self._coerce_subject_id(subject)
+    prop_id = self._coerce_property_id(prop)
+    self._store.set(subject_id ,prop_id ,value)
+
+  def get(self ,subject: Any ,prop: Union[ProcessLocalId ,NamePathLike] ,default: Any = None) -> Any:
+    """Return the value attached to `subject` under `prop`, else `default`."""
+    subject_id = self._coerce_subject_id(subject)
+    prop_id = self._coerce_property_id(prop)
+    return self._store.get(subject_id ,prop_id ,default)
+
+  def has(self ,subject: Any ,prop: Union[ProcessLocalId ,NamePathLike]) -> bool:
+    """True iff `subject` has a value stored under `prop`."""
+    subject_id = self._coerce_subject_id(subject)
+    prop_id = self._coerce_property_id(prop)
+    return self._store.has(subject_id ,prop_id)
+
+  def subjects_with(self ,prop: Union[ProcessLocalId ,NamePathLike]) -> List[ProcessLocalId]:
+    """Reverse lookup: snapshot of subject ids carrying property `prop`."""
+    prop_id = self._coerce_property_id(prop)
+    return list(self._store.subjects_with(prop_id))
+
+  # -------------------------
+  # Coercions
+  # -------------------------
+  def _coerce_subject_id(self ,subject: Any) -> ProcessLocalId:
+    # A ProcessLocalId passes through; anything else is treated as a runtime
+    # object and registered (raises TypeError if not weakref-able).
+    if isinstance(subject ,ProcessLocalId): return subject
+    # For Python runtime objects, we require weakref-able instances.
+    return self._obj_reg.get_id(subject)
+
+  def _coerce_property_id(self ,prop: Union[ProcessLocalId ,NamePathLike]) -> ProcessLocalId:
+    # Accepts either a property id or a declared name path (raises KeyError
+    # for undeclared names).
+    if isinstance(prop ,ProcessLocalId): return prop
+    return self.property_id(prop)
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+PropertyStore
+
+Stores property values and maintains reverse lookups.
+
+This is intentionally process-local and in-memory.
+"""
+
+from __future__ import annotations
+
+from typing import Any ,Dict ,Optional ,Set ,Tuple
+
+from .ProcessLocalId import ProcessLocalId
+
+
+class PropertyStore:
+  """
+  (subject ,property) -> value store with forward and reverse indexes.
+
+  The three structures are kept mutually consistent by set()/remove_subject().
+  """
+  def __init__(self):
+    # (subject_id ,property_id) -> value
+    self._values: Dict[Tuple[ProcessLocalId ,ProcessLocalId] ,Any] = {}
+
+    # subject_id -> set(property_id)
+    self._subject_to_props: Dict[ProcessLocalId ,Set[ProcessLocalId]] = {}
+
+    # property_id -> set(subject_id)
+    self._prop_to_subjects: Dict[ProcessLocalId ,Set[ProcessLocalId]] = {}
+
+  def set(self ,subject_id: ProcessLocalId ,prop_id: ProcessLocalId ,value: Any):
+    """Store `value` (overwriting) and update both reverse indexes."""
+    key = (subject_id ,prop_id)
+    self._values[key] = value
+    self._subject_to_props.setdefault(subject_id ,set()).add(prop_id)
+    self._prop_to_subjects.setdefault(prop_id ,set()).add(subject_id)
+
+  def get(self ,subject_id: ProcessLocalId ,prop_id: ProcessLocalId ,default: Any = None) -> Any:
+    """Return the stored value, or `default` when absent."""
+    return self._values.get((subject_id ,prop_id) ,default)
+
+  def has(self ,subject_id: ProcessLocalId ,prop_id: ProcessLocalId) -> bool:
+    """True iff a value (possibly None) was stored for this pair."""
+    return (subject_id ,prop_id) in self._values
+
+  def subjects_with(self ,prop_id: ProcessLocalId) -> Set[ProcessLocalId]:
+    """Snapshot copy of subjects carrying `prop_id` (safe to mutate)."""
+    return set(self._prop_to_subjects.get(prop_id ,set()))
+
+  def props_of(self ,subject_id: ProcessLocalId) -> Set[ProcessLocalId]:
+    """Snapshot copy of properties attached to `subject_id`."""
+    return set(self._subject_to_props.get(subject_id ,set()))
+
+  def remove_subject(self ,subject_id: ProcessLocalId):
+    """
+    Remove all stored properties for a subject (used on finalization).
+    """
+    prop_ids = self._subject_to_props.pop(subject_id ,set())
+    for prop_id in prop_ids:
+      self._values.pop((subject_id ,prop_id) ,None)
+      s = self._prop_to_subjects.get(prop_id)
+      if s is not None:
+        s.discard(subject_id)
+        # Drop empty reverse-index buckets so the dict does not grow forever.
+        if not s: self._prop_to_subjects.pop(prop_id ,None)
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+SemanticSets
+
+Membership sets over identities. Used for semantic typing.
+
+Design:
+ - set_id identifies the set
+ - members are subject ids
+ - reverse index for cleanup
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Dict ,Optional ,Set
+
+from .ProcessLocalId import ProcessLocalId
+
+
+@dataclass(frozen=True ,slots=True)
+class SemanticSet:
+  """A declared semantic set; membership lives in SemanticSetStore."""
+  # Identity of this set (assigned by PropertyManager.declare_set).
+  id: ProcessLocalId
+  # Hierarchical name, unique per manager.
+  name_path: tuple[str ,...]
+  # Free-text human documentation.
+  doc: str = ""
+
+  def __repr__(self) -> str:
+    return f"<SemanticSet {'.'.join(self.name_path)!r}>"
+
+
+class SemanticSetStore:
+  """
+  set_id -> members with a subject -> sets reverse index for O(#sets) cleanup.
+  """
+  def __init__(self):
+    self._members: Dict[ProcessLocalId ,Set[ProcessLocalId]] = {}
+    self._subject_to_sets: Dict[ProcessLocalId ,Set[ProcessLocalId]] = {}
+
+  def add_member(self ,set_id: ProcessLocalId ,subject_id: ProcessLocalId):
+    """Add `subject_id` to `set_id`; idempotent."""
+    self._members.setdefault(set_id ,set()).add(subject_id)
+    self._subject_to_sets.setdefault(subject_id ,set()).add(set_id)
+
+  def has_member(self ,set_id: ProcessLocalId ,subject_id: ProcessLocalId) -> bool:
+    """True iff `subject_id` is in `set_id` (unknown sets are empty)."""
+    return subject_id in self._members.get(set_id ,set())
+
+  def members(self ,set_id: ProcessLocalId) -> Set[ProcessLocalId]:
+    """Snapshot copy of members of `set_id` (safe to mutate)."""
+    return set(self._members.get(set_id ,set()))
+
+  def remove_subject(self ,subject_id: ProcessLocalId):
+    """Remove `subject_id` from every set (used on subject finalization)."""
+    set_ids = self._subject_to_sets.pop(subject_id ,set())
+    for set_id in set_ids:
+      m = self._members.get(set_id)
+      if m is not None:
+        m.discard(subject_id)
+        # Drop empty buckets to keep the forward index compact.
+        if not m: self._members.pop(set_id ,None)
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+Syntax
+
+RT syntax identity instances.
+
+We treat "syntax" as AST-level objects:
+ - kind: official-ish AST node kind name (e.g., "ast.FunctionDef")
+ - location: file + span
+ - scope: enclosing syntax identity id (optional)
+ - parts: mapping of part-name to literal or referenced syntax identity id(s)
+
+This module does NOT traverse Python programs. It only defines the data model.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any ,Dict ,Optional ,Tuple ,Union ,List
+
+from .ProcessLocalId import ProcessLocalId
+
+
+@dataclass(frozen=True ,slots=True)
+class SourceSpan:
+  """A file location span for a syntax node.
+
+  NOTE(review): nothing here fixes whether lineno/col are 0- or 1-based, or
+  whether the end is inclusive — presumably they mirror Python `ast` node
+  attributes (1-based lines, 0-based columns, exclusive end); confirm at the
+  producer.
+  """
+  file_path: str
+  lineno: int
+  col: int
+  end_lineno: int
+  end_col: int
+
+
+SyntaxPartValue = Union[
+ None
+ ,bool
+ ,int
+ ,float
+ ,str
+ ,ProcessLocalId
+ ,List["SyntaxPartValue"]
+ ,Dict[str ,"SyntaxPartValue"]
+]
+
+
+@dataclass(frozen=True ,slots=True)
+class SyntaxInstance:
+ """
+ A single syntax node instance.
+
+ NOTE: many syntax nodes have no identifier-name. Name-like things (identifiers)
+ appear as child nodes or literals inside `parts`.
+ """
+ kind: str
+ span: SourceSpan
+ scope_id: Optional[ProcessLocalId] = None
+ parts: Dict[str ,SyntaxPartValue] = None
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+"""
+rt_property_manager
+
+Process-local property attachment with:
+ - weak identity for Python runtime instances
+ - explicit identities for syntax instances and properties
+
+Notes:
+ - ProcessLocalId values are not meant to be serialized or persisted.
+"""
+
+from .PropertyManager import PropertyManager
--- /dev/null
+#include <stdio.h>
+
+/* Minimal smoke-test binary for the Rabbit CLI build pipeline. */
+int main(void)
+{
+  puts("hello from Rabbit CLI");
+  return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+
+"""
+property_manager_example_1.py
+
+Demonstrate RT identity-based PropertyManager.
+
+Run:
+ PYTHONPATH=. python3 property_manager_example_1.py
+"""
+
+from rt_property_manager import PropertyManager
+
+
+class WidgetFactory:
+  """Callable factory producing Widget instances (the example's provenance source)."""
+  def __call__(self ,x):
+    return Widget(x)
+
+
+class Widget:
+  """Trivial example subject: wraps a value `x` and can add to it."""
+  def __init__(self ,x):
+    self.x = x
+
+  def add(self ,y):
+    return self.x + y
+
+
+def main():
+  """Walk through set declaration, property attachment, and reverse lookup."""
+  pm = PropertyManager()
+
+  # Declare semantic set "WidgetFactories"
+  set_widget_factories_id = pm.declare_set(["semantic" ,"WidgetFactories"] ,"Factories that produce Widgets")
+
+  wf = WidgetFactory()
+  pm.add_to_set(wf ,set_widget_factories_id)
+
+  # Declare a property "printer" (intended to attach to methods)
+  prop_printer_id = pm.declare_property(["semantic" ,"printer"] ,"Callable that prints the value of an instance")
+
+  # Attach property to Widget.add method object (unbound function attribute on class)
+  pm.set(Widget.add ,prop_printer_id ,lambda inst: print(f"Widget(x={inst.x})"))
+
+  w = wf(7)
+
+  # Semantic check: require that the provenance factory is in the WidgetFactories set
+  # (In this example we didn't record provenance; we'd do that via an explicit call later.)
+
+  # Call printer property on the method we care about
+  printer = pm.get(Widget.add ,prop_printer_id)
+  printer(w)
+
+  # Reverse lookup: which subjects have 'printer'?
+  # Expect 1: only Widget.add was given the property above.
+  subject_ids = pm.subjects_with(prop_printer_id)
+  print("subjects_with(printer):" ,len(subject_ids))
+
+
+if __name__ == "__main__":
+  main()
--- /dev/null
+#+TITLE: Project Ontology: Authored vs. Loadable
+#+AUTHOR: Harmony Developer
+#+DATE: 2025-11-21
+#+OPTIONS: toc:t num:nil
+
+* The Core Philosophy
+This project distinguishes files based on fundamental **invariants** (properties) rather than arbitrary file types. This creates a clear semantic structure:
+- **Provenance**: Who created this file?
+- **Capability**: What is the file's primary function in the system?
+
+**The Golden Rule:** God and Artists (Developers) *create* things; Factories (Build Systems) *make* things.
+
+* Directory Structure Overview
+
+#+BEGIN_SRC text
+developer/
+├── authored/ # (The Logic) Human-written source code.
+├── loadable/ # (Capability) Agnostic Entry Points.
+├── scratchpad/ # (Transient) Intermediates, Objects.
+└── tool/ # (The Factory) Build scripts.
+
+release/
+├── loadable/ # (Capability) Shared, Agnostic Release Entry Points.
+├── local_build/ # (Action/Locality) Architecture-specific binaries.
+#+END_SRC
+
+* Detailed Invariants
+
+** ~developer/authored/~
+- **Invariant:** Primary Logic Source (Code). Every file here is written by a human author.
+- **Rule:** Scripts must treat this directory as read-only. This replaces the old `cc/` and `python3/` source directories.
+
+** ~developer/loadable/~
+- **Invariant:** Architecture-Agnostic Entry Points. Files here possess the property of being executable by the user.
+- **Contents:** Symlinks to interpreted code, shared scripts, and wrappers that are safe to commit.
+
+** ~developer/scratchpad/loadable/~
+- **Invariant:** Machine-Generated Executables (Intermediate). This is the transient output location during development.
+- **Rule:** This directory is **ignored by Git** and houses compiled binaries and libraries derived by the build system.
+
+** ~release/local_build/~
+- **Invariant:** Local Action Required.
+- **Rule:** This directory is added to the `.gitignore` in the release directory. Its presence signals to a new developer: **"You must perform a local build to populate this directory with machine-specific executables."**
+- **Contents:** Final binaries are copied here by the release script from the `scratchpad/`.
--- /dev/null
+*
+!/.gitignore
--- /dev/null
+#!/usr/bin/env bash
+# Resolve this script's absolute path (realpath follows symlinks).
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# input guards
+
+ env_must_be="developer/tool/env"
+ if [ "$ENV" != "$env_must_be" ]; then
+ echo "$(script_fp):: error: must be run in the $env_must_be environment"
+ exit 1
+ fi
+
+set -e
+set -x
+
+ cd "$REPO_HOME"/developer || exit 1
+ /bin/make -f tool/makefile $@
+
+set +x
+echo "$(script_fn) done."
--- /dev/null
+# developer/tool/makefile — Orchestrator (Hybrid)
+.SUFFIXES:
+.EXPORT_ALL_VARIABLES:
+
+RT_INCOMMON := $(REPO_HOME)/shared/third_party/RT-project-share/release
+include $(RT_INCOMMON)/make/environment_RT_1.mk
+
+.PHONY: usage
+usage:
+ @printf "Usage: make [usage|information|all|lib|CLI|kmod|clean]\n"
+
+.PHONY: version
+version:
+	@printf "local ----------------------------------------\n"
+	@echo tool/makefile version 2.0
+	@printf "target_library_CLI.mk ----------------------------------------\n"
+	@$(MAKE) -f $(RT_INCOMMON)/make/target_library_CLI.mk version
+	@printf "target_kmod.mk ----------------------------------------\n"
+	@$(MAKE) -f $(RT_INCOMMON)/make/target_kmod.mk version
+
+.PHONY: information
+information:
+ @printf "local ----------------------------------------\n"
+ -@echo CURDIR='$(CURDIR)'
+ @echo REPO_HOME="$(REPO_HOME)"
+ @echo KMOD_BUILD_DIR="/lib/modules/$(shell uname -r)/build"
+ @echo CURDIR="$(CURDIR)"
+ @printf "target_library_CLI.mk ----------------------------------------\n"
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_library_CLI.mk information
+ @printf "target_kmod.mk ----------------------------------------\n"
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_kmod.mk information
+
+.PHONY: all
+all: library CLI kmod
+
+.PHONY: library lib
+library lib:
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_library_CLI.mk library
+
+.PHONY: CLI
+CLI:
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_library_CLI.mk CLI
+
+.PHONY: kmod
+kmod:
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_kmod.mk kmod
+
+.PHONY: clean
+clean:
+ @printf "local ----------------------------------------\n"
+ @printf "target_library_CLI.mk ----------------------------------------\n"
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_library_CLI.mk clean
+ @printf "target_kmod.mk ----------------------------------------\n"
+ @$(MAKE) -f $(RT_INCOMMON)/make/target_kmod.mk clean
--- /dev/null
+#!/usr/bin/env -S python3 -B
+# -*- mode: python; coding: utf-8; python-indent-offset: 2; indent-tabs-mode: nil -*-
+
+import os, sys, shutil, stat, pwd, grp, glob, tempfile
+
+HELP = """usage: release {write|clean|ls|help|dry write} [DIR]
+ write [DIR] Writes released files into $REPO_HOME/release. If [DIR] is specified, only writes files found in scratchpad/DIR.
+ clean [DIR] Remove the contents of the release directories. If [DIR] is specified, clean only the contents of that release directory.
+ ls List release/ as an indented tree: PERMS OWNER NAME (root-level dotfiles printed first).
+ help Show this message.
+ dry write [DIR]
+ Preview what write would do without modifying the filesystem.
+"""
+
+ENV_MUST_BE = "developer/tool/env"
+DEFAULT_DIR_MODE = 0o750
+PERM_BY_DIR = {
+ "kmod": 0o440,
+ "machine": 0o550,
+ "python3": 0o550,
+ "shell": 0o550,
+}
+
+def exit_with_status(msg, code=1):
+  """Print `release: msg` to stderr and exit the process with `code`."""
+  print(f"release: {msg}", file=sys.stderr)
+  sys.exit(code)
+
+def assert_env():
+  """Exit unless $ENV proves we were entered via `source ./env_developer`."""
+  env = os.environ.get("ENV", "")
+  if env != ENV_MUST_BE:
+    hint = (
+      "ENV is not 'developer/tool/env'.\n"
+      "Enter the project with: source ./env_developer\n"
+      "That script exports: ROLE=developer; ENV=$ROLE/tool/env"
+    )
+    exit_with_status(f"bad environment: ENV='{env}'. {hint}")
+
+def repo_home():
+  """Return $REPO_HOME, exiting with a helpful message when it is unset."""
+  rh = os.environ.get("REPO_HOME")
+  if not rh:
+    exit_with_status("REPO_HOME not set (did you 'source ./env_developer'?)")
+  return rh
+
+def dpath(*parts):
+  """Join `parts` under $REPO_HOME/developer."""
+  return os.path.join(repo_home(), "developer", *parts)
+
+def rpath(*parts):
+  """Join `parts` under $REPO_HOME/release."""
+  return os.path.join(repo_home(), "release", *parts)
+
+def dev_root():
+  """Absolute path of $REPO_HOME/developer."""
+  return dpath()
+
+def rel_root():
+  """Absolute path of $REPO_HOME/release."""
+  return rpath()
+
+def _display_src(p_abs: str) -> str:
+  # Developer paths shown relative to $REPO_HOME/developer
+  # Falls back to the absolute path when p_abs is outside the developer tree
+  # or commonpath raises (e.g. different drives on Windows).
+  try:
+    if os.path.commonpath([dev_root()]) == os.path.commonpath([dev_root(), p_abs]):
+      return os.path.relpath(p_abs, dev_root())
+  except Exception:
+    pass
+  return p_abs
+
+def _display_dst(p_abs: str) -> str:
+  # Release paths shown as literal '$REPO_HOME/release/<rel>'
+  # (the '$REPO_HOME' text is intentional display sugar, not expansion).
+  try:
+    rel = os.path.relpath(p_abs, rel_root())
+    rel = "" if rel == "." else rel
+    return "$REPO_HOME/release" + ("/" + rel if rel else "")
+  except Exception:
+    return p_abs
+
+def ensure_mode(path, mode):
+  """Best-effort chmod; deliberately ignores failures (e.g. foreign ownership)."""
+  try: os.chmod(path, mode)
+  except Exception: pass
+
+def ensure_dir(path, mode=DEFAULT_DIR_MODE, dry=False):
+  """Create `path` (and parents) with `mode`; in dry mode only print the plan."""
+  if dry:
+    if not os.path.isdir(path):
+      # Pick the prettiest display form depending on which tree the path is in.
+      shown = _display_dst(path) if path.startswith(rel_root()) else (
+        os.path.relpath(path, dev_root()) if path.startswith(dev_root()) else path
+      )
+      print(f"(dry) mkdir -m {oct(mode)[2:]} '{shown}'")
+    return
+  os.makedirs(path, exist_ok=True)
+  ensure_mode(path, mode)
+
+def filemode(m):
+  """Render an st_mode as 'ls -l' text, falling back to octal on error."""
+  try: return stat.filemode(m)
+  except Exception: return oct(m & 0o777)
+
+def owner_group(st):
+  """Render 'owner:group' names, falling back to numeric uid:gid."""
+  try: return f"{pwd.getpwuid(st.st_uid).pw_name}:{grp.getgrgid(st.st_gid).gr_name}"
+  except Exception: return f"{st.st_uid}:{st.st_gid}"
+
+# ---------- LS with two-pass column width for owner:group ----------
+def list_tree(root):
+  """Print `root` as an indented tree: PERMS OWNER:GROUP NAME.
+
+  Two passes: gather entries (recording owner:group widths), then print with
+  the owner:group column padded to the widest value.  Root-level dotfiles are
+  printed first; lstat is used so symlinks are shown, not followed.
+  """
+  if not os.path.isdir(root):
+    return
+
+  # gather entries in display order, record owner:group widths
+  entries = [] # list of (is_dir, depth, perms, ownergrp, name)
+  def gather(path: str, depth: int, is_root: bool):
+    try:
+      it = list(os.scandir(path))
+    except FileNotFoundError:
+      return
+    dirs = [e for e in it if e.is_dir(follow_symlinks=False)]
+    files = [e for e in it if not e.is_dir(follow_symlinks=False)]
+    dirs.sort(key=lambda e: e.name)
+    files.sort(key=lambda e: e.name)
+
+    if is_root:
+      # root-level: dotfiles first
+      for f in (e for e in files if e.name.startswith(".")):
+        st = os.lstat(f.path)
+        entries.append((False, depth, filemode(st.st_mode), owner_group(st), f.name))
+      for d in dirs:
+        st = os.lstat(d.path)
+        entries.append((True, depth, filemode(st.st_mode), owner_group(st), d.name + "/"))
+        gather(d.path, depth + 1, False)
+      for f in (e for e in files if not e.name.startswith(".")):
+        st = os.lstat(f.path)
+        entries.append((False, depth, filemode(st.st_mode), owner_group(st), f.name))
+    else:
+      # subdirs: dirs then files (dotfiles naturally sort first)
+      for d in dirs:
+        st = os.lstat(d.path)
+        entries.append((True, depth, filemode(st.st_mode), owner_group(st), d.name + "/"))
+        gather(d.path, depth + 1, False)
+      for f in files:
+        st = os.lstat(f.path)
+        entries.append((False, depth, filemode(st.st_mode), owner_group(st), f.name))
+
+  gather(root, depth=1, is_root=True)
+
+  # compute max width for owner:group column
+  ogw = 0
+  for (_isdir, _depth, _perms, ownergrp, _name) in entries:
+    if len(ownergrp) > ogw:
+      ogw = len(ownergrp)
+
+  # print
+  print("release/")
+  for (isdir, depth, perms, ownergrp, name) in entries:
+    indent = "  " * depth
+    # perms first, owner:group padded next, then name with tree indent
+    print(f"{perms} {ownergrp:<{ogw}}  {indent}{name}")
+
+# ---------- end LS ----------
+
+def iter_src_files(topdir, src_root):
+ base = os.path.join(src_root, topdir) if topdir else src_root
+ if not os.path.isdir(base):
+ return
+ yield
+ if topdir == "kmod":
+ for p in sorted(glob.glob(os.path.join(base, "*.ko"))):
+ yield (p, os.path.basename(p))
+ else:
+ for root, dirs, files in os.walk(base):
+ dirs.sort(); files.sort()
+ for fn in files:
+ src = os.path.join(root, fn)
+ rel = os.path.relpath(src, base)
+ yield (src, rel)
+
+def target_mode(topdir):
+ return PERM_BY_DIR.get(topdir, 0o440)
+
+def copy_one(src_abs, dst_abs, mode, dry=False):
+  """Install one file: copy src to a temp file, chmod, then atomically replace dst.
+
+  The atomic os.replace lets us overwrite an existing read-only destination.
+  In dry mode, print the equivalent install commands and touch nothing —
+  NOTE(review): except that os.makedirs below still runs even when dry=True,
+  so a dry run can create empty parent directories; confirm that is intended.
+  """
+  src_show = _display_src(src_abs)
+  dst_show = _display_dst(dst_abs)
+  parent = os.path.dirname(dst_abs)
+  os.makedirs(parent, exist_ok=True)
+
+  if dry:
+    if os.path.exists(dst_abs):
+      print(f"(dry) unlink '{dst_show}'")
+    print(f"(dry) install -m {oct(mode)[2:]} -D '{src_show}' '{dst_show}'")
+    return
+
+  # Replace even if dst exists and is read-only: write temp then atomic replace.
+  fd, tmp_path = tempfile.mkstemp(prefix=".tmp.", dir=parent)
+  try:
+    with os.fdopen(fd, "wb") as tmpf, open(src_abs, "rb") as sf:
+      shutil.copyfileobj(sf, tmpf)
+      tmpf.flush()
+    os.chmod(tmp_path, mode)
+    os.replace(tmp_path, dst_abs)
+  finally:
+    # On success os.replace consumed tmp_path; this only cleans up failures.
+    try:
+      if os.path.exists(tmp_path):
+        os.unlink(tmp_path)
+    except Exception:
+      pass
+
+  print(f"+ install -m {oct(mode)[2:]} '{src_show}' '{dst_show}'")
+
+def write_one_dir(topdir, dry):
+  """Release every artifact from scratchpad/<topdir> into release/<topdir>.
+
+  Exits the process when the scratchpad subdirectory is missing; prints an
+  informational note when it exists but contains no matching artifacts.
+  """
+  rel_root_dir = rpath()
+  src_root = dpath("scratchpad")
+  src_dir = os.path.join(src_root, topdir)
+  dst_dir = os.path.join(rel_root_dir, topdir)
+
+  if not os.path.isdir(src_dir):
+    exit_with_status(
+      f"cannot write: expected '{_display_src(src_dir)}' to exist. "
+      f"Create scratchpad/{topdir} (Makefiles may need to populate it)."
+    )
+
+  ensure_dir(dst_dir, DEFAULT_DIR_MODE, dry=dry)
+
+  wrote = False
+  mode = target_mode(topdir)
+  for src_abs, rel in iter_src_files(topdir, src_root):
+    dst_abs = os.path.join(dst_dir, rel)
+    copy_one(src_abs, dst_abs, mode, dry=dry)
+    wrote = True
+  if not wrote:
+    msg = "no matching artifacts found"
+    if topdir == "kmod": msg += " (looking for *.ko)"
+    print(f"(info) {msg} in {_display_src(src_dir)}")
+
+def cmd_write(dir_arg, dry=False):
+  """'release write [DIR]': release one scratchpad subdir, or all of them.
+
+  Requires the developer environment (assert_env) and an existing scratchpad.
+  """
+  assert_env()
+  ensure_dir(rpath(), DEFAULT_DIR_MODE, dry=dry)
+
+  src_root = dpath("scratchpad")
+  if not os.path.isdir(src_root):
+    exit_with_status(f"cannot find developer scratchpad at '{_display_src(src_root)}'")
+
+  if dir_arg:
+    write_one_dir(dir_arg, dry=dry)
+  else:
+    # No DIR given: release every immediate subdirectory, in sorted order.
+    subs = sorted([e.name for e in os.scandir(src_root) if e.is_dir(follow_symlinks=False)])
+    if not subs:
+      print(f"(info) nothing to release; no subdirectories found under {_display_src(src_root)}")
+      return
+    for td in subs:
+      write_one_dir(td, dry=dry)
+
+def _clean_contents(dir_path):
+  """Delete everything inside `dir_path`, keeping the directory itself.
+
+  Symlinked directories are unlinked, not descended into.
+  """
+  if not os.path.isdir(dir_path): return
+  for name in os.listdir(dir_path):
+    p = os.path.join(dir_path, name)
+    if os.path.isdir(p) and not os.path.islink(p):
+      shutil.rmtree(p, ignore_errors=True)
+    else:
+      try: os.unlink(p)
+      except FileNotFoundError: pass
+
def cmd_clean(dir_arg):
  """Erase released artifacts under the release root.

  dir_arg: a single subdirectory name, or None to clean every
           subdirectory.  Directories themselves are preserved; only
           their contents are removed.  A missing release root is a no-op.
  """
  assert_env()
  release_root = rpath()
  if not os.path.isdir(release_root):
    return
  if dir_arg:
    _clean_contents(os.path.join(release_root, dir_arg))
    return
  for entry in os.scandir(release_root):
    if entry.is_dir(follow_symlinks=False):
      _clean_contents(entry.path)
+
def CLI():
  """Command-line entry point: dispatch on the first argument.

  Supported commands: write [dir], clean [dir], ls, help, dry write [dir].
  Anything unrecognized (including no arguments) prints the help text.
  """
  argv = sys.argv[1:]
  if not argv:
    print(HELP)
    return
  cmd = argv[0]
  rest = argv[1:]
  if cmd == "write":
    cmd_write(rest[0] if rest else None, dry=False)
  elif cmd == "clean":
    cmd_clean(rest[0] if rest else None)
  elif cmd == "ls":
    list_tree(rpath())
  elif cmd == "help":
    print(HELP)
  elif cmd == "dry" and rest and rest[0] == "write":
    # "dry write [dir]" is the only dry-run form; "dry" alone falls through.
    cmd_write(rest[1] if len(rest) >= 2 else None, dry=True)
  else:
    print(HELP)
+
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
  CLI()
--- /dev/null
+#+TITLE: 00 - Project Structure and Ontology
+#+AUTHOR: Harmony Project Team
+#+DATE: 2025-11-21
+#+OPTIONS: toc:2 num:nil
+
+#+HTML_HEAD_EXTRA: <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+#+HTML_HEAD_EXTRA: <link rel="stylesheet" type="text/css" href="../style/rt_dark_doc.css">
+
+#+BEGIN_EXPORT html
+<div class="page">
+#+END_EXPORT
+
+* Purpose
+
+Harmony provides a language agnostic project directory structure and maintenance tools for long-lived, multi-person team software development. The structure exists to enforce:
+
1. Clarity about where things live.
   1.1. Role based work areas
   1.2. Separation of skeleton, team member authored, machine-made, and third party installed software.
2. A safe, predictable build and release workflow.
+
+A newcomer should be able to clone Harmony and understand the entire
+working model in minutes.
+
+To make a new project a toolsmith first clones Harmony, renames it to the project name, resets the history, and disconnects it from the Harmony project. The skeleton of the new project can be kept in sync with the Harmony skeleton by going to a Harmony skeleton clone and running the =Harmony/tool/sync= tool.
+
+Harmony is IDE agnostic. I typically use Emacs as an IDE and encourage its use. Because of this the documents standard format is emacs `.org` format. Files in this format can be exported to other formats, such as HTML. I have also used IntelliJ IDEA and Eclipse with Harmony, though the project skeleton has drifted some since then. I would like to update Harmony to work out of the box with these and other IDEs in the future.
+
+* 1. Key Concepts
+
+** Created vs. Made
+Harmony divides the world into two categories:
+
+- *Created / Authored*
+ Human-written: source files, docs, design notes.
+
+- *Made*
+ Tool-produced: binaries, generated sources, intermediates.
+
+This separation protects authored material from accidental overwrite and
+makes build artifacts fully disposable.
+
+** Semantic Paths
+Directory names in Harmony are not decorative.
+Each directory name is a *property* shared among files. Thus, a full path forms a semantic
+sentence describing said files.
+
+Example:
+
+- =developer/authored/=
+ “Developer authored code”
+
+- =developer/scratchpad/made/=
+ “Developer → scratch workspace → tool-made binaries”
+
+Once you learn the ontology, you can infer the meaning of any path.
+
+* Top-Level Repository Layout
+
+The layout below is stable across all Harmony skeleton based projects:
+
+| Directory | Meaning |
+|----------|---------|
+| =developer/= | Primary workspace for developers |
+| =tester/= | Regression and validation workspace for testers |
+| =tool/= | Project-local tools |
+| =shared/= | Shared ecosystem tools |
+| =document/= | Documentation (local to project) |
+| =release/= | Central Working Point for promoted artifacts |
+| =scratchpad/= | Global scratch (misc experiments) |
+| =env_* = | Role activators |
+
+The `env_*` files prepare PATH, set environment variables, and cd into
+the correct workspace. A team member will source one of the =env_*= files to take on a role in the project. As of this writing the supported roles are: toolsmith, developer, and tester.
+
+* The =release/= tree.
+
+The =release/= tree is where developers put work product that is to be shared with testers. Once the contents of the =release/= directory are blessed by the tester, the project will be given a release branch, and then the =release/= tree contains the files that are shared with users. Users should not be pulling files from anywhere else in the project tree.
+
+The =release/= tree is owned by the developer. No other role should write into this tree.
+
Ideally, artifacts arrive in the =release/= tree *only* when the developer invokes the =promote= tool. And take note: the developer's =promote= script, as initially provided with the Harmony skeleton, has a command for erasing the contents of the release directory.
+
+** =release/made_tracked/=
+ Architecture-agnostic artifacts. Tracked by Git, comes when the project is cloned. Users update the release directory when on a release branch, by pulling the project.
+
+** =release/made_untracked/=
+ Architecture-specific artifacts. Directory tracked, contents are git ignored. The contents of this directory are created by running a build after the project is cloned/pulled. This was a compromise to avoid the problem of maintaining architecture and platform specific binaries.
+
** =release/document/=
+ Documents for users of the code in the release directory.
+
+* The =developer/= tree
+
+This property is set, i.e. this director is entered, by first going to the top level directory of the project, then sourcing the =env_developer= environment file. The developer can hook additional items into the environment by putting them into the =developer/tool/env= file.
+
+** =authored/=
+ Human-written source. Tools should never delete files in this directory. =authored/= files are tracked.
+
+** =made/=
+
+ Generated by tools, and the artifacts are tracked. These artifacts
+ are stable across machine architectures. A common item to find in
+ the =developer/made/= directory is a link to a Python program in
+ the =authored/= directory. When following RT conventions the entry
+ point of command line driven Python files is `CLI.py`, so the link
+ in =developer/made/= gives the program a name.
+
+** =experiment/=
  Try-it-here code. Short-lived. Developers do spot testing here. If tests are to be longer lived, they should be moved to the tester role.
+
+** =scratchpad/=
+ Contents of this directory are git ignored. It is intended to hold all intermediate build outputs, and anything else the developer might consider scratchpad work.
+
+** =scratchpad/made/=
  By RT convention, architecture specific build artifacts are not tracked, but rather are built each time the project is cloned. Such build artifacts are placed in =developer/scratchpad/made= and if they are to be shared with the tester, the release script will release them to =release/made_untracked=.
+
+** =tool/=
+ Developer specific tools. Additional tools will be found under =shared=. If the project is not self contained, then yet additional tools might come from the system environment.
+
+* Documents
+
+** =release/document/=
  Documentation for users of released code. E.g.s a =man= page, user manual for an application, or a reference manual for a released library.
+
+** =document/=
+ Project wide documentation for project team members.
+
+** =developer/document/=
+ Documentation for developers.
+
+** =tester/document/=
+ Documentation for testers.
+
+** =shared/document/=
+ Documentation on installing the shared tools. Note if a tool has a document directory that remains with the tool.
+ This will typically have a list of tools that need to be installed for the project, and notes to help make installs go more smoothly.
+
+* Tools
+
+** =tool/=
+
+We call the team members who administer the project and install tools the 'toolsmith'. The top level =tool/= directory holds the toolsmith's tools.
+
+** =shared/=
+
Shared tools are available to all team members. Those that have been written specifically for the Harmony skeleton or for this project go into the =shared/authored= directory. Note the tool =scratchpad=, try =scratchpad help=.
+
+Tools installed from third parties go into the git ignored directory =shared/third_party=.
+
+** =developer/tool=
+
+Developer role specific tools. The =release= script and the RT project shared =make= scripts are found here.
+
+** =tester/tool=
+
+Tester role specific tools.
+
+
+#+BEGIN_EXPORT html
+</div>
+#+END_EXPORT
--- /dev/null
+#+TITLE: 01 - Workflow and Build Contract
+#+AUTHOR: RT
+#+DATE: 2025-11-21
+#+OPTIONS: toc:2 num:nil
+
+#+HTML_HEAD_EXTRA: <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+#+HTML_HEAD_EXTRA: <link rel="stylesheet" type="text/css" href="../style/rt_dark_doc.css">
+
+#+BEGIN_EXPORT html
+<div class="page">
+#+END_EXPORT
+
+* Purpose
+The workflow contract defines the steps from authorship through release of work product.
+
+There are three circular loops.
+
+In the development loop, developers author code and run experiments, eventually then promoting work product to the =release/= directory.
+
+In the developer tester loop, testers test the promoted release candidates and file issues against them, developers address these, and then promote new release candidates. Or in a tighter version of this loop, a developer with a local copy of this project plays both roles so as to speed the cycle.
+
+In the third loop, the tester finds the release candidates to meet the goals for the release version, and to be of sufficient quality that they create a new release branch. Released code then has bug reports filed against it. Developers address these and the prior two loops are run until a new release candidate is stable, and a new release branch is made.
+
+Release branches have two integer numbers. The first number is the version of the software, as per the architectural specification. (That specification is placed into the project document directory.) The second number counts the number of times the tester has created a release branch for said version of the software.
+
The workflow is designed for forward motion through release numbers, so as to avoid having to maintain older releases separately. It is better to give a customer a new release if a bug must be fixed, even if the customer will not pay for the new release, than it is to pay the cost of dealing with multi-release level bug fixes. However, as each release has its own branch, it is possible to perform multi-release level bug fixes if that is what is required or desired.
+
+* Roles and Process
+
+** Developer Role
+Responsibilities:
+
+1. Write and modify authored source. Ensure code meets RT style (see =02_RT_Code_Format.org=).
+2. Run builds.
+3. Spot testing in =experiment/=
4. Promotes release candidates for more thorough testing using the customized =promote= script.
+5. Rinse, lather, repeat.
+
+** Tester Role
+Responsibilities:
+
+1. Validate candidates under =release/=.
+2. Run regression suites.
+3. Approve for quality and completeness, and create release branches.
+
+** Toolsmith Role
+Responsibilities:
+
+1. Setup the project directory, and keep the project in sync with the Harmony skeleton.
+2. Maintain the role environments, apart from the =<role>/tool/env= files which are owned by the respective =<role>=.
3. Install and maintain shared tools, =tool/= and =shared/=, and other tools upon request.
+4. Address issues with project workflow. Propose updates to the Harmony skeleton.
+
+
+* Entering the project
+
What I do to enter a project is to first run an emacs shell. I cd to the project I want to work on, and then source the =env_toolsmith=, =env_developer=, or =env_tester= file, depending on which role I want to work in. Although sourcing these files affects the environment of the shell I am running, it does not affect the environment of emacs. Hence after sourcing the environment, I launch an IDE. This newly launched IDE will have a correct environment. For myself, these days, that new IDE will be emacs again.
+
It is common that I will have two or three IDEs (emacs invocations) running side by side, each as different roles. Then I can write code, spot test it, promote it, then change to the other IDE and run regression tests. And if it is a phase of the project where tools are in flux, I will use the third IDE for modifying tools. Hence, as one person I will take on three roles simultaneously, each in a different IDE.
+
+On a large project, chances are that all team members will be doing something similar to this on their local clones of the project. However, there will be team members concentrating on code development, and others on testing and release. Early on a toolsmith will setup the project, and then continue to maintain it.
+
+* Developer
+
+** Authoring and Building
+
+Developers write the build fodder files in the =authored/= directory. File name extensions are used to signal to the build tools how the build fodder is to be used. When the conventional single extension giving the main file type is not enough, two extensions are used.
+
For example, with the default makefile for C, compiler fodder is found in the =authored/= directory, each file has one of these file name extensions:
+
+- CLIs end in =.cli.c=
- Library code sources end in =.lib.c=
+- Kernel module sources are =.mod.c=
+
+Fodder with the =.cli.c= extension is made into a stand alone executable.
+
Fodder with the =.lib.c= extension is compiled as an object file and added to the =lib<project>.a= archive. The =.cli.c= files are linked against said archive.
+
+Build tools never write into the =developer/authored= directory. Build products that are not to be tracked go on the =scratchpad/=. Those that are tracked go into the =developer/made= directory.
+
+It is expected that developers customize and add to the build scripts that come with the Harmony skeleton in order to fit their specific build needs. Note the Ariadne project for complex builds.
+
+** Developer Testing
+
+Spot tests are run in the =experiment/= directory. If the tests grow complex or are to be kept for the long term, move them to the tester environment.
+
+Once the developer finds the edits to be stable he or she can promote them. The promoted code is referred to as release candidates. Promoted release candidates can then be read by the tester role.
+
+As I mentioned, it is not uncommon for a team member to have two IDEs open, with one being in the developer environment, and one being in the tester environment, and then to bounce back and fourth between them.
+
+Once the release candidate code is stable, the developer can pull the remote repo, address merge conflicts, then push the local repo back. Merge conflicts on tracked release candidates are common as it is a bottleneck point in the code development.
+
+** Promotion for release
+
+As mentioned, files are promoted from the developer environment to the top level =release/= directory by the developer. The developer effects promotion for release by running the customized =developer/tool/promote= script, and then pushing the repository. Only a tester can actually perform a release.
+
+Building and promotion are separate activities.
+
+- No tool may rebuild during promotion.
+- Promotion is a copy-only operation.
+- No builds are run in the =release/= directory.
+
+If architecture specific files are to be part of the release, the developer will develop a =build_untracked= script and promote it into the =release/tool= directory. Then when a user clones a released project, as a second step the user will invoke the =release/tool/build_untracked= script. That script will fill in the =release/made_untracked= directory with code built specifically for the user's platform.
+
- =release/document/= (documents for those who intend to use the work product)
+- =release/authored= (interpreter fodder - _none are run directly_)
+- =release/made_tracked/= (pushed to remote, pulled from remote, links into authored scripts)
+- =release/made_untracked/= (local-only)
+- =release/tool/= (=build_untracked= and other tools for maintaining released code)
+
We chose the 'build after clone' approach over the 'thousand architecture specific binary release directories' approach, because maintaining many architecture release files became a maintenance problem. Note this new approach requires that third party tools be installed so that the =release/tool/build_untracked= script can run. This is the trade off cost for not having the thousand architecture directories.
+
+A user of the Harmony skeleton is free to customize the promotion tool and go back to multiple architecture specific binary release directories if that is what they want.
+
Clearly if work product is intended to be distributed to lay users, there must be a deployment step after the release step, but we do not address this in these documents, as this is not part of Harmony.
+
+
+* Tester
+
+The developer has promoted release candidates to the =release/= directory. He or she claims those represent a complete high quality product at the given release level. The testers are going to prove the developers to be wrong about that claim. If testers can't disprove this claim, the testers will make a release branch at the next minor release number for the given major release version.
+
+- The tester reads the spec, and writes a complete set of feature tests.
+
+- The tester uses the Mosaic test tool, and writes a set of tests, first for the individual functions that make up the program, then for functions in groups.
+
+- The tester accumulates tests for each bug that ever comes back on a release.
+
+- The tester collects tests from the developer when they are offered.
+
+- The tester writes other tests as he or she sees fit.
+
+- When the tests pass, one presumes, the tester will create a release branch.
+
+* Separation of roles.
+
+A tester never patches code in the =developer/= directory, instead the tester files issues. A tester could propose a code fix on another branch, and then point the developers at it in the issue report.
+
+A developer never writes into =tester/=, instead a developer adds to the =experiment/= and offers to share tests. A developer can propose tests on another branch, and then point testers at it.
+
It is up to the project manager how strict role assignments will be.
+
As mentioned before, one person can play multiple roles. For example, it makes perfect sense for a developer with a local copy of the repo, to have an IDE open as a tester, so that he or she can run tests on release candidates before pushing them. However, when doing this, the test code might be read only. The developer is merely running it and has no plans to push changes to it.
+
+#+BEGIN_EXPORT html
+</div>
+#+END_EXPORT
--- /dev/null
+#+TITLE: 02 - RT Prescriptive Code Format Guide (Version 3)
+#+AUTHOR: Thomas Walker Lynch
+#+DATE: 2025-12-05
+#+OPTIONS: toc:2 num:nil
+
+#+HTML_HEAD_EXTRA: <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+#+HTML_HEAD_EXTRA: <link rel="stylesheet" type="text/css" href="../style/rt_dark_doc.css">
+
+#+BEGIN_EXPORT html
+<div class="page">
+#+END_EXPORT
+
+* Purpose
+
+The goal is consistency, readability, and predictability across all
+languages and tools.
+
+This document covers:
+
+1. Naming conventions
+2. Object vs. Instance Nomenclature
+3. Vertical comma lists
+4. Enclosure spacing
+5. Line breaks and indentation
+6. Cross-language guidance
+
+* Object vs. Instance Nomenclature
+
+In the RT world, we reserve the word 'object' for its general English meaning, as its technical meaning in programming often causes confusion. When discussing data that is manipulated solely through a defined interface, use the term **instance**.
+
+- **Object:** Anything that can be described or reasoned about. A 'math object' is anything defined using mathematics, and a 'Python object' is anything that can be described with Python syntax.
+- **Instance:** Data that is only accessed or manipulated through a defined interface. This term is used to clearly denote data encapsulation and separation of concerns.
+
+
* Identifier Naming Conventions
+
+** Identifier Naming
+
+- Types, modules: *PascalCase*
+- Functions, variables: *snake_case*
+- Globals: UPPER_SNAKE_CASE
+
+** Proper Noun and Acronyms
+
+Even in PascalCase and snake_case, they remain capitalized, as per the English language convention.
+
+E.g.s
+
+- IEEE_publication_count
+- person_Sara_novelties_list
+
+
+** Suffix Semantics
+Optionally suffixes are added to variable names to suggest type or interface.
+
- =*_dp= :: directory path, not specified if relative or absolute
- =*_dpr= :: relative directory path
- =*_dpa= :: absolute directory path
+
- =*_fp= :: file path, not specified if relative or absolute
- =*_fpr= :: relative file path
- =*_fpa= :: absolute file path
+
If the file system node type is not specified:
+
- =*_fs_nod_p= :: file system node path, not specified if relative or absolute
- =*_fs_nod_pr= :: relative file system node path
- =*_fs_nod_pa= :: absolute file system node path
+
+- =*_list= :: generic ordered items
+- =*_seq= :: ordered items accessed by index
+
+- =*_map= :: a keyed container
- =*_dict= :: a keyed container
+
+- =*_count= :: number of elements
+- =*_flag= :: boolean
+
- =*_Type= :: names specific type, where the type name is given in PascalCase, as is the norm for types. E.g.s =name_Array= or =name_Map= for the cases that name is an instance of a defined Array or Map type.
+
+Add a container type suffix instead of making variables names plural. For example,
+
+- =name_seq= :: a sequence of zero or more names, used in place of =names=.
+
+
+* Comma separated list
+
+RT code format treats the comma in a list as belonging to the item that caused the comma to be needed.
+
+** Horizontal Comma List
+
+For lists on a single line, the comma is preceded by a space and abuts the item it follows.
+
+#+BEGIN_SRC c
+ int x ,y ,z;
+#+END_SRC
+
+Note the space before the comma, and the comma abuts the item that caused the comma to be needed. This applies to language statements and data values alike.
+
+** Vertical Comma List
+
+For lists spanning multiple lines, the comma is placed *before* the item on the new line, aligned with the item's indentation.
+
+#+BEGIN_SRC c
+result = some_function(
+ first_argument
+ ,second_argument
+ ,third_argument
+);
+#+END_SRC
+
+Example in Python:
+
+#+BEGIN_SRC python
+items = [
+ first_item
+ ,second_item
+ ,third_item
+]
+#+END_SRC
+
+- Two-space indent.
+- Comma at column after indentation.
+- All items aligned except the first, as it does not have a comma before it.
+- This convention works identically across C, Python, Bash arrays, JSON-like data, etc.
+
+* Enclosure Spacing
+
+This rule applies on a line by line basis.
+
+** General Rules
+
+**No Space Between Adjacent Enclosures:** Generally, there is no space between adjacent enclosure punctuation (e.g., `f(g(x))`).
+
+** Single-Level Enclosures
+
+For enclosures that do not contain other enclosures (e.g., a simple `if(condition)`), there is **no space padding** inside the enclosure punctuation.
+
+Conforming:
+
+#+BEGIN_SRC c
+if(condition){
+ do_something();
+}
+#+END_SRC
+
+Bad, non-conforming:
+
+#+BEGIN_SRC c
+if(condition) {
+ do_something();
+}
+#+END_SRC
+
+Bad, non-conforming:
+
+#+BEGIN_SRC c
+if ( condition ) {
+ do_something ( );
+}
+#+END_SRC
+
+** Multi-Level Enclosures
+
+For enclosures that contain other enclosures (e.g., `if( f(g(x)) )`), one space of padding is applied only to the **level one (outermost)** enclosure punctuation. All other levels follow the single-level rule (no padding).
+
+#+BEGIN_SRC c
+if( f(g(x)) ){
+ do_something();
+}
+#+END_SRC
+
+In this example, the =if= has a three-level enclosure structure. The outermost parentheses of the =if= condition get one space of padding, while the inner parentheses for =f(...)= and =g(...)= get no padding.
+
+** Unmatched Enclosure Punctuation
+
+Format the enclosure punctuation that is present, as though it were matched. Treat an orphaned opening enclosure punctuation as though it were closed at the end of the line. Treat an extraneous closing, as though there were an opening at the beginning of the line.
+
+** Short Stuff Rule
+
+If a statement, such as an =if= block or a loop, can fit on a single line and is shorter than a reasonable line length (e.g., 40-60 characters), it should be kept on a single line without braces.
+
+#+BEGIN_SRC c
+if(x == 0) return;
+#+END_SRC
+
+* Indentation
+
+- Two spaces per indentation level.
+- Never use tabs.
+- Nest lines under the syntactic element that opened them.
+
+* Exercises
+
+To ensure a full understanding of the RT code format, please complete the following exercises.
+
+** Exercise 1: Comma and Function Call Formatting
+
+Reformat the following C code snippet to strictly adhere to the RT code format rules. Pay close attention to the horizontal and vertical comma lists, and the enclosure spacing for the function call.
+
+#+BEGIN_SRC c
+void my_function(int a, int b, int c) {
+ int result = calculate_value(a, b, c);
+ printf("Result: %d, a: %d, b: %d, c: %d\n", result, a, b, c);
+}
+
+result = my_function(
+ rediculously_long_first_argument,
+ rediculously_long_second_argument,
+ rediculously_long_third_argument
+);
+#+END_SRC
+
+** Exercise 2: Multi-Level Enclosure and Short Stuff Rule
+
+Reformat the following C code snippet. The `if` statement should use the multi-level enclosure rule, and the `for` loop body should use the short stuff rule.
+
+#+BEGIN_SRC c
+if (check_permissions(user_id, file_path) && is_valid(file_path)) {
+ for (int i = 0; i < 10; i++) {
+ if (i % 2 == 0) {
+ printf("Even: %d\n", i);
+ }
+ }
+}
+#+END_SRC
+
+#+BEGIN_EXPORT html
+</div>
+#+END_EXPORT
--- /dev/null
+#+TITLE: 03 - Naming and Directory Conventions
+#+AUTHOR: RT
+#+DATE: 2025-11-21
+#+OPTIONS: toc:2 num:nil
+#+HTML_HEAD_EXTRA: <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+#+HTML_HEAD_EXTRA: <link rel="stylesheet" type="text/css" href="../style/rt_dark_doc.css">
+
+#+BEGIN_EXPORT html
+<div class="page">
+#+END_EXPORT
+
A directory name is taken as a property for a set of files. Consequently, directory names are rarely plural. E.g. suppose we have a number of test files in a directory. The directory would be named =test=, as each file in the directory has the property of being a test.
+
It would be nice if we could attach multiple properties to a file as part of the file system framework, but conventional file systems do not support this. Consequently, when needed, people add a second property to a file using dot extensions to the file's name. Hence, we get something like =sqrt.c= in a directory called =source=. So the first property is that the file is source code, and the second property is that it is C code.
+
We could extend the dot suffix model of adding a property to a file by using multiple dot suffixes. Our C makefile structure makes use of this.
+
+So what is a reasonable primary property for a set of files? Perhaps:
+
+- Who uses each file with this property. Home directories are named like this.
+- The role of the people using the file. This is a more generic version of the prior rule. The =developer= and =tester= directories were named in this manner.
+- What program are the files for. Thus we might name a directory a bunch of files for the cc compiler `cc`.
+- The generic category of program said files are for. Thus we end up with directories called =src= or =executable=.
+
+As for the names =src= and =executable= those come from times when almost all programs were compiled. We prefer instead the names =authored= and =made=. =authored= files are those written by humans (or these days, perhaps AI), while =made= files are products of tools. For a Python program, we put packages in =authored= with a module called =CLI.py= for the command line interface. Then we link from =made= into =authored= so as to give the program a name.
+
The RT C coding environment does not use separate source and header files. Instead a variable is set that gates off the implementation if the source code is to be used as a header. Hence, all of our C source fits fine within an =authored= directory.
+
+
+
+#+BEGIN_EXPORT html
+</div>
+#+END_EXPORT
--- /dev/null
+#+TITLE: 04 - Language Addenda (C, Python, Bash)
+#+AUTHOR: RT
+#+DATE: 2025-11-21
+#+OPTIONS: toc:2 num:nil
+#+HTML_HEAD_EXTRA: <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+#+HTML_HEAD_EXTRA: <link rel="stylesheet" type="text/css" href="../style/rt_dark_doc.css">
+
+#+BEGIN_EXPORT html
+<div class="page">
+#+END_EXPORT
+
+
+* Purpose
+The RT code format is language-agnostic, but actual languages differ in
+syntax and constraints.
+
+This document explains how the RT rules are applied in:
+
+1. C
+2. Python
+3. Bash
+
+For each language we answer:
+
+1. What carries over directly from =02_RT_Code_Format.org=.
+2. What must be adapted.
+3. What extra discipline is required.
+
+* 1. C Addendum
+
+** 1.1 Control Structure and File Layout
+
The detailed RT C file structure is described in the dedicated
=RT_C_control_structure= document. The core ideas:
+
+1. Each module has an *Interface* section and an *Implementation*
+ section in the same file.
+2. The sections are toggled using preprocessor macros (e.g. =FACE=).
+3. Interface declarations are processed even when included multiple
+ times; the implementation is compiled only when used as an
+ implementation.
+
+This approach:
+
+1. Keeps the interface and implementation in sync.
+2. Avoids maintaining parallel =.h= and =.c= files for each module.
+3. Integrates smoothly with standardized makefiles.
+
+** 1.2 Indentation and Comma Lists
+
+C code follows the RT two-space indentation and vertical comma lists:
+
+#+BEGIN_SRC c
+result = some_function(
+ first_argument
+ ,second_argument_with_longer_name
+ ,third_argument
+);
+#+END_SRC
+
+Rules:
+
+1. Two spaces per block indentation.
+2. The comma starts the line in vertical lists.
+3. Align continuation lines under the first symbol after the equals
+ sign or opening parenthesis when feasible.
+
+** 1.3 Error Handling and Ownership
+
+Guidelines:
+
+1. Functions should document ownership of pointers and lifetimes.
+2. Prefer explicit =*_count= parameters over sentinel values when
+ passing arrays.
+3. Return codes should be consistent (=0= success, non-zero failure) or
+ use clearly documented enums.
+
+* 2. Python Addendum
+
+** 2.1 Indentation and Layout
+
+Python enforces indentation syntactically, so the RT two-space rule
+becomes:
+
+1. Use *two-space indentation* for all Python code, even though four is
+ common in the wider ecosystem.
+2. Vertical comma lists still place the comma at the start of the line,
+ after the indentation.
+
+Example:
+
+#+BEGIN_SRC python
+items = [
+ first_item
+ ,second_item
+ ,third_item
+]
+#+END_SRC
+
+** 2.2 Modules and CLI Separation
+
+Python scripts distinguish between:
+
+1. *Work functions* (importable API).
+2. *CLI entry points* (argument parsing, printing, exit codes).
+
+Pattern:
+
+1. Put reusable logic into functions and classes.
+2. Put argument parsing and =if __name__ == "__main__":= in the CLI
+ section.
+3. Keep side effects out of import time.
+
+** 2.3 Error Handling
+
+1. Raise exceptions for exceptional conditions.
+2. Catch exceptions at the CLI boundary and convert them into user
+ messages and exit codes.
+3. Avoid catching broad =Exception= unless it is immediately converted
+ into a controlled failure.
+
+* 3. Bash Addendum
+
+** 3.1 Shebang and Safety
+
+Bash scripts should start with:
+
+#+BEGIN_SRC sh
+#!/usr/bin/env bash
+set -euo pipefail
+#+END_SRC
+
+Explanation:
+
+1. =-e= :: Exit on error.
+2. =-u= :: Treat unset variables as errors.
+3. =-o pipefail= :: Propagate errors across pipelines.
+
+** 3.2 Functions vs. Top-Level Code
+
+RT-style Bash separates:
+
+1. A small top-level CLI harness (argument parsing, usage, dispatch).
+2. A set of functions that implement the work.
+
+Pattern:
+
+1. Parse arguments into variables.
+2. Call a main function with explicit parameters.
+3. Avoid relying on global mutable state where possible.
+
+** 3.3 Logging and Diagnostics
+
+1. Use =printf= or =echo= for user-facing messages.
+2. Send debug or trace output to stderr (=>&2=).
+3. Make it obvious when the script is changing system state (e.g.
+ mounting, creating users, modifying firewall rules).
+
+* 4. Using the Addenda
+
+When in doubt:
+
+1. Start with =02_RT_Code_Format.org= for the core rules.
+2. Apply the relevant language section here.
+3. If a language requires deviation from the generic rules, document
+ that deviation in this file instead of ad-hoc decisions.
+
+#+BEGIN_EXPORT html
+</div>
+#+END_EXPORT
--- /dev/null
+
+ body {
+ font-family: 'Noto Sans JP', Arial, sans-serif;
+ background-color: hsl(0, 0%, 0%);
+ color: hsl(42, 100%, 80%);
+ padding: 2rem;
+ }
+ .page {
+ padding: 3rem;
+ margin: 1.25rem auto;
+ max-width: 46.875rem;
+ background-color: hsl(0, 0%, 0%);
+ box-shadow: 0 0 0.625rem hsl(42, 100%, 50%);
+ }
+ h1 {
+ font-size: 1.5rem;
+ text-align: center;
+ color: hsl(42, 100%, 84%);
+ text-transform: uppercase;
+ margin-top: 1.5rem;
+ }
+ h2 {
+ font-size: 1.25rem;
+ color: hsl(42, 100%, 84%);
+ text-align: center;
+ margin-top: 2rem;
+ }
+ h3 {
+ font-size: 1.125rem;
+ color: hsl(42, 100%, 75%);
+ margin-top: 1.5rem;
+ }
+ p, li {
+ color: hsl(42, 100%, 90%);
+ text-align: justify;
+ margin-bottom: 1rem;
+ }
+ code {
+ font-family: 'Courier New', Courier, monospace;
+ background-color: hsl(0, 0%, 25%);
+ padding: 0.125rem 0.25rem;
+ color: hsl(42, 100%, 90%);
+ }
+
--- /dev/null
+#!/usr/bin/env bash
+# env_developer — enter the project developer environment
+# (must be sourced)
+
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# enter project environment
+#
+ source shared/authored/env
+
+# setup tools
+#
+ export PYTHON_HOME="$REPO_HOME/shared/third_party/Python"
+ if [[ ":$PATH:" != *":$PYTHON_HOME/bin:"* ]]; then
+ export PATH="$PYTHON_HOME/bin:$PATH"
+ fi
+
+ RT_gcc="$REPO_HOME/shared/third_party/RT_gcc/release"
+ if [[ ":$PATH:" != *":$RT_gcc:"* ]]; then
+ export PATH="$RT_gcc:$PATH"
+ fi
+
+# enter the role environment
+#
+ export ROLE=developer
+
+ tool="$REPO_HOME/$ROLE/tool"
+ if [[ ":$PATH:" != *":$tool:"* ]]; then
+ export PATH="$tool:$PATH"
+ fi
+
+ export ENV=$ROLE/tool/env
+
+ cd "$ROLE"
+ if [[ -f "tool/env" ]]; then
+ source "tool/env"
+ echo "in environment: $ENV"
+ else
+ echo "not found: $ENV"
+ fi
--- /dev/null
+#!/usr/bin/env bash
+# env_tester — enter the project tester environment
+# (must be sourced)
+
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# enter project environment
+#
+ source shared/authored/env
+
+# setup tools
+#
+ export PYTHON_HOME="$REPO_HOME/shared/third_party/Python"
+ if [[ ":$PATH:" != *":$PYTHON_HOME/bin:"* ]]; then
+ export PATH="$PYTHON_HOME/bin:$PATH"
+ fi
+
+ RT_gcc="$REPO_HOME/shared/third_party/RT_gcc/release"
+ if [[ ":$PATH:" != *":$RT_gcc:"* ]]; then
+ export PATH="$RT_gcc:$PATH"
+ fi
+
+# enter the role environment
+#
+ export ROLE=tester
+
+ tool="$REPO_HOME/$ROLE/tool"
+ if [[ ":$PATH:" != *":$tool:"* ]]; then
+ export PATH="$tool:$PATH"
+ fi
+
+ export ENV=$ROLE/tool/env
+
+ cd "$ROLE"
+ if [[ -f "tool/env" ]]; then
+ source "tool/env"
+ echo "in environment: $ENV"
+ else
+ echo "not found: $ENV"
+ fi
--- /dev/null
+#!/usr/bin/env bash
+# env_toolsmith — enter the project toolsmith environment
+# (must be sourced)
+
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# enter project environment
+#
+ source shared/authored/env
+
+# setup tools
+# initially these will not exist, as the toolsmith installs them
+#
+ export PYTHON_HOME="$REPO_HOME/shared/third_party/Python"
+ if [[ ":$PATH:" != *":$PYTHON_HOME/bin:"* ]]; then
+ export PATH="$PYTHON_HOME/bin:$PATH"
+ fi
+
+ RT_gcc="$REPO_HOME/shared/third_party/RT_gcc/release"
+ if [[ ":$PATH:" != *":$RT_gcc:"* ]]; then
+ export PATH="$RT_gcc:$PATH"
+ fi
+
+# enter the role environment
+#
+ export ROLE=toolsmith
+
+ TOOL_DIR="$REPO_HOME/tool"
+ if [[ ":$PATH:" != *":$TOOL_DIR:"* ]]; then
+ export PATH="$TOOL_DIR:$PATH"
+ fi
+
+ export ENV="tool/env"
+
+ cd "$REPO_HOME"
+ if [[ -f "tool/env" ]]; then
+ source "tool/env"
+ echo "in environment: $ENV"
+ else
+ echo "not found: $ENV"
+ fi
--- /dev/null
+
+*
+!/.gitignore
\ No newline at end of file
--- /dev/null
+*
+!/.gitignore
--- /dev/null
+#!/usr/bin/env python3
+# ----------------------------------------------------------------------
+# git-empty-dir :: list/mark/clean empty directories, .gitignore aware
+# ----------------------------------------------------------------------
+
+import sys
+import os
+from pathlib import Path
+
# The source_sync GitIgnore parser is bundled inside the unpacked tool.
# This tool assumes the following directory structure:
#   git-empty-dir/
#     CLI.py
#     source_sync/
#       GitIgnore.py
#
# This mirrors the structure of the harmony source_sync tool.
+
+# Adjust import path so we can load source_sync.*
+HERE = Path(__file__).resolve().parent
+sys.path.insert(0, str(HERE))
+
+from source_sync.GitIgnore import GitIgnore # type: ignore
+
+
+# ----------------------------------------------------------------------
+# helpers
+# ----------------------------------------------------------------------
+
+def load_gitignore_tree(root: Path):
+ """
+ Build a GitIgnore instance rooted at <root>.
+ """
+ return GitIgnore(str(root))
+
+def is_empty_dir(path: Path) -> bool:
+ """
+ A directory is empty if it contains no files or subdirectories.
+ (Hidden files count; .gitignored children are irrelevant because
+ behavior here should reflect real filesystem emptiness.)
+ """
+ try:
+ for _ in path.iterdir():
+ return False
+ return True
+ except PermissionError:
+ # treat as non-empty: safer than aborting
+ return False
+
+
+def has_mark(path: Path, mark_file: str) -> bool:
+ return (path / mark_file).exists()
+
+
+def sorted_dirs(root: Path):
+ """
+ Produce a list of all directories under root, in parent-before-child order.
+ Sort rule:
+ 1. by path length
+ 2. then lexicographically
+ """
+ all_dirs = []
+ for p in root.rglob("*"):
+ if p.is_dir():
+ all_dirs.append(p)
+
+ return sorted(
+ all_dirs
+ ,key = lambda p: (len(p.parts), str(p))
+ )
+
+
+# ----------------------------------------------------------------------
+# traversal
+# ----------------------------------------------------------------------
+
+def visible_dirs(root: Path, ignore_tree, mark_file: str):
+ """
+ Yield all dirs under root, applying:
+ - skip .git
+ - apply .gitignore rules (if a dir is ignored, do not descend)
+ - parent-before-child ordering
+ """
+ for d in sorted_dirs(root):
+ rel = d.relative_to(root)
+
+ if rel == Path("."):
+ continue
+
+ # skip .git explicitly
+ if d.name == ".git":
+ continue
+
+ # .gitignore filtering
+ if ignore_tree.check(str(rel)) == "Ignore":
+ continue
+
+ yield d
+
+
+# ----------------------------------------------------------------------
+# actions
+# ----------------------------------------------------------------------
+
+def action_list(root, ignore_tree, mark_file, mode):
+ """
+ mode ∈ {"empty","marked","all"}
+ """
+ for d in visible_dirs(root, ignore_tree, mark_file):
+ if mode == "all":
+ print(d.relative_to(root))
+ continue
+
+ if mode == "marked":
+ if has_mark(d, mark_file):
+ print(d.relative_to(root))
+ continue
+
+ if mode == "empty":
+ if is_empty_dir(d):
+ print(d.relative_to(root))
+ continue
+
+
+def action_mark(root, ignore_tree, mark_file, mode):
+ """
+ mode ∈ {"empty","all"}
+ """
+ for d in visible_dirs(root, ignore_tree, mark_file):
+ if mode == "empty" and not is_empty_dir(d):
+ continue
+ try:
+ (d / mark_file).touch(exist_ok=True)
+ except Exception:
+ pass
+
+
+def action_clean(root, ignore_tree, mark_file, mode):
+ """
+ mode ∈ {"nonempty","all"}
+ """
+ for d in visible_dirs(root, ignore_tree, mark_file):
+ m = d / mark_file
+ if not m.exists():
+ continue
+
+ if mode == "nonempty":
+ if is_empty_dir(d):
+ continue
+
+ try:
+ m.unlink()
+ except Exception:
+ pass
+
+
+# ----------------------------------------------------------------------
+# usage
+# ----------------------------------------------------------------------
+
+USAGE = """
+usage:
+ git-empty-dir (list|mark|clean) [all|marked|empty] [file-<name>]
+ git-empty-dir help
+ git-empty-dir usage
+
+defaults:
+ mark-file = .gitkeep
+ ignores .git
+ follows .gitignore (no descent into ignored dirs)
+
+examples:
+ git-empty-dir list
+ git-empty-dir list marked file-.githolder
+ git-empty-dir mark
+ git-empty-dir clean all
+"""
+
+
+# ----------------------------------------------------------------------
+# CLI
+# ----------------------------------------------------------------------
+
+def CLI(argv):
+ if len(argv) == 0:
+ print(USAGE)
+ return 0
+
+ cmd = argv[0]
+
+ if cmd in ("help","usage"):
+ print(USAGE)
+ return 0
+
+ # command
+ if cmd not in ("list","mark","clean"):
+ print(f"unknown command: {cmd}")
+ print(USAGE)
+ return 1
+
+ # submode
+ mode = None
+ mark_file = ".gitkeep"
+
+ for a in argv[1:]:
+ if a.startswith("file-"):
+ mark_file = a[5:]
+ continue
+
+ if a in ("all","empty","marked"):
+ mode = a
+ continue
+
+ print(f"unknown argument: {a}")
+ print(USAGE)
+ return 1
+
+ # defaults
+ if cmd == "list":
+ if mode is None:
+ mode = "empty"
+ elif cmd == "mark":
+ if mode is None:
+ mode = "empty"
+ elif cmd == "clean":
+ if mode is None:
+ mode = "nonempty"
+
+ root = Path(".").resolve()
+ ignore_tree = load_gitignore_tree(root)
+
+ if cmd == "list":
+ action_list(root, ignore_tree, mark_file, mode)
+
+ elif cmd == "mark":
+ if mode == "all":
+ action_mark(root, ignore_tree, mark_file, "all")
+ else:
+ action_mark(root, ignore_tree, mark_file, "empty")
+
+ elif cmd == "clean":
+ if mode == "all":
+ action_clean(root, ignore_tree, mark_file, "all")
+ else:
+ action_clean(root, ignore_tree, mark_file, "nonempty")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(CLI(sys.argv[1:]))
--- /dev/null
+../source_sync/Harmony.py
\ No newline at end of file
--- /dev/null
+../source_sync/load_command_module.py
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2; indent-tabs-mode: nil -*-
+
+"""
+meta.py - thin wrappers around command modules
+
+Current responsibilities:
+ 1. Load the incommon 'printenv' command module (no .py extension)
+ using load_command_module.load_command_module().
+ 2. Expose printenv() here, calling the imported printenv() work
+ function with default arguments (equivalent to running without
+ any CLI arguments).
+ 3. Provide a simple version printer for this meta module.
+ 4. Provide a small debug tag API (set/clear/has).
+"""
+
+from __future__ import annotations
+
+import datetime
+from load_command_module import load_command_module
+
+
+# Load the incommon printenv module once at import time
+_PRINTENV_MODULE = load_command_module("printenv")
+_Z_MODULE = load_command_module("Z")
+
+
+# Meta module version
+_major = 1
+_minor = 1
+def version_print() -> None:
+ """
+ Print the meta module version as MAJOR.MINOR.
+ """
+ print(f"{_major}.{_minor}")
+
+
+# Debug tag set and helpers
+_debug = set([
+])
+
+
+def debug_set(tag: str) -> None:
+ """
+ Add a debug tag to the meta debug set.
+ """
+ _debug.add(tag)
+
+
+def debug_clear(tag: str) -> None:
+ """
+ Remove a debug tag from the meta debug set, if present.
+ """
+ _debug.discard(tag)
+
+
+def debug_has(tag: str) -> bool:
+ """
+ Return True if the given debug tag is present.
+ """
+ return tag in _debug
+
+
+# Touch the default tag once so static checkers do not complain about
+# unused helpers when imported purely for side-effects.
+debug_has("Command")
+
+
+def printenv() -> int:
+ """
+ Call the imported printenv() work function with default arguments:
+ - no null termination
+ - no newline quoting
+ - no specific names (print full environment)
+ - prog name 'printenv'
+ """
+ return _PRINTENV_MODULE.printenv(
+ False # null_terminate
+ ,False # quote_newlines
+ ,[] # names
+ ,"printenv"
+ )
+
+
+def z_format_mtime(
+ mtime: float
+) -> str:
+ """
+ Format a POSIX mtime (seconds since epoch, UTC) using the Z module.
+
+ Uses Z.ISO8601_FORMAT and Z.make_timestamp(dt=...).
+ """
+ dt = datetime.datetime.fromtimestamp(mtime, datetime.timezone.utc)
+ return _Z_MODULE.make_timestamp(
+ fmt=_Z_MODULE.ISO8601_FORMAT
+ ,dt=dt
+ )
--- /dev/null
+../source_sync/
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env python3
+"""
+gitignore_walk.py — Fully correct .gitignore-aware depth-first walker
+Now passes:
+ • __pycache__/ (directory listed, contents ignored)
+ • scratchpad/* !/.gitignore
+ • third_party/.gitignore ignoring everything inside
+ • top-level .gitignore
+"""
+
+from __future__ import annotations
+
+import os
+import re
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Generator, List
+
+
+@dataclass(frozen=True)
+class Rule:
+ raw: str
+ negated: bool
+ dir_only: bool # pattern ends with /
+ anchored: bool # pattern starts with /
+ regex: re.Pattern
+
+
+def _compile_rule(line: str) -> Rule | None:
+ line = line.strip()
+ if not line or line.startswith("#"):
+ return None
+
+ negated = line.startswith("!")
+ if negated:
+ line = line[1:].lstrip()
+
+ dir_only = line.endswith("/")
+ if dir_only:
+ line = line[:-1]
+
+ anchored = line.startswith("/")
+ if anchored:
+ line = line[1:]
+
+ # Convert git pattern to regex
+ parts = []
+ i = 0
+ while i < len(line):
+ c = line[i]
+ if c == "*":
+ if i + 1 < len(line) and line[i + 1] == "*":
+ parts.append(".*")
+ i += 2
+ else:
+ parts.append("[^/]*")
+ i += 1
+ elif c == "?":
+ parts.append("[^/]")
+ i += 1
+ else:
+ parts.append(re.escape(c))
+ i += 1
+
+ regex_str = "".join(parts)
+
+ if anchored:
+ regex_str = f"^{regex_str}"
+ else:
+ regex_str = f"(^|/){regex_str}"
+
+ # For dir-only patterns: match path + optional trailing slash
+ if dir_only:
+ regex_str += "(/.*)?$"
+ else:
+ regex_str += "($|/.*$)"
+
+ return Rule(
+ raw=line,
+ negated=negated,
+ dir_only=dir_only,
+ anchored=anchored,
+ regex=re.compile(regex_str),
+ )
+
+
+def _load_rules(dirpath: Path) -> List[Rule]:
+ rules: List[Rule] = []
+ gitignore = dirpath / ".gitignore"
+ if gitignore.is_file():
+ try:
+ for raw_line in gitignore.read_text(encoding="utf-8", errors="ignore").splitlines():
+ rule = _compile_rule(raw_line)
+ if rule:
+ rules.append(rule)
+ except Exception:
+ pass
+ return rules
+
+
+def gitignore_walk(root: str | Path) -> Generator[Path, None, None]:
+ root = Path(root).resolve()
+ if not root.is_dir():
+ return
+
+ # Stack: (directory_path, rules_from_root_to_here)
+ stack: List[tuple[Path, List[Rule]]] = [(root, [])]
+
+ while stack:
+ cur_dir, inherited_rules = stack.pop() # depth-first
+
+ # Load local rules
+ local_rules = _load_rules(cur_dir)
+ all_rules = inherited_rules + local_rules
+
+ # Relative path string from project root
+ try:
+ rel = cur_dir.relative_to(root)
+ rel_str = "" if rel == Path(".") else rel.as_posix()
+ except ValueError:
+ rel_str = ""
+
+ # === Is this directory itself ignored? ===
+ dir_ignored = False
+ for rule in reversed(all_rules): # last match wins
+ if rule.regex.match(rel_str + "/"): # always test as dir
+ dir_ignored = rule.negated
+ break
+
+ # Yield the directory if not ignored
+ if not dir_ignored:
+ yield cur_dir
+
+ # Scan children only if directory is not ignored
+ if dir_ignored:
+ continue
+
+ try:
+ children = list(cur_dir.iterdir())
+ except PermissionError:
+ continue
+
+ children.sort(key=lambda p: p.name.lower())
+
+ to_visit = []
+ for child in children:
+ if child.name == ".git":
+ continue
+
+ child_rel = child.relative_to(root)
+ child_rel_str = child_rel.as_posix()
+
+ # Special case: .gitignore files are never ignored by their own rules
+ if child.name == ".gitignore":
+ if not dir_ignored:
+ yield child
+ continue
+
+ # Evaluate rules against the full relative path
+ ignored = False
+ for rule in reversed(all_rules):
+ match_str = child_rel_str + "/" if child.is_dir() else child_rel_str
+ if rule.regex.match(match_str):
+ ignored = rule.negated
+ break
+
+ if not ignored:
+ if child.is_dir():
+ to_visit.append(child)
+ else:
+ yield child
+
+ # Push children in reverse order → depth-first, left-to-right
+ for child_dir in reversed(to_visit):
+ stack.append((child_dir, all_rules))
+
+
+if __name__ == "__main__":
+ import argparse
+ parser = argparse.ArgumentParser(description="Gitignore-aware tree walk")
+ parser.add_argument("path", nargs="?", default=".", help="Root directory")
+ args = parser.parse_args()
+
+ for p in gitignore_walk(args.path):
+ print(p)
--- /dev/null
+./gitignore_treewalk/CLI.py
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+from __future__ import annotations
+
+import os
+import sys
+
+# ----------------------------------------------------------------------
+# Bootstrap import context when executed via symlink (e.g. ../walk)
+# ----------------------------------------------------------------------
+if __name__ == "__main__" and __package__ is None:
+ # Resolve the real file (follows symlinks)
+ _real = os.path.realpath(__file__)
+ _pkg_dir = os.path.dirname(_real)
+ _pkg_root = os.path.dirname(_pkg_dir) # authored/
+
+ # Ensure authored/ is on sys.path
+ if _pkg_root not in sys.path:
+ sys.path.insert(0, _pkg_root)
+
+ # Force package name so relative imports work
+ __package__ = "gitignore_treewalk"
+
+# Now safe to do relative imports
+from .pattern import Pattern
+from .ruleset import RuleSet
+from .walker import Walker
+from .printer import Printer
+
+
+# ----------------------------------------------------------------------
+# Usage text
+# ----------------------------------------------------------------------
+def usage() -> int:
+ print(
+ "Usage:\n"
+ " walk |usage|help\n"
+ " Show this help.\n"
+ "\n"
+ " walk list\n"
+ " Walk the working directory applying gitignore rules.\n"
+ )
+ return 0
+
+
+# ----------------------------------------------------------------------
+# CLI dispatcher
+# ----------------------------------------------------------------------
+def CLI(argv: List[str]) -> int:
+ if not argv:
+ return usage()
+
+ cmd = argv[0]
+
+ if cmd in ("usage", "help"):
+ return usage()
+
+ if cmd == "list":
+ cwd = os.getcwd()
+ cwd_dpa = os.path.abspath(cwd)
+
+ rs = RuleSet.from_gitignore_files(
+ start_dir=cwd_dpa
+ )
+
+ walker = Walker(
+ root=cwd_dpa
+ ,rules=rs
+ )
+
+ for p in walker.walk():
+ print_path(
+ p
+ ,cwd_dpa
+ )
+ return 0
+
+ print(f"Unknown command: {cmd}")
+ return usage()
+
+
+# ----------------------------------------------------------------------
+# Entrypoint
+# ----------------------------------------------------------------------
+if __name__ == "__main__":
+ sys.exit(
+ CLI(
+ sys.argv[1:]
+ )
+ )
--- /dev/null
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+"""
+gitignore_treewalk — Git-aware directory traversal library.
+
+Exports:
+ Pattern
+ RuleSet
+ Walker
+ Printer
+"""
+
+from .pattern import Pattern
+from .ruleset import RuleSet
+from .walker import Walker
+from .printer import Printer
--- /dev/null
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+"""
+pattern.py — Git ignore pattern parser.
+
+Implements:
+ Git pattern semantics:
+ - !negation
+ - directory-only ('foo/')
+ - anchored ('/foo')
+ - wildcards '*', '?'
+ - recursive wildcard '**'
+ - full-path matching
+ - last rule wins
+"""
+
+from __future__ import annotations
+import os
+import re
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class Pattern:
+ raw: str
+ negated: bool
+ anchored: bool
+ dir_only: bool
+ regex: re.Pattern
+
+ @staticmethod
+ def from_line(line: str) -> Optional["Pattern"]:
+ """
+ Parse a single .gitignore pattern line.
+ Return None for comments/empty.
+ """
+
+ stripped = line.strip()
+ if not stripped or stripped.startswith("#"):
+ return None
+
+ negated = stripped.startswith("!")
+ if negated:
+ stripped = stripped[1:].lstrip()
+ if not stripped:
+ return None
+
+ dir_only = stripped.endswith("/")
+ if dir_only:
+ stripped = stripped[:-1]
+
+ anchored = stripped.startswith("/")
+ if anchored:
+ stripped = stripped[1:]
+
+ # Convert git-style pattern to regex
+ # Git semantics:
+ # ** -> match any depth
+ # * -> match any sequence except '/'
+ # ? -> match one char except '/'
+ #
+ # Always match against full path (unix style, no leading '.')
+ #
+ def escape(s: str) -> str:
+ return re.escape(s)
+
+ # Convert pattern piecewise
+ regex_pieces = []
+ i = 0
+ while i < len(stripped):
+ c = stripped[i]
+ if c == "*":
+ # Check for **
+ if i + 1 < len(stripped) and stripped[i + 1] == "*":
+ # '**' -> match zero or more directories OR characters
+ regex_pieces.append(".*")
+ i += 2
+ else:
+ # '*' -> match any chars except '/'
+ regex_pieces.append("[^/]*")
+ i += 1
+ elif c == "?":
+ regex_pieces.append("[^/]")
+ i += 1
+ else:
+ regex_pieces.append(escape(c))
+ i += 1
+
+ regex_string = "".join(regex_pieces)
+
+ # Anchored: match from start of path
+ # Unanchored: match anywhere in path
+ if anchored:
+ full = fr"^{regex_string}$"
+ else:
+ full = fr"(^|/){regex_string}($|/)"
+
+ return Pattern(
+ raw=line,
+ negated=negated,
+ anchored=anchored,
+ dir_only=dir_only,
+ regex=re.compile(full),
+ )
+
+ def matches(self, relpath: str, is_dir: bool) -> bool:
+ """
+ Match full relative path, not just basename.
+ """
+ # If pattern is directory-only, relpath must be a directory
+ if self.dir_only and not is_dir:
+ return False
+
+ return bool(self.regex.search(relpath))
--- /dev/null
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+"""
+printer.py — utilities for printing path listings:
+ - linear list
+ - ASCII "tree" view where each line begins with the actual path,
+ then optional visual decoration for humans.
+"""
+
+from __future__ import annotations
+from pathlib import Path
+from typing import Iterable
+
+
+class Printer:
+ @staticmethod
+ def print_linear(paths: Iterable[Path], cwd: Path) -> None:
+ for p in paths:
+ rel = p.relative_to(cwd)
+ print(rel.as_posix())
+
+ @staticmethod
+ def print_tree(paths: Iterable[Path], cwd: Path) -> None:
+ """
+ Print each line as:
+ <relpath> <drawing>
+
+ Where <drawing> is ASCII tree structure.
+ """
+ items = sorted(paths, key=lambda p: p.relative_to(cwd).as_posix())
+ rels = [p.relative_to(cwd).as_posix() for p in items]
+
+ # Build a tree prefix for human reading
+ for rel in rels:
+ parts = rel.split("/")
+ indent = " " * (len(parts) - 1)
+ branch = "└─ " if len(parts) > 1 else ""
+ print(f"{rel} {indent}{branch}")
--- /dev/null
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+"""
+ruleset.py — layered Git ignore rule-set stack.
+
+Implements the Git semantics:
+ - Each directory can contribute patterns from .gitignore
+ - Parent directories apply first
+ - Last matching pattern wins
+ - Negation overrides earlier ignores
+ - dir-only rules respected
+"""
+
+from __future__ import annotations
+import os
+from typing import List, Optional
+from .pattern import Pattern
+
+
+class RuleSet:
+ """
+ Manages a stack of patterns from:
+ - global excludes
+ - .git/info/exclude
+ - directory-local .gitignore
+
+ push(patterns)
+ pop(count)
+ evaluate(path, is_dir)
+ """
+
+ def __init__(self) -> None:
+ self.stack: List[List[Pattern]] = []
+
+ def push(self, patterns: List[Pattern]) -> None:
+ self.stack.append(patterns)
+
+ def pop(self) -> None:
+ if self.stack:
+ self.stack.pop()
+
+ def evaluate(self, relpath: str, is_dir: bool) -> bool:
+ """
+ Return True iff path is ignored.
+ Last matching rule wins.
+ """
+ verdict: Optional[bool] = None
+
+ for group in self.stack:
+ for pat in group:
+ if pat.matches(relpath, is_dir):
+ if pat.negated:
+ verdict = False
+ else:
+ verdict = True
+
+ return bool(verdict)
--- /dev/null
+# -*- mode: python; coding: utf-8; python-indent-offset: 2 -*-
+
+"""
+walker.py — Git-aware directory traversal.
+
+Features:
+ - Loads global excludes
+ - Loads .git/info/exclude if present
+ - Loads .gitignore in each directory
+ - Does NOT descend into ignored directories
+ - Yields both files and directories (Path objects)
+ - Always parent-before-child
+ - Sorted lexicographically
+"""
+
+from __future__ import annotations
+import os
+from pathlib import Path
+from typing import Iterator, List
+
+from .pattern import Pattern
+from .ruleset import RuleSet
+
+
+class Walker:
+ def __init__(self, root: Path) -> None:
+ self.root = root.resolve()
+ self.ruleset = RuleSet()
+
+ # Load global and project-local excludes
+ self._push_global_excludes()
+ self._push_local_excludes()
+
+ # ----------------------------------------------------------------------
+ # Exclude Sources
+ # ----------------------------------------------------------------------
+
+ def _push_global_excludes(self) -> None:
+ """
+ Load user's global ignore file if present:
+ ~/.config/git/ignore
+ or ~/.gitignore_global
+ """
+ candidates = [
+ Path(os.environ.get("XDG_CONFIG_HOME", Path.home() / ".config")) / "git" / "ignore",
+ Path.home() / ".gitignore_global"
+ ]
+ patterns = []
+
+ for f in candidates:
+ if f.exists():
+ for line in f.read_text().splitlines():
+ p = Pattern.from_line(line)
+ if p:
+ patterns.append(p)
+ break
+
+ if patterns:
+ self.ruleset.push(patterns)
+
+ def _push_local_excludes(self) -> None:
+ """
+ Load <root>/.git/info/exclude
+ """
+ f = self.root / ".git" / "info" / "exclude"
+ patterns = []
+ if f.exists():
+ for line in f.read_text().splitlines():
+ p = Pattern.from_line(line)
+ if p:
+ patterns.append(p)
+
+ if patterns:
+ self.ruleset.push(patterns)
+
+ # ----------------------------------------------------------------------
+ # Walk
+ # ----------------------------------------------------------------------
+
+ def walk(self) -> Iterator[Path]:
+ return self._walk_dir(self.root, prefix="")
+
+ def _walk_dir(self, dpath: Path, prefix: str) -> Iterator[Path]:
+ # Load .gitignore for this directory
+ patterns = []
+ gitignore = dpath / ".gitignore"
+ if gitignore.exists():
+ for line in gitignore.read_text().splitlines():
+ p = Pattern.from_line(line)
+ if p:
+ patterns.append(p)
+
+ self.ruleset.push(patterns)
+
+ # Evaluate this directory (except root)
+ if prefix:
+ if self.ruleset.evaluate(prefix, is_dir=True):
+ # ignored directories are NOT descended into
+ self.ruleset.pop()
+ return
+
+ yield dpath
+
+ # Enumerate children sorted
+ entries: List[Path] = sorted(dpath.iterdir(), key=lambda p: p.name)
+
+ for entry in entries:
+ rel = entry.relative_to(self.root).as_posix()
+ is_dir = entry.is_dir()
+
+ # Skip ignored
+ if self.ruleset.evaluate(rel, is_dir=is_dir):
+ continue
+
+ # Directories
+ if is_dir:
+ yield from self._walk_dir(entry, rel)
+ else:
+ yield entry
+
+ self.ruleset.pop()
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# without this bash takes non-matching globs literally
+shopt -s nullglob
+
+# does not presume sharing or world permissions
+umask 0077
+
+# --------------------------------------------------------------------------------
+# project definition
+
+# actual absolute director path for this script file
+
+ script_adp(){
+ dirname "$script_afp"
+ }
+
+# assume this script is located $REPO_HOME/tools_shared/authored and work backwards
+# to get $REPO_HOME, etc.
+
+ REPO_HOME=$(dirname "$(dirname "$(script_adp)")")
+ echo REPO_HOME "$REPO_HOME"
+
+ PROJECT=$(basename "$REPO_HOME")
+ echo PROJECT "$PROJECT"
+
+ # set the prompt decoration to the name of the project
+ PROMPT_DECOR=$PROJECT
+
+ export REPO_HOME PROJECT PROMPT_DECOR
+
+# --------------------------------------------------------------------------------
+# Project wide Tool setup
+#
+
+export VIRTUAL_ENV="$REPO_HOME/shared/third_party/Python"
+export PYTHON_HOME="$VIRTUAL_ENV"
+unset PYTHONHOME
+
+
+# --------------------------------------------------------------------------------
+# PATH
+# precedence: last defined, first discovered
+
+ PATH="$REPO_HOME/shared/third_party/RT-project-share/release/bash:$PATH"
+ PATH="$REPO_HOME/shared/third_party/RT-project-share/release/amd64:$PATH"
+ PATH="$REPO_HOME/shared/third_party:$PATH"
+ PATH="$REPO_HOME/shared/customized:$PATH"
+ PATH="$REPO_HOME/shared/made:$PATH"
+
+ # Remove duplicates
+ clean_path() {
+ PATH=$(echo ":$PATH" | awk -v RS=: -v ORS=: '!seen[$0]++' | sed 's/^://; s/:$//')
+ }
+ clean_path
+ export PATH
+
+# --------------------------------------------------------------------------------
+# the following functions are provided for other scripts to use.
+# at the top of files that make use of these functions put the following line:
+# script_afp=$(realpath "${BASH_SOURCE[0]}")
+#
+
+ ## script's filename
+ script_fn(){
+ basename "$script_afp"
+ }
+
+ ## script's dirpath relative to $REPO_HOME
+ script_fp(){
+ realpath --relative-to="${REPO_HOME}" "$script_afp"
+ }
+
+ ## script's dirpath relative to $REPO_HOME
+ script_dp(){
+ dirname "$(script_fp)"
+ }
+
+ export -f script_adp script_fn script_dp script_fp
+
+#--------------------------------------------------------------------------------
+# used by release scripts
+#
+
+ install_file() {
+ if [ "$#" -lt 3 ]; then
+ echo "env::install_file usage: install_file <source1> <source2> ... <target_dir> <permissions>"
+ return 1
+ fi
+
+ perms="${@: -1}" # Last argument is permissions
+ target_dp="${@: -2:1}" # Second-to-last argument is the target directory
+ sources=("${@:1:$#-2}") # All other arguments are source files
+
+ if [ ! -d "$target_dp" ]; then
+ echo "env::install_file no install done: target directory '$target_dp' does not exist."
+ return 1
+ fi
+
+ for source_fp in "${sources[@]}"; do
+ if [ ! -f "$source_fp" ]; then
+ echo "env::install_file: source file '$source_fp' does not exist."
+ return 1
+ fi
+
+ target_file="$target_dp/$(basename "$source_fp")"
+
+ if ! install -m "$perms" "$source_fp" "$target_file"; then
+ echo "env::install_file: Failed to install $(basename "$source_fp") to $target_dp"
+ return 1
+ else
+ echo "env::install_file: installed $(basename "$source_fp") to $target_dp with permissions $perms"
+ fi
+ done
+ }
+
+ export -f install_file
+
+# --------------------------------------------------------------------------------
+# closing
+#
+ if [[ -z "$ENV" ]]; then
+ export ENV=$(script_fp)
+ fi
+
--- /dev/null
+Harmony v0.1 2025-01-08
+
--- /dev/null
+#+TITLE: Installing Python in Harmony
+#+AUTHOR: Thomas Walker Lynch
+#+OPTIONS: toc:2 num:nil
+
+* Overview
+
+This document describes how to install a project-local Python environment under:
+
+#+begin_src bash
+shared/third_party/Python
+#+end_src
+
+This environment is shared across the =developer= and =tester= roles and is automatically activated through their respective =env_<role>= scripts.
+
+* Precondition
+
+Ensure the following:
+
+- You are in a POSIX shell with =python3= installed.
+- The =python3-venv= package is available (on Debian: =sudo apt install python3-venv=).
+- You have sourced the Harmony environment via =env_toolsmith= to initialize =REPO_HOME= and related variables.
+
+* Step-by-Step Installation
+
+1. Source the Harmony environment:
+ #+begin_src bash
+ source env_toolsmith
+ #+end_src
+
+2. Create the virtual environment:
+ #+begin_src bash
+ python3 -m venv "$REPO_HOME/shared/third_party/Python"
+ #+end_src
+
+3. Activate it temporarily to install required packages:
+ #+begin_src bash
+ source "$REPO_HOME/shared/third_party/Python/bin/activate"
+ pip install --upgrade pip
+ pip install pytest # Add any shared packages here
+ deactivate
+ #+end_src
+
+4. Rename Python's default activate and deactivate:
+ Harmony provides its own role-aware environment management. Using Python’s default activation scripts may interfere with prompt logic, PATH order, and role-specific behavior.
+
+ Disable the default scripts by renaming them:
+ #+begin_src bash
+ mv "$REPO_HOME/shared/third_party/Python/bin/activate" \
+ "$REPO_HOME/shared/third_party/Python/bin/activate_deprecated"
+ #+end_src
+
+ This ensures that accidental sourcing of Python’s =activate= script won't override Harmony's environment setup.
+
+5. Verify installation:
+ #+begin_src bash
+ ls "$REPO_HOME/shared/third_party/Python/bin/python3"
+ #+end_src
+
+ The binary should exist and report a working Python interpreter when run.
+
+* Notes
+
+- The virtual environment is deliberately named =Python=, not =venv=, to reflect its role as a shared system component.
+- Harmony environment scripts define and control =VIRTUAL_ENV=, =PYTHON_HOME=, and =PATH=, making Python activation seamless and uniform.
+- There is no need to use Python’s =bin/activate= directly — it is fully replaced by Harmony’s environment logic.
+
+* Related Files
+
+- =shared/authored/env=
+- =shared/authored/env_source=
+- =env_developer=, =env_tester=, =env_toolsmith=
+
+* Last Verified
+
+2025-05-19 :: Activate/deactivate renamed post-install. Requires Harmony environment sourcing prior to execution.
--- /dev/null
+
+This is the generic install.org doc that comes with the skeleton.
+
+1. $REPO_HOME/shared/third_party/.gitignore:
+
+ *
+ !/.gitignore
+ !/patch
+
 The only things from the third party directory that will be pushed to the repo origin are the .gitignore file and the patches.
+
+
+2. downloaded tar files etc. go into the directory `upstream`
+
+ $REPO_HOME/shared/upstream
+
+ Typically the contents of upstream are deleted after the install.
+
+3. for the base install
+
+ cd $REPO_HOME/shared/third_party
+ do whatever it takes to install tool, as examples:
+ git clone <tool_path>
+ tar -xzf ../upstream/tar
+ ...
+
+ Be sure to add the path to the tool executable(s) in the $REPO_HOME/env_$ROLE files for the $ROLE who uses the tool.
+
+ Assuming you are not also developing the tool, for safety
+ change each installed git project to a local branch:
+
+ b=<site>_<project>_local_$USER
+ git switch -c "$b"
+
+
+4. Define some variables to simplify our discussion. Lowercase variable names
+ are not exported from the shell.
+
+ # already set in the environment
+ # REPO_HOME
+ # PROJECT
+ # USER
+
 # example tool names: 'RT_gcc', 'RT-project-share', etc.
+ tool=<tool-name>
+ tool_dpath="$REPO_HOME/shared/third_party/$tool"
+ patch_dpath="$REPO_HOME/shared/patch/"
+
+
+5. create a patch series (from current vendor state → your local edits)
+
+ # this can be repeated and will create an encompassing diff file
+
 # Optionally create a new branch after cloning the third-party tool repo and work from there. You won't make any commits, but in case you ever plan to check the changes in, or have the habit of committing burned into your brain-stem, making a branch will help.
+
+ # make changes
+
+ cd "$tool_dpath"
+
+ # do your edits
+
+ # Stage edits. Do not commit them!! Be sure you are in the third party
+ # tool directory when doing `git add -A` and `git diff` commands.
+ git add -A
+
+ # diff the stage from the current repo to create the patch file
+ git diff --staged > "$patch_dpath/$tool"
+
+ # the diff file can be added to the project and checked in at the project level.
+
+
+6. how to apply an existing patch
+
+ Get a fresh clone of the tool into $tool_dpath.
+
+ cd "$tool_dpath"
+ git apply "$patch_dpath/$tool"
+
+ You can see what `git apply` would do by running
+
+ git apply --check /path/to/your/patch_dpath/$tool
--- /dev/null
+../authored/gitignore_treewalk.py
\ No newline at end of file
--- /dev/null
+# Ignore all files
+*
+
+# But don't ignore the .gitignore file itself
+!/.gitignore
+
+# keep the upstream directory
+!/upstream
--- /dev/null
+*
+!/.gitignore
\ No newline at end of file
--- /dev/null
+2025-11-25 09:33:05 Z [subu:developer] Thomas_developer@StanleyPark
+§/home/Thomas/subu_data/developer/subu_data/Harmony§
+> find . -type l -exec ls -l {} \;
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 35 Nov 25 09:08 ./tool/sync -> ../tool_shared/authored/sync/CLI.py
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 3 May 19 2025 ./shared/third_party/Python/lib64 -> lib
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 16 May 19 2025 ./shared/third_party/Python/bin/python3 -> /usr/bin/python3
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 7 May 19 2025 ./shared/third_party/Python/bin/python -> python3
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 7 May 19 2025 ./shared/third_party/Python/bin/python3.11 -> python3
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 15 Nov 24 15:19 ./shared/authored/git-empty-dir/source_sync -> ../source_sync/
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 25 Nov 24 15:21 ./shared/authored/git-empty-dir/Harmony.py -> ../source_sync/Harmony.py
+lrwxrwxrwx 1 Thomas_developer Thomas_developer 37 Nov 24 15:22 ./shared/authored/git-empty-dir/load_command_module.py -> ../source_sync/load_command_module.py
--- /dev/null
+#!/usr/bin/env -S python3 -B
+# -*- mode: python; coding: utf-8; python-indent-offset: 2; indent-tabs-mode: nil -*-
+"""
+RT_Format — Reasoning Technology code formatter (commas + bracketed phrases per line)
+
+Commands:
+ RT_Format write <file ...> Format files in place (rewrite originals)
+ RT_Format copy <file ...> Save backups as <file>~ then format originals
+ RT_Format pipe Read from stdin, write to stdout
+ RT_Format self_test Run built-in tests
+ RT_Format version Show tool version
+ RT_Format help | --help Show usage
+
+Rules:
+ • Horizontal lists -> a ,b ,c (space BEFORE comma, none after)
+ • Tight (){}[] by default; add one space just inside borders only when an
+ OUTERMOST bracketed phrase on the line contains an INNER bracket.
+ • Multiple outermost phrases can exist on a line (e.g., `g() { ... }`);
+ apply the rule to EACH such phrase independently.
+ • Per-line, tolerant of unbalanced brackets: first unmatched opener OR last
+ unmatched closer is treated as “the” outermost for padding purposes.
+ • Strings and single-line comments (#, //) are not altered.
+"""
+
+from typing import List ,Tuple ,Optional ,TextIO
+import sys ,re ,io ,shutil ,os
+
+RTF_VERSION = "0.2.2" # pad all outermost-with-nesting phrases on a line
+
+BR_OPEN = "([{<"
+BR_CLOSE = ")]}>"
+PAIR = dict(zip(BR_OPEN ,BR_CLOSE))
+REV = dict(zip(BR_CLOSE ,BR_OPEN))
+
+USAGE = """\
+Usage:
+ RT_Format write <file ...>
+ RT_Format copy <file ...>
+ RT_Format pipe
+ RT_Format self_test
+ RT_Format version
+ RT_Format help | --help
+"""
+
+# --------------- Core token helpers ----------------
+
def split_code_comment(line: str):
  """Split *line* into (code, comment).

  The comment marker ('#' or '//') is kept with the comment part.
  Markers appearing inside single- or double-quoted strings are ignored.
  Returns (line, "") when no comment marker is found.
  """
  quote = None     # current string delimiter, or None when outside a string
  escaped = False
  idx = 0
  n = len(line)
  while idx < n:
    ch = line[idx]
    if quote is not None:
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      idx += 1
      continue
    if ch == "'" or ch == '"':
      quote = ch
    elif ch == "#":
      return line[:idx], line[idx:]
    elif ch == "/" and line[idx:idx + 2] == "//":
      return line[:idx], line[idx:]
    idx += 1
  return line, ""
+
def format_commas(code: str) -> str:
  """Normalize comma spacing outside strings: exactly one space BEFORE each
  comma and none after it (RT list style: ``a ,b ,c``)."""
  result: List[str] = []
  quote = None
  escaped = False
  pos = 0
  length = len(code)
  while pos < length:
    ch = code[pos]
    if quote is not None:
      result.append(ch)
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      pos += 1
      continue
    if ch in ("'", '"'):
      quote = ch
      result.append(ch)
      pos += 1
    elif ch == ",":
      # drop any spaces already collected before the comma ...
      while result and result[-1] == " ":
        result.pop()
      # ... then put back exactly one (unless the comma starts the line)
      if result:
        result.append(" ")
      result.append(",")
      pos += 1
      # swallow the spaces that followed the comma
      while pos < length and code[pos] == " ":
        pos += 1
    else:
      result.append(ch)
      pos += 1
  return "".join(result)
+
+# --------------- Bracket discovery ----------------
+
def top_level_spans(code: str) -> List[Tuple[int ,int]]:
  """Return every balanced OUTERMOST bracketed span on this line as
  (open_index, close_index).  Brackets inside strings are ignored, and a
  mismatched closer is simply skipped (the unbalanced case is handled by
  first_unmatched_opener / last_unmatched_closer)."""
  quote = None
  escaped = False
  openers: List[Tuple[str, int]] = []   # stack of (bracket char, position)
  found: List[Tuple[int, int]] = []
  for pos, ch in enumerate(code):
    if quote is not None:
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      continue
    if ch in ("'", '"'):
      quote = ch
    elif ch in BR_OPEN:
      openers.append((ch, pos))
    elif ch in BR_CLOSE and openers and openers[-1][0] == REV[ch]:
      _opener, start = openers.pop()
      if not openers:       # closed an outermost span
        found.append((start, pos))
  return found
+
def first_unmatched_opener(code: str) -> Optional[int]:
  """Index of the first opener that never gets matched on this line, or None.
  String contents are skipped; stray closers are ignored."""
  quote = None
  escaped = False
  pending: List[Tuple[str, int]] = []
  for pos, ch in enumerate(code):
    if quote is not None:
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      continue
    if ch in ("'", '"'):
      quote = ch
    elif ch in BR_OPEN:
      pending.append((ch, pos))
    elif ch in BR_CLOSE and pending and pending[-1][0] == REV[ch]:
      pending.pop()
  if pending:
    return pending[0][1]
  return None
+
def last_unmatched_closer(code: str) -> Optional[int]:
  """Index of the last closer with no matching opener on this line, or None.
  Only nesting depth is tracked (bracket kinds are not distinguished here)."""
  quote = None
  escaped = False
  open_count = 0
  result: Optional[int] = None
  for pos, ch in enumerate(code):
    if quote is not None:
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      continue
    if ch in ("'", '"'):
      quote = ch
    elif ch in BR_OPEN:
      open_count += 1
    elif ch in BR_CLOSE:
      if open_count:
        open_count -= 1
      else:
        result = pos
  return result
+
def contains_inner_bracket(code: str ,start: Optional[int] ,end: Optional[int]) -> bool:
  """True when any bracket character occurs strictly inside (start, end).

  A None bound is open-ended toward that side of the line.  String-quote
  tracking begins at the lower bound; characters before it are not scanned.
  """
  if start is None and end is None:
    return False
  lo = 0 if start is None else start + 1
  hi = len(code) - 1 if end is None else end - 1
  if hi < lo:
    return False
  quote = None
  escaped = False
  for ch in code[lo:hi + 1]:
    if quote is not None:
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      continue
    if ch in ("'", '"'):
      quote = ch
    elif ch in BR_OPEN or ch in BR_CLOSE:
      return True
  return False
+
+# --------------- Spacing transforms ----------------
+
def tighten_all_brackets(code: str) -> str:
  """Tighten bracket margins: drop one space immediately before any bracket
  and every space immediately inside an opener.  Strings pass through
  untouched."""
  acc: List[str] = []
  quote = None
  escaped = False
  pos = 0
  n = len(code)
  while pos < n:
    ch = code[pos]
    if quote is not None:
      acc.append(ch)
      if escaped:
        escaped = False
      elif ch == "\\":
        escaped = True
      elif ch == quote:
        quote = None
      pos += 1
      continue
    if ch in ("'", '"'):
      quote = ch
      acc.append(ch)
      pos += 1
    elif ch in BR_CLOSE:
      if acc and acc[-1] == " ":   # collapse one space before a closer
        acc.pop()
      acc.append(ch)
      pos += 1
    elif ch in BR_OPEN:
      if acc and acc[-1] == " ":   # collapse one space before an opener
        acc.pop()
      acc.append(ch)
      pos += 1
      while pos < n and code[pos] == " ":   # eat all spaces just inside it
        pos += 1
    else:
      acc.append(ch)
      pos += 1
  return "".join(acc)
+
def apply_bracket_padding(code: str) -> str:
  """
  1) Tighten globally.
  2) For EACH balanced outermost span, if it contains an inner bracket,
     ensure exactly one space just inside its borders — but only if missing.
  3) If there are no balanced spans, pad the first unmatched opener OR the last unmatched closer
     only if that outer fragment contains an inner bracket, and only if padding is missing.
  """
  s = tighten_all_brackets(code)

  def borders_have_space(text: str, start: int, end: int) -> Tuple[bool, bool]:
    # Return (left_has_space, right_has_space) for just-inside borders.
    left_has = (start + 1 < len(text)) and (text[start + 1] == " ")
    right_has = (end - 1 >= 0) and (text[end - 1] == " ")
    return left_has, right_has

  # Balanced top-level spans: may be multiple on one line (e.g., g() { ... }).
  # Iterate while applying at most one mutation per pass; recompute spans after,
  # because each insertion shifts every index to its right.
  while True:
    spans = top_level_spans(s)
    changed = False
    for (start, end) in spans:
      if contains_inner_bracket(s, start, end):
        left_has, right_has = borders_have_space(s, start, end)
        if not left_has or not right_has:
          # Insert exactly one space just inside each border that lacks it.
          if not right_has:
            # Right side first to avoid shifting the 'start' index computation
            s = s[:end].rstrip(" ") + " " + s[end:].lstrip(" ")
          if not left_has:
            s = s[:start + 1].rstrip(" ") + " " + s[start + 1:].lstrip(" ")
          changed = True
          break  # after a mutation, recompute spans fresh
    if not changed:
      break

  # If there are no balanced spans, consider unbalanced fragment once
  if not top_level_spans(s):
    o = first_unmatched_opener(s)
    c = last_unmatched_closer(s)
    if o is not None and contains_inner_bracket(s, o, None):
      # add one space after opener only if missing
      if not (o + 1 < len(s) and s[o + 1] == " "):
        s = s[:o + 1].rstrip(" ") + " " + s[o + 1:]
    elif c is not None and contains_inner_bracket(s, None, c):
      # add one space before closer only if missing
      if not (c - 1 >= 0 and s[c - 1] == " "):
        s = s[:c].rstrip(" ") + " " + s[c:]

  return s
+
+# --------------- Public API ----------------
+
def rt_format_line(line: str) -> str:
  """Format a single line: RT comma style plus bracket padding, leaving any
  trailing comment untouched."""
  code, comment = split_code_comment(line.rstrip("\n"))
  return apply_bracket_padding(format_commas(code)) + comment
+
def rt_format_text(text: str) -> str:
  """Format every line of *text*; the joined result has no trailing newline."""
  lines = text.splitlines()
  return "\n".join(map(rt_format_line, lines))
+
def rt_format_stream(inp: TextIO ,out: TextIO) -> None:
  """Copy *inp* to *out*, formatting line by line."""
  for raw in inp:
    formatted = rt_format_line(raw)
    out.write(formatted + "\n")
+
+# --------------- Self-test ----------------
+
def run_self_test() -> bool:
  """Run the built-in example suite, print a summary, return overall success."""
  ok = True
  cases = [
    # Commas
    ("a,b,c" ,"a ,b ,c"),
    ("a , b , c" ,"a ,b ,c"),
    # Tight () by default
    ("f ( x )" ,"f(x)"),
    ("f(x) + g(y)" ,"f(x) + g(y)"),
    # Balanced: multiple outermost spans (g() and {...}) -> only pad {...} if it has inner bracket
    ("int g(){int a=0,b=1,c=2; return h(a,b,c);}"
     ,"int g(){ int a=0 ,b=1 ,c=2; return h(a ,b ,c); }"),
    # Balanced: single outermost with nesting
    ("outer( inner(a,b) )" ,"outer( inner(a ,b) )"),
    # Unbalanced open-right with nesting
    ("compute(x, f(y" ,"compute( x ,f(y)"),
    # Unbalanced open-left without prior inner bracket => unchanged
    ("return z) + 1" ,"return z) + 1"),
  ]
  for src, exp in cases:
    got = rt_format_line(src)
    if got != exp:
      print("FAIL:" ,src ,"=>" ,got ,"expected:" ,exp)
      ok = False

  print("SELFTEST OK" if ok else "SELFTEST FAILED")
  return ok
+
+# --------------- CLI ----------------
+
def write_files(paths: List[str]) -> int:
  """Format each listed file in place; returns 0 (I/O errors propagate)."""
  for fp in paths:
    with open(fp ,"r" ,encoding="utf-8") as src:
      original = src.read()
    result = rt_format_text(original)
    # Guarantee a trailing newline on the rewritten file.
    if not result.endswith("\n"):
      result += "\n"
    with open(fp ,"w" ,encoding="utf-8") as dst:
      dst.write(result)
  return 0
+
def copy_files(paths: List[str]) -> int:
  """Back up each file to <file>~ (metadata preserved), then format originals."""
  for fp in paths:
    backup = fp + "~"
    shutil.copy2(fp ,backup)
  return write_files(paths)
+
def CLI(argv=None) -> int:
  """Command-line entry point; returns a process exit status."""
  args = list(sys.argv[1:] if argv is None else argv)
  if not args or args[0] in {"help" ,"--help" ,"-h"}:
    print(USAGE)
    return 0

  cmd, rest = args[0], args[1:]

  if cmd == "version":
    print(RTF_VERSION)
    return 0
  if cmd == "self_test":
    return 0 if run_self_test() else 1
  if cmd == "pipe":
    rt_format_stream(sys.stdin ,sys.stdout)
    return 0
  if cmd == "write":
    if rest:
      return write_files(rest)
    print("write: missing <file ...>\n" + USAGE)
    return 2
  if cmd == "copy":
    if rest:
      return copy_files(rest)
    print("copy: missing <file ...>\n" + USAGE)
    return 2

  print(f"Unknown command: {cmd}\n" + USAGE)
  return 2
+
+if __name__ == "__main__":
+ sys.exit(CLI())
--- /dev/null
(defun rt-format-buffer ()
  "Filter the whole buffer through the external `RT_Format pipe' command,
replacing the buffer contents with the formatted output."
  (interactive)
  (shell-command-on-region (point-min) (point-max)
                           "RT_Format pipe" t t))
--- /dev/null
+// commas and simple tight brackets
+int g(){int a=0,b=1,c=2; return h(a,b,c);}
+
+// balanced outermost-with-nesting -> pad inside outer ()
+int f(){return outer( inner(a,b) );}
+
+// strings and comments must be unchanged
+int s(){ printf("x ,y ,z (still a string)"); /* a ,b ,c */ return 1; }
+
+// unbalanced open-right with nesting -> pad after first unmatched '('
+int u(){ if(doit(foo(1,2) // missing ))
+ return 0; }
+
+// arrays / subscripts stay tight; commas still RT-style
+int a(int i,int j){ return M[i,j] + V[i] + W[j]; }
--- /dev/null
+# commas and spacing in defs / calls
+def f ( x , y , z ):
+ return dict( a =1 , b= 2 ), [ 1, 2 ,3 ], ( (1,2) )
+
+# outermost-with-nesting -> pad inside outer ()
+val = outer( inner( a,b ) )
+
+# strings/comments untouched
+s = "text, with , commas ( not to touch )" # a ,b ,c
+
+# unbalanced: open-left (closing without opener) -> no padding unless inner bracket before it
+def g():
+ return result) # likely unchanged
+
+# unbalanced: open-right (first unmatched opener) with inner bracket following
+k = compute(x, f(y
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
--- /dev/null
+../shared/authored/Harmony_sync/CLI.py
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env -S python3 -B
+# -*- mode: python; coding: utf-8; python-indent-offset: 2; indent-tabs-mode: nil -*-
+
+"""
+set_project_permissions — normalize a freshly cloned project to Harmony policies.
+
+usage:
+ set_project_permissions [default]
+ set_project_permissions help | --help | -h
+
+notes:
+ • Must be run from the toolsmith environment (ENV=tool/env, ROLE=toolsmith).
+ • Starts at $REPO_HOME.
+ • Baseline is umask-077 congruence:
+ - directories → 0700
+ - files → 0600, but preserve owner-exec (→ 0700 for executables)
+ applied to the entire repo, including release/, EXCEPT:
+ - release/kmod/*.ko → 0440
+ • Skips .git/ and symlinks.
+"""
+
+import os, sys, stat
+
+# Must match shared/authored/env policy:
+DEFAULT_UMASK = 0o077 # reminder only; effective modes below implement 077 congruence.
+
+DIR_MODE_077 = 0o700
+
def die(msg, code=1):
  """Print *msg* to stderr with the tool prefix, then exit with *code*."""
  sys.stderr.write(f"set_project_permissions: {msg}\n")
  sys.exit(code)
+
def require_toolsmith_env():
  """Abort unless running under the toolsmith environment
  (ENV='tool/env' and ROLE='toolsmith')."""
  env = os.environ.get("ENV", "")
  role = os.environ.get("ROLE", "")
  if env == "tool/env" and role == "toolsmith":
    return
  hint = (
    "This script should be run from the toolsmith environment.\n"
    "Try: source ./env_toolsmith (then re-run: set_project_permissions default)"
  )
  die(f"bad environment: ENV='{env}' ROLE='{role}'.\n{hint}")
+
def repo_home():
  """Return the canonicalized $REPO_HOME; abort when it is unset."""
  rh = os.environ.get("REPO_HOME")
  if rh:
    return os.path.realpath(rh)
  die("REPO_HOME is not set (did you source shared/authored/env?)")
+
def show_path(p, rh):
  """Render *p* for display, abbreviating the repo root *rh* as $REPO_HOME."""
  if not p.startswith(rh):
    return p
  return p.replace(rh, "$REPO_HOME", 1)
+
def is_git_dir(path):
  """True when *path*'s final component (ignoring trailing separators) is .git."""
  trimmed = path.rstrip(os.sep)
  return os.path.basename(trimmed) == ".git"
+
def file_target_mode_077_preserve_exec(current_mode: int) -> int:
  """077-congruent file mode: 0600, plus owner-exec when already present;
  all group/other bits are dropped."""
  keep_exec = current_mode & stat.S_IXUSR
  return 0o600 | keep_exec
+
def set_mode_if_needed(path, target, rh):
  """chmod *path* to *target* when its mode differs; return 1 if changed,
  else 0.  A path that vanished mid-walk counts as no change."""
  try:
    st = os.lstat(path)
  except FileNotFoundError:
    return 0
  if stat.S_IMODE(st.st_mode) == target:
    return 0
  os.chmod(path, target)
  print(f"+ chmod {oct(target)[2:]} '{show_path(path, rh)}'")
  return 1
+
def apply_policy(rh):
  """Walk the repo rooted at *rh* applying 077-congruent modes.

  Directories become 0700; files become 0600 (owner-exec preserved),
  EXCEPT release/kmod/*.ko which become 0440.  .git/ is pruned and
  symlinks are skipped.  Returns the number of chmods performed.
  """
  changed = 0
  release_root = os.path.join(rh, "release")
  for dirpath, dirnames, filenames in os.walk(rh, topdown=True, followlinks=False):
    # prune .git (topdown walk lets us drop it before descending into it)
    dirnames[:] = [d for d in dirnames if d != ".git"]

    # directories: 0700 everywhere (incl. release/)
    changed += set_mode_if_needed(dirpath, DIR_MODE_077, rh)

    # files: 0600 (+owner exec) everywhere, except release/kmod/*.ko → 0440
    rel_from_repo = os.path.relpath(dirpath, rh)
    under_release = rel_from_repo == "release" or rel_from_repo.startswith("release"+os.sep)
    top_under_release = ""
    if under_release:
      # first path component below release/, used to detect release/kmod/
      rel_from_release = os.path.relpath(dirpath, release_root)
      top_under_release = (rel_from_release.split(os.sep, 1)[0] if rel_from_release != "." else "")

    for fn in filenames:
      p = os.path.join(dirpath, fn)
      if os.path.islink(p):
        continue
      try:
        st = os.lstat(p)
      except FileNotFoundError:
        # file vanished mid-walk; nothing to do
        continue

      if under_release and top_under_release == "kmod" and fn.endswith(".ko"):
        target = 0o440  # kernel modules: read-only for owner and group
      else:
        target = file_target_mode_077_preserve_exec(stat.S_IMODE(st.st_mode))

      changed += set_mode_if_needed(p, target, rh)
  return changed
+
def cmd_default():
  """Apply the permission policy across $REPO_HOME and report change count."""
  require_toolsmith_env()
  root = repo_home()
  print(f"changes: {apply_policy(root)}")
+
def main():
  """Dispatch on argv: no args / 'default' applies the policy; 'help'
  prints the module docstring (exit 0); anything else prints it and
  exits 1."""
  args = sys.argv[1:]
  if not args or args[0] == "default":
    return cmd_default()
  print(__doc__.strip())
  if args[0] in ("help", "--help", "-h"):
    return 0
  return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
--- /dev/null
+#!/usr/bin/env python3
+# -*- mode: python; coding: utf-8; python-indent-offset: 2; indent-tabs-mode: nil -*-
+
+"""
+git-tar — Create an archive of the current Git repo's ref into ./scratchpad
+
+Commands (order-insensitive):
+ git-tar # default: tar.gz (HEAD, ./scratchpad, Z-stamp if importable)
+ git-tar help # show help
+ git-tar version # show version
+ git-tar ref-<REF> # choose ref (tag/branch/commit), default HEAD
+ git-tar out-<OUTDIR> # choose output directory (default: <repo>/scratchpad)
+ git-tar no-stamp # force omit timestamp even if Z is importable
+ git-tar z-format-<FMT> # override timestamp format used with Z (optional)
+ git-tar zip # write .zip instead of .tar.gz
+ git-tar tar # force .tar.gz explicitly
+
+Output names:
+ <repo>__<ref>[__<Z>].tar.gz
+ <repo>__<ref>[__<Z>].zip
+"""
+
+from __future__ import annotations
+import gzip, os, pathlib, subprocess, sys
+from typing import Optional
+import importlib, importlib.util
+from importlib.machinery import SourceFileLoader
+
+VERSION = "1.5"
+
+# ----------------------------------------------------------------------
+# Editable timestamp format (used when calling Z)
+# ----------------------------------------------------------------------
+Z_FORMAT = "%year-%month-%day_%hour%minute%secondZ"
+
+USAGE = f"""git-tar {VERSION}
+
+Usage:
+ git-tar [commands...]
+
+Commands (order-insensitive):
+ help
+ version
+ ref-<REF>
+ out-<OUTDIR>
+ no-stamp
+ z-format-<FMT>
+ zip
+ tar
+
+Examples:
+ git-tar
+ git-tar zip
+ git-tar ref-main out-/tmp
+ git-tar z-format-%year-%month-%dayT%hour:%minute:%second.%scintillaZ
+""".rstrip()
+
+# ----------------------------------------------------------------------
+# git helpers
+# ----------------------------------------------------------------------
+def _run(*args: str, check: bool = True, cwd: Optional[pathlib.Path] = None) -> subprocess.CompletedProcess[str]:
+ return subprocess.run(
+ args
+ ,check=check
+ ,cwd=(str(cwd) if cwd else None)
+ ,text=True
+ ,stdout=subprocess.PIPE
+ ,stderr=subprocess.PIPE
+ )
+
def _in_git_repo() -> bool:
  """Return True when the current directory lies inside a git work tree.

  Returns False both when git exits nonzero and when the git binary is
  missing entirely (FileNotFoundError from the subprocess machinery) —
  the original let the latter crash with a raw traceback.
  """
  try:
    out = _run("git", "rev-parse", "--is-inside-work-tree").stdout
  except (subprocess.CalledProcessError, FileNotFoundError):
    return False
  return out.strip().lower() == "true"
+
def _git_top() -> pathlib.Path:
  """Absolute path of the enclosing repository's top-level directory."""
  top = _run("git", "rev-parse", "--show-toplevel").stdout.strip()
  return pathlib.Path(top)
+
def _git_ref_label(repo_top: pathlib.Path, ref: str) -> str:
  """Human-friendly label for *ref*: `git describe` output, falling back to
  the short commit hash when describe fails."""
  repo = str(repo_top)
  try:
    return _run("git", "-C", repo, "describe", "--tags", "--always", "--dirty", ref).stdout.strip()
  except subprocess.CalledProcessError:
    return _run("git", "-C", repo, "rev-parse", "--short", ref).stdout.strip()
+
+# ----------------------------------------------------------------------
+# Z module discovery (supports extension-less file named "Z")
+# ----------------------------------------------------------------------
def _import_Z_module(repo_top: pathlib.Path) -> Optional[object]:
  """Locate and import the 'Z' timestamp module, or return None.

  Resolution order:
    1. a normal ``import Z`` from sys.path;
    2. files named Z / Z.py next to this script;
    3. known RT-project-share release locations under *repo_top*;
    4. any extension-less file named 'Z' on $PATH.

  A candidate is accepted only when it exposes make_timestamp() or the
  get_utc_dict()/format_timestamp() pair.  All failures are swallowed —
  the caller treats None as "no timestamp available".
  """
  try:
    return importlib.import_module("Z")
  except Exception:
    pass

  candidates: list[pathlib.Path] = []
  here = pathlib.Path(__file__).resolve().parent
  candidates += [here / "Z", here / "Z.py"]
  candidates += [
    repo_top / "shared" / "third_party" / "RT-project-share" / "release" / "python" / "Z",
    repo_top / "shared" / "third_party" / "RT-project-share" / "release" / "python" / "Z.py",
    repo_top / "shared" / "third_party" / "RT-project-share" / "release" / "bash" / "Z",
  ]
  # $PATH entries may hold an extension-less module file named Z.
  for d in (pathlib.Path(p) for p in (os.getenv("PATH") or "").split(os.pathsep) if p):
    p = d / "Z"
    if p.exists() and p.is_file():
      candidates.append(p)

  for path in candidates:
    try:
      if not path.exists() or not path.is_file():
        continue
      # SourceFileLoader allows loading a module from a file with no .py suffix.
      spec = importlib.util.spec_from_loader("Z", SourceFileLoader("Z", str(path)))
      if not spec or not spec.loader:
        continue
      mod = importlib.util.module_from_spec(spec)
      spec.loader.exec_module(mod)  # type: ignore[attr-defined]
      if hasattr(mod,"make_timestamp") or (hasattr(mod,"get_utc_dict") and hasattr(mod,"format_timestamp")):
        return mod
    except Exception:
      continue
  return None
+
+# ----------------------------------------------------------------------
+# Z stamp helper (format string visible & editable above)
+# ----------------------------------------------------------------------
def make_z_stamp(zmod: object, z_format: str) -> Optional[str]:
  """Produce a timestamp string via the Z module, or None on any failure.

  Supports either Z API: ``make_timestamp(fmt=...)`` or the
  ``get_utc_dict()`` + ``format_timestamp(dict, fmt)`` pair.  Empty
  results and exceptions both map to None.
  """
  def _clean(value) -> Optional[str]:
    # Strip surrounding whitespace and embedded newlines; empty -> None.
    text = str(value).strip().replace("\n", "")
    return text or None

  try:
    if hasattr(zmod, "make_timestamp"):
      return _clean(zmod.make_timestamp(fmt=z_format))  # type: ignore[attr-defined]
    if hasattr(zmod, "get_utc_dict") and hasattr(zmod, "format_timestamp"):
      parts = zmod.get_utc_dict()  # type: ignore[attr-defined]
      return _clean(zmod.format_timestamp(parts, z_format))  # type: ignore[attr-defined]
  except Exception:
    return None
  return None
+
+# ----------------------------------------------------------------------
+# archiving
+# ----------------------------------------------------------------------
def _stream_git_archive_tar(repo_top: pathlib.Path, prefix: str, ref: str, out_gz_path: pathlib.Path) -> None:
  """Stream `git archive --format=tar` through gzip into *out_gz_path*.

  On a nonzero git exit the partial output is removed and
  CalledProcessError is raised.
  """
  cmd = ["git", "-C", str(repo_top), "archive", "--format=tar", f"--prefix={prefix}/", ref]
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  try:
    with gzip.open(out_gz_path, "wb") as gz:
      # 1 MiB chunks keep memory bounded for large repositories.
      for chunk in iter(lambda: proc.stdout.read(1024 * 1024), b""):
        gz.write(chunk)
  finally:
    if proc.stdout:
      proc.stdout.close()
  rc = proc.wait()
  if rc != 0:
    try:
      out_gz_path.unlink(missing_ok=True)
    finally:
      raise subprocess.CalledProcessError(rc, proc.args)
+
def _stream_git_archive_zip(repo_top: pathlib.Path, prefix: str, ref: str, out_zip_path: pathlib.Path) -> None:
  """Stream `git archive --format=zip` directly into *out_zip_path*.

  git builds the zip itself; we just copy bytes.  On a nonzero git exit
  the partial output is removed and CalledProcessError is raised.
  """
  cmd = ["git", "-C", str(repo_top), "archive", "--format=zip", f"--prefix={prefix}/", ref]
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  try:
    with open(out_zip_path, "wb") as sink:
      for chunk in iter(lambda: proc.stdout.read(1024 * 1024), b""):
        sink.write(chunk)
  finally:
    if proc.stdout:
      proc.stdout.close()
  rc = proc.wait()
  if rc != 0:
    try:
      out_zip_path.unlink(missing_ok=True)
    finally:
      raise subprocess.CalledProcessError(rc, proc.args)
+
+# ----------------------------------------------------------------------
+# work function
+# ----------------------------------------------------------------------
def work(
  ref: str = "HEAD"
  ,outdir: Optional[pathlib.Path] = None
  ,force_no_stamp: bool = False
  ,z_format: Optional[str] = None
  ,archive_kind: str = "tar"  # "tar" or "zip"
) -> pathlib.Path:
  """Archive *ref* of the enclosing repo into *outdir*
  (default: <repo>/scratchpad) and return the archive path.

  Raises RuntimeError for an unknown *archive_kind* or when run outside
  a git repository.
  """
  if archive_kind not in ("tar","zip"):
    raise RuntimeError("archive_kind must be 'tar' or 'zip'")
  if not _in_git_repo():
    raise RuntimeError("not inside a git repository")

  repo_top = _git_top()
  repo_name = repo_top.name
  label = _git_ref_label(repo_top, ref)

  # Optional Z timestamp; silently omitted when Z cannot be imported.
  stamp: Optional[str] = None
  if not force_no_stamp:
    zmod = _import_Z_module(repo_top)
    if zmod is not None:
      stamp = make_z_stamp(zmod, z_format or Z_FORMAT)

  dest_dir = outdir if outdir is not None else repo_top / "scratchpad"
  dest_dir.mkdir(parents=True, exist_ok=True)

  ext = ".zip" if archive_kind == "zip" else ".tar.gz"
  stamp_part = f"__{stamp}" if stamp else ""
  out_path = dest_dir / f"{repo_name}__{label}{stamp_part}{ext}"

  if archive_kind == "zip":
    _stream_git_archive_zip(repo_top, repo_name, ref, out_path)
  else:
    _stream_git_archive_tar(repo_top, repo_name, ref, out_path)
  return out_path
+
+# ----------------------------------------------------------------------
+# CLI with command tokens
+# ----------------------------------------------------------------------
def CLI(argv: Optional[list[str]] = None) -> int:
  """Parse order-insensitive command tokens, then run the archive job.

  An empty token list simply runs with all defaults, so the explicit
  no-argument fast path of the earlier revision is unnecessary.
  """
  tokens = sys.argv[1:] if argv is None else argv

  # defaults
  ref = "HEAD"
  outdir: Optional[pathlib.Path] = None
  force_no_stamp = False
  z_format: Optional[str] = None
  archive_kind = "tar"

  for tok in tokens:
    if tok in ("help","-h","--help"):
      print(USAGE)
      return 0
    if tok == "version":
      print(f"git-tar {VERSION}")
      return 0
    if tok == "no-stamp":
      force_no_stamp = True
    elif tok == "zip":
      archive_kind = "zip"
    elif tok == "tar":
      archive_kind = "tar"
    elif tok.startswith("ref-"):
      ref = tok[4:] or ref
    elif tok.startswith("out-"):
      remainder = tok[4:]
      outdir = pathlib.Path(remainder).resolve() if remainder else None
    elif tok.startswith("z-format-"):
      z_format = tok[len("z-format-"):] or None
    else:
      print(f"git-tar: unknown command '{tok}'", file=sys.stderr)
      return 1

  try:
    out_path = work(ref=ref, outdir=outdir, force_no_stamp=force_no_stamp, z_format=z_format, archive_kind=archive_kind)
  except Exception as e:
    print(f"git-tar: {e}", file=sys.stderr)
    return 1

  print(f"Wrote {out_path}")
  return 0
+
+# ----------------------------------------------------------------------
+if __name__ == "__main__":
+ raise SystemExit(CLI())
--- /dev/null
+#!/usr/bin/env -S python3 -B
+# -*- mode: python; coding: utf-8; python-indent-offset: 2; indent-tabs-mode: nil -*-
+
+import os, sys, shutil, stat, pwd, grp, glob, tempfile
+
+HELP = """usage: release {write|clean|ls|help|dry write} [DIR]
+ write [DIR] Writes released files into $REPO_HOME/release. If [DIR] is specified, only writes files found in scratchpad/DIR.
+ clean [DIR] Remove the contents of the release directories. If [DIR] is specified, clean only the contents of that release directory.
+ ls List release/ as an indented tree: PERMS OWNER NAME (root-level dotfiles printed first).
+ help Show this message.
+ dry write [DIR]
+ Preview what write would do without modifying the filesystem.
+"""
+
+ENV_MUST_BE = "developer/tool/env"
+DEFAULT_DIR_MODE = 0o700 # 077-congruent dirs
+
def exit_with_status(msg, code=1):
  """Write a prefixed error message to stderr and terminate with *code*."""
  sys.stderr.write(f"release: {msg}\n")
  sys.exit(code)
+
def assert_env():
  """Abort unless ENV matches the developer tool environment."""
  env = os.environ.get("ENV", "")
  if env == ENV_MUST_BE:
    return
  hint = (
    "ENV is not 'developer/tool/env'.\n"
    "Enter the project with: source ./env_developer\n"
    "That script exports: ROLE=developer; ENV=$ROLE/tool/env"
  )
  exit_with_status(f"bad environment: ENV='{env}'. {hint}")
+
def repo_home():
  """Return $REPO_HOME, aborting when the variable is unset or empty."""
  root = os.environ.get("REPO_HOME")
  if root:
    return root
  exit_with_status("REPO_HOME not set (did you 'source ./env_developer'?)")
+
def dpath(*parts):
  """Absolute path under $REPO_HOME/developer/."""
  segments = ("developer",) + parts
  return os.path.join(repo_home(), *segments)
+
def rpath(*parts):
  """Absolute path under $REPO_HOME/release/."""
  segments = ("release",) + parts
  return os.path.join(repo_home(), *segments)
+
def dev_root():
  """Root of the developer tree: $REPO_HOME/developer."""
  return dpath()
+
def rel_root():
  """Root of the release tree: $REPO_HOME/release."""
  return rpath()
+
def _display_src(p_abs: str) -> str:
  """Render a source path for messages.

  Paths inside the developer tree are shown relative to it; anything else
  (or any path error, e.g. mixed drives) falls back to the absolute path.
  """
  try:
    root = dev_root()
    inside = os.path.commonpath([root]) == os.path.commonpath([root, p_abs])
    if inside:
      return os.path.relpath(p_abs, root)
  except Exception:
    pass
  return p_abs
+
def _display_dst(p_abs: str) -> str:
  """Render a destination path as '$REPO_HOME/release[/rel]' for messages."""
  try:
    rel = os.path.relpath(p_abs, rel_root())
    if rel == ".":
      return "$REPO_HOME/release"
    return "$REPO_HOME/release/" + rel
  except Exception:
    return p_abs
+
def ensure_mode(path, mode):
  """Best-effort chmod; failures are deliberately ignored."""
  try:
    os.chmod(path, mode)
  except Exception:
    pass
+
def ensure_dir(path, mode=DEFAULT_DIR_MODE, dry=False):
  """Create *path* with *mode* (idempotent); in dry mode only report the mkdir."""
  if not dry:
    os.makedirs(path, exist_ok=True)
    ensure_mode(path, mode)
    return
  if os.path.isdir(path):
    return
  # Pick the friendliest rendering of the path for the preview line.
  if path.startswith(rel_root()):
    shown = _display_dst(path)
  elif path.startswith(dev_root()):
    shown = os.path.relpath(path, dev_root())
  else:
    shown = path
  print(f"(dry) mkdir -m {oct(mode)[2:]} '{shown}'")
+
def filemode(m):
  """Symbolic permission string ('-rw-r--r--') for mode *m*; octal fallback."""
  try:
    return stat.filemode(m)
  except Exception:
    return oct(m & 0o777)
+
def owner_group(st):
  """'owner:group' names for a stat result; numeric ids when lookup fails."""
  try:
    owner = pwd.getpwuid(st.st_uid).pw_name
    group = grp.getgrgid(st.st_gid).gr_name
    return f"{owner}:{group}"
  except Exception:
    return f"{st.st_uid}:{st.st_gid}"
+
+# ---------- LS (two-pass owner:group width) ----------
def list_tree(root):
  """Print release/ as an indented tree: PERMS OWNER:GROUP NAME.

  Root-level dotfiles come first, then directories (recursing before the
  remaining files).  A first pass collects every row so the owner:group
  column can be padded to a uniform width.
  """
  if not os.path.isdir(root):
    return

  rows = []  # (depth, perms, owner:group, display name)

  def record(entry, depth, suffix=""):
    st = os.lstat(entry.path)
    rows.append((depth, filemode(st.st_mode), owner_group(st), entry.name + suffix))

  def walk(path, depth, at_root):
    try:
      children = list(os.scandir(path))
    except FileNotFoundError:
      return
    subdirs = sorted((c for c in children if c.is_dir(follow_symlinks=False)),
                     key=lambda c: c.name)
    plain = sorted((c for c in children if not c.is_dir(follow_symlinks=False)),
                   key=lambda c: c.name)
    if at_root:
      for f in plain:
        if f.name.startswith("."):
          record(f, depth)
    for d in subdirs:
      record(d, depth, "/")
      walk(d.path, depth + 1, False)
    for f in plain:
      if at_root and f.name.startswith("."):
        continue  # already listed in the dotfile pass above
      record(f, depth)

  walk(root, 1, True)

  width = max((len(og) for (_d, _p, og, _n) in rows), default=0)
  print("release/")
  for depth, perms, og, name in rows:
    print(f"{perms} {og:<{width}} {' ' * depth}{name}")
+# ---------- end LS ----------
+
def iter_src_files(topdir, src_root):
  """Yield (absolute_source_path, relative_dest_path) pairs under *topdir*.

  For 'kmod' only top-level *.ko files are released; every other directory
  is walked recursively in sorted order.  Yields nothing when the base
  directory does not exist.
  """
  base = os.path.join(src_root, topdir) if topdir else src_root
  if not os.path.isdir(base):
    return
  # BUGFIX: a stray bare `yield` used to sit here, emitting a spurious None
  # before the real pairs and crashing callers that unpack 2-tuples.
  if topdir == "kmod":
    for p in sorted(glob.glob(os.path.join(base, "*.ko"))):
      yield (p, os.path.basename(p))
  else:
    for root, dirs, files in os.walk(base):
      dirs.sort(); files.sort()
      for fn in files:
        src = os.path.join(root, fn)
        rel = os.path.relpath(src, base)
        yield (src, rel)
+
+def _target_mode_from_source(src_abs: str) -> int:
+ """077 policy: files 0600; if source has owner-exec, make 0700."""
+ try:
+ sm = stat.S_IMODE(os.stat(src_abs).st_mode)
+ except FileNotFoundError:
+ return 0o600
+ return 0o700 if (sm & stat.S_IXUSR) else 0o600
+
def copy_one(src_abs, dst_abs, dry=False):
  """Install one file into the release tree with an 077-compliant mode.

  The copy is atomic: bytes go to a mkstemp file in the destination
  directory, the mode is enforced, then os.replace swaps it into place.
  If the destination's parent is not user-writable it is temporarily
  chmod'ed u+w and restored afterwards.  In dry mode only the equivalent
  shell commands are printed and nothing is touched.
  """
  src_show = _display_src(src_abs)
  dst_show = _display_dst(dst_abs)
  parent = os.path.dirname(dst_abs)
  os.makedirs(parent, exist_ok=True)
  # 0600, or 0700 when the source carries owner-exec.
  target_mode = _target_mode_from_source(src_abs)

  def _is_writable_dir(p): return os.access(p, os.W_OK)
  flip_needed = not _is_writable_dir(parent)
  restore_mode = None  # original parent mode, saved only if we flip u+w
  parent_show = _display_dst(parent)

  if dry:
    if flip_needed:
      print(f"(dry) chmod u+w '{parent_show}'")
    if os.path.exists(dst_abs):
      print(f"(dry) unlink '{dst_show}'")
    # show final mode we will set
    print(f"(dry) install -m {oct(target_mode)[2:]} -D '{src_show}' '{dst_show}'")
    if flip_needed:
      print(f"(dry) chmod u-w '{parent_show}'")
    return

  try:
    if flip_needed:
      try:
        st_parent = os.stat(parent)
        restore_mode = stat.S_IMODE(st_parent.st_mode)
        os.chmod(parent, restore_mode | stat.S_IWUSR)
      except PermissionError:
        exit_with_status(f"cannot write: parent dir not writable and chmod failed on {parent_show}")

    # Atomic replace with enforced 077-compliant mode
    fd, tmp_path = tempfile.mkstemp(prefix='.tmp.', dir=parent)
    try:
      with os.fdopen(fd, "wb") as tmpf, open(src_abs, "rb") as sf:
        shutil.copyfileobj(sf, tmpf)
        tmpf.flush()
      # chmod before replace so the file never appears with mkstemp's 0600-ish
      # default other than transiently under the hidden temp name.
      os.chmod(tmp_path, target_mode)
      os.replace(tmp_path, dst_abs)
    finally:
      try:
        # tmp_path survives only when the replace did not happen; clean it up.
        if os.path.exists(tmp_path):
          os.unlink(tmp_path)
      except Exception:
        pass
  finally:
    # Always restore the parent's original mode if we flipped it.
    if restore_mode is not None:
      try: os.chmod(parent, restore_mode)
      except Exception: pass

  print(f"+ install -m {oct(target_mode)[2:]} '{src_show}' '{dst_show}'")
+
def write_one_dir(topdir, dry):
  """Release every artifact from scratchpad/<topdir> into release/<topdir>."""
  scratch_root = dpath("scratchpad")
  scratch_dir = os.path.join(scratch_root, topdir)
  release_dir = os.path.join(rpath(), topdir)

  if not os.path.isdir(scratch_dir):
    exit_with_status(
      f"cannot write: expected '{_display_src(scratch_dir)}' to exist. "
      f"Create scratchpad/{topdir} (Makefiles may need to populate it)."
    )

  ensure_dir(release_dir, DEFAULT_DIR_MODE, dry=dry)

  copied_any = False
  for src_abs, rel in iter_src_files(topdir, scratch_root):
    copy_one(src_abs, os.path.join(release_dir, rel), dry=dry)
    copied_any = True

  if not copied_any:
    msg = "no matching artifacts found"
    if topdir == "kmod":
      msg += " (looking for *.ko)"
    print(f"(info) {msg} in {_display_src(scratch_dir)}")
+
def cmd_write(dir_arg, dry=False):
  """Release scratchpad artifacts: one directory when given, else all of them."""
  assert_env()
  ensure_dir(rpath(), DEFAULT_DIR_MODE, dry=dry)

  scratch = dpath("scratchpad")
  if not os.path.isdir(scratch):
    exit_with_status(f"cannot find developer scratchpad at '{_display_src(scratch)}'")

  if dir_arg:
    write_one_dir(dir_arg, dry=dry)
    return

  topdirs = sorted(e.name for e in os.scandir(scratch) if e.is_dir(follow_symlinks=False))
  if not topdirs:
    print(f"(info) nothing to release; no subdirectories found under {_display_src(scratch)}")
    return
  for name in topdirs:
    write_one_dir(name, dry=dry)
+
+def _clean_contents(dir_path):
+ if not os.path.isdir(dir_path): return
+ for name in os.listdir(dir_path):
+ p = os.path.join(dir_path, name)
+ if os.path.isdir(p) and not os.path.islink(p):
+ shutil.rmtree(p, ignore_errors=True)
+ else:
+ try: os.unlink(p)
+ except FileNotFoundError: pass
+
def cmd_clean(dir_arg):
  """Empty release/<dir_arg>, or the contents of every release subdirectory."""
  assert_env()
  root = rpath()
  if not os.path.isdir(root):
    return
  if dir_arg:
    _clean_contents(os.path.join(root, dir_arg))
    return
  for entry in os.scandir(root):
    if entry.is_dir(follow_symlinks=False):
      _clean_contents(entry.path)
+
def CLI():
  """Dispatch argv: write / clean / ls / help / dry write; anything else prints HELP."""
  argv = sys.argv[1:]
  if not argv:
    print(HELP)
    return
  cmd, args = argv[0], argv[1:]
  if cmd == "write":
    cmd_write(args[0] if args else None, dry=False)
  elif cmd == "clean":
    cmd_clean(args[0] if args else None)
  elif cmd == "ls":
    list_tree(rpath())
  elif cmd == "dry" and args and args[0] == "write":
    cmd_write(args[1] if len(args) >= 2 else None, dry=True)
  else:
    # 'help', 'dry' without 'write', and unknown commands all show usage.
    print(HELP)
+
if __name__ == "__main__":
  # Script entry: subcommands report their own errors and sys.exit on failure.
  CLI()