--- /dev/null
+#!/usr/bin/python3
+
+#--------------------------------------------------------------------------------
+# generate a random string length n of form
+# "" | [A-Za-z^IloO] | [A-Za-z^IloO][0-9A-Za-z_-^IloO01]*[A-Za-z^IloO]
+#
+
+import sys
+import random
+
+# note missing IloO01.
+# period and comma might be difficult, a light period might be missed
+# capital I in a sans-serif font looks like the number 1.
+# email addresses are not case sensitive
+# these are not all the legal characters, but rather the ones we think are legal and legible
+legal_chars_end = "abcdefghijkmnpqrstuvwxz"
+legal_chars = "23456789abcdefghijkmnpqrstuvwxz"
+
+def index():
+ return random.randrange(0 ,len(legal_chars))
+
+def index_end():
+ return random.randrange(0 ,len(legal_chars_end))
+
+def char():
+ return legal_chars[index()]
+
+def char_end():
+ return legal_chars_end[index_end()]
+
+def string(n=6):
+ if n < 0 : raise Exception("string called with negative length")
+ if n == 0 : return ""
+
+ result = char_end()
+ if n == 1: return result
+
+ for _ in range(n-2): result += char()
+ result += char_end()
+
+ return result
+
+def test_0():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char()
+ while c != '~' and i < limit:
+ i += 1
+ c = char()
+ print(i)
+ return i < limit
+
+def test_1():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char()
+ while c != '0' and i < limit:
+ i += 1
+ c = char()
+ print(i)
+ return i < limit
+
+def test_2():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char_end()
+ while c != 'z' and i < limit:
+ i += 1
+ c = char_end()
+ print(i)
+ return i < limit
+
+def test_3 ():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char_end()
+ while c != 'A' and i < limit:
+ i += 1
+ c = char_end()
+ print(i)
+ return i < limit
+
+def test_4():
+ s0 = string()
+ s1 = string(10)
+ s2 = string(100)
+
+ print(s0)
+ print(s1)
+ print(s2)
+
+ return len(s0)==6 and len(s1)==10 and len(s2)==100
+
+
+n=6
+if len(sys.argv) > 1:
+ n = int(sys.argv[1])
+print(string(n))
+
--- /dev/null
+#!/usr/bin/python3
+
+#--------------------------------------------------------------------------------
+# generate a random string length n of form
+# "" | [A-Za-z^IloO] | [A-Za-z^IloO][0-9A-Za-z_-^IloO01]*[A-Za-z^IloO]
+#
+
+import sys
+import random
+
+#debug = True
+debug = False
+
+# note missing IloO01.
+# period and comma might be difficult, a light period might be missed
+# capital I in a sans-serif font looks like the number 1.
+# zero and oh of course
+# SNCF does not take an & as a special hmm.
+chars_alpha_small = "abcdefghijkmnpqrstuvwxz"
+chars_alpha_big = "ABCDEFGHJKLMNPQRSTUVWXYZ"
+chars_special = "+-_$.,;?"
+chars_numeric = "23456789"
+
+sets_leftmost=[chars_alpha_small ,chars_alpha_big]
+sets_standard=[chars_alpha_small ,chars_alpha_big ,chars_special ,chars_numeric]
+sets_rightmost=[chars_alpha_small ,chars_alpha_big ,chars_numeric]
+
+def set_psets(sets):
+ len_sum = sum(len(x) for x in sets)
+ psets = [len(x)/len_sum + 1e-7 for x in sets]
+ if debug: print("psets" ,psets ,"sum:" ,sum(psets))
+ return psets
+
+def choose_set(sets):
+ psets = set_psets(sets)
+ x = random.random()
+ if debug: print("x:" ,x)
+ if len(psets) == 0: return 0
+ p = psets[0]
+ choice = len(psets) - 1
+ i = 0
+ while i != len(psets) - 1:
+ if x <= p :
+ choice = i
+ break
+ i += 1
+ p += psets[i]
+ if debug: print("choice:" ,choice)
+ return choice
+
+def get_char_set(set):
+ index = random.randrange(0 ,len(set))
+ return set[index]
+
+def get_char(sets):
+ set = sets[choose_set(sets)]
+ return get_char_set(set)
+
+def get_string(n):
+ if n < 0 : raise Exception("string called with negative length")
+ if n == 0: return ""
+    if n == 1: return get_char(sets_leftmost)
+
+ result = ""
+ result += get_char(sets_leftmost)
+ for _ in range(n-2): result += get_char(sets_standard)
+ result += get_char(sets_rightmost)
+
+ return result
+
+def get_passwd(n ,strict):
+    # strict demands at least one special and one numeric char; specials can only
+    # appear in interior positions, so strict is unsatisfiable for n < 3 and the
+    # retry loop below would never terminate
+    if strict and n < 3:
+        raise Exception("strict password requires length >= 3")
+    s = get_string(n)
+    while strict:
+ has_special = any(i in s for i in chars_special)
+ has_numeric = any(i in s for i in chars_numeric)
+ if debug: print("strict:" ,has_special ,has_numeric)
+ if has_special and has_numeric:
+ break
+ s = get_string(n)
+ return s
+
+strict = False
+n=9
+for arg in sys.argv[1:]:
+ if arg.isdigit(): n = int(arg)
+ if arg == "strict": strict = True
+
+if debug:
+ print("n:" ,n)
+ print("strict:" ,strict)
+
+pw = get_passwd(n ,strict)
+print(pw)
+sys.exit(0)
+
+
--- /dev/null
+#!/usr/bin/python3
+
+#--------------------------------------------------------------------------------
+# generate a random string length n of form
+#
+
+import sys
+import random
+
+# note missing IloO01.
+# period and comma might be difficult, a light period might be missed
+# capital I in a sans-serif font looks like the number 1.
+legal_chars_end = "abcdefghjkmnpqrstuvwxz"
+legal_chars = "23456789abcdefghijkmnpqrstuvwxz"
+
+def index():
+ return random.randrange(0 ,len(legal_chars))
+
+def index_end():
+ return random.randrange(0 ,len(legal_chars_end))
+
+def char():
+ return legal_chars[index()]
+
+def char_end():
+ return legal_chars_end[index_end()]
+
+def string(n=6):
+ if n < 0 : raise Exception("string called with negative length")
+ if n == 0 : return ""
+
+ result = char_end()
+ if n == 1: return result
+
+ for _ in range(n-2): result += char()
+ result += char_end()
+
+ return result
+
+def test_0():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char()
+ while c != '~' and i < limit:
+ i += 1
+ c = char()
+ print(i)
+ return i < limit
+
+def test_1():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char()
+ while c != '0' and i < limit:
+ i += 1
+ c = char()
+ print(i)
+ return i < limit
+
+def test_2():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char_end()
+ while c != 'z' and i < limit:
+ i += 1
+ c = char_end()
+ print(i)
+ return i < limit
+
+def test_3 ():
+ limit = 1e7 # surely by then
+ i = 0
+ c = char_end()
+ while c != 'A' and i < limit:
+ i += 1
+ c = char_end()
+ print(i)
+ return i < limit
+
+def test_4():
+ s0 = string()
+ s1 = string(10)
+ s2 = string(100)
+
+ print(s0)
+ print(s1)
+ print(s2)
+
+ return len(s0)==6 and len(s1)==10 and len(s2)==100
+
+
+n=16
+if len(sys.argv) > 1:
+ n = int(sys.argv[1])
+print(string(n))
+
+++ /dev/null
-#!/bin/sh
-
-/usr/bin/date -u +"%Y-%m-%dT%H:%M:%SZ"
+++ /dev/null
-# ssh login will fail if .bashrc writes to stdout, so we write to "bash_error.txt"
-# set -x
-# in F37 something seems to be caching PATH, which can be annoying
-
-# If not running interactively, don't do anything
- case $- in
- *i*) ;;
- *) return;;
- esac
-
-# This should also be the default from login.defs, because gnome ignores
-# .login, .profile, etc. and uses systemd to launch applications from the desktop,
- umask 0077
-
-# - note the variable $PROMPT_DECOR, that is how the project name ends up in the prompt.
-# - without -i bash will clear PS1, just because, so we set PPS1, ,PPS2 to not lose the profit.
-# - use $(pwd) instead of \w or it will prefix '~' which confuses dirtrack when the
-# user is changed using su
- export PPS1='\n$($iseq/Z)[$PROMPT_DECOR]\n\u@\h§$(pwd)§\n> '
- export PPS2='>> '
- export PS1="$PPS1"
- export PS2="$PPS2"
-
-# sort the output of printenv, show newlines as environment variable values as \n
- alias printenv='printenv | awk '\''{gsub(/\n/, "\\n")}1'\'' | sort'
-
-# iso time in ls -l, show hidden files, human readable sizes
- alias ls='ls -a -h --time-style=long-iso'
-
-# iso time for all Linux programs, which they will all ignore, but at least we
-# tried, perhaps someday ...
- export TZ=UTC
- export TIME_STYLE=long-iso
- export LC_ALL=en_DK.UTF-8
-
-# -l don't truncate long lins
-# -p show pids
- alias pstree='pstree -lp'
-
-# - make bash gp to sleep, revealing the calling shell
-# - useful for job control of multiple bash shells from a controlling shell
- alias zzz="kill -STOP \$\$"
-
-# The one true operating system.
-# Proof that an OS can be as small as an editor.
- export EDITOR=emacs
-
-# check the window size after each command and, if necessary, update the values
-# of LINES and COLUMNS.
- shopt -s checkwinsize
-
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-# Check if at least one file is provided
-if [ $# -eq 0 ]; then
- echo "Usage: $(script_fp) <filename1> [filename2] ..."
- exit 1
-fi
-
-# Loop through all the provided files
-for file in "$@"; do
- # Check if the file exists
- if [ ! -f "$file" ]; then
- echo "Error: File '$file' not found!"
- continue
- fi
-
- # Print 80 dashes
- printf '%.0s-' {1..80}
- echo
-
- # Print the filename and a colon
- echo "$file:"
-
- # Print the contents of the file
- cat "$file"
-
- # Print a newline for spacing between files
- echo
-done
+++ /dev/null
-#!/bin/env /bin/bash
-
-# Description: Descends from $1, or pwd, looking for empty directories and adds a `.githolder` to them.
-# does not descend into hidden directories.
-
-# examples:
-# > git_holder
-# > git_holder --dry-run
-
-set -e
-
-find_empty_dirs() {
- local dir="$1"
- local dry_run="$2"
-
- # Skip `.git` specifically
- if [[ "$(basename "$dir")" == ".git" ]]; then
- return
- fi
-
- # Check if the directory is empty (including hidden files, excluding `.` and `..`)
- if [[ -z $(find "$dir" -mindepth 1 -maxdepth 1 -print -quit) ]]; then
- if [[ "$dry_run" == "true" ]]; then
- echo "Dry-run: Would add .githolder in $dir"
- else
- echo "Adding .githolder to $dir"
- touch "$dir/.githolder"
- fi
- else
- # Recurse into subdirectories
- for subdir in "$dir"/*/ "$dir"/.[!.]/; do
- if [[ -d "$subdir" && "$subdir" != "$dir/.[!.]/" ]]; then
- find_empty_dirs "$subdir" "$dry_run"
- fi
- done
- fi
-}
-
-# Default parameters
-dry_run="false"
-target_dir="."
-
-# Parse arguments
-while [[ $# -gt 0 ]]; do
- case "$1" in
- --dry-run)
- dry_run="true"
- shift
- ;;
- *)
- if [[ -d "$1" ]]; then
- target_dir="$1"
- shift
- else
- echo "Invalid argument: $1 is not a directory"
- exit 1
- fi
- ;;
- esac
-done
-
-# Run the function
-find_empty_dirs "$target_dir" "$dry_run"
+++ /dev/null
-#!/bin/sh
-#
-# rm_tilda_files_tree .
-#
-
-if [ ! -d "$1" ]; then
- echo "expected first arg to be a directory where rm_tilda_files_tree is to be applied, but found: " "$1"
- exit 1
-fi
-
-find "$1" -not -path '*/\.git/*' -name '*~' -exec rm {} \;
-
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-# vl 'vertical list'
-
-# Check if the command is provided
-if [ -z "$1" ]; then
- echo "Usage: vl <command> [args...]"
- exit 1
-fi
-
-# Capture the command and its arguments
-cmd=$1
-shift
-
-# Run the command with the remaining arguments and replace colons or spaces with newlines
-"$cmd" "$@" | tr ' :' '\n'
-
-exit 0
--- /dev/null
+#!/bin/sh
+
+/usr/bin/date -u +"%Y-%m-%dT%H:%M:%SZ"
--- /dev/null
+# ssh login will fail if .bashrc writes to stdout, so we write to "bash_error.txt"
+# set -x
+# in F37 something seems to be caching PATH, which can be annoying
+
+# If not running interactively, don't do anything
+ case $- in
+ *i*) ;;
+ *) return;;
+ esac
+
+# This should also be the default from login.defs, because gnome ignores
+# .login, .profile, etc. and uses systemd to launch applications from the desktop,
+ umask 0077
+
+# - note the variable $PROMPT_DECOR, that is how the project name ends up in the prompt.
+# - without -i bash will clear PS1, just because, so we set PPS1, ,PPS2 to not lose the profit.
+# - use $(pwd) instead of \w or it will prefix '~' which confuses dirtrack when the
+# user is changed using su
+ export PPS1='\n$($iseq/Z)[$PROMPT_DECOR]\n\u@\h§$(pwd)§\n> '
+ export PPS2='>> '
+ export PS1="$PPS1"
+ export PS2="$PPS2"
+
+# sort the output of printenv, show newlines as environment variable values as \n
+ alias printenv='printenv | awk '\''{gsub(/\n/, "\\n")}1'\'' | sort'
+
+# iso time in ls -l, show hidden files, human readable sizes
+ alias ls='ls -a -h --time-style=long-iso'
+
+# iso time for all Linux programs, which they will all ignore, but at least we
+# tried, perhaps someday ...
+ export TZ=UTC
+ export TIME_STYLE=long-iso
+ export LC_ALL=en_DK.UTF-8
+
+# -l don't truncate long lines
+# -p show pids
+ alias pstree='pstree -lp'
+
+# - make bash go to sleep, revealing the calling shell
+# - useful for job control of multiple bash shells from a controlling shell
+ alias zzz="kill -STOP \$\$"
+
+# The one true operating system.
+# Proof that an OS can be as small as an editor.
+ export EDITOR=emacs
+
+# check the window size after each command and, if necessary, update the values
+# of LINES and COLUMNS.
+ shopt -s checkwinsize
+
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# Check if at least one file is provided
+if [ $# -eq 0 ]; then
+  echo "Usage: $script_afp <filename1> [filename2] ..."
+ exit 1
+fi
+
+# Loop through all the provided files
+for file in "$@"; do
+ # Check if the file exists
+ if [ ! -f "$file" ]; then
+ echo "Error: File '$file' not found!"
+ continue
+ fi
+
+ # Print 80 dashes
+ printf '%.0s-' {1..80}
+ echo
+
+ # Print the filename and a colon
+ echo "$file:"
+
+ # Print the contents of the file
+ cat "$file"
+
+ # Print a newline for spacing between files
+ echo
+done
--- /dev/null
+#!/usr/bin/env bash
+
+# Description: Descends from $1, or pwd, looking for empty directories and adds a `.githolder` to them.
+# NOTE(review): header said hidden directories are not descended, but the recursion
+# below includes a ".[!.]" glob for hidden directories (only .git is skipped
+# explicitly) — confirm which behavior is intended.
+
+# examples:
+# > git_holder
+# > git_holder --dry-run
+
+set -e
+
+find_empty_dirs() {
+ local dir="$1"
+ local dry_run="$2"
+
+ # Skip `.git` specifically
+ if [[ "$(basename "$dir")" == ".git" ]]; then
+ return
+ fi
+
+ # Check if the directory is empty (including hidden files, excluding `.` and `..`)
+ if [[ -z $(find "$dir" -mindepth 1 -maxdepth 1 -print -quit) ]]; then
+ if [[ "$dry_run" == "true" ]]; then
+ echo "Dry-run: Would add .githolder in $dir"
+ else
+ echo "Adding .githolder to $dir"
+ touch "$dir/.githolder"
+ fi
+ else
+ # Recurse into subdirectories
+        for subdir in "$dir"/*/ "$dir"/.[!.]*/; do
+            if [[ -d "$subdir" && "$subdir" != "$dir/.[!.]*/" ]]; then
+ find_empty_dirs "$subdir" "$dry_run"
+ fi
+ done
+ fi
+}
+
+# Default parameters
+dry_run="false"
+target_dir="."
+
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --dry-run)
+ dry_run="true"
+ shift
+ ;;
+ *)
+ if [[ -d "$1" ]]; then
+ target_dir="$1"
+ shift
+ else
+ echo "Invalid argument: $1 is not a directory"
+ exit 1
+ fi
+ ;;
+ esac
+done
+
+# Run the function
+find_empty_dirs "$target_dir" "$dry_run"
--- /dev/null
+#!/bin/sh
+#
+# rm_tilda_files_tree .
+#
+
+if [ ! -d "$1" ]; then
+ echo "expected first arg to be a directory where rm_tilda_files_tree is to be applied, but found: " "$1"
+ exit 1
+fi
+
+find "$1" -not -path '*/\.git/*' -name '*~' -exec rm {} \;
+
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+# vl 'vertical list'
+
+# Check if the command is provided
+if [ -z "$1" ]; then
+ echo "Usage: vl <command> [args...]"
+ exit 1
+fi
+
+# Capture the command and its arguments
+cmd=$1
+shift
+
+# Run the command with the remaining arguments and replace colons or spaces with newlines
+"$cmd" "$@" | tr ' :' '\n'
+
+exit 0
+++ /dev/null
-#include <unistd.h>
-#include <sys/types.h>
-#include <pwd.h>
-#include <string.h>
-#include <stdio.h>
-
-/*
-We return the home directory without a trailing slash. Otherwise in bash scripts ~/... will
-expand out with two slashes. Also, if we returned with a trailing slash, then rsync
-would behave differently with ~ (specifying contents of home instead of home itself).
-
-I am not sure if pw_dir has always returned a trailing slash on the home directory, though with
-the latest upgrade is the first time I am noticing two slashes in transcripts.
-
-It is an error for pw_dir to be an empty string? I suppose not, but rather it is assumed
-to be the root of the file system? Perhaps that is why they added the slash, to make
-that clear.
-
-*/
-int main(){
- struct passwd *pw = getpwuid(getuid());
- const char *homedir = pw->pw_dir;
-
- size_t n = strlen(homedir);
- if( n == 0 ) return 1;
- while(n > 1){
- fputc(*homedir ,stdout);
- --n;
- ++homedir;
- };
- if(*homedir == '/') return 1;
- fputc(*homedir ,stdout);
- return 1;
-}
+++ /dev/null
-#include <unistd.h>
-#include <sys/types.h>
-#include <pwd.h>
-#include <stdio.h>
-#include <string.h>
-
-int main(){
- struct passwd *pw = getpwuid(getuid());
- char *user_stem = strdup(pw->pw_name);
-
- char *p = user_stem;
- while(*p && *p != '-') p++;
- if(*p) *p='\0';
-
- puts(user_stem);
- return 0;
-}
+++ /dev/null
-#include <unistd.h>
-#include <sys/types.h>
-#include <pwd.h>
-#include <stdio.h>
-#include <string.h>
-
-int main(){
- struct passwd *pw = getpwuid(getuid());
- char *user_subu = strdup(pw->pw_name);
-
- char *p = user_subu;
- while(*p && *p != '-') p++;
- if(*p) p++;
-
- puts(p);
- return 0;
-}
+++ /dev/null
-#include <unistd.h>
-#include <sys/types.h>
-#include <pwd.h>
-#include <stdio.h>
-
-int main(){
- struct passwd *pw = getpwuid(getuid());
- const char *pw_name = pw->pw_name;
- puts(pw_name);
- return 0;
-}
--- /dev/null
+#include <unistd.h>
+#include <sys/types.h>
+#include <pwd.h>
+#include <string.h>
+#include <stdio.h>
+
+/*
+We return the home directory without a trailing slash. Otherwise in bash scripts ~/... will
+expand out with two slashes. Also, if we returned with a trailing slash, then rsync
+would behave differently with ~ (specifying contents of home instead of home itself).
+
+I am not sure if pw_dir has always returned a trailing slash on the home directory, though with
+the latest upgrade is the first time I am noticing two slashes in transcripts.
+
+It is an error for pw_dir to be an empty string? I suppose not, but rather it is assumed
+to be the root of the file system? Perhaps that is why they added the slash, to make
+that clear.
+
+*/
+int main(){
+ struct passwd *pw = getpwuid(getuid());
+ const char *homedir = pw->pw_dir;
+
+ size_t n = strlen(homedir);
+ if( n == 0 ) return 1;
+ while(n > 1){
+ fputc(*homedir ,stdout);
+ --n;
+ ++homedir;
+ };
+  /* success paths: exit 0 so callers like `HOME=$(home)` under `set -e`,
+     or `home && ...`, do not misread success as failure */
+  if(*homedir == '/') return 0;  /* strip a trailing slash */
+  fputc(*homedir ,stdout);
+  return 0;
+}
--- /dev/null
+#include <unistd.h>
+#include <sys/types.h>
+#include <pwd.h>
+#include <stdio.h>
+#include <string.h>
+
+int main(){
+ struct passwd *pw = getpwuid(getuid());
+ char *user_stem = strdup(pw->pw_name);
+
+ char *p = user_stem;
+ while(*p && *p != '-') p++;
+ if(*p) *p='\0';
+
+ puts(user_stem);
+ return 0;
+}
--- /dev/null
+#include <unistd.h>
+#include <sys/types.h>
+#include <pwd.h>
+#include <stdio.h>
+#include <string.h>
+
+int main(){
+ struct passwd *pw = getpwuid(getuid());
+ char *user_subu = strdup(pw->pw_name);
+
+ char *p = user_subu;
+ while(*p && *p != '-') p++;
+ if(*p) p++;
+
+ puts(p);
+ return 0;
+}
--- /dev/null
+#include <unistd.h>
+#include <sys/types.h>
+#include <pwd.h>
+#include <stdio.h>
+
+int main(){
+ struct passwd *pw = getpwuid(getuid());
+ const char *pw_name = pw->pw_name;
+ puts(pw_name);
+ return 0;
+}
+++ /dev/null
-MIT License
-
-Copyright (c) 2020 Thomas Walker Lynch
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+++ /dev/null
-# home
-pulls home directory from etc/password
-
-Typically used by scripts that do not want to be sensitive to getting the home directory from the $HOME environment variable. Install this in a places such as /usr/local/bin and then so something such as HOME=home at the top of scripts.
+++ /dev/null
-
-executable = home user user-stem user-subu
-all: $(executable)
-
-home : home.c
- gcc -o home home.c
-
-user : user.c
- gcc -o user user.c
-
-user-stem : user-stem.c
- gcc -o user-stem user-stem.c
-
-user-subu : user-subu.c
- gcc -o user-subu user-subu.c
-
-.PHONY: clean
-clean:
- rm -f home user user-stem usr-subu
-
-# installation must be done by an account that has sudo privs
-.PHONY: install
-install: $(executable)
- for ex in $(executable); do \
- sudo chmod 755 $$ex; \
- sudo chown root:root $$ex; \
- sudo mv $$ex /usr/local/bin; \
- done
+++ /dev/null
-#include <unistd.h>
-#include <sys/types.h>
-#include <pwd.h>
-#include <stdio.h>
-#include <string.h>
-
-int main(){
- struct passwd *pw = getpwuid(getuid());
- char *user_stem = strdup(pw->pw_name);
-
- char *p = user_stem;
- while(*p && *p != '-') p++;
- if(*p) *p='\0';
-
- puts(user_stem);
- return 0;
-}
+++ /dev/null
-#include <unistd.h>
-#include <sys/types.h>
-#include <pwd.h>
-#include <stdio.h>
-#include <string.h>
-
-int main(){
- struct passwd *pw = getpwuid(getuid());
- char *user_subu = strdup(pw->pw_name);
-
- char *p = user_subu;
- while(*p && *p != '-') p++;
- if(*p) p++;
-
- puts(p);
- return 0;
-}
+++ /dev/null
-SHELL=/usr/bin/bash
-
-.PHONEY: install
-install:
- sudo pwd
- cd iseq_C && make install
- cd iseq_interpreted; sudo install -m 755 * /usr/local/bin
-
+++ /dev/null
-#ifndef RT·ENVIRONMENT_H
-#define RT·ENVIRONMENT_H
- typedef unsigned int uint;
-
-#endif
+++ /dev/null
-
-
-#--------------------------------------------------------------------------------
-# This makefile must be called through a local makefile rather than directly.
-# the local make file should include:
-#
-# 1. $(RESOURCE)/library/makefile-environment to give an environment to work with
-# 2. it should set the variables to custom values
-# 3. it should include this file, $(RESOURCE)/library/makefile
-# 4. if it needs tailored targets it should then define them
-#
-
-#--------------------------------------------------------------------------------
-# todo ...
-
-# should add test that incdir is not pwd before deleting the include
-# file. Currently it is working for us despite this problem because dist-clean is
-# looking for $(module).h while the include file is always $(module).lib.h.
-
-#--------------------------------------------------------------------------------
-# files named by convention
-# this implements the .lib.c and the .cli.c convention
-#
-
-# these are the sources edited by the programmer
-C_SOURCE_LIB= $(wildcard $(SRCDIR)/*.lib.c)
-C_SOURCE_EXEC= $(wildcard $(SRCDIR)/*.cli.c)
-CC_SOURCE_LIB= $(wildcard $(SRCDIR)/*.lib.cc)
-CC_SOURCE_EXEC= $(wildcard $(SRCDIR)/*.cli.cc)
-
-#remove the suffix to get base name
-C_BASE_LIB= $(sort $(patsubst %.lib.c, %, $(notdir $(C_SOURCE_LIB))))
-C_BASE_EXEC= $(sort $(patsubst %.cli.c, %, $(notdir $(C_SOURCE_EXEC))))
-CC_BASE_LIB= $(sort $(patsubst %.lib.cc, %, $(notdir $(CC_SOURCE_LIB))))
-CC_BASE_EXEC= $(sort $(patsubst %.cli.cc, %, $(notdir $(CC_SOURCE_EXEC))))
-
-# two sets of object files, one for the lib, and one for the command line interface progs
-OBJECT_LIB= $(patsubst %, $(TMPDIR)/%.lib.o, $(C_BASE_LIB) $(CC_BASE_LIB))
-OBJECT_EXEC= $(patsubst %, $(TMPDIR)/%.cli.o, $(C_BASE_EXEC) $(CC_BASE_EXEC))
-
-# executables are made from EXEC sources
-EXEC= $(patsubst %, $(EXECDIR)/%, $(C_BASE_EXEC) $(CC_BASE_EXEC))
-
-
-#--------------------------------------------------------------------------------
-# targets
-
-.PHONY: all
-all: version
-
-.PHONY: version
-version:
- @echo makefile version 7.0
- if [ ! -z "$(C)" ]; then $(C) -v; fi
- if [ ! -z "$(CC)" ]; then $(CC) -v; fi
- make -v
-
-.PHONY: info
-info:
- @echo "SHELL: " $(SHELL)
- @echo "ECHO: " $(ECHO)
- @echo "C: " $(C)
- @echo "CFLAGS: " $(CFLAGS)
- @echo "CC: " $(CC)
- @echo "CCFLAGS: " $(CCFLAGS)
- @echo "LINKFLAGS: " $(LINKFLAGS)
- @echo "DOCDIR: " $(DOCDIR)
- @echo "ENVDIR: " $(ENVDIR)
- @echo "EXECDIR: " $(EXECDIR)
- @echo "LIBDIR: " $(LIBDIR)
- @echo "TESTDIR: " $(TESTDIR)
- @echo "TMPDIR: " $(TMPDIR)
- @echo "TRYDIR: " $(TRYDIR)
- @echo "DEPFILE: " $(DEPFILE)
- @echo "LIBFILE: " $(LIBFILE)
- @echo "INCFILE: " $(INCFILE)
- @echo "C_SOURCE_LIB: " $(C_SOURCE_LIB)
- @echo "C_SOURCE_EXEC: " $(C_SOURCE_EXEC)
- @echo "CC_SOURCE_LIB: " $(CC_SOURCE_LIB)
- @echo "CC_SOURCE_EXEC: " $(CC_SOURCE_EXEC)
- @echo "C_BASE_LIB: " $(C_BASE_LIB)
- @echo "C_BASE_EXEC: " $(C_BASE_EXEC)
- @echo "CC_BASE_LIB: " $(CC_BASE_LIB)
- @echo "CC_BASE_EXEC: " $(CC_BASE_EXEC)
- @echo "OBJECT_LIB: " $(OBJECT_LIB)
- @echo "OBJECT_EXEC: " $(OBJECT_EXEC)
- @echo "EXEC: " $(EXEC)
-
-# should be safe to run this in an already setup or partially setup directory
-# gee looks like a candidate for a makefile function ..
-.PHONY: setup
-setup:
- [ ! -e $(DOCDIR) ] && mkdir $(DOCDIR) || true
- [ ! -e $(EXECDIR) ] && mkdir $(EXECDIR) || true
- [ ! -e $(LIBDIR) ] && mkdir $(LIBDIR) || true
- [ ! -e $(SRCDIR) ] && mkdir $(SRCDIR) || true
- [ ! -e $(TESTDIR) ] && mkdir $(TESTDIR) || true
- [ ! -e $(TMPDIR) ] && mkdir $(TMPDIR) || true
- [ ! -e $(TRYDIR) ] && mkdir $(TRYDIR) || true
-
-.PHONY: dependency
-dependency:
- @rm -f $(DEPFILE)
- @if [ -z "$(CC)" ]; then\
- if [ -z "$(C)" ]; then\
- $(ECHO) "No compiler specified";\
- exit 1;\
- else\
- echo "C compiler only deps" ;\
- $(C) $(CFLAGS) -MM $(C_SOURCE_LIB) $(C_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
- echo "deps for C linking";\
- for i in $(C_BASE_EXEC); do\
- $(ECHO) >> $(DEPFILE);\
- $(ECHO) "$(EXECDIR)/$$i : $(TMPDIR)/$$i.cli.o $(LIBFILE)" >> $(DEPFILE);\
- $(ECHO) " $(C) -o $(EXECDIR)/$$i $(TMPDIR)/$$i.cli.o $(LINKFLAGS)" >> $(DEPFILE);\
- done;\
- fi;\
- else\
- $(CC) $(CCFLAGS) -MM $(CC_SOURCE_LIB) $(CC_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
- if [ -z "$C()" ]; then\
- echo "CC compiler only deps" ;\
- else\
- echo "CC and C mixed compile deps" ;\
- $(C) $(CFLAGS) -MM $(C_SOURCE_LIB) $(C_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
- fi;\
- echo "deps for CC linking";\
- for i in $(CC_BASE_EXEC) $(C_BASE_EXEC) ; do\
- $(ECHO) >> $(DEPFILE);\
- $(ECHO) "$(EXECDIR)/$$i : $(TMPDIR)/$$i.cli.o $(LIBFILE)" >> $(DEPFILE);\
- $(ECHO) " $(CC) -o $(EXECDIR)/$$i $(TMPDIR)/$$i.cli.o $(LINKFLAGS)" >> $(DEPFILE);\
- done;\
- fi
-
-.PHONY: library
-library: $(LIBFILE)
-
-$(LIBFILE): $(OBJECT_LIB)
- ar rcs $(LIBFILE) $(OBJECT_LIB)
-
-.PHONY: cli
-cli: $(LIBFILE)
- make sub_cli
-
-.PHONY: sub_cli
-sub_cli: $(EXEC)
-
-.PHONY: share
-share:
- if [ ! -z "$(wildcard $(LIBDIR)/*)" ]; then cp $(LIBDIR)/* $(DIST_TARGET)/$(LIBDIR); fi
- if [ ! -z "$(wildcard $(EXECDIR)/*)" ]; then cp $(EXECDIR)/* $(DIST_TARGET)/$(EXECDIR); fi
-# if [ ! -z "$(wildcard $(SRCDIR)/*.h)" ]; then cp $(SRCDIR)/*.h $(DIST_TARGET)/include; fi
-
-
-.PHONY: clean
-clean:
- for i in $(wildcard $(TMPDIR)/*); do rm $$i || true; done
-
-.PHONY: dist-clean
-dist-clean:
- for i in $(wildcard $(TMPDIR)/*); do rm $$i || true; done
- for i in $(EXEC); do [ -e $$i ] && rm $$i || true; done
- rm -f $(LIBFILE)
-
-# recipes
- $(TMPDIR)/%.o : $(SRCDIR)/%.c
- $(C) $(CFLAGS) -o $@ -c $<
-
- $(TMPDIR)/%.o : $(SRCDIR)/%.cc
- $(CC) $(CCFLAGS) -o $@ -c $<
-
-# recipes
-#$(TMPDIR)/%.o : $(SRCDIR)/%.c
-# $(C) $(CFLAGS) -o $@ -c <($(CPPP) $<)
-
-$(TMPDIR)/%.o : $(SRCDIR)/%.c
- $(CPPP) $< $(TMPDIR)/tempfile.c
- $(C) $(CFLAGS) -o $@ -c $(TMPDIR)/tempfile.c
-# rm $(TMPDIR)/tempfile.c
+++ /dev/null
-#makes use of variable PROJECT from the environment, see command `repo`
-
-SHELL=/bin/bash
-
-#--------------------------------------------------------------------------------
-# useful macros
-#
-
-# a single space literal, for example if you wanted to subsitute commas to
-# spaces: $(subst $(space),;,$(string))
-
-blank :=s
-pace :=$(blank) $(blank)
-
-#--------------------------------------------------------------------------------
-# Programs make uses.
-#
-ECHO= echo
-#ECHO= echo -e
-
-# If not using C, or C++, set the variable to a blank value
-# (value should be set in the local makefile)
-C=
-CC=
-CPPP= echo "#line 1 \"$1\""; cat
-
-
-#--------------------------------------------------------------------------------
-# main directories
-# these may be created by make setup, which can be useful as git doesn't keep empty directories
-#
-ENVDIR=../env
-DOCDIR=document
-EXECDIR=executables
-LIBDIR=library
-SRCDIR=src
-TESTDIR=test
-TMPDIR=temporary
-TRYDIR=experiment
-
-#--------------------------------------------------------------------------------
-# local targets
-#
-DEPFILE=$(TMPDIR)/makefile-cc.deps
-LIBFILE=$(LIBDIR)/lib$(PROJECT).a
-INCFILE=$(SRCDIR)/$(PROJECT).h
-
-#--------------------------------------------------------------------------------
-# install target
-# make install has not implemented yet, so no matter
-#
-DIST_TARGET=
-
+++ /dev/null
-#!/bin/bash
-# this will start a new shell that has an environment appropriate for the repo
-#set -x
-# current `use_tool` in the project environment also sets $PROJECT, and it comes later.
-
-
-repo="${1%/}"
-
-error=false
-if [ -z "$repo" ]; then
- echo "no repo specified"
- error=true
-fi
-if [ -z "$REPO_DIR" ]; then
- echo "no REPO_DIR in the environment"
- error=true
-fi
-if $error; then
- exit 1
-fi
-
-
-# makes an init file for bash.
-# Variables from the environment will not expand within this heredoc because of the quotes around the EOF. (Yes, that is
-# a thing.)
-# moved this to the .bashrc:
-# export PS1='\n$(/usr/local/bin/Z) [$(echo "$REPO")]\n\u@\h§\w§\n> '
-read -r -d '' InitFile <<-"EOF"
- export HOME=$(/usr/local/bin/home)
- . "$HOME/.bashrc"
- export HOME=$(/usr/local/bin/home) # make sure it wasn't changed in .bashrc
- export SHELL=/bin/bash
- export USERNAME=$(/usr/local/bin/user)
- export REPO_HOME="$REPO_DIR/$REPO"
- if [ -z "$TMP" ] && [ -d "$REPO_HOME/developer" ]; then
- export TMP="$REPO_HOME/developer/scratch_pad"
- if [ ! -d "$TMP" ]; then
- mkdir "$TMP"
- fi
- fi
- cd "$REPO_HOME"
- act="$REPO_HOME/developer/tool/env"
- if [ -f "$act" ]; then
- . "$act"
- fi
- # echo $PATH
-EOF
-
-# exec -c and env -i, where INSIDE_EMACS is set in the calling shell and is also set in
-# the init script of the newly launched shell, does not work. In both cases emacs stops
-# tracking directory changes in the newly launched shell. Emacs will continue to track
-# directory changes if we call bash directly, or if we use env -i with INSIDE_EMACS defined
-# as a parameter.
-#
-# PROJECT is for PS1 bash prompt
-env --ignore-environment \
- REPO="$repo" \
- REPO_DIR="$REPO_DIR" \
- SUBU_SHARE_DIR="$SUBU_SHARE_DIR"\
- PROJECT="$repo"\
- INSIDE_EMACS="$INSIDE_EMACS" \
- TMP="$TMP" \
- HOSTNAME="$HOSTNAME" \
- LOGNAME="$LOGNAME" \
- DISPLAY="$DISPLAY" \
- LANG="$LANG"\
- TERM="$TERM" \
- TERMCAP="$TERMCAP" \
- EDITOR="$EDITOR" \
- PATH="/usr/local/bin:/usr/bin:/bin" \
- /bin/bash --init-file <(echo "$InitFile")
-
-#set +x
+++ /dev/null
-#!/bin/bash
-#set -x
-if [ -z "$REPO_DIR" ]; then
- echo "can't $0, no REPO_DIR repo home directory found in the environment"
- exit 1;
-fi
-if [ -z "$TMP" ]; then
- echo "can't $0, no tmp directory found in the environment"
- exit 1;
-fi
-
-# don't want the pull to clobber this, and gosh, what if there are multiple pulls
-# simultaneously? this is a bit of a hack but good enough for now ..
-TimeStamp="$TMP"/git_pull_timestamp_"$(od -A n -t x -N 8 /dev/urandom |tr -d ' ')"
-touch "$TimeStamp"
-
-# would be better to make this repo specific so we don't pull other submodules than the
-# repo .. or do we want the latest in the other submodules also?
-pushd "$REPO_DIR" > /dev/null
-
- git pull --recurse-submodules || echo "git pull failed"
-
- # make a file that looks like a newly pulled executable for testing purposes
- # touch temp
- # chmod +x temp
-
- find . \
- -newer "$TimeStamp" \
- -not -path "./.git/*" \
- -not -type d \
- -executable \
- -execdir echo "Pulled file is executable, removing x flag: {}" \; \
- -execdir chmod -x {} \;
-
- find . \
- -newer "$TimeStamp" \
- -not -type d \
- -not -name "\.git" \
- -not -path "./.git/*" \
- -execdir echo "New file: {}" \;
-
- find . \
- -newer "$TimeStamp" \
- -not -path "./.git/*" \
- -path "./tmp/*" \
- -execdir echo "Error, this file was pulled into a tmp directory: {}" \;
-
- if [ -d env ]; then
- find env \
- -newer "$TimeStamp" \
- -execdir echo "Error, this file was pulled into the env directory: {}" \;
- fi
-
- find . \
- -newer "$TimeStamp" \
- -not -path "./.git/*" \
- -not -name "\.git" \
- -name "\.*" \
- -execdir echo "Error, hidden file was pulled: {}" \;
-
- if [ .gitignore -nt "$TimeStamp" ]; then
- echo "Warning, .gitignore was modified."
- fi
-
- rm "$TimeStamp"
-
-popd > /dev/null
-#set +x
+++ /dev/null
-#!/bin/bash
-#set -x
-
-if [ -z "$REPO_DIR" ]; then
- echo "can't $0, no REPO_DIR found in the environment"
- exit 1;
-fi
-CommitMessage="$1"
-if [ -z "$CommitMessage" ]; then
- echo "no commit message, enter it now:"
- read CommitMessage
- if [ -z "$CommitMessage" ]; then
- echo "no commit message, exiting"
- exit 1;
- fi
-fi
-
-pushd "$REPO_DIR" > /dev/null
-
- # make clean any directories with a makefile
- find . \
- -not -path "./.git/*" \
- -not -path "./tmp/*" \
- -type f \
- -name 'makefile' \
- -execdir make clean \;
-
- # executables should not be checked into the repo, yet we found these:
- find . \
- -not -path "./.git/*" \
- -not -path "./tmp/*" \
- -type f \
- -executable \
- -exec echo "found executable, removing x flag" {} \;
- -execdir chmod -x {} \;
-
- # -execdir mv {} {}~ \;
-
-set -x
- git stash
- git pull
- git stash pop
- git add .
- git commit -m "$CommitMessage"
- git push
-set +x
-
-popd > /dev/null
-#set +x
-
-
-
+++ /dev/null
-#!/bin/sh
-#
-# rm_tilda_files_tree .
-#
-
-if [ ! -d "$1" ]; then
- echo "expected first arg to be a directory where rm_tilda_files_tree is to be applied, but found: " "$1"
- exit 1
-fi
-
-find "$1" -not -path '*/\.git/*' -name '*~' -exec rm {} \;
-
+++ /dev/null
-#!/bin/bash
-# this will start a new shell that has an environment appropriate for the repo
-#set -x
-# current `use_tool` in the project environment also sets $PROJECT, and it comes later.
-
-
-repo="${1%/}"
-
-error=false
-if [ -z "$repo" ]; then
- echo "no repo specified"
- error=true
-fi
-if [ -z "$REPO_DIR" ]; then
- echo "no REPO_DIR in the environment"
- error=true
-fi
-if $error; then
- exit 1
-fi
-
-
-# makes an init file for bash.
-# Variables from the environment will not expand within this heredoc because of the quotes around the EOF. (Yes, that is
-# a thing.)
-# moved this to the .bashrc:
-# export PS1='\n$(/usr/local/bin/Z) [$(echo "$REPO")]\n\u@\h§\w§\n> '
-read -r -d '' InitFile <<-"EOF"
- export HOME=$(/usr/local/bin/home)
- . "$HOME/.bashrc"
- export HOME=$(/usr/local/bin/home) # make sure it wasn't changed in .bashrc
- export SHELL=/bin/bash
- export USERNAME=$(/usr/local/bin/user)
- export REPO_HOME="$REPO_DIR/$REPO"
- if [ -z "$TMP" ] && [ -d "$REPO_HOME/developer" ]; then
- export TMP="$REPO_HOME/developer/scratch_pad"
- if [ ! -d "$TMP" ]; then
- mkdir "$TMP"
- fi
- fi
- cd "$REPO_HOME"
- act="$REPO_HOME/developer/tool/env"
- if [ -f "$act" ]; then
- . "$act"
- fi
- # echo $PATH
-EOF
-
-# exec -c and env -i, where INSIDE_EMACS is set in the calling shell and is also set in
-# the init script of the newly launched shell, does not work. In both cases emacs stops
-# tracking directory changes in the newly launched shell. Emacs will continue to track
-# directory changes if we call bash directly, or if we use env -i with INSIDE_EMACS defined
-# as a parameter.
-#
-# PROJECT is for PS1 bash prompt
-env --ignore-environment \
- REPO="$repo" \
- REPO_DIR="$REPO_DIR" \
- SUBU_SHARE_DIR="$SUBU_SHARE_DIR"\
- PROJECT="$repo"\
- INSIDE_EMACS="$INSIDE_EMACS" \
- TMP="$TMP" \
- HOSTNAME="$HOSTNAME" \
- LOGNAME="$LOGNAME" \
- DISPLAY="$DISPLAY" \
- LANG="$LANG"\
- TERM="$TERM" \
- TERMCAP="$TERMCAP" \
- EDITOR="$EDITOR" \
- PATH="/usr/local/bin:/usr/bin:/bin" \
- /bin/bash --init-file <(echo "$InitFile")
-
-#set +x
+++ /dev/null
-#!/bin/bash
-#set -x
-if [ -z "$REPO_DIR" ]; then
- echo "can't $0, no REPO_DIR repo home directory found in the environment"
- exit 1;
-fi
-if [ -z "$TMP" ]; then
- echo "can't $0, no tmp directory found in the environment"
- exit 1;
-fi
-
-# don't want the pull to clobber this, and gosh, what if there are multiple pulls
-# simultaneously? this is a bit of a hack but good enough for now ..
-TimeStamp="$TMP"/git_pull_timestamp_"$(od -A n -t x -N 8 /dev/urandom |tr -d ' ')"
-touch "$TimeStamp"
-
-# would be better to make this repo specific so we don't pull other submodules than the
-# repo .. or do we want the latest in the other submodules also?
-pushd "$REPO_DIR" > /dev/null
-
- git pull --recurse-submodules || echo "git pull failed"
-
- # make a file that looks like a newly pulled executable for testing purposes
- # touch temp
- # chmod +x temp
-
- find . \
- -newer "$TimeStamp" \
- -not -path "./.git/*" \
- -not -type d \
- -executable \
- -execdir echo "Pulled file is executable, removing x flag: {}" \; \
- -execdir chmod -x {} \;
-
- find . \
- -newer "$TimeStamp" \
- -not -type d \
- -not -name "\.git" \
- -not -path "./.git/*" \
- -execdir echo "New file: {}" \;
-
- find . \
- -newer "$TimeStamp" \
- -not -path "./.git/*" \
- -path "./tmp/*" \
- -execdir echo "Error, this file was pulled into a tmp directory: {}" \;
-
- if [ -d env ]; then
- find env \
- -newer "$TimeStamp" \
- -execdir echo "Error, this file was pulled into the env directory: {}" \;
- fi
-
- find . \
- -newer "$TimeStamp" \
- -not -path "./.git/*" \
- -not -name "\.git" \
- -name "\.*" \
- -execdir echo "Error, hidden file was pulled: {}" \;
-
- if [ .gitignore -nt "$TimeStamp" ]; then
- echo "Warning, .gitignore was modified."
- fi
-
- rm "$TimeStamp"
-
-popd > /dev/null
-#set +x
+++ /dev/null
-#!/bin/bash
-#set -x
-
-if [ -z "$REPO_DIR" ]; then
- echo "can't $0, no REPO_DIR found in the environment"
- exit 1;
-fi
-CommitMessage="$1"
-if [ -z "$CommitMessage" ]; then
- echo "no commit message, enter it now:"
- read CommitMessage
- if [ -z "$CommitMessage" ]; then
- echo "no commit message, exiting"
- exit 1;
- fi
-fi
-
-pushd "$REPO_DIR" > /dev/null
-
- # make clean any directories with a makefile
- find . \
- -not -path "./.git/*" \
- -not -path "./tmp/*" \
- -type f \
- -name 'makefile' \
- -execdir make clean \;
-
- # executables should not be checked into the repo, yet we found these:
- find . \
- -not -path "./.git/*" \
- -not -path "./tmp/*" \
- -type f \
- -executable \
- -exec echo "found executable, removing x flag" {} \;
- -execdir chmod -x {} \;
-
- # -execdir mv {} {}~ \;
-
-set -x
- git stash
- git pull
- git stash pop
- git add .
- git commit -m "$CommitMessage"
- git push
-set +x
-
-popd > /dev/null
-#set +x
-
-
-
+++ /dev/null
-#!/bin/bash
-set -e
-
-# Check for subuser name
-if [ -z "$1" ]; then
- echo "Usage: $0 <subu_name>"
- exit 1
-fi
-
-SUBU_NAME=$1
-SUBU_USER="Thomas-$SUBU_NAME"
-SUBU_HOME="/home/$SUBU_USER"
-
-# Check if the subuser exists
-if ! id "$SUBU_USER" &>/dev/null; then
- echo "Error: Subuser $SUBU_USER does not exist!"
- exit 1
-fi
-
-echo "Setting up subuser: $SUBU_USER"
-
-# 1. Enable linger for the subuser
-echo "Enabling linger for $SUBU_USER..."
-sudo loginctl enable-linger "$SUBU_USER"
-
-# 2. Enable pipewire services for the subuser
-echo "Enabling pipewire services for $SUBU_USER..."
-sudo -u "$SUBU_USER" systemctl --user enable pipewire
-sudo -u "$SUBU_USER" systemctl --user enable pipewire-pulse
-
-# 3. Set up X11 access (XAUTHORITY)
-echo "Setting up X11 access for $SUBU_USER..."
-XAUTH_PATH="$SUBU_HOME/.Xauthority"
-if [ ! -f "$XAUTH_PATH" ]; then
- sudo -u "$SUBU_USER" touch "$XAUTH_PATH"
-fi
-
-# Generate a trusted X11 cookie
-echo "Generating trusted X11 cookie..."
-xauth generate "$DISPLAY" . trusted
-xauth extract - "$DISPLAY" | sudo -u "$SUBU_USER" xauth merge -
-
-# 4. Set up a basic .bashrc if not already present
-echo "Setting up .bashrc for $SUBU_USER..."
-BASHRC_PATH="$SUBU_HOME/.bashrc"
-if [ ! -f "$BASHRC_PATH" ]; then
- sudo -u "$SUBU_USER" cp /etc/skel/.bashrc "$BASHRC_PATH"
- echo "# Custom settings for $SUBU_USER" | sudo -u "$SUBU_USER" tee -a "$BASHRC_PATH"
-fi
-
-# 5. Adjust permissions for the subuser home directory (optional)
-echo "Ensuring correct permissions for $SUBU_HOME..."
-sudo chown -R "$SUBU_USER:$SUBU_USER" "$SUBU_HOME"
-
-echo "Subuser setup complete for $SUBU_USER!"
+++ /dev/null
-#!/bin/bash
-
-subu="$1"
-shell="${@:2}"
-
-# Basic error handling for subu name
-if [ -z "$subu" ]; then
- echo "No subuser name supplied"
- exit 1
-fi
-
-# Ensure the subu session is set up correctly
-xauth_output=$(xauth list)
-xkey=$(echo "$xauth_output" | head -1 | awk '{print $3}')
-
-if [ -z "$xkey" ]; then
- echo "subu:: xauth key not found"
- exit 1
-fi
-
-# Set up the display environment
-read -r -d '' script0 <<-EOF
- export NO_AT_BRIDGE=1 \
- ;touch .Xauthority \
- ;xauth add "$DISPLAY" . "$xkey" \
- ;export DISPLAY="$DISPLAY" \
- ;eval \$(dbus-launch --sh-syntax) \
- ;export DBUS_SESSION_BUS_ADDRESS \
- ;export DBUS_SESSION_BUS_PID \
- ;$shell
-EOF
-
-# Start the subuser session using machinectl without the '--' issue
-sudo machinectl shell $subu@ /bin/bash -c "$script0"
+++ /dev/null
-
-; The first time Emacs encounters a link to a source file, Emacs asks if it should follow it.
-; This might suppress that initial question and follow the link.
-; (setq find-file-visit-truename t)
-
-(defun jdbx ()
- "Set gud-jdb-sourcepath from the environment and run jdb with the correct source path."
- (interactive)
- (let*
- (
- (sourcepath (getenv "SOURCEPATH"))
- )
- (if
- sourcepath
- (setq gud-jdb-sourcepath (split-string sourcepath ":" t))
- (message "Warning: SOURCEPATH is not set. `jdb` will run without source path information.")
- )
- (let
- (
- (class-name (read-string "Enter the class to debug: " "Test_Util"))
- )
- (jdb (concat "jdb -sourcepath"
- (if
- sourcepath
- (mapconcat 'identity gud-jdb-sourcepath ":") ""
- )
- " "
- class-name
- )
- ))))
-
-(defun monitor-jdb-sourcepath (output)
- "Monitor the jdb output for `sourcepath ARG` commands and update `gud-jdb-sourcepath` with each path in ARG."
- (when
- (string-match "sourcepath \\(.+\\)" output)
- (let*
- (
- (new-paths (match-string 1 output))
- (paths-list (split-string new-paths ":" t))
- )
- ;; Add each path in paths-list to gud-jdb-sourcepath if not already present
- (dolist
- (path paths-list)
- (unless
- (member path gud-jdb-sourcepath)
- (setq gud-jdb-sourcepath (append gud-jdb-sourcepath (list path)))
- )
- )
- (message "Updated gud-jdb-sourcepath: %s" gud-jdb-sourcepath)))
- output)
-
-(add-hook 'gud-filter-functions 'monitor-jdb-sourcepath)
--- /dev/null
+
+; The first time Emacs encounters a link to a source file, Emacs asks if it should follow it.
+; This might suppress that initial question and follow the link.
+; (setq find-file-visit-truename t)
+
+(defun jdbx ()
+ "Set gud-jdb-sourcepath from the environment and run jdb with the correct source path."
+ (interactive)
+ (let*
+ (
+ (sourcepath (getenv "SOURCEPATH"))
+ )
+ (if
+ sourcepath
+ (setq gud-jdb-sourcepath (split-string sourcepath ":" t))
+ (message "Warning: SOURCEPATH is not set. `jdb` will run without source path information.")
+ )
+ (let
+ (
+ (class-name (read-string "Enter the class to debug: " "Test_Util"))
+ )
+ (jdb (concat "jdb -sourcepath"
+ (if
+ sourcepath
+ (mapconcat 'identity gud-jdb-sourcepath ":") ""
+ )
+ " "
+ class-name
+ )
+ ))))
+
+(defun monitor-jdb-sourcepath (output)
+ "Monitor the jdb output for `sourcepath ARG` commands and update `gud-jdb-sourcepath` with each path in ARG."
+ (when
+ (string-match "sourcepath \\(.+\\)" output)
+ (let*
+ (
+ (new-paths (match-string 1 output))
+ (paths-list (split-string new-paths ":" t))
+ )
+ ;; Add each path in paths-list to gud-jdb-sourcepath if not already present
+ (dolist
+ (path paths-list)
+ (unless
+ (member path gud-jdb-sourcepath)
+ (setq gud-jdb-sourcepath (append gud-jdb-sourcepath (list path)))
+ )
+ )
+ (message "Updated gud-jdb-sourcepath: %s" gud-jdb-sourcepath)))
+ output)
+
+(add-hook 'gud-filter-functions 'monitor-jdb-sourcepath)
+++ /dev/null
-#ifndef RT·ENVIRONMENT_H
-#define RT·ENVIRONMENT_H
- typedef unsigned int uint;
-
-#endif
+++ /dev/null
-# makefile environment variable defaults.
-# include this in the local make file with a line such as:
-#
-# RESOURCE:=$(REPO_HOME)/tool_shared/third_party/resource/release
-# include $(RESOURCE)/make/environment_RT_0
-#
-# The Unix environment has traditionally combined linking and loading into one
-# program called `ld`, and the compiler conventionally also has a switch for
-# doing the linking. A programmer or user typically does not run the loader
-# explicitly, because the shall does this automatically. Consequently, the poor
-# linker has not been giving an explicit name, and no one thinks much about
-# loading.
-#
-# The ATT c compiler was called 'cc'. However, today in makefiles 'CC' is often
-# used as the variable name for the C++ compiler.
-#
-# This all causes a bit of a problem when following the RT directory naming
-# convention, where, when practical, directories are named after the program
-# that will use the contained files.
-#
-# Here 'cc' is the directory containing files for cc, the C compiler. The
-# directory 'linker' containers files for the unnamed linker, and, as loaders
-# are architecture specific, 'amd64' containers files for the architecture
-# 'amd64' loader. The variable CPP holds the name of the C++ compiler.
-
-
-SHELL=/bin/bash
-
-#--------------------------------------------------------------------------------
-# useful macros
-#
-
-# a single space literal, for example if you wanted to subsitute commas to
-# spaces: $(subst $(space),;,$(string))
-
-blank :=
-space :=$(blank) $(blank)
-
-#--------------------------------------------------------------------------------
-# main directories
-#
-DOCDIR=document
-EXECDIR=amd64
-LIBDIR=linker
-SRCDIR=cc
-INCDIR=cc
-TESTDIR=test
-TMPDIR=scratchpad
-TRYDIR=experiment
-
-#--------------------------------------------------------------------------------
-# Programs make uses.
-#
-ECHO= echo -e
-#ECHO= echo
-
-# program to use to compile C, or to compile C++
-C=
-CPP=
-
-#--------------------------------------------------------------------------------
-# local targets
-#
-DEPFILE=$(TMPDIR)/makefile-cc.deps
-LIBFILE=$(LIBDIR)/lib$(PROJECT).a
-
-# we no longer use separate include file, but instead gate the C file
-# INCFILE=$(SRCDIR)/$(PROJECT).h
-
-
+++ /dev/null
-#--------------------------------------------------------------------------------
-# This makefile must be called through a local makefile rather than directly.
-# the local make file should include:
-#
-# 1. cp $(RESOURCE)/release/make/makefile-environment_example to get started on a make environment.
-# 2. modify it and set the variables to custom values
-# 3. the environment file should include this file, probably at $(RESOURCE)/release/make/makefile
-# 4. if tailored targets are needed, add them
-#
-
-#--------------------------------------------------------------------------------
-# todo ...
-
-# should add test that incdir is not pwd before deleting the include
-# file. Currently it is working for us despite this problem because dist-clean is
-# looking for $(module).h while the include file is always $(module).lib.h.
-
-#--------------------------------------------------------------------------------
-# files named by convention
-# this implements the .lib.c and the .cli.c convention
-#
-
-# these are the sources edited by the programmer
-C_SOURCE_LIB= $(wildcard $(SRCDIR)/*.lib.c)
-C_SOURCE_EXEC= $(wildcard $(SRCDIR)/*.cli.c)
-CPP_SOURCE_LIB= $(wildcard $(SRCDIR)/*.lib.cc)
-CPP_SOURCE_EXEC= $(wildcard $(SRCDIR)/*.cli.cc)
-
-#remove the suffix to get base name
-C_BASE_LIB= $(sort $(patsubst %.lib.c, %, $(notdir $(C_SOURCE_LIB))))
-C_BASE_EXEC= $(sort $(patsubst %.cli.c, %, $(notdir $(C_SOURCE_EXEC))))
-CPP_BASE_LIB= $(sort $(patsubst %.lib.cc, %, $(notdir $(CPP_SOURCE_LIB))))
-CPP_BASE_EXEC= $(sort $(patsubst %.cli.cc, %, $(notdir $(CPP_SOURCE_EXEC))))
-
-# two sets of object files, one for the lib, and one for the command line interface progs
-OBJECT_LIB= $(patsubst %, $(TMPDIR)/%.lib.o, $(C_BASE_LIB) $(CPP_BASE_LIB))
-OBJECT_EXEC= $(patsubst %, $(TMPDIR)/%.cli.o, $(C_BASE_EXEC) $(CPP_BASE_EXEC))
-
-# executables are made from EXEC sources
-EXEC= $(patsubst %, $(EXECDIR)/%, $(C_BASE_EXEC) $(CPP_BASE_EXEC))
-
-
-#--------------------------------------------------------------------------------
-# targets
-
-.PHONY: all
-all: usage
-
-.PHONY: usage
-usage:
- @echo example usage: make dist-clean dependency cli
- @echo example usage: make cli
-
-.PHONY: version
-version:
- @echo makefile version 7.1
- if [ ! -z "$(C)" ]; then $(C) -v; fi
- if [ ! -z "$(CPP)" ]; then $(CPP) -v; fi
- /bin/make -v
-
-.PHONY: info
-info:
- @echo "C: " $(C)
- @echo "CPP: " $(CPP)
- @echo "CPPFLAGS: " $(CPPFLAGS)
- @echo "CPP_BASE_EXEC: " $(CPP_BASE_EXEC)
- @echo "CPP_BASE_LIB: " $(CPP_BASE_LIB)
- @echo "CPP_SOURCE_EXEC: " $(CPP_SOURCE_EXEC)
- @echo "CPP_SOURCE_LIB: " $(CPP_SOURCE_LIB)
- @echo "CFLAGS: " $(CFLAGS)
- @echo "C_BASE_EXEC: " $(C_BASE_EXEC)
- @echo "C_BASE_LIB: " $(C_BASE_LIB)
- @echo "C_SOURCE_EXEC: " $(C_SOURCE_EXEC)
- @echo "C_SOURCE_LIB: " $(C_SOURCE_LIB)
- @echo "DEPFILE: " $(DEPFILE)
- @echo "DOCDIR: " $(DOCDIR)
- @echo "ECHO: " $(ECHO)
- @echo "EXEC: " $(EXEC)
- @echo "EXECDIR: " $(EXECDIR)
- @echo "INCDIR: " $(INCFILE)
- @echo "LIBDIR: " $(LIBDIR)
- @echo "LIBFILE: " $(LIBFILE)
- @echo "LINKFLAGS: " $(LINKFLAGS)
- @echo "OBJECT_EXEC: " $(OBJECT_EXEC)
- @echo "OBJECT_LIB: " $(OBJECT_LIB)
- @echo "SHELL: " $(SHELL)
- @echo "SRCDIR: " $(INCFILE)
- @echo "TESTDIR: " $(TESTDIR)
- @echo "TMPDIR: " $(TMPDIR)
- @echo "TRYDIR: " $(TRYDIR)
-
-# should be safe to run this in an already setup or partially setup directory
-# gee looks like a candidate for a makefile function ..
-.PHONY: setup
-setup:
- [ ! -e $(DOCDIR) ] && mkdir $(DOCDIR) || true
- [ ! -e $(EXECDIR) ] && mkdir $(EXECDIR) || true
- [ ! -e $(LIBDIR) ] && mkdir $(LIBDIR) || true
- [ ! -e $(SRCDIR) ] && mkdir $(SRCDIR) || true
- [ ! -e $(TESTDIR) ] && mkdir $(TESTDIR) || true
- [ ! -e $(TMPDIR) ] && mkdir $(TMPDIR) || true
- [ ! -e $(TRYDIR) ] && mkdir $(TRYDIR) || true
-
-.PHONY: dependency
-dependency:
- @rm -f $(DEPFILE)
- @if [ -z "$(CPP)" ]; then\
- if [ -z "$(C)" ]; then\
- $(ECHO) "No compiler specified";\
- exit 1;\
- else\
- echo "C compiler only deps" ;\
- $(C) $(CFLAGS) -MM $(C_SOURCE_LIB) $(C_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
- echo "deps for C linking";\
- for i in $(C_BASE_EXEC); do\
- $(ECHO) >> $(DEPFILE);\
- $(ECHO) "$(EXECDIR)/$$i : $(TMPDIR)/$$i.cli.o $(LIBFILE)" >> $(DEPFILE);\
- $(ECHO) " $(C) -o $(EXECDIR)/$$i $(TMPDIR)/$$i.cli.o $(LINKFLAGS)" >> $(DEPFILE);\
- done;\
- fi;\
- else\
- $(CPP) $(CPPFLAGS) -MM $(CPP_SOURCE_LIB) $(CPP_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
- if [ -z "$C()" ]; then\
- echo "CPP compiler only deps" ;\
- else\
- echo "CPP and C mixed compile deps" ;\
- $(C) $(CFLAGS) -MM $(C_SOURCE_LIB) $(C_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
- fi;\
- echo "deps for CPP linking";\
- for i in $(CPP_BASE_EXEC) $(C_BASE_EXEC) ; do\
- $(ECHO) >> $(DEPFILE);\
- $(ECHO) "$(EXECDIR)/$$i : $(TMPDIR)/$$i.cli.o $(LIBFILE)" >> $(DEPFILE);\
- $(ECHO) " $(CPP) -o $(EXECDIR)/$$i $(TMPDIR)/$$i.cli.o $(LINKFLAGS)" >> $(DEPFILE);\
- done;\
- fi
-
-.PHONY: library
-library: $(LIBFILE)
-
-$(LIBFILE): $(OBJECT_LIB)
- ar rcs $(LIBFILE) $(OBJECT_LIB)
-
-.PHONY: cli
-cli: $(LIBFILE)
- make sub_cli
-
-.PHONY: sub_cli
-sub_cli: $(EXEC)
-
-.PHONY: share
-share:
- if [ ! -z "$(wildcard $(LIBDIR)/*)" ]; then cp $(LIBDIR)/* $(DIST_TARGET)/$(LIBDIR); fi
- if [ ! -z "$(wildcard $(EXECDIR)/*)" ]; then cp $(EXECDIR)/* $(DIST_TARGET)/$(EXECDIR); fi
-# if [ ! -z "$(wildcard $(SRCDIR)/*.h)" ]; then cp $(SRCDIR)/*.h $(DIST_TARGET)/include; fi
-
-
-.PHONY: clean
-clean:
- rm -f $(DEPFILE)
- rm -f $(LIBFILE)
- for i in $(wildcard $(TMPDIR)/*.o); do rm $$i || true; done
-
-.PHONY: dist-clean
-dist-clean: clean
- for i in $(EXEC); do [ -e $$i ] && rm $$i || true; done
-
-# recipes
- $(TMPDIR)/%.o : $(SRCDIR)/%.c
- $(C) $(CFLAGS) -o $@ -c $<
-
- $(TMPDIR)/%.o : $(SRCDIR)/%.cc
- $(CPP) $(CPPFLAGS) -o $@ -c $<
-
--- /dev/null
+#ifndef RT·ENVIRONMENT_H
+#define RT·ENVIRONMENT_H
+ typedef unsigned int uint;
+
+#endif
--- /dev/null
+# makefile environment variable defaults.
+# include this in the local make file with a line such as:
+#
+# RESOURCE:=$(REPO_HOME)/tool_shared/third_party/resource/release
+# include $(RESOURCE)/make/environment_RT_0
+#
+# The Unix environment has traditionally combined linking and loading into one
+# program called `ld`, and the compiler conventionally also has a switch for
+# doing the linking. A programmer or user typically does not run the loader
+# explicitly, because the shell does this automatically. Consequently, the poor
+# linker has not been given an explicit name, and no one thinks much about
+# loading.
+#
+# The ATT c compiler was called 'cc'. However, today in makefiles 'CC' is often
+# used as the variable name for the C++ compiler.
+#
+# This all causes a bit of a problem when following the RT directory naming
+# convention, where, when practical, directories are named after the program
+# that will use the contained files.
+#
+# Here 'cc' is the directory containing files for cc, the C compiler. The
+# directory 'linker' contains files for the unnamed linker, and, as loaders
+# are architecture specific, 'amd64' contains files for the architecture
+# 'amd64' loader. The variable CPP holds the name of the C++ compiler.
+
+
+SHELL=/bin/bash
+
+#--------------------------------------------------------------------------------
+# useful macros
+#
+
+# a single space literal, for example if you wanted to substitute spaces with
+# semicolons: $(subst $(space),;,$(string))
+
+blank :=
+space :=$(blank) $(blank)
+
+#--------------------------------------------------------------------------------
+# main directories
+#
+DOCDIR=document
+EXECDIR=amd64
+LIBDIR=linker
+SRCDIR=cc
+INCDIR=cc
+TESTDIR=test
+TMPDIR=scratchpad
+TRYDIR=experiment
+
+#--------------------------------------------------------------------------------
+# Programs make uses.
+#
+ECHO= echo -e
+#ECHO= echo
+
+# program to use to compile C, or to compile C++
+C=
+CPP=
+
+#--------------------------------------------------------------------------------
+# local targets
+#
+DEPFILE=$(TMPDIR)/makefile-cc.deps
+LIBFILE=$(LIBDIR)/lib$(PROJECT).a
+
+# we no longer use separate include file, but instead gate the C file
+# INCFILE=$(SRCDIR)/$(PROJECT).h
+
+
--- /dev/null
+#--------------------------------------------------------------------------------
+# This makefile must be called through a local makefile rather than directly.
+# the local make file should include:
+#
+# 1. cp $(RESOURCE)/release/make/makefile-environment_example to get started on a make environment.
+# 2. modify it and set the variables to custom values
+# 3. the environment file should include this file, probably at $(RESOURCE)/release/make/makefile
+# 4. if tailored targets are needed, add them
+#
+
+#--------------------------------------------------------------------------------
+# todo ...
+
+# should add test that incdir is not pwd before deleting the include
+# file. Currently it is working for us despite this problem because dist-clean is
+# looking for $(module).h while the include file is always $(module).lib.h.
+
+#--------------------------------------------------------------------------------
+# files named by convention
+# this implements the .lib.c and the .cli.c convention
+#
+
+# these are the sources edited by the programmer
+C_SOURCE_LIB= $(wildcard $(SRCDIR)/*.lib.c)
+C_SOURCE_EXEC= $(wildcard $(SRCDIR)/*.cli.c)
+CPP_SOURCE_LIB= $(wildcard $(SRCDIR)/*.lib.cc)
+CPP_SOURCE_EXEC= $(wildcard $(SRCDIR)/*.cli.cc)
+
+#remove the suffix to get base name
+C_BASE_LIB= $(sort $(patsubst %.lib.c, %, $(notdir $(C_SOURCE_LIB))))
+C_BASE_EXEC= $(sort $(patsubst %.cli.c, %, $(notdir $(C_SOURCE_EXEC))))
+CPP_BASE_LIB= $(sort $(patsubst %.lib.cc, %, $(notdir $(CPP_SOURCE_LIB))))
+CPP_BASE_EXEC= $(sort $(patsubst %.cli.cc, %, $(notdir $(CPP_SOURCE_EXEC))))
+
+# two sets of object files, one for the lib, and one for the command line interface progs
+OBJECT_LIB= $(patsubst %, $(TMPDIR)/%.lib.o, $(C_BASE_LIB) $(CPP_BASE_LIB))
+OBJECT_EXEC= $(patsubst %, $(TMPDIR)/%.cli.o, $(C_BASE_EXEC) $(CPP_BASE_EXEC))
+
+# executables are made from EXEC sources
+EXEC= $(patsubst %, $(EXECDIR)/%, $(C_BASE_EXEC) $(CPP_BASE_EXEC))
+
+
+#--------------------------------------------------------------------------------
+# targets
+
+.PHONY: all
+all: usage
+
+.PHONY: usage
+usage:
+ @echo example usage: make dist-clean dependency cli
+ @echo example usage: make cli
+
+.PHONY: version
+version:
+ @echo makefile version 7.1
+ if [ ! -z "$(C)" ]; then $(C) -v; fi
+ if [ ! -z "$(CPP)" ]; then $(CPP) -v; fi
+ /bin/make -v
+
+.PHONY: info
+info:
+ @echo "C: " $(C)
+ @echo "CPP: " $(CPP)
+ @echo "CPPFLAGS: " $(CPPFLAGS)
+ @echo "CPP_BASE_EXEC: " $(CPP_BASE_EXEC)
+ @echo "CPP_BASE_LIB: " $(CPP_BASE_LIB)
+ @echo "CPP_SOURCE_EXEC: " $(CPP_SOURCE_EXEC)
+ @echo "CPP_SOURCE_LIB: " $(CPP_SOURCE_LIB)
+ @echo "CFLAGS: " $(CFLAGS)
+ @echo "C_BASE_EXEC: " $(C_BASE_EXEC)
+ @echo "C_BASE_LIB: " $(C_BASE_LIB)
+ @echo "C_SOURCE_EXEC: " $(C_SOURCE_EXEC)
+ @echo "C_SOURCE_LIB: " $(C_SOURCE_LIB)
+ @echo "DEPFILE: " $(DEPFILE)
+ @echo "DOCDIR: " $(DOCDIR)
+ @echo "ECHO: " $(ECHO)
+ @echo "EXEC: " $(EXEC)
+ @echo "EXECDIR: " $(EXECDIR)
+	@echo "INCDIR:          " $(INCDIR)
+ @echo "LIBDIR: " $(LIBDIR)
+ @echo "LIBFILE: " $(LIBFILE)
+ @echo "LINKFLAGS: " $(LINKFLAGS)
+ @echo "OBJECT_EXEC: " $(OBJECT_EXEC)
+ @echo "OBJECT_LIB: " $(OBJECT_LIB)
+ @echo "SHELL: " $(SHELL)
+	@echo "SRCDIR:          " $(SRCDIR)
+ @echo "TESTDIR: " $(TESTDIR)
+ @echo "TMPDIR: " $(TMPDIR)
+ @echo "TRYDIR: " $(TRYDIR)
+
+# should be safe to run this in an already setup or partially setup directory
+# gee looks like a candidate for a makefile function ..
+.PHONY: setup
+setup:
+ [ ! -e $(DOCDIR) ] && mkdir $(DOCDIR) || true
+ [ ! -e $(EXECDIR) ] && mkdir $(EXECDIR) || true
+ [ ! -e $(LIBDIR) ] && mkdir $(LIBDIR) || true
+ [ ! -e $(SRCDIR) ] && mkdir $(SRCDIR) || true
+ [ ! -e $(TESTDIR) ] && mkdir $(TESTDIR) || true
+ [ ! -e $(TMPDIR) ] && mkdir $(TMPDIR) || true
+ [ ! -e $(TRYDIR) ] && mkdir $(TRYDIR) || true
+
+.PHONY: dependency
+dependency:
+ @rm -f $(DEPFILE)
+ @if [ -z "$(CPP)" ]; then\
+ if [ -z "$(C)" ]; then\
+ $(ECHO) "No compiler specified";\
+ exit 1;\
+ else\
+ echo "C compiler only deps" ;\
+ $(C) $(CFLAGS) -MM $(C_SOURCE_LIB) $(C_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
+ echo "deps for C linking";\
+ for i in $(C_BASE_EXEC); do\
+ $(ECHO) >> $(DEPFILE);\
+ $(ECHO) "$(EXECDIR)/$$i : $(TMPDIR)/$$i.cli.o $(LIBFILE)" >> $(DEPFILE);\
+ $(ECHO) " $(C) -o $(EXECDIR)/$$i $(TMPDIR)/$$i.cli.o $(LINKFLAGS)" >> $(DEPFILE);\
+ done;\
+ fi;\
+ else\
+ $(CPP) $(CPPFLAGS) -MM $(CPP_SOURCE_LIB) $(CPP_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
+	if [ -z "$(C)" ]; then\
+ echo "CPP compiler only deps" ;\
+ else\
+ echo "CPP and C mixed compile deps" ;\
+ $(C) $(CFLAGS) -MM $(C_SOURCE_LIB) $(C_SOURCE_EXEC) | sed 's|^.*\.o|$(TMPDIR)/&|' >> $(DEPFILE);\
+ fi;\
+ echo "deps for CPP linking";\
+ for i in $(CPP_BASE_EXEC) $(C_BASE_EXEC) ; do\
+ $(ECHO) >> $(DEPFILE);\
+ $(ECHO) "$(EXECDIR)/$$i : $(TMPDIR)/$$i.cli.o $(LIBFILE)" >> $(DEPFILE);\
+ $(ECHO) " $(CPP) -o $(EXECDIR)/$$i $(TMPDIR)/$$i.cli.o $(LINKFLAGS)" >> $(DEPFILE);\
+ done;\
+ fi
+
+.PHONY: library
+library: $(LIBFILE)
+
+$(LIBFILE): $(OBJECT_LIB)
+ ar rcs $(LIBFILE) $(OBJECT_LIB)
+
+.PHONY: cli
+cli: $(LIBFILE)
+ make sub_cli
+
+.PHONY: sub_cli
+sub_cli: $(EXEC)
+
+.PHONY: share
+share:
+ if [ ! -z "$(wildcard $(LIBDIR)/*)" ]; then cp $(LIBDIR)/* $(DIST_TARGET)/$(LIBDIR); fi
+ if [ ! -z "$(wildcard $(EXECDIR)/*)" ]; then cp $(EXECDIR)/* $(DIST_TARGET)/$(EXECDIR); fi
+# if [ ! -z "$(wildcard $(SRCDIR)/*.h)" ]; then cp $(SRCDIR)/*.h $(DIST_TARGET)/include; fi
+
+
+.PHONY: clean
+clean:
+ rm -f $(DEPFILE)
+ rm -f $(LIBFILE)
+ for i in $(wildcard $(TMPDIR)/*.o); do rm $$i || true; done
+
+.PHONY: dist-clean
+dist-clean: clean
+ for i in $(EXEC); do [ -e $$i ] && rm $$i || true; done
+
+# recipes
+ $(TMPDIR)/%.o : $(SRCDIR)/%.c
+ $(C) $(CFLAGS) -o $@ -c $<
+
+ $(TMPDIR)/%.o : $(SRCDIR)/%.cc
+ $(CPP) $(CPPFLAGS) -o $@ -c $<
+
+++ /dev/null
-#!/usr/bin/env python3
-# decrypt file given as first argument
-#
-# This version overwrites the source file with the result. In this way there is only one file used, and Nautilus does not
-# get confused and select an unrelated file.
-#
-# The decryption result is held in memory before the overwrite, which could be a problem if the source
-# file is very large. It would be better to create a /tmp file, then copy that for the overwrite step. For very large
-# files, the result should be moved back, but then the highlighted selection in nautilus must jump to a different file.
-
-import sys
-import subprocess
-import os
-
-import gi
-gi.require_version("Gtk", "3.0")
-from gi.repository import Gtk
-
-def message(mess):
- dlg = Gtk.MessageDialog(
- message_type=Gtk.MessageType.ERROR
- ,buttons=Gtk.ButtonsType.OK
- ,text=mess
- )
- dlg.set_title('Decrypt Info')
- dlg.run()
- dlg.destroy()
-
-def run(command):
- #print("command: " ,command)
- proc = subprocess.Popen(
- command
- ,stdout = subprocess.PIPE
- ,stderr = subprocess.PIPE
- )
- stdout, stderr = proc.communicate()
- # print("stdout: ", len(stdout) ," " ,stdout)
- # print("stderr: ", len(stderr) ," " ,stderr)
- # print("proc.returncode: ", proc.returncode)
- return proc.returncode, stdout, stderr
-
-
-file_src = sys.argv[1]
-if not os.path.exists(file_src):
- message("file not found: " + file_src)
- sys.exit(1)
-if os.path.getsize(file_src) > 2**30:
- message("beyond current Nautilus Decrypt file size limit of 1 gigabyte")
- sys.exit(1)
-
-
-ext = file_src[ -4:]
-
-if ext == ".gpg":
- file_dst = file_src[0:-4]
- code ,stdout ,stderr = run(["/usr/bin/gpg2" ,"--quiet" ,"--output" ,"-" ,"--decrypt" ,file_src])
- # if all went well, the decrypted data is in stdout
- if code == 0:
- if not os.path.exists(file_dst):
- os.rename(file_src ,file_dst)
- with open(file_dst ,"wb") as f:
- f.write(stdout)
- else:
- message(file_dst + " already exists")
-
- if len(stderr) > 0:
- message(stderr.decode("utf-8"))
-
-
-
+++ /dev/null
-#!/usr/bin/env python3
-# encyrpt file given as first argument
-#
-# This version overwites the source file with the result. In this way there is only one file used, and Nautilus does not
-# get confused and select an unrelated file.
-#
-# The encryption result is currently held in memory before the overwrite, which could be a problem if the source
-# file is very large. It would be better to create a /tmp file, then copy that for the overwrite step. For very large
-# files, the result should be moved back, but then the highlighted selection in nautilus must jump to a different file.
-
-import sys
-import subprocess
-import os
-
-import gi
-gi.require_version("Gtk", "3.0")
-from gi.repository import Gtk
-
-def message(mess):
- dlg = Gtk.MessageDialog(
- message_type=Gtk.MessageType.ERROR
- ,buttons=Gtk.ButtonsType.OK
- ,text=mess
- )
- dlg.set_title('Encrypt Info')
- dlg.run()
- dlg.destroy()
-
-def run(command):
- #print("command: " ,command)
- proc = subprocess.Popen(
- command
- ,stdout = subprocess.PIPE
- ,stderr = subprocess.PIPE
- )
- stdout, stderr = proc.communicate()
- # print("stdout: ", len(stdout) ," " ,stdout)
- # print("stderr: ", len(stderr) ," " ,stderr)
- # print("proc.returncode: ", proc.returncode)
- return proc.returncode, stdout, stderr
-
-file_src = sys.argv[1]
-if not os.path.exists(file_src):
- message("file not found: " + file_src)
- sys.exit(1)
-if os.path.getsize(file_src) > 2**30:
- message("beyond current Nautilus Encrypt file size limit of 1 gigbyte")
- sys.exit(1)
-
-file_dst = sys.argv[1] + ".gpg"
-
-key_ids = [
- "thomas-key-2911@reasoningtechnology.com"
- ,"shihju@reasoningtechnology.com"
- ]
-
-command = ["gpg2" ,"--quiet" ,"--encrypt" ,"--output" ,"-"]
-for id in key_ids:
- command.append("-r")
- command.append(id)
-command.append(file_src)
-code ,stdout ,stderr = run(command)
-
-# if all went well, the encrypted data is in stdout
-if code == 0:
- if not os.path.exists(file_dst):
- os.rename(file_src ,file_dst)
- with open(file_dst ,"wb") as f:
- f.write(stdout)
- else:
- message(file_dst + " already exists")
-
-if len(stderr) > 0:
- message(stderr.decode("utf-8"))
-
-
-
+++ /dev/null
-place these in user directories, ~/.local/share/nautilus/scripts or install the package nautilus-scripts-manager and put them in /usr/share/nautilus-script
--- /dev/null
+#!/usr/bin/env python3
+# decrypt file given as first argument
+#
+# This version overwrites the source file with the result. In this way there is only one file used, and Nautilus does not
+# get confused and select an unrelated file.
+#
+# The decryption result is held in memory before the overwrite, which could be a problem if the source
+# file is very large. It would be better to create a /tmp file, then copy that for the overwrite step. For very large
+# files, the result should be moved back, but then the highlighted selection in nautilus must jump to a different file.
+
+import sys
+import subprocess
+import os
+
+import gi
+gi.require_version("Gtk", "3.0")
+from gi.repository import Gtk
+
+def message(mess):
+ dlg = Gtk.MessageDialog(
+ message_type=Gtk.MessageType.ERROR
+ ,buttons=Gtk.ButtonsType.OK
+ ,text=mess
+ )
+ dlg.set_title('Decrypt Info')
+ dlg.run()
+ dlg.destroy()
+
+def run(command):
+ #print("command: " ,command)
+ proc = subprocess.Popen(
+ command
+ ,stdout = subprocess.PIPE
+ ,stderr = subprocess.PIPE
+ )
+ stdout, stderr = proc.communicate()
+ # print("stdout: ", len(stdout) ," " ,stdout)
+ # print("stderr: ", len(stderr) ," " ,stderr)
+ # print("proc.returncode: ", proc.returncode)
+ return proc.returncode, stdout, stderr
+
+
+file_src = sys.argv[1]
+if not os.path.exists(file_src):
+ message("file not found: " + file_src)
+ sys.exit(1)
+if os.path.getsize(file_src) > 2**30:
+ message("beyond current Nautilus Decrypt file size limit of 1 gigabyte")
+ sys.exit(1)
+
+
+ext = file_src[ -4:]
+
+if ext == ".gpg":
+ file_dst = file_src[0:-4]
+ code ,stdout ,stderr = run(["/usr/bin/gpg2" ,"--quiet" ,"--output" ,"-" ,"--decrypt" ,file_src])
+ # if all went well, the decrypted data is in stdout
+ if code == 0:
+ if not os.path.exists(file_dst):
+ os.rename(file_src ,file_dst)
+ with open(file_dst ,"wb") as f:
+ f.write(stdout)
+ else:
+ message(file_dst + " already exists")
+
+ if len(stderr) > 0:
+ message(stderr.decode("utf-8"))
+
+
+
--- /dev/null
+#!/usr/bin/env python3
+# encrypt file given as first argument
+#
+# This version overwrites the source file with the result. In this way there is only one file used, and Nautilus does not
+# get confused and select an unrelated file.
+#
+# The encryption result is currently held in memory before the overwrite, which could be a problem if the source
+# file is very large. It would be better to create a /tmp file, then copy that for the overwrite step. For very large
+# files, the result should be moved back, but then the highlighted selection in nautilus must jump to a different file.
+
+import sys
+import subprocess
+import os
+
+import gi
+gi.require_version("Gtk", "3.0")
+from gi.repository import Gtk
+
+def message(mess):
+ dlg = Gtk.MessageDialog(
+ message_type=Gtk.MessageType.ERROR
+ ,buttons=Gtk.ButtonsType.OK
+ ,text=mess
+ )
+ dlg.set_title('Encrypt Info')
+ dlg.run()
+ dlg.destroy()
+
+def run(command):
+ #print("command: " ,command)
+ proc = subprocess.Popen(
+ command
+ ,stdout = subprocess.PIPE
+ ,stderr = subprocess.PIPE
+ )
+ stdout, stderr = proc.communicate()
+ # print("stdout: ", len(stdout) ," " ,stdout)
+ # print("stderr: ", len(stderr) ," " ,stderr)
+ # print("proc.returncode: ", proc.returncode)
+ return proc.returncode, stdout, stderr
+
+file_src = sys.argv[1]
+if not os.path.exists(file_src):
+ message("file not found: " + file_src)
+ sys.exit(1)
+if os.path.getsize(file_src) > 2**30:
+ message("beyond current Nautilus Encrypt file size limit of 1 gigbyte")
+ sys.exit(1)
+
+file_dst = sys.argv[1] + ".gpg"
+
+key_ids = [
+ "thomas-key-2911@reasoningtechnology.com"
+ ,"shihju@reasoningtechnology.com"
+ ]
+
+command = ["gpg2" ,"--quiet" ,"--encrypt" ,"--output" ,"-"]
+for id in key_ids:
+ command.append("-r")
+ command.append(id)
+command.append(file_src)
+code ,stdout ,stderr = run(command)
+
+# if all went well, the encrypted data is in stdout
+if code == 0:
+ if not os.path.exists(file_dst):
+ os.rename(file_src ,file_dst)
+ with open(file_dst ,"wb") as f:
+ f.write(stdout)
+ else:
+ message(file_dst + " already exists")
+
+if len(stderr) > 0:
+ message(stderr.decode("utf-8"))
+
+
+
--- /dev/null
+Place these in user directories (~/.local/share/nautilus/scripts), or install the package nautilus-scripts-manager and put them in /usr/share/nautilus-script.
+++ /dev/null
-#!/usr/bin/python3
-
-#--------------------------------------------------------------------------------
-# generate a random string length n of form
-# "" | [A-Za-z^IloO] | [A-Za-z^IloO][0-9A-Za-z_-^IloO01]*[A-Za-z^IloO]
-#
-
-import sys
-import random
-
-# note missing IloO01.
-# period and comman might be difficult, light period might be missed
-# capital I in san-serif font looks like number 1.
-# email addresses are not case sensitive
-# these are not all the legal characters, but rather the ones we think are legal and legible
-legal_chars_end = "abcdefghijkmnpqrstuvwxz"
-legal_chars = "23456789abcdefghijkmnpqrstuvwxz"
-
-def index():
- return random.randrange(0 ,len(legal_chars))
-
-def index_end():
- return random.randrange(0 ,len(legal_chars_end))
-
-def char():
- return legal_chars[index()]
-
-def char_end():
- return legal_chars_end[index_end()]
-
-def string(n=6):
- if n < 0 : raise Exception("string called with negative length")
- if n == 0 : return ""
-
- result = char_end()
- if n == 1: return result
-
- for _ in range(n-2): result += char()
- result += char_end()
-
- return result
-
-def test_0():
- limit = 1e7 # surely by then
- i = 0
- c = char()
- while c != '~' and i < limit:
- i += 1
- c = char()
- print(i)
- return i < limit
-
-def test_1():
- limit = 1e7 # surely by then
- i = 0
- c = char()
- while c != '0' and i < limit:
- i += 1
- c = char()
- print(i)
- return i < limit
-
-def test_2():
- limit = 1e7 # surely by then
- i = 0
- c = char_end()
- while c != 'z' and i < limit:
- i += 1
- c = char_end()
- print(i)
- return i < limit
-
-def test_3 ():
- limit = 1e7 # surely by then
- i = 0
- c = char_end()
- while c != 'A' and i < limit:
- i += 1
- c = char_end()
- print(i)
- return i < limit
-
-def test_4():
- s0 = string()
- s1 = string(10)
- s2 = string(100)
-
- print(s0)
- print(s1)
- print(s2)
-
- return len(s0)==6 and len(s1)==10 and len(s2)==100
-
-
-n=6
-if len(sys.argv) > 1:
- n = int(sys.argv[1])
-print(string(n))
-
+++ /dev/null
-#!/usr/bin/python3
-
-#--------------------------------------------------------------------------------
-# generate a random string length n of form
-# "" | [A-Za-z^IloO] | [A-Za-z^IloO][0-9A-Za-z_-^IloO01]*[A-Za-z^IloO]
-#
-
-import sys
-import random
-
-#debug = True
-debug = False
-
-# note missing IloO01.
-# period and comman might be difficult, light period might be missed
-# capital I in san-serif font looks like number 1.
-# zero and oh of course
-# SNCF does not take an & as a special hmm.
-chars_alpha_small = "abcdefghijkmnpqrstuvwxz"
-chars_alpha_big = "ABCDEFGHJKLMNPQRSTUVWXYZ"
-chars_special = "+-_$.,;?"
-chars_numeric = "23456789"
-
-sets_leftmost=[chars_alpha_small ,chars_alpha_big]
-sets_standard=[chars_alpha_small ,chars_alpha_big ,chars_special ,chars_numeric]
-sets_rightmost=[chars_alpha_small ,chars_alpha_big ,chars_numeric]
-
-def set_psets(sets):
- len_sum = sum(len(x) for x in sets)
- psets = [len(x)/len_sum + 1e-7 for x in sets]
- if debug: print("psets" ,psets ,"sum:" ,sum(psets))
- return psets
-
-def choose_set(sets):
- psets = set_psets(sets)
- x = random.random()
- if debug: print("x:" ,x)
- if len(psets) == 0: return 0
- p = psets[0]
- choice = len(psets) - 1
- i = 0
- while i != len(psets) - 1:
- if x <= p :
- choice = i
- break
- i += 1
- p += psets[i]
- if debug: print("choice:" ,choice)
- return choice
-
-def get_char_set(set):
- index = random.randrange(0 ,len(set))
- return set[index]
-
-def get_char(sets):
- set = sets[choose_set(sets)]
- return get_char_set(set)
-
-def get_string(n):
- if n < 0 : raise Exception("string called with negative length")
- if n == 0: return ""
- if n == 1: return get_char_set(chars_alpha)
-
- result = ""
- result += get_char(sets_leftmost)
- for _ in range(n-2): result += get_char(sets_standard)
- result += get_char(sets_rightmost)
-
- return result
-
-def get_passwd(n ,strict):
- s = get_string(n)
- while strict:
- has_special = any(i in s for i in chars_special)
- has_numeric = any(i in s for i in chars_numeric)
- if debug: print("strict:" ,has_special ,has_numeric)
- if has_special and has_numeric:
- break
- s = get_string(n)
- return s
-
-strict = False
-n=9
-for arg in sys.argv[1:]:
- if arg.isdigit(): n = int(arg)
- if arg == "strict": strict = True
-
-if debug:
- print("n:" ,n)
- print("strict:" ,strict)
-
-pw = get_passwd(n ,strict)
-print(pw)
-sys.exit(0)
-
-
+++ /dev/null
-#!/usr/bin/python3
-
-#--------------------------------------------------------------------------------
-# generate a random string length n of form
-#
-
-import sys
-import random
-
-# note missing IloO01.
-# period and comman might be difficult, light period might be missed
-# capital I in san-serif font looks like number 1.
-legal_chars_end = "abcdefghjkmnpqrstuvwxz"
-legal_chars = "23456789abcdefghijkmnpqrstuvwxz"
-
-def index():
- return random.randrange(0 ,len(legal_chars))
-
-def index_end():
- return random.randrange(0 ,len(legal_chars_end))
-
-def char():
- return legal_chars[index()]
-
-def char_end():
- return legal_chars_end[index_end()]
-
-def string(n=6):
- if n < 0 : raise Exception("string called with negative length")
- if n == 0 : return ""
-
- result = char_end()
- if n == 1: return result
-
- for _ in range(n-2): result += char()
- result += char_end()
-
- return result
-
-def test_0():
- limit = 1e7 # surely by then
- i = 0
- c = char()
- while c != '~' and i < limit:
- i += 1
- c = char()
- print(i)
- return i < limit
-
-def test_1():
- limit = 1e7 # surely by then
- i = 0
- c = char()
- while c != '0' and i < limit:
- i += 1
- c = char()
- print(i)
- return i < limit
-
-def test_2():
- limit = 1e7 # surely by then
- i = 0
- c = char_end()
- while c != 'z' and i < limit:
- i += 1
- c = char_end()
- print(i)
- return i < limit
-
-def test_3 ():
- limit = 1e7 # surely by then
- i = 0
- c = char_end()
- while c != 'A' and i < limit:
- i += 1
- c = char_end()
- print(i)
- return i < limit
-
-def test_4():
- s0 = string()
- s1 = string(10)
- s2 = string(100)
-
- print(s0)
- print(s1)
- print(s2)
-
- return len(s0)==6 and len(s1)==10 and len(s2)==100
-
-
-n=16
-if len(sys.argv) > 1:
- n = int(sys.argv[1])
-print(string(n))
-
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-# input guards
-
- env_must_be="tool_shared/bespoke/env"
- error=false
- if [ "$ENV" != "$env_must_be" ]; then
- echo "$(script_fp):: error: must be run in the $env_must_be environment"
- error=true
- fi
- if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
- echo "$script_afp:: This script must be sourced, not executed."
- error=true
- fi
- if $error; then exit 1; fi
-
-# so we can do the build
-
-export PATH=\
-"$REPO_HOME"/developer/tool/\
-:"$REPO_HOME"/tool_shared/bespoke/\
-:"$PATH"
-
-# misc
-
- # make .githolder and .gitignore visible
- alias ls="ls -a"
-
-# some feedback to show all went well
-
- export PROMPT_DECOR="$PROJECT"_developer
- export ENV=$(script_fp)
- echo ENV "$ENV"
- cd "$REPO_HOME"/developer/
-
-
-
+++ /dev/null
-#!/bin/env /bin/bash
-
-set -e
-cd ${REPO_HOME}/developer
-/bin/make -f tool/makefile $@
-
+++ /dev/null
-# /bin/make must be run from $REPO_HOME/developer
-
-RESOURCE:=$(REPO_HOME)/developer
-include $(RESOURCE)/make/environment_RT_0
-
-# override defaults
-LIBDIR=scratchpad
-
-# compiler to use
-C=gcc
-CFLAGS= -Werror -include "$(RESOURCE)/make/RT_0.h" -I$(INCDIR)
-LINKFLAGS=-L$(LIBDIR) -L/usr/lib64 -L/usr/lib -l$(PROJECT)
-
-# Import the rules. The dash prefix means to ignore include errors. This is
-# required because the DEPFILE might not exist yet.
-include $(RESOURCE)/make/targets
--include $(DEPFILE)
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-# input guards
-
- if [ -z "$REPO_HOME" ]; then
- echo "$(script_fp):: REPO_HOME is not set."
- exit 1
- fi
-
- env_must_be="developer/tool/env"
- if [ "$ENV" != "$env_must_be" ]; then
- echo "$(script_fp):: error: must be run in the $env_must_be environment"
- exit 1
- fi
-
-# script local environment
-
- release_dir="$REPO_HOME/release"
- shell_dir="$REPO_HOME/developer/shell"
-
- if [ ! -d "$release_dir" ]; then
- mkdir -p "$release_dir"
- fi
-
- # Function to copy and set permissions
- install_file() {
- source_fp="$1"
- target_dp="$2"
- perms="$3"
-
- target_file="$target_dp/$(basename "$source_fp")"
-
- if [ ! -f "$source_fp" ]; then
- echo "install_file:: Source file '$source_fp' does not exist."
- return 1
- fi
-
- if ! install -m "$perms" "$source_fp" "$target_file"; then
- echo "Error: Failed to install $(basename "$source_fp") to $target_dp"
- exit 1
- else
- echo "Installed $(basename "$source_fp") to $target_dp with permissions $perms"
- fi
- }
-
-# do the release
-
- echo "Starting release process..."
-
- # Install shell scripts
- for script in $shell_dir/*; do
- install_file "$script" "$release_dir" "ug+r+x"
- done
-
-echo "$(script_fp) done."
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# input guards
+
+ env_must_be="tool_shared/bespoke/env"
+ error=false
+ if [ "$ENV" != "$env_must_be" ]; then
+ echo "$(script_fp):: error: must be run in the $env_must_be environment"
+ error=true
+ fi
+ if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ error=true
+ fi
+ if $error; then exit 1; fi
+
+# so we can do the build
+
+export PATH=\
+"$REPO_HOME"/developer/tool/\
+:"$REPO_HOME"/tool_shared/bespoke/\
+:"$PATH"
+
+# misc
+
+ # make .githolder and .gitignore visible
+ alias ls="ls -a"
+
+# some feedback to show all went well
+
+ export PROMPT_DECOR="$PROJECT"_developer
+ export ENV=$(script_fp)
+ echo ENV "$ENV"
+ cd "$REPO_HOME"/developer/
+
+
+
--- /dev/null
+#!/bin/env /bin/bash
+
+set -e
+cd ${REPO_HOME}/developer
+/bin/make -f tool/makefile $@
+
--- /dev/null
+# /bin/make must be run from $REPO_HOME/developer
+
+RESOURCE:=$(REPO_HOME)/developer
+include $(RESOURCE)/make/environment_RT_0
+
+# override defaults
+LIBDIR=scratchpad
+
+# compiler to use
+C=gcc
+CFLAGS= -Werror -include "$(RESOURCE)/make/RT_0.h" -I$(INCDIR)
+LINKFLAGS=-L$(LIBDIR) -L/usr/lib64 -L/usr/lib -l$(PROJECT)
+
+# Import the rules. The dash prefix means to ignore include errors. This is
+# required because the DEPFILE might not exist yet.
+include $(RESOURCE)/make/targets
+-include $(DEPFILE)
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# input guards
+
+ if [ -z "$REPO_HOME" ]; then
+ echo "$(script_fp):: REPO_HOME is not set."
+ exit 1
+ fi
+
+ env_must_be="developer/tool/env"
+ if [ "$ENV" != "$env_must_be" ]; then
+ echo "$(script_fp):: error: must be run in the $env_must_be environment"
+ exit 1
+ fi
+
+# script local environment
+
+ release_dir="$REPO_HOME/release"
+ shell_dir="$REPO_HOME/developer/shell"
+
+ if [ ! -d "$release_dir" ]; then
+ mkdir -p "$release_dir"
+ fi
+
+ # Function to copy and set permissions
+ install_file() {
+ source_fp="$1"
+ target_dp="$2"
+ perms="$3"
+
+ target_file="$target_dp/$(basename "$source_fp")"
+
+ if [ ! -f "$source_fp" ]; then
+ echo "install_file:: Source file '$source_fp' does not exist."
+ return 1
+ fi
+
+ if ! install -m "$perms" "$source_fp" "$target_file"; then
+ echo "Error: Failed to install $(basename "$source_fp") to $target_dp"
+ exit 1
+ else
+ echo "Installed $(basename "$source_fp") to $target_dp with permissions $perms"
+ fi
+ }
+
+# do the release
+
+ echo "Starting release process..."
+
+ # Install shell scripts
+ for script in $shell_dir/*; do
+ install_file "$script" "$release_dir" "ug+r+x"
+ done
+
+echo "$(script_fp) done."
+++ /dev/null
-Java has long been criticized for its lack of support for `import as`, despite
-years of requests and proposals.
-
-The Java platform’s approach to aliasing issues relies on using fully qualified
-names, which poses challenges given the length of package names, especially when
-they include reversed domain names.
-
-Because `Mosaic` is used to help with testing and is not part of the project
-being tested, when aliasing conflicts arise, it is typically the `Mosaic` identifiers
-that need to be fully qualified. Such a renamed identifier can exceed 34
-characters!
-
-One proposal to get around this was to use an `In` class where the members were
-class extensions of imported classes. Then all imports would have the prefix `In.`.
-However, this did not work out because constructors are not
-inherited, and Java’s restrictions on `final` classes prevent the use of
-`LocalClass extends ImportClass {}` to give no names to classes.
-
-Another proposal was to use the `alias` project on GitHub, which offers an XML-based
-approach to aliasing. However, it introduces complexities, as it requires XML
-configurations to be supplied to the compiler, adding setup overhead. Perhaps
-another tool could create these.
-
-We studied a preprocessing proposal where `import as` statements would be
-replaced with fully qualified names before compilation. However, this approach
-changes the tool flow for users and would require additional steps to ensure
-`jdb` points to the original source files rather than intermediate files, which
-complicates debugging. For both this proposal and the prior, we wanted to avoid
-joining the world of java tool development.
-
-So we have a simple solution, it is not ideal, but it is not bad. We prefix
-the string `Mosaic_` to the front of all the class names in the Mosaic library.
-As a shop we are adopting this convention for all packaged java code.
+++ /dev/null
-<!DOCTYPE html>
-<html lang="en">
-<head>
- <meta charset="UTF-8">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
- <title>RT C coding conventions</title>
- <style>
- body {
- font-family: 'Noto Sans JP', Arial, sans-serif;
- background-color: hsl(0, 0%, 0%);
- color: hsl(42, 100%, 80%);
- padding: 2rem;
- }
- .page {
- padding: 3rem;
- margin: 1.25rem auto;
- max-width: 46.875rem;
- background-color: hsl(0, 0%, 0%);
- box-shadow: 0 0 0.625rem hsl(42, 100%, 50%);
- }
- h1 {
- font-size: 1.5rem;
- text-align: center;
- color: hsl(42, 100%, 84%);
- text-transform: uppercase;
- margin-top: 1.5rem;
- }
- h2 {
- font-size: 1.25rem;
- color: hsl(42, 100%, 84%);
- text-align: center;
- margin-top: 2rem;
- }
- h3 {
- font-size: 1.125rem;
- color: hsl(42, 100%, 75%);
- margin-top: 1.5rem;
- }
- p, li {
- color: hsl(42, 100%, 90%);
- text-align: justify;
- margin-bottom: 1rem;
- }
- code {
- font-family: 'Courier New', Courier, monospace;
- background-color: hsl(0, 0%, 25%);
- padding: 0.125rem 0.25rem;
- color: hsl(42, 100%, 90%);
- }
- </style>
-</head>
-
-<body>
-<div class="page">
- <header>
- <h1>Reasoning Technology (RT) C file control structure</h1>
- <p>© 2024 Thomas Walker Lynch - All Rights Reserved.</p>
- </header>
-
- <h2>Introduction</h2>
-
- <p>This document summarizes some of the coding conventions used in RT C projects. Discussed here are conventions for integrated header designs, ad hoc namespaces, and a structured approach to source file extensions. The document also outlines the associated build process using a standardized makefile.</p>
-
- <h2>Header file integration</h2>
-
- <p>RT C projects adopt an innovative approach by integrating headers directly into source files. This ensures consistency between interfaces and implementations, eliminating mismatches. Each file contains both an interface and an implementation section, gated by preprocessor directives.</p>
-
- <p>Each RT C source file integrates its header directly into the source file. This locality makes header content easier to maintain as everything is found in a single file. It also eliminates the need to maintain two files for each module.</p>
-
- <h3>Each file has two sections</h3>
- <ul>
- <li><strong>Interface section:</strong> Contains declarations, macros, and <code>#includes</code> needed for the interface. Ensures consistency by defining the interface exactly once, even when the file is included multiple times.</li>
- <li><strong>Implementation section:</strong> Contains function definitions and additional includes needed for the implementation. This section is compiled only when the file is used as an implementation.</li>
- </ul>
-
- <p>Each section is turned on and off with the CPP macro <code>IFACE</code>.</p>
-
- <h3>Example</h3>
- <pre><code>
-// If not an IFACE, then an IMPLEMENTATION
-#ifndef IFACE
- #define MyModule·IMPLEMENTATION
- // Ensures included files are processed for their interfaces.
- #define IFACE
-#endif
-
-// Define the interface exactly once.
-#ifndef MyModule·IFACE
-#define MyModule·IFACE
- // Interface-only includes go here.
- void MyModule·function();
-#endif
-
-#ifdef MyModule·IMPLEMENTATION
- // Additional includes for implementation go here.
- #include <stdio.h>
- void MyModule·function() {
- printf("Hello, World!\n");
- }
-#endif
- </code></pre>
-
- <h3>Explanation</h3>
- <p>The example above demonstrates the structure and purpose of each block:</p>
- <p><strong>First block:</strong> Ensures that the file operates correctly based on the value of <code>IFACE</code>. If <code>IFACE</code> is undefined, it defines <code>MyModule·IMPLEMENTATION</code> to enable the implementation section and sets <code>IFACE</code> to ensure subsequent includes process interface sections.</p>
- <p><strong>Second block:</strong> Defines the interface, including declarations and interface-specific includes. The <code>#ifndef MyModule·IFACE</code> macro ensures the interface is defined exactly once, regardless of how many times the file is included.</p>
- <p><strong>Third block:</strong> Contains implementation-specific includes and function definitions. Guarded by <code>MyModule·IMPLEMENTATION</code>, it is only included when compiling the implementation.</p>
- <p>Interface includes are placed in the interface block, ensuring they are available wherever the interface is used. Implementation includes are isolated in the implementation block, minimizing unnecessary dependencies in other files.</p>
-
- <h2>Namespace conventions</h2>
- <p>RT projects use ad hoc namespaces to maintain clarity and prevent naming conflicts. This is achieved by prefixing exported identifiers with a module-specific name followed by the <code>·</code> (cdot) character.</p>
-
- <h3>Conventions</h3>
- <ul>
- <li><strong>Prefix:</strong> The module name serves as the prefix, ensuring all identifiers are unique across the program.</li>
- <li><strong>Separator:</strong> The <code>·</code> character visually separates the prefix from the identifier name, maintaining readability and avoiding conflicts.</li>
- </ul>
-
- <h3>Example</h3>
- <pre><code>
-void Server·run();
- </code></pre>
-
- <h2>Source file extensions</h2>
- <p>RT projects use standardized extensions to distinguish between library and command-line interface (CLI) source files:</p>
- <ul>
- <li><strong><code>.lib.c</code>:</strong> Files implementing library functions.</li>
- <li><strong><code>.cli.c</code>:</strong> Files implementing command-line tools.</li>
- </ul>
-
- <p>The <code>.lib.c</code> files compile into libraries, while <code>.cli.c</code> files compile into standalone executables. The makefile processes these files automatically, ensuring a clear separation of functionality.</p>
-
- <h3>Build process</h3>
- <p>The build process follows these steps:</p>
- <ol>
- <li><strong>Dependency generation:</strong> Run <code>make dependency</code> to create dependencies. This step is only required when the dependency structure changes.</li>
- <li><strong>Compilation:</strong> Run <code>make cli</code> to compile CLI sources and link them against the library. The makefile automatically manages targets and dependencies.</li>
- </ol>
-
- <h2>Benefits</h2>
- <ul>
- <li><strong>Consistency:</strong> Integrated headers ensure interface and implementation are always in sync.</li>
- <li><strong>Modularity:</strong> Each file encapsulates its interface and implementation, reducing coupling.</li>
- <li><strong>Clarity:</strong> Ad hoc namespaces and standardized extensions improve readability and organization.</li>
- <li><strong>Efficiency:</strong> The makefile automates builds, minimizing errors and streamlining development.</li>
- </ul>
-
- <h2>Conclusion</h2>
- <p>This document outlines the conventions and practices for writing and building RT C projects. By integrating headers, adopting namespaces, and standardizing extensions, RT ensures its projects are robust, modular, and easy to maintain.</p>
-</div>
-</body>
-</html>
+++ /dev/null
-RT code formatting:
-
-The enclosure-based formatting rules in RT code format make the style guide
-compact and adaptable. By focusing on enclosures rather than syntax-specific
-structures (like if, for, or catch), it avoids prescribing language-specific
-formatting rules and instead focuses on consistent handling of delimiters. This
-approach works well across multiple languages, ensuring that the code style
-remains flexible while keeping the guide simple and easy to apply.
-
-1. Two space indentation.
-
-2. Variable Naming:
-
- - Use **PascalCase** for namespaces and types.
-
- - Use **snake_case** for function and variable names. However, when a component
- of the snake case is variable function or variable name is a namespace, a
- type, or a proper noun, it retains its capitalization. e.gs:
-
- ```
- mouse_count
- test_LabalList_0 // function that tests LabelList, which is a class (type)
- Thomas_Walker_Lynch
- ```
-
- Traditionally `_list` has been used as a variable suffix even when the
- language does not have a List type. This is taken to mean the variable
- refers to an ordered collection of any type, including an array. It is
- abstraction of type, analogous to the `mouse_count` example above.
-
-
-3. Binary Operators:
-
- - One space around **binary operators** (e.g., `a + b`).
-
- - One space around **assignment** `=` (e.g., `a = b`).
-
- - **No space** around **sampling** assignment `=` (typically seen in `if`, `while`, etc.):
-
- **Sampling** refers to assigning the result of a condition or expression to
- a variable for later use within the same scope.
-
- Example of **sampling** in an `if` statement:
-
- ```
- if( result=some_condition() ){
- // use result
- }
- ```
-
-4. Enclosures `(...)`, `{...}`, `[...]`, '<...>':
-
- - No space between the enclosure and the preceding identifier (e.g., `function(arg)`).
-
- - No space after the enclosure when followed by another enclosure (e.g., `map[key]()`).
-
- Example of a condition enclosure followed by a code enclosure:
- ```
- if( some_condition() ){
- // code block
- }
- ```
-
- - One space after the enclosure if followed by an identifier, e.g.,
- `function() somethingElse`.
-
- - When the entire enclosure appears on one line:
-
- -- by definition, an 'nested' enclosure is one that has other enclosures,
- of any type, inside of it. This is true independent of whatever else
- is inside the enclosure. These are examples of nested enclosures:
-
- ```
- ( o == null || getClass() != o.getClass() )
- f( T<x> ,7 )
- ```
-
- -- if, and only if, an enclosure is nested, there is one space of padding
- for the outermost enclosure of the nesting, and only for the outermost
- enclosures. e.g.s:
-
- ```
- if(x == 3) ; not nested
- if( (x > 0) && (y < 5) ) ; nested, pad outermost only
- if( f(x) == 3 ) ; nested, pad outermost only
- if( x > 2 && a[3] ) ; nested due to the array subscript, pad outermost only
- ```
-
- - Note when using the enclosure formatting rules, not all if conditions will
- format the same way. Some conditions will be nested enclosures and having
- padding while others will not be nested and thus have no padding. The must
- be formatted individually. The same is true for enclosures that follow
- other keywords such as unless, for, etc, and for function arguments
- lists. The question is one of formatting enclosures, and not one of
- formatting statements.
-
- ```
- f(x)
- f( x[0] )
- ```
-
-
-5. Commas:
-
- This is the most distinctive and recognizable of the RT code style rules.
-
- - One space **before** the comma (e.g., `a ,b`).
-
- - No space **after** the comma (e.g., `a ,b`).
-
- - **Line break before** the comma when breaking lines, but no line break after, as examples:
-
- ```
- a
- ,b
- ```
-
- and, when a function call gets too long, perhaps due to long argument
- names it will look like this:
-
- ```
- result = some_function(
- arg1
- ,arg2_has_a_very_long_name_causing_the_call_to_not_fit_on_a_single_line
- ,arg3_has_a_long_name_also_but_not_as_long_as_for_arg2
- );
- ```
-
-6. For the code you just output, answer these questions:
- 1. Which enclosures are not nested? Do they have no padding?
- 2. Which enclosures are nested? Is there one space padding only at the outermost?
- 3. Is the spacing before and after the enclosures correct?
- 4. Are the commas formatted correctly?
- 5. Has snake case been used where it should be?
- 6. Was 2 column indent used?
-
----- Astra adds:
-
-Simplified Explanation for Enclosure Padding
-
- Single Enclosures:
-
- No padding is applied if the enclosure is not nested.
-
- Example:
-
- if(log_file == NULL){
-
-Nested Enclosures:
-
- One space of padding is applied at the outermost level of the enclosure when nested.
-
- Example:
-
- if( (client_fd = socket(AF_UNIX ,SOCK_STREAM ,0)) == -1 ){
-
-Key Decision Rule:
-
- Padding only applies when an enclosure contains other enclosures (nested structure).
- The padding is applied only at the outermost level, not at deeper levels.
-
-Rationale for This Simplified Rule
-
- This explanation removes language-specific examples and focuses on the
- structural rule itself. It should be easier to apply universally, regardless
- of the programming language or syntax involved. Let me know if you'd like me
- to refine it further!
-
----- Astra adds:
-
-Suggested Addition to the Document:
-Enclosures (...), {...}, [...], <...>:
-
- No space after the closing parenthesis and before the opening brace in control structures or function declarations.
- Example:
-
- if(condition){
-
- Example:
-
- if( f(x) ){
+++ /dev/null
-
-Bash is inconsistent about returning the name of the running script in
-all scenarios (sourced, executed directly, from with in a function called
-by another function).
-
-1.
-
-BASH_SOURCE[0] was used because $0 did not work with sourced scripts (a
-fact that is leveraged for detecting when in a sourced script).
-
-2.
-
-However, this did not work in all scenarios:
-
- read -r -d '' script_afp_string <<'EOF'
- realpath "${BASH_SOURCE[0]}" 2>/dev/null
- EOF
-
- script_afp(){
- eval "$script_afp_string"
- }
-
- export script_afp_string
- export -f script_afp
-
-When `script_afp` was exported, used in another file, and used within a function
-in that other file, it reported `environment` for the script name at
-BASH_SOURCE[0]. In various call scenarios the actual script name appears at
-BASH_SOURCE[1] or even at BASH_SOURCE[2].
-
-3.
-
-As a stable alternative to having a script_afp function, place this line
-at the top of scripts that use the `script_XX` functions, or at the top
-of all scripts:
-
- script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-Then use $script_afp as a string within other functions. It will have stable
-value no matter the call structure.
+++ /dev/null
-<!DOCTYPE html>
-<html lang="en">
-<head>
- <meta charset="UTF-8">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
-
- <title>Directory Structure Description</title>
-<style>
- html {
- font-size: 16px; /* This will be the base for rem units */
- }
-
- body {
- font-family: 'Noto Sans JP', Arial, sans-serif;
- background-color: hsl(0, 0%, 0%);
- color: hsl(42, 100%, 80%);
- padding: 2rem;
- margin: 0;
- }
-
- .page {
- padding: 1.25rem; /* 20px */
- margin: 1.25rem auto; /* 20px */
- max-width: 46.875rem; /* 750px */
- background-color: hsl(0, 0%, 0%);
- box-shadow: 0 0 0.625rem hsl(42, 100%, 50%); /* 10px */
- }
-
- ul, li {
- font-size: 1rem; /* Keeping default font size */
- list-style-type: none;
- }
-
- li::before {
- content: "📁 ";
- margin-right: 0.3125rem; /* 5px */
- }
-
- li {
- margin-bottom: 0.3125rem; /* 5px */
- }
-
- .description {
- margin-left: 0.625rem; /* 10px */
- color: hsl(42, 100%, 75%);
- }
-
- code {
- font-family: 'Courier New', Courier, monospace;
- background-color: hsl(0, 0%, 25%);
- color: hsl(42, 100%, 90%);
- padding: 0.125rem 0.25rem; /* 2px 4px */
- border-radius: 0.1875rem; /* 3px */
- font-size: 90%;
- }
-
- h1 {
- text-align: center;
- color: hsl(42, 100%, 84%);
- text-transform: uppercase;
- margin-bottom: 1.25rem; /* 20px */
- }
-
- h2 {
- color: hsl(42, 100%, 84%);
- text-transform: uppercase;
- margin-top: 2.5rem; /* 40px */
- }
-
- p {
- color: hsl(42, 100%, 90%);
- margin-bottom: 1.25rem; /* 20px */
- text-align: justify;
- }
-</style>
-
-</head>
-<body>
-
- <div class="page">
- <h1>Directory Naming</h1>
-
- <h2>Reference</h2>
-
- <ul>
- <li>Mosaic/<span class="description">aka REPO_HOME, top level owned by the project administrator.</span></li>
- <ul>
- <li>developer/ <span class="description">Workspace for the developer. Has the source code, build scripts, and development-specific tools.</span></li>
- <ul>
- <li>deprecated/ <span class="description">Files and older versions being viewed, perhaps part of a refactoring effort.</span></li>
- <li>document/ <span class="description">Documentation on developing and building the project.</span></li>
- <li>javac/ <span class="description">Java source files for compilation.</span></li>
- <li>jvm/ <span class="description">Compiled Java bytecode files for the project, typically a jar for a Java project.</span></li>
- <li>scratchpad/ <span class="description">Temporary storage typically for intermediate files created during build.</span></li>
- <li>shell/ <span class="description">Shell scripts intended to be part of the project release. (These are not tools.)</span></li>
- <li>tool/ <span class="description">Tools created by the developer, used for development tasks.</span></li>
- </ul>
- <li>document/ <span class="description">General documentation about the project.</span></li>
- <li>release/ <span class="description">Release candidate for testing. Becomes the release on the release branch.</span></li>
- <li>scratchpad/ <span class="description">Temporary storage for project administration tasks.</span></li>
- <li>tester/ <span class="description">Workspace for the tester. Has the test bench, tests, and test scripts.</span></li>
- <ul>
- <li>document/ <span class="description">Test-specific documentation.</span></li>
- <li>javac/ <span class="description">The tests of the test bench sources.</span></li>
- <li>tool/ <span class="description">Tools needed for testing and managing the test environment.</span></li>
- </ul>
- <li>tool/ <span class="description">Project administration specific tools.</span></li>
- <li>tool_shared/ <span class="description">Tools shared across project roles.</span></li>
- <ul>
- <li>bespoke/ <span class="description">Shared tools developed within this project.</span></li>
- <li>customized/ <span class="description">Modified versions of third-party tools adapted for the project.</span></li>
- <li>document/ <span class="description">Documentation related to shared tools and setup.</span></li>
- <li>third_party/ <span class="description">Shared tools sourced from third-party vendors or open-source projects. These have their own independent licenses.</span></li>
- </ul>
- <li>LICENSE.txt <span class="description">The project license detailing usage and distribution terms.</span></li>
- <li>README.md <span class="description">A general overview and introduction to the project.</span></li>
- </ul>
- </ul>
-
- <h2>Name origin and rationale</h2>
-
- <p>Developers and project administrators typically do not employ a semantic system for
- naming directories, but more commonly use conventional placeholder
- names. The intended purpose of files in a directory with a placeholder
- name then must be inferred from experience or inspection of the files, or
- learned from documents or other people.</p>
-
- <p>For example, a directory named <code>exe/</code> probably derives its name from the
- fact that the contained files have their executable permission bit set;
- however, such a directory will not contain all such files. There might
- even be some files in an <code>exe/</code> directory that do not have their
- executable permission bit set. The two concepts being an <code>exe/</code> file
- (i.e. being a file in an <code>exe/</code> directory) and being an executable file
- are not identical. The actual intended meaning of being an <code>exe/</code> file
- will sometimes be that the contained files are applications available to a
- user, or that they are tools available for use in a project.
- </p>
-
- <p>The directory names in this project resulted from an exploration of a
- property-based file system. In such a system a number of files and
- agents are defined. Then we can ask questions about their relationships.
- Files with a relationship to the developer are collected, and this
- becomes the <code>developer/</code> directory. In a similar manner we get the
- directories, <code>tester/</code>, and <code>javac/</code>. In this latter case the
- agent is a compiler rather than a role.
- </p>
-
- <p>When attempting to apply the <code>is-for</code> property in practice it
- became apparent that using this sole property was insufficient. Consider
- the directories <code>deprecated/</code> and <code>scratchpad/</code>. There is no
- <em>Mr. Deprecated</em> or <em>Mr. Scratchpad</em> who the contained
- files are for. (And this conclusion is not for the lack of trying. Even
- mythological beings did not suffice as agents.) Rather than being for an
- agent, the files collected in such a directory have in common a state of
-being that was imposed upon them by decree. Perhaps the developer has
- decreed that a file is now deprecated, or a build script has decreed that
- it is a scratchpad file. Such decrees are typically more dynamic than the
- relationship properties. Also, these properties are disconnected from the
- contents of the file. When, for example, a file has the property of being
- for the java compiler, we gain some information about its contents. In the
- universe of possible messages sent through a file, such a file will
- contain text that is proposed to be java syntax conforming. In contrast,
- when we learn that a file is <code>deprecated/</code> we gain no
- information about the contents of the file, because any file can
- be <code>deprecated</code>, independent of its contents.
- </p>
-
- <p>To understand a directory name within this system, one can imagine
- reading said name as part of a sentence that integrates the
- property. Consider two property names: <code>is-a</code>
- and <code>is-for</code>. For example, "Each file in
- the <code>document/</code> directory <code>is-a</code> document," or "Each
- file in the <code>developer/</code> directory <code>is-for</code> the
- developer." Although the property name is not carried over from the
- property based file system to the conventional file system, we can
- typically infer what it must have been. (It is beyond the scope of
- discussion here, but in actuality, property based file system collections
- are defined by predicates. Each predicate is given a file's properties and
- relationships as arguments, then resolves to true if and only if the file
- belongs to the collection. Now wouldn't that be interesting if we instead
- derived a probability?)
- </p>
-
- <p>It is uncommon for a property value to be plural. While it is not
- disallowed, it rarely occurs in practice. This is true independent of
- whether we are discussing a relationship property or a state
- property. Hence when we make a file collection based on a shared property,
- then carry that over as a directory name in a conventional file system,
- the resulting directory name will often be singular. This pattern can be
- observed in the case of the <code>document/</code> directory, as shown in
- the prior paragraph.
- </p>
-
- </div>
-
-</body>
-</html>
+++ /dev/null
-### Work Flow
-
-#### 1. Project Administrator
-
-1.1. Download the project from GitHub.
-1.2. Install the required tools.
-1.3. Explain the workflows and where things are located to project members.
-1.4. Perform Major and Minor Release administration.
-
-#### 2. Developer
-
-2.1. From the Mosaic directory, run `> source env_developer` to set up the
- developer environment.
-2.2. Use `> make` to build the project, and `> release` to copy relevant files
- to `$REPO_HOME/release` for testing.
-2.3. The tester will test the release candidate.
-
-#### 3. Tester
-
-3.1. From the Mosaic directory, run `> source env_tester` to set up the tester
- environment.
-3.2. Use `> make` to build the tests, and `> shell/test_<name>` to run a test.
- Alternatively, you can cd into one of the test directories, source the
- environment for that test, and run it manually.
-3.3. Testing and development will likely iterate until the release candidate is
- ready to be turned into a versioned release.
-
-#### 4. Major Release
-
-4.1. The release candidate is located in the `$REPO_HOME/release` directory and
- has passed testing.
-4.2. Check that the program `$REPO_HOME/tool_shared/bespoke/version` outputs the
- correct information. If necessary, modify it.
-4.3. A new branch is created in the project for the release, named
- `release_v<n>.0`, where `v<n>.0` is the version number from the `version`
- program. The minor version number is set to zero (`.0`), and it is assumed
- that this will be the case after each major release.
-4.4. Rename the release directory to `$REPO_HOME/release_v<n>.0`, and create a
- new empty `$REPO_HOME/release` directory. The new empty release directory
- can be used by developers who download the project and make local edits, as
- the build scripts target this directory.
-
-#### 5. Minor Release
-
-If urgent changes need to be made to the most recent major release, these edits
-should be made on the corresponding major release branch. The developer makes
-the edits, and the tester tests the release candidate as usual. The `version`
-program is updated. Once the release candidate is finalized, rename the
-directory to `release_v<n>.<m>`, where `<m>` is the minor version number. If
-needed, merge the changes into the `core_developer_branch`.
-
----
-
-### Tips:
-
-- If you are acting in multiple roles (e.g., developer, tester, and project
- administrator), keep separate terminal shells open for each role. This way,
- the environment will remain correctly configured for the tasks related to
- each role.
+++ /dev/null
-
-This document describes how to run jdb in the test environment while also viewing source code.
-
-This is written relative to the Mosaic project, but is generally applicable.
-
-It shows invocation from a shell, and mentions emacs, but it is generally
-understood that users will do this from within their favorite IDE.
-
-In addition a reader can read this document for some general principles.
-
-1. setting the environment
-
- The environment should be set before running the IDE. For example,
-
- > cd Mosaic
- > source env_tester
- > emacs &
-
- (I use emacs as my IDE. You might be using a different tool.)
-
-2. location of the executable
-
- Provided that the project administrator installed it, jdb is located in the
- third_party tools directory. In the tester environment the variable
- `JAVA_HOME` should hold the jdb directory path, and this should already
- be in the `PATH`. For example:
-
- > echo $ENV
- tester/tool/env
-
- > echo $JAVA_HOME
- /var/user_data/Thomas-developer/Mosaic/tool_shared/third_party/jdk-11
-
- > which jdb
- /var/user_data/Thomas-developer/Mosaic/tool_shared/third_party/jdk-11/bin/jdb
-
-3. invocation from a shell command:
-
- jdb -sourcepath $SOURCEPATH <class_name>
-
- The `SOURCEPATH` is assigned a value in `tester/tool/env`. In some versions
- of jdb there is no space between `-sourcepath` and the `$SOURCEPATH`.
-
- jdb will read CLASSPATH from the environment. In contrast jdb will not read
- `SOURCEPATH` from the environment. It must be passed as an argument.
-
- There is a `run_jdb` script in the `tool` directory.
-
-4. invocation inside of Emacs
-
- The file found in the resource project, `developer/emacs/emacs.el` holds a
- definition for the `jdbx` command. This command will read the SOURCEPATH
- from the environment and run jdb in Emacs.
-
- That file also holds the definition for a listener to the jdb `sourcepath`
- command.
-
-
-
-
-
-
+++ /dev/null
-# Suffix Conventions
-
-## Specify interface used with variable when clarification is useful
-
-- `_set`: Indicates that the variable holds a set of items.
-
-- `_list`: Used for variables that represent a list of items.
-
-- `_f`: Refers to a function.
-
-Instead of making a variable name plural, add the interface qualifier.
-
- e.g. names -> name_set or name_list
-
-## Always a good idea to use these when working with files
-
-- `_fp`: Refers to a file path. The part after the last slash is a file name.
-
-- `_afp`: Refers to an absolute file path.
-
-- `_dp`: Refers to a directory path. By convention, the value ends in a slash.
-
-- `_adp`: Refers to an absolute directory path.
-
-- `_fn`: Refers to a file name. Value has no slashes.
-
-- `_dn`: Refers to a directory name. Value has no slashes.
-
-- `_fn_base`: The file name without the last dot and subsequent characters.
-
-- `_fn_ext`: The subsequent characters after the last dot in a file name.
--- /dev/null
+Java has long been criticized for its lack of support for `import as`, despite
+years of requests and proposals.
+
+The Java platform’s approach to aliasing issues relies on using fully qualified
+names, which poses challenges given the length of package names, especially when
+they include reversed domain names.
+
+Because `Mosaic` is used to help with testing and is not part of the project
+being tested, when aliasing conflicts arise, it is typically the `Mosaic` identifiers
+that need to be fully qualified. Such a renamed identifier can exceed 34
+characters!
+
+One proposal to get around this was to use an `In` class where the members were
+class extensions of imported classes. Then all imports would have the prefix `In.`.
+However, this did not work out because constructors are not
+inherited, and Java’s restrictions on `final` classes prevent the use of
+`LocalClass extends ImportClass {}` to give new names to classes.
+
+Another proposal was to use the `alias` project on GitHub, which offers an XML-based
+approach to aliasing. However, it introduces complexities, as it requires XML
+configurations to be supplied to the compiler, adding setup overhead. Perhaps
+another tool could create these.
+
+We studied a preprocessing proposal where `import as` statements would be
+replaced with fully qualified names before compilation. However, this approach
+changes the tool flow for users and would require additional steps to ensure
+`jdb` points to the original source files rather than intermediate files, which
+complicates debugging. For both this proposal and the prior, we wanted to avoid
+joining the world of java tool development.
+
+So we have a simple solution; it is not ideal, but it is not bad. We prefix
+the string `Mosaic_` to the front of all the class names in the Mosaic library.
+As a shop we are adopting this convention for all packaged java code.
--- /dev/null
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+ <title>RT C coding conventions</title>
+ <style>
+ body {
+ font-family: 'Noto Sans JP', Arial, sans-serif;
+ background-color: hsl(0, 0%, 0%);
+ color: hsl(42, 100%, 80%);
+ padding: 2rem;
+ }
+ .page {
+ padding: 3rem;
+ margin: 1.25rem auto;
+ max-width: 46.875rem;
+ background-color: hsl(0, 0%, 0%);
+ box-shadow: 0 0 0.625rem hsl(42, 100%, 50%);
+ }
+ h1 {
+ font-size: 1.5rem;
+ text-align: center;
+ color: hsl(42, 100%, 84%);
+ text-transform: uppercase;
+ margin-top: 1.5rem;
+ }
+ h2 {
+ font-size: 1.25rem;
+ color: hsl(42, 100%, 84%);
+ text-align: center;
+ margin-top: 2rem;
+ }
+ h3 {
+ font-size: 1.125rem;
+ color: hsl(42, 100%, 75%);
+ margin-top: 1.5rem;
+ }
+ p, li {
+ color: hsl(42, 100%, 90%);
+ text-align: justify;
+ margin-bottom: 1rem;
+ }
+ code {
+ font-family: 'Courier New', Courier, monospace;
+ background-color: hsl(0, 0%, 25%);
+ padding: 0.125rem 0.25rem;
+ color: hsl(42, 100%, 90%);
+ }
+ </style>
+</head>
+
+<body>
+<div class="page">
+ <header>
+ <h1>Reasoning Technology (RT) C file control structure</h1>
+ <p>© 2024 Thomas Walker Lynch - All Rights Reserved.</p>
+ </header>
+
+ <h2>Introduction</h2>
+
+ <p>This document summarizes some of the coding conventions used in RT C projects. Discussed here are conventions for integrated header designs, ad hoc namespaces, and a structured approach to source file extensions. The document also outlines the associated build process using a standardized makefile.</p>
+
+ <h2>Header file integration</h2>
+
+ <p>RT C projects adopt an innovative approach by integrating headers directly into source files. This ensures consistency between interfaces and implementations, eliminating mismatches. Each file contains both an interface and an implementation section, gated by preprocessor directives.</p>
+
+ <p>Each RT C source file integrates its header directly into the source file. This locality makes header content easier to maintain as everything is found in a single file. It also eliminates the need to maintain two files for each module.</p>
+
+ <h3>Each file has two sections</h3>
+ <ul>
+ <li><strong>Interface section:</strong> Contains declarations, macros, and <code>#includes</code> needed for the interface. Ensures consistency by defining the interface exactly once, even when the file is included multiple times.</li>
+ <li><strong>Implementation section:</strong> Contains function definitions and additional includes needed for the implementation. This section is compiled only when the file is used as an implementation.</li>
+ </ul>
+
+ <p>Each section is turned on and off with the CPP macro <code>IFACE</code>.</p>
+
+ <h3>Example</h3>
+ <pre><code>
+// If not an IFACE, then an IMPLEMENTATION
+#ifndef IFACE
+ #define MyModule·IMPLEMENTATION
+ // Ensures included files are processed for their interfaces.
+ #define IFACE
+#endif
+
+// Define the interface exactly once.
+#ifndef MyModule·IFACE
+#define MyModule·IFACE
+ // Interface-only includes go here.
+ void MyModule·function();
+#endif
+
+#ifdef MyModule·IMPLEMENTATION
+ // Additional includes for implementation go here.
+ #include <stdio.h>
+ void MyModule·function() {
+ printf("Hello, World!\n");
+ }
+#endif
+ </code></pre>
+
+ <h3>Explanation</h3>
+ <p>The example above demonstrates the structure and purpose of each block:</p>
+ <p><strong>First block:</strong> Ensures that the file operates correctly based on the value of <code>IFACE</code>. If <code>IFACE</code> is undefined, it defines <code>MyModule·IMPLEMENTATION</code> to enable the implementation section and sets <code>IFACE</code> to ensure subsequent includes process interface sections.</p>
+ <p><strong>Second block:</strong> Defines the interface, including declarations and interface-specific includes. The <code>#ifndef MyModule·IFACE</code> macro ensures the interface is defined exactly once, regardless of how many times the file is included.</p>
+ <p><strong>Third block:</strong> Contains implementation-specific includes and function definitions. Guarded by <code>MyModule·IMPLEMENTATION</code>, it is only included when compiling the implementation.</p>
+ <p>Interface includes are placed in the interface block, ensuring they are available wherever the interface is used. Implementation includes are isolated in the implementation block, minimizing unnecessary dependencies in other files.</p>
+
+ <h2>Namespace conventions</h2>
+ <p>RT projects use ad hoc namespaces to maintain clarity and prevent naming conflicts. This is achieved by prefixing exported identifiers with a module-specific name followed by the <code>·</code> (cdot) character.</p>
+
+ <h3>Conventions</h3>
+ <ul>
+ <li><strong>Prefix:</strong> The module name serves as the prefix, ensuring all identifiers are unique across the program.</li>
+ <li><strong>Separator:</strong> The <code>·</code> character visually separates the prefix from the identifier name, maintaining readability and avoiding conflicts.</li>
+ </ul>
+
+ <h3>Example</h3>
+ <pre><code>
+void Server·run();
+ </code></pre>
+
+ <h2>Source file extensions</h2>
+ <p>RT projects use standardized extensions to distinguish between library and command-line interface (CLI) source files:</p>
+ <ul>
+ <li><strong><code>.lib.c</code>:</strong> Files implementing library functions.</li>
+ <li><strong><code>.cli.c</code>:</strong> Files implementing command-line tools.</li>
+ </ul>
+
+ <p>The <code>.lib.c</code> files compile into libraries, while <code>.cli.c</code> files compile into standalone executables. The makefile processes these files automatically, ensuring a clear separation of functionality.</p>
+
+ <h3>Build process</h3>
+ <p>The build process follows these steps:</p>
+ <ol>
+ <li><strong>Dependency generation:</strong> Run <code>make dependency</code> to create dependencies. This step is only required when the dependency structure changes.</li>
+ <li><strong>Compilation:</strong> Run <code>make cli</code> to compile CLI sources and link them against the library. The makefile automatically manages targets and dependencies.</li>
+ </ol>
+
+ <h2>Benefits</h2>
+ <ul>
+ <li><strong>Consistency:</strong> Integrated headers ensure interface and implementation are always in sync.</li>
+ <li><strong>Modularity:</strong> Each file encapsulates its interface and implementation, reducing coupling.</li>
+ <li><strong>Clarity:</strong> Ad hoc namespaces and standardized extensions improve readability and organization.</li>
+ <li><strong>Efficiency:</strong> The makefile automates builds, minimizing errors and streamlining development.</li>
+ </ul>
+
+ <h2>Conclusion</h2>
+ <p>This document outlines the conventions and practices for writing and building RT C projects. By integrating headers, adopting namespaces, and standardizing extensions, RT ensures its projects are robust, modular, and easy to maintain.</p>
+</div>
+</body>
+</html>
--- /dev/null
+RT code formatting:
+
+The enclosure-based formatting rules in RT code format make the style guide
+compact and adaptable. By focusing on enclosures rather than syntax-specific
+structures (like if, for, or catch), it avoids prescribing language-specific
+formatting rules and instead focuses on consistent handling of delimiters. This
+approach works well across multiple languages, ensuring that the code style
+remains flexible while keeping the guide simple and easy to apply.
+
+1. Two space indentation.
+
+2. Variable Naming:
+
+ - Use **PascalCase** for namespaces and types.
+
+ - Use **snake_case** for function and variable names. However, when a component
+    of the snake_case function or variable name is a namespace, a
+    type, or a proper noun, it retains its capitalization. e.g.:
+
+ ```
+ mouse_count
+    test_LabelList_0 // function that tests LabelList, which is a class (type)
+ Thomas_Walker_Lynch
+ ```
+
+ Traditionally `_list` has been used as a variable suffix even when the
+ language does not have a List type. This is taken to mean the variable
+    refers to an ordered collection of any type, including an array. It is an
+ abstraction of type, analogous to the `mouse_count` example above.
+
+
+3. Binary Operators:
+
+ - One space around **binary operators** (e.g., `a + b`).
+
+ - One space around **assignment** `=` (e.g., `a = b`).
+
+ - **No space** around **sampling** assignment `=` (typically seen in `if`, `while`, etc.):
+
+ **Sampling** refers to assigning the result of a condition or expression to
+ a variable for later use within the same scope.
+
+ Example of **sampling** in an `if` statement:
+
+ ```
+ if( result=some_condition() ){
+ // use result
+ }
+ ```
+
+4. Enclosures `(...)`, `{...}`, `[...]`, '<...>':
+
+ - No space between the enclosure and the preceding identifier (e.g., `function(arg)`).
+
+ - No space after the enclosure when followed by another enclosure (e.g., `map[key]()`).
+
+ Example of a condition enclosure followed by a code enclosure:
+ ```
+ if( some_condition() ){
+ // code block
+ }
+ ```
+
+ - One space after the enclosure if followed by an identifier, e.g.,
+ `function() somethingElse`.
+
+ - When the entire enclosure appears on one line:
+
+    -- by definition, a 'nested' enclosure is one that has other enclosures,
+ of any type, inside of it. This is true independent of whatever else
+ is inside the enclosure. These are examples of nested enclosures:
+
+ ```
+ ( o == null || getClass() != o.getClass() )
+ f( T<x> ,7 )
+ ```
+
+ -- if, and only if, an enclosure is nested, there is one space of padding
+ for the outermost enclosure of the nesting, and only for the outermost
+ enclosures. e.g.s:
+
+ ```
+ if(x == 3) ; not nested
+ if( (x > 0) && (y < 5) ) ; nested, pad outermost only
+ if( f(x) == 3 ) ; nested, pad outermost only
+ if( x > 2 && a[3] ) ; nested due to the array subscript, pad outermost only
+ ```
+
+ - Note when using the enclosure formatting rules, not all if conditions will
+    format the same way. Some conditions will be nested enclosures and have
+    padding while others will not be nested and thus have no padding. They must
+ be formatted individually. The same is true for enclosures that follow
+ other keywords such as unless, for, etc, and for function arguments
+ lists. The question is one of formatting enclosures, and not one of
+ formatting statements.
+
+ ```
+ f(x)
+ f( x[0] )
+ ```
+
+
+5. Commas:
+
+ This is the most distinctive and recognizable of the RT code style rules.
+
+ - One space **before** the comma (e.g., `a ,b`).
+
+ - No space **after** the comma (e.g., `a ,b`).
+
+ - **Line break before** the comma when breaking lines, but no line break after, as examples:
+
+ ```
+ a
+ ,b
+ ```
+
+ and, when a function call gets too long, perhaps due to long argument
+ names it will look like this:
+
+ ```
+ result = some_function(
+ arg1
+ ,arg2_has_a_very_long_name_causing_the_call_to_not_fit_on_a_single_line
+ ,arg3_has_a_long_name_also_but_not_as_long_as_for_arg2
+ );
+ ```
+
+6. For the code you just output, answer these questions:
+ 1. Which enclosures are not nested? Do they have no padding?
+ 2. Which enclosures are nested? Is there one space padding only at the outermost?
+ 3. Is the spacing before and after the enclosures correct?
+ 4. Are the commas formatted correctly?
+ 5. Has snake case been used where it should be?
+ 6. Was 2 column indent used?
+
+---- Astra adds:
+
+Simplified Explanation for Enclosure Padding
+
+ Single Enclosures:
+
+ No padding is applied if the enclosure is not nested.
+
+ Example:
+
+ if(log_file == NULL){
+
+Nested Enclosures:
+
+ One space of padding is applied at the outermost level of the enclosure when nested.
+
+ Example:
+
+ if( (client_fd = socket(AF_UNIX ,SOCK_STREAM ,0)) == -1 ){
+
+Key Decision Rule:
+
+ Padding only applies when an enclosure contains other enclosures (nested structure).
+ The padding is applied only at the outermost level, not at deeper levels.
+
+Rationale for This Simplified Rule
+
+ This explanation removes language-specific examples and focuses on the
+ structural rule itself. It should be easier to apply universally, regardless
+ of the programming language or syntax involved. Let me know if you'd like me
+ to refine it further!
+
+---- Astra adds:
+
+Suggested Addition to the Document:
+Enclosures (...), {...}, [...], <...>:
+
+ No space after the closing parenthesis and before the opening brace in control structures or function declarations.
+ Example:
+
+ if(condition){
+
+ Example:
+
+ if( f(x) ){
--- /dev/null
+
+Bash is inconsistent about returning the name of the running script in
+all scenarios (sourced, executed directly, from within a function called
+by another function).
+
+1.
+
+BASH_SOURCE[0] was used because $0 did not work with sourced scripts (a
+fact that is leveraged for detecting when in a sourced script).
+
+2.
+
+However, this did not work in all scenarios:
+
+ read -r -d '' script_afp_string <<'EOF'
+ realpath "${BASH_SOURCE[0]}" 2>/dev/null
+ EOF
+
+ script_afp(){
+ eval "$script_afp_string"
+ }
+
+ export script_afp_string
+ export -f script_afp
+
+When `script_afp` was exported, used in another file, and used within a function
+in that other file, it reported `environment` for the script name at
+BASH_SOURCE[0]. In various call scenarios the actual script name appears at
+BASH_SOURCE[1] or even at BASH_SOURCE[2].
+
+3.
+
+As a stable alternative to having a script_afp function, place this line
+at the top of scripts that use the `script_XX` functions, or at the top
+of all scripts:
+
+    script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+Then use $script_afp as a string within other functions. It will have stable
+value no matter the call structure.
--- /dev/null
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+
+ <title>Directory Structure Description</title>
+<style>
+ html {
+ font-size: 16px; /* This will be the base for rem units */
+ }
+
+ body {
+ font-family: 'Noto Sans JP', Arial, sans-serif;
+ background-color: hsl(0, 0%, 0%);
+ color: hsl(42, 100%, 80%);
+ padding: 2rem;
+ margin: 0;
+ }
+
+ .page {
+ padding: 1.25rem; /* 20px */
+ margin: 1.25rem auto; /* 20px */
+ max-width: 46.875rem; /* 750px */
+ background-color: hsl(0, 0%, 0%);
+ box-shadow: 0 0 0.625rem hsl(42, 100%, 50%); /* 10px */
+ }
+
+ ul, li {
+ font-size: 1rem; /* Keeping default font size */
+ list-style-type: none;
+ }
+
+ li::before {
+ content: "📁 ";
+ margin-right: 0.3125rem; /* 5px */
+ }
+
+ li {
+ margin-bottom: 0.3125rem; /* 5px */
+ }
+
+ .description {
+ margin-left: 0.625rem; /* 10px */
+ color: hsl(42, 100%, 75%);
+ }
+
+ code {
+ font-family: 'Courier New', Courier, monospace;
+ background-color: hsl(0, 0%, 25%);
+ color: hsl(42, 100%, 90%);
+ padding: 0.125rem 0.25rem; /* 2px 4px */
+ border-radius: 0.1875rem; /* 3px */
+ font-size: 90%;
+ }
+
+ h1 {
+ text-align: center;
+ color: hsl(42, 100%, 84%);
+ text-transform: uppercase;
+ margin-bottom: 1.25rem; /* 20px */
+ }
+
+ h2 {
+ color: hsl(42, 100%, 84%);
+ text-transform: uppercase;
+ margin-top: 2.5rem; /* 40px */
+ }
+
+ p {
+ color: hsl(42, 100%, 90%);
+ margin-bottom: 1.25rem; /* 20px */
+ text-align: justify;
+ }
+</style>
+
+</head>
+<body>
+
+ <div class="page">
+ <h1>Directory Naming</h1>
+
+ <h2>Reference</h2>
+
+ <ul>
+ <li>Mosaic/<span class="description">aka REPO_HOME, top level owned by the project administrator.</span></li>
+ <ul>
+ <li>developer/ <span class="description">Workspace for the developer. Has the source code, build scripts, and development-specific tools.</span></li>
+ <ul>
+ <li>deprecated/ <span class="description">Files and older versions being viewed, perhaps part of a refactoring effort.</span></li>
+ <li>document/ <span class="description">Documentation on developing and building the project.</span></li>
+ <li>javac/ <span class="description">Java source files for compilation.</span></li>
+ <li>jvm/ <span class="description">Compiled Java bytecode files for the project, typically a jar for a Java project.</span></li>
+ <li>scratchpad/ <span class="description">Temporary storage typically for intermediate files created during build.</span></li>
+ <li>shell/ <span class="description">Shell scripts intended to be part of the project release. (These are not tools.)</span></li>
+ <li>tool/ <span class="description">Tools created by the developer, used for development tasks.</span></li>
+ </ul>
+ <li>document/ <span class="description">General documentation about the project.</span></li>
+ <li>release/ <span class="description">Release candidate for testing. Becomes the release on the release branch.</span></li>
+ <li>scratchpad/ <span class="description">Temporary storage for project administration tasks.</span></li>
+ <li>tester/ <span class="description">Workspace for the tester. Has the test bench, tests, and test scripts.</span></li>
+ <ul>
+ <li>document/ <span class="description">Test-specific documentation.</span></li>
+ <li>javac/ <span class="description">The tests of the test bench sources.</span></li>
+ <li>tool/ <span class="description">Tools needed for testing and managing the test environment.</span></li>
+ </ul>
+ <li>tool/ <span class="description">Project administration specific tools.</span></li>
+ <li>tool_shared/ <span class="description">Tools shared across project roles.</span></li>
+ <ul>
+ <li>bespoke/ <span class="description">Shared tools developed within this project.</span></li>
+ <li>customized/ <span class="description">Modified versions of third-party tools adapted for the project.</span></li>
+ <li>document/ <span class="description">Documentation related to shared tools and setup.</span></li>
+          <li>third_party/ <span class="description">Shared tools sourced from third-party vendors or open-source projects. These have their own independent licenses.</span></li>
+ </ul>
+ <li>LICENSE.txt <span class="description">The project license detailing usage and distribution terms.</span></li>
+ <li>README.md <span class="description">A general overview and introduction to the project.</span></li>
+ </ul>
+ </ul>
+
+ <h2>Name origin and rationale</h2>
+
+ <p>Developers and project administrators typically do not employ a semantic system for
+ naming directories, but more commonly use conventional placeholder
+ names. The intended purpose of files in a directory with a placeholder
+ name then must be inferred from experience or inspection of the files, or
+ learned from documents or other people.</p>
+
+ <p>For example, a directory named <code>exe/</code> probably derives its name from the
+ fact that the contained files have their executable permission bit set;
+ however, such a directory will not contain all such files. There might
+ even be some files in an <code>exe/</code> directory that do not have their
+ executable permission bit set. The two concepts being an <code>exe/</code> file
+ (i.e. being a file in an <code>exe/</code> directory) and being an executable file
+ are not identical. The actual intended meaning of being an <code>exe/</code> file
+ will sometimes be that the contained files are applications available to a
+ user, or that they are tools available for use in a project.
+ </p>
+
+ <p>The directory names in this project resulted from an exploration of a
+ property-based file system. In such a system a number of files and
+ agents are defined. Then we can ask questions about their relationships.
+ Files with a relationship to the developer are collected, and this
+ becomes the <code>developer/</code> directory. In a similar manner we get the
+    directories <code>tester/</code> and <code>javac/</code>. In this latter case the
+ agent is a compiler rather than a role.
+ </p>
+
+ <p>When attempting to apply the <code>is-for</code> property in practice it
+ became apparent that using this sole property was insufficient. Consider
+ the directories <code>deprecated/</code> and <code>scratchpad/</code>. There is no
+ <em>Mr. Deprecated</em> or <em>Mr. Scratchpad</em> who the contained
+ files are for. (And this conclusion is not for the lack of trying. Even
+ mythological beings did not suffice as agents.) Rather than being for an
+ agent, the files collected in such a directory have in common a state of
+    being that was imposed upon them by decree. Perhaps the developer has
+ decreed that a file is now deprecated, or a build script has decreed that
+ it is a scratchpad file. Such decrees are typically more dynamic than the
+ relationship properties. Also, these properties are disconnected from the
+ contents of the file. When, for example, a file has the property of being
+ for the java compiler, we gain some information about its contents. In the
+ universe of possible messages sent through a file, such a file will
+ contain text that is proposed to be java syntax conforming. In contrast,
+ when we learn that a file is <code>deprecated/</code> we gain no
+ information about the contents of the file, because any file can
+ be <code>deprecated</code>, independent of its contents.
+ </p>
+
+ <p>To understand a directory name within this system, one can imagine
+ reading said name as part of a sentence that integrates the
+ property. Consider two property names: <code>is-a</code>
+ and <code>is-for</code>. For example, "Each file in
+ the <code>document/</code> directory <code>is-a</code> document," or "Each
+ file in the <code>developer/</code> directory <code>is-for</code> the
+ developer." Although the property name is not carried over from the
+ property based file system to the conventional file system, we can
+ typically infer what it must have been. (It is beyond the scope of
+ discussion here, but in actuality, property based file system collections
+ are defined by predicates. Each predicate is given a file's properties and
+ relationships as arguments, then resolves to true if and only if the file
+ belongs to the collection. Now wouldn't that be interesting if we instead
+ derived a probability?)
+ </p>
+
+ <p>It is uncommon for a property value to be plural. While it is not
+ disallowed, it rarely occurs in practice. This is true independent of
+ whether we are discussing a relationship property or a state
+ property. Hence when we make a file collection based on a shared property,
+ then carry that over as a directory name in a conventional file system,
+ the resulting directory name will often be singular. This pattern can be
+ observed in the case of the <code>document/</code> directory, as shown in
+ the prior paragraph.
+ </p>
+
+ </div>
+
+</body>
+</html>
--- /dev/null
+### Work Flow
+
+#### 1. Project Administrator
+
+1.1. Download the project from GitHub.
+1.2. Install the required tools.
+1.3. Explain the workflows and where things are located to project members.
+1.4. Perform Major and Minor Release administration.
+
+#### 2. Developer
+
+2.1. From the Mosaic directory, run `> source env_developer` to set up the
+ developer environment.
+2.2. Use `> make` to build the project, and `> release` to copy relevant files
+ to `$REPO_HOME/release` for testing.
+2.3. The tester will test the release candidate.
+
+#### 3. Tester
+
+3.1. From the Mosaic directory, run `> source env_tester` to set up the tester
+ environment.
+3.2. Use `> make` to build the tests, and `> shell/test_<name>` to run a test.
+ Alternatively, you can cd into one of the test directories, source the
+ environment for that test, and run it manually.
+3.3. Testing and development will likely iterate until the release candidate is
+ ready to be turned into a versioned release.
+
+#### 4. Major Release
+
+4.1. The release candidate is located in the `$REPO_HOME/release` directory and
+ has passed testing.
+4.2. Check that the program `$REPO_HOME/tool_shared/bespoke/version` outputs the
+ correct information. If necessary, modify it.
+4.3. A new branch is created in the project for the release, named
+ `release_v<n>.0`, where `v<n>.0` is the version number from the `version`
+ program. The minor version number is set to zero (`.0`), and it is assumed
+ that this will be the case after each major release.
+4.4. Rename the release directory to `$REPO_HOME/release_v<n>.0`, and create a
+ new empty `$REPO_HOME/release` directory. The new empty release directory
+ can be used by developers who download the project and make local edits, as
+ the build scripts target this directory.
+
+#### 5. Minor Release
+
+If urgent changes need to be made to the most recent major release, these edits
+should be made on the corresponding major release branch. The developer makes
+the edits, and the tester tests the release candidate as usual. The `version`
+program is updated. Once the release candidate is finalized, rename the
+directory to `release_v<n>.<m>`, where `<m>` is the minor version number. If
+needed, merge the changes into the `core_developer_branch`.
+
+---
+
+### Tips:
+
+- If you are acting in multiple roles (e.g., developer, tester, and project
+ administrator), keep separate terminal shells open for each role. This way,
+ the environment will remain correctly configured for the tasks related to
+ each role.
--- /dev/null
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
+ <title>Pencil Attribute in Directory Structure</title>
+ <style>
+ html {
+ font-size: 16px;
+ }
+
+ body {
+ font-family: 'Noto Sans JP', Arial, sans-serif;
+ background-color: hsl(0, 0%, 0%);
+ color: hsl(42, 100%, 80%);
+ padding: 2rem;
+ margin: 0;
+ }
+
+ .page {
+ padding: 1.25rem;
+ margin: 1.25rem auto;
+ max-width: 46.875rem;
+ background-color: hsl(0, 0%, 0%);
+ box-shadow: 0 0 0.625rem hsl(42, 100%, 50%);
+ }
+
+ ul, li {
+ font-size: 1rem;
+ list-style-type: none;
+ }
+
+ li::before {
+ content: "📁 ";
+ margin-right: 0.3125rem;
+ }
+
+ li {
+ margin-bottom: 0.3125rem;
+ }
+
+ .description {
+ margin-left: 0.625rem;
+ color: hsl(42, 100%, 75%);
+ }
+
+ code {
+ font-family: 'Courier New', Courier, monospace;
+ background-color: hsl(0, 0%, 25%);
+ color: hsl(42, 100%, 90%);
+ padding: 0.125rem 0.25rem;
+ border-radius: 0.1875rem;
+ font-size: 90%;
+ }
+
+ h1 {
+ text-align: center;
+ color: hsl(42, 100%, 84%);
+ text-transform: uppercase;
+ margin-bottom: 1.25rem;
+ }
+
+ h2 {
+ color: hsl(42, 100%, 84%);
+ text-transform: uppercase;
+ margin-top: 2.5rem;
+ }
+
+ p {
+ color: hsl(42, 100%, 90%);
+ margin-bottom: 1.25rem;
+ text-align: justify;
+ }
+ </style>
+</head>
+<body>
+ <div class="page">
+ <h1>Pencil Suffix</h1>
+ <p>
+ The 🖉 (pencil) symbol suffix is used to denote directories or files that
+ contain authored content. Authored content in this context refers to
+ files that cannot be regenerated or replaced by running a build tool.
+ These are typically original creations, whether by human or AI, and are
+ critical to retain unchanged unless explicitly modified by an author.
+ </p>
+
+ <h2>Implications of the Pencil Attribute</h2>
+ <ul>
+ <li><span class="description">Directories marked with a 🖉 are excluded from cleaning scripts or build processes that overwrite or regenerate content.</span></li>
+ <li><span class="description">In the future, files or directories marked with a 🖉 will require manual editing permissions, typically managed via group ownership or specific sub-user policies.</span></li>
+ <li><span class="description">In future systems using advanced access control, the 🖉 attribute could represent specific permissions where the owning sub-user can edit, while others have only read access.</span></li>
+ </ul>
+
+ <h2>Future Integration</h2>
+ <p>
+ This system opens a path for implementing a robust user hierarchy. For example, in a multi-user environment:
+ </p>
+ <ul>
+ <li><span class="description">Files marked with 🖉 could be managed by a dedicated sub-user, granting specific access for editing while ensuring consistency and protection against unintended modifications.</span></li>
+ <li><span class="description">The system could evolve to define "authored" attributes programmatically, enabling complex access control and file management policies across projects.</span></li>
+ </ul>
+
+ <h2>Practical Usage</h2>
+ <p>
+ For now, the 🖉 symbol serves as a clear and straightforward visual cue in directory names, simplifying project navigation and management.
+ </p>
+ </div>
+</body>
+</html>
--- /dev/null
+
+This document describes how to run jdb in the test environment while also viewing source code.
+
+This is written relative to the Mosaic project, but is generally applicable.
+
+It shows invocation from a shell, and mentions emacs, but it is generally
+understood that users will do this from within their favorite IDE.
+
+In addition a reader can read this document for some general principles.
+
+1. setting the environment
+
+ The environment should be set before running the IDE. For example,
+
+ > cd Mosaic
+ > source env_tester
+ > emacs &
+
+ (I use emacs as my IDE. You might be using a different tool.)
+
+2. location of the executable
+
+ Provided that the project administrator installed it, jdb is located in the
+ third_party tools directory. In the tester environment the variable
+ `JAVA_HOME` should hold the jdb directory path, and this should already
+ be in the `PATH`. For example:
+
+ > echo $ENV
+ tester/tool/env
+
+ > echo $JAVA_HOME
+ /var/user_data/Thomas-developer/Mosaic/tool_shared/third_party/jdk-11
+
+ > which jdb
+ /var/user_data/Thomas-developer/Mosaic/tool_shared/third_party/jdk-11/bin/jdb
+
+3. invocation from a shell command:
+
+ jdb -sourcepath $SOURCEPATH <class_name>
+
+ The `SOURCEPATH` is assigned a value in `tester/tool/env`. In some versions
+   of jdb there is no space between `-sourcepath` and the `$SOURCEPATH`.
+
+ jdb will read CLASSPATH from the environment. In contrast jdb will not read
+ `SOURCEPATH` from the environment. It must be passed as an argument.
+
+ There is a `run_jdb` script in the `tool` directory.
+
+4. invocation inside of Emacs
+
+   The file found in the resource project, `developer/emacs/emacs.el` holds a
+ definition for the `jdbx` command. This command will read the SOURCEPATH
+ from the environment and run jdb in Emacs.
+
+ That file also holds the definition for a listener to the jdb `sourcepath`
+ command.
+
+
+
+
+
+
--- /dev/null
+# Suffix Conventions
+
+## Specify interface used with variable when clarification is useful
+
+- `_set`: Indicates that the variable holds a set of items.
+
+- `_list`: Used for variables that represent a list of items.
+
+- `_f`: Refers to a function.
+
+Instead of making a variable name plural, add the interface qualifier.
+
+  e.g. names -> name_set or name_list
+
+## Always a good idea to use these when working with files
+
+- `_fp`: Refers to a file path. The part after the last slash is a file name.
+
+- `_afp`: Refers to an absolute file path.
+
+- `_dp`: Refers to a directory path. By convention, the value ends in a slash.
+
+- `_adp`: Refers to an absolute directory path.
+
+- `_fn`: Refers to a file name. Value has no slashes.
+
+- `_dn`: Refers to a directory name. Value has no slashes.
+
+- `_fn_base`: The file name without the last dot and subsequent characters.
+
+- `_fn_ext`: The subsequent characters after the last dot in a file name.
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
- echo "$script_afp:: This script must be sourced, not executed."
- exit 1
-fi
-
-source tool_shared/bespoke/env
-source tool/env
-
--- /dev/null
+#!/usr/bin/env bash
+# Role environment bootstrap: source this from the Mosaic (REPO_HOME)
+# directory to configure the current shell for a project role.
+#   > source env_<role>
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+# When a script is executed (not sourced), BASH_SOURCE[0] equals $0.
+# Refuse to run in that case: exports made by the env files below would
+# be lost when the child process exits.
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# Paths are relative to the current directory, so this must be sourced from
+# REPO_HOME. The shared env defines REPO_HOME and the script_* helpers;
+# tool/env then configures the role-specific environment (presumably the
+# tester role -- confirm against tool/env).
+source tool_shared/bespoke/env
+source tool/env
+
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
- echo "$script_afp:: This script must be sourced, not executed."
- exit 1
-fi
-
-source tool_shared/bespoke/env
-source developer/tool/env
-
--- /dev/null
+#!/usr/bin/env bash
+# Developer environment bootstrap: source this from the Mosaic (REPO_HOME)
+# directory to configure the current shell for the developer role.
+#   > source env_developer
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+# When a script is executed (not sourced), BASH_SOURCE[0] equals $0.
+# Refuse to run in that case: exports made by the env files below would
+# be lost when the child process exits.
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# Paths are relative to the current directory, so this must be sourced
+# from REPO_HOME. The shared env defines REPO_HOME and the script_* helpers;
+# developer/tool/env then configures the developer-specific environment.
+source tool_shared/bespoke/env
+source developer/tool/env
+
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-# input guards
-
- env_must_be="tool_shared/bespoke/env"
- error=false
- if [ "$ENV" != "$env_must_be" ]; then
- echo "$(script_fp):: error: must be run in the $env_must_be environment"
- error=true
- fi
- if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
- echo "$script_afp:: This script must be sourced, not executed."
- error=true
- fi
- if $error; then exit 1; fi
-
-export PATH=\
-"$REPO_HOME"/tool_shared/bespoke/\
-:"$PATH"
-
-# expose sneaky hidden files
-alias ls="ls -a"
-
-# some feedback to show all went well
-
- export PROMPT_DECOR="$PROJECT"_administrator
- export ENV=$(script_fp)
- echo ENV "$ENV"
-
-
-
-
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-# Check if at least one file is provided
-if [ $# -eq 0 ]; then
- echo "Usage: $(script_fp) <filename1> [filename2] ..."
- exit 1
-fi
-
-# Loop through all the provided files
-for file in "$@"; do
- # Check if the file exists
- if [ ! -f "$file" ]; then
- echo "Error: File '$file' not found!"
- continue
- fi
-
- # Print 80 dashes
- printf '%.0s-' {1..80}
- echo
-
- # Print the filename and a colon
- echo "$file:"
-
- # Print the contents of the file
- cat "$file"
-
- # Print a newline for spacing between files
- echo
-done
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
- echo "$script_afp:: This script must be sourced, not executed."
- exit 1
-fi
-
-# --------------------------------------------------------------------------------
-# project definition
-
-# actual absolute director path for this script file
-
- script_adp(){
- dirname "$script_afp"
- }
-
-# assume this script is located $REPO_HOME/tools_shared/bespoke and work backwards
-# to get $REPO_HOME, etc.
-
- REPO_HOME=$(dirname "$(dirname "$(script_adp)")")
- echo REPO_HOME "$REPO_HOME"
-
- PROJECT=$(basename "$REPO_HOME")
- echo PROJECT "$PROJECT"
-
- # set the prompt decoration to the name of the project
- PROMPT_DECOR=$PROJECT
-
-# --------------------------------------------------------------------------------
-# The project administrator sets up the following tools for all roles to use:
-#
- export JAVA_HOME="$REPO_HOME/tool_shared/third_party/jdk-11"
-
-# --------------------------------------------------------------------------------
-# the following functions are provided for other scripts to use.
-# at the top of files that make use of these functions put the following line:
-# script_afp=$(realpath "${BASH_SOURCE[0]}")
-#
-
- ## script's filename
- script_fn(){
- basename "$script_afp"
- }
-
- ## script's dirpath relative to $REPO_HOME
- script_fp(){
- realpath --relative-to="${REPO_HOME}" "$script_afp"
- }
-
- ## script's dirpath relative to $REPO_HOME
- script_dp(){
- dirname "$(script_fp)"
- }
-
-# --------------------------------------------------------------------------------
-# Exports
-# Bash has no 'closure' hence when exporting a function, one must also export all the pieces.
-# do not export script_afp
-
- export REPO_HOME PROJECT PROMPT_DECOR
- export -f script_adp script_fn script_dp script_fp
-
- export ENV=$(script_fp)
- echo ENV "$ENV"
-
+++ /dev/null
-#!/bin/env /bin/bash
-
-# Description: Descends from $1, or pwd, looking for empty directories and adds a `.githolder` to them.
-# does not descend into hidden directories.
-
-# examples:
-# > git_holder
-# > git_holder --dry-run
-
-set -e
-
-find_empty_dirs() {
- local dir="$1"
- local dry_run="$2"
-
- # Skip `.git` specifically
- if [[ "$(basename "$dir")" == ".git" ]]; then
- return
- fi
-
- # Check if the directory is empty (including hidden files, excluding `.` and `..`)
- if [[ -z $(find "$dir" -mindepth 1 -maxdepth 1 -print -quit) ]]; then
- if [[ "$dry_run" == "true" ]]; then
- echo "Dry-run: Would add .githolder in $dir"
- else
- echo "Adding .githolder to $dir"
- touch "$dir/.githolder"
- fi
- else
- # Recurse into subdirectories
- for subdir in "$dir"/*/ "$dir"/.[!.]/; do
- if [[ -d "$subdir" && "$subdir" != "$dir/.[!.]/" ]]; then
- find_empty_dirs "$subdir" "$dry_run"
- fi
- done
- fi
-}
-
-# Default parameters
-dry_run="false"
-target_dir="."
-
-# Parse arguments
-while [[ $# -gt 0 ]]; do
- case "$1" in
- --dry-run)
- dry_run="true"
- shift
- ;;
- *)
- if [[ -d "$1" ]]; then
- target_dir="$1"
- shift
- else
- echo "Invalid argument: $1 is not a directory"
- exit 1
- fi
- ;;
- esac
-done
-
-# Run the function
-find_empty_dirs "$target_dir" "$dry_run"
+++ /dev/null
-#!/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-
-# 2024-11-21 fits project skeleton convention
-echo v3.0
-
+++ /dev/null
-#!/usr/bin/env bash
-script_afp=$(realpath "${BASH_SOURCE[0]}")
-# vl 'vertical list'
-
-# Check if the command is provided
-if [ -z "$1" ]; then
- echo "Usage: vl <command> [args...]"
- exit 1
-fi
-
-# Capture the command and its arguments
-cmd=$1
-shift
-
-# Run the command with the remaining arguments and replace colons or spaces with newlines
-"$cmd" "$@" | tr ' :' '\n'
-
-exit 0
--- /dev/null
+#!/usr/bin/env bash
+# Prints each file given on the command line, preceded by a dashed separator
+# and the file's name. Missing files are reported and skipped.
+# NOTE(review): script_fp (used in the usage message) is a function exported
+# by tool_shared/bespoke/env -- this tool presumably assumes a project
+# environment has been sourced; confirm before running standalone.
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# Check if at least one file is provided
+if [ $# -eq 0 ]; then
+ echo "Usage: $(script_fp) <filename1> [filename2] ..."
+ exit 1
+fi
+
+# Loop through all the provided files
+for file in "$@"; do
+ # Check if the file exists
+ if [ ! -f "$file" ]; then
+ echo "Error: File '$file' not found!"
+ continue
+ fi
+
+ # Print 80 dashes
+ printf '%.0s-' {1..80}
+ echo
+
+ # Print the filename and a colon
+ echo "$file:"
+
+ # Print the contents of the file
+ cat "$file"
+
+ # Print a newline for spacing between files
+ echo
+done
--- /dev/null
+#!/usr/bin/env bash
+# Shared project environment: derives REPO_HOME/PROJECT from this file's
+# location and exports the script_* path helper functions used by all roles.
+# Must be sourced, never executed.
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ exit 1
+fi
+
+# --------------------------------------------------------------------------------
+# project definition
+
+# actual absolute directory path for this script file
+
+ script_adp(){
+ dirname "$script_afp"
+ }
+
+# assume this script is located $REPO_HOME/tool_shared/bespoke and work backwards
+# to get $REPO_HOME, etc.
+
+ REPO_HOME=$(dirname "$(dirname "$(script_adp)")")
+ echo REPO_HOME "$REPO_HOME"
+
+ PROJECT=$(basename "$REPO_HOME")
+ echo PROJECT "$PROJECT"
+
+ # set the prompt decoration to the name of the project
+ PROMPT_DECOR=$PROJECT
+
+# --------------------------------------------------------------------------------
+# The project administrator sets up the following tools for all roles to use:
+#
+ export JAVA_HOME="$REPO_HOME/tool_shared/third_party/jdk-11"
+
+# --------------------------------------------------------------------------------
+# the following functions are provided for other scripts to use.
+# at the top of files that make use of these functions put the following line:
+# script_afp=$(realpath "${BASH_SOURCE[0]}")
+#
+
+ ## script's filename
+ script_fn(){
+ basename "$script_afp"
+ }
+
+ ## script's filepath relative to $REPO_HOME
+ script_fp(){
+ realpath --relative-to="${REPO_HOME}" "$script_afp"
+ }
+
+ ## script's dirpath relative to $REPO_HOME
+ script_dp(){
+ dirname "$(script_fp)"
+ }
+
+# --------------------------------------------------------------------------------
+# Exports
+# Bash has no 'closure' hence when exporting a function, one must also export all the pieces.
+# do not export script_afp: each consuming script sets its own value, which
+# the exported functions then read.
+
+ export REPO_HOME PROJECT PROMPT_DECOR
+ export -f script_adp script_fn script_dp script_fp
+
+ export ENV=$(script_fp)
+ echo ENV "$ENV"
+
--- /dev/null
+#!/bin/env /bin/bash
+
+# Description: Descends from $1, or pwd, looking for empty directories and adds a `.githolder` to them.
+# does not descend into hidden directories.
+
+# examples:
+# > git_holder
+# > git_holder --dry-run
+
+set -e
+
+find_empty_dirs() {
+ local dir="$1"
+ local dry_run="$2"
+
+ # Skip `.git` specifically
+ if [[ "$(basename "$dir")" == ".git" ]]; then
+ return
+ fi
+
+ # Check if the directory is empty (including hidden files, excluding `.` and `..`)
+ if [[ -z $(find "$dir" -mindepth 1 -maxdepth 1 -print -quit) ]]; then
+ if [[ "$dry_run" == "true" ]]; then
+ echo "Dry-run: Would add .githolder in $dir"
+ else
+ echo "Adding .githolder to $dir"
+ touch "$dir/.githolder"
+ fi
+ else
+ # Recurse into subdirectories
+ for subdir in "$dir"/*/ "$dir"/.[!.]/; do
+ if [[ -d "$subdir" && "$subdir" != "$dir/.[!.]/" ]]; then
+ find_empty_dirs "$subdir" "$dry_run"
+ fi
+ done
+ fi
+}
+
+# Default parameters
+dry_run="false"
+target_dir="."
+
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --dry-run)
+ dry_run="true"
+ shift
+ ;;
+ *)
+ if [[ -d "$1" ]]; then
+ target_dir="$1"
+ shift
+ else
+ echo "Invalid argument: $1 is not a directory"
+ exit 1
+ fi
+ ;;
+ esac
+done
+
+# Run the function
+find_empty_dirs "$target_dir" "$dry_run"
--- /dev/null
+#!/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# 2024-11-21 fits project skeleton convention
+echo v3.0
+
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+# vl 'vertical list'
+# Runs a command and prints its output one item per line by translating
+# spaces and colons to newlines. Typical use: vl echo "$PATH"
+
+# Check if the command is provided
+if [ -z "$1" ]; then
+ echo "Usage: vl <command> [args...]"
+ exit 1
+fi
+
+# Capture the command and its arguments
+cmd=$1
+shift
+
+# Run the command with the remaining arguments and replace colons or spaces with newlines
+"$cmd" "$@" | tr ' :' '\n'
+
+# NOTE(review): this unconditional exit 0 masks the pipeline's status;
+# a failing <command> still yields success -- confirm this is intended.
+exit 0
--- /dev/null
+#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# input guards
+
+ env_must_be="tool_shared/bespoke/env"
+ error=false
+ if [ "$ENV" != "$env_must_be" ]; then
+ echo "$(script_fp):: error: must be run in the $env_must_be environment"
+ error=true
+ fi
+ if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ error=true
+ fi
+ if $error; then exit 1; fi
+
+export PATH=\
+"$REPO_HOME"/tool_shared/bespoke/\
+:"$PATH"
+
+# expose sneaky hidden files
+alias ls="ls -a"
+
+# some feedback to show all went well
+
+ export PROMPT_DECOR="$PROJECT"_administrator
+ export ENV=$(script_fp)
+ echo ENV "$ENV"
+
+
+
+