+++ /dev/null
-> cd Mosaic
-> source env_developer
-> emacs &
-
-...
-
-2024-11-04T11:19:53Z[Mosaic_developer]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
-> clean_build_directories
-+ cd /var/user_data/Thomas-developer/Mosaic/developer
-+ rm -r scratchpad/com
-+ rm jvm/Mosaic.jar
-+ rm shell/Mosaic
-+ set +x
-clean_build_directories done.
-
-2024-11-04T11:20:14Z[Mosaic_developer]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
-> wipe_release
-+ cd /var/user_data/Thomas-developer/Mosaic
-+ rm -rf release/Mosaic release/Mosaic.jar
-+ set +x
-wipe_release done.
-
-2024-11-04T11:20:18Z[Mosaic_developer]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
-> make
-Compiling files...
-+ javac -g -d scratchpad javac/IO.java javac/Mosaic.java javac/TestBench.java javac/Util.java
-+ set +x
-Creating JAR file...
-+ jar_file=jvm/Mosaic.jar
-+ mkdir -p jvm
-+ jar cf jvm/Mosaic.jar -C scratchpad .
-+ set +x
-JAR file created successfully: jvm/Mosaic.jar
-Creating shell wrappers...
-developer/tool/make done.
-
-2024-11-04T11:20:40Z[Mosaic_developer]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
-> release
-Starting release process...
-Installed Mosaic.jar to /var/user_data/Thomas-developer/Mosaic/release with permissions ug+r
-Installed Mosaic to /var/user_data/Thomas-developer/Mosaic/release with permissions ug+r+x
-developer/tool/release done.
-
-2024-11-04T11:20:44Z[Mosaic_developer]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
-> clean_make_output
-+ cd /var/user_data/Thomas-developer/Mosaic/developer
-+ rm -r scratchpad/com/ReasoningTechnology/Mosaic
-+ rm jvm/Mosaic.jar
-+ rm 'shell/{Mosaic}'
-rm: cannot remove 'shell/{Mosaic}': No such file or directory
-+ set +x
-clean_make_output done.
-
--- /dev/null
+> cd Mosaic
+> source env_developer
+> emacs &
+
+...
+
+2024-11-04T11:19:53Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> clean_build_directories
++ cd /var/user_data/Thomas-developer/Mosaic/developer
++ rm -r scratchpad/com
++ rm jvm/Mosaic.jar
++ rm shell/Mosaic
++ set +x
+clean_build_directories done.
+
+2024-11-04T11:20:14Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> wipe_release
++ cd /var/user_data/Thomas-developer/Mosaic
++ rm -rf release/Mosaic release/Mosaic.jar
++ set +x
+wipe_release done.
+
+2024-11-04T11:20:18Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> make
+Compiling files...
++ javac -g -d scratchpad javac/IO.java javac/Mosaic.java javac/TestBench.java javac/Util.java
++ set +x
+Creating JAR file...
++ jar_file=jvm/Mosaic.jar
++ mkdir -p jvm
++ jar cf jvm/Mosaic.jar -C scratchpad .
++ set +x
+JAR file created successfully: jvm/Mosaic.jar
+Creating shell wrappers...
+developer/tool/make done.
+
+2024-11-04T11:20:40Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> release
+Starting release process...
+Installed Mosaic.jar to /var/user_data/Thomas-developer/Mosaic/release with permissions ug+r
+Installed Mosaic to /var/user_data/Thomas-developer/Mosaic/release with permissions ug+r+x
+developer/tool/release done.
+
+2024-11-04T11:20:44Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> clean_make_output
++ cd /var/user_data/Thomas-developer/Mosaic/developer
++ rm -r scratchpad/com/ReasoningTechnology/Mosaic
++ rm jvm/Mosaic.jar
++ rm 'shell/{Mosaic}'
+rm: cannot remove 'shell/{Mosaic}': No such file or directory
++ set +x
+clean_make_output done.
+
--- /dev/null
+
+--- setting up the environment:
+
+
+2024-11-08T07:40:57Z[]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer§
+> bash
+
+2024-11-08T07:41:19Z[]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer§
+> cd Mosaic
+
+2024-11-08T07:41:25Z[]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
+> . env_developer
+REPO_HOME /var/user_data/Thomas-developer/Mosaic
+PROJECT Mosaic
+ENV tool_shared/bespoke/env
+ENV developer/tool/env
+
+2024-11-08T07:41:34Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> emacs &
+
+
+--- building the release candidate
+
+2024-11-08T09:58:08Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> clean_build_directories
++ cd /var/user_data/Thomas-developer/Mosaic/developer
++ rm -r scratchpad/com
++ rm jvm/Mosaic.jar
++ rm shell/Mosaic
++ set +x
+clean_build_directories done.
+
+2024-11-08T09:58:16Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> make
+Compiling files...
++ javac -g -d scratchpad javac/Mosaic_IO.java javac/Mosaic_Mosaic.java javac/Mosaic_Testbench.java javac/Mosaic_Util.java
++ set +x
+Creating JAR file...
++ jar_file=jvm/Mosaic.jar
++ mkdir -p jvm
++ jar cf jvm/Mosaic.jar -C scratchpad .
++ set +x
+JAR file created successfully: jvm/Mosaic.jar
+Creating shell wrappers...
+developer/tool/make done.
+
+2024-11-08T09:58:21Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+> release
+Starting release process...
+Installed Mosaic.jar to /var/user_data/Thomas-developer/Mosaic/release with permissions ug+r
+Installed Mosaic to /var/user_data/Thomas-developer/Mosaic/release with permissions ug+r+x
+developer/tool/release done.
+
+2024-11-08T09:58:24Z[Mosaic_developer]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/developer§
+>
--- /dev/null
+1. Tool
+
+The directory called `tool` has tools for the developer. There are comments at
+the top of each that say what it does.
+
+In the tool directory, `env` sets the PATH, CLASSPATH, and prepares the developer's
+environment. Nothing will work right until this is sourced. (This is similar to
+Python's `venv`.)
+
+The tool called `make` builds the project. This is not the venerable `/bin/make`
+but is a simple bash script. It is going to compile everything in the `javac`
+directory.
+
+The tool called `shell_wrapper_list` gives a list of class names that are to
+be given direct call shell wrappers. `make` will put these in the `shell`
+directory.
+
+The `clean_<x>` scripts are there to delete files so that developers do not have
+to type `rm` commands. This helps prevent accidents. Note the
+$REPO_HOME/tool_shared/bespoke/wipe_release script will remove files from the
+../release directory.
+
+2. build
+
+`make` runs `javac`, which puts the class files into the `scratchpad` directory.
+It will `mkdir` a directory hierarchy in `scratchpad` that mirrors the
+package name.
+
+After compiling `make` then gathers the class files found in the scratchpad
+directory hierarchy and puts them into a `.jar` file. Said `.jar` file will
+be located in the directory `jvm`.
+
+The `scratchpad` directory is not pushed to the repo. It can be cleaned
+at any time, because it can always be rebuilt.
+
+3. release
+
+The `release` script will make a copy of the scripts in `shell` and the `.jar`
+file in `jvm` and put them in the `$REPO_HOME/release` directory. This
+comprises the release candidate. After a release branch is made, this becomes
+the actual release. Note the script in `$REPO_HOME/bespoke/version` which
+outputs the version for released code.
+
+
+4. debug
+
+If you use emacs, note the file `$REPO_HOME/test_shared/bespoke/emacs.el`.
+
+Edit `make` to add or remove the `-g` flag from `javac`. This controls putting
+source code information into the class files.
+
+After the code is compiled with the `-g` flag, then in the `jdb` debugger, `jdb`
+will look into the `scratchpad` directory hierarchy where the sources were
+put to find the sources files to display when single stepping etc.
+
+The `distribute_source` tool adds links into the `scratchpad` directory hierarchy
+that point back into the `javac` directory. After these links are made, `jdb`
+will show the sources, and should the sources be edited, the originals located
+in the `javac` directory will be modified.
+
+5. debug from the `tester` environment
+
+The tester environment points at the release candidate located in the
+$REPO_HOME/release directory to find the java classes.
+
+If this release candidate was compiled with the `-g` flag, then it will have
+embedded in it source information pointing back into the
+`$REPO_HOME/developer/scratchpad` directory.
+
+If the `distribute_source` was not called by the developer, or the scratchpad
+contents have been cleaned, jdb will not be able to find the sources.
+If jdb does find the sources, and the tester edits them, then the originals
+in the `$REPO_HOME/developer/javac` directory will be modified. If this
+behavior is not desired, then put the tester on a `core_tester_branch`, then
+inspect changes before merging them back to the `core_developer_branch`.
+
+This setup makes it possible for developers to use the tester environment
+to work, without having to be on a separate branch, or for testers to
+work separately.
+
+
+
+
+
+++ /dev/null
-package com.ReasoningTechnology.Mosaic;
-/*
- The primary purpose of this class is to redirect I/O to buffers,
- sot that a test can check the I/O behavior of a function under test.
-*/
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.FileOutputStream;
-import java.io.FileInputStream;
-import java.io.FileDescriptor;
-import java.io.PrintStream;
-import java.io.InputStream;
-
-public class IO{
-
- private PrintStream original_out;
- private PrintStream original_err;
- private InputStream original_in;
-
- private ByteArrayOutputStream out_content;
- private ByteArrayOutputStream err_content;
- private ByteArrayInputStream in_content;
- private Boolean streams_foobar = false;
- private Boolean uninitialized = true;
-
-
- // IO currently has no constructors defined, uses default
-
-
- // Redirects IO streams, logs and handles errors if redirection fails.
- //
- // Most tests do not do I/O checks, so rather than throwing an error
- // it will set the streams_foobar flag, then throw an error if the I/O
- // functions are used.
- //
- // This is the only method that can set the streams_foobar flag.
- public Boolean redirect(){
-
- try{
- original_out = System.out;
- original_err = System.err;
- original_in = System.in;
-
- out_content = new ByteArrayOutputStream();
- err_content = new ByteArrayOutputStream();
- in_content = new ByteArrayInputStream(new byte[0]);
-
- System.setOut( new PrintStream(out_content) );
- System.setErr( new PrintStream(err_content) );
- System.setIn(in_content);
-
- uninitialized = false;
- return true;
-
- } catch(Exception e){
- restore_hard();
- streams_foobar = true;
- return false;
-
- }
- }
-
- // Hard restore of the streams, resetting to system defaults
- public void restore_hard(){
- System.setOut(new PrintStream( new FileOutputStream(FileDescriptor.out)) );
- System.setErr(new PrintStream( new FileOutputStream(FileDescriptor.err))) ;
- System.setIn(new FileInputStream(FileDescriptor.in));
- }
-
- // Restores original IO streams, ensuring foobar and uninitialized states are checked.
- // If anything goes wrong reverse to restore_hard.
- public void restore(){
- if(uninitialized || streams_foobar){
- restore_hard();
- return;
- }
- try{
- System.setOut(original_out);
- System.setErr(original_err);
- System.setIn(original_in);
- } catch(Throwable e){
- restore_hard();
- }
- }
-
- // Clears output, error, and input buffers, checks for foobar state only.
- public void clear_buffers(){
- if(streams_foobar){
- throw new IllegalStateException("Cannot clear buffers: IO object is in foobar state.");
- }
- out_content.reset();
- err_content.reset();
- in_content = new ByteArrayInputStream( new byte[0] ); // Reset to EOF
- System.setIn(in_content);
- }
-
- public Boolean has_out_content(){
- if(streams_foobar){
- throw new IllegalStateException
- (
- "Cannot access stdout content: IO object is in foobar state."
- );
- }
- return out_content.size() > 0;
- }
- public String get_out_content(){
- if(streams_foobar){
- throw new IllegalStateException
- (
- "Cannot access stdout content: IO object is in foobar state."
- );
- }
- return out_content.toString();
- }
-
- public Boolean has_err_content(){
- if(streams_foobar){
- throw new IllegalStateException
- (
- "Cannot access stderr content: IO object is in foobar state."
- );
- }
- return err_content.size() > 0;
- }
- public String get_err_content(){
- if(streams_foobar){
- throw new IllegalStateException
- (
- "Cannot access stderr content: IO object is in foobar state."
- );
- }
- return err_content.toString();
- }
-
- // Pushes input string onto stdin, checks foobar state only.
- public void push_input(String input_data){
- if(streams_foobar){
- throw new IllegalStateException("Cannot push input: IO object is in foobar state.");
- }
- in_content = new ByteArrayInputStream( input_data.getBytes() );
- System.setIn(in_content);
- }
-}
+++ /dev/null
-package com.ReasoningTechnology.Mosaic;
-import com.ReasoningTechnology.Mosaic.Util;
-
-/*
-Mosaic currently does not have shell commands.
-
-*/
-
-
-public class Mosaic{
-
- public static Boolean test_is_true(){
- return true;
- }
-
- public static int run(){
- System.out.println("Main function placeholder. Currently Mosaic is used by extending the TestBench class.");
- return 0;
- }
-
- public static void main(String[] args){
- int return_code = run();
- System.exit(return_code);
- return;
- }
-
-}
--- /dev/null
+package com.ReasoningTechnology.Mosaic;
+/*
+ The primary purpose of this class is to redirect I/O to buffers,
+  so that a test can check the I/O behavior of a function under test.
+*/
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.FileOutputStream;
+import java.io.FileInputStream;
+import java.io.FileDescriptor;
+import java.io.PrintStream;
+import java.io.InputStream;
+
+public class Mosaic_IO{
+
+ private PrintStream original_out;
+ private PrintStream original_err;
+ private InputStream original_in;
+
+ private ByteArrayOutputStream out_content;
+ private ByteArrayOutputStream err_content;
+ private ByteArrayInputStream in_content;
+ private Boolean streams_foobar = false;
+ private Boolean uninitialized = true;
+
+
+ // IO currently has no constructors defined, uses default
+
+
+ // Redirects IO streams, logs and handles errors if redirection fails.
+ //
+ // Most tests do not do I/O checks, so rather than throwing an error
+ // it will set the streams_foobar flag, then throw an error if the I/O
+ // functions are used.
+ //
+ // This is the only method that can set the streams_foobar flag.
+ public Boolean redirect(){
+
+ try{
+ original_out = System.out;
+ original_err = System.err;
+ original_in = System.in;
+
+ out_content = new ByteArrayOutputStream();
+ err_content = new ByteArrayOutputStream();
+ in_content = new ByteArrayInputStream(new byte[0]);
+
+ System.setOut( new PrintStream(out_content) );
+ System.setErr( new PrintStream(err_content) );
+ System.setIn(in_content);
+
+ uninitialized = false;
+ return true;
+
+ } catch(Exception e){
+ restore_hard();
+ streams_foobar = true;
+ return false;
+
+ }
+ }
+
+ // Hard restore of the streams, resetting to system defaults
+ public void restore_hard(){
+ System.setOut(new PrintStream( new FileOutputStream(FileDescriptor.out)) );
+ System.setErr(new PrintStream( new FileOutputStream(FileDescriptor.err))) ;
+ System.setIn(new FileInputStream(FileDescriptor.in));
+ }
+
+ // Restores original IO streams, ensuring foobar and uninitialized states are checked.
+  // If anything goes wrong, revert to restore_hard.
+ public void restore(){
+ if(uninitialized || streams_foobar){
+ restore_hard();
+ return;
+ }
+ try{
+ System.setOut(original_out);
+ System.setErr(original_err);
+ System.setIn(original_in);
+ } catch(Throwable e){
+ restore_hard();
+ }
+ }
+
+ // Clears output, error, and input buffers, checks for foobar state only.
+ public void clear_buffers(){
+ if(streams_foobar){
+ throw new IllegalStateException("Cannot clear buffers: IO object is in foobar state.");
+ }
+ out_content.reset();
+ err_content.reset();
+ in_content = new ByteArrayInputStream( new byte[0] ); // Reset to EOF
+ System.setIn(in_content);
+ }
+
+ public Boolean has_out_content(){
+ if(streams_foobar){
+ throw new IllegalStateException
+ (
+ "Cannot access stdout content: IO object is in foobar state."
+ );
+ }
+ return out_content.size() > 0;
+ }
+ public String get_out_content(){
+ if(streams_foobar){
+ throw new IllegalStateException
+ (
+ "Cannot access stdout content: IO object is in foobar state."
+ );
+ }
+ return out_content.toString();
+ }
+
+ public Boolean has_err_content(){
+ if(streams_foobar){
+ throw new IllegalStateException
+ (
+ "Cannot access stderr content: IO object is in foobar state."
+ );
+ }
+ return err_content.size() > 0;
+ }
+ public String get_err_content(){
+ if(streams_foobar){
+ throw new IllegalStateException
+ (
+ "Cannot access stderr content: IO object is in foobar state."
+ );
+ }
+ return err_content.toString();
+ }
+
+ // Pushes input string onto stdin, checks foobar state only.
+ public void push_input(String input_data){
+ if(streams_foobar){
+ throw new IllegalStateException("Cannot push input: IO object is in foobar state.");
+ }
+ in_content = new ByteArrayInputStream( input_data.getBytes() );
+ System.setIn(in_content);
+ }
+}
--- /dev/null
+package com.ReasoningTechnology.Mosaic;
+
+/*
+The Mosaic shell callable wrapper is currently a placeholder. Perhaps someday we
+can find something for this to do.
+
+*/
+
+
+public class Mosaic_Mosaic{
+
+ public static Boolean test_is_true(){
+ return true;
+ }
+
+ public static int run(){
+ System.out.println("Main function placeholder. Currently Mosaic is used by extending the TestBench class.");
+ return 0;
+ }
+
+ public static void main(String[] args){
+ int return_code = run();
+ System.exit(return_code);
+ return;
+ }
+
+}
--- /dev/null
+package com.ReasoningTechnology.Mosaic;
+
+import java.lang.reflect.Method;
+
+public class Mosaic_Testbench {
+
+ /* --------------------------------------------------------------------------------
+ Validate the structure of a test method
+ */
+ public static Boolean method_is_wellformed(Method method){
+ // Check if the method returns Boolean
+ if(!method.getReturnType().equals(Boolean.class)){
+ System.out.println("Structural problem: " + method.getName() + " does not return Boolean.");
+ return false;
+ }
+
+ // Check if the method has exactly one argument of type Mosaic_IO
+ Class<?>[] parameterTypes = method.getParameterTypes();
+ if(parameterTypes == null || parameterTypes.length != 1 || !parameterTypes[0].equals(Mosaic_IO.class)){
+ System.out.println("Structural problem: " + method.getName() + " does not accept a single Mosaic_IO argument.");
+ return false;
+ }
+
+ return true;
+ }
+
+ /* --------------------------------------------------------------------------------
+ Run a single test method
+ */
+ public static Boolean run_test(Object test_suite, Method method, Mosaic_IO io){
+ String test_name = method.getName();
+
+ // Tracking possible test failures
+ Boolean fail_malformed = false;
+ Boolean fail_reported = false;
+ Boolean fail_exception = false;
+ Boolean fail_extraneous_stdout = false;
+ Boolean fail_extraneous_stderr = false;
+ String exception_string = "";
+
+ // Validate method structure
+ if(!method_is_wellformed(method)){
+ System.out.println("Error: " + test_name + " has an invalid structure.");
+ return false;
+ }
+
+ // Redirect I/O
+ Boolean successful_redirect = io.redirect();
+ if(successful_redirect){
+ io.clear_buffers(); // Start each test with empty buffers
+ } else {
+ Mosaic_Util.log_message(test_name, "Error: I/O redirection failed before running the test.");
+ System.out.println("Warning: Failed to redirect I/O for test: " + test_name);
+ }
+
+ // Run the test and catch any exceptions
+ try{
+ Object result = method.invoke(test_suite, io);
+ fail_reported = !Boolean.TRUE.equals(result); // Test passes only if it returns exactly `true`
+ fail_extraneous_stdout = io.has_out_content();
+ fail_extraneous_stderr = io.has_err_content();
+ } catch(Exception e){
+ fail_exception = true;
+ exception_string = e.toString();
+ } finally{
+ io.restore();
+ }
+
+ // Report results
+ if(fail_reported) System.out.println("Test failed: '" + test_name + "' reported failure.");
+ if(fail_exception) System.out.println("Test failed: '" + test_name + "' threw an exception: " + exception_string);
+ if(fail_extraneous_stdout){
+ System.out.println("Test failed: '" + test_name + "' produced extraneous stdout.");
+ Mosaic_Util.log_output(test_name, "stdout", io.get_out_content());
+ }
+ if(fail_extraneous_stderr){
+ System.out.println("Test failed: '" + test_name + "' produced extraneous stderr.");
+ Mosaic_Util.log_output(test_name, "stderr", io.get_err_content());
+ }
+
+ // Determine final test result
+ return !(fail_reported || fail_exception || fail_extraneous_stdout || fail_extraneous_stderr);
+ }
+
+ /* --------------------------------------------------------------------------------
+ Run all tests in the test suite
+ */
+ public static int run(Object test_suite){
+ int failed_tests = 0;
+ int passed_tests = 0;
+ Method[] methods = test_suite.getClass().getDeclaredMethods();
+ Mosaic_IO io = new Mosaic_IO();
+
+ for(Method method : methods){
+ if(run_test(test_suite, method, io)) passed_tests++; else failed_tests++;
+ }
+
+ // Summary of test results
+ System.out.println("Total tests run: " + (passed_tests + failed_tests));
+ System.out.println("Total tests passed: " + passed_tests);
+ System.out.println("Total tests failed: " + failed_tests);
+
+ return (failed_tests > 0) ? 1 : 0;
+ }
+
+}
--- /dev/null
+package com.ReasoningTechnology.Mosaic;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.lang.reflect.Method;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.util.function.Predicate;
+
+public class Mosaic_Util{
+
+ // Linear search with a predicate
+ public static <T> T find( T[] elements ,Predicate<T> predicate ){
+ for( T element : elements ){
+ if( predicate.test( element )) return element; // Return the first match
+ }
+ return null; // Return null if no element satisfies the predicate
+ }
+
+ // True when it does a search and finds a true value; otherwise false.
+ public static Boolean exists( Object[] elements ){
+ return elements.length > 0 && find( elements ,element -> (element instanceof Boolean) && (Boolean) element ) != null;
+ }
+
+ // True when it does a search and does not find a false value; otherwise false.
+ public static Boolean all( Object[] elements ){
+ return elements.length > 0 && find( elements ,element -> !(element instanceof Boolean) || !(Boolean) element ) == null;
+ }
+
+ public static void all_set_false( Boolean[] condition_list ){
+ int i = 0;
+ while(i < condition_list.length){
+ condition_list[i] = false;
+ i++;
+ }
+ }
+
+ public static void all_set_true( Boolean[] condition_list ){
+ int i = 0;
+ while(i < condition_list.length){
+ condition_list[i] = true;
+ i++;
+ }
+ }
+
+ public static String iso_utc_time(){
+ return Instant.now().atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_INSTANT);
+ }
+
+ // used to report if a test completed with data still on an output streams
+ public static void log_output(String test_name ,String stream ,String output_data){
+ try(FileWriter log_writer = new FileWriter("test_log.txt" ,true)){ // Append mode
+ log_writer.write("\n" + iso_utc_time() + " -----------------------------------------------------------\n");
+ log_writer.write("Test: " + test_name + "\n");
+ log_writer.write("Stream: " + stream + "\n");
+ log_writer.write("Output:\n" + output_data + "\n");
+ } catch(IOException e) {
+ System.err.println("Error writing to log for test: " + test_name + ", stream: " + stream);
+ e.printStackTrace(System.err);
+ }
+ }
+
+ // used to log a general message about a test
+ public static void log_message(String test_name ,String message){
+ try(FileWriter log_writer = new FileWriter("test_log.txt" ,true)){ // Append mode
+ log_writer.write("\n" + iso_utc_time() + " -----------------------------------------------------------\n");
+ log_writer.write("Test: " + test_name + "\n");
+ log_writer.write("Message:\n" + message + "\n");
+ } catch(IOException e){
+ System.err.println
+ (
+ "Error writing message \""
+ + message
+ + "\" to log for test \'"
+ + test_name
+ + "\'"
+ );
+ e.printStackTrace(System.err);
+ }
+ }
+
+}
+++ /dev/null
-package com.ReasoningTechnology.Mosaic;
-
-import java.lang.reflect.Method;
-
-public class TestBench {
-
- /* --------------------------------------------------------------------------------
- Validate the structure of a test method
- */
- public static Boolean method_is_wellformed(Method method){
- // Check if the method returns Boolean
- if(!method.getReturnType().equals(Boolean.class)){
- System.out.println("Structural problem: " + method.getName() + " does not return Boolean.");
- return false;
- }
-
- // Check if the method has exactly one argument of type IO
- Class<?>[] parameterTypes = method.getParameterTypes();
- if(parameterTypes == null || parameterTypes.length != 1 || !parameterTypes[0].equals(IO.class)){
- System.out.println("Structural problem: " + method.getName() + " does not accept a single IO argument.");
- return false;
- }
-
- return true;
- }
-
- /* --------------------------------------------------------------------------------
- Run a single test method
- */
- public static Boolean run_test(Object test_suite, Method method, IO io){
- String test_name = method.getName();
-
- // Tracking possible test failures
- Boolean fail_malformed = false;
- Boolean fail_reported = false;
- Boolean fail_exception = false;
- Boolean fail_extraneous_stdout = false;
- Boolean fail_extraneous_stderr = false;
- String exception_string = "";
-
- // Validate method structure
- if(!method_is_wellformed(method)){
- System.out.println("Error: " + test_name + " has an invalid structure.");
- return false;
- }
-
- // Redirect I/O
- Boolean successful_redirect = io.redirect();
- if(successful_redirect){
- io.clear_buffers(); // Start each test with empty buffers
- } else {
- Util.log_message(test_name, "Error: I/O redirection failed before running the test.");
- System.out.println("Warning: Failed to redirect I/O for test: " + test_name);
- }
-
- // Run the test and catch any exceptions
- try{
- Object result = method.invoke(test_suite, io);
- fail_reported = !Boolean.TRUE.equals(result); // Test passes only if it returns exactly `true`
- fail_extraneous_stdout = io.has_out_content();
- fail_extraneous_stderr = io.has_err_content();
- } catch(Exception e){
- fail_exception = true;
- exception_string = e.toString();
- } finally{
- io.restore();
- }
-
- // Report results
- if(fail_reported) System.out.println("Test failed: '" + test_name + "' reported failure.");
- if(fail_exception) System.out.println("Test failed: '" + test_name + "' threw an exception: " + exception_string);
- if(fail_extraneous_stdout){
- System.out.println("Test failed: '" + test_name + "' produced extraneous stdout.");
- Util.log_output(test_name, "stdout", io.get_out_content());
- }
- if(fail_extraneous_stderr){
- System.out.println("Test failed: '" + test_name + "' produced extraneous stderr.");
- Util.log_output(test_name, "stderr", io.get_err_content());
- }
-
- // Determine final test result
- return !(fail_reported || fail_exception || fail_extraneous_stdout || fail_extraneous_stderr);
- }
-
- /* --------------------------------------------------------------------------------
- Run all tests in the test suite
- */
- public static int run(Object test_suite){
- int failed_tests = 0;
- int passed_tests = 0;
- Method[] methods = test_suite.getClass().getDeclaredMethods();
- IO io = new IO();
-
- for(Method method : methods){
- if(run_test(test_suite, method, io)) passed_tests++; else failed_tests++;
- }
-
- // Summary of test results
- System.out.println("Total tests run: " + (passed_tests + failed_tests));
- System.out.println("Total tests passed: " + passed_tests);
- System.out.println("Total tests failed: " + failed_tests);
-
- return (failed_tests > 0) ? 1 : 0;
- }
-
-}
+++ /dev/null
-package com.ReasoningTechnology.Mosaic;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.lang.reflect.Method;
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import java.util.function.Predicate;
-
-public class Util{
-
- // Linear search with a predicate
- public static <T> T find( T[] elements ,Predicate<T> predicate ){
- for( T element : elements ){
- if( predicate.test( element )) return element; // Return the first match
- }
- return null; // Return null if no element satisfies the predicate
- }
-
- // True when it does a search and finds a true value; otherwise false.
- public static Boolean exists( Object[] elements ){
- return elements.length > 0 && find( elements ,element -> (element instanceof Boolean) && (Boolean) element ) != null;
- }
-
- // True when it does a search and does not find a false value; otherwise false.
- public static Boolean all( Object[] elements ){
- return elements.length > 0 && find( elements ,element -> !(element instanceof Boolean) || !(Boolean) element ) == null;
- }
-
- public static void all_set_false( Boolean[] condition_list ){
- int i = 0;
- while(i < condition_list.length){
- condition_list[i] = false;
- i++;
- }
- }
-
- public static void all_set_true( Boolean[] condition_list ){
- int i = 0;
- while(i < condition_list.length){
- condition_list[i] = true;
- i++;
- }
- }
-
- public static String iso_utc_time(){
- return Instant.now().atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_INSTANT);
- }
-
- // used to report if a test completed with data still on an output streams
- public static void log_output(String test_name ,String stream ,String output_data){
- try(FileWriter log_writer = new FileWriter("test_log.txt" ,true)){ // Append mode
- log_writer.write("\n" + iso_utc_time() + " -----------------------------------------------------------\n");
- log_writer.write("Test: " + test_name + "\n");
- log_writer.write("Stream: " + stream + "\n");
- log_writer.write("Output:\n" + output_data + "\n");
- } catch(IOException e) {
- System.err.println("Error writing to log for test: " + test_name + ", stream: " + stream);
- e.printStackTrace(System.err);
- }
- }
-
- // used to log a general message about a test
- public static void log_message(String test_name ,String message){
- try(FileWriter log_writer = new FileWriter("test_log.txt" ,true)){ // Append mode
- log_writer.write("\n" + iso_utc_time() + " -----------------------------------------------------------\n");
- log_writer.write("Test: " + test_name + "\n");
- log_writer.write("Message:\n" + message + "\n");
- } catch(IOException e){
- System.err.println
- (
- "Error writing message \""
- + message
- + "\" to log for test \'"
- + test_name
- + "\'"
- );
- e.printStackTrace(System.err);
- }
- }
-
-}
--- /dev/null
+Java has long been criticized for its lack of support for `import as`, despite
+years of requests and proposals.
+
+The Java platform’s approach to aliasing issues relies on using fully qualified
+names, which poses challenges given the length of package names, especially when
+they include reversed domain names.
+
+Because `Mosaic` is used to help with testing and is not part of the project
+being tested, when aliasing conflicts arise, it is typically the `Mosaic` identifiers
+that need to be fully qualified. Such a renamed identifier can exceed 34
+characters!
+
+One proposal to get around this was to use an `In` class where the members were
+class extensions of imported classes. Then all imports would have the prefix `In.`.
+However, this did not work out because constructors are not
+inherited, and Java’s restrictions on `final` classes prevent the use of
+`LocalClass extends ImportClass {}` to give new names to classes.
+
+Another proposal was to use the `alias` project on GitHub, which offers an XML-based
+approach to aliasing. However, it introduces complexities, as it requires XML
+configurations to be supplied to the compiler, adding setup overhead. Perhaps
+another tool could create these.
+
+We studied a preprocessing proposal where `import as` statements would be
+replaced with fully qualified names before compilation. However, this approach
+changes the tool flow for users and would require additional steps to ensure
+`jdb` points to the original source files rather than intermediate files, which
+complicates debugging. For both this proposal and the prior, we wanted to avoid
+joining the world of java tool development.
+
+So we have a simple solution; it is not ideal, but it is not bad. We prefix
+the string `Mosaic_` to the front of all the class names in the Mosaic library.
+As a shop we are adopting this convention for all packaged java code.
-Updates for Ariadne
+1. More languages support, notably nodejs.
-1. reflect project skeleton changes.
- replaced literal `Ariadne` with `$PROJECT`
- in top level env choices, PROJECT -> PROMPT_DECOR
+2. This message:
- note in 'release' there is a variable name with the project name embedded in it:
- Ariadne_jar_fp -> project_jar_fp
- Change slashes to dots in the wrapper maker of 'make'
+ Running Test_File_0...Structural problem: unpack_file_path_0 does not accept a single IO argument.
+ Error: unpack_file_path_0 has an invalid structure.
-2. should do something about `wrapper` as it appears in multiple places making
- editing it a pain.
- in all places `wrapper` appears, now calls `shell_wrapper_list`
- in both developer and tester
+ Perhaps only the second one, getting rid of 'Error:'
-3. clean_make -> clean_make_output
+ "Bad type signature for method: unpack_file_path_0 does not accept a single IO argument."
-4. version outputs a `v` in front of the number
+3. TestBench -> Testbench perhaps?
-5. fix many shebangs to be: #!/usr/bin/env bash
-
-6. scratch_pad -> scratchpad
-
-5. make_source_tree also integrated into make
-
-6. replacement for bespoke/env and all other env files, related document.
+4. fix emacs.el so that jdbx doesn't always start with Test_Util. (note the
+ prior start can be found with M-x p)
+5. should include a tool for aggregating test suite runs
+ FYI, I'm using
+6. need an example .bashrc for setting the prompt now that I removed references
+to the 'resource' project and plan to deprecate it.
+7. should check stdin for unused input and report error also.
fi
source tool_shared/bespoke/env
-
+source tool/env
+++ /dev/null
-<!DOCTYPE html>
-<html lang="en">
-<head>
- <meta charset="UTF-8">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
- <title>Output Stream Policy - Mosaic Project</title>
- <style>
- html { font-size: 16px; }
- body {
- font-family: 'Noto Sans JP', Arial, sans-serif;
- background-color: hsl(0, 0%, 10%);
- color: hsl(42, 100%, 80%);
- padding: 2rem;
- margin: 0;
- }
- .page { padding: 1.25rem; margin: 1.25rem auto; max-width: 46.875rem; background-color: hsl(0, 0%, 0%); box-shadow: 0 0 0.625rem hsl(42, 100%, 50%); }
- ul, li { font-size: 1rem; list-style-type: none; }
- li::before { content: "📄 "; margin-right: 0.3125rem; }
- li { margin-bottom: 0.3125rem; }
- .description { margin-left: 0.625rem; color: hsl(42, 100%, 75%); }
- code { font-family: 'Courier New', Courier, monospace; background-color: hsl(0, 0%, 25%); color: hsl(42, 100%, 90%); padding: 0.125rem 0.25rem; border-radius: 0.1875rem; font-size: 90%; }
- h1 { text-align: center; color: hsl(42, 100%, 84%); text-transform: uppercase; margin-bottom: 1.25rem; }
- h2 { color: hsl(42, 100%, 84%); text-transform: uppercase; margin-top: 2.5rem; }
- p { color: hsl(42, 100%, 90%); margin-bottom: 1.25rem; text-align: justify; }
- </style>
-</head>
-<body>
- <div class="page">
- <h1>Output Stream Policy for Tests</h1>
-
- <h2>Overview of the <code>IO</code> Object</h2>
-
- <p>Each test function is given an <code>IO</code> object, which provides
- methods for inspecting <code>stdout</code> and <code>stderr</code> output
- streams, programmatically adding data to the <code>stdin</code> input stream,
- and clearing output streams as needed. Although the <code>IO</code> object is
- optional, it is available for cases where I/O validation or cleanup is
- essential to the test.</p>
-
- <h2>Purpose</h2>
-
- <p>Each test function is responsible for managing any output generated
- on <code>stdout</code> or <code>stderr</code> by the function under test
- (fut). <code>TestBench</code> will automatically clear the streams before
- each test begins and will check them after the test completes, treating any
- remaining output as unintended and marking the test as a failure. This policy
- ensures that tests intentionally handle output by either validating,
- clearing, or ignoring it, thereby maintaining a clean and predictable testing
- environment.</p>
-
- <h2>Policy Guidelines</h2>
- <ul>
- <li><strong>1. Define an Output Handling Policy:</strong></li>
- <ul>
- <li><span class="description">Every test should have a defined policy for how it handles output generated by the fut. There are three primary approaches:</span></li>
- <ul>
- <li><span class="description"><strong>Validation:</strong> Check the fut output and confirm its correctness.</span></li>
- <li><span class="description"><strong>Intentional Ignoring:</strong> If output validation isn’t relevant to the test, the output should still be acknowledged and cleared to avoid unintended failures.</span></li>
- <li><span class="description"><strong>Mixed Policy:</strong> A test can validate specific output while ignoring others, as long as any remaining output is cleared before the test returns.</span></li>
- </ul>
- </ul>
-
- <li><strong>2. When to Validate Output:</strong></li>
- <ul>
- <li><span class="description">If the test expects specific output from the fut, it should retrieve and check the content on <code>stdout</code> and <code>stderr</code> using methods like <code>io.get_out_content()</code> or <code>io.get_err_content()</code>. The test passes if the actual output matches the expected content.</span></li>
- <li><span class="description">After validating, the test should clear the output buffers (<code>io.clear_buffers()</code>) if further output handling is not needed to avoid residual content.</span></li>
- </ul>
-
- <li><strong>3. When to Ignore Output:</strong></li>
- <ul>
- <li><span class="description">If the test does not require output verification, it should acknowledge the output by clearing the streams before returning.</span></li>
- <li><span class="description">This approach signals to <code>TestBench</code> that any output generated was intentionally disregarded and avoids marking the test as failed.</span></li>
- </ul>
-
- <li><strong>4. Failure Due to Residual Output:</strong></li>
- <ul>
- <li><span class="description"><strong>No Defined Policy:</strong> If a test leaves output on the streams without a clear handling policy (validation or intentional clearing), <code>TestBench</code> will flag this as a failure.</span></li>
- <li><span class="description"><strong>Ensuring Clean Tests:</strong> To avoid unexpected failures, verify that each test has no residual output before returning by either validating or clearing output streams.</span></li>
- </ul>
- </ul>
-
- <h2>Example Scenarios</h2>
- <ul>
- <li><strong>1. Output Validation:</strong></li>
- <pre><code>public static Boolean test_with_output_verification(IO io) {
- System.out.println("Expected output");
- String output = io.get_out_content();
- boolean isCorrect = output.equals("Expected output");
- io.clear_buffers(); // Clear remaining content if not needed
- return isCorrect;
-}</code></pre>
-
- <li><strong>2. Ignoring Output:</strong></li>
- <pre><code>public static Boolean test_without_output_verification(IO io) {
- System.out.println("Output not needed for this test");
- io.clear_buffers(); // Clear output since it’s intentionally ignored
- return true;
-}</code></pre>
- </ul>
-
- <h2>Summary</h2>
- <p>Each test should manage its output streams with an intentional policy:</p>
- <ul>
- <li><span class="description"><strong>Validate output</strong> if it is relevant to the test.</span></li>
- <li><span class="description"><strong>Acknowledge and clear output</strong> if it is not relevant.</span></li>
- <li><span class="description"><strong>Avoid residual output</strong> to prevent <code>TestBench</code> from marking the test as failed.</span></li>
- </ul>
- <p>This approach ensures that tests remain clean and focused on their primary objectives without unintended side effects from unhandled output.</p>
- </div>
-</body>
-</html>
--- /dev/null
+
+I did not use Mosaic to test itself, although Test_MockClass_0 comes close.
+
+TestMockClass has the general form of a test that uses Mosaic, though MockClass
+itself does not exist. This general form:
+
+*. For block testing there will conventionally be a correspondence between
+   the test classes and the classes being tested, so each test class will
+   be named:
+
+ `Test_<class being tested>_<number>'.
+
+ Typically the lowest `number` will be zero, and it will correspond to
+ smoke tests.
+
+* A `Test_<class being tested>_<number>' class will have inside of it another class
+ called `TestSuite`. By convention each method in this class is a test routine. For block
+ testing a test routine will have a correspondence to the method being tested,
+ and has the name:
+
+ `test_<method_name>_<number>`.
+
+ This convention is not always followed; note that in `Test_MockClass_0` you will
+ notice that tests are named after the expected results rather than a method
+ that is being tested.
+
+ Test routines can run a number of tests on a RUT, each of which is referred to
+ as a test case. So we have this hierarchy:
+
+ `Test_<class being tested>_<number>' > `TestSuite` > test_routine > test_case
+
+*. The main call for a Test class will parse arguments and options, setup
+ the testing environment, make a `TestSuite` object, pass said object to
+ the `TestBench`, then take the return value from the `TestBench`, and set
+ the return value from the test.
+
+* A test routine will return `true` if the test passes. Any other return
+ value, any uncaught exception, or anything left on the stdout or stderr
+ will cause the test to be interpreted as a failure. (On the todo list is
+ an item to make unused stdin an error as well.)
+
+* A test routine (and the contained test cases) should not itself print
+  any messages. Generally it has always been this way, even before the Testbench
+  redirected the I/O streams. Rather the test should simply return true
+ for a pass. This is because in testing we are looking for function failures,
+ and not for faults. The fault will be searched for later in the debugger.
+
+ If a test routine has an internal error, such that the routine itself
+ has a problem (not the RUT it is testing), this can be put in a log
+ entry. See the Mosaic_Util for the log methods.
+++ /dev/null
-<!DOCTYPE html>
-<html lang="en">
-<head>
- <meta charset="UTF-8">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <link href="https://fonts.googleapis.com/css2?family=Noto+Sans+JP&display=swap" rel="stylesheet">
- <title>About the Tests - Mosaic Project</title>
- <style>
- html { font-size: 16px; }
- body {
- font-family: 'Noto Sans JP', Arial, sans-serif;
- background-color: hsl(0, 0%, 10%);
- color: hsl(42, 100%, 80%);
- padding: 2rem;
- margin: 0;
- }
- .page { padding: 1.25rem; margin: 1.25rem auto; max-width: 46.875rem; background-color: hsl(0, 0%, 0%); box-shadow: 0 0 0.625rem hsl(42, 100%, 50%); }
- ul, li { font-size: 1rem; list-style-type: none; }
- li::before { content: "📄 "; margin-right: 0.3125rem; }
- li { margin-bottom: 0.3125rem; }
- .description { margin-left: 0.625rem; color: hsl(42, 100%, 75%); }
- code { font-family: 'Courier New', Courier, monospace; background-color: hsl(0, 0%, 25%); color: hsl(42, 100%, 90%); padding: 0.125rem 0.25rem; border-radius: 0.1875rem; font-size: 90%; }
- h1 { text-align: center; color: hsl(42, 100%, 84%); text-transform: uppercase; margin-bottom: 1.25rem; }
- h2 { color: hsl(42, 100%, 84%); text-transform: uppercase; margin-top: 2.5rem; }
- p { color: hsl(42, 100%, 90%); margin-bottom: 1.25rem; text-align: justify; }
- </style>
-</head>
-<body>
- <div class="page">
-
- <h1>About the Tests</h1>
-
- <p>This document provides an operational guide for running and expanding
- tests of the Mosaic TestBench. I.e. it is not about running the Mosaic
- TestBench, rather it is about testing the Mosaic TestBench</p>
-
- <p>These tests are primarily ad hoc, as we avoid using the TestBench to test
- itself. Despite being ad hoc, the tests follow a core philosophy: the goal
- is to identify which functions fail, rather than diagnose why they fail. In
- the argot of the field, we are looking for function failures ad are not
- identifying falts. Hence, tests do not print messages but signal if they
- fail, or not.</p>
-
- <p>Accordingly, only pass/fail counts and the names of failing functions are
- recorded. For more detailed investigation, for locating the fault, the
- developer can run a failed test using a debugging tool such
- as <code>jdb</code>.</p>
-
- <h2>1. Running the Tests</h2>
- <p>To run all tests and gather results, follow these steps:</p>
- <ol>
- <li>Make sure no old files are hanging about by running <code>clean_build_directories</code>.</li>
- <li>Run <code>make</code> to compile the project and prepare all test class shell wrappers.</li>
- <li>Run <code>run_tests</code> to run the tests. Each test class will output
- its results, identifying tests that failed.</li>
- </ol>
-
- <h2>2. Ad Hoc Block Tests</h2>
- <p>The block tests are ad hoc and do not use TestBench directly. It would
- have been nice to have used the TestBench, but doing so would have
- introduce unnecessary complexity.</p>
- <ul>
- <li><strong>2.1 Each test group is a class.</strong></li>
- <ul>
- <li><span class="description">Each group of related tests is organized within its own class, keeping tests modular and focused.</span></li>
- </ul>
- <li><strong>2.2 Key Methods</strong></li>
- <ul>
- <li><span class="description"><code>main</code>: The entry point for command-line execution.</span></li>
- <li><span class="description"><code>run</code>: Aggregates test results, runs all methods in the class, and reports outcomes.</span></li>
- </ul>
- <li><strong>2.3 Helper and Test Methods</strong></li>
- <ul>
- <li><span class="description">Test methods take no arguments and return <code>true</code> if they pass; any other return value counts as a failure.</span></li>
- </ul>
- </ul>
-
- <h2>3. Integration Tests</h2>
- <p>After completion of the ad hoc block testing, integration of the blocks
- is tested with one or more tests that make use of the TestBench. The
- TestBench framework offers a structured testing approach. Classes using
- TestBench are referred to as Test Suites, each method within which is
- treated as an independent test. </p>
- <ul>
- <li><strong>3.1 Test Suites</strong></li>
- <ul>
-
- <li><span class="description">Each Test Suite class extends
- the <code>TestBench</code> class. Each method in a Test Suite runs as a
- separate test when the suite is executed.</span></li>
- </ul>
- <li><strong>3.2 Method Structure</strong></li>
- <ul>
- <li><span class="description">Each test method accepts a
- single <code>IO</code> argument (a utility class handling input/output
- streams) and returns a <code>Object</code>. Only a return value
- of Boolean <code>true</code> is counted as a pass. Any other return
- value, any uncaught exceptions, or any data left on stdin, or stdout
- are taken to mean the test failed.</span></li>
- </ul>
- </ul>
-
- <h2>4. Adding a Test</h2>
- <p>To extend the testing suite, new tests can be added as follows:</p>
- <ul>
- <li><strong>4.1 Create or Extend a Test Class</strong></li>
- <ul>
- <li><span class="description">Add a new test class as required or append methods to an existing one.</span></li>
- </ul>
- <li><strong>4.2 Integrate the Test Class</strong></li>
- <ul>
- <li><span class="description">For classes with a <code>main</code> function, add the class name to <code>tool/shell_wrapper_list</code> to ensure it is included in the test environment.</span></li>
- </ul>
- </ul>
-
- </div>
-</body>
-</html>
+++ /dev/null
-This shows all tests passing.
-
-It can be a bit confusing to read, but the failure results from the tests named
-'test_failure_X' are actually passing when they report that they failed. This is
-because we are testing a test bench, and we are testing the feature of the test
-bench where it fails bad code.
-
-> cd Mosaic
-> source env_tester
-> emacs &
-
-...
-
-2024-11-04T11:23:08Z[Mosaic_tester]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
-> clean_build_directories
-+ cd /var/user_data/Thomas-developer/Mosaic/tester
-+ rm -r scratchpad/Test0.class scratchpad/Test_IO.class 'scratchpad/Test_MockClass$TestSuite.class' scratchpad/Test_MockClass.class scratchpad/Test_TestBench.class scratchpad/Test_Util.class
-+ rm jvm/Test_Mosaic.jar
-+ rm shell/Test0 shell/Test_IO shell/Test_MockClass shell/Test_TestBench shell/Test_Util
-+ set +x
-clean_build_directories done.
-
-2024-11-04T11:23:23Z[Mosaic_tester]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
-> make
-Compiling files...
-+ cd /var/user_data/Thomas-developer/Mosaic/tester
-+ javac -g -d scratchpad javac/Test0.java javac/Test_IO.java javac/Test_MockClass.java javac/Test_TestBench.java javac/Test_Util.java
-+ jar cf jvm/Test_Mosaic.jar -C scratchpad .
-+ set +x
-Creating shell wrappers...
-tester/tool/make done.
-
-2024-11-04T11:23:27Z[Mosaic_tester]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
-> run_tests
-Running Test0...Test0 passed
-Running Test_Util...Test_Util passed
-Running Test_IO...Test_IO passed
-Running Test_TestBench...Expected output: Structural problem message for dummy_invalid_return_method.
-Structural problem: dummy_invalid_return_method does not return Boolean.
-Test_TestBench Total tests run: 3
-Test_TestBench Total tests passed: 3
-Test_TestBench Total tests failed: 0
-Running Test_MockClass...Test failed: 'test_failure_0' reported failure.
-Structural problem: test_failure_1 does not return Boolean.
-Error: test_failure_1 has an invalid structure.
-Test failed: 'test_failure_2' threw an exception: java.lang.reflect.InvocationTargetException
-Test failed: 'test_failure_3' produced extraneous stdout.
-Test failed: 'test_failure_4' produced extraneous stderr.
-Total tests run: 9
-Total tests passed: 4
-Total tests failed: 5
-
-2024-11-04T11:23:33Z[Mosaic_tester]
-Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
-> clean_build_directories
-+ cd /var/user_data/Thomas-developer/Mosaic/tester
-+ rm -r scratchpad/Test0.class scratchpad/Test_IO.class 'scratchpad/Test_MockClass$TestSuite.class' scratchpad/Test_MockClass.class scratchpad/Test_TestBench.class scratchpad/Test_Util.class
-+ rm jvm/Test_Mosaic.jar
-+ rm shell/Test0 shell/Test_IO shell/test_log.txt shell/Test_MockClass shell/Test_TestBench shell/Test_Util
-+ set +x
-clean_build_directories done.
--- /dev/null
+This shows all tests passing.
+
+Tests named `test_failure_<X>` should fail. We need to know that the `TestBench`
+can fail tests, so this is part of testing the `TestBench`.
+
+> cd Mosaic
+> source env_tester
+> emacs &
+
+...
+
+2024-11-04T11:23:08Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
+> clean_build_directories
++ cd /var/user_data/Thomas-developer/Mosaic/tester
++ rm -r scratchpad/Test0.class scratchpad/Test_IO.class 'scratchpad/Test_MockClass$TestSuite.class' scratchpad/Test_MockClass.class scratchpad/Test_TestBench.class scratchpad/Test_Util.class
++ rm jvm/Test_Mosaic.jar
++ rm shell/Test0 shell/Test_IO shell/Test_MockClass shell/Test_TestBench shell/Test_Util
++ set +x
+clean_build_directories done.
+
+2024-11-04T11:23:23Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
+> make
+Compiling files...
++ cd /var/user_data/Thomas-developer/Mosaic/tester
++ javac -g -d scratchpad javac/Test0.java javac/Test_IO.java javac/Test_MockClass.java javac/Test_TestBench.java javac/Test_Util.java
++ jar cf jvm/Test_Mosaic.jar -C scratchpad .
++ set +x
+Creating shell wrappers...
+tester/tool/make done.
+
+2024-11-04T11:23:27Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
+> run_tests
+Running Test0...Test0 passed
+Running Test_Util...Test_Util passed
+Running Test_IO...Test_IO passed
+Running Test_TestBench...Expected output: Structural problem message for dummy_invalid_return_method.
+Structural problem: dummy_invalid_return_method does not return Boolean.
+Test_TestBench Total tests run: 3
+Test_TestBench Total tests passed: 3
+Test_TestBench Total tests failed: 0
+Running Test_MockClass...Test failed: 'test_failure_0' reported failure.
+Structural problem: test_failure_1 does not return Boolean.
+Error: test_failure_1 has an invalid structure.
+Test failed: 'test_failure_2' threw an exception: java.lang.reflect.InvocationTargetException
+Test failed: 'test_failure_3' produced extraneous stdout.
+Test failed: 'test_failure_4' produced extraneous stderr.
+Total tests run: 9
+Total tests passed: 4
+Total tests failed: 5
+
+2024-11-04T11:23:33Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
+> clean_build_directories
++ cd /var/user_data/Thomas-developer/Mosaic/tester
++ rm -r scratchpad/Test0.class scratchpad/Test_IO.class 'scratchpad/Test_MockClass$TestSuite.class' scratchpad/Test_MockClass.class scratchpad/Test_TestBench.class scratchpad/Test_Util.class
++ rm jvm/Test_Mosaic.jar
++ rm shell/Test0 shell/Test_IO shell/test_log.txt shell/Test_MockClass shell/Test_TestBench shell/Test_Util
++ set +x
+clean_build_directories done.
--- /dev/null
+This shows all tests passing.
+
+Tests named `test_failure_<X>` should fail. We need to know that the `TestBench`
+can fail tests, so this is part of testing the `TestBench`.
+
+Starting the environment:
+
+2024-11-08T07:41:48Z[]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer§
+> bash
+
+2024-11-08T07:41:51Z[]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer§
+> cd Mosaic
+
+2024-11-08T07:41:54Z[]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic§
+> . env_tester
+REPO_HOME /var/user_data/Thomas-developer/Mosaic
+PROJECT Mosaic
+ENV tool_shared/bespoke/env
+ENV tester/tool/env
+
+2024-11-08T07:42:04Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/tester§
+> emacs &
+
+Running the tests:
+
+2024-11-08T09:58:40Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/tester§
+> clean_build_directories
++ cd /var/user_data/Thomas-developer/Mosaic/tester
++ rm -r scratchpad/Test0.class scratchpad/Test_IO.class 'scratchpad/Test_MockClass_0$TestSuite.class' scratchpad/Test_MockClass_0.class scratchpad/Test_Testbench.class scratchpad/Test_Util.class
++ rm jvm/Test_Mosaic.jar
++ rm shell/Test0 shell/Test_IO shell/test_log.txt shell/Test_MockClass_0 shell/Test_Testbench shell/Test_Util
++ set +x
+clean_build_directories done.
+
+2024-11-08T09:58:46Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/tester§
+> make
+Compiling files...
++ cd /var/user_data/Thomas-developer/Mosaic/tester
++ javac -g -d scratchpad javac/Test0.java javac/Test_IO.java javac/Test_MockClass_0.java javac/Test_Testbench.java javac/Test_Util.java
++ jar cf jvm/Test_Mosaic.jar -C scratchpad .
++ set +x
+Creating shell wrappers...
+tester/tool/make done.
+
+2024-11-08T09:58:50Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/tester§
+> run_tests
+Running Test0...Test0 passed
+Running Test_Util...Test_Util passed
+Running Test_IO...Test_IO passed
+Running Test_Testbench...Expected output: Structural problem message for dummy_invalid_return_method.
+Structural problem: dummy_invalid_return_method does not return Boolean.
+Test_Testbench Total tests run: 3
+Test_Testbench Total tests passed: 3
+Test_Testbench Total tests failed: 0
+Running Test_MockClass_0...Test failed: 'test_failure_0' reported failure.
+Structural problem: test_failure_1 does not return Boolean.
+Error: test_failure_1 has an invalid structure.
+Test failed: 'test_failure_2' threw an exception: java.lang.reflect.InvocationTargetException
+Test failed: 'test_failure_3' produced extraneous stdout.
+Test failed: 'test_failure_4' produced extraneous stderr.
+Total tests run: 9
+Total tests passed: 4
+Total tests failed: 5
+
+2024-11-08T09:58:55Z[Mosaic_tester]
+Thomas-developer@Blossac§/var/user_data/Thomas-developer/Mosaic/tester§
+>
-import com.ReasoningTechnology.Mosaic.Util;
+import com.ReasoningTechnology.Mosaic.Mosaic_Util;
/*
Test Zero
condition[0] = test_is_true();
int i = 0;
- if( !Util.all(condition) ){
+ if( !Mosaic_Util.all(condition) ){
System.out.println("Test0 failed");
return 1;
}
+++ /dev/null
-/*
-Component smoke test. At least call each method of each class.
-
-*/
-
-import com.ReasoningTechnology.Ariadne.*;
-import com.ReasoningTechnology.TestBench.*;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.lang.reflect.Method;
-import java.util.List;
-import java.util.Map;
-
-public class TestTestBench extends TestBench{
-
- public static class TestSuite{
-
- TestSuite(){
- }
-
- public Boolean test_pass(ByteArrayOutputStream out_content, ByteArrayOutputStream err_content){
- return true;
- }
-
- public Boolean test_fail_0(ByteArrayOutputStream out_content, ByteArrayOutputStream err_content){
- return false;
- }
-
- // Tests if exception uncaught by the test correctly causes a failure from the TestBench.
- public static Boolean test_fail_1() throws Exception {
- int randomInt = (int) (Math.random() * 100); // Generate a random integer
- // Always returns true, but Java will not complain that following code is unreachable
- if(
- (randomInt % 2 != 0 && ((randomInt * randomInt - 1) % 8 == 0))
- || (randomInt % 2 == 0 && (randomInt * randomInt) % 4 == 0)
- ){
- throw new Exception("Condition met, error thrown.");
- }
-
- return true; // If the condition fails, return true
- }
-
- }
-
- // Method to run all tests
- public static void test_TestBench(){
- System.out.println("TestTestBench: running tests. Note that two failures is normal");
- TestSuite test_suite = new TestSuite();
- TestBench.run( test_suite );
- }
-
- // Main function to provide a shell interface for running tests
- public static void main(String[] args){
- // tests currently takes no arguments or options
- test_TestBench(); // Calls the method to run all tests
- }
-
-}
-
-import com.ReasoningTechnology.Mosaic.IO;
-import com.ReasoningTechnology.Mosaic.Util;
+import com.ReasoningTechnology.Mosaic.Mosaic_IO;
+import com.ReasoningTechnology.Mosaic.Mosaic_Util;
public class Test_IO{
}
public static int run(){
- IO io = new IO();
+ Mosaic_IO io = new Mosaic_IO();
Boolean[] condition = new Boolean[3];
// Redirect IO streams
// Restore original IO streams
io.restore();
- if(!Util.all(condition)){
+ if(!Mosaic_Util.all(condition)){
System.out.println("Test_IO failed");
return 1;
}
+++ /dev/null
-/* --------------------------------------------------------------------------------
- Integration tests directly simulate the use cases for TestBench.
- Each test method validates a specific feature of TestBench ,including pass,
- fail ,error handling ,and I/O interactions.
-*/
-
-import java.util.Scanner;
-import com.ReasoningTechnology.Mosaic.IO;
-import com.ReasoningTechnology.Mosaic.TestBench;
-
-public class Test_MockClass{
-
- public class TestSuite{
-
- public TestSuite() {
- // no special initialization of data for this test
- }
-
- public Boolean test_failure_0(IO io){
- return false;
- }
-
- // returns a non-Boolean
- public Object test_failure_1(IO io){
- return 1;
- }
-
- // has an uncaught error
- public Boolean test_failure_2(IO io) throws Exception {
- throw new Exception("Intentional exception for testing error handling");
- }
-
- // extraneous characters on stdout
- public Boolean test_failure_3(IO io) throws Exception {
- System.out.println("Intentional extraneous chars to stdout for testing");
- return true;
- }
-
- // extraneous characters on stderr
- public Boolean test_failure_4(IO io) throws Exception {
- System.err.println("Intentional extraneous chars to stderr for testing.");
- return true;
- }
-
- public Boolean test_success_0(IO io){
- return true;
- }
-
- // pushing input for testing
-
- public Boolean test_success_1(IO io){
- io.push_input("input for the fut");
-
- Scanner scanner = new Scanner(System.in);
- String result = scanner.nextLine();
- scanner.close();
-
- Boolean flag = result.equals("input for the fut");
- return flag;
- }
-
- // checking fut stdout
- public Boolean test_success_2(IO io){
- System.out.println("fut stdout"); // suppose the fut does this:
- String peek_at_futs_output = io.get_out_content();
- Boolean flag0 = io.has_out_content();
- Boolean flag1 = peek_at_futs_output.equals("fut stdout\n");
- io.clear_buffers(); // otherwise extraneous chars will cause an fail
- return flag0 && flag1;
- }
-
- // checking fut stderr
- public Boolean test_success_3(IO io){
- System.err.print("fut stderr"); // suppose the fut does this:
- String peek_at_futs_output = io.get_err_content();
- Boolean flag0 = io.has_err_content();
- Boolean flag1 = peek_at_futs_output.equals("fut stderr");
- io.clear_buffers(); // otherwise extraneous chars will cause an fail
- return flag0 && flag1;
- }
-
- }
-
- public static void main(String[] args) {
- Test_MockClass outer = new Test_MockClass();
- TestSuite suite = outer.new TestSuite(); // Non-static instantiation
-
- /* for debug
- IO io = new IO();
- io.redirect();
- suite.test_success_2(io);
- */
-
- int result = TestBench.run(suite); // Pass the suite instance to TestBench
- System.exit(result);
- }
-
-}
--- /dev/null
+/* --------------------------------------------------------------------------------
+ Integration tests directly simulate the use cases for Mosaic_Testbench.
+   Each test method validates a specific feature of Mosaic_Testbench, including pass,
+   fail, error handling, and I/O interactions.
+*/
+
+import java.util.Scanner;
+import com.ReasoningTechnology.Mosaic.Mosaic_IO;
+import com.ReasoningTechnology.Mosaic.Mosaic_Testbench;
+
+public class Test_MockClass_0{
+
+ public class TestSuite{
+
+ public TestSuite() {
+ // no special initialization of data for this test
+ }
+
+ public Boolean test_failure_0(Mosaic_IO io){
+ return false;
+ }
+
+ // returns a non-Boolean
+ public Object test_failure_1(Mosaic_IO io){
+ return 1;
+ }
+
+ // has an uncaught error
+ public Boolean test_failure_2(Mosaic_IO io) throws Exception {
+ throw new Exception("Intentional exception for testing error handling");
+ }
+
+ // extraneous characters on stdout
+ public Boolean test_failure_3(Mosaic_IO io) throws Exception {
+ System.out.println("Intentional extraneous chars to stdout for testing");
+ return true;
+ }
+
+ // extraneous characters on stderr
+ public Boolean test_failure_4(Mosaic_IO io) throws Exception {
+ System.err.println("Intentional extraneous chars to stderr for testing.");
+ return true;
+ }
+
+ public Boolean test_success_0(Mosaic_IO io){
+ return true;
+ }
+
+ // pushing input for testing
+
+ public Boolean test_success_1(Mosaic_IO io){
+ io.push_input("input for the fut");
+
+ Scanner scanner = new Scanner(System.in);
+ String result = scanner.nextLine();
+ scanner.close();
+
+ Boolean flag = result.equals("input for the fut");
+ return flag;
+ }
+
+ // checking fut stdout
+ public Boolean test_success_2(Mosaic_IO io){
+ System.out.println("fut stdout"); // suppose the fut does this:
+ String peek_at_futs_output = io.get_out_content();
+ Boolean flag0 = io.has_out_content();
+ Boolean flag1 = peek_at_futs_output.equals("fut stdout\n");
+      io.clear_buffers(); // otherwise extraneous chars will cause a failure
+ return flag0 && flag1;
+ }
+
+ // checking fut stderr
+ public Boolean test_success_3(Mosaic_IO io){
+ System.err.print("fut stderr"); // suppose the fut does this:
+ String peek_at_futs_output = io.get_err_content();
+ Boolean flag0 = io.has_err_content();
+ Boolean flag1 = peek_at_futs_output.equals("fut stderr");
+      io.clear_buffers(); // otherwise extraneous chars will cause a failure
+ return flag0 && flag1;
+ }
+
+ }
+
+ public static void main(String[] args) {
+ Test_MockClass_0 outer = new Test_MockClass_0();
+ TestSuite suite = outer.new TestSuite(); // Non-static instantiation
+
+ /* for debug
+ Mosaic_IO io = new Mosaic_IO();
+ io.redirect();
+ suite.test_success_2(io);
+ */
+
+ int result = Mosaic_Testbench.run(suite); // Pass the suite instance to Mosaic_Testbench
+ System.exit(result);
+ }
+
+}
+++ /dev/null
-import java.lang.reflect.Method;
-import com.ReasoningTechnology.Mosaic.IO;
-import com.ReasoningTechnology.Mosaic.TestBench;
-
-public class Test_TestBench {
-
- /* --------------------------------------------------------------------------------
- Test methods to validate TestBench functionality
- Each method tests a specific aspect of the TestBench class, with a focus on
- ensuring that well-formed and ill-formed test cases are correctly identified
- and handled.
- */
-
- // Tests if a correctly formed method is recognized as well-formed by TestBench
- public static Boolean test_method_is_wellformed_0(IO io) {
- try {
- Method validMethod = Test_TestBench.class.getMethod("dummy_test_method", IO.class);
- return Boolean.TRUE.equals(TestBench.method_is_wellformed(validMethod));
- } catch (NoSuchMethodException e) {
- return false;
- }
- }
-
- // Tests if a method with an invalid return type is identified as malformed by TestBench
- public static Boolean test_method_is_wellformed_1(IO io) {
- System.out.println("Expected output: Structural problem message for dummy_invalid_return_method.");
- try {
- Method invalidReturnMethod = Test_TestBench.class.getMethod("dummy_invalid_return_method", IO.class);
- return Boolean.FALSE.equals(TestBench.method_is_wellformed(invalidReturnMethod));
- } catch (NoSuchMethodException e) {
- return false;
- }
- }
-
- // Tests if a valid test method runs successfully with the TestBench
- public static Boolean test_run_test_0(IO io) {
- try {
- Method validMethod = Test_TestBench.class.getMethod("dummy_test_method", IO.class);
- return Boolean.TRUE.equals(TestBench.run_test(new Test_TestBench(), validMethod, io));
- } catch (NoSuchMethodException e) {
- return false;
- }
- }
-
- /* Dummy methods for testing */
- public Boolean dummy_test_method(IO io) {
- return true; // Simulates a passing test case
- }
-
- public void dummy_invalid_return_method(IO io) {
- // Simulates a test case with an invalid return type
- }
-
- /* --------------------------------------------------------------------------------
- Manually run all tests and summarize results without using TestBench itself.
- Each test's name is printed if it fails, and only pass/fail counts are summarized.
- */
- public static int run() {
- int passed_tests = 0;
- int failed_tests = 0;
- IO io = new IO();
-
- if (test_method_is_wellformed_0(io)) passed_tests++; else { System.out.println("test_method_is_wellformed_0"); failed_tests++; }
- if (test_method_is_wellformed_1(io)) passed_tests++; else { System.out.println("test_method_is_wellformed_1"); failed_tests++; }
- if (test_run_test_0(io)) passed_tests++; else { System.out.println("test_run_test_0"); failed_tests++; }
-
- // Summary for all the tests
- System.out.println("Test_TestBench Total tests run: " + (passed_tests + failed_tests));
- System.out.println("Test_TestBench Total tests passed: " + passed_tests);
- System.out.println("Test_TestBench Total tests failed: " + failed_tests);
-
- return (failed_tests > 0) ? 1 : 0;
- }
-
- /* --------------------------------------------------------------------------------
- Main method for shell interface, sets the exit status based on test results
- */
- public static void main(String[] args) {
- int exitCode = run();
- System.exit(exitCode);
- }
-}
--- /dev/null
+import java.lang.reflect.Method;
+import com.ReasoningTechnology.Mosaic.Mosaic_IO;
+import com.ReasoningTechnology.Mosaic.Mosaic_Testbench;
+
+public class Test_Testbench {
+
+ /* --------------------------------------------------------------------------------
+ Test methods to validate Testbench functionality
+ Each method tests a specific aspect of the Testbench class, with a focus on
+ ensuring that well-formed and ill-formed test cases are correctly identified
+ and handled.
+ */
+
+ // Tests if a correctly formed method is recognized as well-formed by Testbench
+ public static Boolean test_method_is_wellformed_0(Mosaic_IO io) {
+ try {
+ Method validMethod = Test_Testbench.class.getMethod("dummy_test_method", Mosaic_IO.class);
+ return Boolean.TRUE.equals(Mosaic_Testbench.method_is_wellformed(validMethod));
+ } catch (NoSuchMethodException e) {
+ return false;
+ }
+ }
+
+ // Tests if a method with an invalid return type is identified as malformed by Testbench
+ public static Boolean test_method_is_wellformed_1(Mosaic_IO io) {
+ System.out.println("Expected output: Structural problem message for dummy_invalid_return_method.");
+ try {
+ Method invalidReturnMethod = Test_Testbench.class.getMethod("dummy_invalid_return_method", Mosaic_IO.class);
+ return Boolean.FALSE.equals(Mosaic_Testbench.method_is_wellformed(invalidReturnMethod));
+ } catch (NoSuchMethodException e) {
+ return false;
+ }
+ }
+
+ // Tests if a valid test method runs successfully with the Testbench
+ public static Boolean test_run_test_0(Mosaic_IO io) {
+ try {
+ Method validMethod = Test_Testbench.class.getMethod("dummy_test_method", Mosaic_IO.class);
+ return Boolean.TRUE.equals(Mosaic_Testbench.run_test(new Test_Testbench(), validMethod, io));
+ } catch (NoSuchMethodException e) {
+ return false;
+ }
+ }
+
+ /* Dummy methods for testing */
+ public Boolean dummy_test_method(Mosaic_IO io) {
+ return true; // Simulates a passing test case
+ }
+
+ public void dummy_invalid_return_method(Mosaic_IO io) {
+ // Simulates a test case with an invalid return type
+ }
+
+ /* --------------------------------------------------------------------------------
+ Manually run all tests and summarize results without using Testbench itself.
+ Each test's name is printed if it fails, and only pass/fail counts are summarized.
+ */
+ public static int run() {
+ int passed_tests = 0;
+ int failed_tests = 0;
+ Mosaic_IO io = new Mosaic_IO();
+
+ if (test_method_is_wellformed_0(io)) passed_tests++; else { System.out.println("test_method_is_wellformed_0"); failed_tests++; }
+ if (test_method_is_wellformed_1(io)) passed_tests++; else { System.out.println("test_method_is_wellformed_1"); failed_tests++; }
+ if (test_run_test_0(io)) passed_tests++; else { System.out.println("test_run_test_0"); failed_tests++; }
+
+ // Summary for all the tests
+ System.out.println("Test_Testbench Total tests run: " + (passed_tests + failed_tests));
+ System.out.println("Test_Testbench Total tests passed: " + passed_tests);
+ System.out.println("Test_Testbench Total tests failed: " + failed_tests);
+
+ return (failed_tests > 0) ? 1 : 0;
+ }
+
+ /* --------------------------------------------------------------------------------
+ Main method for shell interface, sets the exit status based on test results
+ */
+ public static void main(String[] args) {
+ int exitCode = run();
+ System.exit(exitCode);
+ }
+}
-import com.ReasoningTechnology.Mosaic.Util;
+import com.ReasoningTechnology.Mosaic.Mosaic_Util;
/*
Test_Util
public static Boolean test_all(){
// Test with zero condition
Boolean[] condition0 = {};
- Boolean result = !Util.all(condition0); // Empty condition list is false.
+ Boolean result = !Mosaic_Util.all(condition0); // Empty condition list is false.
// Test with one condition
Boolean[] condition1_true = {true};
Boolean[] condition1_false = {false};
- result &= Util.all(condition1_true); // should return true
- result &= !Util.all(condition1_false); // should return false
+ result &= Mosaic_Util.all(condition1_true); // should return true
+ result &= !Mosaic_Util.all(condition1_false); // should return false
// Test with two condition
Boolean[] condition2_true = {true, true};
Boolean[] condition2_false1 = {true, false};
Boolean[] condition2_false2 = {false, true};
Boolean[] condition2_false3 = {false, false};
- result &= Util.all(condition2_true); // should return true
- result &= !Util.all(condition2_false1); // should return false
- result &= !Util.all(condition2_false2); // should return false
- result &= !Util.all(condition2_false3); // should return false
+ result &= Mosaic_Util.all(condition2_true); // should return true
+ result &= !Mosaic_Util.all(condition2_false1); // should return false
+ result &= !Mosaic_Util.all(condition2_false2); // should return false
+ result &= !Mosaic_Util.all(condition2_false3); // should return false
// Test with three condition
Boolean[] condition3_false1 = {true, true, false};
Boolean[] condition3_false2 = {true, false, true};
Boolean[] condition3_false3 = {false, true, true};
Boolean[] condition3_false4 = {false, false, false};
- result &= !Util.all(condition3_false1); // should return false
- result &= Util.all(condition3_true); // should return true
- result &= !Util.all(condition3_false2); // should return false
- result &= !Util.all(condition3_false3); // should return false
- result &= !Util.all(condition3_false4); // should return false
+ result &= !Mosaic_Util.all(condition3_false1); // should return false
+ result &= Mosaic_Util.all(condition3_true); // should return true
+ result &= !Mosaic_Util.all(condition3_false2); // should return false
+ result &= !Mosaic_Util.all(condition3_false3); // should return false
+ result &= !Mosaic_Util.all(condition3_false4); // should return false
return result;
}
public static Boolean test_all_set_false(){
Boolean[] condition_list = {true, true, true};
- Util.all_set_false(condition_list);
+ Mosaic_Util.all_set_false(condition_list);
return !condition_list[0] && !condition_list[1] && !condition_list[2];
}
public static Boolean test_all_set_true(){
Boolean[] condition_list = {false, false, false};
- Util.all_set_true(condition_list);
+ Mosaic_Util.all_set_true(condition_list);
return condition_list[0] && condition_list[1] && condition_list[2];
}
+++ /dev/null
-#!/bin/env bash
-java Test_MockClass
--- /dev/null
+#!/usr/bin/env bash
+java Test_MockClass_0
+++ /dev/null
-#!/bin/env bash
-java Test_TestBench
--- /dev/null
+#!/usr/bin/env bash
+java Test_Testbench
-2024-11-04T13:53:57.865246Z -----------------------------------------------------------
+2024-11-08T09:58:55.370638Z -----------------------------------------------------------
Test: test_failure_3
Stream: stdout
Output:
Intentional extraneous chars to stdout for testing
-2024-11-04T13:53:57.874296Z -----------------------------------------------------------
+2024-11-08T09:58:55.380370Z -----------------------------------------------------------
Test: test_failure_4
Stream: stderr
Output:
fi
# space separated list of shell interface wrappers
-echo Test0 Test_Util Test_IO Test_TestBench Test_MockClass
+echo Test0 Test_Util Test_IO Test_Testbench Test_MockClass_0
#!/usr/bin/env bash
+script_afp=$(realpath "${BASH_SOURCE[0]}")
+
+# input guards
+
+ env_must_be="tool_shared/bespoke/env"
+ error=false
+ if [ "$ENV" != "$env_must_be" ]; then
+    echo "$script_afp:: error: must be run in the $env_must_be environment"
+ error=true
+ fi
+ if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
+ echo "$script_afp:: This script must be sourced, not executed."
+ error=true
+ fi
+  if $error; then return 1 2>/dev/null || exit 1; fi
-script_name=$(basename ${BASH_SOURCE[0]})
-if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
- echo "$script_name must be sourced, not executed. Exiting."
- exit 1
-fi
+export PATH=\
+"$REPO_HOME"/tool_shared/bespoke/\
+:"$PATH"
-if [ -z "$ENV_BASE" ]; then
- script_path="$(dirname "$(realpath "${BASH_SOURCE[0]}")")"
- source "${script_path}/env_base"
-fi
+# expose sneaky hidden files
+alias ls="ls -a"
-ENV_PM=true
+# some feedback to show all went well
-PROJECT="$PROJECT"_administrator
+ export PROMPT_DECOR="$PROJECT"_administrator
+ export ENV=$(script_fp)
+ echo ENV "$ENV"
-export PATH=\
-"$REPO_HOME"/tool\
-:"$PATH"
-# no sneaky hidden files
-alias ls="ls -a"
-export ENV_PM=true
-echo "$script_name done."
+++ /dev/null
-#!/bin/env bash
-java com.ReasoningTechnology."Mosaic_tester".Test.shell_wrapper_list
--- /dev/null
+# ssh login will fail if .bashrc writes to stdout, so we write to "bash_error.txt"
+# set -x
+# in F37 something seems to be caching PATH, which can be annoying
+
+# If not running interactively, don't do anything
+ case $- in
+ *i*) ;;
+ *) return;;
+ esac
+
+# This should also be the default from login.defs, because gnome ignores
+# .login, .profile, etc. and uses systemd to launch applications from the desktop,
+ umask 0077
+
+# - note the variable $PROMPT_DECOR, that is how the project name ends up in the prompt.
+# - without -i bash will clear PS1, just because, so we set PPS1, PPS2 to not lose the prompt.
+# - use $(pwd) instead of \w or it will prefix '~' which confuses dirtrack when the
+# user is changed using su
+ export PPS1='\n$($iseq/Z)[$PROMPT_DECOR]\n\u@\h§$(pwd)§\n> '
+ export PPS2='>> '
+ export PS1="$PPS1"
+ export PS2="$PPS2"
+
+# sort the output of printenv, show newlines as environment variable values as \n
+ alias printenv='printenv | awk '\''{gsub(/\n/, "\\n")}1'\'' | sort'
+
+# iso time in ls -l, show hidden files, human readable sizes
+ alias ls='ls -a -h --time-style=long-iso'
+
+# iso time for all Linux programs, which they will all ignore, but at least we
+# tried, perhaps someday ...
+ export TZ=UTC
+ export TIME_STYLE=long-iso
+ export LC_ALL=en_DK.UTF-8
+
+# -l don't truncate long lines
+# -p show pids
+ alias pstree='pstree -lp'
+
+# - make bash go to sleep, revealing the calling shell
+# - useful for job control of multiple bash shells from a controlling shell
+ alias zzz="kill -STOP \$\$"
+
+# The one true operating system.
+# Proof that an OS can be as small as an editor.
+ export EDITOR=emacs
+
+# check the window size after each command and, if necessary, update the values
+# of LINES and COLUMNS.
+ shopt -s checkwinsize
+
script_afp=$(realpath "${BASH_SOURCE[0]}")
# 2024-10-24T14:56:09Z project skeleton and test bench files extracted from Ariadne
-echo v2.0
+# 2024-11-08T07:18:03Z prefix `Mosaic_` to class names. See document/class_name.txt.
+echo v2.1