From: Thomas Walker Lynch Date: Thu, 31 Oct 2024 15:21:41 +0000 (+0000) Subject: moves run_all to the tool directiroy, adds Test_TestBench X-Git-Url: https://git.reasoningtechnology.com/style/static/gitweb.css?a=commitdiff_plain;h=2fd64b0e6d8f3da1fe4fa9f2a485ba382acfa2ec;p=Mosaic moves run_all to the tool directiroy, adds Test_TestBench --- diff --git a/developer/javac/TestBench.java b/developer/javac/TestBench.java index 8165867..ea0eb3c 100644 --- a/developer/javac/TestBench.java +++ b/developer/javac/TestBench.java @@ -1,149 +1,103 @@ package com.ReasoningTechnology.Mosaic; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintStream; import java.lang.reflect.Method; -import java.util.Map; -public class TestBench{ +public class TestBench { /* -------------------------------------------------------------------------------- - Static Data + Validate the structure of a test method */ - - private static PrintStream original_out; - private static PrintStream original_err; - private static InputStream original_in; - - private static ByteArrayOutputStream out_content; - private static ByteArrayOutputStream err_content; - private static InputStream in_content; - - public static Boolean method_is_wellformed(Method method) { + public static Boolean method_is_wellformed(Method method){ // Check if the method returns Boolean if(!method.getReturnType().equals(Boolean.class)){ System.out.println("Structural problem: " + method.getName() + " does not return Boolean."); return false; } - // Check if the method has exactly three arguments + // Check if the method has exactly one argument of type IO Class[] parameterTypes = method.getParameterTypes(); - if(parameterTypes == null || parameterTypes.length != 3){ - System.out.println("Structural problem: " + method.getName() + " does not have three arguments."); - return false; - } - - // Check that all 
parameters are ByteArrayOutputStream - if( - !parameterTypes[0].equals(ByteArrayOutputStream.class) // Check first parameter - || !parameterTypes[1].equals(ByteArrayOutputStream.class) // Check second parameter - || !parameterTypes[2].equals(ByteArrayOutputStream.class) // Check third parameter - ){ - System.out.println("Structural problem: " + method.getName() + " has incorrect argument types."); + if(parameterTypes == null || parameterTypes.length != 1 || !parameterTypes[0].equals(IO.class)){ + System.out.println("Structural problem: " + method.getName() + " does not accept a single IO argument."); return false; } return true; } - public static Boolean run_test(Object test_suite ,Method method ,IO io){ + /* -------------------------------------------------------------------------------- + Run a single test method + */ + public static Boolean run_test(Object test_suite, Method method, IO io){ String test_name = method.getName(); - // Ways a test can fail, these are not generally singularly exclusive. - Boolean fail_TestBench = false; + // Tracking possible test failures Boolean fail_malformed = false; Boolean fail_reported = false; Boolean fail_exception = false; Boolean fail_extraneous_stdout = false; Boolean fail_extraneous_stderr = false; - String exception_string = ""; - // `method_is_wellformed` prints more information about type signature mismatch failures - if( !method_is_wellformed(method) ){ - System.out.println - ( - "Mosaic::TestBench::run test \'" - + test_name - + "\' has incorrect type signature for a TestBench test, calling it a failure." 
- ); + // Validate method structure + if(!method_is_wellformed(method)){ + System.out.println("Error: " + test_name + " has an invalid structure."); return false; } - // redirect I/O to an io instance + // Redirect I/O Boolean successful_redirect = io.redirect(); - if( successful_redirect ){ - io.clear_buffers(); // start each test with nothing on the I/O buffers - }else{ - // Surely a redirect failure is rare, but it is also rare that tests make - // use of IO redirection. A conundrum. So we log the error an wait utnil - // latter only throwing an error if IO redirection is made use of. - Util.log_message - ( - test_name - ,"Mosaic::TestBench::run redirect I/O failed before running this test." - ); - System.out.println - ( - "Mosaic::TestBench::run Immediately before running test, \"" - + test_name - + "\' I/O redirect failed." - ); + if(successful_redirect){ + io.clear_buffers(); // Start each test with empty buffers + } else { + Util.log_message(test_name, "Error: I/O redirection failed before running the test."); + System.out.println("Warning: Failed to redirect I/O for test: " + test_name); } - // Finally the gremlins run the test! + // Run the test and catch any exceptions try{ - Object result = method.invoke(test_suite ,in_content ,out_content ,err_content); - fail_reported = !Boolean.TRUE.equals(result); // test passes if ,and only if ,it returns exactly 'true'. + Object result = method.invoke(test_suite, io); + fail_reported = !Boolean.TRUE.equals(result); // Test passes only if it returns exactly `true` fail_extraneous_stdout = io.has_out_content(); fail_extraneous_stderr = io.has_err_content(); } catch(Exception e){ fail_exception = true; - // We report the actual error after the try block. 
exception_string = e.toString(); } finally{ io.restore(); } - // report results - if(fail_reported) System.out.println("failed: \'" + test_name + "\' by report from test."); - if(fail_exception) System.out.println("failed: \'" + test_name + "\' due to unhandled exception: " + exception_string); + // Report results + if(fail_reported) System.out.println("Test failed: '" + test_name + "' reported failure."); + if(fail_exception) System.out.println("Test failed: '" + test_name + "' threw an exception: " + exception_string); if(fail_extraneous_stdout){ - System.out.println("failed: \'" + test_name + "\' due extraneous stdout output ,see log."); - Util.log_output(test_name ,"stdout" ,io.get_out_content()); + System.out.println("Test failed: '" + test_name + "' produced extraneous stdout."); + Util.log_output(test_name, "stdout", io.get_out_content()); } if(fail_extraneous_stderr){ - System.out.println("failed: \'" + test_name + "\' due extraneous stderr output ,see log."); - Util.log_output(test_name ,"stderr" ,io.get_err_content()); + System.out.println("Test failed: '" + test_name + "' produced extraneous stderr."); + Util.log_output(test_name, "stderr", io.get_err_content()); } - // return condition - Boolean test_failed = - fail_reported - || fail_exception - || fail_extraneous_stdout - || fail_extraneous_stderr - ; - return !test_failed; + // Determine final test result + return !(fail_reported || fail_exception || fail_extraneous_stdout || fail_extraneous_stderr); } - + /* -------------------------------------------------------------------------------- + Run all tests in the test suite + */ public static void run(Object test_suite){ - int failed_test = 0; - int passed_test = 0; + int failed_tests = 0; + int passed_tests = 0; Method[] methods = test_suite.getClass().getDeclaredMethods(); IO io = new IO(); + for(Method method : methods){ - if( run_test(test_suite ,method ,io) ) passed_test++; else failed_test++; + if(run_test(test_suite, method, io)) passed_tests++; 
else failed_tests++; } - // summary for all the tests - System.out.println("Total tests run: " + (passed_test + failed_test)); - System.out.println("Total tests passed: " + passed_test); - System.out.println("Total tests failed: " + failed_test); - } -} // end of class TestBench + // Summary of test results + System.out.println("Total tests run: " + (passed_tests + failed_tests)); + System.out.println("Total tests passed: " + passed_tests); + System.out.println("Total tests failed: " + failed_tests); + } +} diff --git a/developer/javac/Util.java b/developer/javac/Util.java index 653f626..03796a9 100644 --- a/developer/javac/Util.java +++ b/developer/javac/Util.java @@ -31,11 +31,20 @@ public class Util{ return elements.length > 0 && find( elements ,element -> !(element instanceof Boolean) || !(Boolean) element ) == null; } - public static void all_set_false(Boolean[] conditions){ - for(Boolean condition : conditions) condition = false; + public static void all_set_false( Boolean[] condition_list ){ + int i = 0; + while(i < condition_list.length){ + condition_list[i] = false; + i++; + } } - public static void all_set_true(Boolean[] conditions){ - for(Boolean condition : conditions) condition = true; + + public static void all_set_true( Boolean[] condition_list ){ + int i = 0; + while(i < condition_list.length){ + condition_list[i] = true; + i++; + } } public static String iso_utc_time(){ diff --git a/developer/tool/clean_build_directories b/developer/tool/clean_build_directories index 4e2d60e..7091d81 100755 --- a/developer/tool/clean_build_directories +++ b/developer/tool/clean_build_directories @@ -2,9 +2,9 @@ script_afp=$(realpath "${BASH_SOURCE[0]}") # Removes all files found in the build directories. It asks no questions as to -# how or why the files got there. Be especially careful with the 'shell' directory -# if you added scripts to it for release with the project they will be deleted. 
-# consider adding a `shell-leaf` directory instead of adding scripts to `shell`. +# how or why the files got there. Be especially careful with the 'shell' +# directory if you have authored scripts for release, add a `shell-leaf` +# directory instead of putting them in `shell`. # input guards env_must_be="developer/tool/env" diff --git a/release/Mosaic.jar b/release/Mosaic.jar index 246a225..122284d 100644 Binary files a/release/Mosaic.jar and b/release/Mosaic.jar differ diff --git a/tester/document/about_the_tests.html b/tester/document/about_the_tests.html new file mode 100644 index 0000000..f7d5f0d --- /dev/null +++ b/tester/document/about_the_tests.html @@ -0,0 +1,116 @@ + + + + + + + About the Tests - Mosaic Project + + + +
+ +

About the Tests

+ +

This document provides an operational guide for running and expanding
+ tests of the Mosaic TestBench. That is, it is not about running the Mosaic
+ TestBench; rather, it is about testing the Mosaic TestBench.

+ +

These tests are primarily ad hoc, as we avoid using the TestBench to test + itself. Despite being ad hoc, the tests follow a core philosophy: the goal + is to identify which functions fail, rather than diagnose why they fail. To + achieve this, tests do not print messages but instead + return true if they pass.

+ +

Accordingly, only pass/fail counts and the names of failing functions are + recorded. For more detailed investigation, the developer can run a failed + test using a debugging tool such as jdb.

+ +

1. Running the Tests

+

To run all tests and gather results, follow these steps:

+
    +
  1. Ensure the environment is clean by running clean_build_directories.
  2. +
  3. Run make to compile the project and prepare all test class shell wrappers.
  4. +
  5. Run run_tests to run the tests. Each test class will output + its results, identifying tests that failed.
  6. +
+ +

2. Ad Hoc Block Tests

+

The block tests are ad hoc and do not use TestBench directly. It would
+ have been nice to have used the TestBench, but doing so would have
+ introduced unnecessary complexity.

+ + +

3. Integration Tests

+

After completion of the ad hoc block testing, integration of the blocks + is tested with one or more tests that make use of the TestBench. The + TestBench framework offers a structured testing approach. Classes using + TestBench are referred to as Test Suites, each method within which is + treated as an independent test.

+ + +

4. Adding a Test

+

To extend the testing suite, new tests can be added as follows:

+ + +
+ + diff --git a/tester/document/what_the_tests_do.txt b/tester/document/what_the_tests_do.txt deleted file mode 100644 index 85563b6..0000000 --- a/tester/document/what_the_tests_do.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Should probably finish this doc ;-) - - - - - diff --git a/tester/javac/Test_TestBench.java b/tester/javac/Test_TestBench.java new file mode 100644 index 0000000..848fae3 --- /dev/null +++ b/tester/javac/Test_TestBench.java @@ -0,0 +1,81 @@ +import java.lang.reflect.Method; +import com.ReasoningTechnology.Mosaic.IO; +import com.ReasoningTechnology.Mosaic.TestBench; + +public class Test_TestBench { + + /* -------------------------------------------------------------------------------- + Test methods to validate TestBench functionality + Each method tests a specific aspect of the TestBench class, with a focus on + ensuring that well-formed and ill-formed test cases are correctly identified + and handled. + */ + + // Tests if a correctly formed method is recognized as well-formed by TestBench + public static Boolean test_method_is_wellformed_0(IO io) { + try { + Method validMethod = Test_TestBench.class.getMethod("dummy_test_method", IO.class); + return Boolean.TRUE.equals(TestBench.method_is_wellformed(validMethod)); + } catch (NoSuchMethodException e) { + return false; + } + } + + // Tests if a method with an invalid return type is identified as malformed by TestBench + public static Boolean test_method_is_wellformed_1(IO io) { + try { + Method invalidReturnMethod = Test_TestBench.class.getMethod("dummy_invalid_return_method", IO.class); + return Boolean.FALSE.equals(TestBench.method_is_wellformed(invalidReturnMethod)); + } catch (NoSuchMethodException e) { + return false; + } + } + + // Tests if a valid test method runs successfully with the TestBench + public static Boolean test_run_test_0(IO io) { + try { + Method validMethod = Test_TestBench.class.getMethod("dummy_test_method", IO.class); + return Boolean.TRUE.equals(TestBench.run_test(new 
Test_TestBench(), validMethod, io)); + } catch (NoSuchMethodException e) { + return false; + } + } + + /* Dummy methods for testing */ + public Boolean dummy_test_method(IO io) { + return true; // Simulates a passing test case + } + + public void dummy_invalid_return_method(IO io) { + // Simulates a test case with an invalid return type + } + + /* -------------------------------------------------------------------------------- + Manually run all tests and summarize results without using TestBench itself. + Each test's name is printed if it fails, and only pass/fail counts are summarized. + */ + public static int run() { + int passed_tests = 0; + int failed_tests = 0; + IO io = new IO(); + + if (test_method_is_wellformed_0(io)) passed_tests++; else { System.out.println("test_method_is_wellformed_0"); failed_tests++; } + if (test_method_is_wellformed_1(io)) passed_tests++; else { System.out.println("test_method_is_wellformed_1"); failed_tests++; } + if (test_run_test_0(io)) passed_tests++; else { System.out.println("test_run_test_0"); failed_tests++; } + + // Summary for all the tests + System.out.println("Total tests run: " + (passed_tests + failed_tests)); + System.out.println("Total tests passed: " + passed_tests); + System.out.println("Total tests failed: " + failed_tests); + + return (failed_tests > 0) ? 
1 : 0; + } + + /* -------------------------------------------------------------------------------- + Main method for shell interface, sets the exit status based on test results + */ + public static void main(String[] args) { + int exitCode = run(); + System.exit(exitCode); + } +} diff --git a/tester/javac/Test_Util.java b/tester/javac/Test_Util.java index de046ff..23a869e 100644 --- a/tester/javac/Test_Util.java +++ b/tester/javac/Test_Util.java @@ -8,60 +8,64 @@ Test_Util public class Test_Util{ public static Boolean test_all(){ - // Test with zero conditions - Boolean[] conditions0 = {}; - Boolean result = !Util.all(conditions0); // Empty conditions list is false. + // Test with zero condition + Boolean[] condition0 = {}; + Boolean result = !Util.all(condition0); // Empty condition list is false. // Test with one condition - Boolean[] conditions1_true = {true}; - Boolean[] conditions1_false = {false}; - result &= Util.all(conditions1_true); // should return true - result &= !Util.all(conditions1_false); // should return false + Boolean[] condition1_true = {true}; + Boolean[] condition1_false = {false}; + result &= Util.all(condition1_true); // should return true + result &= !Util.all(condition1_false); // should return false - // Test with two conditions - Boolean[] conditions2_true = {true, true}; - Boolean[] conditions2_false1 = {true, false}; - Boolean[] conditions2_false2 = {false, true}; - Boolean[] conditions2_false3 = {false, false}; - result &= Util.all(conditions2_true); // should return true - result &= !Util.all(conditions2_false1); // should return false - result &= !Util.all(conditions2_false2); // should return false - result &= !Util.all(conditions2_false3); // should return false + // Test with two condition + Boolean[] condition2_true = {true, true}; + Boolean[] condition2_false1 = {true, false}; + Boolean[] condition2_false2 = {false, true}; + Boolean[] condition2_false3 = {false, false}; + result &= Util.all(condition2_true); // should return 
true + result &= !Util.all(condition2_false1); // should return false + result &= !Util.all(condition2_false2); // should return false + result &= !Util.all(condition2_false3); // should return false - // Test with three conditions - Boolean[] conditions3_false1 = {true, true, false}; - Boolean[] conditions3_true = {true, true, true}; - Boolean[] conditions3_false2 = {true, false, true}; - Boolean[] conditions3_false3 = {false, true, true}; - Boolean[] conditions3_false4 = {false, false, false}; - result &= !Util.all(conditions3_false1); // should return false - result &= Util.all(conditions3_true); // should return true - result &= !Util.all(conditions3_false2); // should return false - result &= !Util.all(conditions3_false3); // should return false - result &= !Util.all(conditions3_false4); // should return false + // Test with three condition + Boolean[] condition3_false1 = {true, true, false}; + Boolean[] condition3_true = {true, true, true}; + Boolean[] condition3_false2 = {true, false, true}; + Boolean[] condition3_false3 = {false, true, true}; + Boolean[] condition3_false4 = {false, false, false}; + result &= !Util.all(condition3_false1); // should return false + result &= Util.all(condition3_true); // should return true + result &= !Util.all(condition3_false2); // should return false + result &= !Util.all(condition3_false3); // should return false + result &= !Util.all(condition3_false4); // should return false return result; } public static Boolean test_all_set_false(){ - Boolean[] conditions = {true, true, true}; - Util.all_set_false(conditions); - return !Util.all(conditions); // Should return false after setting all to false + Boolean[] condition_list = {true, true, true}; + Util.all_set_false(condition_list); + return !condition_list[0] && !condition_list[1] && !condition_list[2]; } public static Boolean test_all_set_true(){ - Boolean[] conditions = {false, false, false}; - Util.all_set_true(conditions); - return Util.all(conditions); // Should return 
true after setting all to true + Boolean[] condition_list = {false, false, false}; + Util.all_set_true(condition_list); + return condition_list[0] && condition_list[1] && condition_list[2]; } public static int run(){ - Boolean[] condition = new Boolean[3]; - condition[0] = test_all(); - condition[1] = test_all_set_false(); - condition[2] = test_all_set_true(); + Boolean[] condition_list = new Boolean[3]; + condition_list[0] = test_all(); + condition_list[1] = test_all_set_false(); + condition_list[2] = test_all_set_true(); - if( !Util.all(condition) ){ + if( + !condition_list[0] + || !condition_list[1] + || !condition_list[2] + ){ System.out.println("Test_Util failed"); return 1; } diff --git a/tester/jvm/Test_Mosaic.jar b/tester/jvm/Test_Mosaic.jar index acdaf46..ff370e3 100644 Binary files a/tester/jvm/Test_Mosaic.jar and b/tester/jvm/Test_Mosaic.jar differ diff --git a/tester/shell/Test_TestBench b/tester/shell/Test_TestBench new file mode 100755 index 0000000..e6b261f --- /dev/null +++ b/tester/shell/Test_TestBench @@ -0,0 +1,2 @@ +#!/bin/env bash +java Test_TestBench diff --git a/tester/tool/run_tests b/tester/tool/run_tests new file mode 100755 index 0000000..1e7182b --- /dev/null +++ b/tester/tool/run_tests @@ -0,0 +1,23 @@ +#!/bin/env bash + +# Ensure REPO_HOME is set +if [ -z "$REPO_HOME" ]; then + echo "Error: REPO_HOME is not set." + exit 1 +fi + +# Navigate to the shell directory +cd "$REPO_HOME/tester/shell" || exit + +# Get the list of test scripts in the specific order from shell_wrapper_list +test_list=$(shell_wrapper_list) + +# Execute each test in the specified order +for file in $test_list; do + if [[ -x "$file" && ! -d "$file" ]]; then + echo -n "Running $file..." 
+ ./"$file" + else + echo "Skipping $file (not executable or is a directory)" + fi +done diff --git a/tester/tool/shell_wrapper_list b/tester/tool/shell_wrapper_list index aec2e97..3b46b8d 100755 --- a/tester/tool/shell_wrapper_list +++ b/tester/tool/shell_wrapper_list @@ -9,5 +9,5 @@ if [ "$ENV" != "$env_must_be" ]; then fi # space separated list of shell interface wrappers -echo Test0 Test_Util Test_IO +echo Test0 Test_Util Test_IO Test_TestBench