From 93fe2e47937b668166396e155821c7d450e3a590 Mon Sep 17 00:00:00 2001 From: Thomas Walker Lynch Date: Fri, 4 Oct 2024 08:46:17 +0000 Subject: [PATCH] more code from from the GQL_to_Cypher build script --- .../#Ariandne.groovy#} | 183 ++++++- developer/deprecated/.githolder | 0 .../deprecated/Ariandne.groovy | 9 +- .../dependency_graph_definition.groovy | 387 ++++++++++++++ developer/document/cycle_detection.html | 105 ++++ developer/document/dependency_graph.html | 137 +++++ developer/document/groovy.el | 19 + .../document/unique_node_label_check.txt | 15 + developer/executor/make.sh | 4 + developer/executor/release | 22 +- developer/executor/version | 4 + developer/groovy/build | 81 ++- ...eGraph$_run_build_scripts_f_closure3.class | Bin 0 -> 3794 bytes .../AriadneGraph$_wellformed_q_closure1.class | Bin 0 -> 1736 bytes .../AriadneGraph$_wellformed_q_closure2.class | Bin 0 -> 2149 bytes developer/groovyc/AriadneGraph.class | Bin 0 -> 10174 bytes developer/groovyc/AriadneGraph.groovy | 472 ++++++++++++++++++ .../{deprecated => scratch_pad}/.gitignore | 0 document/directory_naming.txt | 165 ++++-- .../temporary => scratch_pad}/.gitignore | 0 temporary/.gitignore | 2 - tester/test0/TestGraph.class | Bin 0 -> 1673 bytes tester/test0/TestGraph.groovy | 14 + tester/test0/env_test0 | 4 + tester/test0/graph.groovy | 2 - tester/test0/make.sh | 3 + ...eGraph$_run_build_scripts_f_closure3.class | Bin 0 -> 3794 bytes .../AriadneGraph$_wellformed_q_closure1.class | Bin 0 -> 1736 bytes .../AriadneGraph$_wellformed_q_closure2.class | Bin 0 -> 2149 bytes user/Ariadne_0.1/AriadneGraph.class | Bin 0 -> 10174 bytes user/Ariadne_0.1/AriadneGraph.groovy | 178 +++++++ user/Ariadne_0.1/build | 82 ++- 32 files changed, 1734 insertions(+), 154 deletions(-) rename developer/{groovy/Ariadne.groovy => deprecated/#Ariandne.groovy#} (71%) create mode 100644 developer/deprecated/.githolder rename user/Ariadne_0.1/Ariadne.groovy => developer/deprecated/Ariandne.groovy (98%) create mode 100644 
developer/deprecated/dependency_graph_definition.groovy create mode 100644 developer/document/cycle_detection.html create mode 100644 developer/document/dependency_graph.html create mode 100644 developer/document/groovy.el create mode 100644 developer/document/unique_node_label_check.txt create mode 100755 developer/executor/make.sh create mode 100755 developer/executor/version create mode 100644 developer/groovyc/AriadneGraph$_run_build_scripts_f_closure3.class create mode 100644 developer/groovyc/AriadneGraph$_wellformed_q_closure1.class create mode 100644 developer/groovyc/AriadneGraph$_wellformed_q_closure2.class create mode 100644 developer/groovyc/AriadneGraph.class create mode 100644 developer/groovyc/AriadneGraph.groovy rename developer/{deprecated => scratch_pad}/.gitignore (100%) rename {developer/temporary => scratch_pad}/.gitignore (100%) delete mode 100644 temporary/.gitignore create mode 100644 tester/test0/TestGraph.class create mode 100644 tester/test0/TestGraph.groovy delete mode 100644 tester/test0/graph.groovy create mode 100755 tester/test0/make.sh create mode 100644 user/Ariadne_0.1/AriadneGraph$_run_build_scripts_f_closure3.class create mode 100644 user/Ariadne_0.1/AriadneGraph$_wellformed_q_closure1.class create mode 100644 user/Ariadne_0.1/AriadneGraph$_wellformed_q_closure2.class create mode 100644 user/Ariadne_0.1/AriadneGraph.class create mode 100644 user/Ariadne_0.1/AriadneGraph.groovy diff --git a/developer/groovy/Ariadne.groovy b/developer/deprecated/#Ariandne.groovy# similarity index 71% rename from developer/groovy/Ariadne.groovy rename to developer/deprecated/#Ariandne.groovy# index 6956705..6c0cf17 100644 --- a/developer/groovy/Ariadne.groovy +++ b/developer/deprecated/#Ariandne.groovy# @@ -1,3 +1,181 @@ +class AriadneGraph { + + // Instance variables for graph data if needed + Map node_map = [:] + List node_f_list = [] + + // Constructor to accept a graph definition (node_map and node_f_list) + AriadneGraph( Map node_map ,List 
node_f_list ){ + this.node_map = node_map ?: [:] + this.node_f_list = node_f_list ?: [] + } + + /*-------------------------------------------------------------------------------- + File utility functions + */ + + static Map unpack_file_path( String file_fp ){ + def file = new File( file_fp ) + + def parent_dp = file.getParent() + def file_fn = file.getName() + def file_fn_base = file_fn.lastIndexOf('.') > 0 ? file_fn[ 0..file_fn.lastIndexOf('.') - 1 ] : file_fn + def file_fn_ext = file_fn.lastIndexOf('.') > 0 ? file_fn[ file_fn.lastIndexOf('.') + 1..-1 ] : '' + + return [ + parent_dp: parent_dp + ,file_fn: file_fn + ,file_fn_base: file_fn_base + ,file_fn_ext: file_fn_ext + ] + } + + static boolean file_exists_q( String node_label ){ + def node_path = Paths.get( node_label ) + return Files.exists( node_path ) + } + + /*-------------------------------------------------------------------------------- + Node type checks and marking + */ + + static Set all_node_type_set = ['symbol' ,'path' ,'leaf' ,'generator'] as Set + static Set persistent_node_mark_set = ['cycle_member' ,'wellformed' ,'build_failed'] as Set + + static boolean leaf_q( Map node ){ + return node && node.type == 'leaf' + } + + static boolean has_mark( Map node ){ + return node?.mark?.isNotEmpty() + } + + static void set_mark( Map node ,String mark ){ + node.mark = node.mark ?: [] as Set + node.mark << mark + } + + static void clear_mark( Map node ,String mark ){ + node?.mark?.remove( mark ) + } + + static boolean marked_good_q( Map node ){ + return node && node.mark && ( 'wellformed' in node.mark ) && !( 'cycle_member' in node.mark ) && !( 'build_failed' in node.mark ) + } + + /*-------------------------------------------------------------------------------- + Well-formed Node Check + */ + + static Set all_form_error_set = [ + 'no_node' + ,'node_must_have_label' + ,'label_must_be_string' + ,'node_must_have_type' + ,'bad_node_type' + ,'neighbor_value_must_be_list' + ,'neighbor_reference_must_be_string' + 
,'mark_property_value_must_be_set' + ,'unregistered_mark' + ,'missing_required_build_code' + ,'leaf_given_neighbor_property' + ,'leaf_given_build_property' + ] as Set + + static Set wellformed_q( Map node ){ + def form_error_set = [] as Set + + if( !node ){ + form_error_set << 'no_node' + return form_error_set + } + + if( !node.label ) + form_error_set << 'node_must_have_label' + else if( !( node.label instanceof String ) ) + form_error_set << 'label_must_be_string' + + if( !node.type ) + form_error_set << 'node_must_have_type' + else if( !( node.type instanceof String ) || !( node.type in all_node_type_set ) ) + form_error_set << 'bad_node_type' + + if( node.neighbor ){ + if( !( node.neighbor instanceof List ) ) + form_error_set << 'neighbor_value_must_be_list' + else if( !( node.neighbor.every { it instanceof String } ) ) + form_error_set << 'neighbor_reference_must_be_string' + } + + if( node.mark ){ + if( !( node.mark instanceof Set ) ) + form_error_set << 'mark_property_value_must_be_set' + else if( !( node.mark.every { it in persistent_node_mark_set } ) ) + form_error_set << 'unregistered_mark' + } + + if( node.type == 'path' && ( !node.build || !( node.build instanceof Closure ) ) ) + form_error_set << 'missing_required_build_code' + + if( node.type == 'leaf' ){ + if( node.neighbor ) form_error_set << 'leaf_given_neighbor_property' + if( node.build ) form_error_set << 'leaf_given_build_property' + } + + return form_error_set + } + + /*-------------------------------------------------------------------------------- + Graph traversal and build functions + */ + + def lookup( String node_label ,boolean verbose = false ){ + def lookup_node = node_map[ node_label ] + if( !lookup_node ){ + def match_result + for( func in node_f_list ){ + match_result = func( node_label ) + if( match_result.status == "matched" ){ + lookup_node = match_result + break + } + } + } + + if( !lookup_node && verbose ) println "lookup:: Node ${node_label} could not be found." 
+ return lookup_node + } + + def run_build_scripts_f( List root_node_labels ,boolean verbose = true ){ + if( root_node_labels.isEmpty() ) return + + def node_function = { node ,error_token_set -> + + if( !can_be_built_q( node ) ){ + println( "Skipping build for ${node.label} due to dependency problems" ) + return + } + if( !should_be_built_q( node ) ){ + if( verbose ) println( "${node.label} already up to date" ) + return + } + + println( "Running build script for ${node.label}" ) + node.build( node ,node.neighbor ) + + if( should_be_built_q( node ) ){ + println( "Build failed for ${node.label}" ) + set_mark( node ,'build_failed' ) + } + } + + println( "run_build_scripts_f:: running ..." ) + all_DAG_DF( root_node_labels ,node_function ,verbose ) + } + + // Add the rest of your methods here as instance/static methods based on whether they depend on the graph instance + +} /* Some terms: @@ -29,7 +207,7 @@ */ def unpack_file_path = { file_fp -> - def file_fn = new File(file_fp) + def file = new File(file_fp) // Get the parent directory path def parent_dp = file_fn.getParent() @@ -71,7 +249,7 @@ def persistent_node_mark_set = ['cycle_member' ,'wellformed' ,'build_failed'] as def leaf_q = { node -> node && node.type && node.type == 'leaf' } // mark -def has_mark(node) = { node.mark && !node.mark.isEmpty() } +def has_mark = { node -> node.mark && !node.mark.isEmpty() } def set_mark(node ,mark){ node.mark = node.mark ?: [] as Set node.mark << mark @@ -446,6 +624,7 @@ def run_build_scripts_f(root_node_labels ,boolean verbose = true){ } + println("run_build_scripts_f:: running ...") // Apply the function to all nodes in a depth-first manner all_DAG_DF(root_node_labels ,node_function ,verbose) } diff --git a/developer/deprecated/.githolder b/developer/deprecated/.githolder new file mode 100644 index 0000000..e69de29 diff --git a/user/Ariadne_0.1/Ariadne.groovy b/developer/deprecated/Ariandne.groovy similarity index 98% rename from user/Ariadne_0.1/Ariadne.groovy rename to 
developer/deprecated/Ariandne.groovy index 6956705..a657f0d 100644 --- a/user/Ariadne_0.1/Ariadne.groovy +++ b/developer/deprecated/Ariandne.groovy @@ -29,13 +29,13 @@ */ def unpack_file_path = { file_fp -> - def file_fn = new File(file_fp) + def file = new File(file_fp) // Renamed to 'file' to avoid overwriting 'file_fn' // Get the parent directory path - def parent_dp = file_fn.getParent() + def parent_dp = file.getParent() // Get the file name (with extension) - def file_fn = file_fn.getName() + def file_fn = file.getName() // Split the file name into base and extension def file_fn_base = file_fn.lastIndexOf('.') > 0 ? file_fn[0..file_fn.lastIndexOf('.') - 1] : file_fn @@ -71,7 +71,7 @@ def persistent_node_mark_set = ['cycle_member' ,'wellformed' ,'build_failed'] as def leaf_q = { node -> node && node.type && node.type == 'leaf' } // mark -def has_mark(node) = { node.mark && !node.mark.isEmpty() } +def has_mark = { node -> node.mark && !node.mark.isEmpty() } def set_mark(node ,mark){ node.mark = node.mark ?: [] as Set node.mark << mark @@ -446,6 +446,7 @@ def run_build_scripts_f(root_node_labels ,boolean verbose = true){ } + println("run_build_scripts_f:: running ...") // Apply the function to all nodes in a depth-first manner all_DAG_DF(root_node_labels ,node_function ,verbose) } diff --git a/developer/deprecated/dependency_graph_definition.groovy b/developer/deprecated/dependency_graph_definition.groovy new file mode 100644 index 0000000..ec9028b --- /dev/null +++ b/developer/deprecated/dependency_graph_definition.groovy @@ -0,0 +1,387 @@ +/* + Here the developer describes the build environment and the graph of + dependencies between files to be built. + + Each file is given a 'node'. The node label is a path + to the file, existing or not. Node's can also be 'symbolic' + in that the label does not correspond to a file. + + File paths are relative to the developer's directory. Also + known as $DEVELOPER_HOME. 
+ + The program that processes this file is + `executor/build_from_dependency_graph.groovy`. There are more notes and + definitions there. +*/ + + +/*-------------------------------------------------------------------------------- + The build environment + + Each tool has its '_IN' and '_OUT' directories that are referenced in + their build scripts. This keeps references in the build script local. + (Flow of files between tools can be seen in the overlapping definitions + among the various _IN and _OUT directories.) + + These suffixes are sometimes used to eliminate ambiguity: + + _FN = File Name + _FP = File Path (last element of the FP is the FN) + _DN = Directory Name + _DP = Directory path (last element of the DP is the DN) + + _FNL = File Name list + _FPL = File Path List + _DNL = Directory path list + _DPL = Directory path list + + Files in a _LEAF directory can not be built, and should never be deleted by the tools. + +*/ + +def env = [:] + +// Required shell environment variables +def varName_List = [ + 'REPO_HOME' + ,'PROJECT' + ,'ENV_BUILD_VERSION' + ,'DEVELOPER_HOME' +] +varName_List.each { varName -> + env[varName] = System.getenv(varName) ?: "" +} + +// Optional shell environment variables +def varNameOptional_List = [ + 'CLASSPATH' +] +varNameOptional_List.each { varName -> + env[varName] = System.getenv(varName) ?: "" +} + +env.CLASSPATH += ":${env.ANTLR_JAR}" + +// tools used in build scripts +def JAVA_COMP_FP = "${env.JAVA_HOME}/bin/javac" // Java compiler path +def JAVA_INTERP_FP = "${env.JAVA_HOME}/bin/java" // Java interpreter path +def JAVA_ARCHIVE_FP = "${env.JAVA_HOME}/bin/jar" // Java archive tool path + +def dir_map = [ + 'ANTLR_IN_LEAF' : 'ANTLR/' + ,'ANTLR_OUT' : 'javac/ANTLR/' + ,'ANTLR_OUT_PARENT' : 'javac/' // required by the antlr command to be a base for OUT + ,'EXECUTOR_IN' : 'executor/' + ,'JAVA_COMP_IN' : 'javac/' + ,'JAVA_COMP_IN_LEAF': 'javac/leaf/' + ,'JAVA_COMP_IN_ANTLR' : 'javac/ANTLR/' + ,'JAVA_COMP_IN_SYN' : 
'javac/synthesized/' + ,'JAVA_COMP_OUT' : 'jvm/' + ,'JVM_IN' : 'jvm/' + ,'TEMP' : 'Erebus/' +] + +env.CLASSPATH += ":${dir_map.JVM_IN}" + +dir_map.JAVA_COMP_IN_LIST = + "${dir_map.JAVA_COMP_IN_LEAF}" + +":${dir_map.JAVA_COMP_IN_ANTLR}" + +":${dir_map.JAVA_COMP_IN_SYN}" + +//-------------------------------------------------------------------------------- +// required environment variable check. Will add additional preface checks here +// should they become available. +// + +// a helper function for `environment_check` +def print_missing_var_list(missing_var_list){ + if(missing_var_list.isEmpty()){ + // Print nothing if the list is empty + return + } else if(missing_var_list.size() == 1){ + println "This environment variable was not set: ${missing_var_list[0]}" + } else { + println "These environment variables were not set: ${missing_var_list.join(' ,')}" + } +} + +task environment_check { + dependsOn ':installTools' + doFirst { + + println "CLASSPATH: ${env.CLASSPATH}" + println "JAVA_COMP_IN_LIST: ${dir_map.JAVA_COMP_IN_LIST}" + + // did the required variables have definitions? + def error_missing = false + def error_project = false + def missing_var_list = [] // To collect missing environment variables + varName_List.each { varName -> + if(!env[varName]){ + error_missing = true + missing_var_list << varName + } + } + print_missing_var_list(missing_var_list) + + // did the required variables have definitions? + if(env.PROJECT != "GQL_to_Cypher"){ + error_project = true + println "Expected project 'GQL_to_Cypher' ,but found '${env.PROJECT}'." + } + if(error_missing || error_project){ + throw new GradleException("Bailing due to missing environment variables.") + } + } + doLast { + println "================================================================================" + println "Building project .." 
+ } +} + +/*-------------------------------------------------------------------------------- + Map keyed on label node definitions + +*/ + +def node_map = [ + + "all" : [ + ,type: "symbol" + ,neighbor: [ + "ANTLR_OUT_FL" + ,"RuleNameList" + ,"RuleNameListRegx" + ,"Synthesize_SyntaxAnnotate" + ,"Synthesize_SyntaxAnnotate_PrintVisitor" + ,"Synthesize_SyntaxAnnotate_PrintVisitorMethod" + ] + ] + + "ANTLR_OUT_FL" : [ + ,type: "symbol" + ,neighbor: ["${dir_map.EXECUTOR_IN}/ANTLR_OUT_FL"] + ] + + ,"RuleNameList" : [ + ,type: "symbol" + ,neighbor: ["${dir_map.EXECUTOR_IN}/RuleNameList"] + ] + + ,"RuleNameListRegx" : [ + ,type: "symbol" + ,neighbor: ["${dir_map.EXECUTOR_IN}/RuleNameListRegx"] + ] + + ,"Synthesize_SyntaxAnnotate" : [ + ,type: "symbol" + ,neighbor: [ + "${dir_map.JAVA_COMP_IN_LEAF}/StringUtils.java" + ,"${dir_map.EXECUTOR_IN}/Synthesize_SyntaxAnnotate" + ] + ] + + "Synthesize_SyntaxAnnotate.class" : [ + type: 'path' , // It's a path type node + ,neighbor: [ + "${dir_map.JAVA_COMP_IN_LEAF}/Synthesize_SyntaxAnnotate.java" , // Dependency + "${dir_map.JAVA_COMP_IN_LEAF}/StringUtils.java" // Dependency + ] + ,build: { node ,neighbor -> + def javac_cmd = "${JAVA_COMP_FP} -d ${dir_map.JAVA_COMP_OUT} ${neighbor.join(' ')}" + javac_cmd.execute().waitFor() + } + ] + + ,"Synthesize_SyntaxAnnotate_PrintVisitor" : [ + ,type: "symbol" + ,neighbor: [ + "${dir_map.JAVA_COMP_IN_LEAF}/StringUtils.java" + ,"${dir_map.JAVA_COMP_IN_LEAF}/Synthesize_SyntaxAnnotate_PrintVisitorMethod.java" + ,"${dir_map.EXECUTOR_IN}/Synthesize_SyntaxAnnotate_PrintVisitor" + ] + ] + + ,"Synthesize_SyntaxAnnotate_PrintVisitorMethod" : [ + ,type: "symbol" + ,neighbor: [ + "${dir_map.JAVA_COMP_IN_LEAF}/StringUtils.java" + ,"${dir_map.EXECUTOR_IN}/Synthesize_SyntaxAnnotate_PrintVisitorMethod" + ] + ] +] + + + +//-------------------------------------------------------------------------------- +// node making functions +// + +// javac/leaf/ returns leaf node for javac/leaf/ +def node_leaf_f(node_label) 
{ + def leafNodePattern = ~/${dir_map['JAVA_COMP_IN_LEAF']}(.*)/ + def match = node_label =~ leafNodePattern + if (!match) { + return [status: "no_match"] + } + def baseName = match[0][1] + + def leafFilePath = "${dir_map['JAVA_COMP_IN_LEAF']}${baseName}" + def leafFile = new File(leafFilePath) + if (!leafFile.exists()) { + return [status: "no_match"] + } + + return [ + status: "matched" + ,label: node_label + ,type: "leaf" + ,neighbor: [] + ] +} + +// given executor/ returns node to build script wrapping jvm/.jar +def node_executor_f(node) { + + def match = node =~ /^(executor\/)(${base})$/ + if (!match) { + return [status: "no_match"] + } + def baseName = match[0][2] + + def jarFilePath = "${dir_map['JVM_IN']}${baseName}.jar" + def wrapperFilePath = "${dir_map['EXECUTOR_IN']}${baseName}" + + def jarFile = new File(jarFilePath) + if (!jarFile.exists()) { + return [status: "no_match"] + } + + return [ + status: "matched" + ,label: node + ,type: "path" + ,neighbor: [jarFilePath] + ,must_have: [jarFilePath] + ,build: { node ,neighbor -> + + // The script for wrapping the jar file: + def wrapper = + """ + #!/usr/bin/env bash + ${dir_map['JAVA_INTERP']} -cp \${CLASSPATH}:${dir_map['JVM_IN']}:${dir_map['JVM_IN']}/${baseName}.jar ${baseName} \\\$\\@ + """ + + new File(wrapperFilePath).withWriter('UTF-8') { writer -> + writer.write(wrapper) + } + + println "Creating executable wrapper script for ${baseName} in executor directory." 
+ "chmod +x ${wrapperFilePath}".execute().text + } + ] +} + +// given javac/ANTLR/.java, returns node to build grammar .g4 +def node_grammar_f(node) { + + def match = node =~ /^(${dir_map['ANTLR_OUT']})(${base})(Lexer|Parser|BaseListener|Listener|BaseVisitor|Visitor)\.java$/ + if( !match ){ + return [status: "no_match"] + } + + def grammarName = match[0][2] + def outputType = match[0][3] + + def grammarFilePath = "${dir_map['ANTLR_IN_LEAF']}${grammarName}.g4" + def grammarFile = new File(grammarFilePath) + + if( !grammarFile.exists() ){ + return [status: "no_match"] + } + + return [ + status: "matched" + ,label: node + ,type: "path" + ,neighbor: [grammarFilePath] + ] +} + +// given .class returns node to build .class from .java +def node_class_f(node) { + + def match = node =~ /^(${dir_map['JAVA_COMP_OUT']})(${base})\.class$/ + if( !match ){ + return [status: "no_match"] + } + + def baseName = match[0][2] + def javaFilePath = "${dir_map['JAVA_COMP_IN_PRIMARY_DIR']}/${baseName}.java" + def javaFile = new File(javaFilePath) + + if( !javaFile.exists() ){ + return [status: "no_match"] + } + + return [ + status: "matched", + label: node, + type: "path", // It's a path node since we're building the .class file + neighbor: [javaFilePath], // The corresponding .java file + build: { node, neighbor -> + def javac_cmd = "${JAVA_COMP_FP} -d ${dir_map.JAVA_COMP_OUT} -sourcepath ${dir_map.JAVA_COMP_IN_DL} ${neighbor[0]}" + javac_cmd.execute().waitFor() + } + ] +} + +// given .jar returns node to build .jar from .class +def node_jar_f(node) { + + // Use the symbolic name and base patterns + def match = node =~ /^(${dir_map['JAVA_COMP_OUT']})(${base})\.jar$/ + + if( !match ){ + return [status: "no_match"] + } + + def baseName = match[0][2] + def classFilePath = "${dir_map['JAVA_COMP_OUT']}${baseName}.class" + def classFile = new File(classFilePath) + + if( !classFile.exists() ){ + return [status: "no_match"] + } + + return [ + status: "matched" + ,label: node + ,type: "path" + 
,neighbor: [classFilePath] + ,build: { node ,neighbor -> + println "Building jar for ${baseName}" + def command = "${ext.javaHome}/bin/jar cf ${baseName}.jar -C ${dir_map['JAVA_COMP_OUT']} ${baseName}.class" + return command.execute().text; + } + ] +} + +// list of the recognizer functions +def node_f_list = [ + node_leaf_f + ,node_executor_f + ,node_grammar_f + ,node_class_f + ,node_jar_f +] + +// comprehensions to make regexprs more readable +def base = "[a-zA-Z0-9_-]+" +def ext = "[a-zA-Z0-9_-]+$" +def name = "${base}\\.${ext}" +def path = ".+/${name}" + + + +// LocalWords: wellformed diff --git a/developer/document/cycle_detection.html b/developer/document/cycle_detection.html new file mode 100644 index 0000000..3c9fb85 --- /dev/null +++ b/developer/document/cycle_detection.html @@ -0,0 +1,105 @@ + + + + Dependency Build Algorithm + + + +

Cycle Detection in Dependency Graph

+

Overview

+

+ The is_acyclic_q function is designed to detect cycles in a dependency graph using a depth-first search (DFS) algorithm. It starts from a list of root node labels and traverses the graph to ensure that there are no cycles. If a cycle is detected, the function marks the nodes involved and continues to explore other parts of the graph. +

+

Key Concepts

+
    +
  • Dependency Graph: A graph where nodes represent build targets and edges represent dependencies between these targets.
  • +
  • Depth-First Search (DFS): An algorithm for traversing or searching tree or graph data structures. It starts at the root and explores as far as possible along each branch before backtracking.
  • +
  • Cycle Detection: The process of identifying cycles (loops) in a graph, where a cycle is a path that starts and ends at the same node.
  • +
+

Functions

+

1. is_acyclic_q

+

+ Purpose: To determine if the dependency graph is acyclic (i.e., contains no cycles). +

+

+ Parameters: +

    +
  • root_node_labels: A list of labels for the root nodes to start the cycle search.
  • +
  • verbose: A boolean flag for enabling detailed output (default is true).
  • +
+

+

+ Returns: +

    +
  • 'acyclic' if no cycles are found.
  • +
  • 'cycle_found' if cycles are detected.
  • +
+

+

+ Process: +

    +
  • Initializes a stack for DFS traversal.
  • +
  • Iteratively calls the is_acyclic_q_descend function to traverse the graph and detect cycles.
  • +
  • Updates the traversal state and continues exploring other paths until the stack is empty.
  • +
+

+

2. is_acyclic_q_descend

+

+ Purpose: To perform the actual DFS traversal and cycle detection for a given path. +

+

+ Parameters: +

    +
  • path_stack: A stack representing the current path in the graph.
  • +
  • verbose: A boolean flag for enabling detailed output (default is true).
  • +
+

+

+ Returns: +

    +
  • 'leaf_node' if the current node has no children.
  • +
  • 'cycle_found' if a cycle is detected.
  • +
+

+

+ Process: +

    +
  • Collects the current path and node.
  • +
  • Checks for cycles by comparing the current node with nodes in the path.
  • +
  • Marks nodes involved in cycles and updates the stack to continue traversal.
  • +
+

+

Usage

+

+ The is_acyclic_q function is used to ensure that the dependency graph defined in the build file is free of cycles. This is crucial for preventing infinite loops and ensuring that the build process can proceed smoothly. +

+ + diff --git a/developer/document/dependency_graph.html b/developer/document/dependency_graph.html new file mode 100644 index 0000000..378c52d --- /dev/null +++ b/developer/document/dependency_graph.html @@ -0,0 +1,137 @@ + + + + Dependency Build Algorithm + + + +

Cycle Detection in Dependency Graph

+

Overview

+

+ The is_acyclic_q function is designed to detect cycles in a dependency graph using a depth-first search (DFS) algorithm. It starts from a list of root node labels and traverses the graph to ensure that there are no cycles. If a cycle is detected, the function marks the nodes involved and continues to explore other parts of the graph. +

+

Key Concepts

+
    +
  • Dependency Graph: A graph where nodes represent build targets and edges represent dependencies between these targets.
  • +
  • Depth-First Search (DFS): An algorithm for traversing or searching tree or graph data structures. It starts at the root and explores as far as possible along each branch before backtracking.
  • +
  • Cycle Detection: The process of identifying cycles (loops) in a graph, where a cycle is a path that starts and ends at the same node.
  • +
+

Functions

+

1. is_acyclic_q

+

+ Purpose: To determine if the dependency graph is acyclic (i.e., contains no cycles). +

+

+ Parameters: +

    +
  • root_node_labels: A list of labels for the root nodes to start the cycle search.
  • +
  • verbose: A boolean flag for enabling detailed output (default is true).
  • +
+

+

+ Returns: +

    +
  • 'acyclic' if no cycles are found.
  • +
  • 'cycle_found' if cycles are detected.
  • +
+

+

+ Process: +

    +
  • Initializes a stack for DFS traversal.
  • +
  • Iteratively calls the is_acyclic_q_descend function to traverse the graph and detect cycles.
  • +
  • Updates the traversal state and continues exploring other paths until the stack is empty.
  • +
+

+

2. is_acyclic_q_descend

+

+ Purpose: To perform the actual DFS traversal and cycle detection for a given path. +

+

+ Parameters: +

    +
  • path_stack: A stack representing the current path in the graph.
  • +
  • verbose: A boolean flag for enabling detailed output (default is true).
  • +
+

+

+ Returns: +

    +
  • 'leaf_node' if the current node has no children.
  • +
  • 'cycle_found' if a cycle is detected.
  • +
+

+

+ Process: +

    +
  • Collects the current path and node.
  • +
  • Checks for cycles by comparing the current node with nodes in the path.
  • +
  • Marks nodes involved in cycles and updates the stack to continue traversal.
  • +
+

+

Usage

+

+ The is_acyclic_q function is used to ensure that the dependency graph defined in the build file is free of cycles. This is crucial for preventing infinite loops and ensuring that the build process can proceed smoothly. +

+ + + + +

2. Run Build Scripts

+
    +
  1. Traverse the queue starting from the tail (the most recently added nodes).
  2. +
  3. For each node, attempt to build it by checking the file dates of the target node and its dependencies: +
      +
    1. If the target file is older than any dependency, execute the node’s build function.
    2. +
    3. After building, recheck the file dates. If the file date has been updated, mark the node as successfully built.
    4. +
    5. If the build fails or the file date is not updated, mark the node with an error in its property list.
    6. +
    +
  4. +
  5. Nodes with dependencies marked with errors will not be built, and errors will propagate up the dependency tree.
  6. +
+ +

3. Final Reporting and Status

+
    +
  1. If the root node is successfully built, report the success and any other successfully built nodes.
  2. +
  3. If an error has propagated to the root, report the failure.
  4. +
  5. Keep a list of all successfully built nodes and provide a final summary of the build status.
  6. +
+ +

4. Node Definitions

+

Each node in the dependency graph is defined by a property dictionary. A node is either a symbol or a path:

+
    +
  1. Symbol Nodes: These represent abstract concepts or commands and always trigger a build unless marked with an error.
  2. +
  3. Path Nodes: These represent file paths. A path node is considered built if its target file is newer than its dependencies.
  4. +
+

Both node types are identified by a label, and their dependencies are stored as a list of node labels. The presence of an error property indicates that the node has failed to build or encountered a problem during processing.

+ + + diff --git a/developer/document/groovy.el b/developer/document/groovy.el new file mode 100644 index 0000000..1a93312 --- /dev/null +++ b/developer/document/groovy.el @@ -0,0 +1,19 @@ + +;; Ensure package archives are set up +(require 'package) +(setq package-archives '(("melpa" . "https://melpa.org/packages/") + ("gnu" . "https://elpa.gnu.org/packages/"))) +(package-initialize) + +;; Install neotree if not already installed +(unless (package-installed-p 'groovy-mode) + (package-refresh-contents) + (package-install 'groovy-mode)) + +;; Configure NeoTree +(with-eval-after-load 'groovy-mode + (setq groovy-indent-offset 2) + ) + +;; Enable Groovy mode for files with `#!/usr/bin/env groovy` shebang +(add-to-list 'interpreter-mode-alist '("groovy" . groovy-mode)) diff --git a/developer/document/unique_node_label_check.txt b/developer/document/unique_node_label_check.txt new file mode 100644 index 0000000..b978a74 --- /dev/null +++ b/developer/document/unique_node_label_check.txt @@ -0,0 +1,15 @@ + +predicate == is_well_formed_q + +We can not check that the node labels are unique because the given value +is a single node, and the code is stateless. Besides there is no contract with +the programmer on how to use the predicated, so the programmer could call the +predicate multiple times on the same node. Now can we test this condition in +our do_markup_graph routine because of the way lookup works, it will always +return the same node for the same label. This would be a truly difficult check +to perform because the map does not given an error but just takes the second of +the duplicate key definitions (is this really true?) besides, it would require +a formal proof of the recognizer functions that they do not return different +definitions for different keys to match regexprs against. I've been mulling +this over. As we currently the programmer provides the map and function +definitions, we don't even know which nodes will be in the graph... 
diff --git a/developer/executor/make.sh b/developer/executor/make.sh new file mode 100755 index 0000000..8c10711 --- /dev/null +++ b/developer/executor/make.sh @@ -0,0 +1,4 @@ +#!/bin/env bash + +cd "$REPO_HOME"/developer/groovyc +groovyc AriadneGraph.groovy diff --git a/developer/executor/release b/developer/executor/release index 2511d79..4927353 100755 --- a/developer/executor/release +++ b/developer/executor/release @@ -8,11 +8,8 @@ if (!repo_home) { } def version = "0.1" -def release_dir = "${repo_home}/user/Ariadne_${version}" -def build_fp = "${repo_home}/developer/groovy/build" -def ariadne_lib_fp = "${repo_home}/developer/groovy/Ariadne.groovy" -// Check if the release directory exists, if not, create it +def release_dir = "${repo_home}/user/Ariadne_${version}" def release_dir_file = new File(release_dir) if (!release_dir_file.exists()) { release_dir_file.mkdirs() @@ -24,18 +21,25 @@ def install_file(source_fp, target_dp, perms) { def cmd = ["install", "-m", perms, source_fp, target_file] def process = cmd.execute() process.waitFor() - if (process.exitValue() != 0) { println "Error: Failed to install ${new File(source_fp).name} to ${target_dp}" println process.err.text System.exit(1) } - println "Installed ${new File(source_fp).name} to ${target_dp} with permissions ${perms}" } -// Install the Shell UI and Ariadne.groovy into the release directory with the correct permissions -install_file(build_fp, release_dir, "ug+r,ug+x") // Executable script -install_file(ariadne_lib_fp, release_dir, "ug+r") // Readable script +def build_fp = "${repo_home}/developer/groovy/build" +def ariadne_class_files = new File("${repo_home}/developer/groovyc").listFiles().findAll { + it.name.startsWith("AriadneGraph") && it.name.endsWith(".class") +} + +// Install the build script +install_file(build_fp, release_dir, "ug+r,ug+x") + +// Install all matching class files +ariadne_class_files.each { class_file -> + install_file(class_file.absolutePath, release_dir, "ug+r") +} println 
"Release version ${version} completed at $release_dir" diff --git a/developer/executor/version b/developer/executor/version new file mode 100755 index 0000000..6e696ef --- /dev/null +++ b/developer/executor/version @@ -0,0 +1,4 @@ +#!/bin/env bash + +echo 0.1 + diff --git a/developer/groovy/build b/developer/groovy/build index 14c8bce..0e18b80 100755 --- a/developer/groovy/build +++ b/developer/groovy/build @@ -1,54 +1,47 @@ #!/usr/bin/env groovy -def include_ariadne_library(){ - - // Get the directory where this script is located - def script_dp = new File(getClass().protectionDomain.codeSource.location.path).parent - def ariadne_lib_fp = new File(script_dp ,"Ariadne.groovy") - - if(!ariadne_lib_fp.exists()){ - println "Error: Ariadne library not found in ${script_dp}" - System.exit(1) - } - - return ariadne_lib_fp.text +// Function to load the graph class dynamically +def include_graph_class( graph_class_fp ) { + def class_loader = this.class.classLoader + try { + def graph_class = class_loader.loadClass(graph_class_fp) + println "Successfully loaded class: ${graph_class}" + return graph_class.newInstance() + } catch (Exception e) { + println "Error loading class: ${e.message}" + e.printStackTrace() + System.exit(1) + } } - // Main build function for the Shell UI -def build(graph_fp){ - // Check if the graph definition exists - def graph_fn = new File(graph_fp) - if(!graph_fn.exists()){ - println "Error: Graph definition file not found: $graph_fp" - System.exit(1) - } - - // Prepare the binding and shell for evaluation - def binding = new Binding() - def shell = new GroovyShell(binding) - - // Load the graph definition into the shell - shell.evaluate(graph_fn.text) - - // Check if node_map and node_f_list are defined as local variables - if (!binding.variables.containsKey('node_map') || !binding.variables.containsKey('node_f_list')) { - println "Error: Graph definition must define both 'node_map' and 'node_f_list'" - System.exit(1) - } - - // Load the Ariadne 
library functions - shell.evaluate(include_ariadne_library()) - - // Call the build function in Ariadne.groovy - run_build_scripts(binding.getVariable('node_map'), binding.getVariable('node_f_list')) +def build( graph_class_fp ) { + // Load the graph class dynamically + def graph_class = include_graph_class( graph_class_fp ) + + // Get the node_map and node_f_list from the graph class + def node_map = graph_class.get_node_map() + def node_f_list = graph_class.get_node_f_list() + + // Log the result + println "node_map: ${node_map}" + println "node_f_list: ${node_f_list}" + + // Check if node_map and node_f_list are defined + if( !(node_map instanceof Map) || !(node_f_list instanceof List) ){ + println "Error: Graph definition must define both 'node_map' and 'node_f_list'" + System.exit(1) + } + + // Call the build function in AriadneGraph + AriadneGraph.run_build_scripts_f( node_map ,node_f_list ) } // Entry point for the script -if(args.length == 0){ - println "Usage: ./shellUI.groovy " - System.exit(1) +if( args.length == 0 ){ + println "Usage: ./build " + System.exit(1) } -def graph_fp = args[0] -build(graph_fp) +def graph_class_fp = args[0] +build( graph_class_fp ) diff --git a/developer/groovyc/AriadneGraph$_run_build_scripts_f_closure3.class b/developer/groovyc/AriadneGraph$_run_build_scripts_f_closure3.class new file mode 100644 index 0000000000000000000000000000000000000000..7cea6d1be805fb26aaab93f32778ffbf548af3c3 GIT binary patch literal 3794 zcmb_eZF3t}6@IQId)La=cGn3ZeHF!REXj!oEp3RKkl1Zg2c->B;@}hzE9uJKc)hFc zu1w8aOMyaZODTmy3WZWA9o}ZbTbePHugvrx@LOOQp1ZrU*O8n_X85xA?z!ha_uO-y zbIyJLzw7@1@BrQxC?4}|tLe()zSUlusQDeYR`1wOvlcXbyB!9#R;}T9LC2T(Y7heB zi@xWru9Y3jT`bSWD;hEaQ=Y$AZg@?(WOagaqD8pSUXkVF(v`jy%I3z7K|-aE-XHG} zxc!UUI6$Eo`%cJuueJ?Fr4zX>|rGpY(?$>HHBAlER z5a$Jk9<*IMJR)!;u`_K|ubtaiRhSppdtCs9c>_D~fgG~fZD0hW!x+FGfo!;B2aH}N z?bd7afOi?Gd7V)Db1q#%?+BQ^CJXGSDe&0Sym8Z;wH!y_;Pn~Y4{@Ynk)JZ6Xjpqv z$6Xl`c|c%azd)?6w4KhP?Uq*$mgn5&+MG+9TUJAAxLaVj?s;JllJ^47rA@NkRc~3A 
zPs(t~Yd&hZO-BY370+Alw5L->RYTu)7t@k08_%q@WqL7DpG%W4*`VA~wL?iZ?!{yd z_n856YJH5v>*w>0jF0a-#}H_EfY}ozV~XS&J}xlmSas>>Afx8|E2nGd~uQ?{ksWRkDt z+rC$K0H# zSJKzyDKtzBp-G3#sP2_-S+*lNMA2@sYs5`07VB7|b%6|PE0({k1J(A-o4&d zTnkO|pd*kYY393qGBa5q17@yO0zy{6B~?C2k@7>bWp$iTRmC&i8?l>ut_G&N!CLG{ z%RTGXy{8O(3HRmjMSPjuo8Y0xi^+rM|5sT)W)~)I})f9Sg3Y7XVc{}?IJcn=P@J)Q%z!+{; zP53(klYI^8^Ni5&BAwN}Vf+X=oIXCjWa0YXB?i8WmveXt-;04a?q%Tncv>yI%E+rI z#2rx8#Wc0I^t`K@rNAAViPBF|!;jeHoT#-LxQN$s_%U8*4$MZ$VOG3`<;+`rp|UKW zQ6GArY-lW>wA#u4XvKErW1W?{^b?6HOX}4*ZwEHF9CKYy5y*4-&N1d(zC zi0eDCi{GE}?-UF5Ajh4>($u^7;J!oIJ=tRIuefDT_S_$%hWiG;MWTYcZ^dmK)$DfM zK?F6sle2+nrq0Vyn#YG?bnzlnlYLu^6ie$U1M9dScpX~y9b_{XvA_5?99+o0heHb) zDjWr>gYRKxA!9yNUC5e`RNuwiIvxYgiAxIMQ#}ZWi+{l_mvJrvzKE%90T&eD6I%ec zJNvUrbzxJrRN~Q}S4T@)lD6X_?%(#n#j8+?CDUGq3tSSH=&cNX#_tI9@uOm&c~l${ z|Hf1JIb|P(!S5T`O^u_J%-|PTz*~3)zr?HfHU5rw@IHQnf8n=c0KXFk{y=Te>&nwc zH%6R!af~=?M==t3l93sR;yz7n^`aVP+}E*6v-l%6fiTNg&3u_$r>}s^a4ep%k0;7BA!L=D^#~i&yac zr9a_?O6i{%RnjYXWufwCyimG~AH-HGSF%=({0HM2UK$hlAzn-H#yBdIvRCjErOX-q z3FIhM=jhChw<2i{H_^jS^^CWh2 zttgQ?I27)NkRsw*48|5`399JxW@f=KSMHp_DXLpIPm*Go3=0kKDL>+FqJz-g;-gNp zaPA4+ac}_+L#-Zbt^>at@;Kg3H{!-c*<%_5-q2CB=$kl{v2e0hb60aAU^DyFiYjsiKf@HWH6Q|Y==q2SWORVv02 z4->JyM~!PdDZ=GSRP=Z7u8jp;W2gk0;ZmchmKTcQvX^s4M_k5+e5b4-khgHzMg`(kc=nZ$QX#p#3D?GZc%NaplpQ&@ zdwvv*^!hU`4Exsxd_*m>@Bzc5Va7NRhIVNO%AB+Nh@tsPRRejX z4unf)`)crj%RwmOdZtP(x^l{{GDQpbh{<6J>|g_TZQRELnx8I}+>C+pdAP&r4ciUH zH)dg&_5vv$CWpHs$|duUvzF}yaX>0-QYu3uC20gFiBbLmy&Ac$vnlDxN~x1B1ENTn|1-UA5gCWulq+NyfGy}96m{s~$=s(f^16NK^e@^=fyj0uR z9Hy9ana&t<0@qVagLH%L3{su`sh1WzM$_PA@i>_@x82H-7b&qST4>X)v0f)@6USof z7|S!2A2HE7!L8>%W5sL#iD@G}!JTitU$D|X#(D<-kPL{=Q!g-Up;Ke%;%<&tqrZu! 
edV)=(464dDtu{&1e5s^g)458LNj#!m1=c@X`omfP literal 0 HcmV?d00001 diff --git a/developer/groovyc/AriadneGraph$_wellformed_q_closure2.class b/developer/groovyc/AriadneGraph$_wellformed_q_closure2.class new file mode 100644 index 0000000000000000000000000000000000000000..b12ef7d94634f3d4bbb105634f2cf27ef1bee8c1 GIT binary patch literal 2149 zcma)7TT>iG6#jZ43`_>tK*XRiqS=IHmt-W~f&>-TgsfRY%pw}Rtj%n*Oc=V?nclVd z16KJ%e2-d%R33ayReqCYd3t67gKny*sp{!I=k#~(r=S1x^e+G(!eh9+6#Bd=#Y)KA zJB7wiA_%rsxF?#89~)kvqE0AoSYQm}tx&1%z8i4aaw`ePLLbAd3R|wHnqr4{A~%%^ zJ5u|5!d($kgj|c}(Ty_%mDPDYxxp~eTRDT16}K=%l&qNyQ^ne2-sNsb`+>X4+Y2+9 zb6w~K2WR0hm5I0xKhjd_LA_E=8=Z#Xc1N=IUqg3 zF!p~13}+h#HL-x~YpMzl0*0$S>`Nn*HIU&#TZEAxX(4q(QbHShJlt(WLJ#7NJ}MXr z;k91j>h86JPRo~W_oiEw&Hbt*TerC+jt9w(E(hcz_o;a z5GrzC?=#F7PnDIdf;8(6rcoTDNX;Swv z&mWtfsIx$=HApb&DR!oMzGsQKB#up5>33Ade z3^OOfbD~53(^?Fdj$_?R(88Bghe6zk4!**=jSYNFGpj*=T zYrYisJ9}FqOevEOlP6%qk9-nYl2Vy^P?E;pFqMY4OOra4b*hq{i8xAv*rT_ld3vbp z$#0qsUf7X&!y_ z0amFz`zK!cjkXNm&^i^92QYzG=>&qhfJx$+-D|YV(Vps?Rs*w4=N$b<%B80$0f%_! 
z5Fb1tN)F$~cg#&4BTpofN5~`e@ET-;cawMquR#A;SBV{9!QdoTISDnr`(27WPww^O z78Z${wPw0>fXZ0zR}7X8vGU}1RBPpD7&X*GtUapzfok~x4-@$B35*JT5g*oXlUG+dh;PqwF%nc?=G{ny$91p_sL z1;e>)cKo2z-=&{RqqTxYFUo3%RA6SZgLYyxIVPCZtx=1GRJwgfa%{a|h8`SBq*M6< z7xj#Hvxfw;lj(Fq6BQ1Q*@?VOGA~TrR#Xed>|CA`>`cKS9!=)e zqJlNP+!Zs0)TrGaAIPP~3Y!iVY{mb^!Q@!M&KX!l)&_ERvS4!+!Sd#-oRvIUUEkta zT=5dY5-bg4G0up)}vV7Ps#I@|p zvJpg^TTUak2wD&eb8c${b1+w-v zLx~I}2W$i9P%8b|Y$0Euu?y~UwV{QQ%8Y0C+wEj+Bs+L%GBcRA^UJ%l+5N?mmNZ$s zkV|ETUA;}8xQCYN9dyaJyR!C9*i7q;rwTUZz82?&aW2lE6{1ffIFMAbwx-%j%f1NK zp~K?VX;Y2fVaIm#_{D0RrWP!Wr1FB9-Cob+y!t9XG_Z*@l`aKjV2fa`uPqzcMprFn z#*zd36GN%Aofu0NMg(VhCF$;Ys(CTg=yg)r_HCSL;4%~Q$ft4*!3ACf?NB~q^h(hQ zJ`~;#^jNfoozyK2Y*#YJNMPa$dYz+E!AdW-JI@y?jy)F5>`Hn$p?Z>|wuxQ~HL6Ee zo1nwbVAv?otn33jhfG{8xX=gO<7?Hz(E2ZP9>)hPx_E-2PD@6QDsvrHE}SG6Z< z-Z>nE)jP>fWa{OVbfj)vZG_%eV2Co+@6tSMsW5!b5{}uEoT@b%4Ji6+R;>| zm^ZQCe|i-op1q7(s0Ejt;RKA`%pc_HUBu(g-?z_eEp8AQ!*zAD z4J)pdveK4UZrNH;%vsiA&QuhIc+C1S7!RN2d2oN z#0l;6lXxZa!TU8;QHIac6 zRB|$}#BAb4!FgU#?OPsHC)E1xKaTHMn2DFFDnf?~1HVI=lJAO4FZ0^0tFLM6csiz& zls6c`ck!xn{nx11v^`XakEDiZ-@j{NmWr0)0qP^?iY%0~N3-MH>F--;P^0xq?)G3} zIGY_*Mi9Q{uu1&mQVYafI zIoiHQi}^xgBsp#;+~L3Hh<;4yp?;Rhx6f&7GG5AO6Y`CUMtOjW#ZqsJ8zM(JmD2pVj!K(7jyO+1OM)y zP^KG&Q~&-pj5qNg6oox*=jbp0C0OrgWZvk%*;%Zl4X?CSaQq(&jrd=|vhs#l<=c$< z1Ju5ftw(?-1!pKzN{qR2W6I`f{U+XiPkTSdpc(RH#m&qvW^(p0<;GP9wF#TT(^XJK zE>mi`TWxu!7#6Eu%D&NOdxnIml+jc^&#;=v+1C|Q3eB-ubq+8kB3P`$;Bac(&LqnF zEp5h>nJm6N6wc(za0BbYsb{B9TGzu<)ad$cDTRAwJ|*~6PCS~ZxCGqFYUh@2^B&5&iXT*ZIgJ%`7cN-bB)mapz8(`UQO3dN`kHIRW z4x=nZq%DG{RP{Mu+PMtV;=l+4kF24r-zmR#IWpYNGmjzXQe}>YIy#zqXa-HDoW}ss zG>|Q(2b(h4LQ}upG?Xo723MPMfuP0jIOjbnmNcZ!qp~h6Yo&wSjqwOnNM}sBNaePG zE+f1FV13nz&Smj5E-9B-SRkE5rIYt(^UR?$oIU8U#JHqf?vN_7<+8ABm&>Odi*}b{hao#oQ?O{k z?F>tg?BW(vc_;PDZi@$d8fVNaw&e+*O_jpuxrSUR*x+N#)9tNF{ADKB>!>QdVc9G1 zX9)CJjAp~$41-G3>eVz6mYi#wyS+TV*F#xcu2y=zCV~g#Y_)xrw$*jamW^GBE!#}t z>1L}hbj7i!Z>&l^CFf$O7_hKV23e=@TFSiz(n*o$-|9=5iZGb^baqtcq)N~!r^J%s zPv?PgwIpBOA~;2CdzYP2G0GlXACX3x!&?j4ACYa+rJhC^!&rX0w?vIq5yC0IB9fIc 
z-u4I!i+Xh)_FRy(`l$T0S0<8|0t+W8Mpz5YW{#KZ83BC*h|leoA*_ufC03ogv5URS zmx{&r3P@?(C^v=W2D#Y*F7GQMxAJm_13X(Tnl4X$XOl^$*V8E-74-9DkZ;$=h7=EJ z9EdOhm+=G(n@cb{x0FX{_B1_p_vBpWsHo?S(eHHcBUoAq=6-~Rd{im?BVoB)?xC75 z8sI@k-}Hs@2S@v}jP&>GdkJkGDuq25VM(p3FZo!6)vSJc#5td+h|P*%j736;!}2-6 zskSQ=(=Ug(%bDDyCMjL2T2(kYTat=$tD^F`Ng~)Bf^D3B0b9+QQ|Qhiq7P{mkBU-h zVi93?o%Y6d5s^+mI@e`sYFRSZU4Lr-JpV$JHsw3O+rcI87=oF7HtSPfJ+Cnb_OqJV zlk8WA`dQtnjNMaIrw6yk>drFJ?@jVqjI7VUj8l>A8QO?; zFq^O8c(#}uu+^!ZVU3x{xwE8I^L8?iWvt$VVEMqaen4H;sywXjTG@tGd9Pa-k zKtouC1h#jxrf+0>ySi27_+sMB!+hTSf04ycjm80PiygW$jXsvo*9j)zc?du3l;v(P>%xLH^!Y)RK5j8YQ)aYH+*I`cL z5{Jet)2!^Mi`F@~b-iCFXEpLHf7=l}g-c-=2pHIGU~B2Oix0py{=S99PA6Zs%9l84 z9`B++am=A`E}%fJq6luqO!+c(vb}6>I0@+KX5XZi2 zT^^gnE}*-$w{;SGfgP*VLm$x7*4t)XHHiFy7Y?vOdgTBU&4$B9Cw%j!+jP_%^OE5On@J*_^~W{%+(yEy0C)*K;Y4`IO=U zejA9Q7Yp$S(~FYg21hEm3m@TI9Zuk*xSLY6@H}qeQvi$bENwe1kV_L@6BNXl@fC+s1D;x@Frqt&EU=g{GT?;?#-?xGl>4q+}=I80qUJAtnpf)PB7V2xV9kxMNC(N588h4id7 zdYy=afUgo+ZT4$r^mQCj=(7m>JTQrGPGQ-BInM(#s|<4t$6Y2Lq-=tO*+`fPpl`<{ zq3A=#s>7J$x}EjHBwhwy!V5iZJU|?I39n4xdu_eW6iVv{fX8nl2%N+ms&iLKr8BkR zuj7}74jTV@j|4kimcah!C`%>5-zzh>jS^IHpXuz8EB&Fnl2C`dLQPpeEcO1Vq^Uz#V!dAOeZtkBcl5`n9j$124kGhAWsd#} zm3}=+-{jHO{<60I0zYxIRZ~yWiI*cZV;$^&28IQDDKX=ACNc){W{THR_mpBxj=}%yx)Au}nIe{NlP)njF z`mf69|LUS2!fwvu+soKmnL;4Q86y~$uVO5`8mn;)&SUnu97%@ge(dL0p#p5& zgCRW54?Ew-2roOY#Ty*?EqGauQCZ3F9__eJF6Q@@E0C9K_;KY96y<&#kk@fg-oTCW zYupqF;pV_negRP-Uq{=(3fCd5KqIfURkRg#=5btuWxZ34yc+uk&TO5O`A&MQQo|vB zuF|=WG|iL3%}mM}tUWqvJ3`UWYv_uGz9lP!;qPMg)FM#Xj$o?*RY!2H(Hjj;%KMbM z&(Z2`hz2{%sM!-Wdk-UY_|$jWrYt#QaV1K$#7@ZCzMxK?t?}9y@k(D!qnyty+9(&s zkIVW=*~kdjR>_xZ5k85945Xd%Hk#O~KUsDXDs!G1GoVXGD|*+SMZn?vXfU|WgsRC{O?tr+^93WDw|li z1(^IMWi!!gSOz9!3!j2i|K+lkPqkD7kDZP(aFp+=4(eJwA>AjWXJ6vDT+u^8?kRg= ztsFTad;4Osw&T+0D5X)Znv@T4|FPC+uaTJh|IH!#%9?h!#W!U-A%P%sf_5YC~HbF-?JLUIiz8r zML5>VY*%wk(uxPESR;j75>(g`l&CPzq$}5I%5AdH4{K}im}K4o%i+|E)euzwKY}aR zP2@`Pb$0BSlL2!cVwoR)kxzx?h?(KiKB!uyyh&`Ls)0))zDSbksWejC^()15}A0=;wSuo`p2TH)+gIV>UmiK%QaCvzQ!a`!$XSV90Z9f5PL(g?xJ-*66m| 
zZFjrv``z{$w@tfk(QR*V+XvnDF}MAK+aAH!HSRau_K4g6ZMXft+x~$&{ 0 ? file_fn[ 0..file_fn.lastIndexOf('.') - 1 ] : file_fn + def file_fn_ext = file_fn.lastIndexOf('.') > 0 ? file_fn[ file_fn.lastIndexOf('.') + 1..-1 ] : '' + + return [ + parent_dp: parent_dp + ,file_fn: file_fn + ,file_fn_base: file_fn_base + ,file_fn_ext: file_fn_ext + ] + } + + static boolean file_exists_q( String node_label ){ + def node_path = Paths.get( node_label ) + return Files.exists( node_path ) + } + + /*-------------------------------------------------------------------------------- + Node type checks and marking + */ + + static Set all_node_type_set = ['symbol' ,'path' ,'leaf' ,'generator'] as Set + static Set persistent_node_mark_set = ['cycle_member' ,'wellformed' ,'build_failed'] as Set + + static boolean leaf_q( Map node ){ + return node && node.type == 'leaf' + } + + static boolean has_mark( Map node ){ + return node?.mark?.isNotEmpty() + } + + static void set_mark( Map node ,String mark ){ + node.mark = node.mark ?: [] as Set + node.mark << mark + } + + static void clear_mark( Map node ,String mark ){ + node?.mark?.remove( mark ) + } + + static boolean marked_good_q( Map node ){ + return node && node.mark && ( 'wellformed' in node.mark ) && !( 'cycle_member' in node.mark ) && !( 'build_failed' in node.mark ) + } + + /*-------------------------------------------------------------------------------- + Well-formed Node Check + */ + + static Set all_form_error_set = [ + 'no_node' + ,'node_must_have_label' + ,'label_must_be_string' + ,'node_must_have_type' + ,'bad_node_type' + ,'neighbor_value_must_be_list' + ,'neighbor_reference_must_be_string' + ,'mark_property_value_must_be_set' + ,'unregistered_mark' + ,'missing_required_build_code' + ,'leaf_given_neighbor_property' + ,'leaf_given_build_property' + ] as Set + + static Set wellformed_q( Map node ){ + def form_error_set = [] as Set + + if( !node ){ + form_error_set << 'no_node' + return form_error_set + } + + if( !node.label ) + 
form_error_set << 'node_must_have_label' + else if( !( node.label instanceof String ) ) + form_error_set << 'label_must_be_string' + + if( !node.type ) + form_error_set << 'node_must_have_type' + else if( !( node.type instanceof String ) || !( node.type in all_node_type_set ) ) + form_error_set << 'bad_node_type' + + if( node.neighbor ){ + if( !( node.neighbor instanceof List ) ) + form_error_set << 'neighbor_value_must_be_list' + else if( !( node.neighbor.every { it instanceof String } ) ) + form_error_set << 'neighbor_reference_must_be_string' + } + + if( node.mark ){ + if( !( node.mark instanceof Set ) ) + form_error_set << 'mark_property_value_must_be_set' + else if( !( node.mark.every { it in persistent_node_mark_set } ) ) + form_error_set << 'unregistered_mark' + } + + if( node.type == 'path' && ( !node.build || !( node.build instanceof Closure ) ) ) + form_error_set << 'missing_required_build_code' + + if( node.type == 'leaf' ){ + if( node.neighbor ) form_error_set << 'leaf_given_neighbor_property' + if( node.build ) form_error_set << 'leaf_given_build_property' + } + + return form_error_set + } + + /*-------------------------------------------------------------------------------- + A well formed graph checker. Traverses entire graph and marks nodes + that are not well formed or that are part of a cycle. + + This must be run on the graph for `lookup_marked_good` to work. + */ + + /* + Given a node label list. Applies well_formed_q to each node and marks the + node accordingly. Returns 'all_wellformed' or 'exists_malformed'. 
+ */ + def mark_the_wellformed_f(node_label_list ,boolean verbose = true){ + def all_wellformed = true + + def neighbors = node_label_list.collect{ neighbor_label -> + def neighbor_node = lookup(neighbor_label) + def form_errors = wellformed_q(neighbor_node) + if(form_errors.isEmpty()){ + neighbor_node.mark = neighbor_node.mark ?: [] as Set + neighbor_node.mark << 'wellformed' + } else { + all_wellformed = false + if(verbose){ + if(neighbor_node.label && neighbor_node.label.length() > 0){ + print("node ${neighbor_node.label} is malformed due to:") + } else { + print("anonymous node is malformed due to:") + } + form_errors.each { error -> print(" ${error}") } + println("") + } + } + neighbor_label + } + + return all_wellformed ? 'all_wellformed' : 'exists_malformed' + } + + /* + Given a path stack initialized with the path root ,descends to a leaf node + while looking for cycles. Marks nodes as 'cycle_member' if a cycle is + detected. Marks nodes as `wellformed` if `wellformed_q`. Returns a set of + tokens indicating the status: 'cycle_found' ,'defacto_leaf_node' ,and + 'exists_malformed'. 
+ */ + def markup_graph_f_descend(path_stack ,boolean verbose = true){ + def ret_value = [] as Set + def local_path = path_stack.collect{ it[0] } + def local_node_label = local_path[-1] + def cycle_start_index + + do{ + // Check for a cycle in the local path + cycle_start_index = local_path[0..-2].findIndexOf{ it == local_node_label } + if(cycle_start_index != -1){ // Cycle detected + ret_value << 'cycle_found' + if(verbose) print "markup_graph_f_descend:: dependency cycle found:" + local_path[cycle_start_index..-1].each{ cycle_node_label -> + def cycle_node = lookup(cycle_node_label) + if(verbose) print " ${cycle_node.label}" + cycle_node.mark = cycle_node.mark ?: [] as Set // Initialize mark set if needed + cycle_node.mark << 'cycle_member' + } + if(verbose) println "" + // we can not continue searching after the loop so ,we pop back to treat + // the first node in the loop as though a leaf node. + path_stack = path_stack[0..cycle_start_index] + return ret_value + } + + // a 'de-facto' leaf node test subtleties here because we have not yet + // determined if the nodes we are wellformed. This is purposeful ,as + // this function does not know about the relationships between the + // possible error marks. + def local_node = lookup(local_node_label) + if(local_node.neighbor.isEmpty()){ + ret_value << 'defacto_leaf_node' + return ret_value + } + + // Mark the wellformed nodes and get the result + def result = mark_the_wellformed_f(local_node.neighbor ,verbose) + if(result == 'exists_malformed'){ + ret_value << 'exists_malformed' + } + + // Descend further into the tree. + path_stack << local_node.neighbor.clone() + local_node_label = local_node.neighbor[0] + local_path << local_node_label + }while(true) + } + + /* + Given root_node_labels ,marks up the graph and returns a set possibly + containing 'all_wellformed' and 'cycles_exist'. + + Marks potentially added to each node include 'cycle_member' ,'wellformed'. + Note that these marks are independent. 
+ */ + def wellformed_graph_q(root_node_labels ,boolean verbose = true){ + def ret_value = [] as Set + def exists_malformed = false; + + // check the root nodes + def result = mark_the_wellformed_f(root_node_labels ,verbose) + if(result == 'exists_malformed'){ + ret_value << 'exists_malformed' + } + + // Initialize the DFS tree iterator. + def path_stack = [] + path_stack << root_node_labels.clone() + + // iterate over left side tree descent ,not ideal as it starts at the + // root each time ,but avoids complexity in the cycle detection logic. + do{ + def result = markup_graph_f_descend(path_stack ,verbose) + if('cycle_found' in result) ret_value << 'cycle_exists' + if('exists_malformed' in result) exists_malformed = true; + + // increment the iterator to the next leftmost path + def top_list = path_stack[-1] + top_list.remove(0) + if(top_list.isEmpty()) path_stack.pop() + + }while(!path_stack.isEmpty()) + + if(!exists_malformed) ret_value << 'all_wellformed' + if( verbose ){ + if(exists_malformed) println("one or more malformed nodes were found") + def exists_cycle = 'cycle_found' in ret_value + if(exists_cycle) println("one or more cyclic dependency loop found") + if( exists_malformed || exists_cycle ) println("will attempt to build unaffected nodes") + } + + return ret_value + } + + /*-------------------------------------------------------------------------------- + Graph traversal + */ + + Map lookup( String node_label ,boolean verbose = false ){ + def lookup_node = node_map[ node_label ] + if( !lookup_node ){ + def match_result + for( func in node_f_list ){ + match_result = func( node_label ) + if( match_result.status == "matched" ){ + lookup_node = match_result + break + } + } + } + + if( !lookup_node && verbose ) println "lookup:: Node ${node_label} could not be found." 
+ return lookup_node + } + + // mark aware lookup function + def lookup_marked_good(node_label ,verbose = false){ + def node = lookup(node_label ,verbose) + if( node && marked_good_q(node) ) return node; + return null; + } + + + /* + Given `root_node_labels` of a DAG. Applies `node_function` to each node in a + depth-first traversal order. Returns a set of error tokens encountered + during traversal. + + `wellformed_graph_q` must be run on the DAG before this function is called ,or + `lookup_marked_good` will not function correctly. + */ + def all_DAG_DF(root_node_labels ,node_function ,boolean verbose = true) { + def error_token_set = [] as Set + + if (root_node_labels.isEmpty()) return error_token_set + + def visited = [] as Set + def in_traversal_order = [] + def stack = [] + + root_node_labels.each { root_label -> + stack << root_label + } + + do { + def node_label = stack.pop() + + def node = lookup_marked_good(node_label ,verbose) + if (!node) { + error_token_set << 'lookup_fail' + continue + } + + if (node.label in visited) continue + visited << node.label + + in_traversal_order << node + + node.neighbor.each { neighbor_label -> + stack << neighbor_label + } + } while (!stack.isEmpty()) + + in_traversal_order.reverse().each { node -> + node_function(node ,error_token_set ,verbose) + } + + return error_token_set + } + + /*-------------------------------------------------------------------------------- + run the build scripts + depends upon is_acyclic having already marked up the graph. 
+ + import java.nio.file.Files + import java.nio.file.Paths + */ + + // a symbol dependency is good ,as long as it is built before the node in question + def good_dependency_q(node_labels) { + return node_labels.every { node_label -> + def node = lookup_marked_good(node_label) + if (!node) return false + if (node.type in ['path' ,'leaf'] && !file_exists_q(node.label)) return false + return true + } + } + + /* + Given a node label and a list of node labels ,returns true if the file at the + node label in the first argument is newer than all the files at the + corresponding node labels in the second list. + */ + def newer_than_all(node_label ,node_label_list) { + def node_path = Paths.get(node_label) + if (!Files.exists(node_path)) return false + + def node_last_modified = Files.getLastModifiedTime(node_path).toMillis() + + return node_label_list.every { label -> + def path = Paths.get(label) + if (!Files.exists(path)) return false + def last_modified = Files.getLastModifiedTime(path).toMillis() + return node_last_modified > last_modified + } + } + + def can_be_built_q(node){ + if( !marked_good_q(node) ) return false; + if( + (node.type == 'symbol' || type == 'path') + && !good_dependency_q( node.neighbor ) + ){ + return false + } + if( + node.type == 'leaf' + && !file_exists_q(node.label) + ){ + return false; + } + return true + } + + // `can_be_build_q` must be true for this to be meaningful: + def should_be_built_q(node ,verbose = true) { + if(node.type == 'leaf') return false + if(node.type == 'symbol') return true + if( node.type == 'path') return !newer_than_all(node.label ,node.neighbor) + println("should_be_build_q:: unrecognized node type ,so assuming it should not be built.") + return false + } + + void run_build_scripts_f( List root_node_labels ,boolean verbose = true ){ + if( root_node_labels.isEmpty() ) return + + def node_function = { node ,error_token_set -> + + if( !can_be_built_q( node ) ){ + println( "Skipping build for ${node.label} due to 
dependency problems" ) + return + } + if( !should_be_built_q( node ) ){ + if( verbose ) println( "${node.label} already up to date" ) + return + } + + println( "Running build script for ${node.label}" ) + node.build( node ,node.neighbor ) + + if( should_be_built_q( node ) ){ + println( "Build failed for ${node.label}" ) + set_mark( node ,'build_failed' ) + } + } + + println( "run_build_scripts_f:: running ..." ) + all_DAG_DF( root_node_labels ,node_function ,verbose ) + } + + // Add the rest of your methods here as instance/static methods based on whether they depend on the graph instance + +} + + +/* + def clean(nodes_to_clean) { + def all_dependencies = node_map["all"].neighbor.clone() + nodes_to_clean.each { node -> + all_dependencies.remove(node) + } + + def must_have_nodes = [] + all_dependencies.each { node -> + def node_info = node_map[node] + if (node_info.must_have) { + must_have_nodes += node_info.must_have + } + } + + def to_clean_list = [] + nodes_to_clean.each { node -> + if (!must_have_nodes.contains(node) && node_map[node].type == "path") { + to_clean_list += node + } + } + + to_clean_list.each { node -> + def file_path = node_map[node].label + def file = new File(file_path) + if (file.exists()) { + file.delete() + println "Deleted file: ${file_path}" + } + } +} +*/ diff --git a/developer/deprecated/.gitignore b/developer/scratch_pad/.gitignore similarity index 100% rename from developer/deprecated/.gitignore rename to developer/scratch_pad/.gitignore diff --git a/document/directory_naming.txt b/document/directory_naming.txt index aca6100..bd6ebb9 100644 --- a/document/directory_naming.txt +++ b/document/directory_naming.txt @@ -1,44 +1,126 @@ -Naming Conventions for Directories +Directories Naming Convention +----------------------------- - Our shop has a convention of naming directories after the role of the person, - the name of the program, or generally, the agent, who is going to use the - files in the directory. 
In short, the name is often the answer to the question - "who are the files are for?". +Property based file organization - Sometimes there is not a good answer to that question. A good example is the - documents directory. There really isn't a good term for the role that people - are playing when the read the documents. Perhaps, 'readers'? This is not a - job function, and it is a somewhat ambiguous. Perhaps, 'projectologist'? Ah - nah .. + I am experimenting with a file system where instead of having directories, we + have collections of files that have property in common. - When we can not answer the question of who the files are for, we instead - choose another common property shared by each and every file in the directory. - It is often the case that a property that each and every file has in common - will be singular. Hence in the example in the prior paragraph, when each file - in a directory has the property of being a document, the directory gets called - 'document'. + In this distribution the project is on a conventional file system, but we still + have the directory names. These following properties have been used, in + order of preference: -Top Level Directory + 1. Who the file is for. This is typically a role of a contributor + to the project, or the name of a program. - The top level of a github project is of course named after the project. Though - we people like to see actors related to project names. Look at all the - mythical animals on the covers of the O'Reilley manuals. + 2. The role that the file plays in the project. This is more general than + saying a file is for a specific program, or even kind of program. As an + example, the 'tool' directory. + + 3. A role that the directory plays. As an example, 'scratch_pad'. Another + example is 'deprecated' which is a short term archived file. + + In this third category the file property is imposed upon the file, + rather than being a description of the file's contents. 
+ + One side effect of grouping files based on a shared property is that + the directory name, coming from the property name, is often singular. + +Who the file is for + + These file groups fit the model well: developer, tester, user. Also in the + developer directory, directories named after programs work well, as examples, + python, groovy, cc, linker, javac, etc. + +Generalization of who the file is for, e.g. executor + + Sometimes multiple related actors operate on the files in a directory. In + which case we give the directory a more general name that describes the + actors as a group. + + So if we had a directory that held a mix of files for various compilers we might + name the directory 'compiler_input' or even 'compiler'. If the files are + interpreted and multiple interpreters are involved, then 'interpreter_input'. + + A such generalization you will see in this project is 'executor'. An executor + is any program that runs another program; examples include shells that + interpret shell scripts; various language interpreter; and most famously the + machine loader, which will load an instruction sequence into memory and point + the program counter at it. + +document + + What role do people play when reading documents? That this question does not + have an obvious direct answer says something about our values, and this in + turn might explain why so few people actually read the documents. + + An author will call the person reading his work 'the reader', perhaps even + addressing a reader directly, 'Dear Reader'. However, the are a large number + of reader programs and devices, such as Adobe Reader and a bar card reader. + Unlike toolsmith, developer, and tester, being a 'reader' is not a job + title that a person is going to be addressed by. + + A person might reach for latin and use the word, 'lector'. However it is + a stretch to get the meaning from this one. Perhaps we move to Greek, and + imagine the role 'projectologist'. lol. 
Perhaps shortened to 'ologist'? + + Actually, there is a word for a person who studies books, articles, and + documents, a 'student'. It is actually a fairly accurate description of the + role a person plays while trying to learn about the project. The only drawback + is that it is not a professional role title. Perhaps it is not much of a + stretch to say someone is still in the student phase learning about a project. + I tried this label out for a while, but it did not really fit. + + There is a clear property choice. Each file in the document directory has the + property of being a document, hence, the directory name is 'document'. The + document directory at the top level is for the project manager, while the + document directory in the developer's directory is about the code being + developed and hold to build it. + + +Purpose - tool, temporary, deprecated + + For some other directories it is even more of a stretch to try and say + they are for an actor. + + Ancient man would make up spirits in such situations. Perhaps then files in a + temporary directory are for the 'tanuki', a real animal in Japan who that has + taken on mythical proportions causing things to disappear or playing tricks. A + directory of deprecated files could said to be for 'Lethe', the river that + carries away the memories of those who are reincarnated - which reminds me of + the rivers in so many places that carry away trash. I tried these names + out. It was good entertainment, but when trying to explain them I met with + blank expressions. + + Since in these cases we can not answer the question of who the files are for, + we instead choose another common property shared by each and every file in the + directory. Hence we end up with 'tool' for the directory with tools, instead + of saying they are for the tool smith, or even for Hephaestus, besides tool + smiths such as Hephaestus create tools, and the tool directory is full of + tools that were already created. 
+ +Top level directory + + The top level of a github project is of course named after the project. Here + to us programmers have appealed to mythology to find actors. Just look at all + the mythical animals on the covers of the O'Reilley manuals. The top level directory of our git project is reserved for project manager to - use. The project manager builds the directory structure, initializes the - repository, installs tools that will be needed, and generally administers - the project. + use. The 'project manager', in this context, builds the directory structure, + initializes the repository, installs tools that will be needed, and generally + administers the project. In the environment, the top level directory is located at `$REPO_HOME` -Developer +developer The developer's directory is located at `$REPO_HOME/developer`. This directory contains the developer's workspace. Developers are free to - organize it in any manner they see fit, though they should continue to follow the - convention of naming directories after the agents that operate on the contained files. + organize it in any manner they see fit, though they should continue to follow + the convention of naming directories after properties when it is practical to + do so. As examples, @@ -51,32 +133,21 @@ Developer the project involves files for another tool or compiler, the directory is named after that tool. -Executor - - Sometimes multiple related actors operate on the files in a directory. In - which case we give the directory a more general name that describes the - actors as a group. - - So if we had a directory that held a mix of files for various compilers we might - name the directory 'compiler_input' or even 'compiler'. - - One common generalization is 'executor'. 
An executor is any program that runs - another program; examples include shells that interpret shell scripts; - various language interpreter; and most famously the machine loader, which will - load an instruction sequence into memory and point the program counter at it. + -Temporary +scratch_pad - This is a scratch pad directory used by programs. Files will appear - and disappear from here. There is no reason a developer can not manually - add a file, but scripts such as `make clean`, might delete it. Directories - with this name should be git ignored. + This is a temporary directory used by programs. Files in this directory are + typically removed by the program that placed the file, or by a `clean` script. + There is no reason a developer cannot manually add a file, but scripts such + as `make clean`, might delete it. Directories with this name should be git + ignored. -Deprecated +deprecated As a developer I often have files that I set aside just in case I want to look at them again. Sometimes I plan to bring them back later. Unlike temporary files, they are not deleted by any clean script or any program that is using them as - intermediate files. This directory is similar in intent to `git stash` or + to going back to look at old versions. The contents of this directory + do end up in the repo. 
LocalWords: projectologist diff --git a/developer/temporary/.gitignore b/scratch_pad/.gitignore similarity index 100% rename from developer/temporary/.gitignore rename to scratch_pad/.gitignore diff --git a/temporary/.gitignore b/temporary/.gitignore deleted file mode 100644 index 120f485..0000000 --- a/temporary/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!/.gitignore diff --git a/tester/test0/TestGraph.class b/tester/test0/TestGraph.class new file mode 100644 index 0000000000000000000000000000000000000000..6b332c864ae8669f813a578d399aec7c6e861721 GIT binary patch literal 1673 zcma)6-*ePd6#nk+(sV=GE-l#dLj+x5e<-1%AQsTJN-3l`V>@N44?DNJw{2)P=_J|C z@X8y1i0=gyJI?5%&iFSuj^9nPX{qbTLvoXI&;8Ch-}%n{^Y34N18@_M1j;KaiWfq; zvt>XCRG!FPX?xP&w7*$@qS~>6qQKN<7zDddQ~N^l-QPXNGdaZ7)Q_fW6wBCc&wDb8 z7X6Jtpy34Jrri!YYD@MaJ3|cBhQ}yw;M=+HjDXQ<5n*XwKzzp>yDFB+w!oxwIG3gV z2G0!Lc6~R#D^RS}R|U@X9pg~?(MAwHqMw8v(1hqyaAr3==SG(Zm~NCPuo}P}hVv1V(h9ay|u` zwL!TLZnK3C@sSRB)xt2!n*3`5wSf!{kd}du)2`%V6S#9Ye41_j?X_y*6MSmoI&P%E zd0iGh(=xe^TLNeMQaaG$qR&41(qsSMNy}eX)TzCq)#@rg=EkXk92mI6P?0)J;h9=L z!V$6RNiem*ogmomQ7QE|n&bMr!M3uQ^j6ThFa3_EqH5~O_~#GAI>^xg=X1zQfGvED zITK&u9!FuGN);G)g0}QlW$4OvPp#+|$LiZu*Q;*i(lG1$fu6j;=j@C*uCE^Ty6Y;; zyhk{e?QLo|`!|<^Uf5RmTu^+3V%ll`fiZ2}$IUh8 z6{ef7@p%g7BncKq|H6bp$qB8!tZJL9u4%IIBc|4VM``bIV~xeLaBVp_&q5as+)aue zMKLLQh6iu+5%>aMW`N^af3H+ E1NB;@}hzE9uJKc)hFc zu1w8aOMyaZODTmy3WZWA9o}ZbTbePHugvrx@LOOQp1ZrU*O8n_X85xA?z!ha_uO-y zbIyJLzw7@1@BrQxC?4}|tLe()zSUlusQDeYR`1wOvlcXbyB!9#R;}T9LC2T(Y7heB zi@xWru9Y3jT`bSWD;hEaQ=Y$AZg@?(WOagaqD8pSUXkVF(v`jy%I3z7K|-aE-XHG} zxc!UUI6$Eo`%cJuueJ?Fr4zX>|rGpY(?$>HHBAlER z5a$Jk9<*IMJR)!;u`_K|ubtaiRhSppdtCs9c>_D~fgG~fZD0hW!x+FGfo!;B2aH}N z?bd7afOi?Gd7V)Db1q#%?+BQ^CJXGSDe&0Sym8Z;wH!y_;Pn~Y4{@Ynk)JZ6Xjpqv z$6Xl`c|c%azd)?6w4KhP?Uq*$mgn5&+MG+9TUJAAxLaVj?s;JllJ^47rA@NkRc~3A zPs(t~Yd&hZO-BY370+Alw5L->RYTu)7t@k08_%q@WqL7DpG%W4*`VA~wL?iZ?!{yd z_n856YJH5v>*w>0jF0a-#}H_EfY}ozV~XS&J}xlmSas>>Afx8|E2nGd~uQ?{ksWRkDt z+rC$K0H# 
zSJKzyDKtzBp-G3#sP2_-S+*lNMA2@sYs5`07VB7|b%6|PE0({k1J(A-o4&d zTnkO|pd*kYY393qGBa5q17@yO0zy{6B~?C2k@7>bWp$iTRmC&i8?l>ut_G&N!CLG{ z%RTGXy{8O(3HRmjMSPjuo8Y0xi^+rM|5sT)W)~)I})f9Sg3Y7XVc{}?IJcn=P@J)Q%z!+{; zP53(klYI^8^Ni5&BAwN}Vf+X=oIXCjWa0YXB?i8WmveXt-;04a?q%Tncv>yI%E+rI z#2rx8#Wc0I^t`K@rNAAViPBF|!;jeHoT#-LxQN$s_%U8*4$MZ$VOG3`<;+`rp|UKW zQ6GArY-lW>wA#u4XvKErW1W?{^b?6HOX}4*ZwEHF9CKYy5y*4-&N1d(zC zi0eDCi{GE}?-UF5Ajh4>($u^7;J!oIJ=tRIuefDT_S_$%hWiG;MWTYcZ^dmK)$DfM zK?F6sle2+nrq0Vyn#YG?bnzlnlYLu^6ie$U1M9dScpX~y9b_{XvA_5?99+o0heHb) zDjWr>gYRKxA!9yNUC5e`RNuwiIvxYgiAxIMQ#}ZWi+{l_mvJrvzKE%90T&eD6I%ec zJNvUrbzxJrRN~Q}S4T@)lD6X_?%(#n#j8+?CDUGq3tSSH=&cNX#_tI9@uOm&c~l${ z|Hf1JIb|P(!S5T`O^u_J%-|PTz*~3)zr?HfHU5rw@IHQnf8n=c0KXFk{y=Te>&nwc zH%6R!af~=?M==t3l93sR;yz7n^`aVP+}E*6v-l%6fiTNg&3u_$r>}s^a4ep%k0;7BA!L=D^#~i&yac zr9a_?O6i{%RnjYXWufwCyimG~AH-HGSF%=({0HM2UK$hlAzn-H#yBdIvRCjErOX-q z3FIhM=jhChw<2i{H_^jS^^CWh2 zttgQ?I27)NkRsw*48|5`399JxW@f=KSMHp_DXLpIPm*Go3=0kKDL>+FqJz-g;-gNp zaPA4+ac}_+L#-Zbt^>at@;Kg3H{!-c*<%_5-q2CB=$kl{v2e0hb60aAU^DyFiYjsiKf@HWH6Q|Y==q2SWORVv02 z4->JyM~!PdDZ=GSRP=Z7u8jp;W2gk0;ZmchmKTcQvX^s4M_k5+e5b4-khgHzMg`(kc=nZ$QX#p#3D?GZc%NaplpQ&@ zdwvv*^!hU`4Exsxd_*m>@Bzc5Va7NRhIVNO%AB+Nh@tsPRRejX z4unf)`)crj%RwmOdZtP(x^l{{GDQpbh{<6J>|g_TZQRELnx8I}+>C+pdAP&r4ciUH zH)dg&_5vv$CWpHs$|duUvzF}yaX>0-QYu3uC20gFiBbLmy&Ac$vnlDxN~x1B1ENTn|1-UA5gCWulq+NyfGy}96m{s~$=s(f^16NK^e@^=fyj0uR z9Hy9ana&t<0@qVagLH%L3{su`sh1WzM$_PA@i>_@x82H-7b&qST4>X)v0f)@6USof z7|S!2A2HE7!L8>%W5sL#iD@G}!JTitU$D|X#(D<-kPL{=Q!g-Up;Ke%;%<&tqrZu! 
edV)=(464dDtu{&1e5s^g)458LNj#!m1=c@X`omfP literal 0 HcmV?d00001 diff --git a/user/Ariadne_0.1/AriadneGraph$_wellformed_q_closure2.class b/user/Ariadne_0.1/AriadneGraph$_wellformed_q_closure2.class new file mode 100644 index 0000000000000000000000000000000000000000..b12ef7d94634f3d4bbb105634f2cf27ef1bee8c1 GIT binary patch literal 2149 zcma)7TT>iG6#jZ43`_>tK*XRiqS=IHmt-W~f&>-TgsfRY%pw}Rtj%n*Oc=V?nclVd z16KJ%e2-d%R33ayReqCYd3t67gKny*sp{!I=k#~(r=S1x^e+G(!eh9+6#Bd=#Y)KA zJB7wiA_%rsxF?#89~)kvqE0AoSYQm}tx&1%z8i4aaw`ePLLbAd3R|wHnqr4{A~%%^ zJ5u|5!d($kgj|c}(Ty_%mDPDYxxp~eTRDT16}K=%l&qNyQ^ne2-sNsb`+>X4+Y2+9 zb6w~K2WR0hm5I0xKhjd_LA_E=8=Z#Xc1N=IUqg3 zF!p~13}+h#HL-x~YpMzl0*0$S>`Nn*HIU&#TZEAxX(4q(QbHShJlt(WLJ#7NJ}MXr z;k91j>h86JPRo~W_oiEw&Hbt*TerC+jt9w(E(hcz_o;a z5GrzC?=#F7PnDIdf;8(6rcoTDNX;Swv z&mWtfsIx$=HApb&DR!oMzGsQKB#up5>33Ade z3^OOfbD~53(^?Fdj$_?R(88Bghe6zk4!**=jSYNFGpj*=T zYrYisJ9}FqOevEOlP6%qk9-nYl2Vy^P?E;pFqMY4OOra4b*hq{i8xAv*rT_ld3vbp z$#0qsUf7X&!y_ z0amFz`zK!cjkXNm&^i^92QYzG=>&qhfJx$+-D|YV(Vps?Rs*w4=N$b<%B80$0f%_! 
z5Fb1tN)F$~cg#&4BTpofN5~`e@ET-;cawMquR#A;SBV{9!QdoTISDnr`(27WPww^O z78Z${wPw0>fXZ0zR}7X8vGU}1RBPpD7&X*GtUapzfok~x4-@$B35*JT5g*oXlUG+dh;PqwF%nc?=G{ny$91p_sL z1;e>)cKo2z-=&{RqqTxYFUo3%RA6SZgLYyxIVPCZtx=1GRJwgfa%{a|h8`SBq*M6< z7xj#Hvxfw;lj(Fq6BQ1Q*@?VOGA~TrR#Xed>|CA`>`cKS9!=)e zqJlNP+!Zs0)TrGaAIPP~3Y!iVY{mb^!Q@!M&KX!l)&_ERvS4!+!Sd#-oRvIUUEkta zT=5dY5-bg4G0up)}vV7Ps#I@|p zvJpg^TTUak2wD&eb8c${b1+w-v zLx~I}2W$i9P%8b|Y$0Euu?y~UwV{QQ%8Y0C+wEj+Bs+L%GBcRA^UJ%l+5N?mmNZ$s zkV|ETUA;}8xQCYN9dyaJyR!C9*i7q;rwTUZz82?&aW2lE6{1ffIFMAbwx-%j%f1NK zp~K?VX;Y2fVaIm#_{D0RrWP!Wr1FB9-Cob+y!t9XG_Z*@l`aKjV2fa`uPqzcMprFn z#*zd36GN%Aofu0NMg(VhCF$;Ys(CTg=yg)r_HCSL;4%~Q$ft4*!3ACf?NB~q^h(hQ zJ`~;#^jNfoozyK2Y*#YJNMPa$dYz+E!AdW-JI@y?jy)F5>`Hn$p?Z>|wuxQ~HL6Ee zo1nwbVAv?otn33jhfG{8xX=gO<7?Hz(E2ZP9>)hPx_E-2PD@6QDsvrHE}SG6Z< z-Z>nE)jP>fWa{OVbfj)vZG_%eV2Co+@6tSMsW5!b5{}uEoT@b%4Ji6+R;>| zm^ZQCe|i-op1q7(s0Ejt;RKA`%pc_HUBu(g-?z_eEp8AQ!*zAD z4J)pdveK4UZrNH;%vsiA&QuhIc+C1S7!RN2d2oN z#0l;6lXxZa!TU8;QHIac6 zRB|$}#BAb4!FgU#?OPsHC)E1xKaTHMn2DFFDnf?~1HVI=lJAO4FZ0^0tFLM6csiz& zls6c`ck!xn{nx11v^`XakEDiZ-@j{NmWr0)0qP^?iY%0~N3-MH>F--;P^0xq?)G3} zIGY_*Mi9Q{uu1&mQVYafI zIoiHQi}^xgBsp#;+~L3Hh<;4yp?;Rhx6f&7GG5AO6Y`CUMtOjW#ZqsJ8zM(JmD2pVj!K(7jyO+1OM)y zP^KG&Q~&-pj5qNg6oox*=jbp0C0OrgWZvk%*;%Zl4X?CSaQq(&jrd=|vhs#l<=c$< z1Ju5ftw(?-1!pKzN{qR2W6I`f{U+XiPkTSdpc(RH#m&qvW^(p0<;GP9wF#TT(^XJK zE>mi`TWxu!7#6Eu%D&NOdxnIml+jc^&#;=v+1C|Q3eB-ubq+8kB3P`$;Bac(&LqnF zEp5h>nJm6N6wc(za0BbYsb{B9TGzu<)ad$cDTRAwJ|*~6PCS~ZxCGqFYUh@2^B&5&iXT*ZIgJ%`7cN-bB)mapz8(`UQO3dN`kHIRW z4x=nZq%DG{RP{Mu+PMtV;=l+4kF24r-zmR#IWpYNGmjzXQe}>YIy#zqXa-HDoW}ss zG>|Q(2b(h4LQ}upG?Xo723MPMfuP0jIOjbnmNcZ!qp~h6Yo&wSjqwOnNM}sBNaePG zE+f1FV13nz&Smj5E-9B-SRkE5rIYt(^UR?$oIU8U#JHqf?vN_7<+8ABm&>Odi*}b{hao#oQ?O{k z?F>tg?BW(vc_;PDZi@$d8fVNaw&e+*O_jpuxrSUR*x+N#)9tNF{ADKB>!>QdVc9G1 zX9)CJjAp~$41-G3>eVz6mYi#wyS+TV*F#xcu2y=zCV~g#Y_)xrw$*jamW^GBE!#}t z>1L}hbj7i!Z>&l^CFf$O7_hKV23e=@TFSiz(n*o$-|9=5iZGb^baqtcq)N~!r^J%s zPv?PgwIpBOA~;2CdzYP2G0GlXACX3x!&?j4ACYa+rJhC^!&rX0w?vIq5yC0IB9fIc 
z-u4I!i+Xh)_FRy(`l$T0S0<8|0t+W8Mpz5YW{#KZ83BC*h|leoA*_ufC03ogv5URS zmx{&r3P@?(C^v=W2D#Y*F7GQMxAJm_13X(Tnl4X$XOl^$*V8E-74-9DkZ;$=h7=EJ z9EdOhm+=G(n@cb{x0FX{_B1_p_vBpWsHo?S(eHHcBUoAq=6-~Rd{im?BVoB)?xC75 z8sI@k-}Hs@2S@v}jP&>GdkJkGDuq25VM(p3FZo!6)vSJc#5td+h|P*%j736;!}2-6 zskSQ=(=Ug(%bDDyCMjL2T2(kYTat=$tD^F`Ng~)Bf^D3B0b9+QQ|Qhiq7P{mkBU-h zVi93?o%Y6d5s^+mI@e`sYFRSZU4Lr-JpV$JHsw3O+rcI87=oF7HtSPfJ+Cnb_OqJV zlk8WA`dQtnjNMaIrw6yk>drFJ?@jVqjI7VUj8l>A8QO?; zFq^O8c(#}uu+^!ZVU3x{xwE8I^L8?iWvt$VVEMqaen4H;sywXjTG@tGd9Pa-k zKtouC1h#jxrf+0>ySi27_+sMB!+hTSf04ycjm80PiygW$jXsvo*9j)zc?du3l;v(P>%xLH^!Y)RK5j8YQ)aYH+*I`cL z5{Jet)2!^Mi`F@~b-iCFXEpLHf7=l}g-c-=2pHIGU~B2Oix0py{=S99PA6Zs%9l84 z9`B++am=A`E}%fJq6luqO!+c(vb}6>I0@+KX5XZi2 zT^^gnE}*-$w{;SGfgP*VLm$x7*4t)XHHiFy7Y?vOdgTBU&4$B9Cw%j!+jP_%^OE5On@J*_^~W{%+(yEy0C)*K;Y4`IO=U zejA9Q7Yp$S(~FYg21hEm3m@TI9Zuk*xSLY6@H}qeQvi$bENwe1kV_L@6BNXl@fC+s1D;x@Frqt&EU=g{GT?;?#-?xGl>4q+}=I80qUJAtnpf)PB7V2xV9kxMNC(N588h4id7 zdYy=afUgo+ZT4$r^mQCj=(7m>JTQrGPGQ-BInM(#s|<4t$6Y2Lq-=tO*+`fPpl`<{ zq3A=#s>7J$x}EjHBwhwy!V5iZJU|?I39n4xdu_eW6iVv{fX8nl2%N+ms&iLKr8BkR zuj7}74jTV@j|4kimcah!C`%>5-zzh>jS^IHpXuz8EB&Fnl2C`dLQPpeEcO1Vq^Uz#V!dAOeZtkBcl5`n9j$124kGhAWsd#} zm3}=+-{jHO{<60I0zYxIRZ~yWiI*cZV;$^&28IQDDKX=ACNc){W{THR_mpBxj=}%yx)Au}nIe{NlP)njF z`mf69|LUS2!fwvu+soKmnL;4Q86y~$uVO5`8mn;)&SUnu97%@ge(dL0p#p5& zgCRW54?Ew-2roOY#Ty*?EqGauQCZ3F9__eJF6Q@@E0C9K_;KY96y<&#kk@fg-oTCW zYupqF;pV_negRP-Uq{=(3fCd5KqIfURkRg#=5btuWxZ34yc+uk&TO5O`A&MQQo|vB zuF|=WG|iL3%}mM}tUWqvJ3`UWYv_uGz9lP!;qPMg)FM#Xj$o?*RY!2H(Hjj;%KMbM z&(Z2`hz2{%sM!-Wdk-UY_|$jWrYt#QaV1K$#7@ZCzMxK?t?}9y@k(D!qnyty+9(&s zkIVW=*~kdjR>_xZ5k85945Xd%Hk#O~KUsDXDs!G1GoVXGD|*+SMZn?vXfU|WgsRC{O?tr+^93WDw|li z1(^IMWi!!gSOz9!3!j2i|K+lkPqkD7kDZP(aFp+=4(eJwA>AjWXJ6vDT+u^8?kRg= ztsFTad;4Osw&T+0D5X)Znv@T4|FPC+uaTJh|IH!#%9?h!#W!U-A%P%sf_5YC~HbF-?JLUIiz8r zML5>VY*%wk(uxPESR;j75>(g`l&CPzq$}5I%5AdH4{K}im}K4o%i+|E)euzwKY}aR zP2@`Pb$0BSlL2!cVwoR)kxzx?h?(KiKB!uyyh&`Ls)0))zDSbksWejC^()15}A0=;wSuo`p2TH)+gIV>UmiK%QaCvzQ!a`!$XSV90Z9f5PL(g?xJ-*66m| 
zZFjrv``z{$w@tfk(QR*V+XvnDF}MAK+aAH!HSRau_K4g6ZMXft+x~$&{ 0 ? file_fn[ 0..file_fn.lastIndexOf('.') - 1 ] : file_fn + def file_fn_ext = file_fn.lastIndexOf('.') > 0 ? file_fn[ file_fn.lastIndexOf('.') + 1..-1 ] : '' + + return [ + parent_dp: parent_dp + ,file_fn: file_fn + ,file_fn_base: file_fn_base + ,file_fn_ext: file_fn_ext + ] + } + + static boolean file_exists_q( String node_label ){ + def node_path = Paths.get( node_label ) + return Files.exists( node_path ) + } + + /*-------------------------------------------------------------------------------- + Node type checks and marking + */ + + static Set all_node_type_set = ['symbol' ,'path' ,'leaf' ,'generator'] as Set + static Set persistent_node_mark_set = ['cycle_member' ,'wellformed' ,'build_failed'] as Set + + static boolean leaf_q( Map node ){ + return node && node.type == 'leaf' + } + + static boolean has_mark( Map node ){ + return node?.mark?.isNotEmpty() + } + + static void set_mark( Map node ,String mark ){ + node.mark = node.mark ?: [] as Set + node.mark << mark + } + + static void clear_mark( Map node ,String mark ){ + node?.mark?.remove( mark ) + } + + static boolean marked_good_q( Map node ){ + return node && node.mark && ( 'wellformed' in node.mark ) && !( 'cycle_member' in node.mark ) && !( 'build_failed' in node.mark ) + } + + /*-------------------------------------------------------------------------------- + Well-formed Node Check + */ + + static Set all_form_error_set = [ + 'no_node' + ,'node_must_have_label' + ,'label_must_be_string' + ,'node_must_have_type' + ,'bad_node_type' + ,'neighbor_value_must_be_list' + ,'neighbor_reference_must_be_string' + ,'mark_property_value_must_be_set' + ,'unregistered_mark' + ,'missing_required_build_code' + ,'leaf_given_neighbor_property' + ,'leaf_given_build_property' + ] as Set + + static Set wellformed_q( Map node ){ + def form_error_set = [] as Set + + if( !node ){ + form_error_set << 'no_node' + return form_error_set + } + + if( !node.label ) + 
form_error_set << 'node_must_have_label' + else if( !( node.label instanceof String ) ) + form_error_set << 'label_must_be_string' + + if( !node.type ) + form_error_set << 'node_must_have_type' + else if( !( node.type instanceof String ) || !( node.type in all_node_type_set ) ) + form_error_set << 'bad_node_type' + + if( node.neighbor ){ + if( !( node.neighbor instanceof List ) ) + form_error_set << 'neighbor_value_must_be_list' + else if( !( node.neighbor.every { it instanceof String } ) ) + form_error_set << 'neighbor_reference_must_be_string' + } + + if( node.mark ){ + if( !( node.mark instanceof Set ) ) + form_error_set << 'mark_property_value_must_be_set' + else if( !( node.mark.every { it in persistent_node_mark_set } ) ) + form_error_set << 'unregistered_mark' + } + + if( node.type == 'path' && ( !node.build || !( node.build instanceof Closure ) ) ) + form_error_set << 'missing_required_build_code' + + if( node.type == 'leaf' ){ + if( node.neighbor ) form_error_set << 'leaf_given_neighbor_property' + if( node.build ) form_error_set << 'leaf_given_build_property' + } + + return form_error_set + } + + /*-------------------------------------------------------------------------------- + Graph traversal and build functions + */ + + def lookup( String node_label ,boolean verbose = false ){ + def lookup_node = node_map[ node_label ] + if( !lookup_node ){ + def match_result + for( func in node_f_list ){ + match_result = func( node_label ) + if( match_result.status == "matched" ){ + lookup_node = match_result + break + } + } + } + + if( !lookup_node && verbose ) println "lookup:: Node ${node_label} could not be found." 
+ return lookup_node + } + + def run_build_scripts_f( List root_node_labels ,boolean verbose = true ){ + if( root_node_labels.isEmpty() ) return + + def node_function = { node ,error_token_set -> + + if( !can_be_built_q( node ) ){ + println( "Skipping build for ${node.label} due to dependency problems" ) + return + } + if( !should_be_built_q( node ) ){ + if( verbose ) println( "${node.label} already up to date" ) + return + } + + println( "Running build script for ${node.label}" ) + node.build( node ,node.neighbor ) + + if( should_be_built_q( node ) ){ + println( "Build failed for ${node.label}" ) + set_mark( node ,'build_failed' ) + } + } + + println( "run_build_scripts_f:: running ..." ) + all_DAG_DF( root_node_labels ,node_function ,verbose ) + } + + // Add the rest of your methods here as instance/static methods based on whether they depend on the graph instance + +} diff --git a/user/Ariadne_0.1/build b/user/Ariadne_0.1/build index 14c8bce..9e6339b 100755 --- a/user/Ariadne_0.1/build +++ b/user/Ariadne_0.1/build @@ -1,54 +1,48 @@ #!/usr/bin/env groovy -def include_ariadne_library(){ - - // Get the directory where this script is located - def script_dp = new File(getClass().protectionDomain.codeSource.location.path).parent - def ariadne_lib_fp = new File(script_dp ,"Ariadne.groovy") - - if(!ariadne_lib_fp.exists()){ - println "Error: Ariadne library not found in ${script_dp}" - System.exit(1) - } - - return ariadne_lib_fp.text +// Function to load the graph class dynamically +def include_graph_class( graph_class_fp ) { + def class_loader = this.class.classLoader + try { + def graph_class = class_loader.loadClass(graph_class_fp) + println "Successfully loaded class: ${graph_class}" + return graph_class.newInstance() + } catch (Exception e) { + println "Error loading class: ${e.message}" + e.printStackTrace() + System.exit(1) + } } - // Main build function for the Shell UI -def build(graph_fp){ - // Check if the graph definition exists - def graph_fn = new 
File(graph_fp) - if(!graph_fn.exists()){ - println "Error: Graph definition file not found: $graph_fp" - System.exit(1) - } - - // Prepare the binding and shell for evaluation - def binding = new Binding() - def shell = new GroovyShell(binding) - - // Load the graph definition into the shell - shell.evaluate(graph_fn.text) - - // Check if node_map and node_f_list are defined as local variables - if (!binding.variables.containsKey('node_map') || !binding.variables.containsKey('node_f_list')) { - println "Error: Graph definition must define both 'node_map' and 'node_f_list'" - System.exit(1) - } - - // Load the Ariadne library functions - shell.evaluate(include_ariadne_library()) - - // Call the build function in Ariadne.groovy - run_build_scripts(binding.getVariable('node_map'), binding.getVariable('node_f_list')) +def build( graph_class_fp ) { + // Load the graph class dynamically + def graph_class = include_graph_class( graph_class_fp ) + + // Get the node_map and node_f_list from the graph class + def node_map = graph_class.get_node_map() + def node_f_list = graph_class.get_node_f_list() + + // Log the result + println "node_map: ${node_map}" + println "node_f_list: ${node_f_list}" + + // Check if node_map and node_f_list are defined + if( !(node_map instanceof Map) || !(node_f_list instanceof List) ){ + println "Error: Graph definition must define both 'node_map' and 'node_f_list'" + System.exit(1) + } + + + // Call the build function in AriadneGraph + AriadneGraph.run_build_scripts_f( node_map ,node_f_list ) } // Entry point for the script -if(args.length == 0){ - println "Usage: ./shellUI.groovy " - System.exit(1) +if( args.length == 0 ){ + println "Usage: ./build " + System.exit(1) } -def graph_fp = args[0] -build(graph_fp) +def graph_class_fp = args[0] +build( graph_class_fp ) -- 2.20.1