def __init__(self, pipeline):

    # Object used to report superpipeline-level errors.
    self.pipelineErrors = pce.pipelineErrors()

    # Name of the top level pipeline, e.g. that defined on the command line.
    # NOTE(review): the 'pipeline' argument is accepted but not stored here —
    # confirm callers expect self.pipeline to start as None.
    self.pipeline = None

    # Number of tiers in the super pipeline.
    self.numberOfTiers = 1

    # Pipeline configuration files comprising the superpipeline, indexed by
    # pipeline name, and configuration data for the constituent tools.
    self.pipelineConfigurationData = {}
    self.toolConfigurationData = {}

    # All tasks in the superpipeline along with the tools they run.
    self.tools = []
    self.tasks = {}

    # Unique and shared node IDs across all the pipelines.
    self.uniqueNodeIds = []
    self.sharedNodeIds = []

    # Constituent pipelines with their tier, and pipelines indexed by tier.
    self.pipelinesByTier = {}
    self.tiersByPipeline = {}

    # Tasks that should be included in a plot.
    self.tasksInPlot = {}

    # Random string associated with the graph; used in makefiles and
    # intermediate files so simultaneous gkno executions do not produce
    # filename conflicts.
    self.randomString = strOps.getRandomString(8)
def __init__(self, allowTermination = True):

    # Object used to report pipeline errors.
    self.errors = errors.pipelineErrors()

    # Identity of this pipeline: name, path to its configuration file, and id.
    self.name = None
    self.path = None
    self.id = None

    # Help-message information: description and the categories the pipeline
    # belongs to.
    self.description = 'No description'
    self.categories = []

    # Parameter set information for this pipeline.
    self.parameterSets = parameterSets.parameterSets()

    # Tasks that the pipeline comprises, plus flat lists of every task and
    # every tool the pipeline executes.
    self.pipelineTasks = {}
    self.allTasks = []
    self.allTools = []

    # Graph nodes shared between different tasks, and nodes kept unique to a
    # specific task.
    self.sharedNodeAttributes = {}
    self.uniqueNodeAttributes = {}

    # Connections that need to be made between nodes and tasks.
    self.connections = []

    # Valid top level pipeline arguments, in long and short form.
    self.longFormArguments = {}
    self.shortFormArguments = {}

    # Whether any task is itself a pipeline, and the list of pipelines used
    # by this pipeline.
    self.hasPipelineAsTask = False
    self.requiredPipelines = []

    # If arguments are imported from a tool, the tool's name is stored here.
    # Validity of the tool is left to the methods that use it.
    self.importArgumentsFromTool = None

    # When nested inside other pipelines, this pipeline's nodes carry an
    # address locating them in the graph (e.g. nodes for a pipeline task
    # named 'run' are prepended with 'run.').
    self.address = None

    # Whether encountering a problem should terminate execution, or all
    # steps should be processed regardless.
    self.allowTermination = allowTermination

    # Flag set when this pipeline contains instructions for building tasks
    # that generate multiple output file nodes.
    self.generatesMultipleNodes = False

    # Development pipelines are kept out of help messages and the web json.
    self.isDevelopment = False

    # Tracks whether any problem was encountered while processing the
    # pipeline configuration files.
    self.success = True