Example #1
def __init__(self, task, inputs, outputs, logger, recipe_path):
    super(PipelineCook, self).__init__(task, inputs, outputs, logger)
    # Ensures the recipe to be run can be imported from the recipe path
    self.logger.warning("Ignoring this recipe_path: " + str(recipe_path))
    try:
        try:
            module = importlib.import_module("lofarpipe.recipes.master." +
                                             task)
        except ImportError:
            # ...also support lower-cased file names.
            module = importlib.import_module("lofarpipe.recipes.master." +
                                             task.lower())
        self.recipe = None
        try:
            self.recipe = getattr(module, task)()
        except AttributeError:
            # Try with the first letter capitalized (Python type name convention)
            self.recipe = getattr(module, task.capitalize())()
        self.recipe.logger = getSearchingLogger("%s.%s" %
                                                (self.logger.name, task))
        self.recipe.logger.setLevel(self.logger.level)
    except Exception as e:
        self.logger.exception("Exception caught: " + str(e))
        self.recipe = None
        raise CookError(task + ' cannot be loaded')
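The double fallback above (exact module name, then lower-cased; exact class name, then capitalized) can be factored into a small standalone helper. A minimal sketch, assuming only the standard library; the load_recipe_class name is hypothetical, and the package default is taken from the example above:

import importlib

def load_recipe_class(task, package="lofarpipe.recipes.master"):
    """Import <package>.<task> (or its lower-cased file name) and return
    the class named <task> (or its capitalized variant)."""
    try:
        module = importlib.import_module("%s.%s" % (package, task))
    except ImportError:
        # Fall back to lower-cased file names.
        module = importlib.import_module("%s.%s" % (package, task.lower()))
    try:
        return getattr(module, task)
    except AttributeError:
        # Fall back to the capitalized class name.
        return getattr(module, task.capitalize())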
Example #2
import os
import imp
import logging

# Note: 'log' below is assumed to be the module-level logger defined
# elsewhere in factor.
def call_generic_pipeline(op_name, direction_name, parset, config, logbasename,
                          genericpipeline_executable):
    """
    Creates a GenericPipeline object and runs the pipeline

    Parameters
    ----------
    op_name : str
        Name of operation
    direction_name : str
        Name of direction
    parset : str
        Name of pipeline parset file
    config : str
        Name of pipeline config file
    logbasename : str
        Log file base name
    genericpipeline_executable : str
        Path to genericpipeline.py executable

    """
    from lofarpipe.support.pipelinelogging import getSearchingLogger
    from factor.lib.context import RedirectStdStreams
    import time

    genericpipeline_path = os.path.dirname(genericpipeline_executable)
    loader = imp.load_source('loader', os.path.join(genericpipeline_path,
        'loader.py'))
    gp = imp.load_source('gp', genericpipeline_executable)

    # Initialize pipeline object
    pipeline = gp.GenericPipeline()

    # Add needed attr/methods
    pipeline.name = '{0}_{1}'.format(op_name, direction_name)
    pipeline.logger = getSearchingLogger(pipeline.name)
    pipeline.inputs['args'] = [parset]
    pipeline.inputs['config'] = config
    pipeline.inputs['job_name'] = direction_name

    # Set pipeline logging to DEBUG level
    logging.root.setLevel(logging.DEBUG)
    pipeline.logger.setLevel(logging.DEBUG)
    for handler in pipeline.logger.handlers:
        handler.setLevel(logging.DEBUG)

    # Run the pipeline, redirecting screen output to log files
    time.sleep(2.0) # pause to allow result_callback() to transfer resources
    log.info('<-- Operation {0} started (direction: {1})'.format(op_name,
        direction_name))
    with open("{0}.out.log".format(logbasename), "wb") as out, \
        open("{0}.err.log".format(logbasename), "wb") as err:
        with RedirectStdStreams(stdout=out, stderr=err):
            status = pipeline.run(pipeline.name)

    return (op_name, direction_name, status)
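factor's RedirectStdStreams is imported above rather than shown. A minimal stand-in with the same interface, sketched here as a plain context manager that swaps sys.stdout/sys.stderr (this is not factor's actual implementation, only an assumption based on how it is used):

import sys

class RedirectStdStreams(object):
    """Temporarily point sys.stdout/sys.stderr at the given file objects."""
    def __init__(self, stdout=None, stderr=None):
        self._stdout = stdout or sys.stdout
        self._stderr = stderr or sys.stderr

    def __enter__(self):
        self._old_stdout, self._old_stderr = sys.stdout, sys.stderr
        sys.stdout.flush()
        sys.stderr.flush()
        sys.stdout, sys.stderr = self._stdout, self._stderr
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._stdout.flush()
        self._stderr.flush()
        sys.stdout, sys.stderr = self._old_stdout, self._old_stderr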
Example #3
def main_init(self):
    """Main initialization for stand-alone execution, reading input from
    the command line"""
    # The root logger has a null handler; we'll override in recipes.
    logging.getLogger().addHandler(NullLogHandler())
    self.logger = getSearchingLogger(self.name)
    opts = sys.argv[1:]
    try:
        myParset = parset.Parset(self.name + ".parset")
        for p in myParset.keys():
            opts[0:0] = "--" + p, myParset.getString(p)
    except IOError:
        logging.debug("Unable to open parset")
    (options, args) = self.optionparser.parse_args(opts)
    if options.help:
        return 1
    else:
        for key, value in vars(options).items():
            if value is not None:
                self.inputs[key] = value
        self.inputs['args'] = args
        return 0
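The loop over myParset.keys() prepends each parset entry to the argument list as a --key value pair; because explicit command-line options appear later in opts, they override the parset defaults during parsing. A self-contained sketch of the same idea, with a hypothetical --loglevel option for illustration:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--loglevel")    # hypothetical option, for illustration
parset_defaults = {"loglevel": "INFO"}

opts = ["--loglevel", "DEBUG"]     # pretend this is sys.argv[1:]
for key, value in parset_defaults.items():
    opts[0:0] = "--" + key, value  # prepend, so command-line values win

options, args = parser.parse_args(opts)
print(options.loglevel)            # -> DEBUG: the later occurrence overrides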
Example #5
def __init__(self, task, inputs, outputs, logger, recipe_path):
    super(PipelineCook, self).__init__(task, inputs, outputs, logger)
    # Ensures the recipe to be run can be imported from the recipe path
    try:
        try:
            module_details = imp.find_module(task, recipe_path)
        except ImportError:
            # ...also support lower-cased file names.
            module_details = imp.find_module(task.lower(), recipe_path)
        module = imp.load_module(task, *module_details)
        self.recipe = None
        try:
            self.recipe = getattr(module, task)()
        except AttributeError:
            # Try with the first letter capitalized (Python type name convention)
            self.recipe = getattr(module, task.capitalize())()
        self.recipe.logger = getSearchingLogger("%s.%s" % (self.logger.name, task))
        self.recipe.logger.setLevel(self.logger.level)
    except Exception as e:
        self.logger.exception("Exception caught: " + str(e))
        self.recipe = None
        raise CookError(task + ' cannot be loaded')
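This variant uses the long-deprecated imp module to search recipe_path directly. On Python 3 the same file-based lookup can be written with importlib.util; a rough sketch, assuming recipes live as <task>.py files in the given directories (the load_task_module name is hypothetical):

import os
import importlib.util

def load_task_module(task, recipe_path):
    """Load <task>.py (or its lower-cased name) from the first matching
    directory in recipe_path, mirroring imp.find_module/load_module."""
    for directory in recipe_path:
        for name in (task, task.lower()):
            candidate = os.path.join(directory, name + ".py")
            if os.path.isfile(candidate):
                spec = importlib.util.spec_from_file_location(task, candidate)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)
                return module
    raise ImportError("No module named " + task)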
Example #7
def go(self):
    """
    Read the parset-file that was given as input argument, and set the
    jobname before calling the base-class's `go()` method.
    """
    try:
        parset_file = os.path.abspath(self.inputs['args'][0])
    except IndexError:
        return self.usage()

    # Set job-name to basename of parset-file w/o extension, if it's not
    # set on the command-line with '-j' or '--job-name'
    if 'job_name' not in self.inputs:
        self.inputs['job_name'] = (os.path.splitext(
            os.path.basename(parset_file))[0])
        self.name = self.inputs['job_name']
    try:
        self.logger
    except AttributeError:
        self.logger = getSearchingLogger(self.name)
        self.logger.setLevel(self.inputs['loglevel'])
    # Call the base-class's `go()` method.
    return super(GenericPipeline, self).go()
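This go() combines two small idioms: deriving a default job name from the parset file name, and creating the logger only on first use. A standalone sketch of both, with logging.getLogger standing in for getSearchingLogger and hypothetical helper names:

import logging
import os

def default_job_name(parset_file):
    # Basename of the parset file without its extension.
    return os.path.splitext(os.path.basename(parset_file))[0]

def ensure_logger(obj, name, level=logging.INFO):
    # Create obj.logger on first use, like the try/AttributeError idiom above.
    if not hasattr(obj, "logger"):
        obj.logger = logging.getLogger(name)
        obj.logger.setLevel(level)
    return obj.logger

print(default_job_name("/data/parsets/facet_selfcal.parset"))  # -> facet_selfcal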