Example #1
def create_gc3pie_engine(store):
    '''Creates an `Engine` instance for submitting jobs for parallel
    processing.

    Parameters
    ----------
    store: gc3libs.persistence.store.Store
        GC3Pie store object

    Returns
    -------
    gc3libs.core.Engine
        engine
    '''
    logger.debug('create GC3Pie engine')
    n = cfg.resource.max_cores * 2
    logger.debug('set maximum number of submitted jobs to %d', n)
    engine = gc3libs.create_engine(store=store,
                                   max_in_flight=n,
                                   max_submitted=n,
                                   forget_terminated=True)
    # Put all output files in the same directory
    logger.debug('store stdout/stderr in common output directory')
    engine.retrieve_overwrites = True
    return engine
Example #2
    def complete(self):
        """
        Complete a bulk job submission.

        Create engine, and progress it until all jobs have terminated.
        """
        # create an instance of `Engine` using the list of configuration files
        try:
            self._engine = create_engine(*self.config_files,
                                         resource_errors_are_fatal=True)

        except gc3libs.exceptions.Error as err:
            raise EasyBuildError("Failed to create GC3Pie engine: %s", err)

        # make sure that all job log files end up in the same directory, rather than renaming the output directory
        # see https://gc3pie.readthedocs.org/en/latest/programmers/api/gc3libs/core.html#gc3libs.core.Engine
        self._engine.retrieve_overwrites = True

        # `Engine.stats()` (which is used later on in `_print_status_report()`)
        # changed between 2.4.2 and 2.5.0.dev -- make sure we stay compatible
        # with both
        try:
            self._engine.init_stats_for(Application)
        except AttributeError:
            _log.debug("No `init_stats_for` method in the Engine class;"
                       " assuming pre-2.5.0 GC3Pie and ignoring error.")

        # Add your application to the engine. This will NOT submit
        # your application yet, but will make the engine *aware* of
        # the application.
        self._engine.add(self.jobs)

        # in case you want to select a specific resource,
        # call `Engine.select_resource(<resource_name>)`
        target_resource = build_option('job_target_resource')
        if target_resource:
            res = self._engine.select_resource(target_resource)
            if res == 0:
                raise EasyBuildError(
                    "Failed to select target resource '%s' in GC3Pie",
                    target_resource)

        # Periodically check the status of your application.
        while self.jobs.execution.state != Run.State.TERMINATED:
            # `Engine.progress()` will do the GC3Pie magic:
            # submit new jobs, update status of submitted jobs, get
            # results of terminating jobs etc...
            self._engine.progress()

            # report progress
            self._print_status_report()

            # Wait a few seconds...
            time.sleep(self.poll_interval)

        # final status report
        print_msg("Done processing jobs",
                  log=self.log,
                  silent=build_option('silent'))
        self._print_status_report()
Example #3
def test_create_engine_non_default2():
    """Test `create_engine` with several non-default arguments."""
    with temporary_config_file() as cfgfile:
        engine = create_engine(cfgfile.name,
                               can_submit=False,
                               max_in_flight=1234)
        assert_equal(engine.can_submit, False)
        assert_equal(engine.max_in_flight, 1234)
Example #4
def test_create_engine_non_default2():
    """Test `create_engine` with several non-default arguments."""
    with temporary_config_file() as cfgfile:
        engine = create_engine(cfgfile.name,
                               can_submit=False,
                               max_in_flight=1234)
        assert engine.can_submit == False
        assert engine.max_in_flight == 1234
Example #5
    def complete(self):
        """
        Complete a bulk job submission.

        Create engine, and progress it until all jobs have terminated.
        """
        # create an instance of `Engine` using the list of configuration files
        try:
            self._engine = create_engine(*self.config_files, resource_errors_are_fatal=True)

        except gc3libs.exceptions.Error as err:
            raise EasyBuildError("Failed to create GC3Pie engine: %s", err)

        # make sure that all job log files end up in the same directory, rather than renaming the output directory
        # see https://gc3pie.readthedocs.org/en/latest/programmers/api/gc3libs/core.html#gc3libs.core.Engine
        self._engine.retrieve_overwrites = True

        # some sites may not be happy with flooding the cluster with build jobs...
        self._engine.max_in_flight = build_option('job_max_jobs')

        # `Engine.stats()` (which is used later on in `_print_status_report()`)
        # changed between 2.4.2 and 2.5.0.dev -- make sure we stay compatible
        # with both
        try:
            self._engine.init_stats_for(Application)
        except AttributeError:
            _log.debug("No `init_stats_for` method in the Engine class;"
                       " assuming pre-2.5.0 GC3Pie and ignoring error.")

        # Add your application to the engine. This will NOT submit
        # your application yet, but will make the engine *aware* of
        # the application.
        self._engine.add(self.jobs)

        # in case you want to select a specific resource,
        # call `Engine.select_resource(<resource_name>)`
        target_resource = build_option('job_target_resource')
        if target_resource:
            res = self._engine.select_resource(target_resource)
            if res == 0:
                raise EasyBuildError("Failed to select target resource '%s' in GC3Pie", target_resource)

        # Periodically check the status of your application.
        while self.jobs.execution.state != Run.State.TERMINATED:
            # `Engine.progress()` will do the GC3Pie magic:
            # submit new jobs, update status of submitted jobs, get
            # results of terminating jobs etc...
            self._engine.progress()

            # report progress
            self._print_status_report()

            # Wait a few seconds...
            time.sleep(self.poll_interval)

        # final status report
        print_msg("Done processing jobs", log=self.log, silent=build_option('silent'))
        self._print_status_report()
Example #6
def test_create_engine_with_core_options():
    """Test `create_engine` with a mix of Engine and Core options."""
    with temporary_config_file() as cfgfile:
        # use a specific MatchMaker instance for equality testing
        mm = MatchMaker()
        engine = create_engine(cfgfile.name,
                               can_submit=False,
                               matchmaker=mm,
                               auto_enable_auth=False)
        assert_equal(engine.can_submit, False)
        assert_equal(engine._core.matchmaker, mm)
        assert_equal(engine._core.auto_enable_auth, False)
Example #7
def test_create_engine_with_core_options():
    """Test `create_engine` with a mix of Engine and Core options."""
    with temporary_config_file() as cfgfile:
        # use a specific MatchMaker instance for equality testing
        mm = MatchMaker()
        engine = create_engine(cfgfile.name,
                               can_submit=False,
                               matchmaker=mm,
                               auto_enable_auth=False)
        assert engine.can_submit == False
        assert engine._core.matchmaker == mm
        assert engine._core.auto_enable_auth == False
Example #8
    def complete(self):
        """
        Complete a bulk job submission.

        Create engine, and progress it until all jobs have terminated.
        """
        # create an instance of `Engine` using the list of configuration files
        try:
            self._engine = create_engine(*self.config_files,
                                         resource_errors_are_fatal=True)

        except gc3libs.exceptions.Error as err:
            raise EasyBuildError("Failed to create GC3Pie engine: %s", err)

        # make sure that all job log files end up in the same directory, rather than renaming the output directory
        # see https://gc3pie.readthedocs.org/en/latest/programmers/api/gc3libs/core.html#gc3libs.core.Engine
        self._engine.retrieve_overwrites = True

        # some sites may not be happy with flooding the cluster with build jobs...
        self._engine.max_in_flight = build_option('job_max_jobs')

        # Add your application to the engine. This will NOT submit
        # your application yet, but will make the engine *aware* of
        # the application.
        self._engine.add(self.jobs)

        # select a specific execution resource?
        target_resource = build_option('job_target_resource')
        if target_resource:
            res = self._engine.select_resource(target_resource)
            if res == 0:
                raise EasyBuildError(
                    "Failed to select target resource '%s' in GC3Pie",
                    target_resource)

        # Periodically check the status of your application.
        while self.jobs.execution.state != Run.State.TERMINATED:
            # `Engine.progress()` will do the GC3Pie magic:
            # submit new jobs, update status of submitted jobs, get
            # results of terminating jobs etc...
            self._engine.progress()

            # report progress
            self._print_status_report()

            # Wait a few seconds...
            time.sleep(self.poll_interval)

        # final status report
        print_msg("Done processing jobs",
                  log=self.log,
                  silent=build_option('silent'))
        self._print_status_report()
Example #9
    def complete(self):
        """
        Complete a bulk job submission.

        Create engine, and progress it until all jobs have terminated.
        """
        # create an instance of `Engine` using the list of configuration files
        try:
            self._engine = create_engine(*self.config_files, resource_errors_are_fatal=True)

        except gc3libs.exceptions.Error as err:
            raise EasyBuildError("Failed to create GC3Pie engine: %s", err)

        # make sure that all job log files end up in the same directory, rather than renaming the output directory
        # see https://gc3pie.readthedocs.org/en/latest/programmers/api/gc3libs/core.html#gc3libs.core.Engine
        self._engine.retrieve_overwrites = True

        # Add your application to the engine. This will NOT submit
        # your application yet, but will make the engine *aware* of
        # the application.
        self._engine.add(self.jobs)

        # in case you want to select a specific resource,
        # call `Engine.select_resource(<resource_name>)`
        target_resource = build_option("job_target_resource")
        if target_resource:
            res = self._engine.select_resource(target_resource)
            if res == 0:
                raise EasyBuildError("Failed to select target resource '%s' in GC3Pie", target_resource)

        # Periodically check the status of your application.
        while self.jobs.execution.state != Run.State.TERMINATED:
            # `Engine.progress()` will do the GC3Pie magic:
            # submit new jobs, update status of submitted jobs, get
            # results of terminating jobs etc...
            self._engine.progress()

            # report progress
            self._print_status_report()

            # Wait a few seconds...
            time.sleep(self.poll_interval)

        # final status report
        print_msg("Done processing jobs", log=self.log, silent=build_option("silent"))
        self._print_status_report()
Example #10
    def test_issue(self):
        """Test that SequentialTasksCollection goes into TERMINATED state
        when all of its tasks are in TERMINATED state."""
        self.ptasks = 5
        task = MySequentialCollection([
            Application(
                ['echo', 'test1'],
                [], [],
                os.path.join(self.tmpdir, 'test.%d.d' % i))
            for i in range(self.ptasks)
        ])
        engine = create_engine(self.cfgfile, auto_enable_auth=True)
        engine.add(task)
        while True:
            engine.progress()

            if len([t for t in task.tasks if t.execution.state == Run.State.TERMINATED]) == self.ptasks:
                engine.progress()
                assert_equal(task.execution.state, Run.State.TERMINATED)
                break
        assert_equal(task.next_called_n_times, self.ptasks)
Example #11
    def test_issue_335(self):
        """Test that SequentialTasksCollection goes into TERMINATED state when
        all of its tasks are in TERMINATED state."""
        num_tasks_in_seq = 5
        seq = MySequentialCollection([
            Application(['echo', 'test1'], [], [],
                        os.path.join(self.tmpdir, 'test.%d.d' % i))
            for i in range(num_tasks_in_seq)
        ])
        engine = create_engine(self.cfgfile, auto_enable_auth=True)
        engine.add(seq)
        while True:
            engine.progress()
            if (len([
                    task for task in seq.tasks
                    if task.execution.state == Run.State.TERMINATED
            ]) == num_tasks_in_seq):
                engine.progress()
                # check that final SequentialCollection state is TERMINATED
                assert seq.execution.state == Run.State.TERMINATED
                break
        # check that next() has been called once for each task
        assert seq.next_called_n_times == num_tasks_in_seq
Example #12
    def test_issue_335(self):
        """Test that SequentialTasksCollection goes into TERMINATED state when
        all of its tasks are in TERMINATED state."""
        num_tasks_in_seq = 5
        seq = MySequentialCollection([
            Application(
                ['echo', 'test1'],
                [], [],
                os.path.join(self.tmpdir, 'test.%d.d' % i))
            for i in range(num_tasks_in_seq)
        ])
        engine = create_engine(self.cfgfile, auto_enable_auth=True)
        engine.add(seq)
        while True:
            engine.progress()
            if (len([task for task in seq.tasks
                     if task.execution.state == Run.State.TERMINATED])
                    == num_tasks_in_seq):
                engine.progress()
                # check that final SequentialCollection state is TERMINATED
                assert seq.execution.state == Run.State.TERMINATED
                break
        # check that next() has been called once for each task
        assert seq.next_called_n_times == num_tasks_in_seq
Example #13
def create_gc3pie_engine(store):
    '''Creates an `Engine` instance for submitting jobs for parallel
    processing.

    Parameters
    ----------
    store: gc3libs.persistence.store.Store
        GC3Pie store object

    Returns
    -------
    gc3libs.core.Engine
        engine
    '''
    logger.debug('create GC3Pie engine')
    n = cfg.resource.max_cores * 2
    logger.debug('set maximum number of submitted jobs to %d', n)
    engine = gc3libs.create_engine(
        store=store, max_in_flight=n, max_submitted=n, forget_terminated=True
    )
    # Put all output files in the same directory
    logger.debug('store stdout/stderr in common output directory')
    engine.retrieve_overwrites = True
    return engine
Example #14
            # the following arguments are mandatory:
            arguments=["/bin/hostname"],
            inputs=[],
            outputs=[],
            output_dir="./GdemoSimpleApp_output",
            # the rest is optional and has reasonable defaults:
            stdout="stdout.txt",
        )


# Create an instance of GdemoSimpleApp
app = GdemoSimpleApp()

# Create an instance of `Engine` using the configuration file present
# in your home directory.
engine = gc3libs.create_engine()

# Add your application to the engine. This will NOT submit your
# application yet, but will make the engine *aware* of the
# application.
engine.add(app)

# in case you want to select a specific resource, call
# `Engine.select_resource(<resource_name>)`
if len(sys.argv) > 1:
    engine.select_resource(sys.argv[1])

# Periodically check the status of your application.
while app.execution.state != gc3libs.Run.State.TERMINATED:
    print "Job in status %s " % app.execution.state
    # `Engine.progress()` will do the GC3Pie magic:
Example #15
            # the following arguments are mandatory:
            arguments=args,
            inputs=[],
            outputs=[],
            output_dir=self.__class__.__name__,
            # the rest is optional and has reasonable defaults:
            stdout="stdout.txt",)

    def terminated(self):
        logger.info('program was terminated')
        print(self.execution.state)
        print(self.execution.returncode)
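        # override the exit code with a non-zero value, i.e. mark the run as failed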
        self.execution.returncode = 1


app = CommandApp('/bin/hostname')
engine = gc3libs.create_engine()
engine.add(app)

if len(sys.argv) > 1:
    engine.select_resource(sys.argv[1])


while app.execution.state != gc3libs.Run.State.TERMINATED:
    print "Job in status %s " % app.execution.state
    engine.progress()
    time.sleep(1)

print("Job is now terminated.")
print("The output of the application is in `%s`." % app.output_dir)
Example #16
def run_jobs(jobs, argv, interval=1, verbose=True, max_concurrent=0):
    """
    Create and run jobs, each executing the command specified by `argv`.

    If any item in sequence `argv` is equal to the (single-character
    string) ``#``, it is substituted with the current job index.
    """
    engine = create_engine(max_in_flight=max_concurrent)
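    # NOTE: no configuration file is passed above, so `create_engine()` falls back
    # to the default GC3Pie configuration (e.g. the config file in the home directory)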
    tasks = []  # convenience for extracting stats later
    for n in xrange(jobs):
        jobname = ('worker{n}'.format(n=n))
        job_argv = [(arg if arg != '#' else n) for arg in argv]
        task = Application(
            job_argv,
            inputs=[],
            outputs=[],
            output_dir=os.path.join(os.getcwd(), jobname),
            stdout=(jobname + '.log'),
            join=True,
            jobname=jobname,
        )
        engine.add(task)
        tasks.append(task)
    # loop until all jobs are done
    stats = engine.stats()
    done = stats['TERMINATED']
    while done < jobs:
        time.sleep(interval)
        engine.progress()
        stats = engine.stats()
        done = stats['TERMINATED']
        logging.info(
            "%d jobs terminated (of which %d successfully),"
            " %d running, %d queued.",
            done, stats['ok'], stats['RUNNING'], stats['SUBMITTED'] + stats['NEW']
        )
    if verbose:
        fields = [
            # description  field name          type      zero value
            ('duration',   'duration',         Duration, (0, seconds)),
            ('CPU time',   'used_cpu_time',    Duration, (0, seconds)),
            ('RAM',        'max_used_memory',  Memory,   (0, Memory.MB)),
        ]
        # initialize counters to 0
        totals = {}
        totals_ok = {}
        for desc, name, init, zero in fields:
            totals[name] = init(*zero)
            totals_ok[name] = init(*zero)
        # compute totals
        for task in tasks:
            for desc, name, _, _ in fields:
                value = getattr(task.execution, name)
                totals[name] += value
                if (task.execution.state == 'TERMINATED'
                    and task.execution.returncode == 0):
                    totals_ok[name] += value
        # print totals and averages
        print("Resource consumption statistics:")
        for desc, name, _, _ in fields:
            print("- Average {0} per job: {1}".format(desc, totals[name] / jobs))
            # average over *successful* jobs only (guard against zero successes)
            print("- Average {0} per *successful* job: {1}".format(
                desc, totals_ok[name] / max(stats['ok'], 1)))
Example #17
def test_create_engine_default():
    """Test `create_engine` with factory defaults."""
    with temporary_config_file() as cfgfile:
        # std factory params
        engine = create_engine(cfgfile.name)
        assert_is_instance(engine, Engine)
Example #18
def test_create_engine_non_default1():
    """Test `create_engine` with one non-default argument."""
    with temporary_config_file() as cfgfile:
        engine = create_engine(cfgfile.name, can_submit=False)
        assert engine.can_submit == False
Example #19
def test_create_engine_default():
    """Test `create_engine` with factory defaults."""
    with temporary_config_file() as cfgfile:
        # std factory params
        engine = create_engine(cfgfile.name)
        assert isinstance(engine, Engine)
Example #20
def test_create_engine_non_default1():
    """Test `create_engine` with one non-default argument."""
    with temporary_config_file() as cfgfile:
        engine = create_engine(cfgfile.name, can_submit=False)
        assert_equal(engine.can_submit, False)
Example #21
import gc3libs
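# 30 is `logging.WARNING`: only warnings and errors will be reported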
gc3libs.configure_logger(30)
gc3conffile = 'config/samples/gc3pie_localhost.conf'
engine = gc3libs.create_engine(gc3conffile)
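# the Engine delegates to a lower-level Core object; the (private) `_core`
# attribute exposes it directly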
core = engine._core