Example #1
def definition_closure(jobs, db):
    """ Computes the set of jobs defined, directly or indirectly, by the
        given jobs. The result does not contain the initial jobs
        (unless one of them defines another). """
    # check_isinstance is from PyContracts; Cache is compmake's job-state
    # structure (compmake.structures).
    check_isinstance(jobs, (list, set))
    jobs = set(jobs)
    from compmake.jobs.uptodate import CacheQueryDB
    cq = CacheQueryDB(db)
    stack = set(jobs)
    result = set()
    while stack:
        a = stack.pop()
        if not cq.job_exists(a):
            print('Warning: job %r does not exist anymore; ignoring.' % a)
            continue

        # Only jobs that have completed (state DONE) can have defined others.
        if cq.get_job_cache(a).state == Cache.DONE:
            for x in cq.jobs_defined(a):
                result.add(x)
                stack.add(x)

    return result
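A minimal usage sketch (assuming StorageFilesystem is importable from compmake.storage, as constructed directly in Example #6 below; the path and job id are illustrative):

from compmake.storage import StorageFilesystem

db = StorageFilesystem('out/compmake', compress=True)
defined = definition_closure(['make-all'], db)  # 'make-all' is a hypothetical job id
print('Jobs defined by the closure: %s' % sorted(defined))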
Example #2
def up_to_date(self, job_id):
    """ Returns True if the job's result is current. """
    from compmake.jobs.uptodate import CacheQueryDB
    cq = CacheQueryDB(db=self.db)
    # up_to_date() returns a (bool, reason, timestamp) triple.
    up, reason, timestamp = cq.up_to_date(job_id)
    print('up_to_date(%r): %s, %r, %s' % (job_id, up, reason, timestamp))
    return up
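A sketch of calling CacheQueryDB.up_to_date() directly, outside a wrapper class (db is an existing compmake database object; the job id is hypothetical):

from compmake.jobs.uptodate import CacheQueryDB

cq = CacheQueryDB(db=db)
up, reason, timestamp = cq.up_to_date('stats-report')
if not up:
    print('stats-report must be recomputed: %s' % reason)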
Example #3
def list_ready_jobs():
    ''' Yields the jobs that can be done now,
        because all of their dependencies are up-to-date. '''
    # Note: unlike the other examples, this snippet creates CacheQueryDB
    # without an explicit db; it presumably predates the CacheQueryDB(db) API.
    cq = CacheQueryDB()
    for job_id in all_jobs():
        if cq.dependencies_up_to_date(job_id):
            yield job_id
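Since list_ready_jobs() is a generator, materialize it when an actual list is needed (sketch):

ready = list(list_ready_jobs())
print('%d jobs are ready to run' % len(ready))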
Example #4
def parse_job_list(tokens, context, cq=None):
    """
        Parses a job list. tokens can be:

        1. a string, in which case it is split();
        2. a list, in which case each element is treated as a token.

        (An earlier behavior, now disabled: if tokens was not empty but
        evaluated to an empty job list, an error was raised; e.g.
        "make failed" with no failed jobs would throw an error.)

        Returns a list of strings.
    """
    if cq is None:
        cq = CacheQueryDB(context.get_compmake_db())

    if isinstance(tokens, str):
        tokens = tokens.strip().split()

    if not tokens:
        return []

    # First we look for operators.
    ops = Operators.parse(tokens)

    result = eval_ops(ops=ops, context=context, cq=cq)

    # FIXME, remove
    result = list(result)

    return result
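A usage sketch, given a compmake Context (the queries are illustrative; which selectors and wildcards are available depends on the operators defined by Operators.parse):

failed = parse_job_list('failed', context=context)
subset = parse_job_list(['report-*'], context=context)  # hypothetical glob pattern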
Example #5
def count_resources(context, the_job):
    # Assumed imports from compmake: get_job_cache, Cache, CacheQueryDB,
    # Promise; my_get_job_cache and finalize_result are helpers defined
    # elsewhere in the original module.
    db = context.get_compmake_db()
    cache = get_job_cache(the_job, db=db)
    if cache.state != Cache.DONE:
        msg = 'The job %s was supposed to be finished: %s' % (the_job, cache)
        raise Exception(msg)

    cq = CacheQueryDB(db)
    # All jobs reachable from the_job, including user-object dependencies.
    children = cq.tree_children_and_uodeps(the_job)
    check_isinstance(children, set)
    children.add(the_job)

    # Schedule one job per child to read its cache; aggregate at the end.
    res = {}
    for j in children:
        res[j] = context.comp_dynamic(my_get_job_cache, j, extra_dep=[Promise(j)],
                                      job_id='count-%s-%s' % (the_job, j))

    return context.comp(finalize_result, res)
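my_get_job_cache and finalize_result are not shown in this example. A plausible aggregator sketch (hypothetical; the real implementation may differ, and the attribute names assume compmake's Cache.cputime_used/walltime_used fields):

def finalize_result(res):
    # res maps job_id -> Cache object, as collected by the jobs above
    total_cpu = sum(cache.cputime_used for cache in res.values())
    total_wall = sum(cache.walltime_used for cache in res.values())
    return dict(njobs=len(res), cputime=total_cpu, walltime=total_wall)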
Example #6
def go(path):
    # Assumed module-level names in the original script: logger, slack
    # (a Slack client), channel, and hostname (e.g. from socket.gethostname()).
    db = StorageFilesystem(path, compress=True)
    args = ['failed']
    cq = CacheQueryDB(db)
    context = Context(db)
    if not list(db.keys()):
        msg = 'Compmake DB is empty'
        logger.error(msg)
    else:
        job_list = parse_job_list(args, context=context, cq=cq)
        s = ""
        if job_list:
            # Report at most two failures per notification.
            job_list = job_list[:2]
            s += 'Running on host: %s' % hostname
            s += "\nJob failed in path %s" % path
            for job_id in job_list:
                if job_cache_exists(job_id, db):
                    cache = get_job_cache(job_id, db)
                    status = Cache.state2desc[cache.state]

                    s += "\nFailure of job %s" % job_id

                    if cache.state in [Cache.FAILED, Cache.BLOCKED]:
                        why = str(cache.exception).strip()
                    else:
                        why = 'No failure reason recorded for state %s.' % status
                    s += '\n' + "```\n" + why + "\n```"
                    s += '\n\n'
                else:
                    logger.warning('no cache for %s' % job_id)

            s += '\n@censi'
            s += '\n@jacopo'
            s += '\n@paull'
            s += '\n@walter'
            s += '\n@daniele'
            print(s)
            slack.chat.post_message(channel, s, link_names=1)

        else:
            s = 'Everything is fine'
            # slack.chat.post_message(channel, s)
            logger.info('No jobs found')
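This function reads like the body of a standalone failure notifier; a sketch of invoking it (the command-line convention is an assumption):

if __name__ == '__main__':
    import sys
    go(sys.argv[1])  # path to an existing compmake storage directory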
Example #7
def read_commands_from_file(filename, context):
    from compmake.jobs.uptodate import CacheQueryDB

    assert context is not None
    filename = os.path.realpath(filename)
    # Avoid reading the same rc file twice.
    if filename in context.rc_files_read:
        return
    else:
        context.rc_files_read.append(filename)

    cq = CacheQueryDB(context.get_compmake_db())
    info('Reading configuration from %r.' % friendly_path(filename))
    with open(filename, 'r') as f:
        for line in f:
            line = line.strip()
            # Skip blank lines and comments.
            if not line:
                continue
            if line[0] == '#':
                continue
            interpret_commands_wrap(line, context=context, cq=cq)
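A usage sketch, loading a user rc file into an existing context (the path is illustrative):

import os

rc = os.path.expanduser('~/.compmake/compmake.rc')
if os.path.exists(rc):
    read_commands_from_file(rc, context)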
Example #8
    def go(self):
        # If we have a parent who is a quickapp, use its context.
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            qc = qapp_parent.child_context
            self.define_jobs_context(qc)
            return

        options = self.get_options()

        # Only do this if somebody didn't do it before.
        if not options.contracts:
            msg = ('PyContracts disabled for speed. '
                   'Use --contracts to activate.')
            self.logger.warning(msg)
            contracts.disable_all()

        output_dir = options.output

        if options.reset:
            if os.path.exists(output_dir):
                self.logger.info('Removing output dir %r.' % output_dir)
                try:
                    shutil.rmtree(output_dir)
                except OSError as e:
                    # "Directory not empty" is common on NFS filesystems;
                    # errno.ENOTEMPTY replaces the Linux-specific constant 39
                    # (assumes `import errno` at module level).
                    if e.errno == errno.ENOTEMPTY:
                        pass
                    else:
                        raise

        # Compmake storage for results
        storage = os.path.join(output_dir, 'compmake')
        logger.debug('Creating storage in %s  (compress = %s)' %
                     (storage, options.compress))
        db = StorageFilesystem(storage, compress=options.compress)
        currently_executing = ['root']
        # The original Compmake context
        oc = Context(db=db, currently_executing=currently_executing)
        # Our wrapper
        qc = CompmakeContext(cc=oc,
                             parent=None,
                             qapp=self,
                             job_prefix=None,
                             output_dir=output_dir)
        read_rc_files(oc)

        original = oc.get_comp_prefix()
        self.define_jobs_context(qc)
        oc.comp_prefix(original)

        merged = context_get_merge_data(qc)

        # Only create the index job if we have reports defined
        # or some branched context (which might create reports)
        has_reports = len(qc.get_report_manager().allreports) > 0
        has_branched = qc.has_branched()
        if has_reports or has_branched:
            oc.comp_dynamic(_dynreports_create_index, merged)

        ndefined = len(oc.get_jobs_defined_in_this_session())
        if ndefined == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else:
            if options.console:
                oc.compmake_console()
                return 0
            else:
                cq = CacheQueryDB(oc.get_compmake_db())
                targets = cq.all_jobs()
                todo, done, ready = cq.list_todo_targets(targets)

                if not todo and options.command is None:
                    msg = "Note: there is nothing for me to do. "
                    msg += '\n(Jobs todo: %s done: %s ready: %s)' % (
                        len(todo), len(done), len(ready))
                    msg += """\
This application uses a cache system for the results.
This means that if you call it second time with the same arguments,
 and if you do not change any input, it will not do anything."""
                    self.warn(msg)
                    return 0

                if options.command is None:
                    command = 'make recurse=1'
                else:
                    command = options.command

                try:
                    _ = oc.batch_command(command)
                except CommandFailed:
                    ret = QUICKAPP_COMPUTATION_ERROR
                except ShellExitRequested:
                    ret = 0
                else:
                    ret = 0

                return ret
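For context, this go() is the driver behind a QuickApp application: user code only implements define_options() and define_jobs_context(), and this method sets up the compmake database, defines the jobs, and runs them. A minimal subclass sketch (class, option, and job names are illustrative, assuming quickapp's usual entry points):

from quickapp import QuickApp

def greet(name):
    print('hello, %s' % name)

class MyApp(QuickApp):
    """ Minimal example app. """

    def define_options(self, params):
        params.add_string('name', default='world', help='Who to greet')

    def define_jobs_context(self, context):
        context.comp(greet, self.options.name)

main = MyApp.get_sys_main()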
Example #9
def synchronize_db_up(context, targets):
    """ Synchronizes the DB up to the (multyvac) volume. """
    db = context.get_compmake_db()
    # First create the volume if it doesn't exist.
    vol = create_db_volume(db)

    # Now we need to put all files for the targets and their
    # whole dependency tree.
    keys = []

    cq = CacheQueryDB(db)
    jobs = set()
    jobs.update(targets)
    jobs.update(cq.tree(targets))
    # XXX: not all jobs
    key_functions = [job2jobargskey, job2userobjectkey, job2cachekey, job2key]
    for job_id in jobs:
        for r in key_functions:
            key = r(job_id)
            if key in db:
                keys.append(key)
    
    # Shadow storage on the volume.
    db2 = StorageFilesystem(basepath=vol.mount_path)
    already = set(os.path.basename(x['path']) for x in vol.ls('.'))

    filename2contents = {}
    for key in keys:
        local_path = db.filename_for_key(key)
        remote_abs = db2.filename_for_key(key)
        remote_path = os.path.relpath(remote_abs, db2.basepath)

        if remote_path in already:
            continue

        # Small files are batched and written by a single remote job;
        # larger files are uploaded individually.
        size = os.stat(local_path).st_size
        use_compact = size < 6 * 1024
        if use_compact:
            with open(local_path) as fh:
                filename2contents[remote_abs] = fh.read()
        else:
            assert os.path.join(db2.basepath, remote_path) == remote_abs
            vol.put_file(local_path, remote_path, target_mode=None)
    
    import multyvac
    # Write all the batched small files in one remote job on the volume.
    multyvac_job_id = multyvac.submit(copy_files, filename2contents,
                                      _vol=[vol.name])
    multyvac_job = multyvac.get(multyvac_job_id)
    multyvac_job.get_result()

    return vol, db2
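A usage sketch (the target job id is hypothetical). Note the design choice above: files under 6 KiB are collected into filename2contents and written by a single copy_files job on the volume, avoiding one upload round-trip per small file:

vol, db2 = synchronize_db_up(context, targets=['stats-report'])
print('Synchronized onto volume %s (shadow db at %s)' % (vol.name, db2.basepath))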