def test_dynamic5(self):

        # first define with both job chains and run
        mockup5(self.cc, both=True)
        self.assert_cmd_success('make recurse=1')

        self.assertJobsEqual(
            'all', ['fd', 'fd-gd', 'fd-gd-g2', 'hd', 'hd-id', 'hd-id-i2'])
        self.assertJobsEqual(
            'done', ['fd', 'fd-gd', 'fd-gd-g2', 'hd', 'hd-id', 'hd-id-i2'])

        self.assert_cmd_success('details hd-id')
        self.assert_cmd_success('details hd-id-i2')
        self.assertEqualSet(definition_closure(['hd-id'], self.db),
                            ['hd-id-i2'])
        self.assertEqualSet(definition_closure(['hd'], self.db),
                            ['hd-id', 'hd-id-i2'])
        # now redo it
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)

        mockup5(self.cc, both=False)
        self.assert_cmd_success('clean')
        self.assert_cmd_success('make recurse=1')
        self.assertJobsEqual('all', ['fd', 'fd-gd', 'fd-gd-g2'])
        self.assertJobsEqual('done', ['fd', 'fd-gd', 'fd-gd-g2'])
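# mockup5 is defined elsewhere in the compmake test suite.  A minimal
# sketch of what it could look like, assuming compmake's comp() /
# comp_dynamic() API; the job bodies and names are illustrative, not
# the original code.  Dynamic jobs receive the context and may define
# further jobs, which is what produces the nested ids ('fd-gd-g2')
# checked above.

def g2():
    pass

def gd(context):
    context.comp(g2)          # becomes 'fd-gd-g2' at runtime

def fd(context):
    context.comp_dynamic(gd)  # becomes 'fd-gd' at runtime

def i2():
    pass

def id(context):              # note: shadows the builtin id() in this sketch
    context.comp(i2)

def hd(context):
    context.comp_dynamic(id)

def mockup5_sketch(cc, both):
    cc.comp_dynamic(fd)
    if both:
        cc.comp_dynamic(hd)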
Example #2
def go(path):
    db = StorageFilesystem(path, compress=True)
    args = ['failed']  # job query: all failed jobs
    cq = CacheQueryDB(db)
    context = Context(db)
    if not list(db.keys()):
        msg = 'Compmake DB is empty'
        logger.error(msg)
    else:
        job_list = parse_job_list(args, context=context, cq=cq)
        s = ""
        if job_list:
            job_list = job_list[:2]  # report at most two failed jobs
            s += 'Running on host: %s' % hostname
            s += "\nJob failed in path %s" % path
            for job_id in job_list:
                if job_cache_exists(job_id, db):
                    cache = get_job_cache(job_id, db)
                    status = Cache.state2desc[cache.state]

                    s += "\nFailure of job %s" % job_id

                    if cache.state in [Cache.FAILED, Cache.BLOCKED]:
                        why = str(cache.exception).strip()
                    else:
                        why = 'Job is done; no failure recorded.'
                    s += '\n' + "```\n" + why + "\n```"
                    s += '\n\n'
                else:
                    logger.warning('no cache for %s' % job_id)

            s += '\n@censi'
            s += '\n@jacopo'
            s += '\n@paull'
            s += '\n@walter'
            s += '\n@daniele'
            print(s)
            slack.chat.post_message(channel, s, link_names=1)

        else:
            s = 'Everything is fine'
            # slack.chat.post_message(channel, s)
            logger.info('No jobs found')
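# go() relies on module-level names (logger, hostname, slack, channel)
# that are not part of this snippet.  A sketch of the missing glue,
# assuming the 'slacker' Slack client; the token and channel values
# are placeholders, not the original configuration.

import logging
import socket
import sys
from slacker import Slacker

logger = logging.getLogger(__name__)
hostname = socket.gethostname()
channel = '#builds'          # hypothetical channel name
slack = Slacker('xoxb-...')  # API token elided

if __name__ == '__main__':
    go(sys.argv[1])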
Example #3
    def test_dynamic6(self):
        
        # first define with both job chains and run
        mockup6(self.cc, both=True)
        db = self.db
        
        self.assertRaises(CompmakeBug, jobs_defined, job_id='hd', db=db)
        
        self.assert_cmd_success('make recurse=1')
        check_job_cache_state(job_id='hd', states=[Cache.DONE], db=db)
        self.assertEqual(jobs_defined(job_id='hd', db=db),
                         set(['hd-id']))
        
        # self.assert_cmd_success('graph compact=0 color=0 '
        #                         'cluster=1 filter=dot')
        
        self.assertJobsEqual('all', ['fd', 'fd-gd', 'fd-gd-g2',
                                     'hd', 'hd-id', 'hd-id-i2',
                                     'summary'])
        self.assertJobsEqual('done', ['fd', 'fd-gd', 'fd-gd-g2',
                                      'hd', 'hd-id', 'hd-id-i2',
                                      'summary'])
        
        # now redo it 
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)
        
        print('running again with both=False')
        mockup6(self.cc, both=False)
        clean_other_jobs(context=self.cc)
        
        self.assertJobsEqual('all', ['fd', 'fd-gd', 'fd-gd-g2', 
                                     'summary'])
        
        job = get_job('summary', self.db)
        print('job.children: %s' % job.children)
        print('job.dynamic_children: %s' % job.dynamic_children)
        self.assertEqual(job.dynamic_children, {'fd': set(['fd-gd'])})
        self.assertEqualSet(direct_children('summary', self.db), ['fd', 'fd-gd'])
        self.assert_cmd_success('ls')

        self.assert_cmd_success('make recurse=1')
        self.assertJobsEqual('all', ['fd', 'fd-gd', 'fd-gd-g2', 'summary'])
        self.assertJobsEqual('done', ['fd', 'fd-gd', 'fd-gd-g2', 'summary'])
Example #4
def mvac_job_rdb_worker(job_id, rdb_basepath, cwd, misc):
    # misc is unused here but part of the worker signature.
    # StorageFilesystem, Context and the exception types are assumed
    # imported at module level in the original source.
    import os
    from compmake.jobs.actions import make
    rdb = StorageFilesystem(rdb_basepath)
    context = Context(rdb)

    if not os.path.exists(cwd):
        print('cwd %r does not exist; creating it' % cwd)
        os.makedirs(cwd)
    os.chdir(cwd)

    try:
        res = make(job_id, context=context)
    except JobFailed as e:
        res = e.get_result_dict()
    except HostFailed as e:
        res = e.get_result_dict()
    except CompmakeBug as e:
        res = e.get_result_dict()
    except Exception as e:
        # Anything unexpected is wrapped as a CompmakeBug result.
        res = CompmakeBug(str(e)).get_result_dict()

    result_dict_check(res)
    print('res: %r' % res)
    return res
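# A sketch of how this worker might be invoked, e.g. from a process
# pool; the job id and paths are illustrative values.

if __name__ == '__main__':
    from multiprocessing import Pool
    pool = Pool(processes=2)
    r = pool.apply_async(
        mvac_job_rdb_worker,
        ('example-job', '/tmp/rdb', '/tmp/workdir', {}))
    print(r.get())
    pool.close()
    pool.join()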
Example #5
def synchronize_db_up(context, targets):
    """ Syncrhonizes the DB up """
    db = context.get_compmake_db()
    # first create the volume if it doesn't exist
    vol = create_db_volume(db)
    
    # now we need to put all files
    
    keys = []
    
    cq = CacheQueryDB(db)
    jobs = set()
    jobs.update(targets)
    jobs.update(cq.tree(targets))
    
    #print('%d targets, %d jobs' % (len(targets), len(jobs)))
     
    # XXX: not all jobs
    for job_id in jobs:
        resources = [job2jobargskey, job2userobjectkey, 
                     job2cachekey, job2key]
        for r in resources:
            key = r(job_id)
            if key in db:
                keys.append(key)
                
    #print('Found %s files to upload' % len(keys))
    
    # Shadow storage
    db2 = StorageFilesystem(basepath=vol.mount_path)
    already = set([os.path.basename(x['path']) for x in vol.ls('.')])
    
    filename2contents = {}
    #print('obtained: %r' % already)
    for key in keys:
        f = db.filename_for_key(key)
        f2 = db2.filename_for_key(key)
        local_path = f
        remote_path = os.path.relpath(f2, db2.basepath)
        
        if remote_path in already:
            #print('skipping %r' % local_path)
            continue
        
        size = os.stat(local_path).st_size
        # Small files (under 6 KB) are batched and shipped inline;
        # larger files are uploaded individually.
        use_compact = size < 6 * 1024
        if use_compact:
            with open(local_path) as fh:
                filename2contents[f2] = fh.read()
        else:
            #print('%s -> %s' % (local_path, remote_path))
            assert os.path.join(db2.basepath, remote_path) == f2
            vol.put_file(local_path, remote_path, target_mode=None)
    
    import multyvac
    multyvac_job_id = multyvac.submit(copy_files, filename2contents, 
                                      _vol=[vol.name])
    multyvac_job = multyvac.get(multyvac_job_id)
    multyvac_job.get_result()
    
    return vol, db2
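# copy_files, submitted to multyvac above, is not part of this
# snippet.  A hypothetical reconstruction of what it presumably does:
# write the small files that were shipped inline onto the volume.

def copy_files(filename2contents):
    import os
    for filename, contents in filename2contents.items():
        dirname = os.path.dirname(filename)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        with open(filename, 'w') as fh:
            fh.write(contents)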
Example #6
def define_jobs(root):
    db = StorageFilesystem(root, compress=True)
    cc = Context(db=db)
    # h and e are job functions defined elsewhere; e receives the
    # context at runtime and can define further jobs dynamically.
    cc.comp(h, cc.comp_dynamic(e))
    return db, cc
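# A usage sketch for define_jobs, assuming the Context exposes
# batch_command() for running compmake console commands; the root
# path is a placeholder.

def run_example():
    db, cc = define_jobs('/tmp/compmake-example')
    # 'recurse=1' also makes the jobs that e() defines dynamically.
    cc.batch_command('make recurse=1')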
Example #7
def junit_test_case_from_compmake(db, job_id):
    cache = get_job_cache(job_id, db=db)
    if cache.state == Cache.DONE:  # optionally also: cache.done_iterations > 1
        # CPU time is reported; cache.walltime_used is the alternative.
        elapsed_sec = cache.cputime_used
    else:
        elapsed_sec = None

    stderr = flatten_ascii(cache.captured_stderr)
    stdout = flatten_ascii(cache.captured_stdout)

    tc = TestCase(name=job_id,
                  classname=None,
                  elapsed_sec=elapsed_sec,
                  stdout=stdout,
                  stderr=stderr)

    if cache.state == Cache.FAILED:
        message = cache.exception
        output = cache.exception + "\n" + cache.backtrace
        tc.add_failure_info(flatten_ascii(message), flatten_ascii(output))

    return tc


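# junit_xml(db), used below, is not shown in this snippet.  A sketch
# of what it might do, assuming the 'junit_xml' PyPI package and
# compmake's all_jobs() iterator; these names are assumptions.

def junit_xml_sketch(db):
    from junit_xml import TestSuite
    from compmake.jobs import all_jobs
    test_cases = [junit_test_case_from_compmake(db, job_id)
                  for job_id in all_jobs(db)]
    ts = TestSuite('compmake', test_cases)
    return TestSuite.to_xml_string([ts])
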
if __name__ == '__main__':
    dirname = sys.argv[1]
    db = StorageFilesystem(dirname, compress=True)
    s = junit_xml(db)
    sys.stdout.write(s)