Example 1
    def setUp(self):
        self.root0 = mkdtemp()
        self.root = os.path.join(self.root0, 'compmake')
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)
        # don't use '\r'
        set_compmake_config('interactive', False)
        set_compmake_config('console_status', False)
        from compmake.constants import CompmakeConstants
        CompmakeConstants.debug_check_invariants = True
        self.mySetUp()
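
A minimal usage sketch for this fixture, assuming the full CompmakeTest base class shown later in this listing; the job function f and the test method name are illustrative, not part of the original:

def f():
    return 42

class TestMinimal(CompmakeTest):
    def test_one_job(self):
        # define a single job on the test's Context ...
        self.comp(f)
        # ... and run it through the console command interface
        self.assert_cmd_success('make')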
Example 2
    def test_dynamic5(self):

        # first define both chains of jobs and run
        mockup5(self.cc, both=True)
        self.assert_cmd_success('make recurse=1')

        self.assertJobsEqual(
            'all', ['fd', 'fd-gd', 'fd-gd-g2', 'hd', 'hd-id', 'hd-id-i2'])
        self.assertJobsEqual(
            'done', ['fd', 'fd-gd', 'fd-gd-g2', 'hd', 'hd-id', 'hd-id-i2'])

        self.assert_cmd_success('details hd-id')
        self.assert_cmd_success('details hd-id-i2')
        self.assertEqualSet(definition_closure(['hd-id'], self.db),
                            ['hd-id-i2'])
        self.assertEqualSet(definition_closure(['hd'], self.db),
                            ['hd-id', 'hd-id-i2'])
        # now redo it with a fresh DB and context, defining only the first chain
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)

        mockup5(self.cc, both=False)
        self.assert_cmd_success('clean')
        self.assert_cmd_success('make recurse=1')
        self.assertJobsEqual('all', ['fd', 'fd-gd', 'fd-gd-g2'])
        self.assertJobsEqual('done', ['fd', 'fd-gd', 'fd-gd-g2'])
Example 3
def go(path):
    db = StorageFilesystem(path, compress=True)
    args = ['failed']
    cq = CacheQueryDB(db)
    context = Context(db)
    if not list(db.keys()):
        msg = 'Compmake DB is empty'
        logger.error(msg)
    else:
        job_list = parse_job_list(args, context=context, cq=cq)
        s = ""
        if job_list:
            # only report the first two failing jobs
            job_list = job_list[:2]
            s += 'Running on host: %s' % hostname
            s += "\nJob failed in path %s" % path
            for job_id in job_list:
                if job_cache_exists(job_id, db):
                    cache = get_job_cache(job_id, db)
                    status = Cache.state2desc[cache.state]

                    s += "\nFailure of job %s" % job_id

                    if cache.state in [Cache.FAILED, Cache.BLOCKED]:
                        why = str(cache.exception).strip()
                    else:
                        why = 'No failure reason (job is not failed or blocked).'
                    s += '\n' + "```\n" + why + "\n```"
                    s += '\n\n'
                else:
                    logger.warning('no cache for %s' % job_id)

            s += '\n@censi'
            s += '\n@jacopo'
            s += '\n@paull'
            s += '\n@walter'
            s += '\n@daniele'
            print(s)
            slack.chat.post_message(channel, s, link_names=1)

        else:
            s = 'Everything is fine'
            # slack.chat.post_message(channel, s)
            logger.info('No jobs found')
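
A minimal sketch of driving this helper from the command line, assuming the module-level names used inside go (logger, hostname, slack, channel) are configured elsewhere; the argv handling is illustrative:

if __name__ == '__main__':
    import sys
    # path to the compmake storage directory (the directory passed to StorageFilesystem)
    go(sys.argv[1])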
Example 4
def mvac_job_rdb_worker(job_id, rdb_basepath, cwd, misc):
    from compmake.jobs.actions import make
    rdb = StorageFilesystem(rdb_basepath)
    context = Context(rdb)

    if not os.path.exists(cwd):
        print('cwd %r does not exist; creating it' % cwd)
        os.makedirs(cwd)
    os.chdir(cwd)

    try:
        res = make(job_id, context=context)
    except JobFailed as e:
        res = e.get_result_dict()
    except HostFailed as e:
        res = e.get_result_dict()
    except CompmakeBug as e:
        res = e.get_result_dict()
    except Exception as e:
        # unexpected errors are reported as a CompmakeBug result
        res = CompmakeBug(str(e)).get_result_dict()

    result_dict_check(res)
    print('res: %r' % res)
    return res
Example 5
    def go(self):
        # check that if we have a parent who is a quickapp,
        # then use its context
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            # self.info('Found parent: %s' % qapp_parent)
            qc = qapp_parent.child_context
            self.define_jobs_context(qc)
            return
        else:
            # self.info('Parent not found')
            pass

        # if False:
        #     import resource
        #     gbs = 5
        #     max_mem = long(gbs * 1000 * 1048576)
        #     resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
        #     resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

        options = self.get_options()

        # if self.get_qapp_parent() is None:
        # only do this if somebody didn't do it before
        if not options.contracts:
            msg = ('PyContracts disabled for speed. '
                   'Use --contracts to activate.')
            self.logger.warning(msg)
            contracts.disable_all()

        output_dir = options.output

        if options.reset:
            if os.path.exists(output_dir):
                self.logger.info('Removing output dir %r.' % output_dir)
                try:
                    shutil.rmtree(output_dir)
                except OSError as e:
                    # Directory not empty -- common enough on NFS filesystems
                    # print('errno: %r' % e.errno)
                    if e.errno == 39:
                        pass
                    else:
                        raise

        # Compmake storage for results
        storage = os.path.join(output_dir, 'compmake')
        logger.debug('Creating storage in %s  (compress = %s)' %
                     (storage, options.compress))
        db = StorageFilesystem(storage, compress=options.compress)
        currently_executing = ['root']
        # The original Compmake context
        oc = Context(db=db, currently_executing=currently_executing)
        # Our wrapper
        qc = CompmakeContext(cc=oc,
                             parent=None,
                             qapp=self,
                             job_prefix=None,
                             output_dir=output_dir)
        read_rc_files(oc)

        original = oc.get_comp_prefix()
        self.define_jobs_context(qc)
        oc.comp_prefix(original)

        merged = context_get_merge_data(qc)

        # Only create the index job if we have reports defined
        # or some branched context (which might create reports)
        has_reports = len(qc.get_report_manager().allreports) > 0
        has_branched = qc.has_branched()
        if has_reports or has_branched:
            # self.info('Creating reports')
            oc.comp_dynamic(_dynreports_create_index, merged)
        else:
            pass
            # self.info('Not creating reports.')

        ndefined = len(oc.get_jobs_defined_in_this_session())
        if ndefined == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else:
            if options.console:
                oc.compmake_console()
                return 0
            else:
                cq = CacheQueryDB(oc.get_compmake_db())
                targets = cq.all_jobs()
                todo, done, ready = cq.list_todo_targets(targets)

                if not todo and options.command is None:
                    msg = "Note: there is nothing for me to do. "
                    msg += '\n(Jobs todo: %s done: %s ready: %s)' % (
                        len(todo), len(done), len(ready))
                    msg += """\
This application uses a cache system for the results.
This means that if you call it a second time with the same arguments,
and if you do not change any input, it will not do anything."""
                    self.warn(msg)
                    return 0

                if options.command is None:
                    command = 'make recurse=1'
                else:
                    command = options.command

                try:
                    _ = oc.batch_command(command)
                    # print('qapp: ret0 = %s'  % ret0)
                except CommandFailed:
                    # print('qapp: CommandFailed')
                    ret = QUICKAPP_COMPUTATION_ERROR
                except ShellExitRequested:
                    # print('qapp: ShellExitRequested')
                    ret = 0
                else:
                    # print('qapp: else ret = 0')
                    ret = 0

                return ret
Example 6
class CompmakeTest(unittest.TestCase):
    __metaclass__ = ABCMeta

    def setUp(self):
        self.root0 = mkdtemp()
        self.root = os.path.join(self.root0, 'compmake')
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)
        # don't use '\r'
        set_compmake_config('interactive', False)
        set_compmake_config('console_status', False)
        from compmake.constants import CompmakeConstants
        CompmakeConstants.debug_check_invariants = True
        self.mySetUp()

    def tearDown(self):
        if False:
            print('not deleting %s' % self.root0)
        else:
            rmtree(self.root0)

    # optional init
    # noinspection PyPep8Naming
    def mySetUp(self):
        pass

    # useful
    def comp(self, *args, **kwargs):
        return self.cc.comp(*args, **kwargs)

    @contract(job_id=str, returns=Job)
    def get_job(self, job_id):
        db = self.cc.get_compmake_db()
        return get_job(job_id=job_id, db=db)

    def get_jobs(self, expression):
        """ Returns the list of jobs corresponding to the given expression. """
        return list(parse_job_list(expression, context=self.cc))

    def assert_cmd_success(self, cmds):
        """ Executes the (list of) commands and checks that they were successful. """
        try:
            print('@ %s' % cmds)

            self.cc.batch_command(cmds)

        except MakeFailed as e:
            print('Detected MakeFailed')
            print('Failed jobs: %s' % e.failed)
            for job_id in e.failed:
                self.cc.interpret_commands_wrap('details %s' % job_id)

        except CommandFailed:
            # msg = 'Command %r failed. (res=%s)' % (cmds, res)
            raise

        self.cc.interpret_commands_wrap('check_consistency raise_if_error=1')

    def assert_cmd_fail(self, cmds):
        """ Executes the (list of) commands and checks that they fail. """
        try:
            self.cc.batch_command(cmds)
        except CommandFailed:
            pass
        else:
            msg = 'Command %r did not fail.' % cmds
            raise Exception(msg)

    @contract(cmd_string=str)
    def assert_cmd_success_script(self, cmd_string):
        """ This runs the "compmake_main" script which recreates the DB and
        context from disk. """
        ret = compmake_main([self.root, '--nosysexit', '-c', cmd_string])
        self.assertEqual(ret, 0)

    # useful tests
    def assert_defined_by(self, job_id, expected):
        self.assertEqual(self.get_job(job_id).defined_by, expected)

    def assertEqualSet(self, a, b):
        self.assertEqual(set(a), set(b))

    @contract(expr=str)
    def assertJobsEqual(self, expr, jobs, ignore_dyn_reports=True):

        js = 'not-valid-yet'
        try:
            js = self.get_jobs(expr)
            if ignore_dyn_reports:
                js = [x for x in js if 'dynreports' not in x]
            self.assertEqualSet(js, jobs)
        except:
            print('expr %r -> %s' % (expr, js))
            print('differs from %s' % jobs)
            raise

    def assertMakeFailed(self, func, nfailed, nblocked):
        try:
            func()
        except MakeFailed as e:
            if len(e.failed) != nfailed:
                msg = 'Expected %d failed, got %d: %s' % (
                    nfailed, len(e.failed), e.failed)
                raise Exception(msg)
            if len(e.blocked) != nblocked:
                msg = 'Expected %d blocked, got %d: %s' % (
                    nblocked, len(e.blocked), e.blocked)
                raise Exception(msg)
        except Exception as e:
            raise Exception('unexpected: %s' % e)
        else:
            msg = 'Expected MakeFailed, but no exception was raised.'
            raise Exception(msg)

    def assert_job_uptodate(self, job_id, status):
        res = self.up_to_date(job_id)
        self.assertEqual(res, status,
                         'Want %r uptodate? %s' % (job_id, status))

    @contract(returns=bool)
    def up_to_date(self, job_id):
        from compmake.jobs.uptodate import CacheQueryDB
        cq = CacheQueryDB(db=self.db)
        up, reason, timestamp = cq.up_to_date(job_id)
        print('up_to_date(%r): %s, %r, %s' % (job_id, up, reason, timestamp))
        return up
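
A minimal sketch of a subclass built on the hooks above; the job function and the test method are illustrative, not part of the original:

def double(x):
    return 2 * x

class TestDouble(CompmakeTest):
    def mySetUp(self):
        # optional per-test initialization hook provided by the base class
        for x in range(3):
            self.comp(double, x)

    def test_all_done(self):
        self.assert_cmd_success('make recurse=1')
        # after 'make', every defined job should be reported as done
        self.assertJobsEqual('done', self.get_jobs('all'))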
Example 7
    def go(self): 
        # check that if we have a parent who is a quickapp,
        # then use its context      
        qapp_parent = self.get_qapp_parent()
        if qapp_parent is not None:
            # self.info('Found parent: %s' % qapp_parent)
            qc = qapp_parent.child_context  
            self.define_jobs_context(qc)
            return
        else:
            # self.info('Parent not found')
            pass
            

        if False:
            import resource
            gbs = 5
            max_mem = gbs * 1000 * 1048576
            resource.setrlimit(resource.RLIMIT_AS, (max_mem, -1))
            resource.setrlimit(resource.RLIMIT_DATA, (max_mem, -1))

        options = self.get_options()
        
        
        if self.get_qapp_parent() is None:
            # only do this if somebody didn't do it before
            if not options.contracts:
                msg = ('PyContracts disabled for speed. '
                       'Use --contracts to activate.')
                self.logger.warning(msg)
                contracts.disable_all()

        output_dir = options.output
        
        if options.reset:
            if os.path.exists(output_dir):
                self.logger.info('Removing output dir %r.' % output_dir)
                shutil.rmtree(output_dir)
        
        # Compmake storage for results        
        storage = os.path.join(output_dir, 'compmake')
        db = StorageFilesystem(storage, compress=True)
        currently_executing = ['root']
        # The original Compmake context
        oc = Context(db=db, currently_executing=currently_executing)
        # Our wrapper
        qc = CompmakeContext(cc=oc,
                             parent=None, qapp=self, job_prefix=None,
                             output_dir=output_dir)
        read_rc_files(oc)
        
        original = oc.get_comp_prefix()
        self.define_jobs_context(qc)
        oc.comp_prefix(original)
        
        merged = context_get_merge_data(qc)
    
        # Only create the index job if we have reports defined
        # or some branched context (which might create reports)
        has_reports = len(qc.get_report_manager().allreports) > 0
        has_branched = qc.has_branched()
        if has_reports or has_branched:
            self.info('Creating reports')
            oc.comp_dynamic(_dynreports_create_index, merged)
        else:
            self.info('Not creating reports.')
        
        ndefined = len(oc.get_jobs_defined_in_this_session())
        if ndefined == 0:
            # self.comp was never called
            msg = 'No jobs defined.'
            raise ValueError(msg)
        else: 
            if not options.console:
                try: 
                    oc.batch_command(options.command)
                except CommandFailed:
                    ret = QUICKAPP_COMPUTATION_ERROR
                else:
                    ret = 0
                     
                return ret
            else:
                oc.compmake_console()
                return 0
Example 8
def define_jobs(root):
    db = StorageFilesystem(root, compress=True)
    cc = Context(db=db)
    # h depends on the result of the dynamically-defined job e
    cc.comp(h, cc.comp_dynamic(e))
    return db, cc
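
A minimal sketch of driving define_jobs, assuming the job functions h and e are defined in the surrounding test module; the temporary directory handling is illustrative:

from tempfile import mkdtemp

root = mkdtemp()
db, cc = define_jobs(root)
# 'make recurse=1' also executes any jobs created by the dynamic job e
cc.batch_command('make recurse=1')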