Example #1
    def test_dynamic5(self):

        # first define with both jobs and run
        mockup5(self.cc, both=True)
        self.assert_cmd_success('make recurse=1')

        self.assertJobsEqual(
            'all', ['fd', 'fd-gd', 'fd-gd-g2', 'hd', 'hd-id', 'hd-id-i2'])
        self.assertJobsEqual(
            'done', ['fd', 'fd-gd', 'fd-gd-g2', 'hd', 'hd-id', 'hd-id-i2'])

        self.assert_cmd_success('details hd-id')
        self.assert_cmd_success('details hd-id-i2')
        self.assertEqualSet(definition_closure(['hd-id'], self.db),
                            ['hd-id-i2'])
        self.assertEqualSet(definition_closure(['hd'], self.db),
                            ['hd-id', 'hd-id-i2'])
        # now redo it
        self.db = StorageFilesystem(self.root, compress=True)
        self.cc = Context(db=self.db)

        mockup5(self.cc, both=False)
        self.assert_cmd_success('clean')
        self.assert_cmd_success('make recurse=1')
        self.assertJobsEqual('all', ['fd', 'fd-gd', 'fd-gd-g2'])
        self.assertJobsEqual('done', ['fd', 'fd-gd', 'fd-gd-g2'])
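The assertions above also show that the definition closure is transitive: 'hd' defines 'hd-id', which in turn defines 'hd-id-i2'. Restated as standalone checks (same names and self.db as in the test; set() is used only to make the comparison explicit):

assert set(definition_closure(['hd-id'], self.db)) == {'hd-id-i2'}
assert set(definition_closure(['hd'], self.db)) == {'hd-id', 'hd-id-i2'}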
Example #2
def clean_targets(job_list, db):
    #     print('clean_targets (%r)' % job_list)
    job_list = set(job_list)

    # now we need to delete the definition closure

    from compmake.jobs.queries import definition_closure
    closure = definition_closure(job_list, db)

    basic = job_list - closure

    from compmake.jobs.queries import parents
    other_clean = set()
    for job_id in job_list:
        other_clean.update(parents(job_id, db))
    other_clean = other_clean - closure
    #
    #     print('deleting: %r' % closure)
    #     print('only cleaning: %r' % basic)
    #     print('other cleaning: %r' % other_clean)
    #
    for job_id in closure | basic | other_clean:
        clean_cache_relations(job_id, db)

    # delete all in closure
    from compmake.jobs.storage import delete_all_job_data
    for job_id in closure:
        delete_all_job_data(job_id, db)

    # just remove cache in basic
    for job_id in basic:
        # Cleans associated objects
        if job_cache_exists(job_id, db):
            delete_job_cache(job_id, db)
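definition_closure itself is imported from compmake.jobs.queries but not shown on this page. A minimal sketch of the idea, assuming jobs_defined(job_id, db) returns the jobs dynamically defined by job_id (the import path for jobs_defined is an assumption, and the real implementation may also consult each job's cache before expanding it):

def definition_closure_sketch(jobs, db):
    """ Sketch only: all jobs defined, directly or indirectly, by `jobs`. """
    from compmake.jobs.queries import jobs_defined  # assumed import path
    to_visit = list(jobs)
    closure = set()
    while to_visit:
        job_id = to_visit.pop()
        # expand the jobs that this job defined when it ran
        for defined in jobs_defined(job_id, db):
            if defined not in closure:
                closure.add(defined)
                to_visit.append(defined)
    return closure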
Example #3
    def test_dynamic8_clean(self):
        #         """ Re-execution creates more jobs.  """
        mockup8(self.cc)
        # run it
        TestDynamic8.define_other = True
        self.assert_cmd_success('make recurse=1')
        # we have three jobs defined
        self.assertJobsEqual('all', ['fd', 'fd-always', 'fd-other'])
        # clean and remake fd, this time without defining the 'other' job
        TestDynamic8.define_other = False

        self.assertJobsEqual('done', ['fd', 'fd-always', 'fd-other'])
        self.assertEqualSet(jobs_defined('fd', self.db),
                            ['fd-always', 'fd-other'])

        self.assertEqualSet(definition_closure(['fd'], self.db),
                            ['fd-always', 'fd-other'])
        direct = direct_uptodate_deps_inverse('fd', self.db)
        self.assertEqualSet(direct, ['fd-always', 'fd-other'])
        direct_closure = direct_uptodate_deps_inverse_closure('fd', self.db)
        self.assertEqualSet(direct_closure, ['fd-always', 'fd-other'])

        self.assert_cmd_success('clean fd')
        # clean should get rid of the jobs that 'fd' defined
        self.assertJobsEqual('all', ['fd'])
        self.assert_cmd_success('make fd')
        # now the "other" job should disappear
        self.assertJobsEqual('all', ['fd', 'fd-always'])
Example #4
def delete_jobs_recurse_definition(jobs, db):
    """ Deletes all jobs given and the jobs that they defined.
        Returns the set of jobs deleted. """
    from compmake.jobs.queries import definition_closure
    closure = definition_closure(jobs, db)

    all_jobs = jobs | closure
    for job_id in all_jobs:
        clean_cache_relations(job_id, db)

    from compmake.jobs.storage import delete_all_job_data
    for job_id in all_jobs:
        delete_all_job_data(job_id, db)

    return all_jobs
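A hypothetical usage sketch, reusing the job names from test_dynamic8_clean above; db is assumed to be a compmake storage handle (e.g. a StorageFilesystem as in Example #1), and the first argument should be a set because the function computes jobs | closure:

deleted = delete_jobs_recurse_definition({'fd'}, db)
# With the mockup8 setup, 'fd' defined 'fd-always' and 'fd-other',
# so the expected result is {'fd', 'fd-always', 'fd-other'}.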
Example #5
def direct_uptodate_deps_inverse_closure(job_id, db):
    """ 
        Closure of direct_uptodate_deps_inverse:
        all jobs that depend on this.
    """
    from compmake.jobs.queries import parents
    # all parents
    dep_inv = parents(job_id, db)
    # plus their definition closure
    from compmake.jobs.queries import definition_closure
    closure = definition_closure(dep_inv, db)
    # this is not true in general
    # assert not closure & dep_inv
    dep_inv.update(closure)
    # plus the ones that were defined by it
    from compmake.jobs.storage import get_job_cache
    if get_job_cache(job_id, db).state == Cache.DONE:
        dep_inv.update(jobs_defined(job_id, db))
    return dep_inv 
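As a usage note, the assertions in test_dynamic8_clean above pin down the expected result for the 'fd' job once it has run and is DONE (db assumed to be the test's storage handle; set() is used only to make the comparison explicit):

assert set(direct_uptodate_deps_inverse_closure('fd', db)) == {'fd-always', 'fd-other'}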
Example #6
def graph(job_list, context, filename='compmake-graph',
          filter='dot', format='png',  # @ReservedAssignment
          label='id', color=True,
          cluster=False, processing=set()):
    """

        Creates a graph of the given targets and dependencies.

        Usage:

            @: graph filename=filename label=[id,function,none] color=[0|1] format=png filter=[dot|circo|...]

        Options:

            filename:  name of generated filename in the dot format
            label='none','id','function'
            color=[0|1]: whether to color the nodes
            filter=[dot,circo,twopi,...]  which algorithm to use to arrange
                       the nodes. The best choice depends on
                       the topology of your
                       computation. The default is 'dot'
                       (hierarchy top-bottom).
            format=[png,...]  The output file format.
    """
    possible = ['none', 'id', 'function']
    if label not in possible:
        msg = 'Invalid label method %r not in %r.' % (label, possible)
        raise ValueError(msg)

    db = context.get_compmake_db()
    if not job_list:
        job_list = list(top_targets(db))

    print('jobs: %s' % job_list)
    print('processing: %s' % processing)
    print('Importing gvgen')

    try:
        import gvgen  # @UnusedImport -- only checking that it is available
    except ImportError:
        gvgen_url = 'https://github.com/stricaud/gvgen'
        msg = ('To use the "graph" command you have to install the "gvgen" '
               'package from %s') % gvgen_url
        raise UserError(msg)

    print('Getting all jobs in tree')

    cq = CacheQueryDB(db)
    job_list = set(job_list)
    # all the dependencies
    job_list.update(cq.tree(job_list))

    # plus all the jobs that were defined by them
    job_list.update(definition_closure(job_list, db))

    job_list = set(job_list)

#     print('closure: %s' % sorted(job_list))

    if cluster:
        ggraph = create_graph2_clusters(cq, job_list, label=label, color=color,
                                        processing=processing)
    else:
        ggraph = create_graph1(cq, job_list, label=label, color=color,
                               processing=processing)
    print('Writing graph on %r.' % filename)
    # TODO: add check?
    with open(filename, 'w') as f:
        ggraph.dot(f)

    print('Running rendering')
    output = filename + '.' + format
    cmd_line = '%s %s -T%s -o%s' % (filter, filename, format, output)
    print('  %s' % cmd_line)
    ret = os.system(cmd_line)
    if ret != 0:
        msg = ("Could not run dot (cmdline='%s'). Make sure graphviz is "
               "installed." % cmd_line)
        raise UserError(msg)  # XXX maybe not UserError

    info("Written output on files %s, %s." % (filename, output))