Example #1
import os
import shutil
from os.path import exists, join


def exec_cache_PACK(options, jlist, base, jobspecfile, order, jjspec):
    # Pack the cache files referenced by the jobs in jlist into a bztar
    # archive and copy that archive to options.output_path.
    offset = _get_offset(options, 1)

    cache = PickleCache(options)

    for section, jobs in jlist.items():

        # Start each section with a clean staging directory.
        if exists('sri-cache'):
            shutil.rmtree('sri-cache')
        os.makedirs('sri-cache')

        for j, k in jobs:
            datareq = _cachejobs(j, **k)
            if datareq:
                # Resolve the job's cache file and stage a copy of it.
                f_from = cache.filename(*datareq, offset=offset)
                f_to = 'sri-cache'
                if f_from:
                    if exists(f_from):
                        shutil.copy(f_from, f_to)
                    else:
                        print "Cache file %s does not exist" % f_from

        # Archive the staged files and deliver the archive to the output path.
        shutil.make_archive('sri-cache', 'bztar', 'sri-cache')
        shutil.rmtree('sri-cache')
        shutil.copyfile('sri-cache.tar.bz2', join(options.output_path, 'sri-cache.tar.bz2'))
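These examples rely on module-level helpers that are not shown on this page: _get_offset, _cachejobs and PickleCache. The sketch below only illustrates the interface the PACK command appears to assume; the bodies, the cache_path option and the on-disk naming scheme are assumptions, not the project's actual implementation.

from os.path import join


def _get_offset(options, default=0):
    # Assumed: the cache offset comes from the command-line options,
    # falling back to the given default.
    return getattr(options, 'offset', None) or default


def _cachejobs(j, **k):
    # Assumed: return the cache data request for job j, or None if the
    # job produces nothing cacheable.
    return k.get('datareq')


class PickleCache(object):
    # Assumed layout: one pickle file per data request, with the offset
    # selecting a cache generation.
    def __init__(self, options):
        self.path = options.cache_path  # hypothetical option

    def filename(self, *datareq, **kw):
        offset = kw.get('offset', 0)
        if not datareq:
            return None
        name = '_'.join(str(d) for d in datareq)
        return join(self.path, '%s.%d.pickle' % (name, offset))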
Example #2
import shutil
from os.path import exists


def exec_cache_RENEW(options, jlist, base, jobspecfile, order, jjspec):
    # Move each job's cache file from the configured offset generation
    # into the current cache slot.
    offset = _get_offset(options)

    cache = PickleCache(options)

    for section, jobs in jlist.items():
        print section
        for j, k in jobs:
            datareq = _cachejobs(j, **k)
            if datareq:
                # Source: the file at the configured offset; target: the current slot.
                f_from = cache.filename(*datareq, offset=offset)
                f_to = cache.filename(*datareq)
                if f_from:
                    if exists(f_from):
                        shutil.move(f_from, f_to)
                    else:
                        print "Cache file %s does not exist" % f_from
Example #3
from os.path import exists


def exec_cache_LIST(options, jlist, base, jobspecfile, order, jjspec):
    # Print the expected cache file for each job and whether it exists on disk.
    cache = PickleCache(options)

    offset = _get_offset(options, 1)

    for section, jobs in jlist.items():
        print section
        for j, k in jobs:
            datareq = _cachejobs(j, **k)
            if datareq:
                f = cache.filename(*datareq, offset=offset)
                if f:
                    print j, f, exists(f)
                else:
                    print j, f
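All three commands share the same signature, which suggests they are selected by name from a single driver. A minimal dispatch sketch under that assumption (the command table and driver function are hypothetical, not part of the original module):

COMMANDS = {
    'PACK': exec_cache_PACK,
    'RENEW': exec_cache_RENEW,
    'LIST': exec_cache_LIST,
}


def run_cache_command(name, options, jlist, base, jobspecfile, order, jjspec):
    # Look up the exec_cache_* handler for the requested command and call it.
    try:
        handler = COMMANDS[name]
    except KeyError:
        raise ValueError('Unknown cache command: %s' % name)
    return handler(options, jlist, base, jobspecfile, order, jjspec)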