Example #1
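Example #1 fans feature extraction out across a grid engine. It reads the image and model certificates (pickled dicts produced by earlier stages), clears any stale output for feature_hash from GridFS, slices the image x model cross product into batches, and submits one extract_features_core job per batch to a CPU or GPU queue. DB_NAME and helpers such as qsub, remove_existing, get_feature_batches and createCertificateDict are module-level names from the surrounding codebase and are not shown here.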
import cPickle
import gridfs
import pymongo as pm
from bson import SON   # documents are decoded as ordered SON dicts

def extract_features_parallel(feature_certificate,
                              image_certificate,
                              model_certificate,
                              feature_config,
                              feature_hash,
                              convolve_func_name,
                              batch_size):
    conn = pm.Connection(document_class = SON)
    db = conn[DB_NAME]
    
    image_certdict = cPickle.load(open(image_certificate))
    image_hash = image_certdict['image_hash']
    image_args = image_certdict['args']

    model_certdict = cPickle.load(open(model_certificate))
    model_hash = model_certdict['model_hash']
    model_args = model_certdict['args']

    f_coll = db['features.files']
    f_fs = gridfs.GridFS(db,'features')
    
    # Drop any previously extracted features for this hash before re-running.
    remove_existing(f_coll,f_fs,feature_hash)

    # Partition the image x model cross product into (im_from, im_to, m_from, m_to) batches.
    limits = get_feature_batches(image_hash, model_hash, db['images.files'], db['models.files'],
                                 batch_size=batch_size)
    
    jobids = []

    if convolve_func_name == 'numpy':
        queueName = 'extraction_cpu.q'
    elif convolve_func_name == 'pyfft':
        queueName = 'extraction_gpu.q'
    else:
        raise ValueError('unknown convolve_func_name: %r' % convolve_func_name)

    opstring = '-l qname=' + queueName + ' -o /home/render -e /home/render'
    # One job per (image-range, model-range) batch.
    for (im_from, im_to, m_from, m_to) in limits:
        jobid = qsub(extract_features_core,[(image_certificate,
                                             model_certificate,
                                             feature_hash,
                                             image_hash,
                                             model_hash,
                                             convolve_func_name),
                                             {'im_skip':im_from,
                                              'im_limit':im_to-im_from,
                                              'm_skip':m_from,
                                              'm_limit':m_to-m_from
                                              }],
                                            opstring=opstring)
        jobids.append(jobid)

    createCertificateDict(feature_certificate,{'feature_hash':feature_hash,
                                               'image_hash':image_hash,
                                               'model_hash':model_hash,
                                               'args':feature_config})

    return {'child_jobs':jobids}
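Every example on this page submits work through a qsub() helper whose implementation is not shown: it apparently serializes a Python call, submits it to Sun Grid Engine with the given option string, and returns the job id. The following is only a minimal sketch under those assumptions; the runner-script path and the parsing of SGE's reply are hypothetical:

import cPickle
import re
import subprocess
import tempfile

RUNNER = '/home/render/run_pickled_call.py'  # hypothetical script that unpickles and executes the call

def qsub(func, args, opstring=''):
    # Callers pass either a plain argument tuple or an [args_tuple, kwargs_dict] pair (see Example #1).
    kwargs = {}
    if isinstance(args, list) and len(args) == 2:
        args, kwargs = args
    payload = tempfile.NamedTemporaryFile(suffix='.pkl', delete=False)
    cPickle.dump((func.__module__, func.__name__, args, kwargs), payload)
    payload.close()
    # Submit the runner to SGE with the caller-supplied resource options.
    out = subprocess.check_output('qsub %s %s %s' % (opstring, RUNNER, payload.name), shell=True)
    m = re.search(r'Your job (\d+)', out)  # SGE replies 'Your job NNN ("name") has been submitted'
    return int(m.group(1)) if m else out.strip()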
Example #2
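Example #2 renders one image per model id. For each id it fetches the model's metadata, including its canonical view angles, from an HTTP service, builds a rendering configuration against a fixed background, and submits one generate_single_image job to the rendering queue.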
import json
import urllib

def generate_images_parallel(outdir, id_list):

    jobids = []
    for model_id in id_list:
        # Look up the model's metadata (including its canonical view, if any);
        # the JSON query is URL-encoded before being sent.
        url = 'http://50.19.109.25:9999/3dmodels?query=' + urllib.quote('{"id":"%s"}' % model_id)
        y = json.loads(urllib.urlopen(url).read())[0]
        x = {'bg_id': 'INTERIOR_10SN.tdl',
             'model_params': [{'model_id': y['id'],
                               'rxy': y.get('canonical_view', {}).get('rxy', 0),
                               'rxz': y.get('canonical_view', {}).get('rxz', 0),
                               'ryz': y.get('canonical_view', {}).get('ryz', 0)
                              }]
            }
        jobid = qsub(generate_single_image, (x, outdir),
                     opstring='-pe orte 2 -l qname=rendering.q -o /home/render -e /home/render')
        jobids.append(jobid)

    return {'child_jobs': jobids}
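Note that each iteration performs a blocking HTTP request before submitting its job, so the submission loop itself is serial; only the rendering work runs in parallel.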
Example #3
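Example #3 is the database-backed counterpart of Example #2: it expands a configuration generator into concrete per-image configurations via the project's rendering module, clears stale images for im_hash from GridFS, submits one generate_and_insert_single_image job per configuration, and records the run in a certificate.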
import gridfs
import pymongo as pm
from bson import SON

def generate_images_parallel(outfile,im_hash,config_gen):

    conn = pm.Connection(document_class = SON)
    db = conn[DB_NAME]
    im_coll = db['images.files']
    im_fs = gridfs.GridFS(db,'images')
    
    # Drop any previously rendered images for this hash before re-running.
    remove_existing(im_coll,im_fs,im_hash)

    # Expand the generator configuration into one concrete config per image.
    X = rendering.config_gen(config_gen)
       
    jobids = []
    for x in X:
        x['image']['generator'] = config_gen['images']['generator']
        jobid = qsub(generate_and_insert_single_image, (x, im_hash),
                     opstring='-pe orte 2 -l qname=rendering.q -o /home/render -e /home/render')
        jobids.append(jobid)
        
    createCertificateDict(outfile,{'image_hash':im_hash,'args':config_gen})

    return {'child_jobs':jobids}
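Examples #1, #3 and #4 all finish by writing a "certificate" that downstream stages reload (Example #1 reads the image and model certificates back with cPickle.load). The helper itself is not shown; a minimal sketch consistent with that usage:

import cPickle

def createCertificateDict(path, d):
    # Persist the provenance record (hashes and args) so later stages can verify their inputs.
    with open(path, 'wb') as f:
        cPickle.dump(d, f)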
Example #4
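Example #4 chains extraction and evaluation. prepare_extract_and_evaluate sets up the shared collections and expands the task argument into task_list; a first pass submits one extract_and_evaluate_parallel_core job per (model, task, split), the code then blocks until every job reports a status, and a second pass aggregates the per-split results into performance records before the certificate is written.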
def extract_and_evaluate_parallel(outfile, image_certificate_file, model_certificate_file,
                                  cpath, convolve_func_name, task, ext_hash):
        
    (model_configs, image_config_gen, model_hash, image_hash, task_list,
     perf_col, split_coll, split_fs, splitperf_coll, splitperf_fs) = prepare_extract_and_evaluate(ext_hash,
                                                                                                  image_certificate_file,
                                                                                                  model_certificate_file,
                                                                                                  task)

    
    jobids = []
    if convolve_func_name == 'numpy':
        opstring = '-l qname=extraction_cpu.q'
    elif convolve_func_name == 'pyfft':
        opstring = '-l qname=extraction_gpu.q -o /home/render -e /home/render'
    else:
        raise ValueError('unknown convolve_func_name: %r' % convolve_func_name)
        
    for m in model_configs:
        print('Evaluating model', m)
        for task in task_list:   # task_list expands the `task` argument into concrete tasks
            classifier_kwargs = task.get('classifier_kwargs', {})
            print('task', task)
            # One job per cross-validation split: store the split, then submit the worker.
            splits = generate_splits(task, image_hash, 'images')
            for (ind, split) in enumerate(splits):
                put_in_split(split, image_config_gen, m, task, ext_hash, ind, split_fs)
                jobid = qsub(extract_and_evaluate_parallel_core,(image_config_gen,m,task,ext_hash,ind,convolve_func_name),opstring=opstring)
                jobids.append(jobid)

    print(jobids)
    # Block until every extraction job has finished before aggregating results.
    statuses = wait_and_get_statuses(jobids)
    
    # Aggregate the per-split results into a single performance record per (model, task).
    for m in model_configs:
        print('Evaluating model', m)
        for task in task_list:
            split_results = get_most_recent_files(splitperf_coll,
                                                  {'__hash__': ext_hash,
                                                   'task': son_escape(task),
                                                   'model': m['config']['model'],
                                                   'images': son_escape(image_config_gen['images'])})
            put_in_performance(split_results, image_config_gen, m, model_hash, image_hash,
                               perf_col, task, ext_hash)

    createCertificateDict(outfile,{'image_file':image_certificate_file,'models_file':model_certificate_file})
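Examples #1 and #3 both call remove_existing() to clear stale GridFS output before resubmitting jobs. Its implementation is not shown; a minimal sketch consistent with the call sites (a *.files collection, the matching GridFS instance, and the run's hash), assuming files are tagged with the '__hash__' key that Example #4 also queries on:

def remove_existing(coll, fs, hash_value):
    # Delete every GridFS file tagged with this run's hash.
    for rec in coll.find({'__hash__': hash_value}, fields=['_id']):
        fs.delete(rec['_id'])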