Example #1
def run_async_operation(request_handler, operation_name):
    gc.logger.debug('>>> running an async operation')
    request_id = util.generate_request_id()
    params, raw_post_body, plugin_client = get_request_params(request_handler)
    gc.logger.debug(request_id)
    gc.logger.debug(params)
    gc.logger.debug(raw_post_body)
    
    worker_thread = BackgroundWorker(operation_name, params, True, request_id,
                                     raw_post_body, plugin_client)
    gc.logger.debug('worker created')
    worker_thread.start()
    gc.logger.debug('worker thread started')
    async_request_queue[request_id] = worker_thread
    gc.logger.debug('placed into queue')

    #q = Queue() #on larger object puts, process would hang
    #using manager based on this recommendation:
    #http://stackoverflow.com/questions/11442892/python-multiprocessing-queue-failure
    # manager = Manager()
    # q = manager.Queue() #TODO: request is failing here many times
    # gc.logger.debug('finished with queue')
    #worker = BackgroundWorker(operation_name, params, True, request_id, raw_post_body, q)
    #gc.logger.debug('worker created')
    #p = multiprocessing.Process(target=process_request_in_background,args=(worker,))
    #p.start()
    #gc.logger.debug('started process!')
    #add_to_request_queue(request_id, p, q)
    #gc.logger.debug('preparing to respond with the async id!')
    return respond_with_async_request_id(request_handler, request_id)
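The commented-out block above preserves an earlier multiprocessing-based design: a plain multiprocessing.Queue would hang on large puts, so the linked Stack Overflow answer recommends a Manager-backed queue, whose proxy talks to a server process rather than a raw pipe. A minimal, self-contained sketch of that pattern (the worker function and payload here are hypothetical, not part of this codebase):

import multiprocessing
from multiprocessing import Manager

def process_request_in_background(payload, q):
    # Do the heavy work in a separate process, then hand the
    # result back through the managed queue.
    result = {'echo': payload}
    q.put(result)

if __name__ == '__main__':
    manager = Manager()
    q = manager.Queue()  # proxy to a queue in the manager's server process;
                         # avoids the hang seen with large puts on a raw Queue
    p = multiprocessing.Process(target=process_request_in_background,
                                args=({'data': 'x' * 10 ** 6}, q))
    p.start()
    p.join()
    print(q.get())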
Example #2
def connections_delete_request(request_handler):
    request_id = util.generate_request_id()
    params, raw_post_body, plugin_client = get_request_params(request_handler)
    worker_thread = BackgroundWorker('delete_connection', params, False, request_id, raw_post_body, plugin_client)
    worker_thread.start()
    worker_thread.join()
    response = worker_thread.response
    respond(request_handler, response)
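BackgroundWorker's internals are not shown on this page; the handlers above rely on a start/join/response contract, which the following minimal sketch illustrates (SketchWorker and do-nothing result are hypothetical stand-ins):

import threading

class SketchWorker(threading.Thread):
    # Hypothetical stand-in for BackgroundWorker: runs one operation
    # and stores the result on the instance for the caller to read.
    def __init__(self, operation_name, params):
        threading.Thread.__init__(self)
        self.operation_name = operation_name
        self.params = params
        self.response = None

    def run(self):
        # run() executes on the worker thread after start()
        self.response = {'operation': self.operation_name, 'result': 'ok'}

worker = SketchWorker('delete_connection', {'id': 42})
worker.start()
worker.join()            # block until run() finishes
print(worker.response)   # safe to read once join() returns

Note that some of the older examples below call worker.run() directly, which executes the operation synchronously on the handler's own thread; start() followed by join() runs it on a separate thread but still blocks the handler until the operation completes.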
Example #3
def get_active_session_request(request_handler):
    '''
        GET /session?username=[email protected]&password=force&org_type=developer
    '''
    request_id = util.generate_request_id()
    params, json_body = get_request_params(request_handler)
    worker = BackgroundWorker('get_active_session', params, False, request_id, json_body)
    response = worker.run()
    respond(request_handler, response)
Example #4
def get_active_session_request(request_handler):
    '''
        GET /session?username=[email protected]&password=force&org_type=developer
    '''
    request_id = util.generate_request_id()
    params, json_body, plugin_client = get_request_params(request_handler)
    worker_thread = BackgroundWorker('get_active_session', params, False, request_id, json_body, plugin_client)
    worker_thread.start()
    worker_thread.join()
    response = worker_thread.response
    respond(request_handler, response)
Example #5
def metadata_list_request(request_handler):
    '''
        GET /metadata/list
        {
            "sid"             : "",
            "metadata_type"   : "",
            "murl"            : ""
        }
        call to get a list of metadata of a certain type
    '''
    request_id = util.generate_request_id()
    params, json_body = get_request_params(request_handler)
    worker = BackgroundWorker('list_metadata', params, False, request_id, json_body)
    response = worker.run()
    respond(request_handler, response)
Example #6
def metadata_list_request(request_handler):
    '''
        GET /metadata/list
        {
            "sid"             : "",
            "metadata_type"   : "",
            "murl"            : ""
        }
        call to get a list of metadata of a certain type
    '''
    request_id = util.generate_request_id()
    params, json_body, plugin_client = get_request_params(request_handler)
    worker_thread = BackgroundWorker('list_metadata', params, False, request_id, json_body, plugin_client)
    worker_thread.start()
    worker_thread.join()
    response = worker_thread.response
    respond(request_handler, response)
Example #7
def update_credentials_request(request_handler):
    '''
        POST /project/creds
        {
            "project_name"  : "my project name"
            "username"      : "*****@*****.**",
            "password"      : "force",
            "org_type"      : "developer",
        }
        NOTE: project name should not be updated, as it is used to find the project in question
        TODO: maybe assign a unique ID to each project, which would give users
              the flexibility to change the project name?
        TODO: we may need to implement a "clean" flag which will clean the project after creds
              have been updated
    '''
    request_id = util.generate_request_id()
    params, raw_post_body = get_request_params(request_handler)
    worker = BackgroundWorker('update_credentials', params, False, request_id, raw_post_body)
    response = worker.run()
    respond(request_handler, response)
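Given that docstring, a client would update credentials with a POST like the one below. The host, port, and example credential values are hypothetical; the actual address depends on where this request handler is mounted:

import json
import urllib2  # Python 2, matching the style of these examples

payload = {
    'project_name': 'my project name',
    'username': 'user@example.org',   # hypothetical example value
    'password': 'force',
    'org_type': 'developer',
}
req = urllib2.Request('http://localhost:9000/project/creds',  # hypothetical host/port
                      data=json.dumps(payload),
                      headers={'Content-Type': 'application/json'})
print(urllib2.urlopen(req).read())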
Example #8
def project_edit_subscription(request_handler):
    '''
        POST /project/subscription
        {
            "project_name"  : "my project name"
            "subscription"  : ["ApexClass", "ApexPage"]
        }
    '''
    request_id = util.generate_request_id()
    params, raw_post_body, plugin_client = get_request_params(request_handler)
    worker_thread = BackgroundWorker('update_subscription', params, False, request_id, raw_post_body, plugin_client)
    worker_thread.start()
    worker_thread.join()
    response = worker_thread.response
    respond(request_handler, response)
Example #9
def refresh_metadata_index(request_handler):
    '''
        GET /project/get_index/refresh
        {
            "project_name"      : "my project name",
            "metadata_types"    : ["ApexClass"]
        }
        call to refresh a certain type of metadata
    '''
    request_id = util.generate_request_id()
    params, json_body, plugin_client = get_request_params(request_handler)
    worker_thread = BackgroundWorker('refresh_metadata_index', params, False, request_id, json_body, plugin_client)
    worker_thread.start()
    worker_thread.join()
    response = worker_thread.response
    respond(request_handler, response) 
Example #10
def get_metadata_index(request_handler):
    '''
        GET /project/get_index
        {
            "project_name"  : "my project name",
            "keyword"       : "mykeyword" //optional
        }
        call to get the metadata index for a project
    '''
    request_id = util.generate_request_id()
    params, json_body, plugin_client = get_request_params(request_handler)
    worker_thread = BackgroundWorker('get_indexed_metadata', params, False, request_id, json_body, plugin_client)
    worker_thread.start()
    worker_thread.join()
    response = worker_thread.response
    respond(request_handler, response) 
Example #11
def connections_delete_request(request_handler):
    request_id = util.generate_request_id()
    params, raw_post_body = get_request_params(request_handler)
    worker = BackgroundWorker('delete_connection', params, False, request_id, raw_post_body)
    response = worker.run()
    respond(request_handler, response)
Example #12
    def end(self):
        BackgroundWorker.end(self)
Example #13
    def begin(self):
        BackgroundWorker.begin(self)
Example #14
    def __init__(self, expbase, cmdparams=None):
        """expbase is a path to the base folder where this experiment's
        files will be stored. The folder above expbase will also be
        searched for .params files; these will be loaded first."""
        BackgroundWorker.__init__(self)

        # Create a background thread which handles IO
        self.io_queue = Queue()
        self.io_thread = Thread(target=self.ioworker)
        self.io_thread.daemon = True
        self.io_thread.start()

        # General setup ----------------------------------------------------
        self.expbase = expbase
        self.outbase = None

        # Parameter setup ---------------------------------------------------
        # search above expbase for params files
        _,_,filenames = os.walk(opj(expbase,'../')).next()
        self.paramfiles = [opj(opj(expbase,'../'), fname) \
                           for fname in filenames if fname.endswith('.params')]
        # search expbase for params files
        _,_,filenames = os.walk(opj(expbase)).next()
        self.paramfiles += [opj(expbase,fname)  \
                            for fname in filenames if fname.endswith('.params')]
        if 'local.params' in filenames:
            # append local.params again so it is loaded last and its
            # settings take precedence
            self.paramfiles += [opj(expbase,'local.params')]
        # load parameter files
        self.params = Params(self.paramfiles)
        self.cparams = None
        
        if cmdparams is not None:
            # Set parameter specified on the command line
            for k,v in cmdparams.iteritems():
                self.params[k] = v
                
        # Dataset setup -------------------------------------------------------
        self.imgpath = self.params['inpath']
        psize = self.params['resolution']
        if not isinstance(self.imgpath,list):
            imgstk = MRCImageStack(self.imgpath,psize)
        else:
            imgstk = CombinedImageStack([MRCImageStack(cimgpath,psize) for cimgpath in self.imgpath])

        if self.params.get('float_images',True):
            imgstk.float_images()
        
        self.ctfpath = self.params['ctfpath']
        mscope_params = self.params['microscope_params']
         
        if not isinstance(self.ctfpath,list):
            ctfstk = CTFStack(self.ctfpath,mscope_params)
        else:
            ctfstk = CombinedCTFStack([CTFStack(cctfpath,mscope_params) for cctfpath in self.ctfpath])


        self.cryodata = CryoDataset(imgstk,ctfstk)
        self.cryodata.compute_noise_statistics()
        if self.params.get('window_images',True):
            imgstk.window_images()
        minibatch_size = self.params['minisize']
        testset_size = self.params['test_imgs']
        partition = self.params.get('partition',0)
        num_partitions = self.params.get('num_partitions',1)
        seed = self.params['random_seed']
        # values read from .params files may arrive as strings; eval
        # converts them to the expected numeric types
        if isinstance(partition,str):
            partition = eval(partition)
        if isinstance(num_partitions,str):
            num_partitions = eval(num_partitions)
        if isinstance(seed,str):
            seed = eval(seed)
        self.cryodata.divide_dataset(minibatch_size,testset_size,partition,num_partitions,seed)
        
        self.cryodata.set_datasign(self.params.get('datasign','auto'))
        if self.params.get('normalize_data',True):
            self.cryodata.normalize_dataset()

        self.voxel_size = self.cryodata.pixel_size


        # Iterations setup -------------------------------------------------
        self.iteration = 0 
        self.tic_epoch = None
        self.num_data_evals = 0
        self.eval_params()

        outdir = self.cparams.get('outdir',None)
        if outdir is None:
            if self.cparams.get('num_partitions',1) > 1:
                outdir = 'partition{0}'.format(self.cparams['partition'])
            else:
                outdir = ''
        self.outbase = opj(self.expbase,outdir)
        if not os.path.isdir(self.outbase):
            os.makedirs(self.outbase) 

        # Output setup -----------------------------------------------------
        self.ostream = OutputStream(opj(self.outbase,'stdout'))

        self.ostream(80*"=")
        self.ostream("Experiment: " + expbase + \
                     "    Kernel: " + self.params['kernel'])
        self.ostream("Started on " + socket.gethostname() + \
                     "    At: " + time.strftime('%B %d %Y: %I:%M:%S %p'))
        self.ostream("Git SHA1: " + gitutil.git_get_SHA1())
        self.ostream(80*"=")
        gitutil.git_info_dump(opj(self.outbase, 'gitinfo'))
        self.startdatetime = datetime.now()


        # for diagnostics and parameters
        self.diagout = Output(opj(self.outbase, 'diag'),runningout=False)
        # for stats (per image etc)
        self.statout = Output(opj(self.outbase, 'stat'),runningout=True)
        # for likelihoods of individual images
        self.likeout = Output(opj(self.outbase, 'like'),runningout=False)

        self.img_likes = n.empty(self.cryodata.N_D)
        self.img_likes[:] = n.inf

        # optimization state vars ------------------------------------------
        init_model = self.cparams.get('init_model',None)
        if init_model is not None:
            filename = init_model
            if filename.upper().endswith('.MRC'):
                M = readMRC(filename)
            else:
                with open(filename) as fp:
                    M = cPickle.load(fp)
                    if type(M)==list:
                        M = M[-1]['M'] 
            if M.shape != 3*(self.cryodata.N,):
                M = cryoem.resize_ndarray(M,3*(self.cryodata.N,),axes=(0,1,2))
        else:
            init_seed = self.cparams.get('init_random_seed',0)  + self.cparams.get('partition',0)
            print "Randomly generating initial density (init_random_seed = {0})...".format(init_seed), ; sys.stdout.flush()
            tic = time.time()
            M = cryoem.generate_phantom_density(self.cryodata.N, 0.95*self.cryodata.N/2.0, \
                                                5*self.cryodata.N/128.0, 30, seed=init_seed)
            print "done in {0}s".format(time.time() - tic)

        tic = time.time()
        print "Windowing and aligning initial density...", ; sys.stdout.flush()
        # window the initial density
        wfunc = self.cparams.get('init_window','circle')
        cryoem.window(M,wfunc)

        # Center and orient the initial density
        cryoem.align_density(M)
        print "done in {0:.2f}s".format(time.time() - tic)

        # apply the symmetry operator
        init_sym = get_symmetryop(self.cparams.get('init_symmetry',self.cparams.get('symmetry',None)))
        if init_sym is not None:
            tic = time.time()
            print "Applying symmetry operator...", ; sys.stdout.flush()
            M = init_sym.apply(M)
            print "done in {0:.2f}s".format(time.time() - tic)

        tic = time.time()
        print "Scaling initial model...", ; sys.stdout.flush()
        modelscale = self.cparams.get('modelscale','auto')
        mleDC, _, mleDC_est_std = self.cryodata.get_dc_estimate()
        if modelscale == 'auto':
            # Err on the side of a weaker prior by using a larger value for modelscale
            modelscale = (n.abs(mleDC) + 2*mleDC_est_std)/self.cryodata.N
            print "estimated modelscale = {0:.3g}...".format(modelscale), ; sys.stdout.flush()
            self.params['modelscale'] = modelscale
            self.cparams['modelscale'] = modelscale
        M *= modelscale/M.sum()
        print "done in {0:.2f}s".format(time.time() - tic)
        if mleDC_est_std/n.abs(mleDC) > 0.05:
            print "  WARNING: the DC component estimate has a high relative variance; it may be inaccurate!"
        if ((modelscale*self.cryodata.N - n.abs(mleDC)) / mleDC_est_std) > 3:
            print "  WARNING: the selected modelscale value is more than 3 std devs from the estimated one. Be sure this is correct."

        self.M = n.require(M,dtype=density.real_t)
        self.fM = density.real_to_fspace(M)
        self.dM = density.zeros_like(self.M)

        self.step = eval(self.cparams['optim_algo'])
        self.step.setup(self.cparams, self.diagout, self.statout, self.ostream)

        # Objective function setup --------------------------------------------
        param_type = self.cparams.get('parameterization','real')
        cplx_param = param_type in ['complex','complex_coeff','complex_herm_coeff']
        self.like_func = eval_objective(self.cparams['likelihood'])
        self.prior_func = eval_objective(self.cparams['prior'])

        if self.cparams.get('penalty',None) is not None:
            self.penalty_func = eval_objective(self.cparams['penalty'])
            prior_func = SumObjectives(self.prior_func.fspace, \
                                       [self.penalty_func,self.prior_func], None)
        else:
            prior_func = self.prior_func

        self.obj = SumObjectives(cplx_param,
                                 [self.like_func,prior_func], [None,None])
        self.obj.setup(self.cparams, self.diagout, self.statout, self.ostream)
        self.obj.set_dataset(self.cryodata)
        self.obj_wrapper = ObjectiveWrapper(param_type)

        self.last_save = time.time()
        
        self.logpost_history = FiniteRunningSum()
        self.like_history = FiniteRunningSum()

        # Importance Samplers -------------------------------------------------
        self.is_sym = get_symmetryop(self.cparams.get('is_symmetry',self.cparams.get('symmetry',None)))
        self.sampler_R = FixedFisherImportanceSampler('_R',self.is_sym)
        self.sampler_I = FixedFisherImportanceSampler('_I')
        self.sampler_S = FixedGaussianImportanceSampler('_S')
        self.like_func.set_samplers(sampler_R=self.sampler_R,sampler_I=self.sampler_I,sampler_S=self.sampler_S)
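The ioworker method referenced at the top of this constructor is not shown on this page. A minimal sketch of that daemon IO-thread pattern follows; the queue item format (path, data) and the None sentinel are assumptions for illustration:

from Queue import Queue   # Python 2, as in the example above
from threading import Thread

io_queue = Queue()

def ioworker():
    # Drain the queue forever, writing each item to disk so the
    # compute thread never blocks on IO. A None sentinel stops the loop.
    while True:
        item = io_queue.get()
        if item is None:
            break
        path, data = item
        with open(path, 'w') as f:
            f.write(data)
        io_queue.task_done()

io_thread = Thread(target=ioworker)
io_thread.daemon = True    # don't keep the process alive on exit
io_thread.start()

io_queue.put(('/tmp/example.txt', 'hello'))
io_queue.join()            # wait until all queued writes complete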