def create_output_func(dataset, x, y, drop, input, output_layer, batch_size):
    """Build a Theano evaluation function for the network's output layer.

    Thin wrapper around ``create_theano_func``: it registers ``dataset``
    under the split name 'output' (the name the generic factory expects)
    and pairs the output layer's symbolic output with the target ``y``.
    """
    # 'output' keeps the name compatible with create_theano_func's lookup.
    wrapped_data = Params({'set': {'output': dataset}})
    layer_output_pair = (output_layer.output, y)
    return create_theano_func(
        'output', wrapped_data, x, y, drop, input, layer_output_pair, batch_size)
def setup(self):
    """Instantiate the LCIModule under test.

    Loads the test-fixture parameters and builds the module from a fixed
    3-node path-graph adjacency matrix (0-1-2).
    """
    params = Params("source/tests/learned_cn/params.json")
    # Symmetric adjacency of the 3-node path graph.
    adjacency = torch.tensor([[0., 1., 0.],
                              [1., 0., 1.],
                              [0., 1., 0.]])
    self.vec_cn = LCIModule(params, adjacency.numpy())
def __init__(self, expbase, cmdparams=None):
    """Set up a cryo-EM optimization experiment rooted at `expbase`.

    expbase is a path to the base of folder where this experiment's files
    will be stored.  The folder above expbase will also be searched for
    .params files; those are loaded first, so files inside expbase (and
    finally 'local.params') override them.  `cmdparams`, if given, is a
    mapping of parameter overrides applied last (highest precedence).

    NOTE(review): this is Python 2 code (`print` statements, `.next()`,
    `iteritems`, `cPickle`); keep it on a Python 2 interpreter.
    """
    BackgroundWorker.__init__(self)

    # Create a background thread which handles IO
    self.io_queue = Queue()
    self.io_thread = Thread(target=self.ioworker)
    self.io_thread.daemon = True  # don't block interpreter exit on pending IO
    self.io_thread.start()

    # General setup ----------------------------------------------------
    self.expbase = expbase
    self.outbase = None  # final output dir; decided below after eval_params()

    # Parameter setup --------------------------------------------------
    # search above expbase for params files
    _,_,filenames = os.walk(opj(expbase,'../')).next()
    self.paramfiles = [opj(opj(expbase,'../'), fname) \
                       for fname in filenames if fname.endswith('.params')]
    # search expbase for params files (these override the parent dir's)
    _,_,filenames = os.walk(opj(expbase)).next()
    self.paramfiles += [opj(expbase,fname) \
                        for fname in filenames if fname.endswith('.params')]
    # 'local.params' is appended again so it is loaded last and wins.
    if 'local.params' in filenames:
        self.paramfiles += [opj(expbase,'local.params')]
    # load parameter files
    self.params = Params(self.paramfiles)
    self.cparams = None  # populated by self.eval_params() below

    if cmdparams is not None:
        # Set parameters specified on the command line (highest precedence)
        for k,v in cmdparams.iteritems():
            self.params[k] = v

    # Dataset setup -------------------------------------------------------
    # 'inpath' may be a single path or a list of paths; a list is merged
    # into one combined stack.  Same pattern for 'ctfpath' below.
    self.imgpath = self.params['inpath']
    psize = self.params['resolution']
    if not isinstance(self.imgpath,list):
        imgstk = MRCImageStack(self.imgpath,psize)
    else:
        imgstk = CombinedImageStack([MRCImageStack(cimgpath,psize) \
                                     for cimgpath in self.imgpath])

    if self.params.get('float_images',True):
        imgstk.float_images()

    self.ctfpath = self.params['ctfpath']
    mscope_params = self.params['microscope_params']

    if not isinstance(self.ctfpath,list):
        ctfstk = CTFStack(self.ctfpath,mscope_params)
    else:
        ctfstk = CombinedCTFStack([CTFStack(cctfpath,mscope_params) \
                                   for cctfpath in self.ctfpath])

    self.cryodata = CryoDataset(imgstk,ctfstk)
    self.cryodata.compute_noise_statistics()
    if self.params.get('window_images',True):
        imgstk.window_images()

    minibatch_size = self.params['minisize']
    testset_size = self.params['test_imgs']
    partition = self.params.get('partition',0)
    num_partitions = self.params.get('num_partitions',1)
    seed = self.params['random_seed']
    # Params values may arrive as strings (e.g. from the command line);
    # NOTE(review): eval() on a params string is trusted-input only —
    # do not feed untrusted .params files through here.
    if isinstance(partition,str):
        partition = eval(partition)
    if isinstance(num_partitions,str):
        num_partitions = eval(num_partitions)
    if isinstance(seed,str):
        seed = eval(seed)
    self.cryodata.divide_dataset(minibatch_size,testset_size,partition,num_partitions,seed)

    self.cryodata.set_datasign(self.params.get('datasign','auto'))
    if self.params.get('normalize_data',True):
        self.cryodata.normalize_dataset()
    self.voxel_size = self.cryodata.pixel_size

    # Iterations setup -------------------------------------------------
    self.iteration = 0
    self.tic_epoch = None
    self.num_data_evals = 0
    self.eval_params()  # populates self.cparams from self.params

    # Per-partition runs get their own subdirectory so outputs don't clash.
    outdir = self.cparams.get('outdir',None)
    if outdir is None:
        if self.cparams.get('num_partitions',1) > 1:
            outdir = 'partition{0}'.format(self.cparams['partition'])
        else:
            outdir = ''
    self.outbase = opj(self.expbase,outdir)
    if not os.path.isdir(self.outbase):
        os.makedirs(self.outbase)

    # Output setup -----------------------------------------------------
    self.ostream = OutputStream(opj(self.outbase,'stdout'))

    # Banner: experiment identity, host, start time, and git state for
    # reproducibility.
    self.ostream(80*"=")
    self.ostream("Experiment: " + expbase + \
                 " Kernel: " + self.params['kernel'])
    self.ostream("Started on " + socket.gethostname() + \
                 " At: " + time.strftime('%B %d %Y: %I:%M:%S %p'))
    self.ostream("Git SHA1: " + gitutil.git_get_SHA1())
    self.ostream(80*"=")
    gitutil.git_info_dump(opj(self.outbase, 'gitinfo'))
    self.startdatetime = datetime.now()

    # for diagnostics and parameters
    self.diagout = Output(opj(self.outbase, 'diag'),runningout=False)
    # for stats (per image etc)
    self.statout = Output(opj(self.outbase, 'stat'),runningout=True)
    # for likelihoods of individual images
    self.likeout = Output(opj(self.outbase, 'like'),runningout=False)

    # Per-image likelihoods, initialized to +inf (not yet evaluated).
    self.img_likes = n.empty(self.cryodata.N_D)
    self.img_likes[:] = n.inf

    # optimization state vars ------------------------------------------
    # Initial density M: either loaded from 'init_model' (an MRC volume or
    # a pickled experiment save), or a randomly generated phantom.
    init_model = self.cparams.get('init_model',None)
    if init_model is not None:
        filename = init_model
        if filename.upper().endswith('.MRC'):
            M = readMRC(filename)
        else:
            with open(filename) as fp:
                M = cPickle.load(fp)
            # A pickled experiment save is a list of checkpoints; take the
            # density from the most recent one.
            if type(M)==list:
                M = M[-1]['M']
        # Resample to the dataset's cube size N^3 if shapes disagree.
        if M.shape != 3*(self.cryodata.N,):
            M = cryoem.resize_ndarray(M,3*(self.cryodata.N,),axes=(0,1,2))
    else:
        # Offset the seed by the partition id so partitions differ.
        init_seed = self.cparams.get('init_random_seed',0) + self.cparams.get('partition',0)
        print "Randomly generating initial density (init_random_seed = {0})...".format(init_seed), ; sys.stdout.flush()
        tic = time.time()
        M = cryoem.generate_phantom_density(self.cryodata.N, 0.95*self.cryodata.N/2.0, \
                                            5*self.cryodata.N/128.0, 30, seed=init_seed)
        print "done in {0}s".format(time.time() - tic)

    tic = time.time()
    print "Windowing and aligning initial density...", ; sys.stdout.flush()
    # window the initial density
    wfunc = self.cparams.get('init_window','circle')
    cryoem.window(M,wfunc)
    # Center and orient the initial density
    cryoem.align_density(M)
    print "done in {0:.2f}s".format(time.time() - tic)

    # apply the symmetry operator ('init_symmetry' falls back to 'symmetry')
    init_sym = get_symmetryop(self.cparams.get('init_symmetry',self.cparams.get('symmetry',None)))
    if init_sym is not None:
        tic = time.time()
        print "Applying symmetry operator...", ; sys.stdout.flush()
        M = init_sym.apply(M)
        print "done in {0:.2f}s".format(time.time() - tic)

    tic = time.time()
    print "Scaling initial model...", ; sys.stdout.flush()
    modelscale = self.cparams.get('modelscale','auto')
    mleDC, _, mleDC_est_std = self.cryodata.get_dc_estimate()
    if modelscale == 'auto':
        # Err on the side of a weaker prior by using a larger value for modelscale
        modelscale = (n.abs(mleDC) + 2*mleDC_est_std)/self.cryodata.N
        print "estimated modelscale = {0:.3g}...".format(modelscale), ; sys.stdout.flush()
        # Record the estimated value back into both parameter dicts.
        self.params['modelscale'] = modelscale
        self.cparams['modelscale'] = modelscale
    # Normalize M so its total mass equals modelscale.
    M *= modelscale/M.sum()
    print "done in {0:.2f}s".format(time.time() - tic)
    if mleDC_est_std/n.abs(mleDC) > 0.05:
        print " WARNING: the DC component estimate has a high relative variance, it may be inaccurate!"
    if ((modelscale*self.cryodata.N - n.abs(mleDC)) / mleDC_est_std) > 3:
        print " WARNING: the selected modelscale value is more than 3 std devs different than the estimated one. Be sure this is correct."

    # Real-space density M, its Fourier transform fM, and the step dM.
    self.M = n.require(M,dtype=density.real_t)
    self.fM = density.real_to_fspace(M)
    self.dM = density.zeros_like(self.M)

    # NOTE(review): eval() turns the 'optim_algo' params string into an
    # optimizer object — again trusted-input only.
    self.step = eval(self.cparams['optim_algo'])
    self.step.setup(self.cparams, self.diagout, self.statout, self.ostream)

    # Objective function setup --------------------------------------------
    param_type = self.cparams.get('parameterization','real')
    cplx_param = param_type in ['complex','complex_coeff','complex_herm_coeff']

    self.like_func = eval_objective(self.cparams['likelihood'])
    self.prior_func = eval_objective(self.cparams['prior'])

    # Optional penalty term is folded into the prior as a sum of objectives.
    if self.cparams.get('penalty',None) is not None:
        self.penalty_func = eval_objective(self.cparams['penalty'])
        prior_func = SumObjectives(self.prior_func.fspace, \
                                   [self.penalty_func,self.prior_func], None)
    else:
        prior_func = self.prior_func

    # Full objective = likelihood + (prior [+ penalty]).
    self.obj = SumObjectives(cplx_param, [self.like_func,prior_func], [None,None])
    self.obj.setup(self.cparams, self.diagout, self.statout, self.ostream)
    self.obj.set_dataset(self.cryodata)
    self.obj_wrapper = ObjectiveWrapper(param_type)

    self.last_save = time.time()

    self.logpost_history = FiniteRunningSum()
    self.like_history = FiniteRunningSum()

    # Importance Samplers -------------------------------------------------
    # Samplers over rotation (R), in-plane (I) and shift (S) variables;
    # the rotation sampler is symmetry-aware.
    self.is_sym = get_symmetryop(self.cparams.get('is_symmetry',self.cparams.get('symmetry',None)))
    self.sampler_R = FixedFisherImportanceSampler('_R',self.is_sym)
    self.sampler_I = FixedFisherImportanceSampler('_I')
    self.sampler_S = FixedGaussianImportanceSampler('_S')
    self.like_func.set_samplers(sampler_R=self.sampler_R,sampler_I=self.sampler_I,sampler_S=self.sampler_S)
from scipy.stats import rankdata
from util import Params, set_logger, prepare_sns

# CLI: only knob is the experiment directory that holds params.json.
# (argparse/os/logging/sns/load_diseases are imported elsewhere in this file.)
parser = argparse.ArgumentParser()
parser.add_argument('--experiment_dir', default='experiments/base_model',
                    help="Directory containing params.json")

if __name__ == '__main__':
    # Load the parameters from the experiment params.json file in model_dir
    args = parser.parse_args()
    json_path = os.path.join(args.experiment_dir, 'params.json')
    assert os.path.isfile(
        json_path), "No json configuration file found at {}".format(json_path)
    params = Params(json_path)
    # NOTE(review): update() re-reads the same json_path just loaded above —
    # looks redundant; confirm Params.update isn't doing extra merging work.
    params.update(json_path)

    # Set the logger (file + console)
    set_logger(os.path.join(args.experiment_dir, 'experiment.log'),
               level=logging.INFO, console=True)

    # Log Title
    logging.info("DPP-Diff Generator")
    logging.info("Sabri Eyuboglu -- SNAP Group")
    logging.info("======================================")

    # Configure seaborn plotting style from the experiment params.
    prepare_sns(sns, params)

    # presumably maps disease ids to disease objects; verify against
    # load_diseases' definition.
    diseases_dict = load_diseases(params.diseases_path, params.disease_subset)
Hh = np.zeros(n, dtype=np.cdouble) for i in range(n): for j in range(n): # Jh[i]=Jh[i] + (ds/2.0/np.pi)*np.exp(1j*s[j]*sigma[i])*amp[j] Hh[i] = Hh[i] + (ds / 2.0 / np.pi) * np.exp(1j * s[j] * sigma[i]) * \ amp[j] * (-np.abs(s[j]) / (3.0 * phix[i])) * rat[j] print('Hh reals: ', np.real(Hh)) return np.abs(np.real(Hh)) if __name__ == '__main__': # Set up line and get data from bin files lya = Line(1215.6701, 0.4164, 6.265e8) p = Params(line=lya, temp=1e4, tau0=1e7, num_dens=1701290465.5139434, energy=1., R=1e11, sigma_source=0., n_points=1e4) mu, x, time = read_bin('../data/1m_tau0_10000000.0_xinit_0.0_temp_10000.0_probabs_0.0/') norm = 4.0 * np.pi * p.R**2. * p.delta * 4.0 * np.pi / p.energy print('norm: ', norm) # Set up matplotlib figure fig, ax = plt.subplots(1, 1, figsize=(12, 9)) # Histogram data from Monte Carlo x xbins, n, err = monte_carlo(x) # xbins2, n2, err2 = monte_carlo(x, bins=256) ax.errorbar(xbins, n, yerr=err, fmt='.', c='m', ms=3, label='Monte Carlo') # ax.errorbar(xbins2, n2, yerr=err2, fmt='.', c='c', ms=3, label='Monte Carlo 2', alpha=0.25) # H_0 from analytic solution
import secret ''' This file contains the configurable hyperparameters of the CNN. For instance the backprogagation method, the number of convolutional layers and their architecutre, curriculum learning behavior etc. ''' #Create secret python file and variable token token = secret.token verbose = True number_of_epochs = 100 dataset_path = '/home/olav/Pictures/Mass_roads_alpha' pr_path = '/home/olav/Pictures/Mass_roads_alpha' filename_params = Params({ "results" : "./results", "network_save_name" : "./results/params.pkl", "curriculum_teacher" : "/home/olav/Documents/Results/E7_inexperienced_teacher/teacher/params.pkl", "curriculum_location" : "/media/olav/Data storage/dataset/Mass_inexperienced_100-2-5-stages" }) visual_params = Params({ "endpoint" : "http://178.62.232.71/", "gui_enabled" : True }) optimization_params = Params({ "backpropagation" : "sgd_nesterov", "batch_size" : 64, "l2_reg" : 0.0001, "momentum" : 0.93, "initial_patience" : 500000,
(obj.p.delta * obj.kappa_n / obj.p.k)**2. * y1 - 3. * obj.omega * obj.p.delta**2. * obj.p.phi(float(sigma)) / obj.p.k / c * x1, # dy2_dsigma x2, # dx1_dsigma = x2 y2 # dy1_dsigma = y2 ]) if __name__ == '__main__': # Create params object lya = Line(1215.6701, 0.4164, 6.265e8) p = Params(line=lya, temp=1e4, tau0=1e7, num_dens=1e6, energy=1., sigma_source=0., n_points=1e5, R=1e11) # Comparison of characteristic time and characteristic frequency tc = p.R / c * p.tau0 # Characteristic timescale omega_c = c / p.R * (p.a * p.tau0)**(-1. / 3.) print('1/tc=', 1. / tc) print('omega_c=', omega_c) print('R/c = ', p.R / c) # Plot some fourier coefficients # fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(8, 6))