Example #1
	def produce_output(self):
		if not self.output_plan.use_plan:
			print 'skipping output...'
			return

		print 'producing output...'
		check_0 = time.time()
		#self.output_plan.flat_data = False
		pool = self.load_data_pool()
		data_ = lfu.data_container(data = pool.data)
		if self.output_plan.must_output(): self.output_plan(data_)
		if pool.postproc_data is not None:
			processes = self.postprocess_plan.post_processes
			for dex, proc in enumerate(processes):
				if proc.output.must_output():
					proc.provide_axes_manager_input()
					data_ = lfu.data_container(
						data = pool.postproc_data[dex])
					proc.determine_regime(self)
					proc.output(data_)

		if pool.routine_data is not None:
			routines = self.fitting_plan.routines
			for dex, rout in enumerate(routines):
				if rout.output.must_output():
					rout.provide_axes_manager_input()
					data_ = lfu.data_container(
						data = pool.routine_data[dex])
					rout.output(data_)

		print 'produced output: ', time.time() - check_0
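
All of these snippets treat lfu.data_container as a simple attribute bag: keyword arguments (data, postproc_data, routine_data, and so on) become attributes on the returned object. The class itself is not shown on this page, so the following is only a rough sketch of the behavior the call sites appear to assume, not the library's actual implementation:

# hedged sketch: approximates how the examples use lfu.data_container;
# the real class in the lfu module may carry additional behavior
class data_container(object):
	def __init__(self, data = None, **kwargs):
		self.data = data
		# every other keyword becomes an attribute, e.g. postproc_data
		self.__dict__.update(kwargs)

# usage mirroring Example #1 / Example #3
pool = data_container(data = [1, 2, 3], postproc_data = None, routine_data = None)
print pool.data, pool.postproc_data
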
Example #2
def set_module_memory_(ensem):
	ensem._module_memory_ = [lfu.data_container(
		output_plan_selected_memory = 'Simulation', 
				variable_selected_memory = 'None', 
				function_selected_memory = 'None', 
				reaction_selected_memory = 'None', 
				species_selected_memory = 'None')]
Example #3
	def save_data_pool(self):
		print 'saving data pool...'
		check = time.time()
		proc_data = None
		if self.postprocess_plan.use_plan:
			proc_data = [proc.data for proc in 
				self.postprocess_plan.post_processes]

		rout_data = None
		if self.fitting_plan.use_plan:
			rout_data = [rout.data for rout in 
					self.fitting_plan.routines]

		self.data_pool = lfu.data_container(data = self.data_pool, 
			postproc_data = proc_data, routine_data = rout_data)
		self.describe_data_pool(self.data_pool)
		if self.data_scheme == 'smart_batch':
			self.data_pool.data.live_pool = []
			self.data_pool_pkl = os.path.join(os.getcwd(), 'data_pools', 
										'.'.join(['data_pool', 'smart', 
											self.data_pool_id, 'pkl']))

		elif self.data_scheme == 'batch':
			self.data_pool_pkl = os.path.join(os.getcwd(), 'data_pools', 
					'.'.join(['data_pool', self.data_pool_id, 'pkl']))

		else:
			print 'data_scheme is unresolved... assuming "batch"'
			self.data_pool_pkl = os.path.join(os.getcwd(), 'data_pools', 
					'.'.join(['data_pool', self.data_pool_id, 'pkl']))

		lf.save_pkl_object(self.data_pool, self.data_pool_pkl)
		print 'saved data pool: ', time.time() - check
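
Example #3 hands the assembled pool to lf.save_pkl_object, while the loader used in Example #1 (self.load_data_pool) is not shown. Assuming the lf helpers are thin wrappers around the standard pickle module, a minimal counterpart could look like the sketch below; the function names are illustrative, not the module's real API:

# hedged sketch only: assumes the lf pickle helpers wrap the standard library;
# the project's real save_pkl_object / loader may differ
import cPickle as pickle

def save_pkl_object(obj, path):
	with open(path, 'wb') as handle:
		pickle.dump(obj, handle, protocol = pickle.HIGHEST_PROTOCOL)

def load_pkl_object(path):
	with open(path, 'rb') as handle:
		return pickle.load(handle)
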
Example #4
def set_module_memory_(ensem):
    ensem._module_memory_ = [
        lfu.data_container(output_plan_selected_memory='Simulation',
                           variable_selected_memory='None',
                           function_selected_memory='None',
                           reaction_selected_memory='None',
                           species_selected_memory='None')
    ]
	def deep_parse(self, *args, **kwargs):
		def empty_check(unic):
			checked = [x for x in unic if x != '']
			return checked
		def ovrflw_check(unic):
			checked =\
				['1000000.0' if x.count('OVRFLW') else x for x in unic]
			return checked
		def _all_checks_(unic):
			unic = empty_check(unic)
			checked = ovrflw_check(unic)
			return checked
		def hms_to_mins(hms):
			convs = [60.0, 1.0, 1.0/60.0]
			mins = [sum([float(pair[0])*pair[1] for pair 
				in zip(hms[x].split(':'), convs)]) 
				for x in range(len(hms))]
			return mins
		def time_filter(hms):
			hms0 = '0:00:00'
			hms = [min_ for d, min_ in enumerate(hms)
				if min_ != hms0 or d == 0]
			mins = hms_to_mins(hms)
			return mins
		known_filters = OrderedDict()
		known_filters['Time'] = time_filter
		known_filters['_all_'] = _all_checks_
		def filter_(key, dat):
			kf = known_filters
			dat = kf['_all_'](dat)
			if key in kf: dat = kf[key](dat)
			try: return np.array(dat,dtype='f')
			except ValueError: pdb.set_trace()
		measur = self._measurement_
		weldat = self._well_data_
		welkey = self._well_key_
		condat = self._cond_data_
		conkey = self._cond_key_
		con_data = lgeo.scalars_from_labels(conkey)
		for dex, key in enumerate(conkey):
			con_data[dex].scalars = filter_(key, condat[:,dex])
		wel_data = lgeo.scalars_from_labels(welkey)
		for dex, key in enumerate(welkey):
			wel_data[dex].scalars = filter_(key, weldat[:,dex])
		all_data = con_data + wel_data
		self._unreduced_ = lfu.data_container(data = all_data)
		self._reduced_ = self.apply_reduction(self._unreduced_.data)
		self.update_replicate_reduction()
	def __init__(self, *args, **kwargs):
		self.impose_default('parsed_data',lfu.data_container(),**kwargs)
		self.impose_default('input_data_file','051214_P2R2.txt',**kwargs)			# TO EXPEDITE TESTING!!
		self.impose_default('input_tmpl_file','061114 template for calc.txt',**kwargs)			# TO EXPEDITE TESTING!!
		#self.impose_default('input_data_file','',**kwargs)
		#self.impose_default('input_tmpl_file','',**kwargs)
		self.settings_manager = lset.settings_manager(parent = self, 
					filename = "plate_reader_analyzer_settings.txt")
		self.settings = self.settings_manager.read_settings()
		self.current_tab_index = 0
		self.current_tab_index_outputs = 0
		self.postprocess_plan = lpp.post_process_plan(
			label = 'Post Process Plan', parent = self)
		self.postprocess_plan._display_for_children_ = True
		lfu.modular_object_qt.__init__(self, *args, **kwargs)
		self._children_ = [self.postprocess_plan]
	def apply_reduction(self, unred):
		read = self.parent.parent.read['layout'].read
		flat = lfu.flatten(read['table'])
		well_cnt = len(flat)
		reduced = unred[:len(unred) - well_cnt]	# keep condition data as-is; replicate-averaged well scalars are appended below
		con_offset = len(reduced)
		uniq = lfu.uniqfy(flat)
		layout = OrderedDict()
		for dex, key in enumerate(flat):
			if key in layout: layout[key].append(dex + con_offset)
			else: layout[key] = [dex + con_offset]
		new = lgeo.scalars_from_labels(layout.keys())
		for ndex, key in enumerate(layout.keys()):
			rel_dexes = layout[key]
			rel_dater = [unred[d] for d in rel_dexes]
			zi = zip(*[r.scalars for r in rel_dater])
			new[ndex].scalars = np.array([np.mean(z) for z in zi])
		reduced.extend(new)
		red = lfu.data_container(data = reduced)
		return red
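
For reference, the 'H:MM:SS' to minutes conversion performed by hms_to_mins inside deep_parse can be exercised on its own; the sketch below simply restates that helper outside the class for illustration:

# standalone restatement of the hms_to_mins helper used by deep_parse above
def hms_to_mins(hms):
	convs = [60.0, 1.0, 1.0 / 60.0]
	return [sum(float(part) * conv for part, conv in zip(stamp.split(':'), convs))
		for stamp in hms]

print hms_to_mins(['0:30:00', '1:00:30'])	# prints [30.0, 60.5]
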
Example #8
    def on_write_data(self, *args, **kwargs):
        if not self.domain:
            self.domain = ["x"]
            self.codomains = ["y1", "y2", "y3"]
            self.domain_data = np.linspace(0, 10, 100)
            self.codomain_datas = [
                np.exp(self.domain_data) / np.exp(self.domain_data[-1]),
                np.sin(self.domain_data),
                np.cos(2 * self.domain_data),
            ]
            print "USED FAKE TEST DATA!!"

        # the data should be normalized -1, 1 on both axes
        x = self.domain_data
        ys = self.codomain_datas
        for y in ys:
            plt.plot(x, y)
        plt.show()
        self.out_data = lfu.data_container(
            data=[
                lgeo.scalars(label=lab, scalars=dat)
                for lab, dat in zip(self.domain + self.codomains, [self.domain_data] + self.codomain_datas)
            ]
        )
        lf.save_pkl_object(self.out_data, self.out_filename)
	def get_single_well_data(self):
		# THIS FUNCTION WILL JUST RETURN THE SELECTED WELLS PLOT DATA
		od_dex = self.get_selected_well()
		time_ = self.data.data[0]
		od = self.data.data[od_dex]
		dbl = self._doubling_times_[od_dex]
		gro = self._growth_rates_[od_dex]
		lo, hi = self._thresholds_[od_dex]
		#od.scalars = od.scalars
		y_low = lgeo.scalars('__skip__', [lo]*2, color = 'black')
		#	[self.OD_threshold_low]*2, color = 'black')
		y_high = lgeo.scalars('__skip__', [hi]*2, color = 'black')
		#	[self.OD_threshold_high]*2, color = 'black')
		x_stack = lgeo.scalars('__skip__', 
			[time_.scalars.min(), time_.scalars.max()])
		# CALCULATE THE DOUBLING TIME AND ADD VERTICAL LINES FOR THE POINTS USED
		#  MAKE ALL THESE FLAT LINES DASHED OR SOMETHING
		#pdb.set_trace()
		data = lfu.data_container(
			domains = [time_, x_stack, x_stack], 
			codomains = [od, y_low, y_high], 
			doubling = dbl, growth_rate = gro, 
						thresholds = (lo, hi))
		return data
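
get_single_well_data only assembles the container; a caller still has to plot it. Assuming each scalars object exposes a .scalars sequence, as in the other examples, a hypothetical consumer might look like the sketch below (the project's real plotting code is not shown here):

# hedged sketch of a consumer for the container returned above; the attribute
# names domains/codomains/doubling match the keywords used in get_single_well_data
import matplotlib.pyplot as plt

def plot_single_well(data):
	for dom, cod in zip(data.domains, data.codomains):
		plt.plot(dom.scalars, cod.scalars)
	plt.title('doubling time: ' + str(data.doubling))
	plt.show()
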
Example #10
    def on_write_data(self, *args, **kwargs):
        if not self.domain:
            self.domain = ['x']
            self.codomains = ['y1', 'y2', 'y3']
            self.domain_data = np.linspace(0, 10, 100)
            self.codomain_datas = [
                np.exp(self.domain_data) / np.exp(self.domain_data[-1]),
                np.sin(self.domain_data),
                np.cos(2 * self.domain_data)
            ]
            print 'USED FAKE TEST DATA!!'

        #the data should be normalized -1, 1 on both axes
        x = self.domain_data
        ys = self.codomain_datas
        for y in ys:
            plt.plot(x, y)
        plt.show()
        self.out_data = lfu.data_container(data=[
            lgeo.scalars(label=lab, scalars=dat)
            for lab, dat in zip(self.domain +
                                self.codomains, [self.domain_data] +
                                self.codomain_datas)
        ])
        lf.save_pkl_object(self.out_data, self.out_filename)
Example #11
def set_module_memory_(ensem):
	ensem._module_memory_ = [lfu.data_container(
		output_plan_selected_memory = 'Simulation', 
				variable_selected_memory = 'None', 
				vector_selected_memory = 'None')]
Example #12
def set_module_memory_(ensem):
    ensem._module_memory_ = [
        lfu.data_container(
            output_plan_selected_memory="Simulation", variable_selected_memory="None", vector_selected_memory="None"
        )
    ]
Example #13
def set_module_memory_(ensem):
	ensem._module_memory_ = [lfu.data_container(
		output_plan_selected_memory = 'Simulation')]
Example #14
	def __init__(self, *args, **kwargs):
		#self.impose_default('current_tab_index', 0, **kwargs)
		#self.current_tab_index = 0
		# replace __dict__ before any attribute assignments so the values
		# set below are not discarded when the dictionary is swapped in
		self.__dict__ = lfu.dictionary()
		self.aborted = False
		self.data_pool = lfu.data_container(
			data = [], postproc_data = [])
		self.data_scheme = None
		if 'parent' in kwargs: self.parent = kwargs['parent']
		self.cancel_make = False
		self.skip_simulation = False
		self.mcfg_path = ''
		#self.num_trajectories = 1
		num_traj = lset.get_setting('trajectory_count')
		if num_traj: self.num_trajectories = num_traj
		else: self.num_trajectories = 1
		self.data_pool_descr = ''
		self.treebook_memory = [0, [], []]
		self._module_memory_ = []

		self.simulation_plan = simulation_plan(parent = self)
		self.output_plan = lo.output_plan(
			label = 'Simulation', parent = self)
		self.fitting_plan = lfr.fit_routine_plan(parent = self)
		self.cartographer_plan = lgeo.cartographer_plan(
				label = 'Parameter Scan', parent = self)
		self.postprocess_plan = lpp.post_process_plan(
			label = 'Post Process Plan', parent = self, 
				_always_sourceable_ = ['simulation'])
		self.multiprocess_plan = lmp.multiprocess_plan(parent = self)

		self.run_params = lfu.dictionary(parent = self)
		self.run_params['end_criteria'] = \
			self.simulation_plan.end_criteria
		self.run_params['capture_criteria'] = \
			self.simulation_plan.capture_criteria
		self.run_params['plot_targets'] = \
			self.simulation_plan.plot_targets
		self.run_params['output_plans'] = {
			'Simulation' : self.output_plan}
		self.run_params['fit_routines'] = \
				self.fitting_plan.routines
		self.run_params['post_processes'] = \
			self.postprocess_plan.post_processes
		self.run_params['p_space_map'] = None
		self.run_params['multiprocessing'] = None

		self.__dict__.create_partition('template owners', 
			['output_plan', 'fitting_plan', 'cartographer_plan', 
			'postprocess_plan', 'multiprocess_plan', 'run_params', 
			'simulation_plan'])

		if 'label' not in kwargs: kwargs['label'] = 'ensemble'

		if 'module_options' in kwargs:
			opts = kwargs['module_options']

		else:
			print 'no modules detected; requesting from manager'
			opts = self.parent.find_module_options()

		if len(opts) == 0:
			lgd.message_dialog(None, 
				'No module options detected!', 'Problem')
			self.cancel_make = True
			return

		elif len(opts) == 1: module = opts[0]
		else:
			module_choice_container = lfu.data_container(data = opts[0])
			module_options_templates = [lgm.interface_template_gui(
					layout = 'horizontal', 
					widgets = ['radio'], 
					verbosities = [0], 
					labels = [opts], 
					initials = [[module_choice_container.data]], 
					instances = [[module_choice_container]], 
					keys = [['data']], 
					box_labels = ['Ensemble Module'], 
					minimum_sizes = [[(250, 50)]])]
			mod_dlg = lgd.create_dialog(title = 'Choose Ensemble Module', 
				templates = module_options_templates, variety = 'templated')
			module = mod_dlg()
			if module: module = module_choice_container.data
			else:
				self.cancel_make = True
				return

		self.impose_default('module', module, **kwargs)
		self._children_ = [self.simulation_plan, self.output_plan, 
						self.fitting_plan, self.cartographer_plan, 
					self.postprocess_plan, self.multiprocess_plan]
		self.load_module(reset_params = True)
		self.mcfg_dir = os.path.join(os.getcwd(), self.module)
		if not os.path.isdir(self.mcfg_dir): self.mcfg_dir = os.getcwd()
		lfu.modular_object_qt.__init__(self, *args, **kwargs)
		self.data_pool_id = lfu.get_new_pool_id()
		self.data_pool_pkl = os.path.join(os.getcwd(), 'data_pools', 
				'.'.join(['data_pool', self.data_pool_id, 'pkl']))
Example #15
    def enact_plan(self, *args):
        system = args[0]
        proper_targets = self.find_proper_targets()
        to_be_outted = []
        if not self.flat_data:  #if the list of data objects is not flat (system.data is the list)
            #self.to_be_outted has a 3rd element pointing to system within non-flat pool
            #system will be an object with attribute .data but .data is a non-flat list!
            #put each data list into a flat list of objects with flat lists for data attributes

            #targs = self.get_target_labels()
            for traj in system.data:
                data_container = lfu.data_container(data=traj)
                self.update_filenames()
                proper_paths = self.find_proper_paths()
                if self.output_vtk:
                    to_be_outted.append(
                        (proper_paths['vtk'], 0, data_container))

                if self.output_pkl:
                    to_be_outted.append(
                        (proper_paths['pkl'], 1, data_container))

                if self.output_txt:
                    to_be_outted.append(
                        (proper_paths['txt'], 2, data_container))

                if self.output_plt:
                    to_be_outted.append(
                        (proper_paths['plt'], 3, data_container))

            if 3 in [out[1] for out in to_be_outted]:
                self.writers[3].get_plt_window()

            for out in to_be_outted:
                self.writers[out[1]](out[2], out[0], proper_targets[out[1]])

            if 3 in [out[1] for out in to_be_outted]:
                self.writers[3].plt_window()

        else:
            self.update_filenames()
            proper_paths = self.find_proper_paths()
            #if the list of data objects is flat (system.data is the list)
            #self.to_be_outted has only 2 elements since data is already flat
            if self.output_vtk:
                to_be_outted.append((proper_paths['vtk'], 0))

            if self.output_pkl:
                to_be_outted.append((proper_paths['pkl'], 1))

            if self.output_txt:
                to_be_outted.append((proper_paths['txt'], 2))

            if self.output_plt:
                to_be_outted.append((proper_paths['plt'], 3))

            if 3 in [out[1] for out in to_be_outted]:
                self.writers[3].get_plt_window()

            for out in to_be_outted:
                self.writers[out[1]](system, out[0], proper_targets[out[1]])

            if 3 in [out[1] for out in to_be_outted]:
                self.writers[3].plt_window()
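
enact_plan leans on a dispatch table: self.writers is assumed to be an index-aligned sequence of writer callables (vtk = 0, pkl = 1, txt = 2, plt = 3), and each queued tuple carries the output path, the writer index, and, for non-flat pools, a per-trajectory container. A condensed, hypothetical sketch of the dispatch step both branches share:

# hedged sketch of the dispatch loop used by enact_plan above; 'writers'
# stands in for self.writers, which these examples never define
def dispatch_outputs(writers, to_be_outted, system, proper_targets):
	for out in to_be_outted:
		path, windex = out[0], out[1]
		# non-flat pools queue a third element (a per-trajectory container);
		# flat pools fall back to the system object itself
		source = out[2] if len(out) > 2 else system
		writers[windex](source, path, proper_targets[windex])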