Esempio n. 1
0
	def deep_parse(self, *args, **kwargs):
		"""Parse raw condition/well data columns into scalar objects.

		Each column is run through universal checks (empty-string
		removal, OVRFLW substitution) plus an optional key-specific
		filter, converted to a float array, and stored on scalars
		objects.  Results land in self._unreduced_ / self._reduced_.
		"""
		def empty_check(unic):
			# Drop empty-string entries before numeric conversion.
			return [x for x in unic if not x == '']
		def ovrflw_check(unic):
			# Replace overflow readings with a large sentinel value.
			return ['1000000.0' if 'OVRFLW' in x else x for x in unic]
		def _all_checks_(unic):
			# Checks applied to every column regardless of its key.
			return ovrflw_check(empty_check(unic))
		def hms_to_mins(hms):
			# Convert 'H:MM:SS' strings to minutes (floats).
			convs = [60.0, 1.0, 1.0/60.0]
			return [sum(float(part)*conv for part, conv
				in zip(stamp.split(':'), convs)) for stamp in hms]
		def time_filter(hms):
			# Drop repeated zero timestamps (the first entry is always
			# kept), then convert the remainder to minutes.
			hms0 = '0:00:00'
			hms = [min_ for d, min_ in
				enumerate(hms) if not
				min_ == hms0 or d == 0]
			return hms_to_mins(hms)
		known_filters = OrderedDict()
		known_filters['Time'] = time_filter
		known_filters['_all_'] = _all_checks_
		def filter_(key, dat):
			# Apply the universal checks, then any key-specific filter,
			# and return the column as a float32 array.
			dat = known_filters['_all_'](dat)
			if key in known_filters: dat = known_filters[key](dat)
			try: return np.array(dat, dtype='f')
			except ValueError:
				# Previously dropped into pdb.set_trace() here; raise a
				# diagnosable error identifying the offending column.
				raise ValueError(
					'non-numeric data in column %r: %r' % (key, dat))
		weldat = self._well_data_
		welkey = self._well_key_
		condat = self._cond_data_
		conkey = self._cond_key_
		con_data = lgeo.scalars_from_labels(conkey)
		for dex, key in enumerate(conkey):
			con_data[dex].scalars = filter_(key, condat[:,dex])
		wel_data = lgeo.scalars_from_labels(welkey)
		for dex, key in enumerate(welkey):
			wel_data[dex].scalars = filter_(key, weldat[:,dex])
		all_data = con_data + wel_data
		self._unreduced_ = lfu.data_container(data = all_data)
		self._reduced_ = self.apply_reduction(self._unreduced_.data)
		self.update_replicate_reduction()
Esempio n. 2
0
	def __init__(self, ensem, params = {}):
		self.ensemble = ensem
		#self.parameters = copy(params)
		self.parameters = params
		try: self.end_criteria = self.parameters['end_criteria']
		except KeyError: self.end_criteria = [lc.criterion_iteration()]
		try: self.capture_criteria = self.parameters['capture_criteria']
		except KeyError: self.capture_criteria = []
		try:
			self.end_bool_expression =\
				self.parameters['bool_expressions']['end']
			self.capt_bool_expression =\
				self.parameters['bool_expressions']['capt']

		except KeyError: 
			self.end_bool_expression = ''
			self.end_capture_expression = ''

		try:
			data = lgeo.scalars_from_labels(
				self.parameters['plot_targets'])

		except KeyError: print 'simulating with no resultant data!'
		self.bAbort = False
		self.time = []
		self.iteration = 0
		if not self.end_criteria:
			self.end_criteria = [lc.criterion_iteration()]

		self.determine_end_valid_data = (None, (None))
		#lfu.modular_object_qt.__init__(self, label = label, data = data)
		lfu.modular_object_qt.__init__(self, data = data)
Esempio n. 3
0
    def reduction(self, *args, **kwargs):
        """Bin args[0] along self.function_of and return per-bin means.

        Returns a list of scalars objects: element 0 holds the bin
        domain (from the final means_of pass), elements 1..n hold the
        mean of each target within each bin.
        """
        data = lgeo.scalars_from_labels(self.target_list)
        # default so an empty means_of cannot raise NameError below;
        # removed a leftover pdb.set_trace() debugging trap here
        bins = []
        for dex, mean_of in enumerate(self.means_of):
            bin_axes, mean_axes = select_for_binning(args[0], self.function_of,
                                                     mean_of)
            bins, vals = bin_scalars(bin_axes, mean_axes, self.bin_count,
                                     self.ordered)
            means = [mean(val) for val in vals]
            data[dex + 1].scalars = means

        data[0].scalars = bins
        return data
Esempio n. 4
0
	def reduction(self, *args, **kwargs):
		"""Bin args[0] along self.function_of and return per-bin means.

		Returns a list of scalars objects: element 0 holds the bin
		domain (from the final means_of pass), elements 1..n hold the
		mean of each target within each bin.
		"""
		data = lgeo.scalars_from_labels(self.target_list)
		# default so an empty means_of cannot raise NameError below;
		# removed a leftover pdb.set_trace() debugging trap here
		bins = []
		for dex, mean_of in enumerate(self.means_of):
			bin_axes, mean_axes = select_for_binning(
				args[0], self.function_of, mean_of)
			bins, vals = bin_scalars(bin_axes, mean_axes, 
							self.bin_count, self.ordered)
			means = [mean(val) for val in vals]
			data[dex + 1].scalars = means

		data[0].scalars = bins
		return data
Esempio n. 5
0
	def initialize(self, *args, **kwargs):
		"""Set up the annealing run.

		Builds the exponential cooling curve if one is not already
		present, runs the base fit_routine initialization, registers an
		'annealing temperature' data channel, and seeds the temperature
		from the curve at the current iteration.
		"""
		if not self.cooling_curve:
			final = self.fitted_criteria[0].max_iterations
			self.final_iteration = final
			# decay rate so the curve falls from max_temperature toward 1
			decay = -1.0 * np.log(self.max_temperature) / final
			domain = np.array(range(final))
			codomain = self.max_temperature * np.exp(decay * domain)
			self.cooling_curve = lgeo.scalars(
				label = 'cooling curve', scalars = codomain)

		fit_routine.initialize(self, *args, **kwargs)
		temp_channel = lgeo.scalars_from_labels(['annealing temperature'])
		self.data.extend(temp_channel)
		self.temperature = self.cooling_curve.scalars[self.iteration]
		self.parameter_space.initial_factor = self.temperature
Esempio n. 6
0
	def apply_reduction(self, unred):
		"""Average replicate wells according to the plate layout.

		Splits *unred* into condition scalars (kept as-is) and trailing
		well scalars, groups well indices sharing a layout key, and
		appends one element-wise mean trace per unique key.  Returns a
		data container holding the reduced list of scalars.
		"""
		read = self.parent.parent.read['layout'].read
		flat = lfu.flatten(read['table'])
		well_cnt = len(flat)
		# leading entries are replicate-averaged condition scalars;
		# the trailing well_cnt entries are individual wells
		reduced = unred[:len(unred)-well_cnt]
		con_offset = len(reduced)
		# removed dead code: uniq = lfu.uniqfy(flat) was never used
		# map each layout key to the unred indices of its replicates,
		# preserving first-seen order
		layout = OrderedDict()
		for dex, key in enumerate(flat):
			layout.setdefault(key, []).append(dex + con_offset)
		new = lgeo.scalars_from_labels(layout.keys())
		for ndex, key in enumerate(layout.keys()):
			rel_dater = [unred[d] for d in layout[key]]
			# element-wise mean across the replicate traces
			zi = zip(*[r.scalars for r in rel_dater])
			new[ndex].scalars = np.array([np.mean(z) for z in zi])
		reduced.extend(new)
		red = lfu.data_container(data = reduced)
		return red
Esempio n. 7
0
	def initialize(self, *args, **kwargs):
		"""Prepare the fit routine before iteration begins.

		Binds ensemble state, loads the data to fit to, builds the
		domain weights, optionally coarsens the parameter space
		according to self.regime, and initializes each metric.
		"""
		self.output.flat_data = True
		self.ensemble = self.parent.parent
		self.worker_count =\
			self.parent.parent.multiprocess_plan.worker_count
		self.proginy_count = 100
		# genetics path is currently disabled by the leading `False and`
		if False and self.ensemble.multiprocess_plan.use_plan and\
				not self.regime.endswith('magnitude'):
			self.use_genetics = True
			self.proginy_count = 100

		else: self.use_genetics = False
		#self.ensemble.data_pool = self.ensemble.set_data_scheme()
		self.run_targets = self.ensemble.run_params['plot_targets']
		self.data_to_fit_to = self.get_input_data(read = False)
		# pair of label lists: (input data labels, run plot targets)
		self.target_key = [[dat.label for dat in 
			self.data_to_fit_to], self.run_targets]

		def expo_weights(leng):
			# exponentially decaying weights across the domain
			dom_weight_max = 5.0
			dom_weight_x = np.linspace(dom_weight_max, 0, leng)
			return np.exp(dom_weight_x)

		def para_weights(leng):
			# parabolic weights: heaviest at the two domain endpoints
			dom_weight_max = 9.0
			x = np.linspace(0, leng, leng)
			y = x*(x - x[-1])
			y_0 = min(y)
			y = y - y_0
			y = dom_weight_max*y/max(y) + 1
			#for k in range(0, 2) + range(-3, -1): y[k] = 0.0
			return y

		def affi_weights(leng):
			# affine weights decreasing linearly from 5.0 to 1.0
			x = np.linspace(0, leng, leng)
			b = 5.0
			m = -4.0/x[-1]
			y = m*x + b
			return y

		def flat_weights(leng):
			# uniform weighting (all ones)
			y = [1.0 for val in range(leng)]
			return y

		fit_x_leng = len(self.data_to_fit_to[0].scalars)
		# alternate weighting schemes retained for experimentation
		#self.domain_weights = expo_weights(fit_x_leng)
		self.domain_weights = para_weights(fit_x_leng)
		#self.domain_weights = affi_weights(fit_x_leng)
		#self.domain_weights = flat_weights(fit_x_leng)
		self.iteration = 0
		self.timeouts = 0
		self.parameter_space =\
			self.parent.parent.cartographer_plan.parameter_space
		# coarse regimes replace the parameter space with a reduced one;
		# failure is reported via dialog, leaving the fine space in use
		if self.regime == 'coarse-magnitude':
			self.use_mean_fitting = False
			self.parameter_space, valid =\
				lgeo.generate_coarse_parameter_space_from_fine(
						self.parameter_space, magnitudes = True)
			if not valid:
				traceback.print_exc(file=sys.stdout)
				lgd.message_dialog(None, 
					'P-Spaced couldnt be coarsened!', 'Problem')

		elif self.regime == 'coarse-decimate':
			self.use_mean_fitting = False
			self.parameter_space, valid =\
				lgeo.generate_coarse_parameter_space_from_fine(
						self.parameter_space, decimates = True)
			if not valid:
				traceback.print_exc(file=sys.stdout)
				lgd.message_dialog(None, 
					'P-Spaced couldnt be coarsened!', 'Problem')

		elif self.regime == 'fine': pass
		print '\tstarted fit routine', self.label, 'regime', self.regime
		self.parameter_space.set_start_position()
		for metric in self.metrics:
			metric.initialize(self, *args, **kwargs)

		# one channel for the iteration count plus one per metric
		self.data = lgeo.scalars_from_labels(['fitting iteration'] +\
				[met.label + ' measurement' for met in self.metrics])