def get_data_count(self):
    """Return a list with the number of data points in each data set.

    For dateplots the numbered query keys ('query', 'query1', ...) in the
    graph settings are walked until one is missing; otherwise one count is
    fetched per id in ``self.id_list``.  Each configured query is rewritten
    into ``select count(*) ...`` by keeping everything from its FROM clause
    onwards.

    Returns:
        list: one row count (int) per data set
    """
    out = []
    # If it is a dateplot
    if self.global_settings['default_xscale'] == 'dat':
        self.params = self.from_to.copy()
        self.params['id'] = self.id_list[0]
        self.gs = graphSettings(self.typed, params=self.params).settings
        new_key = key = 'query'
        counter = 0
        # dict.has_key() was removed in Python 3; 'in' works in both
        while new_key in self.gs:
            query = self.gs[new_key]
            # Keep only the FROM clause onwards and count the rows
            query = 'select count(*) ' + query[query.lower().find('from'):]
            out.append(self._result_from_query(query)[0][0])
            counter += 1
            new_key = key + str(counter)
    else:
        for idl in self.id_list:
            # Reset return variables
            self.data = None
            self.gs = None
            self.info = {}
            self.params = self.from_to.copy()
            self.params['id'] = idl
            self.gs = graphSettings(self.typed, params=self.params).settings
            query = self.gs['query']
            query = 'select count(*) ' + query[query.lower().find('from'):]
            out.append(self._result_from_query(query)[0][0])
    return out
def __get_data_xyplot_single(self, plot_n):
    """Return the data structure for a single xy plot.

    This method needs to be separated out from _get_data_xyplot because
    it is also called manually.
    """
    # lgs is the local (data set specific) graph settings
    gs = graphSettings(self.type_)
    meta_info = self._get_meta_info(gs, id_=plot_n)
    lgs = graphSettings(self.type_, params=meta_info)

    # Choose the query: stay with the default unless one of the
    # 'extra*' entries has a 'match' value equal to the meta info value
    # of the column configured in the graph settings
    query = lgs['queries']['default']
    for key, value in lgs['queries'].items():
        is_extra = key.find('extra') == 0
        if is_extra and value['match'] == meta_info[lgs['queries']['column']]:
            query = value['query']

    # Single plot data structure
    single_plot = {
        'key': 'dateplot' + str(plot_n),
        'lgs': lgs,
        'meta': meta_info,
        'data': array(self._result_from_query(query)),
    }
    return single_plot
def __get_data_xyplot_single(self, plot_n):
    """Return the plot data structure for one xy plot.

    Kept as its own method (instead of living inside _get_data_xyplot)
    because it is also called manually.
    """
    # Fetch the general settings first, then the local (data set
    # specific) settings seeded with the meta info
    gs = graphSettings(self.type_).settings
    meta_info = self._get_meta_info(gs, id_=plot_n)
    lgs = graphSettings(self.type_, params=meta_info).settings

    # Pick the query: use an 'extra*' query whenever its 'match' value
    # equals the meta info value for the configured column, otherwise
    # keep the default one
    queries = lgs['queries']
    query = queries['default']
    for name, candidate in queries.items():
        if not name.startswith('extra'):
            continue
        if candidate['match'] == meta_info[queries['column']]:
            query = candidate['query']

    # Single plot data structure
    return {'key': 'dateplot' + str(plot_n),
            'lgs': lgs,
            'meta': meta_info,
            'data': array(self._result_from_query(query))}
def get_data_count(self):
    """Return a list with the number of data points in each data set.

    For dateplots the numbered query keys ('query', 'query1', ...) are
    walked until one is missing; otherwise one count is fetched per id
    in ``self.id_list``.  Each query is turned into a row count by
    replacing its select list with ``count(*)``.

    Returns:
        list: one row count (int) per data set
    """
    out = []
    # If it is a dateplot
    if self.global_settings['default_xscale'] == 'dat':
        self.params = self.from_to.copy()
        self.params['id'] = self.id_list[0]
        self.gs = graphSettings(self.typed, params=self.params).settings
        new_key = key = 'query'
        counter = 0
        # dict.has_key() was removed in Python 3; 'in' works in both
        while new_key in self.gs:
            query = self.gs[new_key]
            # Keep only the FROM clause onwards and count the rows
            query = 'select count(*) ' + query[query.lower().find('from'):]
            out.append(self._result_from_query(query)[0][0])
            counter += 1
            new_key = key + str(counter)
    else:
        for idl in self.id_list:
            # Reset return variables
            self.data = None
            self.gs = None
            self.info = {}
            self.params = self.from_to.copy()
            self.params['id'] = idl
            self.gs = graphSettings(self.typed, params=self.params).settings
            query = self.gs['query']
            query = 'select count(*) ' + query[query.lower().find('from'):]
            out.append(self._result_from_query(query)[0][0])
    return out
def __get_data_xyplot_single(self, plot_n):
    """Return the plot data structure for one xy plot.

    This method needs to be separated out from _get_data_xyplot because
    it is also called manually.
    """
    # General settings first, then the local (data set specific)
    # settings seeded with the meta info
    gs = graphSettings(self.type_).settings
    meta_info = self._get_meta_info(gs, id_=plot_n)
    lgs = graphSettings(self.type_, params=meta_info).settings

    # Pick the query: default unless one of the 'extra*' entries has a
    # 'match' value equal to the meta info value for the configured column
    queries = lgs['queries']
    query = queries['default']
    for name, candidate in queries.items():
        if not name.startswith('extra'):
            continue
        if candidate['match'] == meta_info[queries['column']]:
            query = candidate['query']

    # Quick hack to provide axis zooming directly in the query by
    # splicing a BETWEEN condition in right after WHERE
    bounds = self.o['xscale_bounding']
    if bounds is not None and bounds[1] > bounds[0]:
        # The x column name is the first item of the select list
        x_column = query.split(' ')[1].split(',')[0]
        query_addition = ' {0} between {1} and {2} and'.\
            format(x_column, str(bounds[0]), str(bounds[1]))
        # NOTE(review): if the query contains no WHERE clause, find()
        # returns -1 and the condition is spliced in at position 4 --
        # confirm that all queries used here contain a WHERE clause
        insert_at = query.lower().find('where') + 5
        query = query[:insert_at] + query_addition + query[insert_at:]

    # Single plot data structure
    return {'key': 'dateplot' + str(plot_n),
            'lgs': lgs,
            'meta': meta_info,
            'data': array(self._result_from_query(query))}
def get_data(self):
    """Yield the data sets one at a time.

    Each yielded item is a dict with keys 'data' (numpy array of query
    results), 'gs' (graph settings) and 'info' (per-plot info such as
    'on_the_right' and, for dateplots with an 'ordering' entry, 'color').

    For dateplots the numbered query keys ('query', 'query1', ...) are
    walked; otherwise one data set is produced per id in self.id_list.
    """
    # If it is a dateplot
    if self.global_settings['default_xscale'] == 'dat':
        self.params = self.from_to.copy()
        self.params['id'] = self.id_list[0]
        self.gs = graphSettings(self.typed, params=self.params).settings
        new_key = key = 'query'
        counter = 0
        # dict.has_key() was removed in Python 3; 'in' works in both
        while new_key in self.gs:
            self.data = None
            self.info = {}
            self.data = array(self._result_from_query(self.gs[new_key]))
            if 'ordering' in self.gs:
                # 'ordering' is a comma separated list of
                # 'location|color' entries, one per query
                (location, color
                 ) = self.gs['ordering'].split(',')[counter].split('|')
                self.info['on_the_right'] = location == 'right'
                self.info['color'] = color
            else:
                self.info['on_the_right'] = False
            yield {'data': self.data, 'gs': self.gs, 'info': self.info}
            counter += 1
            new_key = key + str(counter)
    else:
        for idl in self.id_list:
            # Reset return variables
            self.data = None
            self.gs = None
            self.info = {}
            self.params = self.from_to.copy()
            self.params['id'] = idl
            self.gs = graphSettings(self.typed, params=self.params).settings
            self.data = array(self._result_from_query(self.gs['query']))
            self.info = self._get_info()
            # Determine if it should go on the right y axis
            if 'right_y_axis_field_value' in self.gs:
                names_on_the_right = [
                    element.lower().strip() for element in
                    self.gs['right_y_axis_field_value'].split(',')
                ]
                field_value = self.info[
                    self.gs['right_y_axis_field_name']].lower()
                self.info['on_the_right'] = field_value in names_on_the_right
            else:
                self.info['on_the_right'] = False
            # _process_data() acts on self.data
            self._process_data(idl)
            yield {'data': self.data, 'gs': self.gs, 'info': self.info}
def get_data(self):
    """Yield the data sets one at a time.

    Each yielded item is a dict with keys 'data' (numpy array of query
    results), 'gs' (graph settings) and 'info' (per-plot info such as
    'on_the_right' and, for dateplots with an 'ordering' entry, 'color').
    """
    # If it is a dateplot
    if self.global_settings['default_xscale'] == 'dat':
        self.params = self.from_to.copy()
        self.params['id'] = self.id_list[0]
        self.gs = graphSettings(self.typed, params=self.params).settings
        new_key = key = 'query'
        counter = 0
        # dict.has_key() was removed in Python 3; 'in' works in both
        while new_key in self.gs:
            self.data = None
            self.info = {}
            self.data = array(self._result_from_query(self.gs[new_key]))
            if 'ordering' in self.gs:
                # 'ordering' is a comma separated list of
                # 'location|color' entries, one per query
                (location, color
                 ) = self.gs['ordering'].split(',')[counter].split('|')
                self.info['on_the_right'] = location == 'right'
                self.info['color'] = color
            else:
                self.info['on_the_right'] = False
            yield {'data': self.data, 'gs': self.gs, 'info': self.info}
            counter += 1
            new_key = key + str(counter)
    else:
        for idl in self.id_list:
            # Reset return variables
            self.data = None
            self.gs = None
            self.info = {}
            self.params = self.from_to.copy()
            self.params['id'] = idl
            self.gs = graphSettings(self.typed, params=self.params).settings
            self.data = array(self._result_from_query(self.gs['query']))
            self.info = self._get_info()
            # Determine if it should go on the right y axis
            if 'right_y_axis_field_value' in self.gs:
                names_on_the_right = [
                    element.lower().strip() for element in
                    self.gs['right_y_axis_field_value'].split(',')
                ]
                field_value = self.info[
                    self.gs['right_y_axis_field_name']].lower()
                self.info['on_the_right'] = field_value in names_on_the_right
            else:
                self.info['on_the_right'] = False
            # _process_data() acts on self.data
            self._process_data(idl)
            yield {'data': self.data, 'gs': self.gs, 'info': self.info}
def __init__(self, typed, id_list=None, from_to=None, transform_x=None,
             flip_x=None, change_t_scale=None, offsets=None,
             as_function_of_t=False, shift_temp_unit=False,
             shift_be_ke=False):
    """Store the options, fetch the graph settings, fill in default
    from/to times for dateplots and open the database connection.

    The previous signature used mutable default arguments
    (id_list=[], from_to={'from': None, 'to': None}, offsets={}).
    Because this method writes into from_to below, the shared default
    dict was modified and the change leaked into every later call.
    None sentinels are backward compatible and avoid that bug.
    """
    if id_list is None:
        id_list = []
    if from_to is None:
        from_to = {'from': None, 'to': None}
    if offsets is None:
        offsets = {}

    # From init
    self.typed = typed
    self.transform_x = transform_x
    self.flip_x = flip_x
    self.change_t_scale = change_t_scale
    self.offsets = offsets
    self.global_settings = None
    self.from_to = from_to
    self.as_function_of_t = as_function_of_t
    self.shift_temp_unit = shift_temp_unit
    self.shift_be_ke = shift_be_ke
    self.c_to_k = 273.15  # Celsius to Kelvin offset
    if len(id_list) == 0:
        self.id_list = [None]
    else:
        self.id_list = id_list

    # Fetch the graphsettings
    self.params = from_to.copy()
    self.params['id'] = self.id_list[0]
    self.global_settings = graphSettings(self.typed,
                                         params=self.params).settings

    # For dateplots, fill in "from" "to" values if too few are given.
    # If missing, "to" becomes now and "from" becomes 1 day ago
    if self.global_settings['default_xscale'] == 'dat':
        if not self.from_to['from']:
            self.from_to['from'] = (datetime.now()-timedelta(days=1)).\
                strftime('%Y-%m-%d %H:%M')
        if not self.from_to['to']:
            self.from_to['to'] = datetime.now().strftime('%Y-%m-%d %H:%M')

    # Added here to make sure that the updated from to values gets taken
    # into account in the hash in plot.py
    self.global_settings['from_to'] = self.from_to

    # Create MySQL session and cursor
    db = MySQLdb.connect(user="******", passwd="cinf_reader",
                         db="cinfdata")
    self.cursor = db.cursor()
def _get_data_dateplot(self):
    """Populate self.data with date plot data for both y axes."""
    gs = graphSettings(self.type_, params=self.from_to_dict).settings
    for side in ('left', 'right'):
        for plot_n in self.o[side + '_plotlist']:
            plot_key = 'dateplot' + str(plot_n)
            lgs = gs[plot_key]
            rows = self._result_from_query(lgs['query'])
            self.data[side].append(
                {'key': plot_key, 'lgs': lgs, 'data': array(rows)})
    return
def _get_data_dateplot(self):
    """Fill self.data with the date plot data for the left and right axes."""
    settings = graphSettings(self.type_, params=self.from_to_dict)
    for side in ('left', 'right'):
        plotlist = self.o[side + '_plotlist']
        for plot_n in plotlist:
            name = 'dateplot' + str(plot_n)
            lgs = settings[name]
            self.data[side].append({
                'key': name,
                'lgs': lgs,
                'data': array(self._result_from_query(lgs['query'])),
            })
    return
def __init__(self, typed, id_list=None, from_to=None, transform_x=None,
             flip_x=None, change_t_scale=None, offsets=None,
             as_function_of_t=False, shift_temp_unit=False,
             shift_be_ke=False):
    """Store the options, fetch the graph settings, fill in default
    from/to times for dateplots and open the database connection.

    The previous signature used mutable default arguments
    (id_list=[], from_to={'from': None, 'to': None}, offsets={}).
    Because this method writes into from_to below, the shared default
    dict was modified and the change leaked into every later call.
    None sentinels are backward compatible and avoid that bug.
    """
    if id_list is None:
        id_list = []
    if from_to is None:
        from_to = {'from': None, 'to': None}
    if offsets is None:
        offsets = {}

    # From init
    self.typed = typed
    self.transform_x = transform_x
    self.flip_x = flip_x
    self.change_t_scale = change_t_scale
    self.offsets = offsets
    self.global_settings = None
    self.from_to = from_to
    self.as_function_of_t = as_function_of_t
    self.shift_temp_unit = shift_temp_unit
    self.shift_be_ke = shift_be_ke
    self.c_to_k = 273.15  # Celsius to Kelvin offset
    if len(id_list) == 0:
        self.id_list = [None]
    else:
        self.id_list = id_list

    # Fetch the graphsettings
    self.params = from_to.copy()
    self.params['id'] = self.id_list[0]
    self.global_settings = graphSettings(self.typed,
                                         params=self.params).settings

    # For dateplots, fill in "from" "to" values if too few are given.
    # If missing, "to" becomes now and "from" becomes 1 day ago
    if self.global_settings['default_xscale'] == 'dat':
        if not self.from_to['from']:
            self.from_to['from'] = (datetime.now()-timedelta(days=1)).\
                strftime('%Y-%m-%d %H:%M')
        if not self.from_to['to']:
            self.from_to['to'] = datetime.now().strftime('%Y-%m-%d %H:%M')

    # Added here to make sure that the updated from to values gets taken
    # into account in the hash in plot.py
    self.global_settings['from_to'] = self.from_to

    # Create MySQL session and cursor
    db = MySQLdb.connect(user="******", passwd="cinf_reader",
                         db="cinfdata")
    self.cursor = db.cursor()
def __init__(self):
    """Turn all the input options into appropriate python data
    structures and initiate a few helper objects.
    """
    parser = OptionParser()
    # Add the options to the option parser
    # Option help at https://cinfwiki.fysik.dtu.dk/cinfwiki/Software/
    # DataWebPageDeveloperDocumentation#plot.py
    for option_name in ('type',                   # String option
                        'boolean_options',        # Boolean options
                        'left_plotlist',          # int list
                        'right_plotlist',         # int list
                        'xscale_bounding',        # Float pair
                        'left_yscale_bounding',   # Float pair
                        'right_yscale_bounding',  # Float pair
                        'from_to'):       # Time stamp pair NOT HANDLED
        parser.add_option('--' + option_name)

    # Parse the options
    options, _args = parser.parse_args()

    # Process options into self.o - all options are given as strings
    # and need to be converted into other data types
    self.o = {}

    # Boolean options arrive as ',key:checked,...' (leading dummy item)
    for item in options.boolean_options.split(',')[1:]:
        name, state = item.split(':')
        self.o[name] = state == 'checked'

    # Bounding options: blank entries count as '0'; a pair of zeros
    # means that no bounds were requested
    for bound in ('xscale_bounding', 'left_yscale_bounding',
                  'right_yscale_bounding'):
        values = ['0' if entry == '' else entry
                  for entry in options.__dict__[bound].split(',')]
        if values == ['0', '0']:
            self.o[bound] = None
        else:
            self.o[bound] = tuple(float(entry) for entry in values)

    # Plot lists: drop the leading dummy element and keep only the
    # positive integer ids
    for plotlist in ('left_plotlist', 'right_plotlist'):
        entries = options.__dict__[plotlist].split(',')[1:]
        self.o[plotlist] = [int(entry) for entry in entries
                            if int(entry) > 0]

    # Plain string options
    for key in ('type',):
        self.o[key] = options.__dict__[key]

    # From_to
    self.o['from_to'] = options.from_to.split(',')
    # Done processing options

    # If a dateplot is called without (valid) datetimes, default to
    # [now-1d, now]
    try:
        strptime(self.o['from_to'][0], '%Y-%m-%d %H:%M')
        strptime(self.o['from_to'][1], '%Y-%m-%d %H:%M')
    except ValueError:
        self.o['from_to'][0] = strftime('%Y-%m-%d %H:%M',
                                        localtime(time()-24*3600))
        self.o['from_to'][1] = strftime('%Y-%m-%d %H:%M')

    # Get a (g)eneral (g)raph (s)ettings object
    # (Is not populated with data set specific values)
    self.ggs = graphSettings(self.o['type']).settings

    # Create database backend object
    self.db = dataBaseBackend(options=self.o, ggs=self.ggs)
    self.defaults = {}
def __init__(self):
    """Parse the command line options into python data structures and
    create the helper objects (graph settings and database backend).
    """
    # Create optionparser
    parser = OptionParser()
    # Add the options to the option parser
    # Option help at https://cinfwiki.fysik.dtu.dk/cinfwiki/Software/
    # DataWebPageDeveloperDocumentation#plot.py
    for option_name in ('type',                   # String option
                        'boolean_options',        # Boolean options
                        'left_plotlist',          # int list
                        'right_plotlist',         # int list
                        'xscale_bounding',        # Float pair
                        'left_yscale_bounding',   # Float pair
                        'right_yscale_bounding',  # Float pair
                        'from_to',        # Time stamp pair NOT HANDLED
                        'image_format',           # String options
                        'manual_labels_n_titel',  # Labels/title for mpl
                        'input_id',       # Database id for plugin input
                        'reference_lines'):       # Reference lines
        parser.add_option('--' + option_name)
    # KARL TODO To __init__ add command line options to recieve
    # the reference line sets that should be displayed and parse it

    # Parse the options
    options, _args = parser.parse_args()

    # Process options into self.o - all options are given as strings
    # and need to be converted into other data types
    self.o = {}

    # Boolean options arrive as ',key:checked,...' (leading dummy item)
    for item in options.boolean_options.split(',')[1:]:
        name, state = item.split(':')
        self.o[name] = state == 'checked'

    # Bounding options: blank entries count as '0'; a pair of zeros
    # means that no bounds were requested
    for bound in ('xscale_bounding', 'left_yscale_bounding',
                  'right_yscale_bounding'):
        values = ['0' if entry == '' else entry
                  for entry in options.__dict__[bound].split(',')]
        if values == ['0', '0']:
            self.o[bound] = None
        else:
            self.o[bound] = tuple(float(entry) for entry in values)

    # Plot lists: drop the leading dummy element and keep only the
    # positive integer ids
    for plotlist in ('left_plotlist', 'right_plotlist'):
        entries = options.__dict__[plotlist].split(',')[1:]
        self.o[plotlist] = [int(entry) for entry in entries
                            if int(entry) > 0]

    # Reference lines: strip surrounding commas before splitting
    self.o['reference_lines'] = \
        options.reference_lines.strip(',').split(',')

    # Plain string options
    for key in ('type', 'image_format'):
        self.o[key] = options.__dict__[key]
    # Manual labels and title: comma separated 'key=value' items
    for opt in options.manual_labels_n_titel.split(','):
        parts = opt.split('=')
        self.o[parts[0]] = parts[1]

    # From_to and the database id for plugin input
    self.o['from_to'] = options.from_to.split(',')
    self.o['input_id'] = int(options.input_id)
    # Done processing options

    # Get a (g)eneral (g)raph (s)ettings object
    # (Is not populated with data set specific values)
    self.ggs = graphSettings(self.o['type'])

    # If a dateplot is called without (valid) datetimes, default to
    # [now - default_time (fallback 24 h), now]
    try:
        strptime(self.o['from_to'][0], '%Y-%m-%d %H:%M')
        strptime(self.o['from_to'][1], '%Y-%m-%d %H:%M')
    except ValueError:
        start = time()-24*3600
        # NOTE(review): self.ggs is a graphSettings object here, so
        # has_key is kept as-is -- confirm it exposes dict semantics
        if self.ggs.has_key('default_time'):
            default_time = int(self.ggs['default_time'])
            start = time()-default_time*3600
        self.o['from_to'][0] = strftime('%Y-%m-%d %H:%M',
                                        localtime(start))
        self.o['from_to'][1] = strftime('%Y-%m-%d %H:%M')

    # Create database backend object
    self.db = dataBaseBackend(options=self.o, ggs=self.ggs)
    self.defaults = {}
def __init__(self):
    """Parse the command line options into python data structures and
    create the graph settings and database backend helper objects.
    """
    # Create optionparser
    parser = OptionParser()
    # Add the options to the option parser
    # Option help at https://cinfwiki.fysik.dtu.dk/cinfwiki/Software/
    # DataWebPageDeveloperDocumentation#plot.py
    parser.add_option('--type')  # String option
    parser.add_option('--boolean_options')  # Boolean options
    parser.add_option('--left_plotlist')  # int list
    parser.add_option('--right_plotlist')  # int list
    parser.add_option('--xscale_bounding')  # Float pair
    parser.add_option('--left_yscale_bounding')  # Float pair
    parser.add_option('--right_yscale_bounding')  # Float pair
    parser.add_option('--from_to')  # Time stamp pair NOT HANDLED
    parser.add_option('--image_format')  # String options
    parser.add_option('--manual_labels_n_titel')  # Labels and title for mpl
    parser.add_option('--input_id')  # Database id for plugin input

    # Parse the options
    (options, args) = parser.parse_args()

    ### Process options into self.o - all options are given as strings,
    ### and they need to be converted into other data types
    self.o = {}

    # Parse boolean options: ',key:checked,...' with a leading dummy
    for pair in options.boolean_options.split(',')[1:]:
        key, value = pair.split(':')
        self.o[key] = value == 'checked'

    # Parse bounds; blank entries count as '0' and a pair of zeros
    # means that no bounds were given
    bkeys = [s + '_bounding'
             for s in ['xscale', 'left_yscale', 'right_yscale']]
    for bound in bkeys:
        bounding = [b if b != '' else '0'
                    for b in options.__dict__[bound].split(',')]
        if bounding != ['0', '0']:
            self.o[bound] = tuple([float(b) for b in bounding])
        else:
            self.o[bound] = None

    # Parse lists: drop the leading dummy element and keep ids > 0 only
    for plotlist in ['left_plotlist', 'right_plotlist']:
        self.o[plotlist] = [int(a) for a in
                            options.__dict__[plotlist].split(',')[1:]
                            if int(a) > 0]

    # Parse string options
    for key in ['type', 'image_format']:
        self.o[key] = options.__dict__[key]
    # Manual labels and title: comma separated 'key=value' items
    for opt in options.manual_labels_n_titel.split(','):
        self.o[opt.split('=')[0]] = opt.split('=')[1]

    # From_to
    self.o['from_to'] = options.from_to.split(',')
    # Database ID for plugin input
    self.o['input_id'] = int(options.input_id)
    ### Done processing options

    # Get a (g)eneral (g)raph (s)ettings object
    # (Is not populated with data set specific values)
    self.ggs = graphSettings(self.o['type']).settings

    # If a dateplot and called without (valid) datetimes fill them in
    # with [now - default_time (fallback 24 h), now]
    try:
        strptime(self.o['from_to'][0], '%Y-%m-%d %H:%M')
        strptime(self.o['from_to'][1], '%Y-%m-%d %H:%M')
    except ValueError:
        start = time()-24*3600
        # dict.has_key() was removed in Python 3; 'in' works in both
        if 'default_time' in self.ggs:
            default_time = int(self.ggs['default_time'])
            start = time()-default_time*3600
        self.o['from_to'][0] = strftime('%Y-%m-%d %H:%M',
                                        localtime(start))
        self.o['from_to'][1] = strftime('%Y-%m-%d %H:%M')

    ### Create database backend object
    self.db = dataBaseBackend(options=self.o, ggs=self.ggs)
    self.defaults = {}