def parse_type(self, string, my_type):
    if my_type == 'int':
        return int(string)
    elif my_type == 'offset_now':
        value = int(string)
        if value > 0:
            return value
        else:
            return time.time() + value
    elif my_type == 'float':
        return float(string)
    elif my_type == 'eval':
        raise ValueError("sorry eval type not supported")
    elif my_type == 'datetime':
        return convert_to_datetime(string)
    elif my_type == 'timestamp':
        return to_timestamp(string)
    elif my_type == 'bool' or my_type == 'boolean':
        if not isinstance(string, str):
            return bool(string)
        if string.lower().strip() == 'false':
            return False
        elif string.lower().strip() == 'true':
            return True
        else:
            raise TypeError("Cannot convert string %s to boolean; valid "
                            "inputs are 'true' or 'false'." % string)
    else:
        return str(string)
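# All of the snippets in this section call convert_to_datetime() (and some
# call to_timestamp()), which are defined elsewhere in the project.  The
# sketch below is only an assumption of what such helpers might look like,
# included for readers following the code; it is not the project's actual
# implementation.
import calendar
import datetime
import time


def convert_to_datetime(value, fmt='%Y-%m-%d %H:%M:%S'):
    """Sketch: accept a datetime, epoch seconds, or a formatted string."""
    if isinstance(value, datetime.datetime):
        return value
    try:
        return datetime.datetime.utcfromtimestamp(float(value))
    except (TypeError, ValueError):
        return datetime.datetime(*time.strptime(value, fmt)[:6])


def to_timestamp(value):
    """Sketch: convert whatever convert_to_datetime accepts into epoch seconds."""
    return calendar.timegm(convert_to_datetime(value).timetuple())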
def upload_data(times, rsv, conn, interval, endTimeFunc):
    curs = conn.cursor()
    # Force the session to UTC so the inserted time ranges are unambiguous.
    curs.execute('set time_zone="+00:00"')
    for starttime in times:
        endtime = endTimeFunc(starttime)
        resources = query_rsv(rsv, 'rsv_sam_reliability',
            convert_to_datetime(starttime), convert_to_datetime(endtime))
        print "query sites"
        sites = query_rsv_site(rsv, 'wlcg_site_reliability', starttime,
            endtime)
        print "insert data for %s to %s" % (starttime, endtime)
        for site in sites:
            print site, sites[site]
            insert_data(curs, name=site, resource_type='site',
                time_length=interval,
                starttime=convert_to_datetime(starttime),
                endtime=convert_to_datetime(endtime), **sites[site])
        for resource in resources:
            insert_data(curs, name=resource, resource_type='resource',
                time_length=interval,
                starttime=convert_to_datetime(starttime),
                endtime=convert_to_datetime(endtime), **resources[resource])
        conn.commit()
def parse_type(self, string, my_type):
    if my_type == 'int':
        return int(string)
    elif my_type == 'float':
        return float(string)
    elif my_type == 'eval':
        raise ValueError("sorry eval type not supported")
    elif my_type == 'datetime':
        return convert_to_datetime(string)
    elif my_type == 'timestamp':
        return to_timestamp(string)
    else:
        return str(string)
def usage_statistics(sql_results, globals=globals(), **kw):
    """
    This data mining function is meant to calculate the current usage of
    each site, the maximum historical usage, and the mean historical usage.
    It is meant to be the results parser class of the
    facility_usage_statistics graph.
    """
    # Query the last 4 months or so of data
    starttime = convert_to_datetime(kw['starttime'])
    endtime = convert_to_datetime(kw['endtime'])
    diff = endtime - starttime
    span = 86400 * diff.days + diff.seconds
    kw2 = dict(kw)
    del kw2['results']
    del kw2['query']
    del kw2['fixed-height']
    del kw2['pivot_transform']
    kw2['span'] = span
    # cover 4 months (13 weeks)
    kw2['starttime'] = endtime - datetime.timedelta(7 * 13, 0)
    historical_info, _ = globals['GratiaBarQueries'].facility_hours_bar_smry(
        globals=globals, **kw2)
    results, metadata = simple_results_parser(sql_results, globals=globals,
        **kw)
    filtered_results = {}
    for pivot, val in results.items():
        historical_values = historical_info.get(pivot, {0: val}).values()
        if historical_values:
            historical_max = max(historical_values)
            historical_avg = numpy.average(historical_values)
        else:
            historical_max, historical_avg = 0, 0
        left_error = val - historical_avg
        right_error = historical_max - val
        filtered_results[pivot] = (val, left_error, right_error)
    return filtered_results, metadata
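# The (value, left_error, right_error) triples returned above map naturally
# onto asymmetric error bars.  Purely illustrative sketch: matplotlib is
# assumed and the sample numbers are invented, not real output.
import matplotlib.pyplot as plt

filtered_results = {'SiteA': (120.0, 20.0, 35.0), 'SiteB': (80.0, 5.0, 60.0)}
sites = sorted(filtered_results)
values = [filtered_results[s][0] for s in sites]
yerr = [[filtered_results[s][1] for s in sites],   # down to the historical mean
        [filtered_results[s][2] for s in sites]]   # up to the historical max
plt.bar(range(len(sites)), values, yerr=yerr, tick_label=sites)
plt.ylabel('Usage')
plt.show()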
def parse_type(self, string, my_type):
    if my_type == 'int':
        return int(string)
    elif my_type == 'float':
        return float(string)
    elif my_type == 'eval':
        return eval(str(string), {'__builtins__': None, 'time': time}, {})
    elif my_type == 'datetime':
        return convert_to_datetime(string)
    elif my_type == 'timestamp':
        return to_timestamp(string)
    else:
        return str(string)
def addResults_pg(self, data, metadata, gen, **kw):
    gen.write(metadata.get('pivot_name', 'Unknown Pivot') + ',')
    gen.write(metadata.get('grouping_name', 'Unknown Grouping') + ',')
    gen.write(metadata.get('column_names') + '\n')
    convert_to_time = metadata.get('grouping_name', 'False').lower() == 'time'
    for pivot in data.keys():
        my_groups = data[pivot].keys()
        my_groups.sort()
        my_groups.reverse()
        for grouping in my_groups:
            if convert_to_time:
                my_group = convert_to_datetime(grouping).strftime('%x %X')
            else:
                my_group = str(grouping)
            gen.write(str(pivot) + "," + str(my_group) + ",")
            self.addData(data[pivot][grouping], gen, **kw)
            gen.write("\n")
def results_parser(dom):
    data = {}
    pivots = dom.getElementsByTagName('pivot')
    for pivot_dom in pivots:
        pivot = pivot_dom.getAttribute('name')
        pivot_dict = {}
        data[pivot] = pivot_dict
        groups = pivot_dom.getElementsByTagName('group')
        for group_dom in groups:
            group = group_dom.getAttribute('value')
            try:
                group = convert_to_datetime(group)
            except:
                pass
            data_dom = group_dom.getElementsByTagName('d')[0]
            datum = float(str(data_dom.firstChild.data))
            pivot_dict[group] = datum
    return data
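# Hypothetical input showing the XML shape results_parser() expects; the
# element and attribute names (pivot/name, group/value, d) come from the
# function above, but this particular document is invented for illustration.
from xml.dom.minidom import parseString

sample = parseString(
    '<graph>'
    '  <pivot name="SiteA">'
    '    <group value="2013-07-08 00:00:00"><d>42.0</d></group>'
    '  </pivot>'
    '</graph>')
print results_parser(sample)
# {'SiteA': {<datetime, or the raw string if convert_to_datetime fails>: 42.0}}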
def parse_type(self, string, my_type):
    if my_type == 'int':
        return int(string)
    elif my_type == 'float':
        return float(string)
    elif my_type == 'eval':
        return eval(str(string), {'__builtins__': None, 'time': time}, {})
    elif my_type == 'datetime':
        return convert_to_datetime(string)
    elif my_type == 'timestamp':
        return to_timestamp(string)
    elif my_type == 'bool' or my_type == 'boolean':
        if type(string) != types.StringType:
            return bool(string)
        if string.lower().strip() == 'false':
            return False
        elif string.lower().strip() == 'true':
            return True
        else:
            raise TypeError("Cannot convert string %s to boolean; valid "
                            "inputs are 'true' or 'false'." % string)
    else:
        return str(string)
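# Illustrative calls against the variant above, assuming `parser` is an
# instance of the (hypothetical here) class that defines parse_type().
parser.parse_type('3600', 'int')                      # -> 3600
parser.parse_type('time.time()-86400', 'eval')        # restricted eval: only
                                                      #    the time module visible
parser.parse_type('False', 'boolean')                 # -> False
parser.parse_type('2013-07-08 00:00:00', 'datetime')  # -> datetime via helper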
def refine(self, data, filter_dict, facility=True, vo=True, dn=True,
           hours=True, default_rel_range=14*86400, probe=False):
    data['supports_hours'] = hours
    data['refine_vo'] = vo
    data['refine_facility'] = facility
    data['refine_dn'] = dn
    data['refine_probe'] = probe
    self.handle_time_parameters(data, filter_dict)
    self.copy_if_present(filter_dict, data, 'facility', 'vo',
        'exclude-facility', 'exclude-vo', 'user', 'user', 'exclude-dn',
        'vo_set', 'facility_set', 'probe', 'opportunistic-filter')
    if len(filter_dict.get('facility', '')) == 0 and \
            'facility_set' in filter_dict:
        try:
            filter_dict['facility'] = '|'.join(
                self.site_sets[filter_dict['facility_set']])
        except:
            raise ValueError("Unknown facility set: %s." %
                filter_dict['facility_set'])
    # added following 'facility' test section - 2013Jul08 - wbh
    # change request: GratiaWeb-35
    if len(filter_dict.get('facility', '')) != 0:
        testString = "%s" % filter_dict['facility']
        print "testString: %s" % testString
        if self.NotContainsRegex(testString):
            filter_dict['facility'] = '^%s$' % testString
        else:
            pass
    if len(filter_dict.get('vo', '')) == 0 and 'vo_set' in filter_dict:
        try:
            filter_dict['vo'] = '|'.join(
                self.site_sets[filter_dict['vo_set']])
        except:
            raise ValueError("Unknown VO set: %s." %
                filter_dict['vo_set'])
    # added following 'vo' test section - 2013Jul08 - wbh
    # change request: GratiaWeb-35
    if len(filter_dict.get('vo', '')) != 0:
        testString = "%s" % filter_dict['vo']
        print "testString: %s" % testString
        if self.NotContainsRegex(testString):
            filter_dict['vo_set'] = '^%s$' % testString
        else:
            pass
    data['query_kw'] = dict(filter_dict)
    if 'starttime' not in filter_dict:
        data['display_starttime'] = convert_to_datetime(
            time.time() - default_rel_range)
    else:
        data['display_starttime'] = convert_to_datetime(
            filter_dict['starttime'])
    data['display_starttime'] = data['display_starttime'].strftime(
        '%Y-%m-%d %H:%M:%S')
    if 'endtime' not in filter_dict:
        data['display_endtime'] = convert_to_datetime(
            time.time() - default_rel_range)
    else:
        data['display_endtime'] = convert_to_datetime(
            filter_dict['endtime'])
    data['display_endtime'] = data['display_endtime'].strftime(
        '%Y-%m-%d %H:%M:%S')
    data['filter_url'] = urllib.urlencode(filter_dict)
    self.assign_blank(filter_dict, 'facility', 'vo', 'exclude-vo',
        'exclude-facility', 'exclude-dn', 'user')
    data['filter_dict'] = filter_dict
    if data['filter_url'] != '':
        data['filter_url'] = '?' + data['filter_url']
    data['refine'] = self.getTemplateFilename('refine.tmpl')
    data['refine_error'] = None
def refine(self, data, filter_dict, facility=True, vo=True, dn=True,
           hours=True, default_rel_range=14*86400, probe=False):
    relTime = data.get('relativetime', False)
    data['supports_hours'] = hours
    data['refine_vo'] = vo
    data['refine_facility'] = facility
    data['refine_dn'] = dn
    data['refine_probe'] = probe
    if relTime:
        if relTime == 'absolute':
            # Absolute window: the user supplied explicit start/end strings.
            data['relTime'] = 'absolute'
            starttime = data.get('starttime', None)
            filter_dict['starttime'] = starttime
            while True:
                try:
                    valid = time.strptime(starttime, '%Y-%m-%d %H:%M:%S')
                    break
                except ValueError:
                    # Unparseable start time: fall back to a two-week window.
                    relTime = 1209600
                    break
            endtime = data.get('endtime', None)
            filter_dict['endtime'] = endtime
            while True:
                try:
                    valid2 = time.strptime(endtime, '%Y-%m-%d %H:%M:%S')
                    break
                except ValueError:
                    relTime = 1209600
                    break
            # try to determine default span
            try:
                valid = datetime.datetime(*valid[:6])
                valid2 = datetime.datetime(*valid2[:6])
                timedelta = (valid2 - valid)
                myinterval = timedelta.days * 86400 + timedelta.seconds
                if myinterval < 4*86400:
                    default_span = 3600
                elif myinterval <= 30*86400:
                    default_span = 86400
                elif myinterval < 365*86400:
                    default_span = 86400*7
                else:
                    default_span = 86400*30
            except:
                default_span = 86400
            # Set the span, defaulting to the determined default_span
            try:
                filter_dict['span'] = int(data['span'])
            except:
                filter_dict['span'] = default_span
        if relTime != 'absolute':
            # Relative window: relTime seconds back from "now".
            data['relTime'] = relTime
            try:
                interval = int(relTime)
            except:
                raise ValueError("relTime must be an integer;"
                    " input was %s." % relTime)
            filter_dict['starttime'] = 'time.time()-%i' % interval
            filter_dict['endtime'] = 'time.time()'
            if interval < 4*86400:
                filter_dict['span'] = 3600
            elif interval <= 30*86400:
                filter_dict['span'] = 86400
            elif interval < 365*86400:
                filter_dict['span'] = 86400*7
            else:
                filter_dict['span'] = 86400*30
    else:
        data['relTime'] = 'absolute'
    self.copy_if_present(filter_dict, data, 'facility', 'vo',
        'exclude-facility', 'exclude-vo', 'user', 'user', 'exclude-dn',
        'vo_set', 'facility_set', 'probe')
    if len(filter_dict.get('facility', '')) == 0 and \
            'facility_set' in filter_dict:
        try:
            filter_dict['facility'] = '|'.join(
                self.site_sets[filter_dict['facility_set']])
        except:
            raise ValueError("Unknown facility set: %s." %
                filter_dict['facility_set'])
    # added following 'facility' test section - 2013Jul08 - wbh
    # change request: GratiaWeb-35
    if len(filter_dict.get('facility', '')) != 0:
        testString = "%s" % filter_dict['facility']
        print "testString: %s" % testString
        if self.NotContainsRegex(testString):
            filter_dict['facility'] = '^%s$' % testString
        else:
            pass
    if len(filter_dict.get('vo', '')) == 0 and 'vo_set' in filter_dict:
        try:
            filter_dict['vo'] = '|'.join(
                self.site_sets[filter_dict['vo_set']])
        except:
            raise ValueError("Unknown VO set: %s." %
                filter_dict['vo_set'])
    # added following 'vo' test section - 2013Jul08 - wbh
    # change request: GratiaWeb-35
    if len(filter_dict.get('vo', '')) != 0:
        testString = "%s" % filter_dict['vo']
        print "testString: %s" % testString
        if self.NotContainsRegex(testString):
            filter_dict['vo_set'] = '^%s$' % testString
        else:
            pass
    data['query_kw'] = dict(filter_dict)
    if 'starttime' not in filter_dict:
        data['display_starttime'] = convert_to_datetime(
            time.time() - default_rel_range)
    else:
        data['display_starttime'] = convert_to_datetime(
            filter_dict['starttime'])
    data['display_starttime'] = data['display_starttime'].strftime(
        '%Y-%m-%d %H:%M:%S')
    if 'endtime' not in filter_dict:
        data['display_endtime'] = convert_to_datetime(
            time.time() - default_rel_range)
    else:
        data['display_endtime'] = convert_to_datetime(
            filter_dict['endtime'])
    data['display_endtime'] = data['display_endtime'].strftime(
        '%Y-%m-%d %H:%M:%S')
    data['filter_url'] = urllib.urlencode(filter_dict)
    self.assign_blank(filter_dict, 'facility', 'vo', 'exclude-vo',
        'exclude-facility', 'exclude-dn', 'user')
    data['filter_dict'] = filter_dict
    if data['filter_url'] != '':
        data['filter_url'] = '?' + data['filter_url']
    data['refine'] = self.getTemplateFilename('refine.tmpl')
    data['refine_error'] = None
def refine(self, data, filter_dict, facility=True, vo=True, dn=True,
           hours=True, default_rel_range=14*86400):
    relTime = data.get('relativetime', False)
    data['supports_hours'] = hours
    data['refine_vo'] = vo
    data['refine_facility'] = facility
    data['refine_dn'] = dn
    if relTime:
        if relTime == 'absolute':
            data['relTime'] = 'absolute'
            starttime = data.get('starttime', None)
            filter_dict['starttime'] = starttime
            while True:
                try:
                    valid = time.strptime(starttime, '%Y-%m-%d %H:%M:%S')
                    break
                except ValueError:
                    relTime = 1209600
                    break
            endtime = data.get('endtime', None)
            filter_dict['endtime'] = endtime
            while True:
                try:
                    valid2 = time.strptime(endtime, '%Y-%m-%d %H:%M:%S')
                    break
                except ValueError:
                    relTime = 1209600
                    break
            # try to determine default span
            try:
                valid = datetime.datetime(*valid[:6])
                valid2 = datetime.datetime(*valid2[:6])
                timedelta = (valid2 - valid)
                myinterval = timedelta.days * 86400 + timedelta.seconds
                if myinterval < 4 * 86400:
                    default_span = 3600
                elif myinterval <= 30 * 86400:
                    default_span = 86400
                elif myinterval < 365 * 86400:
                    default_span = 86400 * 7
                else:
                    default_span = 86400 * 30
            except:
                default_span = 86400
            # Set the span, defaulting to the determined default_span
            try:
                filter_dict['span'] = int(data['span'])
            except:
                filter_dict['span'] = default_span
        if relTime != 'absolute':
            data['relTime'] = relTime
            try:
                interval = int(relTime)
            except:
                raise ValueError("relTime must be an integer;"
                    " input was %s." % relTime)
            filter_dict['starttime'] = 'time.time()-%i' % interval
            filter_dict['endtime'] = 'time.time()'
            if interval < 4 * 86400:
                filter_dict['span'] = 3600
            elif interval <= 30 * 86400:
                filter_dict['span'] = 86400
            elif interval < 365 * 86400:
                filter_dict['span'] = 86400 * 7
            else:
                filter_dict['span'] = 86400 * 30
    else:
        data['relTime'] = 'absolute'
    self.copy_if_present(filter_dict, data, 'facility', 'vo',
        'exclude-facility', 'exclude-vo', 'user', 'user', 'exclude-dn',
        'vo_set', 'facility_set')
    if len(filter_dict.get('facility', '')) == 0 and \
            'facility_set' in filter_dict:
        try:
            filter_dict['facility'] = '|'.join(
                self.site_sets[filter_dict['facility_set']])
        except:
            raise ValueError("Unknown facility set: %s." %
                filter_dict['facility_set'])
    print ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> %s " % filter_dict
    if len(filter_dict.get('vo', '')) == 0 and 'vo_set' in filter_dict:
        try:
            filter_dict['vo'] = '|'.join(
                self.site_sets[filter_dict['vo_set']])
        except:
            raise ValueError("Unknown VO set: %s." %
                filter_dict['vo_set'])
    data['query_kw'] = dict(filter_dict)
    if 'starttime' not in filter_dict:
        data['display_starttime'] = convert_to_datetime(
            time.time() - default_rel_range)
    else:
        data['display_starttime'] = convert_to_datetime(
            filter_dict['starttime'])
    data['display_starttime'] = data['display_starttime'].strftime(
        '%Y-%m-%d %H:%M:%S')
    if 'endtime' not in filter_dict:
        data['display_endtime'] = convert_to_datetime(
            time.time() - default_rel_range)
    else:
        data['display_endtime'] = convert_to_datetime(
            filter_dict['endtime'])
    data['display_endtime'] = data['display_endtime'].strftime(
        '%Y-%m-%d %H:%M:%S')
    data['filter_url'] = urllib.urlencode(filter_dict)
    self.assign_blank(filter_dict, 'facility', 'vo', 'exclude-vo',
        'exclude-facility', 'exclude-dn', 'user')
    data['filter_dict'] = filter_dict
    if data['filter_url'] != '':
        data['filter_url'] = '?' + data['filter_url']
    data['refine'] = self.getTemplateFilename('refine.tmpl')
    data['refine_error'] = None