def exec_task(self, auth, ctx):
    """Scheduled purging task entry point.

    Runs the per-node metric purge via MetricsService, then deletes
    TaskResult rows older than the configured retention window.
    """
    LOGGER.debug('entered in excec_task for Purging task')
    MetricsService().purging_for_all_nodes(auth)
    #purge the task results
    #no need to catch exception since task service will log
    #and rollback in case of an exception
    import tg
    from datetime import datetime, timedelta
    # Retention window (days) comes from application config.
    purge_interval = tg.config.get("task_results_purge_interval")
    cutoff_date = datetime.utcnow() + timedelta(days=-int(purge_interval))
    DBSession.query(TaskResult).\
        filter(TaskResult.timestamp <= cutoff_date).\
        delete()
    #also purge the non-repeating tasks that were submitted long time
    #ago
    # Default purge-count cap, overridden from config when available.
    # NOTE(review): `limit` is never used in the visible portion of
    # this method -- presumably consumed further down; confirm against
    # the full file.
    limit = 5000
    try:
        limit=int(tg.config.get(constants.TASK_PURGE_COUNT))
    except Exception, e:
        print "Exception: ", e
def exec_task(self, auth, ctx):
    """Scheduled task entry point for the time-basis rollup.

    Thin wrapper: records entry in the log, then delegates the actual
    rollup of all nodes to MetricsService.
    """
    LOGGER.debug('entered in excec task for TimeBasisRollupForNodes task')
    service = MetricsService()
    service.timebasis_rollup_for_all_nodes(auth)
def __init__(self):
    """Set up collaborators used by the chart helpers.

    The client UTC offset starts unset and is filled in per request.
    """
    # Independent assignments; order is not significant.
    self.utcoffset = None
    self.manager = Basic.getGridManager()
    self.service = MetricsService()
class ChartService:
    """Assembles chart payloads (data series plus axis/tick/label
    configuration) from the metrics store.

    NOTE(review): Python 2 module (print statements, `long`,
    `except X, e`). Depends on module-level names defined elsewhere in
    the file: constants, MetricsService, Basic, TopCache, to_str,
    calendar, datetime/timedelta, SERVER_RAW.
    """

    def __init__(self):
        # Metrics data-access service and grid manager; utcoffset is
        # the client's UTC offset, set per request by get_chart_data.
        self.service = MetricsService()
        self.manager = Basic.getGridManager()
        self.utcoffset = None

    def metrics(self, auth):
        """Debug helper: print and return the last 8 hours of raw
        metrics for a hard-coded node as chart points."""
        node_id = '53c7a8bb-fc80-947d-f6c5-ab8f33d1be15'  #VM
        # The VM id above is immediately overwritten by the node id.
        node_id = '04e059c6-264d-b1af-77dd-f0a62ae00c34'  #NODE
        day1 = datetime.utcnow()
        day2 = datetime.utcnow() + timedelta(hours=-8)
        print day1, "--------", day2
        result = self.service.getRawData(node_id, SERVER_RAW, day2, day1)
        #print len(result),"-------",result
        data_list = []
        for ls in result:
            print ls[0], "--", ls[1], "--", ls[len(ls) - 1]
            # Last element of each row is the sample timestamp; convert
            # to epoch milliseconds for the chart front end.
            dt = ls[len(ls) - 1]
            millis = calendar.timegm(dt.timetuple()) * 1000
            print millis
            data_list.append(dict(cpu=ls[0], mem=ls[1], millis=millis))
        return data_list

    def get_chart_data(self,auth,node_id,node_type,metric,period,offset,frm,to,\
            chart_type=None,avg_fdate=None,avg_tdate=None):
        """Build the complete chart dict for one metric/period request.

        offset, frm, to and the avg_* bounds are epoch-millisecond
        values from the client; period is one of the constants.* period
        codes (CUSTOM, HRS24, HRS12, DAYS7, DAYS30, DTD, WTD, MTD).
        Returns a dict with series, x-range (min/max), ymax, tick
        spacing (minTick), time format and labels.
        """
        self.utcoffset = timedelta(milliseconds=long(offset))
        print (self.utcoffset), "***************", datetime.utcnow()
        # Defaults: hourly rollup over the last 24 hours.
        per_type = 'ROLLUP'
        rollup_type = constants.HOURLY
        time_format = "%H:%M"
        xlabel = ylabel = label = ''
        minTick = 'day'
        date2 = datetime.utcnow()
        date1 = datetime.utcnow() + timedelta(days=-1)
        if period == constants.CUSTOM:
            # Custom range: pick granularity from the span length.
            per_type = 'ROLLUP'
            date2 = constants.defaultDate + timedelta(milliseconds=long(to))
            date1 = constants.defaultDate + timedelta(milliseconds=long(frm))
            td = date2 - date1
            if td.days > 3:
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
            elif self.timedelta_seconds(td) < (12 * 60 * 60):  #12hours
                # Short spans use raw samples instead of rollups.
                per_type = 'RAW'
                rollup_type = constants.HOURLY
                minTick = [30, 'minute']
                xlabel = "Time(minutes)"
                time_format = "%H:%M"
            else:
                rollup_type = constants.HOURLY
                minTick = [2, 'hour']
                xlabel = "Time(hours)"
                time_format = "%H:%M"
        else:
            per_type = 'ROLLUP'
            if period == constants.HRS24:
                rollup_type = constants.HOURLY
                minTick = [2, 'hour']
                xlabel = "Time(hours)"
                time_format = "%H:%M"
                date1 = date2 + timedelta(days=-1)
            elif period == constants.HRS12:
                per_type = 'RAW'
                rollup_type = constants.HOURLY
                minTick = [30, 'minute']
                xlabel = "Time(minutes)"
                time_format = "%H:%M"
                date1 = date2 + timedelta(seconds=-(12 * 60 * 60))
            elif period == constants.DAYS7:
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                date1 = date2 + timedelta(weeks=-1)
            elif period == constants.DAYS30:
                rollup_type = constants.DAILY
                minTick = [2, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                date1 = date2 + timedelta(days=-31)
            elif period == constants.DTD:
                # Day-to-date: client-local midnight up to "now".
                date2 = constants.defaultDate + timedelta(milliseconds=long(to))
                date2 = date2 + self.utcoffset
                date1 = datetime(date2.year, date2.month, date2.day) - self.utcoffset
                date2 = date2 - self.utcoffset
                rollup_type = constants.HOURLY
                minTick = [2, 'hour']
                xlabel = "Time(hours)"
                time_format = "%H:%M"
            elif period == constants.WTD:
                # Week-to-date: start of the client-local week.
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                weekdays = date2.date().weekday()
                date1=datetime(date2.year,date2.month,date2.day) +\
                        timedelta(days=-weekdays)-self.utcoffset
                diff = (date2 - date1).days
                # Early in the week, fall back to finer hour ticks.
                if diff < 3:
                    minTick = [4, 'hour']
                    time_format = "%b/%d:%H"
                    xlabel = "Time(hours)"
                if diff < 1:
                    minTick = [1, 'hour']
                    time_format = "%b/%d:%H"
                    xlabel = "Time(hours)"
            elif period == constants.MTD:
                # Month-to-date: first of the client-local month.
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                date1=datetime(date2.year,date2.month,date2.day) +\
                        timedelta(days=-(date2.day-1))-self.utcoffset
                diff = (date2 - date1).days
                if diff > 8:
                    minTick = [2, 'day']
                    xlabel = "Time(days)"
        # if chart_type==constants.TOP5SERVERS:
        #    date2=datetime.utcnow()
        #    date1=datetime.utcnow() +timedelta(seconds=-3601)
        #    minTick=[5,'minute']
        #    xlabel="Time(minutes)"
        #    time_format="%H:%M"
        # Build the "from - to" label. Hours are zero-padded; minutes
        # are not (original behavior, kept as-is).
        hr1 = to_str(date1.hour)
        hr2 = to_str(date2.hour)
        minute1 = to_str(date1.minute)
        minute2 = to_str(date2.minute)
        if date1.hour < 10:
            hr1 = "0" + hr1
        if date2.hour < 10:
            hr2 = "0" + hr2
        dt_str=to_str(date1.year)+"/"+to_str(date1.month)+"/"+to_str(date1.day)+" "+hr1+":"+minute1+" - "+\
            to_str(date2.year)+"/"+to_str(date2.month)+"/"+to_str(date2.day)+" "+hr2+":"+minute2
        if metric == constants.METRIC_CPU:
            ylabel = "cpu(%)"
        elif metric == constants.METRIC_VMCPU:
            ylabel = "vm cpu(%)"
        elif metric == constants.METRIC_MEM:
            ylabel = "memory(%)"
        label = dt_str
        series = []
        ymax = 1
        avg = 0.0
        show_avg = False
        if chart_type == constants.TOP5SERVERS:
            metric_type = self.get_metric_type(node_type, per_type)
            (series,ymax)=self.topNServers(auth,node_id,node_type,metric,metric_type,\
                    rollup_type,per_type,date1,date2,period)
        elif chart_type == constants.COMPARISONCHART:
            # -1 is a placeholder metric_type; comparison_chart derives
            # the real one per node.
            (series,ymax)=self.comparison_chart(auth,node_id,node_type,metric,-1,\
                    rollup_type,per_type,date1,date2,period)
        else:
            metric_type = self.get_metric_type(node_type, per_type)
            (series,ymax,avg,show_avg)=self.chart_series_data(node_id,node_type, metric,metric_type,\
                    rollup_type,per_type,date1,date2,avg_fdate,avg_tdate,period)
        if len(series) == 0:
            series.append(dict(data=[], label=""))
        # NOTE(review): shadows the builtins min/max (kept as-is; they
        # are only dict values here).
        min = calendar.timegm(date1.timetuple()) * 1000
        max = calendar.timegm(date2.timetuple()) * 1000
        return dict(time_format=time_format,label=label,xlabel=xlabel,ylabel=ylabel,\
                show_avg=show_avg,avg=avg,min=min,max=max,ymax=ymax,minTick=minTick,series=series)

    def chart_series_data(self,node_id,node_type, metric,metric_type,rollup_type,per_type,\
            date1,date2,avg_fdate=None,avg_tdate=None,period=None):
        """Fetch a single node's series and, when an explicit window is
        given, the average over [avg_fdate, avg_tdate].

        Returns (series, ymax, avg, show_avg).
        """
        series = []
        node_ids = []
        node_ids.append(node_id)
        # `avg` starts as a number but is immediately reused as the
        # cache-bypass flag passed to get_metrics_data (kept as-is).
        avg = 0.0
        avg = False
        if avg_fdate is not None and avg_tdate is not None:
            avg = True
        result = self.get_metrics_data(node_ids, metric, metric_type,
                                       rollup_type, per_type, date1, date2,
                                       period, avg)
        show_avg = False
        if avg_fdate is not None and avg_tdate is not None:
            # Client sends the window as epoch-millisecond offsets.
            avg_tdate = constants.defaultDate + timedelta(milliseconds=long(avg_tdate))
            avg_fdate = constants.defaultDate + timedelta(milliseconds=long(avg_fdate))
            show_avg = True
        # else:
        #    avg_fdate=date1
        #    avg_tdate=date2
        #    show_avg=False
        # NOTE(review): when no explicit window is supplied these calls
        # receive None dates; show_avg stays False so the value is
        # presumably ignored downstream -- verify.
        if per_type == "ROLLUP":
            avg = self.service.getRollupAvg(node_id, metric, metric_type,
                                            rollup_type, avg_fdate, avg_tdate)
        else:
            avg = self.service.getRawAvg(node_id, node_type, metric,
                                         metric_type, avg_fdate, avg_tdate)
        (data_list, ymax) = self.get_series_data(result)
        series.append(dict(data=data_list, label=""))
        return (series, ymax, avg, show_avg)

    def topNServers(self,auth,node_id,node_type,metric,metric_type,rollup_type,per_type,date1,date2,period):
        """Build one series per top server under a data center or
        server pool. Returns (series, ymax)."""
        series = []
        series.append(dict(data=[], label=""))
        ymx = 2
        srvrs = []
        if node_type == constants.DATA_CENTER:
            # Data center -> groups -> servers.
            site = auth.get_entity(node_id)
            grps = site.children
            for grp in grps:
                srvrs.extend(grp.children)
        elif node_type == constants.SERVER_POOL:
            grp = auth.get_entity(node_id)
            if grp is None:
                return (series, ymx)
            srvrs = grp.children
        srvr_ids = []
        srvr_dict = {}
        for srvr in srvrs:
            srvr_ids.append(srvr.entity_id)
            srvr_dict[srvr.entity_id] = srvr.name
        #print self.service.getRollupTop(node_ids,metric,metric_type,rollup_type,date1,date2)
        # Ranking window is always the last hour (+1s), independent of
        # the chart's own date range.
        dt2 = datetime.utcnow()
        dt1 = dt2 + timedelta(seconds=-3601)
        data_list = []
        tc = TopCache()
        data_list = tc.get_top_entities(node_id, node_type, metric,
                                        "topNservers", auth,
                                        constants.SERVER_RAW, srvr_ids,
                                        dt1, dt2)
        if per_type == 'ROLLUP':
            metric_type = constants.SERVER_ROLLUP
        else:
            metric_type = constants.SERVER_RAW
        # NOTE(review): the loop reuses the names srvr_ids/data_list;
        # iteration stays bound to the original list object, so this
        # works, but it is fragile.
        for data in data_list:
            srvr_ids = []
            srvr_ids.append(data[1])
            result = self.get_metrics_data(srvr_ids, metric, metric_type,
                                           rollup_type, per_type, date1,
                                           date2, period)
            (data_list, ymax) = self.get_series_data(result)
            if ymax > ymx:
                ymx = ymax
            series.append(dict(data=data_list, label=srvr_dict[data[1]]))
        return (series, ymx)

    def comparison_chart(self,auth,node_id,node_type,metric,metric_type,rollup_type,per_type,date1,date2,period):
        """Build one series per '*'-separated node id for side-by-side
        comparison. Returns (series, ymax)."""
        series = []
        series.append(dict(data=[], label=""))
        ymx = 2
        # node_id/node_type are '*'-delimited parallel lists.
        node_ids = node_id.split('*')
        node_types = node_type.split('*')
        i = -1
        for node_id in node_ids:
            i += 1
            ent = auth.get_entity(node_id)
            if ent is None:
                continue
            # Name reuse: node_ids is rebound to a one-element list for
            # the fetch; iteration stays on the original split list.
            node_ids = []
            node_ids.append(node_id)
            metric_type = self.get_metric_type(node_types[i], per_type)
            result = self.get_metrics_data(node_ids, metric, metric_type,
                                           rollup_type, per_type, date1,
                                           date2, period)
            (data_list, ymax) = self.get_series_data(result)
            if ymax > ymx:
                ymx = ymax
            series.append(dict(data=data_list, label=ent.name))
        return (series, ymx)

    def get_metrics_data(self,node_id,metric,metric_type,rollup_type,per_type,date1,date2,period,avg=False):
        """Fetch metric rows; custom ranges and average queries bypass
        the MetricCache. `node_id` is actually a list of ids."""
        result = []
        if period == constants.CUSTOM or avg == True:
            result = self.get_metrics_specific_value(node_id, metric,
                                                     metric_type, rollup_type,
                                                     per_type, date1, date2)
        else:
            from convirt.model.MetricCache import MetricCache
            mc = MetricCache()
            # Cache path keys on the first (only) id.
            result = mc.metric_cache(node_id[0], metric, metric_type,
                                     rollup_type, per_type, date1, date2,
                                     period)
        return result

    def get_metrics_specific_value(self,node_id,metric,metric_type,rollup_type,per_type,date1,date2):
        """Uncached fetch: rollup table for 'ROLLUP', raw table
        otherwise."""
        result = []
        if per_type == 'ROLLUP':
            result = self.service.getRollupMetricData(node_id, metric,
                                                      metric_type,
                                                      rollup_type,
                                                      date1, date2)
        else:
            result = self.service.getRawMetricData(node_id, metric,
                                                   metric_type, date1, date2)
        return result

    def get_series_data(self,listdata):
        """Convert (value, datetime) rows into chart points and compute
        a padded y-axis maximum. Returns (data_list, ymax)."""
        ymax = 0
        data_list = []
        for ls in listdata:
            #print ls[0],"--",ls[1]
            dt = ls[1]
            millis = calendar.timegm(dt.timetuple()) * 1000
            if ls[0] > ymax:
                ymax = ls[0]
            data_list.append(dict(metric=ls[0], millis=millis))
        # Snap to 100 when within 10 of it; otherwise pad by 2.
        if (100 - ymax) <= 10:
            ymax = 100
        else:
            ymax = ymax + 2
        return (data_list, ymax)

    def get_metric_type(self,node_type,per_type):
        """Map node type + period type to a constants.* metric-type
        value (e.g. constants.SERVER_ROLLUP)."""
        nod_type = ""
        if node_type == constants.DATA_CENTER:
            nod_type = 'DATACENTER'
        elif node_type == constants.SERVER_POOL:
            nod_type = 'SERVERPOOL'
        elif node_type == constants.MANAGED_NODE:
            nod_type = 'SERVER'
        elif node_type == constants.DOMAIN:
            nod_type = 'VM'
        # eval builds the attribute name from internal values only.
        metric_type = eval("constants." + nod_type + "_" + per_type)
        return metric_type

    def timedelta_seconds(self,td):
        # Total seconds of td, computed via milliseconds (integer
        # division in Python 2).
        return (td.days * 86400000 + td.seconds * 1000 + td.microseconds / 1000) / 1000
class TopCache(GenericCache):
    """Per-user, time-limited cache for "top N" metric query results.

    Cache layout (from the code below):
    self.user_cache[user_id][(node_id, node_type, metric, top_type)] =
        {"cached_time": ..., "value": [...], "last_accessed": ...}
    """
    # Shared metrics-service instance (class attribute).
    service = MetricsService()

    def get_top_entities(self, node_id, node_type, metric, top_type, auth,
                         metric_type, ids, date1, date2):
        """
        Return cached (or freshly queried) top-entity rows for this
        user/key, refreshing when the entry is stale or the cached
        entity ids no longer match `ids`.
        """
        now = datetime.utcnow()
        status = False
        user_id = auth.user.user_id
        top_cache = self.get_top_value(user_id)
        usage_list = []
        cache_key = (node_id, node_type, metric, top_type)
        #checking cache's key is already exisiting
        if top_cache.has_key(cache_key):
            cache_ids = []
            for data in top_cache[cache_key].get("value"):
                cache_ids.append(data[1])
            # Cached entities that are no longer in the requested id
            # set force a refresh.
            diff_list = [item for item in cache_ids if not item in ids]
            # print "FOUNDDDDDDDDDDTOP555555==",(node_id,node_type,metric,top_type)
            cached_time = top_cache[cache_key].get("cached_time")
            if (now > cached_time) or len(diff_list) > 0:
                status = True
        else:
            status = True
        if status:
            #quering the result and set it to cache
            cache_time = now + timedelta(
                minutes=int(tg.config.get(constants.CACHE_TIME)))
            data_list = self.service.getRawTopMetric(ids, metric, metric_type,
                                                     date1, date2, "DESC", 5)
            if len(data_list) > 0:
                self.check_cache_limit(top_cache)
                top_cache[cache_key] = {
                    "cached_time": cache_time,
                    "value": data_list
                }
                top_cache[cache_key]["last_accessed"] = now
                self.user_cache[user_id].update({cache_key: top_cache[cache_key]})
        # making key to remove if not deleted on entity operations
        if len(ids) == 0 and self.user_cache.has_key(user_id):
            user = self.user_cache[user_id]
            if user.has_key(cache_key):
                self.user_cache[user_id][cache_key]["value"] = []
        # NOTE(review): assumes cache_key exists at this point; if the
        # refresh query returned no rows and nothing was cached earlier
        # this would raise KeyError -- verify against callers.
        usage_list = self.user_cache[user_id][cache_key].get("value", [])
        if len(usage_list) == 0:
            del self.user_cache[user_id][cache_key]
        return usage_list

    def get_top_value(self, user_id):
        """
        Return this user's cache dict, creating an empty one if absent.
        """
        if not self.user_cache.has_key(user_id):
            self.user_cache[user_id] = {}
        top_cache = self.user_cache.get(user_id, {})
        return top_cache

    def delete_usercache(self, auth):
        """
        Drop the entire cache for the requesting user.
        """
        user_id = auth.user.user_id
        if self.user_cache.has_key(user_id):
            del self.user_cache[user_id]
    # NOTE(review): fragment of an application-startup/make_app-style
    # function whose `def` lies outside this view (it ends with
    # `return app`); formatting reconstructed, tokens unchanged.
    except Exception, e:
        print "Error while scanning the image store ", e
    try:
        storage_stats_data_upgrade()
    except Exception, e:
        print "Error while recomputing storage stats ", e
    try:
        unreserve_disks_on_cms_start()
    except Exception, e:
        print "Error while unreserving storage disks ", e
    #start the services thread
    #maker should already have been configured by calling init_model
    sc = ServiceCentral(zopelessmaker)
    sc.start()
    # Ensure the service thread is stopped on interpreter exit.
    atexit.register(sc.quit)
    base_config.convirt_service_central = sc
    MetricsService().init_mappers()
    # NOTE(review): eval on a config string -- config must be trusted.
    Node.use_bash_timeout = eval(tg.config.get("use_bash_timeout"))
    Node.default_bash_timeout = tg.config.get("bash_default_timeout")
    Node.bash_dir = os.path.join(tg.config.get('convirt_cache_dir'),
                                 'common/scripts')
    Node.local_bash_dir = tg.config.get("common_script")
    # Wrap your base TurboGears 2 application with custom middleware here
    try:
        pass
        # add_deployment_stats_task()
    except Exception, e:
        print "Error while adding deployment stats task", e
    return app
def __init__(self):
    """Initialize chart-service collaborators.

    utcoffset remains None until a request supplies the client offset.
    """
    # Independent assignments; order is not significant.
    self.utcoffset = None
    self.manager = Basic.getGridManager()
    self.service = MetricsService()
class ChartService:
    """Assembles chart payloads (data series plus axis/tick/label
    configuration) from the metrics store.

    NOTE(review): duplicate, reformatted copy of the ChartService class
    appearing earlier in this file. Python 2 code; depends on
    module-level names defined elsewhere: constants, MetricsService,
    Basic, TopCache, to_str, calendar, datetime/timedelta, SERVER_RAW.
    """

    def __init__(self):
        # Metrics data-access service and grid manager; utcoffset is
        # the client's UTC offset, set per request by get_chart_data.
        self.service = MetricsService()
        self.manager = Basic.getGridManager()
        self.utcoffset = None

    def metrics(self, auth):
        """Debug helper: print and return the last 8 hours of raw
        metrics for a hard-coded node as chart points."""
        node_id = '53c7a8bb-fc80-947d-f6c5-ab8f33d1be15'  #VM
        # The VM id above is immediately overwritten by the node id.
        node_id = '04e059c6-264d-b1af-77dd-f0a62ae00c34'  #NODE
        day1 = datetime.utcnow()
        day2 = datetime.utcnow() + timedelta(hours=-8)
        print day1, "--------", day2
        result = self.service.getRawData(node_id, SERVER_RAW, day2, day1)
        #print len(result),"-------",result
        data_list = []
        for ls in result:
            print ls[0], "--", ls[1], "--", ls[len(ls) - 1]
            # Last element of each row is the sample timestamp; convert
            # to epoch milliseconds for the chart front end.
            dt = ls[len(ls) - 1]
            millis = calendar.timegm(dt.timetuple()) * 1000
            print millis
            data_list.append(dict(cpu=ls[0], mem=ls[1], millis=millis))
        return data_list

    def get_chart_data(self,auth,node_id,node_type,metric,period,offset,frm,to,\
            chart_type=None,avg_fdate=None,avg_tdate=None):
        """Build the complete chart dict for one metric/period request.

        offset, frm, to and the avg_* bounds are epoch-millisecond
        values from the client; period is one of the constants.* period
        codes (CUSTOM, HRS24, HRS12, DAYS7, DAYS30, DTD, WTD, MTD).
        Returns a dict with series, x-range (min/max), ymax, tick
        spacing (minTick), time format and labels.
        """
        self.utcoffset = timedelta(milliseconds=long(offset))
        print (self.utcoffset), "***************", datetime.utcnow()
        # Defaults: hourly rollup over the last 24 hours.
        per_type = 'ROLLUP'
        rollup_type = constants.HOURLY
        time_format = "%H:%M"
        xlabel = ylabel = label = ''
        minTick = 'day'
        date2 = datetime.utcnow()
        date1 = datetime.utcnow() + timedelta(days=-1)
        if period == constants.CUSTOM:
            # Custom range: pick granularity from the span length.
            per_type = 'ROLLUP'
            date2 = constants.defaultDate + timedelta(milliseconds=long(to))
            date1 = constants.defaultDate + timedelta(milliseconds=long(frm))
            td = date2 - date1
            if td.days > 3:
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
            elif self.timedelta_seconds(td) < (12 * 60 * 60):  #12hours
                # Short spans use raw samples instead of rollups.
                per_type = 'RAW'
                rollup_type = constants.HOURLY
                minTick = [30, 'minute']
                xlabel = "Time(minutes)"
                time_format = "%H:%M"
            else:
                rollup_type = constants.HOURLY
                minTick = [2, 'hour']
                xlabel = "Time(hours)"
                time_format = "%H:%M"
        else:
            per_type = 'ROLLUP'
            if period == constants.HRS24:
                rollup_type = constants.HOURLY
                minTick = [2, 'hour']
                xlabel = "Time(hours)"
                time_format = "%H:%M"
                date1 = date2 + timedelta(days=-1)
            elif period == constants.HRS12:
                per_type = 'RAW'
                rollup_type = constants.HOURLY
                minTick = [30, 'minute']
                xlabel = "Time(minutes)"
                time_format = "%H:%M"
                date1 = date2 + timedelta(seconds=-(12 * 60 * 60))
            elif period == constants.DAYS7:
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                date1 = date2 + timedelta(weeks=-1)
            elif period == constants.DAYS30:
                rollup_type = constants.DAILY
                minTick = [2, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                date1 = date2 + timedelta(days=-31)
            elif period == constants.DTD:
                # Day-to-date: client-local midnight up to "now".
                date2 = constants.defaultDate + timedelta(
                    milliseconds=long(to))
                date2 = date2 + self.utcoffset
                date1 = datetime(date2.year, date2.month,
                                 date2.day) - self.utcoffset
                date2 = date2 - self.utcoffset
                rollup_type = constants.HOURLY
                minTick = [2, 'hour']
                xlabel = "Time(hours)"
                time_format = "%H:%M"
            elif period == constants.WTD:
                # Week-to-date: start of the client-local week.
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                weekdays = date2.date().weekday()
                date1=datetime(date2.year,date2.month,date2.day) +\
                        timedelta(days=-weekdays)-self.utcoffset
                diff = (date2 - date1).days
                # Early in the week, fall back to finer hour ticks.
                if diff < 3:
                    minTick = [4, 'hour']
                    time_format = "%b/%d:%H"
                    xlabel = "Time(hours)"
                if diff < 1:
                    minTick = [1, 'hour']
                    time_format = "%b/%d:%H"
                    xlabel = "Time(hours)"
            elif period == constants.MTD:
                # Month-to-date: first of the client-local month.
                rollup_type = constants.DAILY
                minTick = [1, 'day']
                xlabel = "Time(days)"
                time_format = "%b/%d"
                date1=datetime(date2.year,date2.month,date2.day) +\
                        timedelta(days=-(date2.day-1))-self.utcoffset
                diff = (date2 - date1).days
                if diff > 8:
                    minTick = [2, 'day']
                    xlabel = "Time(days)"
        # if chart_type==constants.TOP5SERVERS:
        #    date2=datetime.utcnow()
        #    date1=datetime.utcnow() +timedelta(seconds=-3601)
        #    minTick=[5,'minute']
        #    xlabel="Time(minutes)"
        #    time_format="%H:%M"
        # Build the "from - to" label. Hours are zero-padded; minutes
        # are not (original behavior, kept as-is).
        hr1 = to_str(date1.hour)
        hr2 = to_str(date2.hour)
        minute1 = to_str(date1.minute)
        minute2 = to_str(date2.minute)
        if date1.hour < 10:
            hr1 = "0" + hr1
        if date2.hour < 10:
            hr2 = "0" + hr2
        dt_str=to_str(date1.year)+"/"+to_str(date1.month)+"/"+to_str(date1.day)+" "+hr1+":"+minute1+" - "+\
            to_str(date2.year)+"/"+to_str(date2.month)+"/"+to_str(date2.day)+" "+hr2+":"+minute2
        if metric == constants.METRIC_CPU:
            ylabel = "cpu(%)"
        elif metric == constants.METRIC_VMCPU:
            ylabel = "vm cpu(%)"
        elif metric == constants.METRIC_MEM:
            ylabel = "memory(%)"
        label = dt_str
        series = []
        ymax = 1
        avg = 0.0
        show_avg = False
        if chart_type == constants.TOP5SERVERS:
            metric_type = self.get_metric_type(node_type, per_type)
            (series,ymax)=self.topNServers(auth,node_id,node_type,metric,metric_type,\
                    rollup_type,per_type,date1,date2,period)
        elif chart_type == constants.COMPARISONCHART:
            # -1 is a placeholder metric_type; comparison_chart derives
            # the real one per node.
            (series,ymax)=self.comparison_chart(auth,node_id,node_type,metric,-1,\
                    rollup_type,per_type,date1,date2,period)
        else:
            metric_type = self.get_metric_type(node_type, per_type)
            (series,ymax,avg,show_avg)=self.chart_series_data(node_id,node_type, metric,metric_type,\
                    rollup_type,per_type,date1,date2,avg_fdate,avg_tdate,period)
        if len(series) == 0:
            series.append(dict(data=[], label=""))
        # NOTE(review): shadows the builtins min/max (kept as-is; they
        # are only dict values here).
        min = calendar.timegm(date1.timetuple()) * 1000
        max = calendar.timegm(date2.timetuple()) * 1000
        return dict(time_format=time_format,label=label,xlabel=xlabel,ylabel=ylabel,\
                show_avg=show_avg,avg=avg,min=min,max=max,ymax=ymax,minTick=minTick,series=series)

    def chart_series_data(self,node_id,node_type, metric,metric_type,rollup_type,per_type,\
            date1,date2,avg_fdate=None,avg_tdate=None,period=None):
        """Fetch a single node's series and, when an explicit window is
        given, the average over [avg_fdate, avg_tdate].

        Returns (series, ymax, avg, show_avg).
        """
        series = []
        node_ids = []
        node_ids.append(node_id)
        # `avg` starts as a number but is immediately reused as the
        # cache-bypass flag passed to get_metrics_data (kept as-is).
        avg = 0.0
        avg = False
        if avg_fdate is not None and avg_tdate is not None:
            avg = True
        result = self.get_metrics_data(node_ids, metric, metric_type,
                                       rollup_type, per_type, date1, date2,
                                       period, avg)
        show_avg = False
        if avg_fdate is not None and avg_tdate is not None:
            # Client sends the window as epoch-millisecond offsets.
            avg_tdate = constants.defaultDate + timedelta(
                milliseconds=long(avg_tdate))
            avg_fdate = constants.defaultDate + timedelta(
                milliseconds=long(avg_fdate))
            show_avg = True
        # else:
        #    avg_fdate=date1
        #    avg_tdate=date2
        #    show_avg=False
        # NOTE(review): when no explicit window is supplied these calls
        # receive None dates; show_avg stays False so the value is
        # presumably ignored downstream -- verify.
        if per_type == "ROLLUP":
            avg = self.service.getRollupAvg(node_id, metric, metric_type,
                                            rollup_type, avg_fdate, avg_tdate)
        else:
            avg = self.service.getRawAvg(node_id, node_type, metric,
                                         metric_type, avg_fdate, avg_tdate)
        (data_list, ymax) = self.get_series_data(result)
        series.append(dict(data=data_list, label=""))
        return (series, ymax, avg, show_avg)

    def topNServers(self, auth, node_id, node_type, metric, metric_type,
                    rollup_type, per_type, date1, date2, period):
        """Build one series per top server under a data center or
        server pool. Returns (series, ymax)."""
        series = []
        series.append(dict(data=[], label=""))
        ymx = 2
        srvrs = []
        if node_type == constants.DATA_CENTER:
            # Data center -> groups -> servers.
            site = auth.get_entity(node_id)
            grps = site.children
            for grp in grps:
                srvrs.extend(grp.children)
        elif node_type == constants.SERVER_POOL:
            grp = auth.get_entity(node_id)
            if grp is None:
                return (series, ymx)
            srvrs = grp.children
        srvr_ids = []
        srvr_dict = {}
        for srvr in srvrs:
            srvr_ids.append(srvr.entity_id)
            srvr_dict[srvr.entity_id] = srvr.name
        #print self.service.getRollupTop(node_ids,metric,metric_type,rollup_type,date1,date2)
        # Ranking window is always the last hour (+1s), independent of
        # the chart's own date range.
        dt2 = datetime.utcnow()
        dt1 = dt2 + timedelta(seconds=-3601)
        data_list = []
        tc = TopCache()
        data_list = tc.get_top_entities(node_id, node_type, metric,
                                        "topNservers", auth,
                                        constants.SERVER_RAW, srvr_ids,
                                        dt1, dt2)
        if per_type == 'ROLLUP':
            metric_type = constants.SERVER_ROLLUP
        else:
            metric_type = constants.SERVER_RAW
        # NOTE(review): the loop reuses the names srvr_ids/data_list;
        # iteration stays bound to the original list object, so this
        # works, but it is fragile.
        for data in data_list:
            srvr_ids = []
            srvr_ids.append(data[1])
            result = self.get_metrics_data(srvr_ids, metric, metric_type,
                                           rollup_type, per_type, date1,
                                           date2, period)
            (data_list, ymax) = self.get_series_data(result)
            if ymax > ymx:
                ymx = ymax
            series.append(dict(data=data_list, label=srvr_dict[data[1]]))
        return (series, ymx)

    def comparison_chart(self, auth, node_id, node_type, metric, metric_type,
                         rollup_type, per_type, date1, date2, period):
        """Build one series per '*'-separated node id for side-by-side
        comparison. Returns (series, ymax)."""
        series = []
        series.append(dict(data=[], label=""))
        ymx = 2
        # node_id/node_type are '*'-delimited parallel lists.
        node_ids = node_id.split('*')
        node_types = node_type.split('*')
        i = -1
        for node_id in node_ids:
            i += 1
            ent = auth.get_entity(node_id)
            if ent is None:
                continue
            # Name reuse: node_ids is rebound to a one-element list for
            # the fetch; iteration stays on the original split list.
            node_ids = []
            node_ids.append(node_id)
            metric_type = self.get_metric_type(node_types[i], per_type)
            result = self.get_metrics_data(node_ids, metric, metric_type,
                                           rollup_type, per_type, date1,
                                           date2, period)
            (data_list, ymax) = self.get_series_data(result)
            if ymax > ymx:
                ymx = ymax
            series.append(dict(data=data_list, label=ent.name))
        return (series, ymx)

    def get_metrics_data(self, node_id, metric, metric_type, rollup_type,
                         per_type, date1, date2, period, avg=False):
        """Fetch metric rows; custom ranges and average queries bypass
        the MetricCache. `node_id` is actually a list of ids."""
        result = []
        if period == constants.CUSTOM or avg == True:
            result = self.get_metrics_specific_value(node_id, metric,
                                                     metric_type, rollup_type,
                                                     per_type, date1, date2)
        else:
            from convirt.model.MetricCache import MetricCache
            mc = MetricCache()
            # Cache path keys on the first (only) id.
            result = mc.metric_cache(node_id[0], metric, metric_type,
                                     rollup_type, per_type, date1, date2,
                                     period)
        return result

    def get_metrics_specific_value(self, node_id, metric, metric_type,
                                   rollup_type, per_type, date1, date2):
        """Uncached fetch: rollup table for 'ROLLUP', raw table
        otherwise."""
        result = []
        if per_type == 'ROLLUP':
            result = self.service.getRollupMetricData(node_id, metric,
                                                      metric_type,
                                                      rollup_type,
                                                      date1, date2)
        else:
            result = self.service.getRawMetricData(node_id, metric,
                                                   metric_type, date1, date2)
        return result

    def get_series_data(self, listdata):
        """Convert (value, datetime) rows into chart points and compute
        a padded y-axis maximum. Returns (data_list, ymax)."""
        ymax = 0
        data_list = []
        for ls in listdata:
            #print ls[0],"--",ls[1]
            dt = ls[1]
            millis = calendar.timegm(dt.timetuple()) * 1000
            if ls[0] > ymax:
                ymax = ls[0]
            data_list.append(dict(metric=ls[0], millis=millis))
        # Snap to 100 when within 10 of it; otherwise pad by 2.
        if (100 - ymax) <= 10:
            ymax = 100
        else:
            ymax = ymax + 2
        return (data_list, ymax)

    def get_metric_type(self, node_type, per_type):
        """Map node type + period type to a constants.* metric-type
        value (e.g. constants.SERVER_ROLLUP)."""
        nod_type = ""
        if node_type == constants.DATA_CENTER:
            nod_type = 'DATACENTER'
        elif node_type == constants.SERVER_POOL:
            nod_type = 'SERVERPOOL'
        elif node_type == constants.MANAGED_NODE:
            nod_type = 'SERVER'
        elif node_type == constants.DOMAIN:
            nod_type = 'VM'
        # eval builds the attribute name from internal values only.
        metric_type = eval("constants." + nod_type + "_" + per_type)
        return metric_type

    def timedelta_seconds(self, td):
        # Total seconds of td, computed via milliseconds (integer
        # division in Python 2).
        return (td.days * 86400000 + td.seconds * 1000 + td.microseconds / 1000) / 1000