def get_db(self):
    """Return the shared MysqlDb handle, creating it on first use.

    NOTE(review): this module-level function duplicates
    ApiServer.get_db below — callers should prefer the method; this
    copy looks like a leftover and is a candidate for removal.
    """
    if self.db is None:
        # Lazily open the MySQL connection with the stored config.
        self.db = MysqlDb(self.mysql_cfg)
    return self.db
class ApiServer(): def __init__(self, mysql_cfg={}, log_file="/tmp/api-server.log", log_level=logging.NOTSET): # cassandra database object self.db = None self.mysql_cfg=mysql_cfg; self.buf = HallBuffer() self.logger = logging.getLogger() handler = logging.FileHandler(log_file) self.logger.addHandler(handler) self.logger.setLevel(log_level) def get_db(self): if self.db is None: self.db=MysqlDb(self.mysql_cfg); # self.db = CassaDb('data', self.db_host) return self.db ### change by lanjinsong 2012-08-10 def get_instances_list(self,start_time=time.strftime("%Y-%m-%d %T",time.gmtime(0))): """ show all instances in the databases""" db=self.get_db(); return self.db.get_instances_in_cache(); #result_set=db.get_all_instances(start_time); #print type(result_set),'result_set=',result_set; #instances=[]; #for row in result_set: # instances.append(row[0]); #return instances; def __get_instances_list(self, cf_str): if not isinstance(cf_str, unicode): print 'param types error' return None ret = list() limit = 20000 time_to = int(time.time()) time_from = time_to - 24 * 60 * 60 db = self.get_db() rs = db.get_range2(cf_str, row_count=20) return list(rs) if not rs is None: print rs for i in rs: ret.append(i[0]) return ret def __get_instances_list(self, cf_str): if not isinstance(cf_str, unicode): print 'param types error' return None ret = list() limit = 20000 time_to = int(time.time()) time_from = time_to - 24 * 60 * 60 db = self.get_db() rs = db.get_range2(cf_str, row_count=20) return list(rs) if not rs is None: print rs for i in rs: ret.append(i[0]) return ret def get_by_instance_id(self, row_id, cf_str): if not isinstance(row_id, unicode) \ or not isinstance(cf_str, unicode): print 'param types error' return None, 0, True db = self.get_db() rs = db.getbykey(cf_str, row_id) count = 0 if rs is None else len(rs) return rs, count, False if (count == 20000) else True ## change by lanjinsong def query_usage_report(self,args,**kwargs): """ query usage report modified by lanjinsong to 
use MySQL""" instance_id=str(args['instance_id']); start_time=args['start_time'] if ('start_time' in args) else time.strftime("%Y-%m-%d %T",time.gmtime(0)); db=self.get_db(); #result_set=db.get_instance_info(instance_id,start_time); result_set=db.get_instance_info_in_cache(instance_id); # print 'instance_id=%s start_time=%s'%(instance_id,start_time); # print 'result_set=',result_set; return result_set; ### TODO: decode it results=[]; for row in result_set: info=list(row); # print 'info=',info,'type of info[-1]=',type(info[-1]); info[-1]=info[-1].strftime("%Y-%m-%d %T"); results.append(info); return results; def __query_usage_report(self, args, **kwargs): # TODO: how to use kwargs? # def query_usage_report(self, arg, id=None, metric='cpu', # metric_param='total', # statistic='avg', period=5, # timestamp_from=None, timestamp_to=None, # **kwargs): """statistic is STATISTIC enum period default=5 minutes time_to default=0(now)""" """ { 'id': 'instance00001', 'metric': 'network', 'metric_param': 'vnet0', 'statistic': 'sum', 'period': 5, 'timestamp_from': '2012-02-20T12:12:12', 'timestamp_to': '2012-02-22T12:12:12', } """ # usage_report = dict() # datetime_from = iso8601.parse_date(timestamp_from) # datetime_to = iso8601.parse_date(timestamp_to) # # TODO: implement # return {'data': usage_report} row_id = args['id'] cf_str = args['metric'] scf_str = args['metric_param'] statistic = args['statistic'] period = int(args['period']) timestamp_from = args['timestamp_from'] timestamp_to = args['timestamp_to'] time_from = iso8601.parse_date(timestamp_from) time_from = int(time.mktime(time_from.timetuple())) time_to = int(time.time()) if not timestamp_to is None: time_to = iso8601.parse_date(timestamp_to) time_to = int(time.mktime(time_to.timetuple())) bufkey = str([row_id, cf_str, scf_str, statistic, period, time_from, time_to]) if self.buf.hit_test(bufkey): print "buffer hit:", bufkey return self.buf.get_buf(bufkey) ret_len = 0 (rs, count, all_data) = self.get_data(row_id, 
cf_str, scf_str, time_from, time_to) if not rs is None and count > 0: buf = self.analyize_data(rs, 1, statistic) ret = self.analyize_data(buf, period, statistic) if ret is None: ret_len = 0 else: ret = OrderedDict(sorted(ret.items(), key=lambda t: t[0])) ret_len = len(ret) print ret_len, "result." else: print "no result." ret = None ret_len = 0 result = ret, ret_len, all_data if (not result is None and time.time() - time_to > 120): self.buf.cleanup() self.buf.save(bufkey, result) return result