def get_speed(col_name, start_time, end_time):
    """Count successful / failed task records in a mongo collection between two
    times and derive the queries-per-second over that interval.

    :param col_name: mongo collection name (one collection per queue/function).
    :param start_time: interval start, anything time_util.DatetimeConverter accepts.
    :param end_time: interval end, same accepted formats.
    :return: dict with 'success_num', 'fail_num' and 'qps' (rounded to 1 decimal).
    """
    condition = {
        'insert_time': {
            '$gt': time_util.DatetimeConverter(start_time).datetime_obj,
            '$lt': time_util.DatetimeConverter(end_time).datetime_obj
        },
    }
    with decorators.TimerContextManager():
        success_num = db.get_collection(col_name).count({**{'success': True}, **condition})
        fail_num = db.get_collection(col_name).count({**{'success': False}, **condition})
        interval = (time_util.DatetimeConverter(end_time).timestamp
                    - time_util.DatetimeConverter(start_time).timestamp)
        # Guard against a zero/negative interval (e.g. start_time == end_time),
        # which previously raised ZeroDivisionError.
        qps = (success_num + fail_num) / interval if interval > 0 else 0
        return {
            'success_num': success_num,
            'fail_num': fail_num,
            'qps': round(qps, 1)
        }
def statistic_by_period(self, t_start: str, t_end: str):
    """Return the number of documents whose insert_time lies strictly between
    t_start and t_end (both given as datetime strings)."""
    period_filter = {
        'insert_time': {
            '$gt': time_util.DatetimeConverter(t_start).datetime_obj,
            '$lt': time_util.DatetimeConverter(t_end).datetime_obj,
        }
    }
    return self.col.count(period_filter)
def query_result(
        col_name,
        start_time,
        end_time,
        is_success,
        function_params: str,
        page,
):
    """Query up to 100 task-result documents from a collection, paginated.

    :param col_name: mongo collection name.
    :param start_time: lower bound for insert_time (exclusive).
    :param end_time: upper bound for insert_time (exclusive).
    :param is_success: '2'/2/True filters successes, '3'/3/False filters
        failures, anything else means no success filter.
    :param function_params: substring/regex to match against params_str;
        blank means no filtering.
    :param page: 0-based page index, 100 documents per page.
    :return: list of matching documents (insert_time and utime fields excluded).
    """
    condition = {
        'insert_time': {
            '$gt': time_util.DatetimeConverter(start_time).datetime_obj,
            '$lt': time_util.DatetimeConverter(end_time).datetime_obj
        },
    }
    if is_success in ('2', 2, True):
        condition['success'] = True
    elif is_success in ('3', 3, False):
        condition['success'] = False
    keyword = function_params.strip()
    if keyword:
        condition['params_str'] = {'$regex': keyword}
    cursor = db.get_collection(col_name).find(
        condition, {'insert_time': 0, 'utime': 0}
    ).skip(int(page) * 100).limit(100)
    return list(cursor)
def build_result(self):
    """Populate self.result with task counts over four rolling windows:
    the last 10 days, 24 hours, 60 minutes and 60 seconds.

    Each window gets parallel 'time_arr' / 'count_arr' lists, oldest first.
    The four original copy-pasted loops are factored into two helpers.
    """
    with decorators.TimerContextManager():
        self._fill_recent_days()
        self._fill_recent_periods('recent_24_hours', 24, 'hours', '%Y-%m-%d %H:00:00')
        self._fill_recent_periods('recent_60_minutes', 60, 'minutes', '%Y-%m-%d %H:%M:00')
        self._fill_recent_periods('recent_60_seconds', 60, 'seconds', '%Y-%m-%d %H:%M:%S')

def _fill_recent_days(self):
    """Fill the 'recent_10_days' window; day boundaries are built from
    time_util's date_str plus ' 00:00:00' (kept separate because the other
    windows use strftime directly)."""
    for i in range(10):
        t1 = datetime.datetime.now() + datetime.timedelta(days=-(9 - i))
        t2 = datetime.datetime.now() + datetime.timedelta(days=-(8 - i))
        day_str = time_util.DatetimeConverter(t1).date_str
        self.result['recent_10_days']['time_arr'].append(day_str)
        count = self.statistic_by_period(
            day_str + ' 00:00:00',
            time_util.DatetimeConverter(t2).date_str + ' 00:00:00')
        self.result['recent_10_days']['count_arr'].append(count)

def _fill_recent_periods(self, result_key, n, unit, fmt):
    """Fill one window of n consecutive periods.

    :param result_key: key inside self.result ('recent_24_hours', ...).
    :param n: number of periods in the window.
    :param unit: datetime.timedelta keyword ('hours', 'minutes' or 'seconds').
    :param fmt: strftime format that truncates a timestamp to its period start.
    """
    for i in range(n):
        t1 = datetime.datetime.now() + datetime.timedelta(**{unit: -(n - 1 - i)})
        t2 = datetime.datetime.now() + datetime.timedelta(**{unit: -(n - 2 - i)})
        self.result[result_key]['time_arr'].append(t1.strftime(fmt))
        count = self.statistic_by_period(t1.strftime(fmt), t2.strftime(fmt))
        self.result[result_key]['count_arr'].append(count)
def index(logs_dir=''):
    """Render a directory-listing page for *logs_dir* (taken relative to the
    filesystem root). Directories are listed before files; each entry carries
    size (MB), mtime, a view url and a download url."""
    current_app.logger.debug(logs_dir)
    file_entries = []
    dir_entries = []
    for entry in (Path('/') / Path(logs_dir)).iterdir():
        # Normalize windows separators so url paths are stable.
        fullname = str(entry).replace('\\', '/')
        if entry.is_file():
            current_app.logger.debug(str(entry))
            current_app.logger.debug(
                url_for('download_file', fullname=fullname[0:]))
            file_entries.append({
                'is_dir': 0,
                'filesize': os.path.getsize(entry) / 1000000,
                'last_modify_time': time_util.DatetimeConverter(
                    os.stat(str(fullname)).st_mtime).datetime_str,
                # Strip the leading '/' so url_for builds a relative path arg.
                'url': url_for('view', fullname=fullname[1:]),
                'download_url': url_for('download_file', fullname=fullname[1:]),
                'fullname': fullname,
            })
        if entry.is_dir():
            fullname = str(entry).replace('\\', '/')
            dir_entries.append({
                'is_dir': 1,
                'filesize': 0,
                'last_modify_time': time_util.DatetimeConverter(
                    os.stat(str(entry)).st_mtime).datetime_str,
                'url': url_for('index', logs_dir=fullname[1:]),
                'download_url': url_for('index', logs_dir=fullname[1:]),
                'fullname': fullname,
            })
    return render_template('dir_view.html',
                           ele_list=dir_entries + file_entries,
                           logs_dir=logs_dir)
def get_status_dict(self, without_datetime_obj=False):
    """Build the status/result document for this task run.

    :param without_datetime_obj: when True, the returned dict contains only
        json-friendly values (no datetime objects).
    :return: dict snapshot of this object's state plus bookkeeping fields
        (insert_time_str, insert_minutes, _id, optionally insert_time/utime).
    """
    self.time_cost = round(time.time() - self.time_start, 3)
    # Copy instead of aliasing self.__dict__: the previous code aliased it, so
    # every key written below (insert_time, _id, truncated result, ...) leaked
    # back onto the instance and accumulated across calls.
    item = dict(self.__dict__)
    # These look like computed properties (not in __dict__), so set explicitly.
    item['host_name'] = self.host_name
    item['host_process'] = self.host_process
    item['script_name'] = self.script_name
    item['script_name_long'] = self.script_name_long
    datetime_str = time_util.DatetimeConverter().datetime_str
    try:
        # Refuse to store non-json-serializable result objects; storing such
        # complex types is considered a non-requirement here.
        json.dumps(item['result'])
    except TypeError:
        item['result'] = str(item['result'])[:1000]
    item.update({'insert_time_str': datetime_str,
                 'insert_minutes': datetime_str[:-3],
                 })
    if not without_datetime_obj:
        item.update({'insert_time': datetime.datetime.now(),
                     'utime': datetime.datetime.utcnow(),
                     })
    else:
        item = delete_keys_and_return_new_dict(item, ['insert_time', 'utime'])
    item['_id'] = str(uuid.uuid4())
    self.logger.debug(item)
    return item
def _judge_is_daylight(self):
    """Return True (after logging a warning) when the "do not run by specify
    time" guard is enabled and the current time-of-day string falls inside the
    configured window; otherwise return None implicitly."""
    if not self._is_do_not_run_by_specify_time_effect:
        return
    window_start = self._do_not_run_by_specify_time[0]
    window_end = self._do_not_run_by_specify_time[1]
    # String comparison of time_str values — assumes zero-padded HH:MM:SS
    # so lexicographic order matches chronological order; TODO confirm format.
    if window_start < time_util.DatetimeConverter().time_str < window_end:
        self.logger.warning(
            f'现在时间是 {time_util.DatetimeConverter()} ,现在时间是在 {self._do_not_run_by_specify_time} 之间,不运行'
        )
        return True
def to_dict(self):
    """Serialize this priority-consuming-control config to a dict.

    A datetime countdown is first normalized to its string form. Fields left
    at None (the framework defaults) are dropped so the message sent to the
    middleware stays short."""
    if isinstance(self.countdown, datetime.datetime):
        self.countdown = time_util.DatetimeConverter(self.countdown).datetime_str
    return {key: value for key, value in self.__dict__.items() if value is not None}
def _send_heartbeat(self):
    """Refresh this consumer's heartbeat entry in the shared redis set.

    Each member has the form '<identification>&&<datetime_str>'. Members that
    are stale (heartbeat older than 15 seconds) or that belong to this consumer
    are removed, then this consumer is re-added with a fresh timestamp — all
    within one pipeline round-trip."""
    members = self.redis_db_frame.smembers(self._redis_key_name)
    with self.redis_db_frame.pipeline() as pipe:
        for member in members:
            identification = member.decode().split('&&')[0]
            heartbeat_str = member.decode().split('&&')[-1]
            is_stale = time.time() - time_util.DatetimeConverter(heartbeat_str).timestamp > 15
            if is_stale or identification == self._consumer_identification:
                pipe.srem(self._redis_key_name, member)
        pipe.sadd(self._redis_key_name,
                  f'{self._consumer_identification}&&{time_util.DatetimeConverter().datetime_str}')
        pipe.execute()
def __init__(self, queue_name, fucntion_name, params):
    """Initialize a task-status record for one message consumption.

    :param queue_name: queue this message came from.
    :param fucntion_name: consuming function's name (parameter name kept
        as-is for caller compatibility).
    :param params: raw message params; an optional publish_time is extracted
        and the remaining keys become the function params.
    """
    self.queue_name = queue_name
    self.function = fucntion_name
    publish_time = _get_publish_time(params)
    if publish_time:
        self.publish_time_str = time_util.DatetimeConverter(publish_time).datetime_str
    self.params = _delete_keys_and_return_new_dict(params, )
    self.params_str = json.dumps(self.params, ensure_ascii=False)
    self.result = ''
    self.run_times = 0
    self.exception = ''
    self.time_start = time.time()
    self.time_cost = None
    self.success = False
    self.total_thread = threading.active_count()
    self.set_log_level(20)
def __init__(self, queue_name, fucntion_name, params):
    """Initialize a task-status record for one message consumption.

    :param queue_name: queue this message came from.
    :param fucntion_name: consuming function's name (parameter name kept
        as-is for caller compatibility).
    :param params: raw message params; publish_time/publish_time_format are
        stripped out, the remaining keys become the function params.
    """
    self.queue_name = queue_name
    self.function = fucntion_name
    if 'publish_time' in params:
        self.publish_time_str = time_util.DatetimeConverter(params['publish_time']).datetime_str
    self.params = delete_keys_and_return_new_dict(params, ['publish_time', 'publish_time_format'])
    self.params_str = json.dumps(self.params)
    self.result = ''
    self.run_times = 0
    self.exception = ''
    self.time_start = time.time()
    self.time_cost = None
    self.success = False
    self.current_thread = ConsumersManager.get_concurrent_info()
    self.total_thread = threading.active_count()
    self.set_log_level(20)