def update(self, doc_id, body, params=None):
    try:
        logger.debug("ES:%s,update=%s,=%s" % (self.doc_type, doc_id, body))
        try:
            body["doc"]['@ts'] = get_now_ts()
        except Exception as e:
            logger.error("update error=%s" % e, exc_info=True)
        # VersionConflictEngineException
        if params is None:
            params = {}
        params['retry_on_conflict'] = 5
        ret = self.es.update(index=self.index_name, id=doc_id, doc_type=self.doc_type,
                             body=body, params=params)
        # ret = self.es.update(index=self.index_name, id=doc_id, body=body, params=params)
    except Exception as e:
        logger.warning("update:%s,%s,%s,%s" % (doc_id, body, params, e), exc_info=True)
        return None
    return ret
def get_media_flv_info(access_token, fpath):
    url_path = 'file'
    params = {
        "method": 'streaming',
        "access_token": access_token,
        "path": fpath,
        "type": "M3U8_FLV_264_480",
        "nom3u8": 1
    }
    ua = "xpanvideo;{app};{ver};{sys};{sys_ver};flv".format(app="netdisk", ver='2.2.1',
                                                            sys="pc-mac", sys_ver="10.13.6")
    headers = {"User-Agent": ua}
    url = "%s/%s" % (POINT, url_path)
    logger.info("get_media_flv_info params:{}, url:{}".format(params, url))
    rs = requests.get(url, params=params, headers=headers, verify=False)
    jsonrs = rs.json()
    err_no = jsonrs["errno"]
    if err_no:
        err_msg = jsonrs.get("err_msg", "")
        if not err_msg:
            err_msg = PAN_ERROR_CODES.get(err_no, "")
        jsonrs["err_msg"] = err_msg
    else:
        mlink = "%s/%s" % (POINT, url_path)
        mlink = "{qp}?method=streaming&path={path}&type=M3U8_FLV_264_480&adToken={adToken}".format(
            qp=mlink, path=fpath, adToken=jsonrs['adToken'])
        jsonrs['mlink_start_at'] = jsonrs['ltime'] + get_now_ts()
        jsonrs['mlink'] = mlink
    logger.info("get_media_flv_info jsonrs:{}".format(jsonrs))
    return jsonrs
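# Illustrative sketch only (not part of the original module): how a caller might
# consume get_media_flv_info. The file path below is a hypothetical placeholder;
# the errno / err_msg / mlink / mlink_start_at keys are the ones populated by the
# function above.
def _demo_fetch_flv_link(access_token, fpath="/videos/sample.mp4"):
    info = get_media_flv_info(access_token, fpath)
    if info.get("errno"):
        # Non-zero errno: err_msg has been filled in from the response or PAN_ERROR_CODES.
        print("streaming failed:", info["errno"], info.get("err_msg"))
        return None
    # Success: 'mlink' is the playable FLV streaming URL; 'mlink_start_at' appears to
    # mark link validity (the returned ltime added to the current timestamp).
    print("play url:", info["mlink"], "valid from:", info["mlink_start_at"])
    return info["mlink"]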
def index(self, doc_id, doc):
    try:
        if 'doc' in doc:
            doc["doc"]['@ts'] = get_now_ts()
            doc["doc"]['@is_removed'] = 0
        else:
            doc['@ts'] = get_now_ts()
            doc['@is_removed'] = 0
    except Exception as e:
        logger.error("index=%s" % e, exc_info=True)
    # logger.debug("ES:index=%s" % doc)
    # print("index_name:%s,%s" % (self.index_name, self.doc_type))
    return self.es.index(index=self.index_name, id=doc_id, doc_type=self.doc_type, body=doc)
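# Illustrative sketch only: how the index/update pair above is typically driven.
# "es_dao" stands in for an instance of the wrapper class that owns index() and
# update(); the instance name, document id, and fields are assumptions for the example.
def _demo_es_index_and_update(es_dao):
    # index() stamps '@ts' and '@is_removed' on the document (or on doc['doc']).
    es_dao.index("item:1001", {"name": "movie.mkv", "size": 1024})
    # update() expects the partial-update envelope {"doc": {...}} used by the
    # Elasticsearch update API; '@ts' is refreshed and conflicts are retried 5 times.
    es_dao.update("item:1001", {"doc": {"size": 2048}})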
def _clear_cache():
    if get_now_ts() - LAST_CLEAR_CACHE_CONST['tm'] > LAST_CLEAR_CACHE_CONST['timeout']:
        _l = len(DATA_CACHES_TIMEOUT_KEYS_INDEX)
        idx = 0
        for idx in range(_l, 0, -1):
            data_obj = DATA_CACHES_TIMEOUT_KEYS_INDEX[idx - 1]
            tm = data_obj.get('tm', 0)
            time_out = data_obj.get('to', 0)
            if time_out and get_now_ts() - tm > time_out:
                print("find timeout idx:", idx)
                break
        if idx > 0:
            print("clear cache by idx:", idx)
            for i in range(idx, 0, -1):
                d = DATA_CACHES_TIMEOUT_KEYS_INDEX.pop(i - 1)
                k = d['key']
                _get_from_cache(k)
        LAST_CLEAR_CACHE_CONST['tm'] = get_now_ts()
def _get_from_cache(key):
    data_obj = DATA_CACHES.get(key, None)
    if not data_obj:
        return None
    tm = data_obj.get('tm', 0)
    time_out = data_obj.get('to', 0)
    if time_out and get_now_ts() - tm > time_out:
        DATA_CACHES.pop(key)
        return None
    else:
        print("_get_from_cache hit ok! key:", key)
        return data_obj.get('data', None)
def _put_to_cache(key, val, timeout_seconds=0):
    data_obj = {
        'data': val,
        'tm': get_now_ts(),
        'to': timeout_seconds,
        'key': key
    }
    print("_put_to_cache key:", key)
    DATA_CACHES[key] = data_obj
    if timeout_seconds > 0:
        DATA_CACHES_TIMEOUT_KEYS_INDEX.append(data_obj)
        DATA_CACHES_TIMEOUT_KEYS_INDEX.sort(key=lambda el: el['tm'] + el['to'])
    _clear_cache()
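# Illustrative sketch only: the intended lifecycle of the module-level cache helpers
# above. The key and timeout values are arbitrary examples; expired entries are
# dropped either on read (_get_from_cache) or by the periodic sweep (_clear_cache).
def _demo_cache_roundtrip():
    _put_to_cache("pan:quota:42", {"used": 10}, timeout_seconds=60)
    hit = _get_from_cache("pan:quota:42")   # {"used": 10} while the entry is fresh
    miss = _get_from_cache("no:such:key")   # None for unknown keys
    return hit, miss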
def __run():
    dir_data_item_id = item_id
    key = "sync:pan:dir:%s_%s" % (user_id, pan_id)
    not_exists = cache_service.put(key, get_now_ts())
    if not_exists:
        rs_key = "synced:pan:dir:%s_%s" % (user_id, pan_id)
        cache_service.rm(rs_key)
        root_data_item: DataItem = DataDao.get_data_item_by_id(dir_data_item_id)
        self.sync_dir_file_list(root_data_item, recursion)
        self.__thread = None
        cache_service.rm(key)
        cache_service.put(rs_key, root_data_item.id)
        mpan_service.update_dir_size(root_data_item)
    try_release_conn()
def async_checkout_client_item(self, prefix, suffix, action: Callable[..., dict] = None,
                               final_call: Callable = None):
    ctx = self
    key = "async:%s:%s" % (prefix, suffix)
    rs_key = "async:%s:rs:%s:" % (prefix, suffix)

    def __run():
        logger.info("thread to run in.")
        # cache_service.rm(rs_key)
        rs = {}
        if action:
            try:
                rs = action(key, rs_key)
            except Exception:
                logger.error("exe action failed.", exc_info=True)
        self.__thread = None
        cache_service.rm(key)
        rs['end'] = 1
        # cache_service.put(rs_key, rs)
        self.update_state(prefix, suffix, rs)
        if final_call:
            try:
                final_call()
            except Exception:
                pass
        try_release_conn()
        ctx.release_thread()

    __thread = self.__build_thread(__run)
    if __thread:
        not_exists = cache_service.put_on_not_exists(key, get_now_ts())
        if not_exists:
            __thread.start()
        else:
            return {"state": "block"}
    else:
        return {"state": "block"}
    return {"state": "run"}
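# Illustrative sketch only: wiring a worker into async_checkout_client_item.
# "client_service" stands in for the object exposing the method above, and
# do_checkout is a hypothetical callable; its (key, rs_key) signature mirrors how
# __run invokes the action, and its dict result is merged into the published state.
def _demo_async_checkout(client_service):
    def do_checkout(key, rs_key):
        # long-running work goes here; the returned dict becomes the result state
        return {"checked": True}

    state = client_service.async_checkout_client_item("client", "42", action=do_checkout)
    # {"state": "run"} when a worker thread was started; {"state": "block"} when
    # another run still holds the lock key or no thread slot is available.
    return state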
def get_current_user(self):
    # headers = self.request.headers
    # print("get_current_user headers:", headers)
    # print("get_current_user in...")
    # print("get_current_user user_payload:", self.user_payload)
    if self.user_payload:
        if 'id' in self.user_payload:
            tm = self.user_payload['tm']
            ctm = get_now_ts()
            # print('payload:', self.user_payload, ctm, ctm - tm, LOGIN_TOKEN_TIMEOUT)
            if ctm - tm > LOGIN_TOKEN_TIMEOUT:
                self.set_cookie('pan_site_force', str(1))
                logger.info('token expired!!!')
                return False
            setattr(self.request, 'user_id', self.user_payload['user_id'])
            return True
    if self.is_web:
        logger.info('set is_web is 1, path:{}'.format(self.request.path))
        self.set_cookie('pan_site_is_web', str(1))
        self.set_cookie('pan_site_ref', self.request.path)
    return False
        self.SYNC_PAN_DIR_CACHES[key] = val

    def rm(self, key):
        if key in self.SYNC_PAN_DIR_CACHES:
            return self.SYNC_PAN_DIR_CACHES.pop(key)
        return None

    def get(self, key):
        if key in self.SYNC_PAN_DIR_CACHES:
            return self.SYNC_PAN_DIR_CACHES[key]
        return None


cache_service = CacheService()

DATA_CACHES_TIMEOUT_KEYS_INDEX = []
LAST_CLEAR_CACHE_CONST = dict(tm=get_now_ts(), timeout=3600)
DATA_CACHES = {}
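# Illustrative sketch only: the in-process lock pattern built on cache_service, as
# used by the sync/async routines above. The lock key below is a hypothetical
# example; only the put/get/rm calls shown in this module are relied upon.
def _demo_cache_service_lock():
    lock_key = "sync:pan:dir:demo"
    if cache_service.get(lock_key) is None:
        cache_service.put(lock_key, get_now_ts())   # mark the job as running
        try:
            pass  # do the guarded work here
        finally:
            cache_service.rm(lock_key)              # release the lock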
def login_check_user(self, acc: Accounts, need_update_login_time=True, source="BD"):
    need_renew_pan_acc = []
    if acc:
        pan_acc_list = DataDao.pan_account_list(acc.id)
        # pan_acc: PanAccounts = DataDao.pan_account_list(acc.id)
        need_renew_access_token = False
        l = len(pan_acc_list)
        for pan_acc in pan_acc_list:
            if pan_acc.client_id != self.client_id or pan_acc.client_secret != self.client_secret:
                need_renew_access_token = True
                need_renew_pan_acc.append({"id": pan_acc.id, "name": pan_acc.name,
                                           "use_cnt": pan_acc.use_count, "refresh": False,
                                           'auth': self.pan_auth})
            elif pan_acc.access_token and pan_acc.token_updated_at:
                tud = arrow.get(pan_acc.token_updated_at).replace(tzinfo=self.default_tz)
                if (arrow.now(self.default_tz) - tud).total_seconds() > PAN_ACCESS_TOKEN_TIMEOUT:
                    need_renew_access_token = True
                    need_renew_pan_acc.append({"id": pan_acc.id, "name": pan_acc.name,
                                               "use_cnt": pan_acc.use_count, "refresh": True,
                                               'auth': self.pan_auth})
            else:
                need_renew_access_token = True
                need_renew_pan_acc.append({"id": pan_acc.id, "name": pan_acc.name,
                                           "use_cnt": pan_acc.use_count, "refresh": True,
                                           'auth': self.pan_auth})
        if l == 0:
            need_renew_access_token = True
        # if pan_acc and pan_acc['access_token'] and pan_acc['token_updated_at']:
        #     tud = arrow.get(pan_acc['token_updated_at']).replace(tzinfo=self.default_tz)
        #     if (arrow.now(self.default_tz) - tud).total_seconds() < PAN_ACCESS_TOKEN_TIMEOUT:
        #         need_renew_access_token = False
        lud = arrow.get(acc.login_updated_at).replace(tzinfo=self.default_tz)
        diff = arrow.now(self.default_tz) - lud
        params = {}
        if (need_update_login_time and diff.total_seconds() > LOGIN_TOKEN_TIMEOUT) or not acc.login_token:
            if not acc.fuzzy_id:
                acc.fuzzy_id = obfuscate_id(acc.id)
                params["fuzzy_id"] = acc.fuzzy_id
            # login_token = make_token(acc.fuzzy_id)
            login_token, _ = auth_service.build_user_payload(acc)
            acc.login_token = login_token
            params["login_token"] = login_token
            lud = params["login_updated_at"] = get_now_datetime()
            DataDao.update_account_by_pk(acc.id, params=params)
        else:
            tk = acc.login_token
            if tk:
                user_payload = get_payload_from_token(tk)
                if user_payload:
                    tm = user_payload['tm']
                    ctm = get_now_ts()
                    if ctm - tm > LOGIN_TOKEN_TIMEOUT:
                        # login_token = make_token(acc.fuzzy_id)
                        login_token, _ = auth_service.build_user_payload(acc)
                        acc.login_token = login_token
                        params["login_token"] = login_token
                        lud = params["login_updated_at"] = get_now_datetime()
                        DataDao.update_account_by_pk(acc.id, params=params)
        log.debug("login_token:{}".format(acc.login_token))
        result = {"need_renew_access_token": need_renew_access_token}
        if need_renew_access_token:
            result['auth'] = self.pan_auth
        result['token'] = acc.login_token
        result['login_at'] = int(arrow.get(lud).timestamp * 1000)
        # print('login_at:', result['login_at'])
        result['pan_acc_list'] = need_renew_pan_acc
        self.__patch_acc_ext(acc, result, source)
        # account_ext = DataDao.account_ext_by_acc_id(acc.id)
        # result['username'] = account_ext.username
        # result['portrait'] = account_ext.portrait
        result['id'] = acc.fuzzy_id
        return result
    return None
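# Illustrative sketch only: consuming the dict returned by login_check_user.
# "auth_ctx" stands in for the service instance that owns the method, and the
# account argument is assumed to be an Accounts row loaded elsewhere; the result
# keys are the ones assembled above (token, login_at, need_renew_access_token,
# pan_acc_list, id).
def _demo_login_check(auth_ctx, acc):
    result = auth_ctx.login_check_user(acc, need_update_login_time=True, source="BD")
    if not result:
        return None
    if result["need_renew_access_token"]:
        # The caller is expected to walk pan_acc_list and re-run the pan OAuth
        # flow using result['auth'] for each entry flagged here.
        print("pan accounts needing token refresh:", len(result["pan_acc_list"]))
    return result["token"], result["login_at"]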