def update_user(self, params):
    oid = 0
    if 'fuzzy_id' in params:
        fuzzy_id = params['fuzzy_id']
        oid = decrypt_user_id(fuzzy_id)
    name = params['name']
    nickname = params['nickname']
    mobile_no = params['mobile_no']
    exists_user = self.check_user(name, mobile_no)
    # print('exists_user:', exists_user)
    password = params['password']
    org_id = params['org']
    role_id = params['role']
    extroles = params['extroles']
    extorgs = params['extorgs']
    type = params['type']
    if oid:
        AuthDao.update_account(
            oid, name, nickname, password, org_id, role_id, type, extorgs,
            extroles, get_now_datetime(),
            lambda account: self.build_user_payload(account))
    else:
        AuthDao.new_account(
            name, nickname, mobile_no, password, org_id, role_id, type,
            extorgs, extroles, get_now_datetime(),
            lambda account: self.build_user_payload(account))
    return True
def check_expired_pan_account_by_id(cls, pan_id, callback=None):
    _pan_account_list = PanAccounts.select().where(
        PanAccounts.id == pan_id,
        PanAccounts.expires_at < get_now_datetime())
    if callback:
        return callback(_pan_account_list)
    return None
def cb(pan_accounts):
    now = arrow.now(self.default_tz)
    pan: PanAccounts = None
    need_sleep = False
    for pan in pan_accounts:
        if pan.refresh_token:
            if need_sleep:
                time.sleep(3)
            jsonrs = restapi.refresh_token(pan.refresh_token, True)
            access_token = jsonrs["access_token"]
            refresh_token = jsonrs["refresh_token"]
            expires_in = jsonrs["expires_in"] - 20 * 60  # seconds
            expires_at = now.shift(seconds=+expires_in).datetime
            now_tm = get_now_datetime()
            DataDao.update_pan_account_by_pk(
                pan.id, {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                    "expires_at": expires_at,
                    "token_updated_at": now_tm
                })
            pan.access_token = access_token
            pan.refresh_token = refresh_token
            pan.expires_at = expires_at
            pan.token_updated_at = now_tm
            try:
                log.info("sync pan user[{},{}] info to db!".format(
                    access_token, pan.user_id))
                self.sync_pan_user_info(access_token, pan.user_id)
            except Exception as e:
                log.error("sync_pan_user_info err:", exc_info=True)
            need_sleep = True
    return pan
def _new_user(self, name, password, nickname, access_token, refresh_token,
              expires_at, context):
    mobile_no = ''
    origin_name = name
    exists_user = AuthDao.check_user_only_by_name(name)
    dog = 20
    while dog > 0 and exists_user:
        name = "{}_{}".format(origin_name, str(int(time.time()))[-4:])
        exists_user = AuthDao.check_user_only_by_name(name)
        time.sleep(1)
        dog = dog - 1
    org_id = NEW_USER_DEFAULT['org_id']
    role_id = NEW_USER_DEFAULT['role_id']
    extroles = []
    extorgs = []
    type = USER_TYPE['SINGLE']
    user_token, user_ext_dict = AuthDao.new_account(
        name, nickname, mobile_no, password, org_id, role_id, type, extorgs,
        extroles, get_now_datetime(),
        lambda account, ctx: self._default_new_user_build_user_payload(
            account, ctx), {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "expires_at": expires_at,
                "accext": context
            })
    return user_token, user_ext_dict
def _default_new_user_build_user_payload(self, account: Accounts, params):
    auth_user_dict = AuthDao.auth_user(account.id)
    fuzzy_id = obfuscate_id(account.id)
    auth_user_dict['id'] = fuzzy_id
    auth_user_dict['_id'] = account.id
    auth_user_dict['login_updated_at'] = account.login_updated_at
    access_token = params.get('access_token')
    refresh_token = params.get('refresh_token')
    expires_at = params.get('expires_at')
    account_ext_ctx = params.get('accext', {})
    client_id = PAN_SERVICE['client_id']
    client_secret = PAN_SERVICE['client_secret']
    account_ext_ctx['account_id'] = account.id
    log.info("will new account ext:{}".format(account_ext_ctx))
    acc_ext: AccountExt = DataDao.new_accounts_ext(**account_ext_ctx)
    log.info("new account ext ok acc_ext id:{}".format(acc_ext.id))
    pan_acc_id = DataDao.new_pan_account(account.id, account.name, client_id,
                                         client_secret, access_token,
                                         refresh_token, expires_at,
                                         get_now_datetime(), pin=1,
                                         bd_uid=acc_ext.user_id)
    auth_user_dict['_p'] = obfuscate_id(pan_acc_id)
    # print("auth_user_dict:", auth_user_dict)
    tk = make_account_token(auth_user_dict)
    # print('make_account_token:', tk)
    return tk, auth_user_dict
def check_expired_pan_account(cls, size=10, callback=None):
    fetch_size = size
    while fetch_size == size:
        _pan_account_list = PanAccounts.select().where(
            PanAccounts.expires_at < get_now_datetime()).limit(size)
        fetch_size = len(_pan_account_list)
        if callback:
            callback(_pan_account_list)
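# Hedged usage sketch, not part of the original source: it wires the batch scan above
# to the `cb` refresh callback defined earlier. The helper name and the `dao` parameter
# are hypothetical; in this project the scan is presumably invoked through the DAO class
# that owns check_expired_pan_account (e.g. DataDao.check_expired_pan_account(...)).
def refresh_all_expired_pan_accounts(dao, refresh_cb, batch_size=10):
    # Pull expired pan accounts in batches and let the callback refresh their tokens;
    # the loop inside check_expired_pan_account stops once a batch comes back smaller
    # than batch_size.
    dao.check_expired_pan_account(size=batch_size, callback=refresh_cb)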
def loop_ad_tasks(cls):
    rs = {}
    task: LoopAdTask = LoopAdTask.select().where(
        LoopAdTask.started_at <= get_now_datetime(),
        (LoopAdTask.ended_at.is_null()) |
        (LoopAdTask.ended_at > get_now_datetime()),
        LoopAdTask.pin == 0).first()
    if task:
        rs = LoopAdTask.to_dict(task)
        srcs = AdSource.select().where(AdSource.task_id == task.id,
                                       AdSource.pin == 0).order_by(
                                           AdSource.idx)
        if srcs:
            rs['sources'] = []
            for s in srcs:
                s_dict = AdSource.to_dict(s)
                rs['sources'].append(s_dict)
    return rs
def save_user(self, mobile_no, passwd):
    acc: Accounts = DataDao.account_by_name(mobile_no)
    if acc:
        return None, None, "exist"
    else:
        user_token, user_ext_dict = DataDao.new_account(
            mobile_no, passwd, get_now_datetime(),
            lambda account: auth_service.build_user_payload(account))
        fuzzy_id = user_ext_dict['id']
        return user_token, fuzzy_id, None
def __init__(self, args):
    self.batch_size = args.batch_size
    self.time_step = args.time_step      # number of time steps
    self.pitch_range = args.pitch_range  # number of pitches
    self.input_c_dim = args.input_nc     # number of input image channels
    self.output_c_dim = args.output_nc   # number of output image channels
    self.lr = args.lr
    self.L1_lambda = args.L1_lambda
    self.gamma = args.gamma
    self.sigma_d = args.sigma_d
    self.dataset_A_dir = args.dataset_A_dir
    self.dataset_B_dir = args.dataset_B_dir
    self.d_loss_path = args.d_loss_path
    self.g_loss_path = args.g_loss_path
    self.cycle_loss_path = args.cycle_loss_path
    self.sample_dir = args.sample_dir
    self.model = args.model
    self.discriminator = build_discriminator
    self.generator = build_generator
    self.criterionGAN = mae_criterion

    OPTIONS = namedtuple(
        "OPTIONS",
        "batch_size "
        "time_step "
        "input_nc "
        "output_nc "
        "pitch_range "
        "gf_dim "
        "df_dim "
        "is_training",
    )
    self.options = OPTIONS._make(
        (
            args.batch_size,
            args.time_step,
            args.pitch_range,
            args.input_nc,
            args.output_nc,
            args.ngf,
            args.ndf,
            args.phase == "train",
        )
    )

    self.now_datetime = get_now_datetime()
    self.pool = ImagePool(args.max_size)

    self._build_model(args)
    print("Initialized model.")
def __init__(self, args):
    self.dataset_A_dir = args.dataset_A_dir
    self.dataset_B_dir = args.dataset_B_dir
    self.sample_dir = args.sample_dir
    self.batch_size = args.batch_size
    self.time_step = args.time_step
    self.pitch_range = args.pitch_range
    self.input_c_dim = args.input_nc  # number of input image channels
    self.sigma_c = args.sigma_c
    self.sigma_d = args.sigma_d
    self.lr = args.lr
    self.now_datetime = get_now_datetime()
    self.model = args.model
    self.generator = build_generator
    self.discriminator = build_discriminator_classifier

    self._build_model(args)

    OPTIONS = namedtuple(
        "OPTIONS",
        "batch_size "
        "time_step "
        "input_nc "
        "output_nc "
        "pitch_range "
        "gf_dim "
        "df_dim "
        "is_training",
    )
    self.options = OPTIONS._make(
        (
            args.batch_size,
            args.time_step,
            args.pitch_range,
            args.input_nc,
            args.output_nc,
            args.ngf,
            args.ndf,
            args.phase == "train",
        )
    )

    print("Initializing classifier...")
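# Small illustration of the OPTIONS namedtuple pattern used in the two constructors
# above (not from the original source): _make fills fields strictly by position, so the
# value tuple must follow the field order declared in the space-separated name string.
from collections import namedtuple

OPTIONS = namedtuple("OPTIONS", "batch_size time_step pitch_range is_training")
opts = OPTIONS._make((16, 64, 84, True))
assert opts.pitch_range == 84  # positional: the third value lands in the third field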
def recheck_shared_d_link(self, shared_log_id):
    share_log: ShareLogs = DataDao.query_shared_log_by_pk_id(shared_log_id)
    if share_log:
        data_item: DataItem = DataDao.get_data_item_by_fs_id(
            share_log.fs_id)
        need_sync = False
        # print("query_file dlink:", data_item.dlink)
        if not data_item.dlink_updated_at or not data_item.dlink:
            need_sync = True
        elif data_item.dlink_updated_at:
            dt = arrow.get(
                data_item.dlink_updated_at).replace(tzinfo=self.default_tz)
            if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
                need_sync = True
        if need_sync:
            account_id = data_item.account_id
            acc: Accounts = DataDao.account_by_id(account_id)
            pan_acc: PanAccounts = self.get_pan_account(
                data_item.panacc, data_item.account_id)
            # sync_list = restapi.sync_file(self.pan_acc.access_token, [int(data_item.fs_id)])
            sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
                pan_acc.access_token, int(data_item.fs_id))
            if sync_dlink:
                data_item.dlink = sync_dlink
                data_item.dlink_updated_at = get_now_datetime()
                DataDao.update_data_item(
                    data_item.id, {
                        "dlink": data_item.dlink,
                        "dlink_updated_at": data_item.dlink_updated_at
                    })
        share_log.dlink = data_item.dlink
        DataDao.update_share_log_by_pk(share_log.id,
                                       {'dlink': data_item.dlink})
        return ShareLogs.to_dict(share_log)
    return None
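# Minimal sketch of the dlink-expiry rule shared by recheck_shared_d_link above and by
# checkout_dlink/query_file below, pulled out for clarity; it is not part of the original
# source. DLINK_TIMEOUT_HOURS and DEFAULT_TZ are placeholders standing in for the
# project's DLINK_TIMEOUT setting and self.default_tz.
import arrow

DLINK_TIMEOUT_HOURS = 8       # assumption: the real value comes from project config
DEFAULT_TZ = "Asia/Shanghai"  # assumption: stands in for self.default_tz

def dlink_needs_sync(dlink, dlink_updated_at):
    # A download link is (re)fetched when it is missing or when it was last refreshed
    # more than DLINK_TIMEOUT_HOURS ago.
    if not dlink or not dlink_updated_at:
        return True
    updated = arrow.get(dlink_updated_at).replace(tzinfo=DEFAULT_TZ)
    return updated.shift(hours=+DLINK_TIMEOUT_HOURS) < arrow.now()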
def query_share_logs_by_hours(cls, hours, offset=0, limit=100):
    return ShareLogs.select().where(
        ShareLogs.updated_at < get_now_datetime(hours * 60 * 60)).limit(
            limit).offset(offset)
def login_check_user(self, acc: Accounts, need_update_login_time=True,
                     source="BD"):
    need_renew_pan_acc = []
    if acc:
        pan_acc_list = DataDao.pan_account_list(acc.id)
        # pan_acc: PanAccounts = DataDao.pan_account_list(acc.id)
        need_renew_access_token = False
        l = len(pan_acc_list)
        for pan_acc in pan_acc_list:
            if pan_acc.client_id != self.client_id or pan_acc.client_secret != self.client_secret:
                need_renew_access_token = True
                need_renew_pan_acc.append({
                    "id": pan_acc.id,
                    "name": pan_acc.name,
                    "use_cnt": pan_acc.use_count,
                    "refresh": False,
                    'auth': self.pan_auth
                })
            elif pan_acc.access_token and pan_acc.token_updated_at:
                tud = arrow.get(pan_acc.token_updated_at).replace(
                    tzinfo=self.default_tz)
                if (arrow.now(self.default_tz) -
                        tud).total_seconds() > PAN_ACCESS_TOKEN_TIMEOUT:
                    need_renew_access_token = True
                    need_renew_pan_acc.append({
                        "id": pan_acc.id,
                        "name": pan_acc.name,
                        "use_cnt": pan_acc.use_count,
                        "refresh": True,
                        'auth': self.pan_auth
                    })
            else:
                need_renew_access_token = True
                need_renew_pan_acc.append({
                    "id": pan_acc.id,
                    "name": pan_acc.name,
                    "use_cnt": pan_acc.use_count,
                    "refresh": True,
                    'auth': self.pan_auth
                })
        if l == 0:
            need_renew_access_token = True
        # if pan_acc and pan_acc['access_token'] and pan_acc['token_updated_at']:
        #     tud = arrow.get(pan_acc['token_updated_at']).replace(tzinfo=self.default_tz)
        #     if (arrow.now(self.default_tz) - tud).total_seconds() < PAN_ACCESS_TOKEN_TIMEOUT:
        #         need_renew_access_token = False
        lud = arrow.get(
            acc.login_updated_at).replace(tzinfo=self.default_tz)
        diff = arrow.now(self.default_tz) - lud
        params = {}
        if (need_update_login_time and
                diff.total_seconds() > LOGIN_TOKEN_TIMEOUT) or not acc.login_token:
            if not acc.fuzzy_id:
                acc.fuzzy_id = obfuscate_id(acc.id)
                params["fuzzy_id"] = acc.fuzzy_id
            # login_token = make_token(acc.fuzzy_id)
            login_token, _ = auth_service.build_user_payload(acc)
            acc.login_token = login_token
            params["login_token"] = login_token
            lud = params["login_updated_at"] = get_now_datetime()
            DataDao.update_account_by_pk(acc.id, params=params)
        else:
            tk = acc.login_token
            if tk:
                user_payload = get_payload_from_token(tk)
                if user_payload:
                    tm = user_payload['tm']
                    ctm = get_now_ts()
                    if ctm - tm > LOGIN_TOKEN_TIMEOUT:
                        # login_token = make_token(acc.fuzzy_id)
                        login_token, _ = auth_service.build_user_payload(
                            acc)
                        acc.login_token = login_token
                        params["login_token"] = login_token
                        lud = params[
                            "login_updated_at"] = get_now_datetime()
                        DataDao.update_account_by_pk(acc.id, params=params)
        log.debug("login_token:{}".format(acc.login_token))
        result = {"need_renew_access_token": need_renew_access_token}
        if need_renew_access_token:
            result['auth'] = self.pan_auth
        result['token'] = acc.login_token
        result['login_at'] = int(arrow.get(lud).timestamp * 1000)
        # print('login_at:', result['login_at'])
        result['pan_acc_list'] = need_renew_pan_acc
        self.__patch_acc_ext(acc, result, source)
        # account_ext = DataDao.account_ext_by_acc_id(acc.id)
        # result['username'] = account_ext.username
        # result['portrait'] = account_ext.portrait
        result['id'] = acc.fuzzy_id
        return result
    return None
def get_pan_user_access_token(self, user_id, code, pan_name, can_refresh=True):
    # access_token = ""
    pan_url = '{}token'.format(self.auth_point)
    # _pan_acc: PanAccounts = DataDao.pan_account_list(user_id)
    _pan_acc = DataDao.pan_account_by_name(user_id, pan_name)
    # pan_acc = DataDao.pan_account_list(user_id, transfer_to_dict=False)
    pan_acc_id = 0
    pan_acc_not_exist = False
    can_refresh_token = False
    if not _pan_acc:
        pan_acc_not_exist = True
    else:
        pan_acc_id = _pan_acc.id
        if can_refresh and _pan_acc.refresh_token:
            can_refresh_token = True
    now = arrow.now(self.default_tz)
    logger.info("pan_acc_not_exist:{}, can_refresh_token:{}".format(
        pan_acc_not_exist, can_refresh_token))
    if pan_acc_not_exist or not can_refresh_token:
        # params = {'grant_type': 'authorization_code', 'client_id': PAN_SERVICE["client_id"],
        #           'code': code,
        #           'redirect_uri': 'oob',
        #           'client_secret': PAN_SERVICE["client_secret"]}
        # print("query access_token pan_url:", pan_url)
        # rs = requests.get(pan_url, params=params)
        # print(rs.content)
        # jsonrs = rs.json()
        jsonrs = restapi.access_token_code(code)
        access_token = jsonrs["access_token"]
        refresh_token = jsonrs["refresh_token"]
        expires_in = jsonrs["expires_in"]  # seconds
        expires_at = now.shift(seconds=+expires_in).datetime
        account_ext: AccountExt = self.sync_pan_user_info(
            access_token, user_id)
        if pan_acc_not_exist:
            # print("will new pan account")
            account_ext_id = 0
            if account_ext:
                account_ext_id = account_ext.user_id
            pan_acc_id = DataDao.new_pan_account(user_id, pan_name,
                                                 self.client_id,
                                                 self.client_secret,
                                                 access_token, refresh_token,
                                                 expires_at,
                                                 get_now_datetime(), pin=1,
                                                 bd_uid=account_ext_id)
        else:
            # print("will update pan account")
            DataDao.update_pan_account_by_pk(
                pan_acc_id, {
                    "access_token": access_token,
                    'name': pan_name,
                    'client_id': self.client_id,
                    'client_secret': self.client_secret,
                    "refresh_token": refresh_token,
                    "expires_at": expires_at,
                    "token_updated_at": get_now_datetime()
                })
    else:
        params = {
            'grant_type': 'refresh_token',
            'client_id': self.client_id,
            'refresh_token': _pan_acc.refresh_token,
            'client_secret': self.client_secret
        }
        print("query refresh_token pan_url:", pan_url)
        rs = requests.get(pan_url, params=params)
        print(rs.content)
        jsonrs = rs.json()
        access_token = jsonrs["access_token"]
        refresh_token = jsonrs["refresh_token"]
        expires_in = jsonrs["expires_in"]  # seconds
        expires_at = now.shift(seconds=+expires_in).datetime
        DataDao.update_pan_account_by_pk(
            pan_acc_id, {
                "access_token": access_token,
                "refresh_token": refresh_token,
                "expires_at": expires_at,
                "token_updated_at": get_now_datetime()
            })
        self.sync_pan_user_info(access_token, user_id)
    return access_token, pan_acc_id, None
def bd_sync_login(self, params):
    acc_name = params.get('acc_name')
    refresh_token = params.get('refresh_token')
    access_token = params.get('access_token')
    expires_in = params.get('expires_in')
    userid = int(params.get('userid'))
    portrait = params.get('portrait')
    username = params.get('username')
    openid = params.get('openid')
    is_realname = int(params.get('is_realname', '1'))
    realname = ''
    userdetail = ''
    birthday = ''
    marriage = ''
    sex = ''
    blood = ''
    figure = ''
    constellation = ''
    education = ''
    trade = ''
    job = ''
    expires_in = expires_in - 20 * 60  # seconds
    expires_at = arrow.now(
        self.default_tz).shift(seconds=+expires_in).datetime
    acc_ext: AccountExt = DataDao.account_ext_by_bd_user_id(userid)
    # print("find acc_ext:", acc_ext)
    log.info("bd_sync_login bd userid:{}".format(userid))
    now_tm = get_now_datetime()
    result = {}
    if not acc_ext:
        # new user
        # print("not find acc_ext userid:", userid)
        log.info("bd_sync_login not find acc_ext :{}".format(userid))
        user_token, user_ext_dict = self._new_user(
            acc_name, '654321', username, access_token, refresh_token,
            expires_at,
            dict(realname=realname, portrait=portrait, userdetail=userdetail,
                 birthday=birthday, marriage=marriage, sex=sex, blood=blood,
                 figure=figure, constellation=constellation,
                 education=education, trade=trade, job=job,
                 username=username, is_realname=is_realname, user_id=userid))
        # acc_id = user_ext_dict['_id']
        # DataDao.new_accounts_ext(userid, username, realname, portrait, userdetail, birthday, marriage, sex,
        #                          blood, figure, constellation, education, trade, job, is_realname,
        #                          account_id=acc_id)
        login_updated_at = user_ext_dict['login_updated_at']
        lud = arrow.get(login_updated_at).replace(tzinfo=self.default_tz)
        result['token'] = user_token
        result['login_at'] = int(arrow.get(lud).timestamp * 1000)
        # print('login_at:', result['login_at'])
        result['pan_acc_list'] = []
        result['username'] = username
        result['portrait'] = portrait
        result['id'] = user_ext_dict['id']
    else:
        # print("find acc_ext:", acc_ext.username)
        log.info("bd_sync_login find acc_ext :{}".format(userid))
        acc_id = acc_ext.account_id
        account: Accounts = DataDao.account_by_id(acc_id)
        DataDao.update_account_ext_by_user_id(
            userid,
            dict(username=username, portrait=portrait, account_id=acc_id))
        pan_acc: PanAccounts = DataDao.pan_account_by_bd_uid(
            acc_id, acc_ext.user_id)
        if not pan_acc:
            pan_acc = DataDao.pan_account_by_name(acc_id, acc_name)
        if pan_acc:
            if pan_acc.pin != 1:
                n = DataDao.query_pan_acc_count_by_acc_id(acc_id)
                if n > 1:
                    DataDao.update_pan_account_by_acc_id(
                        acc_id, {'pin': 0})
            DataDao.update_pan_account_by_pk(
                pan_acc.id, {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                    "expires_at": expires_at,
                    "token_updated_at": now_tm,
                    "pin": 1
                })
        else:
            client_id = PAN_SERVICE['client_id']
            client_secret = PAN_SERVICE['client_secret']
            pan_acc_id = DataDao.new_pan_account(acc_id, acc_name, client_id,
                                                 client_secret, access_token,
                                                 refresh_token, expires_at,
                                                 get_now_datetime(), pin=1,
                                                 bd_uid=acc_ext.user_id)
            pan_acc = self.get_pan_account(pan_acc_id, acc_id)
        result = self.login_check_user(account)
    return result
def checkout_dlink(self, item_id, user_id, user_ref_id):
    _client_data_item: ClientDataItem = ClientDataDao.get_data_item_by_id(
        item_id, user_ref_id)
    need_sync = False
    if not _client_data_item.dlink_updated_at or not _client_data_item.dlink:
        need_sync = True
    elif _client_data_item.dlink_updated_at:
        dt = arrow.get(_client_data_item.dlink_updated_at).replace(
            tzinfo=self.default_tz)
        if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
            need_sync = True
    need_thumbs = False
    if is_image_media(_client_data_item.filename) and _client_data_item.category == 3:
        need_thumbs = True
    if need_sync:
        pan_id = _client_data_item.panacc
        pan_acc: PanAccounts = auth_service.get_pan_account(pan_id, user_id)
        # sync_list = restapi.sync_file(self.pan_acc.access_token, [int(data_item.fs_id)])
        sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
            pan_acc.access_token, int(_client_data_item.fs_id), need_thumbs)
        if sync_dlink:
            _client_data_item.dlink = "{}&access_token={}".format(
                sync_dlink, pan_acc.access_token)
            _client_data_item.dlink_updated_at = get_now_datetime()
            data_item_params = {
                "dlink": _client_data_item.dlink,
                "dlink_updated_at": _client_data_item.dlink_updated_at
            }
            if need_thumbs:
                if "url2" in thumbs:
                    data_item_params["thumb"] = thumbs["url2"]
                    _client_data_item.thumb = data_item_params["thumb"]
                elif "url1" in thumbs:
                    data_item_params["thumb"] = thumbs["url1"]
                    _client_data_item.thumb = data_item_params["thumb"]
                elif "icon" in thumbs:
                    data_item_params["thumb"] = thumbs["icon"]
                    _client_data_item.thumb = data_item_params["thumb"]
            ClientDataDao.update_client_item(_client_data_item.id,
                                             data_item_params)
    expired_at = arrow.get(_client_data_item.dlink_updated_at).replace(
        tzinfo=self.default_tz).shift(hours=+DLINK_TIMEOUT).datetime
    loader = {
        'id': 0,
        'created_at': _client_data_item.created_at,
        'updated_at': _client_data_item.updated_at,
        'fs_id': _client_data_item.fs_id,
        'md5_val': _client_data_item.md5_val,
        'path': _client_data_item.path,
        'size': _client_data_item.size,
        'category': _client_data_item.category,
        'pin': _client_data_item.pin,
        'dlink': _client_data_item.dlink,
        'filename': _client_data_item.filename,
        'expired_at': expired_at
    }
    return [loader]
def query_file(self, item_id):
    data_item: DataItem = DataDao.get_data_item_by_id(item_id)
    need_sync = False
    logger.info("query_file dlink:{}".format(data_item.dlink))
    if not data_item.dlink_updated_at or not data_item.dlink:
        need_sync = True
    elif data_item.dlink_updated_at:
        dt = arrow.get(
            data_item.dlink_updated_at).replace(tzinfo=self.default_tz)
        if dt.shift(hours=+DLINK_TIMEOUT) < arrow.now():
            need_sync = True
    account_id = data_item.account_id
    acc: Accounts = DataDao.account_by_id(account_id)
    flv_json = None
    need_thumbs = False
    # data_item_ext = None
    # if data_item.category == 1 and is_video_media(data_item.filename):
    #     data_item_ext = DataDao.get_data_item_ext_by_id(data_item.id)
    if is_image_media(data_item.filename) and data_item.category == 3:
        need_thumbs = True
    if need_sync:
        pan_acc: PanAccounts = self.get_pan_account(
            data_item.panacc, data_item.account_id)
        # sync_list = restapi.sync_file(self.pan_acc.access_token, [int(data_item.fs_id)])
        sync_dlink, thumbs = restapi.get_dlink_by_sync_file(
            pan_acc.access_token, int(data_item.fs_id), need_thumbs)
        if sync_dlink:
            data_item.dlink = "{}&access_token={}".format(
                sync_dlink, pan_acc.access_token)
            data_item.dlink_updated_at = get_now_datetime()
            data_item_params = {
                "dlink": data_item.dlink,
                "dlink_updated_at": data_item.dlink_updated_at
            }
            if need_thumbs:
                if "url3" in thumbs:
                    data_item_params["thumb"] = thumbs["url3"]
                    data_item.thumb = data_item_params["thumb"]
                elif "url2" in thumbs:
                    data_item_params["thumb"] = thumbs["url2"]
                    data_item.thumb = data_item_params["thumb"]
                elif "url1" in thumbs:
                    data_item_params["thumb"] = thumbs["url1"]
                    data_item.thumb = data_item_params["thumb"]
                elif "icon" in thumbs:
                    data_item_params["thumb"] = thumbs["icon"]
                    data_item.thumb = data_item_params["thumb"]
            DataDao.update_data_item(data_item.id, data_item_params)
        # not authorized
        # if data_item.category == 1 and is_video_media(data_item.filename):
        #     flv_json = restapi.get_media_flv_info(pan_acc.access_token, data_item.path)
        #     if flv_json and "mlink" in flv_json:
        #         flv_params = {"fs_id": data_item.fs_id, "mlink": flv_json["mlink"],
        #                       "start_at_time": flv_json["mlink_start_at"]}
        #         if data_item_ext:
        #             data_item_ext.mlink = flv_params["mlink"]
        #             data_item_ext.start_at_time = flv_params["start_at_time"]
        #             DataDao.update_data_item_ext(data_item.id, flv_params)
        #         else:
        #             data_item_ext = DataDao.new_data_item_ext(data_item.id, flv_params)
    used_pan_acc_id = data_item.panacc
    if data_item:
        data_item.size = int(data_item.size / 1024)
        f_type = guess_file_type(data_item.filename)
        params = {"item": DataItem.to_dict(data_item, ['id', 'parent'])}
        params["item"]["id"] = obfuscate_id(data_item.id)
        params["item"]["type"] = f_type
        params["item"]["media_type"] = self.check_data_item_media_type(
            data_item.category, data_item.filename)
        params["item"]["dlink_tokens"] = [used_pan_acc_id]
        # if data_item.category == 1 and is_video_media(data_item.filename) and data_item_ext:
        #     params["item"]["mlink"] = data_item_ext.mlink
        #     params["item"]["start_at_time"] = data_item_ext.start_at_time
        return params