def getSalesRecordPeriod(pid):
    """Return the sales records of product `pid` inside a date range.

    The range comes from the 'from'/'to' query parameters (ISO dates);
    both default to today when absent.
    """
    today = date.today().isoformat()
    dateFrom = request.args.get('from', today)
    dateTo = request.args.get('to', today)
    logger.info(f'get product {pid} records from:{dateFrom}, to :{dateTo}')
    rows = get_records_by_period(pid,
                                 date.fromisoformat(dateFrom),
                                 date.fromisoformat(dateTo))
    return jsonify(status=True, message='succeed',
                   data=SalesVolumes.serialize_list(rows))
def login():
    """Log a user in (POST with JSON credentials) or report the current
    session state (GET).

    Returns a JSON envelope {status, message, data} in every branch.
    """
    if request.method == 'GET':
        if 'username' not in session:
            return jsonify(status=False, message='no session', data='')
        exist = username_exist(session.get('username'))
        if exist[0]:
            logger.info(f'user login, username:{session.get("username")}')
            return jsonify(
                status=True,
                message=f'user: {session.get("username")} login',
                data=exist[1].serialize())
        return jsonify(status=False, message='user invalid', data='')

    # POST: verify the credentials supplied in the JSON body.
    username = request.json.get('username', '')
    password = request.json.get('password', '')
    # Bug fix: removed debug print() calls — one of them wrote the
    # plaintext password to stdout.
    if username == '' or password == '':
        logger.warning('login failed: missing username or password')
        return jsonify(status=False, message='invalid data', data='')
    userInfo = {'username': username, 'password': password}
    available, user = user_verification(userInfo)
    if available:
        session['username'] = userInfo['username']
        session.permanent = True
        return jsonify(status=True, message='ok', data=user.serialize())
    return jsonify(status=False, message='user invalid', data='')
def post(self):
    # Update an existing project's editable fields; access is gated by the
    # `dataauth` decorator (defined elsewhere in this file).
    data = parser.parse_args()
    # JWT payload is assumed to carry a 'userid' entry — TODO confirm.
    user_id = get_jwt_identity()['userid']
    logger.info(f'{get_jwt_identity()}=={data}')

    # `dataauth` only invokes the wrapped function when `user_id` is
    # authorized for `data.project_id`; otherwise it returns its own
    # failure payload.
    @dataauth(user_id, data.project_id)
    def function():
        try:
            # NOTE(review): no explicit db.session.commit() here —
            # presumably committed by session teardown/autocommit; verify.
            ProjectModel.query.filter_by(
                project_id=data.project_id).update({
                    'project_name': data.project_name,
                    'project_v': data.project_v,
                    'project_desc': data.project_desc,
                    'project_state': data.project_state
                })
            send_data = {'message': 'SUCCESS', 'code': 0}
            logger.info(f'{send_data}')
            return send_data
        except Exception as e:
            logger.error(f"{e}")
            send_data = {'message': 'FALSE', 'code': 1}
            return send_data, 500

    return function()
def post(self):
    """Return the non-deleted hosts belonging to the requested project.

    Response: {'code': 0, 'message': 'SUCCESS', 'host_list': [...]} on
    success, or ({'message': ..., 'code': 1}, 500) on failure.
    """
    data = parser.parse_args()
    user_id = get_jwt_identity()['userid']
    logger.info(f'{get_jwt_identity()}=={data}')
    try:
        all_results = HostModel.query.filter_by(project_id=data.project_id,
                                                delete_sign=0).all()
        # Build one plain dict per host row for JSON serialization.
        host_list = [{
            'host_name': i.host_name,
            'host_addr': i.host_addr,
            'host_desc': i.host_desc,
            'host_id': i.host_id
        } for i in all_results]
        send_data = {
            'code': 0,
            'message': "SUCCESS",
            'host_list': host_list
        }
        logger.info(f'{send_data}')
        return send_data
    except Exception as e:
        logger.error(f'{e}')
        # Bug fix: key was misspelled 'messaeg', so clients could not
        # read the error message.
        send_data = {'message': 'Something went wrong', 'code': 1}
        return send_data, 500
def benchmark_train_and_predict(column_to_predict, train_data, test_data):
    """Baseline "model": copy the statistical feature computed during
    preprocessing ('frequentation_prevue' or 'frequentation_reel' times
    'effectif') into a new `output` column of `test_data` and return it.
    """
    logger.info("----------- check training data -------------")
    for canteen, frame in train_data.groupby(['cantine_nom', 'cantine_type']):
        logger.info(
            "canteen %s has %s days of history to train on starting on %s and ending on %s",
            canteen,
            len(frame),
            frame["date_str"].min(),
            frame['date_str'].max(),
        )

    # Pick the precomputed frequentation column that matches the target.
    source_col = ('frequentation_prevue' if column_to_predict == "prevision"
                  else 'frequentation_reel')
    test_data['output'] = test_data[source_col] * test_data['effectif']

    logger.info("----------- check predictions -------------")
    for canteen, frame in test_data.groupby(['cantine_nom', 'cantine_type']):
        logger.info(
            "canteen %s has predictions for %s days starting on %s and ending on %s",
            canteen,
            len(frame),
            frame["date_str"].min(),
            frame['date_str'].max(),
        )
    logger.info("----------- export predictions -------------")
    return test_data
async def fetch_keys(client: httpx.AsyncClient) -> None: """ Fetch JWT keys and store them in the JWT_KEYS that will act as a cache. """ # TODO: Handling of the JWKS should be refactored to a separate library and it # should make sure the caching + fetching fresh data is implemented according to # standards. This simplified version now caches the keys forever (= until restart). url = openid_conf.jwks_uri logger.info("Fetching JWKS", url=url) response = await client.get(url) key_data = response.json() for entry in key_data["keys"]: kid = entry["kid"] algorithm = entry["alg"] if entry.get("kty") == "RSA" and "n" in entry and "e" in entry: n = urlsafe_b64_to_unsigned_int(entry["n"]) e = urlsafe_b64_to_unsigned_int(entry["e"]) key = RSAPublicNumbers(e=e, n=n).public_key(default_backend()) else: key = load_der_x509_certificate(b64decode(entry["x5c"][0]), default_backend()).public_key() JWT_KEYS[kid] = { "algorithm": algorithm, "key": key, } logger.info("Added JWT key", kid=kid, algorithm=algorithm, type=type(key).__name__)
def phone_exist(phone):
    """Return (True, user) when a User row with this phone exists,
    otherwise (False, None)."""
    user = User.query.filter_by(phone=phone).first()
    if user is None:
        return False, None
    logger.info(f'exist: {user}')
    return True, user
def shopHandler(sid):
    """Dispatch GET (read), PUT (update) and DELETE for a single shop."""
    if request.method == 'GET':
        shop = get_shop_detail(sid)
        logger.info(f'try to get shop: id: {sid} info')
        if shop is None:
            return jsonify(status=False, message='shop not existed', data='')
        return jsonify(status=True, message='succeed', data=shop.serialize())

    if request.method == 'PUT':
        shopInfo = form2Dict(request.json,
                             {'id': sid, 'name': '', 'description': '',
                              'img': ''})
        logger.info(f'try to update shop: id: {sid} info: {shopInfo}')
        if update_shop_info(shopInfo):
            logger.info(f'succeed to update shop: id: {sid} info')
            return jsonify(status=True, message='succeed', data='')
        logger.info(f'fail to update shop: id: {sid} info')
        return jsonify(status=False, message='failed', data='')

    # DELETE: remove the shop's sales records and products first,
    # then the shop row itself.
    logger.info(f'try to delete shop: id: {sid} info')
    delete_records_by_sid(sid)
    delete_products_by_sid(sid)
    if delete_shop(sid):
        return jsonify(status=True, message='succeed', data='')
    return jsonify(status=False, message='failed', data='')
async def request(
    cls,
    method: str,
    url: str,
    *,
    json: Optional[dict] = None,
    params: Optional[dict] = None,
    headers: Optional[dict] = None,
) -> httpx.Response:
    """
    Do a request using httpx, but with retries
    """
    # Retry transport-level failures up to cls.MAX_RETRIES times; the final
    # attempt re-raises the httpx.RequestError to the caller.
    for attempt in range(1, cls.MAX_RETRIES + 1):
        try:
            return await cls.client.request(method, url, json=json, params=params, headers=headers)
        except httpx.RequestError as ex:
            # NOTE(review): only GET failures are logged, but every method is
            # retried — confirm that retrying non-idempotent methods
            # (POST/PUT) is intentional.
            if method == "GET":
                logger.info(
                    "Failed to make a request to LE, attempt {}/{}",
                    attempt,
                    cls.MAX_RETRIES,
                    url=url,
                    params=params,
                    err=ex.__class__.__name__,
                )
            if attempt < cls.MAX_RETRIES:
                continue
            raise
async def fetch(self, client: httpx.AsyncClient) -> None:
    """Download the OpenID Connect discovery document and cache it on
    this instance as `self.conf`."""
    endpoint = conf.OPENID_CONNECT_CONFIGURATION
    logger.info("Fetching OpenID Configuration", url=endpoint)
    response = await client.get(endpoint)
    payload = response.json()
    self.conf = OpenIDConfiguration(**payload)
def smarter_process_data(data_path, start, end, school_cafeterias, include_wednesday, date_format): """ Computes dataset based on datafiles stored in `data_path` such that: - one line by date and school_cafeteria - dates belong to [start, end] - school_cafeterias belong to `school_cafeterias` """ # generate dataframes based on input datafiles all_school_cafeterias, real_values, effectifs = compute_datafiles_related_dataframes( data_path, school_cafeterias) # generate dates rows all_dates, date_col = compute_dates_dataframe(start, end, date_format, data_path, include_wednesday) # cross product school_cafeterias x dates all_dates_x_all_school_cafeterias = cross_product(all_dates, all_school_cafeterias) # join real values all_data = all_dates_x_all_school_cafeterias.merge( real_values, left_on=[date_col, "cantine_nom", "cantine_type"], right_on=[date_col, "cantine_nom", "cantine_type"], how='left') # join effectif values all_data = all_data.merge( effectifs, left_on=["annee_scolaire", "cantine_nom", "cantine_type"], right_index=True, how='left') # compute statistical features all_data = add_statistical_features(all_data) all_data = tag_outliers(all_data, 'reel', 3) for resolution, dtf in all_data.groupby(['cantine_nom', 'cantine_type']): logger.info( "dataset for school_cafeteria %s generated contains %s days", str(resolution), str(len(dtf))) # fillnans with 0 all_data.loc[(all_data["working"] == 0) & np.isnan(all_data["reel"]), 'reel'] = 0 all_data.loc[(all_data["working"] == 0) & np.isnan(all_data["prevision"]), 'prevision'] = 0 all_data.loc[(all_data["wednesday"] == 1) & np.isnan(all_data["reel"]), 'reel'] = 0 all_data.loc[(all_data["wednesday"] == 1) & np.isnan(all_data["prevision"]), 'prevision'] = 0 all_data.to_csv(f'output/staging/prepared_data_{start}_{end}.csv', index=False)
def checkPhone():
    """Report whether the phone number in the query string is still free."""
    phone = request.args.get('phone')
    taken, _user = phone_exist(phone)
    if taken:
        logger.warning(f'register failed, phone:{phone} has existed')
        return jsonify(status=False, message="phone has existed", data='')
    logger.info('phone available')
    return jsonify(status=True, message="phone available", data='')
def createShop():
    """Create a shop owned by the logged-in user and return its new id."""
    username = session.get('username')
    uid = get_uid_by_username(username)
    shopInfo = form2Dict(
        request.json,
        {'name': '', 'description': '', 'img': 'default-shop.jpeg',
         'uid': uid})
    # An explicitly empty img in the payload still gets the default image.
    if shopInfo['img'] == '':
        shopInfo['img'] = 'default-shop.jpeg'
    logger.info(f'user: {username} try to create shop:{shopInfo}')
    sid = create_shop(shopInfo)
    logger.info(f'shop-id: {sid} created')
    return jsonify(status=True, message='succeed', data={'sid': sid})
def fuction():
    """Soft-delete the project by setting its delete_sign flag.

    NOTE(review): the misspelled name ("fuction") is kept because the
    enclosing scope calls it by this exact name.
    """
    try:
        ProjectModel.query.filter_by(
            project_id=data.project_id).update({'delete_sign': 1})
        send_data = {'message': 'SUCCESS', 'code': 0}
        logger.info(f'{send_data}')
        return send_data
    except Exception as e:
        # Bug fix: was logger.error({f'e'}), which logged the literal
        # set {'e'} instead of the caught exception.
        logger.error(f'{e}')
        send_data = {'message': 'Something went wrong', 'code': 1}
        return send_data, 500
def function():
    """Soft-delete the selected host by setting its delete_sign flag."""
    try:
        todeletehost.update({'delete_sign': 1})
        send_data = {'message': 'SUCCESS', 'code': 0}
        logger.info(f'{send_data}')
        return send_data
    except Exception as e:
        # Bug fixes: removed the leftover debug print() (the exception is
        # already logged) and corrected the 'messaeg' key typo so clients
        # can read the error message.
        logger.error(f'{e}')
        send_data = {'message': 'Something went wrong', 'code': 1}
        return send_data, 500
def create_shop(shop):
    """Insert a Shop row built from the `shop` dict and return the new id."""
    with db.auto_commit_db():
        row = Shop(name=shop['name'],
                   description=shop['description'],
                   img=shop['img'],
                   uid=shop['uid'])
        db.session.add(row)
        db.session.flush()  # populate the autoincrement primary key
        sid = row.id
        logger.info(f'shop create succeed, sid: {sid}')
        return sid
def delete_record(pid, date):
    """Delete the sales record for (pid, date); return True when one
    existed and was removed, False otherwise."""
    record = SalesVolumes.query.filter_by(pid=pid, date=date).first()
    if record is None:
        logger.info(
            f'delete record (pid:{pid}, date:{date}) failed, record not exists'
        )
        return False
    db.session.delete(record)
    db.session.commit()
    logger.info(f'delete record (pid:{pid}, date:{date}) succeed')
    return True
def getImgFile():
    """Serve an uploaded image file named by the 'f' query parameter."""
    fileName = request.args.get('f', None)
    logger.info(f'try to get: {fileName}')
    if fileName is None:
        return jsonify(status=False, message='no filename', data='')
    # SECURITY(review): fileName comes straight from the query string and is
    # joined into a filesystem path — a '../' name can escape the upload
    # folder. Consider os.path.basename() or an allow-list before release.
    filePath = os.path.join(app.config['BASE_PATH'],
                            app.config['UPLOAD_FILE_FOLDER'], fileName)
    # Bug fix: the file handle was leaked (open().read() without close);
    # the context manager guarantees it is closed.
    with open(filePath, "rb") as fh:
        file = fh.read()
    response = make_response(file)
    # NOTE(review): Content-Type is hard-coded to PNG regardless of the
    # actual file type — consider mimetypes.guess_type(fileName).
    response.headers['Content-Type'] = 'image/png'
    return response
def evaluate_feature_importance(evaluation_data_x, model):
    """
    given a trained model x and a dataframe evaluation_data_x, returns a
    list of features name with their importance for the model
    """
    pairs = list(zip(evaluation_data_x.columns.values,
                     model.feature_importances_))
    pairs.sort(key=lambda item: item[1], reverse=True)
    logger.info("FI:")
    logger.info(pairs)
    return pairs
def function():
    """Update the selected host's fields from the parsed request data."""
    try:
        toupdatahost.update(
            dict(host_name=data.host_name,
                 host_addr=data.host_addr,
                 host_desc=data.host_desc,
                 user_id=user_id))
        send_data = {'message': 'SUCCESS', 'code': 0}
        logger.info(f'{send_data}')
        return send_data
    except Exception as e:
        logger.error(f'{e}')
        # Bug fix: key was misspelled 'messaeg', so clients could not
        # read the error message.
        send_data = {'message': 'Something went wrong', 'code': 1}
        return send_data, 500
def decorator(*args, **kwargs):
    # Authorization gate: run the wrapped `function` only when an AuthModel
    # row links `user_id` to `project_id`. All of `function`, `user_id`,
    # `project_id`, `false_json` and `error_json` are captured from the
    # enclosing decorator factory's scope.
    try:
        if len(
                AuthModel.query.filter_by(user_id=user_id,
                                          project_id=project_id).all()) != 0:
            return function(*args, **kwargs)
        else:
            # Not authorized — return the shared "denied" payload.
            logger.info(f'{false_json}')
            return false_json
    except Exception as e:
        logger.error(f'{e}')
        print(e)
        return error_json, 500
def doWork(self):
    # Block until the segmenter has written "stream001.ts" (so the first
    # two segments exist), polling the output directory every 2 seconds.
    d = os.listdir(self._outDir)
    while "stream001.ts" not in d:
        time.sleep(2)
        d = os.listdir(self._outDir)
    logger.info("STARTING XBMC4XBOX Player")
    # Drive the XBMC4Xbox HTTP API: reset playlist 1, enqueue the first
    # segment URL, make playlist 1 current and start playback.
    logger.debug(
        requests.get(self._endpoint + "ClearPlayList(1)",
                     auth=self._auth).text)
    logger.debug(
        requests.get(
            self._endpoint + "AddToPlayList(" + urllib.parse.quote(
                self._baseUrl + "stream000.ts?token=" + self._token,
                safe="") + ";1)",
            auth=self._auth,
        ).text)
    logger.debug(
        requests.get(self._endpoint + "SetCurrentPlaylist(1)",
                     auth=self._auth).text)
    logger.debug(
        requests.get(self._endpoint + "PlayNext()", auth=self._auth).text)
    logger.debug("STARTING XBMC4XBOX Main Loop")
    prev = 0
    dowork = True
    while dowork:
        # Find the highest-numbered *.ts segment currently on disk.
        d = sorted(os.listdir(self._outDir))
        num = -1
        for i in range(len(d) - 1, 0, -1):
            if d[i][len(d[i]) - 2:] == "ts":
                num = d[i]
                # Strip the "stream" prefix and the ".ts" suffix, leaving
                # just the zero-padded segment number as a string.
                num = num[6:len(num) - 3]
                break
        if int(num) > prev:
            # A new segment appeared — append it to the playlist.
            logger.debug("Add stream: " + num)
            logger.debug(
                requests.get(
                    self._endpoint + "AddToPlayList(" + urllib.parse.quote(
                        self._baseUrl + "stream" + num + ".ts?token=" +
                        self._token,
                        safe="",
                    ) + ";1)",
                    auth=self._auth,
                ).text)
            prev = int(num)
        time.sleep(1)
        # Keep feeding segments while the producing process is still alive.
        dowork = self.activePid(self._startData["pid"])
def updateUserPhone():
    """Change the logged-in user's phone number (must not collide with an
    existing one)."""
    if 'username' not in session:
        logger.warning('user not login')
        return jsonify(status=False, message='user not login', data='')
    username = session.get('username')
    logger.info(f'{username} update phone')
    phone = request.json.get('phone', '')
    exist = phone_exist(phone)
    if exist[0]:
        logger.warning('update failed, phone has existed')
        return jsonify(status=False, message="phone has existed", data='')
    update_user_phone(username, phone)
    # Bug fix: the success path was logged at WARNING level.
    logger.info('update succeed')
    return jsonify(status=True, message="phone update succeed", data='')
def function():
    """Create a new host row for the current user and project."""
    try:
        new_host = HostModel(host_name=data.host_name,
                             host_addr=data.host_addr,
                             host_desc=data.host_desc,
                             user_id=user_id,
                             project_id=data.project_id)
        new_host.save_to_db()
        send_data = {'message': 'SUCCESS', 'code': 0}
        logger.info(f'{send_data}')
        return send_data
    except Exception as e:
        logger.error(f'{e}')
        # Bug fix: key was misspelled 'messaeg', so clients could not
        # read the error message.
        send_data = {'message': 'Something went wrong', 'code': 1}
        return send_data, 500
def post(self):
    """Search the non-deleted projects (optionally filtered by name),
    newest first, joined with each owner's username.

    Response: {'code': 0, 'message': 'SUCCESS', 'project': [...]} on
    success, or ({'message': ..., 'code': 1}, 500) on failure.
    """
    data = parser.parse_args()
    user_id = get_jwt_identity()['userid']
    logger.info(f'{get_jwt_identity()}=={data}')
    try:
        # Join ProjectModel with UserModel to pick up the owner's username;
        # order by creation time descending.
        all_results = db.session.query(
            ProjectModel.project_name, ProjectModel.project_desc,
            ProjectModel.project_v, ProjectModel.create_time,
            ProjectModel.project_id, ProjectModel.updata_time,
            ProjectModel.project_state, UserModel.username).filter(
                and_(
                    # Filter by name only when one was supplied; otherwise an
                    # always-true condition keeps the filter a no-op.
                    ProjectModel.project_name.like("%" + data.project_name +
                                                   "%")
                    if data.project_name is not None else user_id != '',
                    ProjectModel.delete_sign == 0)).filter(
                        ProjectModel.user_id == UserModel.id).order_by(
                            ProjectModel.create_time.desc()).all()
        # Numeric state -> human-readable label (0 running, 1 paused,
        # anything else finished).
        state_labels = {0: '进行中', 1: '项目暂停'}
        project_list = [{
            'project_name': i.project_name,
            'project_id': i.project_id,
            'project_v': i.project_v,
            'project_desc': i.project_desc,
            'user_name': i.username,
            'create_time': str(i.create_time),
            'updata_time': str(i.updata_time),
            'project_state': state_labels.get(i.project_state, '项目结束'),
        } for i in all_results]
        send_data = {
            'code': 0,
            'message': "SUCCESS",
            'project': project_list
        }
        logger.info(f'{send_data}')
        return send_data
    except Exception as e:
        logger.error(f'{e}')
        # Bug fix: key was misspelled 'messaeg', so clients could not
        # read the error message.
        send_data = {'message': 'Something went wrong', 'code': 1}
        return send_data, 500
def handle(self):
    """Place a bet of `coin_num` coins for player `p_id` (taken from the
    queue); return True on success, False on any validation failure."""
    param = self.queue.get()
    p_id, coin_num = param
    logger.info('<<<<< p_id: {} coin_num: {}'.format(p_id, coin_num))
    try:
        # Explicit checks instead of bare `assert` so validation still runs
        # when Python is started with -O (asserts are stripped then).
        if not isinstance(manager.gg, GameInit):
            raise AssertionError('game not initialized')
        if p_id not in manager.gg.player_info.keys():
            raise AssertionError('unknown player id')
        if not (type(coin_num) is int and coin_num >= 1):
            raise AssertionError('coin_num must be a positive int')
        player = manager.gg.player_info[p_id]
        table = player.area
        table.game.bet(player, coin_num)
        return True
    except AssertionError as e:
        logger.error('LimitedGuessBetHandler fail: {}'.format(e))
        return False
def handle(self):
    """Mark player `p_id` (taken from the queue) as ready; return True on
    success, False on any validation failure."""
    param = self.queue.get()
    p_id, = param
    logger.info('<<<<< p_id: {}'.format(p_id))
    try:
        # Explicit checks instead of bare `assert` so validation still runs
        # when Python is started with -O (asserts are stripped then).
        if not isinstance(manager.gg, GameInit):
            raise AssertionError('game not initialized')
        if p_id not in manager.gg.player_info.keys():
            raise AssertionError('unknown player id')
        player = manager.gg.player_info[p_id]
        table = player.area
        table.game.ready(player)
        return True
    except AssertionError as e:
        logger.error('LimitedGuessReadyHandler fail: {}'.format(e))
        return False
def create_feed(email: str):
    """Persist a new Feed row for `email` and return the refreshed model."""
    web_domain = settings.site_url
    feed = Feed(
        url=web_domain + "/rss/" + email,
        domain=web_domain,
        email_id=email + "@" + settings.email_domain,
        email=email,
    )
    db.add(feed)
    db.commit()
    db.refresh(feed)
    logger.info(f'New feed created for {feed.email_id}')
    return feed
def getAllShops():
    """List every shop of the logged-in user, each enriched with up to four
    preview product images."""
    username = session.get('username')
    uid = get_uid_by_username(username)
    logger.info(f'user: {username} uid: {uid}, try to get all shops')
    shops = get_user_all_shops(uid)
    if shops is None:
        return jsonify(status=False, message='user has no shops', data='')
    serialized = Shop.serialize_list(shops)
    for shop in serialized:
        previews = get_preview_prodcuts_by_sid(shop['id'], 4)
        shop['preProductImgs'] = [product.img for product in previews]
    return jsonify(status=True, message='all shops', data=serialized)
def restoreSettings(self):
    """Restore window geometry/state, tray-icon visibility and host groups
    from the persisted configuration."""
    try:
        self.restoreGeometry(self.config.getValue("geometry"))
        self.restoreState(self.config.getValue("windowState"))
    except Exception:
        logger.info("No settings to restore")
    # Restore tray icon state.
    self.tray.setVisible(
        self.config.getBooleanValue('trayIconVisibility', True))
    self.showHostsInGroups = self.config.getBooleanValue(
        'showHostsInGroups', False)
    if not self.tray.isVisible():
        # Without a tray icon the main window must always be shown.
        self.show()
    else:
        self.setVisible(
            self.config.getBooleanValue('mainWindowVisibility', True))
    self.groups = {
        str(k): v
        for k, v in self.config.getValue('groups', {}).items()
    }