def required_web_token_authorize(f):
    # decorator that rejects requests without a valid bearer token
    def verify_token(*args, **kwargs):
        logger = logger_config.get_logger()
        token = request.headers.get("Authorization", "").replace("Bearer", "").strip()
        if not token:
            raise Unauthorized()
        try:
            decode_token.get_token(token)
            return f(*args, **kwargs)
        except Exception:
            logger.warning("token validation failed", exc_info=True)
            raise Forbidden()
    return verify_token
class cache:
    cache = {}
    FILENAME = "data/cache.pickle"
    LOADED = False
    log = logger_config.get_logger(__name__)

    def __init__(self, key, secs, per_args=None):
        # lazily load the pickled cache from disk on first instantiation
        if not cache.LOADED:
            my_file = Path(cache.FILENAME)
            if my_file.is_file():
                try:
                    with open(cache.FILENAME, 'rb') as fp:
                        cache.cache = pickle.load(fp)
                    self.log.debug(
                        f"opened cache db: {len(cache.cache)} entries")
                    cache.LOADED = True
                except Exception:
                    self.log.error("failed to load cache file")
                    cache.LOADED = False
        self.__secs = secs
        self.__key = key
        self.__per_args = per_args or []

    def __call__(self, fn):
        def wrapped(*args, **kwargs):
            # build the cache key from the base key plus selected call arguments
            key = self.__key
            for arg_pos in self.__per_args:
                if arg_pos >= len(args):
                    raise IndexError(
                        f"argument {arg_pos} not found in the invocation of "
                        f"{fn}. Make sure you are calling the method with the "
                        "right number of arguments")
                key += "|" + str(args[arg_pos])
            if key in cache.cache:
                entry = cache.cache[key]
                if entry[0] + self.__secs >= time():
                    return entry[1]
                self.log.debug(f"cache expired for key: {key}")
            return_value = fn(*args, **kwargs)
            if return_value is None:
                self.log.warning(f"'None' return! key: {key}")
            cache.cache[key] = [time(), return_value]
            return return_value
        return wrapped

    @staticmethod
    def persist():
        with open(cache.FILENAME, 'wb') as fp:
            pickle.dump(cache.cache, fp)

    @staticmethod
    def invalidate(key):
        if key in cache.cache:
            del cache.cache[key]
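# --- Example usage (illustrative sketch; `get_rate` and `expensive_lookup`
# are hypothetical, not part of the original code) ---
#
# @cache("rates.get_rate", 60, per_args=[0, 1])   # key varies per fsym/tsym
# def get_rate(fsym, tsym):
#     return expensive_lookup(fsym, tsym)
#
# get_rate("BTC", "USD")   # first call hits the backend and caches the result
# get_rate("BTC", "USD")   # within 60s, served from cache.cache
# cache.persist()          # write the cache dict to data/cache.pickle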
def post_user(self, request):
    logger = logger_config.get_logger()
    payload = request.get_json()
    if not payload:
        return {"message": "payload is required"}, 400
    fields_required = ["username", "password"]
    for field in fields_required:
        if not payload.get(field):
            return {"message": f"field {field} is required"}, 400
    payload['password'] = generate_password_hash(payload['password'])
    logger.info("User - User created successfully")
    return self.user.insert_user(payload)
class TgApi:
    TG_BASE_URL = "https://api.telegram.org"
    log = logger_config.get_logger(__name__)

    def __init__(self):
        # retry transient server errors on every request to the Telegram API
        self.request_session = requests.Session()
        retries = Retry(total=5,
                        backoff_factor=0.1,
                        status_forcelist=[500, 502, 503, 504])
        self.request_session.mount(TgApi.TG_BASE_URL,
                                   HTTPAdapter(max_retries=retries))

    def getTgUrl(self, methodName):
        return f'{TgApi.TG_BASE_URL}/bot{config.TG_TOKEN}/{methodName}'

    def sendMessage(self, msg, chatid, parse_mode=None):
        self.log.debug(f"sending msg to {chatid} '{msg}'")
        url = self.getTgUrl('sendMessage')
        # send through the retrying session
        return self.request_session.post(url=url,
                                         data={
                                             'chat_id': chatid,
                                             'text': msg,
                                             'parse_mode': parse_mode
                                         })

    def sendPhoto(self, fileName, caption, chatid, parse_mode=None):
        url = self.getTgUrl('sendPhoto')
        # close the file handle once the upload completes
        with open(fileName, 'rb') as photo:
            return self.request_session.post(url=url,
                                             data={
                                                 'chat_id': chatid,
                                                 'caption': caption,
                                                 'parse_mode': parse_mode,
                                             },
                                             files={'photo': photo})

    def getUpdates(self, last_update):
        offset = last_update + 1
        url = self.getTgUrl('getUpdates')
        r = self.request_session.post(url=url,
                                      data={
                                          'offset': offset,
                                          'limit': 100,
                                          'timeout': 9
                                      })
        updates = r.json()
        if 'ok' not in updates or not updates['ok']:
            return None
        return updates['result']
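# --- Example usage (illustrative sketch; the chat id is a made-up value and
# the bot token comes from config.TG_TOKEN) ---
#
# api = TgApi()
# api.sendMessage("hello", chatid=123456789)
# api.sendPhoto("charts/BTCUSDT.png", caption="BTC/USDT", chatid=123456789)
# for update in api.getUpdates(last_update=0) or []:
#     print(update["update_id"], update.get("message", {}).get("text"))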
def process_login(request):
    logger = logger_config.get_logger()
    payload = request.get_json()
    if not payload:
        return format_error_response(400, 'Bad Request', 'No payload.')
    if not payload.get('username') or not payload.get('password'):
        return format_error_response(
            400, 'Bad Request', 'Missing members: username and/or password.')
    # fetch the user once and reuse it instead of querying twice
    user = ApiUserController().get_user(payload)
    if not user:
        return format_error_response(404, 'Not Found', 'username not found')
    username = payload['username']
    password = payload['password']
    if not verify_password(user['password'], password):
        return format_error_response(
            400, 'Bad Request', 'username and/or password is invalid')
    payload_jwt = {
        "id": 1,
        "username": username,
        'exp': datetime.datetime.utcnow() +
               datetime.timedelta(minutes=int(os.getenv("MINUTES")))
    }
    token = jwt.encode(payload_jwt, os.environ.get('SECRET'), algorithm="HS256")
    data = {"id": user['username'], "token": token}
    logger.info("Login - User authentication success")
    return format_success_response(200, data)
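# --- Illustrative sketch (not part of the original module): the token issued
# above can be verified with the same secret and algorithm via PyJWT; an
# expired token raises jwt.ExpiredSignatureError ---
#
# import jwt
# claims = jwt.decode(token, os.environ.get('SECRET'), algorithms=["HS256"])
# claims["username"]   # -> the username encoded at login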
def get_card(number_card):
    logger = logger_config.get_logger()
    # mask the card number the same way it is stored: first 4 + last 4 digits
    user_card = f'{number_card[:4]}*****{number_card[-4:]}'
    base_card = Card().find_card({"card": user_card})
    if not base_card:
        return format_error_response(404, 'Not Found', 'Card Not Found')
    if not verify_password(base_card['hash_card'], number_card):
        return format_error_response(400, 'Bad Request', 'card number invalid')
    data = {"id": str(base_card['_id']), "card": user_card}
    logger.info('Card - Get card successfully')
    return format_success_response(200, data)
def main():
    parser = create_parser()
    args = parser.parse_args()
    # fall back to environment variables when CLI arguments are not given;
    # the 0-latency default is an assumption to avoid float(None) crashing
    mimic_download_latency = args.mimic_download_latency or float(
        os.environ.get('MIMIC_DOWNLOAD_LATENCY', 0))
    photos_directory = args.photos_directory or os.environ.get(
        'PHOTOS_DIRECTORY')
    logging_lvl = logging.INFO if args.enable_logging else logging.NOTSET
    logger = get_logger(__file__, logging_lvl)

    app = web.Application()
    download_service = DownloadService(mimic_download_latency,
                                       photos_directory, logger)
    app.add_routes([
        web.get('/', download_service.handle_index_page),
        web.get('/archive/{archive_hash}/', download_service.archivate),
    ])
    web.run_app(app)
def run(self):
    self.log = logger_config.get_logger(__name__)
    try:
        with open(config.DB_FILENAME, 'rb') as fp:
            self.db = pickle.load(fp)
    except Exception:
        self.log.error("error loading db, defaulting to empty db")
        self.db = {}
    self.api = TgApi()
    self.repository = MarketRepository()
    self.command_handler = CommandHandler(self.api, self.repository, self.db)
    self.log.debug(f"db at start: {self.db}")
    self.last_update = self.db.get('last_update', 0)

    # main loop
    loop = True
    while loop:
        try:
            updates = self.api.getUpdates(self.last_update)
            if updates is None:
                self.log.error('get update request failed')
            else:
                self.processUpdates(updates)
            try:
                self.processAlerts()
            except Exception:
                self.log.exception("exception at processing alerts")
        except KeyboardInterrupt:
            self.log.info("interrupt received, stopping…")
            loop = False
        except Exception:
            self.log.exception("exception at processing updates")
            loop = False
        self.persist_db()
        cache.persist()
        time.sleep(1)
def post_card(request):
    logger = logger_config.get_logger()
    payload = request.get_json()
    if not payload:
        return format_error_response(400, 'Bad Request', 'No payload.')
    fields_required = ["card"]
    for field in fields_required:
        if not payload.get(field):
            return format_error_response(400, 'Bad Request',
                                         f'field {field} is required')
    if len(payload['card']) <= int(os.getenv('DIGITS')):
        return format_error_response(
            400, 'Bad Request',
            f'number must be greater than {os.getenv("DIGITS")} characters')
    if not payload['card'].isdigit():
        return format_error_response(400, 'Bad Request',
                                     'card must contain only digits')
    payload['hash_card'] = generate_password_hash(payload['card'])
    # store only a masked copy, e.g. '4111111111111111' -> '4111*****1111'
    numbers_card = payload['card']
    payload['card'] = f'{numbers_card[:4]}*****{numbers_card[-4:]}'
    try:
        id = Card().insert_card(payload)
        logger.info('Card - card successfully registered')
        data = {"id": str(id)}
        return format_success_response(201, data)
    except Exception:
        logger.error('Card - Error registering card', exc_info=True)
        return format_error_response(500, 'Internal Server Error',
                                     'error registering card')
def get_user(self, data):
    logger = logger_config.get_logger()
    logger.info("User - Get - User success")
    return self.user.find_user({"username": data['username']})
from models import db, Location, Flat, Confluence, Building
from sqlalchemy.orm import sessionmaker
from math import radians, cos, sin, asin, sqrt, pi
from logger_config import get_logger

log = get_logger('engine_logger')
log.setLevel('DEBUG')

Session = sessionmaker(bind=db)


def find_house_on_location():
    session_db = Session()
    locations = session_db.query(Location).all()
    buildings = session_db.query(Building).all()
    # compare every building against every location's radius
    for location in locations:
        for building in buildings:
            coordinates = {
                'point_1_lon': location.longitude,
                'point_1_lat': location.latitude,
                'point_2_lon': building.longitude,
                'point_2_lat': building.latitude,
            }
            distance = calculate_distance(coordinates)
            radius = calculate_radius(location.area)
            if radius >= distance:
                building.location = location.id
    # persist the location assignments
    session_db.commit()
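# --- Illustrative sketch: the math imports above suggest calculate_distance
# is a haversine great-circle distance; the project's actual implementation
# may differ ---
#
# def calculate_distance(c):
#     lon1, lat1, lon2, lat2 = map(radians, [c['point_1_lon'], c['point_1_lat'],
#                                            c['point_2_lon'], c['point_2_lat']])
#     a = (sin((lat2 - lat1) / 2) ** 2
#          + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2)
#     return 6371 * 2 * asin(sqrt(a))   # kilometres, Earth radius ~6371 km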
import asyncio

import pyppeteer
from fake_useragent import UserAgent
from screeninfo import get_monitors
from pyppeteer import launch
from random import uniform
from time import sleep

from logger_config import get_logger
from bufferization import Buffer

logger = get_logger('webdriver')


class Webdriver:
    _ua = UserAgent()
    _buf = Buffer()
    _monitor = get_monitors()[0]
    _viewport = {
        'width': _monitor.width,
        'height': _monitor.height,
    }

    async def init_browser(self, hidden=False, language='en-gb'):
        '''
        Initialize the browser by opening pages and setting headers and
        parameters before starting lead generation.

        :param hidden: Run Webdriver in headless mode
        :param language: Accept-Language header value, English by default; see
            https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry
        :return: returns nothing
        '''
from locators import *
from my_config import *
from time import sleep
from random import uniform
from bs4 import BeautifulSoup
from pyppeteer.errors import PageError
import sys

sys.path.append('..')
from logger_config import get_logger
from webdriver import Webdriver

logger = get_logger('webdriver.google_maps')


class GoogleMaps(Webdriver):
    async def search(self, location=None, keyword=None, url=None):
        '''
        :param keyword: Description to generate leads (e.g., 'Pizza Delivery')
        :param location: Location to search (e.g., 'California')
        :param url: Direct URL to a results page. Must be specified without
            `keyword` and `location`
        :return: returns nothing
        '''
        if '_page' not in self.__dict__:
            raise ValueError(
                'Initialize the browser before searching by `await *.init_browser()`'
            )
import requests
from pyppeteer.errors import TimeoutError
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
from random import uniform
from time import sleep

from locators import *
from my_config import *
import sys

sys.path.append('..')
from webdriver import Webdriver
from logger_config import get_logger

logger = get_logger('webdriver.yelp')
ua = UserAgent()


def _found(loc, desc):
    # Yelp renders 'No Results for <desc>' on an empty search results page
    return f'No Results for {desc}' not in requests.get(
        yelp.format(desc, loc), headers={'User-Agent': ua.random}).text


class Yelp(Webdriver):
    _yelp = yelp.split('search')[0]

    async def search(self, loc=None, desc=None, url=None):
        '''
        :param desc: Description to generate leads (e.g., 'Sportswear')
def __init__(self, api, repository, db):
    self.repository = repository
    self.db = db
    self.api = api
    self.log = logger_config.get_logger(__name__)
from my_config import *
from os import environ
from time import sleep
from random import uniform, randint
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from pyppeteer.errors import PageError
import sys

sys.path.append('..')
from logger_config import get_logger
from webdriver import Webdriver

logger = get_logger('webdriver.linkedin')
ua = UserAgent()


class LinkedIn(Webdriver):
    def __init__(self, email=None, pwd=None):
        if not (email and pwd):
            # environ.get returns None rather than raising, so check explicitly
            self.email = environ.get('linkedin_mail')
            self.pwd = environ.get('linkedin_pwd')
            if not (self.email and self.pwd):
                logger.fatal(
                    'Cannot get email and password to login in LinkedIn account')
        else:
class MarketRepository(object):
    binance_api = RestApiBinance()
    crypto_compare = CryptoCompare()
    log = logger_config.get_logger(__name__)

    TSYMS = [
        'BTC', 'USD', 'EUR', 'SEK', 'IRR', 'JPY', 'CNY', 'GBP', 'CAD', 'AUD',
        'RUB', 'INR', 'USDT', 'ETH'
    ]

    @cache("market.symbols", 3600)
    def get_symbols(self):
        return self.crypto_compare.get_symbols()

    def isPricePairValid(self, fsym, tsym):
        return fsym in self.get_symbols().keys() and tsym in self.TSYMS

    @cache("market.top", 30)
    def get_top_coins(self):
        tsym = "USD"  # must be in CAPS
        top_coins = self.crypto_compare.get_top(tsym)
        out = "`"
        for coin in top_coins:
            cap_f = math.floor(float(coin["cap"]))
            if cap_f > 1000 * 1000 * 1000:
                cap_s = '${:.2f}B'.format(cap_f / (1000 * 1000 * 1000))
            else:
                cap_s = '${:.3f}M'.format(cap_f / (1000 * 1000))
            out = f"{out}{coin['rank']}: {coin['symbol']} {coin['price']} \t {cap_s}\n"
        return out + '`'

    # prices are fetched in partitions of symbols; each partition response is
    # cached briefly to limit API calls
    PARTITION_SIZE = 45
    CACHE_DURATION_PRICE = 10.0
    last_price_queries = {}
    price_partitions = {}

    def get_price(self, fsym, tsym):
        symbols = self.get_symbols()
        index = list(symbols.keys()).index(fsym)
        partition = index // MarketRepository.PARTITION_SIZE
        if (partition not in MarketRepository.last_price_queries) or (
                time() - MarketRepository.last_price_queries[partition] >
                MarketRepository.CACHE_DURATION_PRICE):
            index_start = max(0, partition * MarketRepository.PARTITION_SIZE - 2)
            index_end = index_start + MarketRepository.PARTITION_SIZE
            fsyms = list(symbols.keys())[index_start:index_end]
            self.price_partitions[partition] = self.crypto_compare.get_price(
                fsyms, self.TSYMS)
            MarketRepository.last_price_queries[partition] = time()
        return self.price_partitions[partition][fsym][tsym]

    def get_price_if_valid(self, fsym, tsym):
        if not self.isPricePairValid(fsym, tsym):
            self.log.debug(f"price pair not valid {fsym} {tsym}")
            return None
        return self.get_price(fsym, tsym)

    @cache("market.chart", 30, [1, 2, 3])
    def get_chart(self, fsym, tsym, tf):
        CANDLES = 170
        ROOT = "charts"
        fsym = fsym.upper()
        tsym = tsym.upper()
        self.log.debug(f"generating chart for {fsym} {tsym}")
        if tsym == "USD":
            tsym = "USDT"
        pair = fsym + tsym
        filenameBase = f"{pair}-{tf.value}-{CANDLES}"
        # drop stale chart images for the same pair/timeframe
        toRemove = [f for f in listdir(ROOT) if f.startswith(filenameBase)]
        for f in toRemove:
            remove(f"{ROOT}/{f}")
        filename = f"{ROOT}/{filenameBase}-{time()}.png"
        pairs = self.binance_api.get_pairs()
        if (fsym, tsym) not in pairs:
            return None
        c = self.binance_api.get_candles(pair, tf, CANDLES)
        dr = DrawChart()
        dr.save(filename, c,
                f"{pair}-{tf.value}-Binance\n@crypto_price_notification_bot ")
        return filename

    def get_chart_far(self, fsym, tsym):
        return self.get_chart(fsym, tsym, CandleInterval.FOUR_HOUR)

    def get_chart_near(self, fsym, tsym):
        return self.get_chart(fsym, tsym, CandleInterval.FIFTEEN_MINUTE)
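# --- Illustrative sketch of the price-partition math in get_price above:
# symbols are grouped into blocks of PARTITION_SIZE, and one CryptoCompare
# request per block is cached for CACHE_DURATION_PRICE seconds. The numbers
# below are made up. ---
#
# index = 97                         # position of fsym in get_symbols()
# partition = index // 45            # -> partition 2
# index_start = max(0, 2 * 45 - 2)   # -> 88
# # one request covers symbols 88..132, so fsym at index 97 is served from it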
        out[file] = []
        logger.debug('Processing conversion of %s' % file)
        try:
            # pull each metric out of a sysbench report line, e.g.
            # "[ 10s ] thds: 8 tps: 512.34 qps: 10246.80 (r/w/o: ...) lat (ms,95%): 12.30 err/s: 0.00 reconn/s: 0.00"
            with open(file + '.txt') as f:
                for line in f.readlines():
                    if goodline.search(line):
                        time = line.split('[')[1].split(']')[0].strip(' ')
                        thds = line.split(']')[1].split('thds:')[1].split('tps')[0].strip(' ')
                        tps = line.split('tps:')[1].split('qps')[0].strip(' ')
                        qps = line.split('qps:')[1].split('(r/w/o:')[0].strip(' ')
                        latency = line.split('lat (ms,95%):')[1].split('err/s')[0].strip(' ')
                        errors = line.split('err/s:')[1].split('reconn/s')[0].strip(' ')
                        out[file].append({'time': time, 'thds': thds,
                                          'tps': tps, 'qps': qps,
                                          'latency': latency,
                                          'errors': errors})
        except Exception:
            logger.error('Could not open %s' % (file + '.txt'), exc_info=True)
    return out


if __name__ == "__main__":
    args = parse_args()
    logger = logger_config.get_logger('plot-sysbench')
    logger.info('Kicking off graph preparation.')
    df = prepare(extract_from_file())
    render(df)
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os

import pandas as pd

from models import db
from logger_config import get_logger

log = get_logger('import_logger')
log.setLevel('DEBUG')


class ImportFile:
    def __init__(self, input_file, table_name):
        """Python 3.9. Class for splitting one file into many.

        :param input_file: File to import into the DB
        """
        self.input_file = input_file
        self.all_data_frame = pd.DataFrame()
        self.column_names = ''
        self.sheet_names = ''
        self.table_name = table_name

    def check_extension(self):
        log.debug('check_extension')
        _file_extension = os.path.splitext(self.input_file)[1]
        if _file_extension in ('.xls', '.xlsx'):
            self.import_xls(self.input_file)
        elif _file_extension == '.csv':
            self.import_csv(self.input_file)
import os

import logger_config
from flask import (
    Flask,
    request,
    jsonify,
)

from decorator.token_web_authorize_jwt import required_web_token_authorize
from controllers import (ApiAuthController, ApiCardController)
from controllers.ApiUserController import ApiUserController

app = Flask(__name__)
logger_config.configure_logger(app, __name__)
logger = logger_config.get_logger()


@app.route("/")
def hello():
    return jsonify({"message": "Welcome to the Hypercriativa Challenge"})


@app.route('/card', methods=['POST'])
@required_web_token_authorize
def card():
    http_code, json_payload = ApiCardController.post_card(request)
    logger.debug(f'AUTH - HTTP_CODE: {http_code}')
    response = app.response_class(response=json_payload,
                                  status=http_code,
                                  mimetype='application/json')
    return response
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os

import pandas as pd

from logger_config import get_logger

log = get_logger('Slice_XLS_logger')
log.setLevel('DEBUG')

path = 'data/2020.11.24 Выгрузка УК ноябрь.xlsx'


class SliceXLS:
    def __init__(self, input_xls):
        '''Python 3.9. Class for splitting one file into many.

        :param input_xls: File to split
        '''
        self.input_xls = input_xls
        self.all_data_frame = pd.DataFrame()
        self.column_names = ''
        self.sheet_names = ''

    def read_sheets(self):
        '''
        Open the Excel file and read its sheet names.

        :return: list of sheet names
        '''
        try:
            with pd.ExcelFile(self.input_xls) as source_xls:
                self.sheet_names = source_xls.sheet_names
import sys

from flask import Flask, request
from flask_socketio import SocketIO, emit
from flask_cors import CORS

from classifier import CaripelaClassifier
import logger_config

logger = logger_config.get_logger('web-server')


def log_uncaught(exctype, value, tb):
    # use lazy %s formatting so the values actually reach the log record
    logger.error('Type: %s', exctype)
    logger.error('Value: %s', value)
    logger.error('Traceback: %s', tb)

# sys.excepthook = log_uncaught

# initialize Flask
app = Flask(__name__)
cors = CORS(app)
socketio = SocketIO(app)
clasificados = CaripelaClassifier()

"""
Handle connections
"""


@socketio.on('connect')
def on_connect():
import os

from logger_config import get_logger

log = get_logger('logger_main')

if __name__ == '__main__':
    pass
    # slxl = SliceXLS(path)
    # slxl.run()
import logging

import pandas as pd

from transformer import create_transformer_output
import operation as operation
import logger_config as lc

logger = lc.get_logger("transformer_log", "logs/logger.log")
logger.setLevel(logging.DEBUG)
# logger.debug(pd.show_versions(as_json=False))


def transform_tr_set(input_df, transformer_data):
    logger.info("Start Processing")
    logger.debug(f"Transformer Data\n{transformer_data}")
    logger.debug(f"Input Dataframe\n{input_df}")
    output_df = create_transformer_output(input_df, transformer_data)
    logger.debug(f"Output is\n{output_df}")
    logger.info("Process completed")
    return output_df


def delete_r_nan(df):
    # drops NaN rows in place via operation.drop_nan_rows
    logger.info("Start Processing - Delete NaN")
    logger.debug(f"Input DataFrame\n{df}")
    operation.drop_nan_rows(df)
    logger.debug(f"Output Data\n{df}")
    logger.info("End Processing")

'''
import csv

from logger_config import get_logger

logger = get_logger('buffer')


class Buffer:
    lower_limit = 5
    upper_limit = 200
    fn = ['Title', 'Address', 'WebSite', 'PhoneNumber']

    def __init__(self, filename: str = 'leads.csv', buffer_size: int = 5):
        if not filename.endswith('.csv'):
            raise ValueError(
                'Incorrect filename specified. Filename should end with `.csv`'
            )
        if buffer_size not in range(self.lower_limit, self.upper_limit + 1):
            raise ValueError(
                f'Expected value between {self.lower_limit} and '
                f'{self.upper_limit}. However, got {buffer_size}')
        self._buffer_size = buffer_size
        self._filename = filename
        self._data = []
        # start a fresh CSV containing just the header row
        with open(self._filename, 'w', newline='') as f:
            writer = csv.DictWriter(f, fieldnames=self.fn)
            writer.writeheader()

    def dump(self):
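# --- Example usage (illustrative sketch; the class is truncated above, so
# only construction and its validation are shown) ---
#
# buf = Buffer(filename='leads.csv', buffer_size=10)  # writes the CSV header
# Buffer(filename='leads.txt')   # raises ValueError: must end with `.csv`
# Buffer(buffer_size=1000)       # raises ValueError: outside 5..200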
# Script for data ingestion
import argparse
from argparse import RawTextHelpFormatter
import csv
import json
from datetime import datetime

import requests
from elasticsearch import Elasticsearch, helpers

from logger_config import get_logger

logger = get_logger(__name__)


def load_data(input_file, index_name):
    try:
        elastic_search = Elasticsearch()
        with open(input_file) as f:
            reader = csv.DictReader(f)
            rows = []
            for row in reader:
                # route every row to the target index for the bulk helper
                row["_index"] = index_name
                rows.append(row)
            helpers.bulk(elastic_search, rows)
    except Exception as ex:
        logger.error(ex)


def update_data(index_name):
    try:
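# --- Example usage (illustrative sketch; file and index names are made up) ---
#
# load_data('leads.csv', 'leads')   # bulk-indexes every CSV row into 'leads'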
import numpy as np

from image_processor import ImageProcessor
from model_manager import ModelManager
from dataset import Dataset
from mobilenet_predictor import MobilenetPredictor
from personal_trainner import PersonalTrainer
import logger_config

logger = logger_config.get_logger(__name__)

CROP_FACES = True


class CaripelaClassifier:
    def __init__(self):
        self.image_processor = ImageProcessor(crop_face=CROP_FACES)
        self.model_manager = ModelManager()
        # initialize volatile variables
        self.prediction_model = MobilenetPredictor()
        self.personal_trainer = PersonalTrainer()
        self.sessions = {}

    def create_session(self, session_id):
        self.sessions[session_id] = {
            'dataset': Dataset(),
            'custom_model': self.model_manager.load_custom()
        }
        logger.debug('create_session > session created with id %s', session_id)

    def reset(self, session_id):
        self.clean_data_holder(session_id)