def connection_made(self, transport):
    """Asyncio protocol callback: store the transport and authenticate.

    Derives a hex PBKDF2-HMAC-SHA512 auth key from the configured
    password, creates or updates the local User row, announces presence
    to the server, and caps the transport read size for chunked framing.

    Args:
        transport: the asyncio transport for this connection.
    """
    self.transport = transport
    user = User.by_name(settings.USER_NAME)
    # NOTE(review): the user name is a low-entropy salt and 10000
    # iterations is low for PBKDF2-SHA512 by current guidance — confirm
    # this matches what the server expects before changing.
    hash_ = binascii.hexlify(
        hashlib.pbkdf2_hmac(
            'sha512',
            settings.get('password').encode('utf-8'),
            settings.USER_NAME.encode('utf-8'),
            10000,
        ))
    if user:
        # Existing local user: refresh credentials; marked inactive until
        # the server confirms — presumably toggled elsewhere, verify.
        user.password = settings.get('password')
        user.auth_key = hash_
        user.active = False
    else:
        user = User(username=settings.USER_NAME, password=settings.get('password'), auth_key=hash_)
    # DB writes are serialized through the module-level lock.
    with db_lock:
        user.save()
    logger.debug('Установлено соединение с сервером.')
    self.write(Message.presence())
    # Remember the transport's default max_size, then cap reads to
    # CHUNK_SIZE so incoming data arrives in fixed-size chunks.
    self.def_size, self.transport.max_size, self.cur_size = self.transport.max_size, self.CHUNK_SIZE, self.CHUNK_SIZE
    self._thread.notify('new_connect')
    # Buffer for reassembling messages that span multiple chunks.
    self.long_data = b''
def send_html_email(mail_to, text, html, server, test=False):
    """Send a multipart (plain-text + HTML) nagger email to each owner.

    Args:
        mail_to: a single address or a list of addresses.
        text: plain-text message body.
        html: HTML message body.
        server: SMTP server instance used to send.
        test: accepted for API compatibility; currently unused.

    Returns:
        The result of ``server.sendmail``.
    """
    recipients = mail_to if isinstance(mail_to, list) else [mail_to]
    sender = settings.get("EMAIL.from", "*****@*****.**")

    message = MIMEMultipart("alternative")
    base_subject = settings.get("EMAIL.subject", "[Nagger BZ] Bugzilla cleanup report")
    message["Subject"] = f"{base_subject} - {datetime.now().date().isoformat()}"
    message["From"] = sender
    message["To"] = ", ".join(recipients)

    # Per RFC 2046 the last part of a multipart/alternative message is
    # the preferred one, so the HTML part is attached after plain text.
    message.attach(MIMEText(text, "plain"))
    message.attach(MIMEText(html, "html"))

    return server.sendmail(sender, recipients, message.as_string())
def test_get_fresh():
    """Fresh reads should bypass the cache and re-run loaders.

    Several asserts intentionally pin current (possibly buggy) library
    behavior; the inline comments flag where behavior should differ.
    """
    assert settings.MUSTBEFRESH == 'first'
    os.environ['DYNACONF_MUSTBEFRESH'] = 'second'
    with pytest.raises(AssertionError):
        # fresh should now be second
        assert settings.exists('MUSTBEFRESH')
        assert settings.get_fresh('MUSTBEFRESH') == 'first'
    assert settings.get_fresh('MUSTBEFRESH') == 'second'
    os.environ['DYNACONF_THISMUSTEXIST'] = '@int 1'
    # must not exist yet (not loaded)
    assert settings.exists('THISMUSTEXIST') is False
    # must exist because fresh will call loaders
    assert settings.exists('THISMUSTEXIST', fresh=True) is True
    # loaders run only once
    assert settings.get('THISMUSTEXIST') == 1
    os.environ['DYNACONF_THISMUSTEXIST'] = '@int 23'
    del os.environ['DYNACONF_THISMUSTEXIST']
    # this should error because envvar got cleaned
    # but it is not, so cleaners should be fixed
    assert settings.get_fresh('THISMUSTEXIST') is None
    with pytest.raises(AttributeError):
        settings.THISMUSTEXIST
    with pytest.raises(KeyError):
        settings['THISMUSTEXIST']
    os.environ['DYNACONF_THISMUSTEXIST'] = '@int 23'
    os.environ['BLARG_THISMUSTEXIST'] = '@int 99'
    # namespace switch is deleting the variable
    with settings.using_namespace('BLARG'):
        assert settings.get('THISMUSTEXIST') == 99
    assert settings.get('THISMUSTEXIST') == 23
def arg_parser():
    """Build the CLI parser, merge parsed args into settings, set up logging.

    CLI values override settings; NOTE(review): falsy values (None, False,
    0, '') are skipped by the merge loop below, so a flag cannot be
    explicitly disabled from the command line — confirm this is intended.
    """
    parser = argparse.ArgumentParser()
    parser.description = 'Talkative - Server Messager for study'
    parser.add_argument('--config', nargs='?')
    parser.add_argument('-e', '--encoding', nargs='?', help=f'Encoding (default "{settings.get("ENCODING")}")')
    parser.add_argument('-a', '--host', nargs='?', help=f'IP (default "{settings.get("HOST")}")')
    parser.add_argument('-p', '--port', nargs='?', help=f'Port (default "{settings.get("PORT")}")')
    parser.add_argument('--no-async', dest='no_async', action='store_true', help='Start do not async server')
    parser.set_defaults(no_async=False)
    parser.add_argument(
        '-v',
        '--verbose',
        action='count',
        default=settings.get('LOGGING_LEVEL'),
        help=f'Increase verbosity of log output (default "{settings.get("LOGGING_LEVEL")}")',
    )
    # GUI and console modes are mutually exclusive.
    log_group = parser.add_mutually_exclusive_group()
    log_group.add_argument('-g', '--gui', dest='gui', action='store_true', help='Start GUI Configuration')
    log_group.set_defaults(gui=settings.get('GUI'))
    log_group.add_argument('-c', '--console', dest='console', action='store_true', help='Start cli')
    log_group.set_defaults(console=settings.get('console'))
    namespace = parser.parse_args()
    if namespace.config:
        # An explicit config file is loaded before CLI values are merged.
        settings.load_file(path=namespace.config)
    for k, v in vars(namespace).items():
        if not v:
            continue
        settings.set(k, v)
    _configure_logger(namespace.verbose)
def process_request(self, req, resp): """Process the request before routing it. Args: req: Request object that will eventually be routed to an on_* responder method. resp: Response object that will be routed to the on_* responder. """ # Set content type resp.content_type = "application/json" if not req.headers: return # Update https headers origin = req.headers.get("ORIGIN") or req.headers.get("HTTP_ORIGIN") if settings.get("AWOKADO_DEBUG") or ( origin and origin in settings.ORIGIN_HOSTS ): resp.append_header(name="Access-Control-Allow-Origin", value=origin) resp.set_headers( settings.get( "AWOKADO_ACCESS_CONTROL_HEADERS", DEFAULT_ACCESS_CONTROL_HEADERS ) )
def get_db():
    """Return the MySQL connection URI assembled from settings."""
    return (
        f"mysql+pymysql://{settings.get('MYSQL_USER')}"
        f":{settings.get('MYSQL_PASS')}"
        f"@{settings.get('MYSQL_HOST')}"
        f"/{settings.get('MYSQL_DATABASE')}"
    )
def _update_report(report):
    """Update the report with its derived parameters and persist it.

    Fills in name, filename, mimetype, download/view URLs and marks the
    report as "running", then replaces the document in MongoDB.

    Args:
        report: report dict containing at least "id" and "parameters".
    """
    report_type = report["parameters"]["type"]
    report["name"] = _generate_report_name(report["parameters"])
    report_filename = "{}.{}".format(report["name"], REPORTS[report_type]["extension"])
    # Look the backend URL up once instead of three times; "url" and
    # "download_url" were literally the same string before, so build it once.
    backend_url = settings.get("BACKEND_URL", "http://localhost:8080")
    download_url = "{}/api/report/{}/download/{}".format(
        backend_url, report["id"], report_filename)
    view_url = "{}/api/report/{}/view/{}".format(
        backend_url, report["id"], report_filename)
    report.update({
        "filename": report_filename,
        "mimetype": REPORTS[report_type]["mimetype"],
        "url": download_url,
        "download_url": download_url,
        "view_url": view_url,
        "status": "running",
    })
    mongo.reports.replace_one({"_id": ObjectId(report["id"])}, report)
def construct(**config):
    """Application factory: build and configure the Flask app with CORS."""
    static_dir = settings.get('STATIC_DIR')
    views_dir = settings.get('VIEWS_DIR')
    app = Flask(__name__, static_folder=static_dir, template_folder=views_dir)
    configuration.init_app(app, **config)
    CORS(app)
    return app
def _setup(self, *args, **kwargs):
    """Set up the database engine, schema and scoped session.

    Raises:
        e: (Exception) DATABASES setting required

    Args:
        *args: extra positional parameters (unused)
        **kwargs: extra keyword parameters (unused)
    """
    try:
        db_settings = settings.get(f'DATABASES.{self.envs}')
    except Exception as e:
        logger.critical('DATABASES setting required')
        raise e
    # The configured DB file name may contain a {user} placeholder.
    db_name = Path(
        db_settings.get('NAME', '').format(**{'user': settings.USER_NAME}))
    db_name.parent.mkdir(parents=True, exist_ok=True)
    self.engine = sa.create_engine(
        f'{db_settings.get("ENGINE", "sqlite")}:///{db_name}',
        echo=settings.get('DEBUG_SQL', False),
        connect_args=db_settings.get('CONNECT_ARGS'),
    )
    Base.metadata.create_all(self.engine)
    # scoped_session hands out thread-local sessions from one factory.
    session_factory = sessionmaker(bind=self.engine)
    session = scoped_session(session_factory)
    Core.set_session(session())
    # Start with a clean active-users table on every setup.
    ActiveUsers.delete_all()
def create_route(name):
    """Create the route-handling file for *name*.

    A route is only created when its controller already exists and no
    route of the same name is present; otherwise a (Portuguese) error
    message is printed. The generated module is formatted with black
    before being written.

    Args:
        name: route name; lowercased for files, capitalized for classes.
    """
    name = name.lower()
    # A route requires its controller to exist first.
    if path.exists('app/http/controllers/' + name.capitalize() + 'Controller.py'):
        if check_route_exists(name):
            print("#######")
            print("-> Error!")
            print("-> Rota " + name + " existe!")
            print("-> Verifique o arquivo em app/http/routes/" + name + '.py')
            print("#######")
        else:
            with open(
                    settings.get('FALAFEL_DIR') + settings.get('ROUTES_DIR') +
                    '/' + name + '.py', 'w') as route:
                # Flask Blueprint template: pieces are concatenated, then
                # dedented; black normalizes blank lines afterwards.
                content = dedent("""\
from app.http.controllers import """ + name.capitalize() + """Controller
from flask import Blueprint

""" + name + """ = Blueprint('""" + name + """', __name__, url_prefix='/""" + p.plural(name) + """')


@""" + name + """.route("/", methods=['GET'])
def get():
    # """ + name + """ routes
    # Utilize para separar as rotas da lógica de sua aplicação
    return """ + name.capitalize() + """Controller.get()


def init_app(app):
    app.register_blueprint(""" + name + """)
""")
                # `mode` is a module-level black.FileMode — presumably
                # shared with the other generators; verify.
                formatted = black.format_file_contents(content, fast=False, mode=mode)
                route.write(formatted)
            update_route_list(name)
            print("#######")
            print("-> Rota " + name + " criada com sucesso!")
            print("-> Verifique o arquivo em app/http/routes/")
            print("#######")
    else:
        print("#######")
        print("-> Error!")
        print("-> Controller " + name.capitalize() + " não existe!")
        print(
            "-> Rota precisa de um controlador para funcionar adequadamente.")
        print("-> Crie o controlador primeiro!")
        print("-> Crie um controlador digitando:")
        print("#######")
        print("python3 fava.py -mkcontroller " + name.capitalize())
        print("#######")
def run(self):
    """Luigi task: fetch one day of NYC 311 records and dump them as JSON."""
    # S3 authentication
    ses = boto3.session.Session(profile_name='luigi_dpa', region_name='us-west-2')
    s3_resource = ses.resource('s3')
    # NOTE(review): `obj` is never used after this point — verify whether
    # the bucket handle is needed or is leftover scaffolding.
    obj = s3_resource.Bucket(self.bucket)
    print(ses)
    # Client authentication against the Socrata API
    client = Socrata(settings.get('dburl'),
                     settings.get('apptoken'),
                     username=settings.get('user'),
                     password=settings.get('pass'))
    # Results come back as JSON from the API and are converted to a
    # Python list by sodapy.
    client.timeout = 1000
    limit = 1000000000
    # Query: every record created within the task's calendar day.
    results = client.get(
        "erm2-nwe9",
        limit=limit,
        where=
        f"created_date between '{self.year}-{self.month}-{self.day}T00:00:00.000' and '{self.year}-{self.month}-{self.day}T23:59:59.999'"
    )
    with self.output().open('w') as json_file:
        json.dump(results, json_file)
def json_error_serializer(req: falcon.Request, resp: falcon.Response,
                          exception: BaseApiException):
    """Serialize an API exception into a JSON response with CORS headers.

    Args:
        req: incoming falcon request (used to read the Origin header).
        resp: falcon response being populated.
        exception: the API exception to report; supplies body and status.
    """
    # Serialize exception
    resp.body = exception.to_json()
    # Set content type
    resp.content_type = "application/json"
    resp.append_header("Vary", "Accept")
    resp.status = exception.status

    # Setup CORS: prefer ORIGIN, fall back to HTTP_ORIGIN (same
    # precedence as the original two-variable dance).
    origin = req.headers.get("ORIGIN") or req.headers.get("HTTP_ORIGIN")

    headers = {}
    if settings.get("AWOKADO_DEBUG") or (origin and origin in settings.ORIGIN_HOSTS):
        headers["Access-Control-Allow-Origin"] = origin

    headers_to_set = settings.get("AWOKADO_ACCESS_CONTROL_HEADERS",
                                  DEFAULT_ACCESS_CONTROL_HEADERS)
    # dict.update accepts both a mapping and an iterable of (key, value)
    # pairs. The previous `for k, v in headers_to_set` loop broke when
    # the setting was a dict (iterating a dict yields keys, not pairs).
    headers.update(headers_to_set)
    resp.set_headers(headers)
def test_get_fresh():
    """Fresh reads should bypass the cache and trigger loaders.

    Pins current (partly buggy) behavior; the inline comments flag where
    the library's cleaners should behave differently.
    """
    assert settings.MUSTBEFRESH == "first"
    environ["DYNACONF_MUSTBEFRESH"] = "second"
    with pytest.raises(AssertionError):
        # fresh should now be second
        assert settings.exists("MUSTBEFRESH")
        assert settings.get_fresh("MUSTBEFRESH") == "first"
    assert settings.get_fresh("MUSTBEFRESH") == "second"
    environ["DYNACONF_THISMUSTEXIST"] = "@int 1"
    # must not exist yet (not loaded)
    assert settings.exists("THISMUSTEXIST") is False
    # must exist because fresh will call loaders
    assert settings.exists("THISMUSTEXIST", fresh=True) is True
    # loaders run only once
    assert settings.get("THISMUSTEXIST") == 1
    environ["DYNACONF_THISMUSTEXIST"] = "@int 23"
    del environ["DYNACONF_THISMUSTEXIST"]
    # this should error because envvar got cleaned
    # but it is not, so cleaners should be fixed
    assert settings.get_fresh("THISMUSTEXIST") is None
    with pytest.raises(AttributeError):
        settings.THISMUSTEXIST
    with pytest.raises(KeyError):
        settings["THISMUSTEXIST"]
    environ["DYNACONF_THISMUSTEXIST"] = "@int 23"
    load(settings)
    assert settings.get("THISMUSTEXIST") == 23
def __init__(self):
    """Read the RDS connection parameters from settings."""
    # RDS
    self.str_NombreDB = settings.get('dbname')    # database name
    self.str_UsuarioDB = settings.get('user')     # database user
    self.str_PassDB = settings.get('password')    # database password
    self.str_EndPointDB = settings.get('host')    # RDS endpoint/host
    self.str_Port = settings.get('port')          # connection port
def run(self):
    """Record preprocessing metadata for each parquet file into Postgres.

    Lists the *.parquet files in the task's input path, gathers metadata
    per file via fn.get_preproc_metadata and inserts one row per file
    into preprocessed.etl_execution.
    """
    cwd = os.getcwd()  # current working directory
    file_path = self.input().path
    # find every parquet-format file in the input directory
    ls_parquet_files = fn.execv("ls *.parquet", file_path)
    names_file=ls_parquet_files.split('\n')
    # keep only the file name component of the path
    cmd_name = "echo %s | awk -F \"/\" \'{print $NF}\'" % (file_path)
    file_name = fn.execv(cmd_name, cwd)
    # (A long block of commented-out pandas scaffolding that built the
    # metadata dataframe cell-by-cell with awk/ls commands was condensed
    # away; fn.get_preproc_metadata produces the equivalent tuple.)
    for file in names_file:
        metadat=fn.get_preproc_metadata(file_path,cwd,file)
        # NOTE(review): reconstructed as one connect/insert per file;
        # confirm against the original layout (it may have run once,
        # after the loop, with only the last file's metadata).
        conn=ps.connect(host=settings.get('host'),
                        port=settings.get('port'),
                        database=settings.get('database'),
                        user=settings.get('usr'),
                        password=settings.get('password'))
        cur = conn.cursor()
        columns = "(name, extention, schema, action, creator, machine, ip, creation_date, size, location,entries, variables, script, log_script, status)"
        sql="INSERT INTO preprocessed.etl_execution " + columns + " VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        cur.execute(sql,metadat)
        conn.commit()
        cur.close()
        conn.close()
def init_socket(self):
    """Initialize the server's listening socket."""
    self.sock = socket.socket()
    self.port = settings.as_int('PORT')
    # Allow fast restarts on the same address/port.
    self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.sock.bind((settings.get('host'), self.port))
    # Short timeout so accept() does not block the main loop for long.
    self.sock.settimeout(0.5)
    self.sock.listen(settings.get('max_connections'))
    self.started = True
    logger.info(f'start with {settings.get("host")}:{self.port}')
def check_route_exists(name):
    """Return True when a route file for *name* already exists."""
    route_file = (settings.get('FALAFEL_DIR') + settings.get('ROUTES_DIR')
                  + '/' + name + '.py')
    return path.exists(route_file)
def listar_tudo(self, pagina=False, apenas_ativas=True):
    """Return a paginated list of sessions, newest first.

    Args:
        pagina: page number; falsy values fall back to page 1.
        apenas_ativas: when True, only active sessions are included.
    """
    numero_pagina = int(pagina) or 1
    por_pagina = settings.get("PAGINATION_OFFSET")
    consulta = Sessao.query
    if apenas_ativas:
        consulta = consulta.filter(Sessao.status == True)
    return consulta.order_by(Sessao.data.desc()).paginate(numero_pagina, por_pagina)
def check_model_exists(class_name):
    """Return True when a model file for *class_name* already exists."""
    model_file = (settings.get('FALAFEL_DIR') + settings.get('MODELS_DIR')
                  + '/' + class_name + '.py')
    return path.exists(model_file)
def create_app():
    """Application factory: wire up Dynaconf, SQLAlchemy, views and auth."""
    app = Flask(__name__)
    FlaskDynaconf(app)
    app.config['SQLALCHEMY_DATABASE_URI'] = settings.get('sqlalchemy_database_uri')
    track_mods = settings.get('sqlalchemy_track_modifications', False)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = track_mods
    for initializer in (db.init_app, views.init_app, auth.init_app):
        initializer(app)
    return app
def __init__(self, settings_path: Optional[str] = None):
    """Bootstrap the bot: logging, settings, and cookie-based auth check.

    Args:
        settings_path: optional path to a settings file; when omitted,
            a default settings file is created or loaded.
    """
    self.started_at = datetime.now()
    init_logger()
    load_or_create_settings(path=settings_path)
    # Cookies plus a username indicate an authenticated session;
    # otherwise the bot runs anonymously.
    if settings.get("COOKIES") and settings.get("USERNAME"):
        logger.info(f"✅ Used cookies of @{settings.USERNAME}")
    else:
        logger.info("🛑 Cookies not found, anonymous mode")
    logger.info("🥳 TikTokPy initialized")
def write_model_file(class_name, content):
    """Write a black-formatted database model file for migrations.

    Args:
        class_name: model class name; also used as the file name.
        content: Python source to format and write.

    Returns:
        The class_name, unchanged.
    """
    formatted = black.format_file_contents(content, fast=False,
                                           mode=black.FileMode())
    target = (settings.get('FALAFEL_DIR') + settings.get('MODELS_DIR')
              + '/' + class_name + '.py')
    with open(target, 'w') as model:
        model.write(formatted)
    return class_name
def listar_todos(self, pagina=1, apenas_ativos=True):
    """Return a paginated, newest-first list of council members.

    Only active members are returned when apenas_ativos is True;
    otherwise both active and inactive members are included
    (a Paginate object is returned).
    """
    numero_pagina = int(pagina)
    por_pagina = settings.get("PAGINATION_OFFSET")
    consulta = Vereador.query.order_by(Vereador.postado_em.desc())
    if apenas_ativos:
        consulta = consulta.filter(Vereador.status == True)
    return consulta.paginate(numero_pagina, por_pagina)
def _build_uri(self):
    """Assemble the MongoDB connection URI, with auth when configured."""
    host = settings.HOST
    port = settings.PORT
    if not (settings.get("USERNAME") and settings.get("PASSWORD")):
        # No credentials configured: plain unauthenticated URI.
        return f"mongodb://{host}:{port}"
    user = quote_plus(settings.USERNAME)
    password = quote_plus(settings.PASSWORD)
    auth_db = settings.get("DATABASE", "test_artifacts")
    return f"mongodb://{user}:{password}@{host}:{port}/?authSource={auth_db}"
def get_health_info():
    """Get the information about this server.

    :rtype: HealthInfo
    """
    # Look the backend URL up once; it was previously fetched twice with
    # a duplicated default, which could drift out of sync.
    backend = settings.get("BACKEND_URL", "http://localhost:8080")
    return {
        "frontend": settings.get("FRONTEND_URL", "http://localhost:3000"),
        "backend": backend,
        "api_ui": backend + "/api/ui/",
    }
def api_exception_handler(error, req, resp, params):
    """Falcon error hook: translate exceptions into JSON responses.

    BaseApiException instances keep their own status/headers/body;
    HTTPNotFound becomes a JSON 404; anything else becomes a 500 whose
    body includes details only while AWOKADO_DEBUG is enabled.
    """
    if isinstance(error, BaseApiException):
        resp.status = error.status
        if error.headers is not None:
            resp.set_headers(error.headers)
        if error.has_representation:
            json_error_serializer(req, resp, error)
        if settings.get("AWOKADO_LOG_USERS_EXCEPTIONS", False):
            exc_info = sys.exc_info()
            log.error("User error: ", exc_info=exc_info)
    elif isinstance(error, falcon.HTTPNotFound):
        resp.status = "404 Not Found"
        resp.content_type = "application/json"
        resp.body = json.dumps({"error": f"{req.path} not found"})
        resp.append_header("Vary", "Accept")
    else:
        # Unknown failure: log with the active exception's traceback.
        resp.status = "500 Internal Server Error"
        exc_info = sys.exc_info()
        log.error("api_exception_handler", exc_info=exc_info)
        if settings.get("AWOKADO_DEBUG"):
            if hasattr(error, "to_dict"):
                resp.body = json.dumps({"error": error.to_dict()})
            elif hasattr(error, "to_json"):
                json_data = error.to_json()
                try:
                    json_data = json.loads(json_data)
                except (TypeError, JSONDecodeError):
                    # Keep the raw payload when it is not valid JSON.
                    json_data = json_data
                resp.body = json.dumps({"error": json_data})
            else:
                exc_data = "".join(traceback.format_exception(*sys.exc_info()))
                resp.body = json.dumps({"error": exc_data})
        else:
            # Never leak internals outside debug mode.
            resp.body = json.dumps({"error": resp.status})
        # Set content type
        resp.content_type = "application/json"
        resp.append_header("Vary", "Accept")
def save_image(self, files, type_upload, id_user=None):
    """Save an uploaded image under a fresh UUID name.

    Args:
        files: uploaded file object (exposes .filename and .save()).
        type_upload: "img_profile" or "img_kindness"; selects the folder.
        id_user: accepted for API compatibility; currently unused.

    Returns:
        The generated filename, or None for an unknown type_upload.
    """
    # os.path.splitext keeps only the final extension. The previous
    # `filename.split(".")[1]` was wrong for multi-dot names
    # ("a.b.jpg" -> "b") and raised IndexError when there was no dot.
    extension = os.path.splitext(files.filename)[1].lstrip(".")
    filename = "{}.{}".format(uuid.uuid1(), extension)
    if type_upload == "img_profile":
        files.save(os.path.join(settings.get('UPLOAD_USERS_FOLDER'), filename))
        return filename
    if type_upload == "img_kindness":
        files.save(os.path.join(settings.get('UPLOAD_KINDNESS_FOLDER'), filename))
        return filename
    return None
def get_logger():
    """Return the 'ms' application logger with a file handler attached.

    Log directory and file name come from settings (defaults to
    /tmp/ms.log). The handler is attached only once, so repeated calls
    no longer duplicate every log record.

    Returns:
        The configured logging.Logger instance.
    """
    log_format = ('【 %(levelname)s 】 %(asctime)s - file "%(pathname)s"'
                  ' - line: %(lineno)d - %(message)s ')
    logger = logging.getLogger('ms')
    logger.setLevel(logging.DEBUG)
    # Guard: the original attached a new FileHandler on every call,
    # which made each record appear multiple times in the log file.
    if not logger.handlers:
        log_dir = settings.get('log_dir', '/tmp')
        log_file = settings.get('log_file', 'ms.log')
        logfile = os.path.join(log_dir, log_file)
        fh = logging.FileHandler(logfile)
        fh.setFormatter(logging.Formatter(log_format))
        logger.addHandler(fh)
    return logger
def CrearConexionRDS():
    """Create the connection to the RDS database.

    (Make sure settings.toml is up to date.)

    Returns:
        conn: open psycopg2 connection to the RDS instance.
    """
    # The port now comes from settings like every other parameter
    # (consistent with the sibling __init__ that reads 'port'), falling
    # back to PostgreSQL's default 5432 when unset.
    conn = psycopg2.connect(database=settings.get('dbname'),
                            user=settings.get('user'),
                            password=settings.get('password'),
                            host=settings.get('host'),
                            port=settings.get('port', '5432'))
    return conn
def get_email_server():
    """Creates an instance of email server.

    Returns:
        server -- SMTP instance (SSL variant when EMAIL.ssl is set),
        with STARTTLS and login applied when configured.
    """
    smtp_cls = smtplib.SMTP_SSL if settings.get("EMAIL.ssl") else smtplib.SMTP
    host = settings.get("EMAIL.server", "localhost")
    port = settings.get("EMAIL.port", 25)
    server = smtp_cls(host, port)
    if settings.get("EMAIL.tls"):
        server.starttls()
    if settings.get("EMAIL.auth"):
        server.login(settings.EMAIL.auth.username, settings.EMAIL.auth.password)
    return server
def verify_token_reset_password(token):
    """Return the user a password-reset token belongs to, or None.

    Args:
        token: signed JWT carrying a 'reset_password' claim (user id).

    Returns:
        The Users row for the embedded id, or None when the token is
        invalid, expired, or malformed.
    """
    try:
        # PyJWT's decode takes `algorithms` (a list of accepted
        # algorithms); the previous `algorithm=` kwarg is not a valid
        # decode parameter and left the accepted algorithms unpinned.
        id = jwt.decode(token, settings.get('SECRET_KEY'),
                        algorithms=['HS256'])['reset_password']
    except Exception:
        # Bad/expired token behaves as "no user found"; narrowed from a
        # bare except so KeyboardInterrupt etc. still propagate.
        return None
    return Users.query.get(id)
def verify_token_confirmed_email(token):
    """Return the user whose email a confirmation token belongs to, or None.

    Args:
        token: signed JWT carrying a 'confirm_email' claim (email).

    Returns:
        The Users row matching the embedded email, or None when the
        token is invalid, expired, or malformed.
    """
    try:
        # PyJWT's decode takes `algorithms` (a list); the previous
        # `algorithm=` kwarg is not a valid decode parameter and left
        # the accepted algorithms unpinned.
        email = jwt.decode(token, settings.get('SECRET_KEY'),
                           algorithms=['HS256'])['confirm_email']
    except Exception:
        # Bad/expired token behaves as "no user found"; narrowed from a
        # bare except so KeyboardInterrupt etc. still propagate.
        return None
    return Users.query.filter_by(email=email).first()
def generate_token_reset_password(id_user, expire_in=600):
    """Return a signed, utf-8 decoded JWT for a password reset.

    Args:
        id_user: user id stored under the 'reset_password' claim.
        expire_in: token lifetime in seconds (default 600).
    """
    claims = {'reset_password': id_user, 'exp': time() + expire_in}
    token = jwt.encode(claims, settings.get('SECRET_KEY'), algorithm='HS256')
    return token.decode('utf-8')
def generate_token_confirmed_email(email_user, expire_in=600):
    """Return a signed, utf-8 decoded JWT for email confirmation.

    Args:
        email_user: email stored under the 'confirm_email' claim.
        expire_in: token lifetime in seconds (default 600).
    """
    claims = {'confirm_email': email_user, 'exp': time() + expire_in}
    token = jwt.encode(claims, settings.get('SECRET_KEY'), algorithm='HS256')
    return token.decode('utf-8')
def get_spiders_iter():
    """Yield every spider class found in the configured SPIDER_MODULES."""
    for module_name in settings.get('SPIDER_MODULES'):
        for module in walk_modules(module_name):
            yield from iter_spider_classes(module)
def crawl_queue(spider):
    """Return the Redis start-urls key for *spider*."""
    key_template = settings.get('REDIS_START_URLS_KEY', DEFAULT_START_URLS_KEY)
    return key_template % {'spider': spider.name}
def items_queue(spider):
    """Return the Redis items key for *spider*."""
    key_template = settings.get('REDIS_ITEMS_KEY', DEFAULT_ITEMS_KEY)
    return key_template % {'spider': spider.name}
def requests_queue(spider):
    """Return the Redis requests key for *spider*."""
    key_template = settings.get('REQUESTS_KEY', DEFAULT_REQUESTS_KEY)
    return key_template % {'spider': spider.name}
def dupefilter_queue(spider):
    """Return the Redis dupefilter key for *spider*."""
    key_template = settings.get('SCHEDULER_DUPEFILTER_KEY', DEFAULT_DUPEFILTER_KEY)
    return key_template % {'spider': spider.name}