def __init__(self, exchange_info):
    """Set up scheduler, tunnel and Mongo storage for the monitor."""
    # Keep the raw per-exchange configuration around for later lookups.
    self.exchanges_settings: dict = exchange_info
    # Tunnels that have already been worked -- TODO confirm semantics.
    self._worked_tunnel = set()
    pool_size = settings.as_int('MONGO_POOL_SIZE')
    self.storage = MongoStorage(settings.MONGO_URI, pool_size)
    self.tunnel = QueueTunnel()
    self.scheduler = create_scheduler()
def init_socket(self):
    """Create, configure and start listening on the server socket."""
    self.port = settings.as_int('PORT')
    self.sock = socket.socket()
    # Allow quick restarts on the same address.
    self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.sock.bind((settings.get('host'), self.port))
    # Short timeout so accept() does not block indefinitely.
    self.sock.settimeout(0.5)
    self.sock.listen(settings.get('max_connections'))
    self.started = True
    logger.info(f'start with {settings.get("host")}:{self.port}')
def __init__(self, start: datetime, end: datetime):
    """Prepare the S3 report job for the [start, end] interval.

    Args:
        start: interval start (used for the report's date string).
        end: interval end; must be at most one day after ``start``.

    Raises:
        ValueError: if the interval is longer than one day.
    """
    # Validate explicitly instead of `assert`, which is stripped under -O.
    if end - start > timedelta(days=1):
        raise ValueError('interval must be lte one day')
    self.start = start
    self.end = end
    self.start_date = f"{start:%Y-%m-%d}"
    self.out_sep = '+'
    self.mongo_storage = MongoStorage(settings.MONGO_URI,
                                      settings.as_int('MONGO_POOL_SIZE'))
    self.backup = S3Backup(
        access_key_id=settings['AWS_ACCESS_KEY_ID'],
        secret_access_key=settings['AWS_SECRET_ACCESS_KEY'])
    report_db = settings['MONGO_REPORT_DATABASE']
    s3_coll_name = settings['MONGO_S3_SYNC_COLLECTION']
    # NOTE(review): reaches into MongoStorage's private `_mongo_client` --
    # consider exposing a public accessor on MongoStorage.
    self.s3_coll = self.mongo_storage._mongo_client[report_db][s3_coll_name]
    self.s3_bucket = settings['S3_BUCKET']
    self.s3_presign_url_expire = settings['S3_PRESIGN_URL_EXPIRE']
def __init__(self, start: datetime, end: datetime):
    """Prepare the OSS report job for the [start, end] interval.

    Args:
        start: interval start (used for the report's date string).
        end: interval end; must be at most one day after ``start``.

    Raises:
        ValueError: if the interval is longer than one day.
    """
    # Validate explicitly instead of `assert`, which is stripped under -O.
    if end - start > timedelta(days=1):
        raise ValueError('interval must be lte one day')
    self.start = start
    self.end = end
    self.start_date = f"{start:%Y-%m-%d}"
    self.out_sep = '+'
    self.mongo_storage = MongoStorage(settings.MONGO_URI,
                                      settings.as_int('MONGO_POOL_SIZE'))
    self.backup = OssBackup(
        access_key_id=settings['OSS_ACCESS_KEY_ID'],
        access_key_secret=settings['OSS_ACCESS_KEY_SECRET'],
        bucket_name=settings['OSS_BUCKET'],
        endpoint=settings['OSS_ENDPOINT'])
    report_db = settings['MONGO_REPORT_DATABASE']
    oss_coll_name = settings['MONGO_OSS_SYNC_COLLECTION']
    # NOTE(review): reaches into MongoStorage's private `_mongo_client` --
    # consider exposing a public accessor on MongoStorage.
    self.oss_coll = self.mongo_storage._mongo_client[report_db][oss_coll_name]
async def async_start(self):
    """Open the client connection and run until the connection is lost.

    Creates the transport/protocol pair on the running loop, then blocks
    on the `on_con_lost` future that the protocol resolves; the transport
    is always closed on the way out.
    """
    self.loop = asyncio.get_running_loop()
    self.on_con_lost = self.loop.create_future()
    self.transport, self.protocol = await self.loop.create_connection(
        lambda: AsyncClientProtocol(self),
        settings.get('HOST'),
        settings.as_int('PORT'),
    )
    # Wait until the protocol signals that the connection
    # is lost and close the transport.
    try:
        await self.on_con_lost
    except KeyboardInterrupt:
        logger.debug('closed')
    except Exception as error:
        logger.error(error, exc_info=True)
    finally:
        # Fixed log-message typo: was 'stoped'.
        logger.debug('stopped')
        self.transport.close()
},
]

WSGI_APPLICATION = 'payserver.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',      # engine name
        'NAME': dynasettings.MYSQL_DBNAME,         # database name
        'USER': dynasettings.MYSQL_USER,           # database user
        'PASSWORD': dynasettings.MYSQL_PASSWORD,   # database password
        'HOST': dynasettings.MYSQL_HOST,           # database host IP
        'PORT': dynasettings.as_int('MYSQL_PORT'), # database port
        'OPTIONS': {
            'charset': 'utf8mb4'
        },
        'TEST': {
            'NAME': 'payserver_test',
            'CHARSET': 'utf8mb4',
            'COLLATION': 'utf8mb4_general_ci',
        },
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
""" from datetime import datetime from dynaconf import settings from . import MonitorAbstract from ..sdk.cointiger import CointigerRest, CointigerWebsocket from ..schemas.regexes import COINTIGER_WS_CHANS from ..schemas.markets import (CointigerTicker, CointigerDepth, CointigerTrades, CointigerKline) __all__ = ( 'CointigerMonitor', ) ORDERBOOK_LEVEL = settings.as_int('ORDERBOOK_LEVEL') class CointigerMonitor(MonitorAbstract): exchange = 'cointiger' _ws_sdk_class = CointigerWebsocket _rest_sdk_class = CointigerRest async def schedule(self): for symbol in self.symbols: self.ws_sdk.register_depth(symbol) self.ws_sdk.register_trades(symbol) self.ws_sdk.register_kline(symbol) self.ws_sdk.register_ticker(symbol) await self.ws_sdk.subscribe() self.run_ws_in_background(handler=self.dispatch_ws_msg)
    },
    'test': {
        'PAYWEB': 'http://payweb-alpha.mishitu.com',
        'SERVER': 'http://api-alpha.mishitu.com'
    },
    'prod': {
        'PAYWEB': 'http://payweb.mishitu.com',
        'SERVER': 'http://api.mishitu.com'
    }
}[dynasettings.ENV]

# redis connection pool
SUBSCRIPT_REDIS_CONFIG = {
    "host": dynasettings.REDIS_HOST,
    "password": dynasettings.REDIS_PASSWORD if dynasettings.REDIS_PASSWORD else '',
    "port": dynasettings.as_int('REDIS_PORT'),
    "decode_responses": True,
    "db": 1
}
SUBSCRIPT_REDIS_POOL = redis.ConnectionPool(**SUBSCRIPT_REDIS_CONFIG)

# Environment configuration for QR-code generation in the admin backend
QRCODE_ENVS = {
    'test': 'GenerateQrCodeEnvTest',
    'dev': 'GenerateQrCodeEnvTest',
    'prod': 'GenerateQrCode',
}

QINIU_IMAGE_SERVERS = {
    'test': 'ss-alpha.mishitu.com',
from jwt.exceptions import ExpiredSignatureError
from passlib.context import CryptContext
from sqlalchemy.orm import Session

from src.database.database_schemas import RevokedTokens, Users
from src.helpers.database import get_db
from src.helpers.oauth2_scheme import oauth2_scheme
from src.user.user_service import get_user_by_email
from src.user.user_model import User, UserCreate

logger = logging.getLogger("Noticrawl")

# JWT signing configuration, read once from settings at import time.
SECRET_KEY = settings.SECRET_KEY
JWT_ALGORITHM = settings.ALGORITHM
ACCESS_TOKEN_LIFETIME = settings.as_int("ACCESS_TOKEN_LIFETIME")

# Password hashing context (bcrypt).
PWD_CONTEXT = CryptContext(schemes=["bcrypt"], deprecated="auto")


def create_user(db: Session, user: UserCreate):
    """Hash the user's password, persist a new Users row, and return it.

    Commits the session and refreshes the row so generated fields
    (e.g. the primary key) are populated on return.
    """
    hashed_password = hash_password(user.password)
    db_user = Users(email=user.email, password=hashed_password)
    db.add(db_user)
    db.commit()
    db.refresh(db_user)
    return db_user


# NOTE(review): `-> jwt` annotates the *module* as the return type --
# presumably a token string was meant; confirm against callers.
def create_token(*, data: dict, lifetime: int) -> jwt:
    data.update({
from string import Template

from dynaconf import settings
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# Build the PostgreSQL connection URL from dynaconf settings.
database_url_template = Template(
    "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}"
)
_db_params = {
    "DB_USER": settings.DB_USER,
    "DB_PASSWORD": settings.DB_PASSWORD,
    "DB_NAME": settings.DB_NAME,
    "DB_HOST": settings.DB_HOST,
    "DB_PORT": settings.as_int("DB_PORT"),
}
DATABASE_URL = database_url_template.substitute(_db_params)

# Engine with pre-ping so stale pooled connections are detected.
engine = create_engine(DATABASE_URL, pool_pre_ping=True)

# Session factory -- each instance is one database session.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Declarative base class; ORM models inherit from it.
Base = declarative_base()
def connect(self):
    """Connect to the server and run the main loop.

    Raises:
        ServerError: on an erroneous request
    """
    global client
    client = None
    connected = False
    # Up to five connection attempts, one second apart.
    for i in range(5):
        logger.info(f'Попытка подключения №{i + 1}')
        try:
            self.sock.connect(
                (settings.get('HOST'), settings.as_int('PORT')))
        except (OSError, ConnectionRefusedError):
            pass
        else:
            connected = True
            break
        time.sleep(1)
    if not connected:
        logger.critical('Не удалось установить соединение с сервером')
        sys.exit(1)
    logger.debug(
        f'Start with {settings.get("host")}:{settings.get("port")}')
    self.database = DBManager(app_name)
    user = User.by_name(settings.USER_NAME)
    # Derive the auth key from the password via PBKDF2-HMAC-SHA512
    # (salted with the user name, 10000 iterations).
    hash_ = binascii.hexlify(
        hashlib.pbkdf2_hmac(
            'sha512',
            settings.get('password').encode('utf-8'),
            settings.USER_NAME.encode('utf-8'),
            10000,
        ))
    if user:
        user.password = settings.get('password')
        user.auth_key = hash_
        user.active = False
    else:
        user = User(username=settings.USER_NAME,
                    password=settings.get('password'),
                    auth_key=hash_)
    user.save()
    self.send_message(Message.presence())
    message = self.read_data()
    response = getattr(message, settings.RESPONSE, None)
    # Response codes: 200 OK, 205 reset(?), 400 error, 511 auth challenge.
    while True:
        if response == 200:
            break
        elif response == 205:
            # ?????
            break
        elif response == 400:
            raise ServerError(getattr(message, settings.ERROR, ''))
        elif response == 511:
            # If everything is fine, continue the authorization procedure.
            ans_data = getattr(message, settings.DATA, '')
            digest = hmac.new(user.auth_key,
                              ans_data.encode('utf-8')).digest()
            response = Message(
                response=511,
                **{
                    settings.DATA:
                    binascii.b2a_base64(digest).decode('ascii'),
                    settings.USER: user.username,
                    settings.ACTION: settings.AUTH
                })
            self.send_message(response)
            message = self.read_data()
            if not message:
                logger.error(f'Авторизация не пройдена')
                sys.exit(1)
            response = getattr(message, settings.RESPONSE, None)
            user.active = True
            user.save()
        else:
            logger.error(
                f'Принят неизвестный код подтверждения {response}')
            return
    logger.debug(
        f'Установлено соединение с сервером. Ответ сервера: {message}')
    print(f'Установлено соединение с сервером.')
    self.update_user_list()
    self.update_contacts_list()
    receiver = ClientReader(self)
    receiver.daemon = True
    # NOTE(review): setDaemon() is deprecated and duplicates the line above.
    receiver.setDaemon(True)
    receiver.start()
    if settings.get('console'):
        client = ClientSender(self.sock)
        receiver.attach(client, settings.get('event_new_message'))
        client.daemon = True
        client.start()
    elif settings.get('gui'):
        sys.argv += ['-style', 'Fusion']
        app = QApplication(sys.argv)
        client = ClientGui(self)
        receiver.new_message.connect(client.update)
        receiver.up_all_users.connect(client.update)
        receiver.response_key.connect(client.update)
        app.exec_()
    # Watchdog main loop: if one of the threads has finished, either the
    # connection was lost or the user entered "exit".  Since all events are
    # handled in threads, simply ending this loop is enough.
    try:
        while True:
            time.sleep(1)
            if not receiver.is_alive(
            ) or not client or not client.is_alive():
                break
    except KeyboardInterrupt:
        pass
    self.exit_client()
import os
import time

from dynaconf import settings

from src.database.database_schemas import Links, Scripts
from src.helpers.crawling import data_selector, take_screenshot
from src.helpers.database import get_db
from src.notifications.notifications import send_email

from .models.crawl_data_model import CrawlData

logger = logging.getLogger("Noticrawl")
# logger.setLevel("WARN")

# Concurrency limits for crawl scheduling, read once at import time.
MAX_RUNNING_CRAWLS = settings.as_int("MAX_RUNNING_CRAWLS")
MAX_WAITING_CRAWLS_QUEUE_SIZE = settings.as_int(
    "MAX_WAITING_CRAWLS_QUEUE_SIZE")


class Scheduler:
    """Manages queued and running crawls (partially visible here)."""

    # Declared-only attributes, set up in __create().
    __waiting_crawls: asyncio.PriorityQueue
    __crawls_not_in_queue_num: int
    # NOTE(review): class-level mutable defaults are shared across all
    # instances -- confirm that is intended (or move into __create).
    __running_crawls_futures = []
    __crawls_to_update_or_delete = {}

    async def __create(self):
        """Initialize the waiting queue and (re)load crawls from storage."""
        self.__waiting_crawls = asyncio.PriorityQueue(
            maxsize=MAX_WAITING_CRAWLS_QUEUE_SIZE)
        self.__crawls_not_in_queue_num = 0
        await self.reload_crawls()
def mongo_storage(loop):
    """Build a MongoStorage from the configured URI and pool size.

    The ``loop`` parameter is accepted but unused here -- presumably an
    event-loop fixture dependency; confirm against the test setup.
    """
    pool_size = settings.as_int('MONGO_POOL_SIZE')
    return MongoStorage(settings.MONGO_URI, pool_size)