Example #1
def __init__(self, root_urls: List[str], parse_funcs: Dict[str, Callable]) -> None:
    """
    :param root_urls: URLs to start parsing from
    :param parse_funcs: functions applied one by one to parse each level in depth
    """
    self.event_loop = asyncio.get_event_loop()
    self.root_urls: List[str] = root_urls
    self.parse_funcs: Dict[str, Callable] = parse_funcs
    self.logger = Logger.with_default_handlers()
    self.current_url = ""
Example #2
import asyncio

from aiologger import Logger


async def main():
    logger = Logger.with_default_handlers(name='my-logger')

    await logger.debug("debug at stdout")
    await logger.info("info at stdout")

    await logger.warning("warning at stderr")
    await logger.error("error at stderr")
    await logger.critical("critical at stderr")

    await logger.shutdown()
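

# A minimal runner for the coroutine above (assumes Python 3.7+):
asyncio.run(main())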
Example #3
        async def test():
            # Wire an asyncio StreamReader to the read end of a pipe so the
            # test can observe what the logger writes to stdout.
            reader = asyncio.StreamReader(loop=loop)
            protocol = asyncio.StreamReaderProtocol(reader)

            transport, _ = await loop.connect_read_pipe(
                lambda: protocol, self.read_pipe
            )

            logger = Logger.with_default_handlers()
            await logger.info("Xablau")

            logged_content = await reader.readline()
            self.assertEqual(logged_content, b"Xablau\n")

            transport.close()
            await logger.shutdown()
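
        # `loop` and `self.read_pipe` come from the surrounding test fixture
        # (not shown); presumably setUp creates a pipe via os.pipe() and points
        # the logger's stdout stream at its write end.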
Example #4
import logging

from aiologger import Logger
from aiologger.handlers.files import AsyncTimedRotatingFileHandler, RolloverInterval

# FileUtils is project-specific; its import is not shown in the original snippet.


def setup_custom_logger():
    LOG_PATH = 'log'
    FileUtils.create_folder_if_not_exists(LOG_PATH)

    handler = AsyncTimedRotatingFileHandler(
        filename=f"{LOG_PATH}/think.log",
        when=RolloverInterval.HOURS,
        backup_count=48,
    )
    formatter = logging.Formatter(
        "[%(asctime)s] %(threadName)s - %(pathname)s %(funcName)s():%(lineno)d  %(levelname)s %(message)s"
    )
    handler.formatter = formatter

    logger = Logger.with_default_handlers(formatter=formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    return logger
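

# A minimal usage sketch (assumed caller, not part of the original snippet):
# aiologger's log methods return awaitables, so call them from a coroutine, e.g.
#     logger = setup_custom_logger()
#     await logger.info("logger configured")
#     await logger.shutdown()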
Example #5
from urllib.parse import urljoin

import aiohttp

from checks import _get_kafka_messages, Check
from aiologger import Logger

logger = Logger.with_default_handlers()


async def get_fs_messages(start):
    return await _get_kafka_messages('fs', start)


class FSKafkaCheck:
    async def check(self, fs_messages):
        # aiologger's log calls are awaitable; await them so the messages are
        # flushed before the coroutine returns.
        await logger.info("[FS-Kafka] checking...")
        await logger.info(f"[FS-Kafka] messages: {fs_messages}")
        result = any(fs_messages)
        await logger.info(f"[FS-Kafka] check: {result}")
        return result
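
# Hypothetical usage (names assumed, not part of the original module):
#     ok = await FSKafkaCheck().check(await get_fs_messages(start))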


class FSCheck:
    def __init__(self, server_url, token, fact_result) -> None:
        super().__init__()
        self.server_url = server_url
        self.token = token
        self.fact_result = fact_result

    async def _get_facts(self, fs_messages):
Example #6
import os
from typing import Optional, Set

from websockets.server import WebSocketServerProtocol as WsClient

from aiologger import Logger
from aiologger.levels import LogLevel

from arcor2 import helpers as hlp
from arcor2.data import events
from arcor2.data.common import ActionState, CurrentAction, PackageState, Project, Scene
from arcor2.data.execution import PackageInfo
from arcor2.data.object_type import ObjectActionsDict, ObjectTypeMetaDict
from arcor2.data.robot import RobotMeta
from arcor2.data.services import ServiceTypeMetaDict
from arcor2.nodes.build import PORT as BUILD_PORT
from arcor2.nodes.execution import PORT as EXE_PORT
from arcor2.object_types import Generic
from arcor2.parameter_plugins.base import TypesDict
from arcor2.services.service import Service

logger = Logger.with_default_handlers(name='server',
                                      formatter=hlp.aiologger_formatter(),
                                      level=LogLevel.DEBUG)
VERBOSE: bool = False

MANAGER_URL = os.getenv("ARCOR2_EXECUTION_URL", f"ws://0.0.0.0:{EXE_PORT}")
BUILDER_URL = os.getenv("ARCOR2_BUILDER_URL", f"http://0.0.0.0:{BUILD_PORT}")

PORT: int = int(os.getenv("ARCOR2_SERVER_PORT", 6789))

SCENE: Optional[Scene] = None
PROJECT: Optional[Project] = None
MAIN_SCREEN: Optional[events.ShowMainScreenData] = \
    events.ShowMainScreenData(events.ShowMainScreenData.WhatEnum.ScenesList)

INTERFACES: Set[WsClient] = set()
Example #7
import uvicorn
from fastapi import FastAPI
from aiologger import Logger

logger = Logger.with_default_handlers(name='my-logger')

app = FastAPI()


@app.get("/")
async def test_aiologger():
    # Default : stdout
    await logger.debug("debug")
    await logger.info("info")

    # Default : stderr
    await logger.warning("warning")
    await logger.error("error")
    await logger.critical("critical")


if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000, loop="uvloop")
    # Test : http://127.0.0.1:8000/docs
Example #8
from aiologger import Logger
from aiologger.formatters.base import Formatter


def get_logger(level):
    return Logger.with_default_handlers(
        name=__name__,
        level=level,
        formatter=Formatter(fmt=("%(levelname)05s [%(asctime)s.%(msecs)03d]"
                                 "[%(module)s:%(lineno)d]: %(message)s")))
Example #9
import aiofiles
import argparse
import logging
import random
import tqdm
from aiohttp import ClientSession
from aiologger import Logger
from aiologger.formatters.base import Formatter
from pathlib import Path

from tools.aiohttp_ignore_ssl_error import ignore_aiohttp_ssl_error

aio_logger = Logger.with_default_handlers(
    name='aio_image_downloader',
    # formatter=Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    formatter=Formatter(fmt='%(message)s'),
    level=logging.INFO,
)

glob_configs = {"export_dir": Path("cats")}


def build_img_export_name(img_url) -> Path:
    try:
        image_name = str(img_url[(img_url.rfind('/')) + 1:])
        if '?' in image_name:
            image_name = image_name[:image_name.find('?')]
    except Exception:
        # Fall back to a random name when the URL cannot be parsed.
        image_name = str(random.randint(11111, 99999)) + '.jpg'

    return glob_configs["export_dir"] / image_name
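

# Quick sanity check of the helper above (hypothetical URL):
assert build_img_export_name(
    "https://example.com/a/kitten.jpg?size=l") == Path("cats/kitten.jpg")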
Example #10
async def stock_trigger(q_name, init_dict):
    """
    异动处理主逻辑
    :param q_name: 队列名称
    :param init_dict: 30日新高新低
    :return:
    """
    logger = Logger.with_default_handlers(name=q_name)
    # Get the current event loop. If there is no current event loop set in the current OS thread and set_event_loop()
    # has not yet been called, asyncio will create a new event loop and set it as the current one.
    loop = asyncio.get_event_loop()
    # async redis connection pool
    redis_loop = await aioredis.create_pool('redis://{}:{}'.format(REDIS_DICT['host'], REDIS_DICT['port']),
                                            db=REDIS_DICT['db'],
                                            password=REDIS_DICT['password'],
                                            minsize=1, maxsize=10, loop=loop)

    async def get_connection():
        return await aio_pika.connect_robust(
            "amqp://{}:{}@{}:{}{}".format(config['rabbitmq']['username'],
                                          config['rabbitmq']['password'],
                                          config['rabbitmq']['host'],
                                          config['rabbitmq']['port'],
                                          config['rabbitmq']['virtual_host']))

    # Connection pooling: at most 2 robust AMQP connections, multiplexed by the
    # channel pool (up to 10 channels) below.
    connection_pool = Pool(get_connection, max_size=2, loop=loop)

    async def get_channel() -> aio_pika.Channel:
        async with connection_pool.acquire() as connection:
            return await connection.channel()

    channel_pool = Pool(get_channel, max_size=10, loop=loop)
    queue_name = q_name

    async def consume():
        async with channel_pool.acquire() as channel:   # type: aio_pika.Channel
            await channel.set_qos(prefetch_count=10)

            queue = await channel.declare_queue(queue_name, durable=True, auto_delete=False)

            async with queue.iterator() as queue_iter:
                async for message in queue_iter:
                    msg = msg2dict(message.body)
                    start_time = time.time()
                    if check_stock_status(msg['status']):
                        task_queue = []
                        msg['name'] = init_dict[msg['code']][0]  # attach the stock name for this code
                        # The same message is consumed by five computation units:
                        # 1. Did the price break the 30-day high/low? This alerts only once per
                        # day, so Redis is queried to check whether an alert was already sent.
                        if not str2bool(init_dict[msg['code']][3]) and \
                                float(msg['close']) > float(init_dict[msg['code']][1]):
                            init_dict[msg['code']][3] = '1'
                            task_queue.append("14601")
                            print("14601", msg)
                        elif not str2bool(init_dict[msg['code']][4]) and \
                                float(msg['close']) < float(init_dict[msg['code']][2]):
                            init_dict[msg['code']][4] = '1'
                            task_queue.append("14602")
                            print("14602", msg)

                        # 2. A move of ±1% within five minutes; this may alert several times per day.
                        # First compute the five-minute change, using a Fibonacci heap
                        # (min-heap plus max-heap) over (td, close) tuples of time and price.
                        init_dict[msg['code']][11], fiveRatio, td = fiveMinuteCal(init_dict[msg['code']][11],
                                                                                        int(msg['td']),
                                                                                        float(msg['close']))
                        # A five-minute gain above 1% alerts at most once per five minutes, so
                        # check (via Redis) whether it already triggered within that window.
                        if fiveRatio >= 1 and fiveMinuteBefore2Int(td, CONSTANT.STOCK_TIMESTAMP_FORMAT) > int(init_dict[msg['code']][9]):
                            init_dict[msg['code']][9] = td
                            task_queue.append("14603:{}".format(fiveRatio))

                        # A five-minute drop below -1% alerts at most once per five minutes, so
                        # check (via Redis) whether it already triggered within that window.
                        if fiveRatio <= -1 and fiveMinuteBefore2Int(td, CONSTANT.STOCK_TIMESTAMP_FORMAT) > int(init_dict[msg['code']][10]):
                            init_dict[msg['code']][10] = td
                            task_queue.append("14604:{}".format(fiveRatio))

                        # 3. Did the daily change reach ±7%? This alerts only once per day, so
                        # Redis is queried to check whether an alert was already sent.
                        if not str2bool(init_dict[msg['code']][5]) and \
                                float(msg['riseFallRate']) > CONSTANT.ALARM_QUOTE_CHANGE:
                            init_dict[msg['code']][5] = '1'
                            task_queue.append("14605")
                            print("14605", msg)
                        elif not str2bool(init_dict[msg['code']][6]) and \
                                float(msg['riseFallRate']) < -CONSTANT.ALARM_QUOTE_CHANGE:
                            init_dict[msg['code']][6] = '1'
                            task_queue.append("14606")
                            print("14606", msg)

                        # 4. Limit-up/limit-down for the day; this may alert several times per
                        # day, so Redis (or a local dict) tracks whether an alert was sent.
                        if not str2bool(init_dict[msg['code']][7]) and \
                                float(msg['close']) >= float(msg['limitHigh']):
                            init_dict[msg['code']][7] = '1'
                            task_queue.append("14607")
                            print("14607", msg)
                        elif not str2bool(init_dict[msg['code']][8]) and \
                                float(msg['close']) <= float(msg['limitLow']):
                            init_dict[msg['code']][8] = '1'
                            task_queue.append("14608")
                            print("14608", msg)

                        # 5. Once a limit-up/limit-down flag is set, detect whether the limit
                        # was subsequently broken (the board "reopened").
                        if str2bool(init_dict[msg['code']][7]) and float(msg['close']) < float(msg['limitHigh']):
                            init_dict[msg['code']][7] = '0'
                            task_queue.append("14609")
                            print('14609', msg)
                        elif str2bool(init_dict[msg['code']][8]) and float(msg['close']) > float(msg['limitLow']):
                            init_dict[msg['code']][8] = '0'
                            task_queue.append("14610")
                            print('14610', msg)

                        # interaction redis
                        await aio_redis_dao(redis_loop, task_queue, msg)
                    print("Consume Time: {}".format(time.time()-start_time))
                    # Confirm message
                    await message.ack()

    await consume()
Example #11
import os
from aiologger import Logger
from aiologger.handlers.files import AsyncFileHandler
from aiologger.formatters.base import Formatter


# As of version 0.4.0, AsyncFileHandler does not accept a formatter in
# __init__(), so this subclass adds one.
class AsyncFileHandlerFormatter(AsyncFileHandler):
    def __init__(self, formatter: Formatter = None, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.formatter = formatter


formatter = Formatter(
    fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

logger = Logger.with_default_handlers(name='converter', formatter=formatter)

logger.add_handler(
    AsyncFileHandlerFormatter(filename=os.path.join(os.path.dirname(__file__),
                                                    f'logs/{logger.name}.txt'),
                              formatter=formatter))
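

# Minimal usage sketch (assumed caller, not part of the original module):
#     await logger.info("starting conversion")
#     await logger.shutdown()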
Example #12
import os
import json

import asyncio
import argparse
from urllib.parse import urlparse

import aiohttp
from aiologger import Logger
from aiofile import AIOFile, Writer

import cleanup

logger = Logger.with_default_handlers(name=__name__)


def has_allowed_file_extensions(url):
    extensions = ['.jpeg', '.png', '.jpg']

    for extension in extensions:
        if extension in url:
            return True

    return False


def get_subreddit_url(subreddit):
    return f'https://www.reddit.com/r/{subreddit}.json'
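

# Quick checks of the helpers above (hypothetical inputs):
assert has_allowed_file_extensions('https://i.redd.it/abc.png')
assert not has_allowed_file_extensions('https://i.redd.it/abc.webm')
assert get_subreddit_url('aww') == 'https://www.reddit.com/r/aww.json'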


async def parse_reddit_response(resp):
Example #13
import asyncio
from collections import Counter
from urllib.parse import urlparse

import aiohttp
import async_timeout
from aiologger import Logger
from aiologger.levels import LogLevel
from bs4 import BeautifulSoup
from tortoise.query_utils import Q

from app.keywords.models import Resource, ResourceItem

LOG = Logger.with_default_handlers(level=LogLevel.INFO)
# LOG = logging.getLogger(__name__)

BAD_EXTENSIONS = [
    'jpg', 'png', 'jpeg', 'gif', 'svg', 'css', 'js', 'xml', 'ico', 'xls',
    'xlsx'
]
BAD_EXTENSIONS = [f'.{ext}' for ext in BAD_EXTENSIONS]
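# BAD_EXTENSIONS now holds dotted suffixes ('.jpg', '.png', ...) for URL filtering.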

USER_AGENT = 'Mozilla/5.0 (iPad; CPU OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1'


async def fetch(session, url):
    error, data = False, None
    LOG.info(f"Try fetch {url}")

    try:
        with async_timeout.timeout(5):
Example #14
import asyncio
from typing import Optional, Set, Union

from aiologger import Logger
from websockets.server import WebSocketServerProtocol as WsClient

import arcor2
from arcor2.data import compile_json_schemas, rpc
from arcor2.data.common import PackageState, PackageStateEnum, Project
from arcor2.data.events import ActionStateEvent, CurrentActionEvent, Event, PackageInfoEvent, PackageStateEvent,\
    ProjectExceptionEvent, ProjectExceptionEventData
from arcor2.data.helpers import EVENT_MAPPING
from arcor2.exceptions import Arcor2Exception
from arcor2.helpers import RPC_DICT_TYPE, aiologger_formatter, read_package_meta, server, write_package_meta
from arcor2.settings import CLEANUP_SERVICES_NAME, PROJECT_PATH
from arcor2.source.utils import make_executable

PORT = 6790

logger = Logger.with_default_handlers(name='manager',
                                      formatter=aiologger_formatter())

PROCESS: Union[asyncio.subprocess.Process, None] = None
PROJECT_EVENT: PackageStateEvent = PackageStateEvent()
PACKAGE_INFO_EVENT: Optional[PackageInfoEvent] = None
ACTION_EVENT: Optional[ActionStateEvent] = None
ACTION_ARGS_EVENT: Optional[CurrentActionEvent] = None
TASK = None

CLIENTS: Set = set()

MAIN_SCRIPT_NAME = "script.py"


def process_running() -> bool:
Example #15
import logging

from aiohttp import web
from aiologger import Logger
from aiologger.handlers.files import AsyncFileHandler

formatter = logging.Formatter('%(asctime)s: %(levelname)s: %(message)s')
logger = Logger.with_default_handlers(name='async-logger',
                                      level=logging.WARNING,
                                      formatter=formatter)

file_handler = AsyncFileHandler(filename='log_data.log')
logger.add_handler(file_handler)


class ClientErrorHandler(web.HTTPException):
    @staticmethod
    async def not_found(req_text):  # 404
        return web.HTTPNotFound()

    @staticmethod
    async def bad_request(req_text):
        await logger.warning(f'Failed due to {req_text}')
        return web.HTTPBadRequest(reason=req_text)  # 400


class ServerErrorsHandler(web.HTTPException):
    @staticmethod
    async def not_implemented():  # 501
        return web.HTTPNotImplemented()