Example #1
    def __init__(self, connection_params: Config, logger: Logger):
        self._connection_params = Config() if connection_params is None else connection_params
        self._logger = Logger() if logger is None else logger
        self._domain = 'esceer.com'
        self._dns_type = 'A'
        self._dns_record_name = '@'
Example #2
    def __init__(self, base_path, sources):
        self.sources = [
            sources[source](
                base_path / source,
                Logger(logging.getLogger(f"test_assistant_crawler.{source}"),
                       ALERT_FUNCTION, ALERT_SETTINGS))
            for source in sources
        ]
Example #3
    def __init__(self, params):
        self._debug = params.debug
        self._subsite = params.subsite
        self._logger = Logger("spuppy", "logs", params.debug)
        self._path = params.directory
        self._files = params.files
        self._out = params.out
Example #4
            return
        if datetime.now().hour == self.timer_list[0]:
            self.num = self.timer_list.pop(0)
            logger.info('Emiter {0} task start...'.format(self.num))
            alert = self.emiter()
            alert.append(self.emiter_item(alert[0]))
            logger.info('Emitted a {0}:{1} {2} alert'.format(
                alert[0], alert[1], alert[2]))
            return alert
        if datetime.now().hour > self.timer_list[0]:
            self.timer_list.pop(0)
            logger.info('Popped one point...')

    def action(self):
        tasks = self.actions
        for task in tasks:
            target = task['action']['host']
            item = task['action']['item']
            stime = task['action']['start_time']
            rtime = task['action']['recovery_time']
            ack = task['action']['ack']
            state = task['action']['state']
            if ack:
                pass


if __name__ == '__main__':
    Logger()
    a = Emitor('config.yml')
    a.start()
Example #5
                    default=5,
                    help='Save model checkpoints every k epochs.')
parser.add_argument('--early_stop', type=bool, default=True)
parser.add_argument('--patience', type=int, default=10)
parser.add_argument('--resume', type=bool, default=False)
parser.add_argument('--resume_path',
                    type=str,
                    default='./saved_models/model_best.pt')
parser.add_argument('--log_step', type=int, default=20)

# other
parser.add_argument('--cuda', type=bool, default=torch.cuda.is_available())
parser.add_argument('--config_file', type=str, default='./config.json')
parser.add_argument('--seed', type=int, default=1234)

logger = Logger()

cfg = Config(logger=logger, args=parser.parse_args())
cfg.print_config()
cfg.save_config(cfg.config['config_file'])

torch.manual_seed(cfg.config['seed'])
torch.cuda.manual_seed(cfg.config['seed'])
torch.backends.cudnn.enabled = False
np.random.seed(cfg.config['seed'])

# vocab
vocab = load_vocab('dataset/vocab.txt')
tokenizer = Tokenizer(vocab)

# data_loader
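
Note that argparse's type=bool does not parse textual booleans: bool('False') is True, so flags such as --early_stop and --resume cannot actually be switched off from the command line. A minimal sketch of a common workaround (the str2bool helper is illustrative and not part of the original code):

import argparse

def str2bool(value):
    # Map common textual spellings onto real booleans; plain bool() would not.
    if isinstance(value, bool):
        return value
    if value.lower() in ('yes', 'true', 't', '1'):
        return True
    if value.lower() in ('no', 'false', 'f', '0'):
        return False
    raise argparse.ArgumentTypeError(f'Boolean value expected, got {value!r}')

parser = argparse.ArgumentParser()
parser.add_argument('--early_stop', type=str2bool, default=True)
parser.add_argument('--resume', type=str2bool, default=False)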
Example #6
# encoding=utf-8

import pymysql

from com.entity.Article import Article
from config import Logger

log = Logger('all.log', level='debug').logger


class ArticleDao:

    # Query data
    @staticmethod
    def select_by_article_id(article_id):
        db = pymysql.connect(host="localhost",
                             user="******",
                             password="******",
                             db="keqiao",
                             charset="utf8mb4")
        cursor = db.cursor()
        select_sql = "select sid, article_id, name, url, read_count, discuss_count, spread_count from keqiao_article where article_id = '{}' ".format(
            article_id)
        try:
            cursor.execute(select_sql)
            result = cursor.fetchone()
            if result is None:
                return None
            else:
                sid = result[0]
                article_id = result[1]
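
The query above builds SQL with str.format, which leaves the lookup open to SQL injection if article_id ever comes from untrusted input. A minimal sketch of the same lookup using pymysql's parameter binding (it reuses the pymysql import from the snippet above; credentials are elided as in the original, and the row is returned as-is rather than unpacked field by field):

    @staticmethod
    def select_by_article_id(article_id):
        db = pymysql.connect(host="localhost",
                             user="******",
                             password="******",
                             db="keqiao",
                             charset="utf8mb4")
        cursor = db.cursor()
        # %s placeholders let the driver escape the value instead of str.format.
        select_sql = ("select sid, article_id, name, url, read_count, "
                      "discuss_count, spread_count from keqiao_article "
                      "where article_id = %s")
        try:
            cursor.execute(select_sql, (article_id,))
            return cursor.fetchone()
        finally:
            cursor.close()
            db.close()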
Example #7
import json
import logging.config
from datetime import datetime
from traceback import format_exc as trace

from config import ALERT_FUNCTION, ALERT_SETTINGS, DB_INFO_ALERT_SETTINGS, MONGODB_CONNECTION, LOG_PATH
from config import LOG_CONFIG, Logger

logging.config.fileConfig(LOG_CONFIG)
errors_logger = Logger(logging.getLogger("test_assistant_crawler.collector"),
                       ALERT_FUNCTION, ALERT_SETTINGS)

db_update_logger = Logger(
    logging.getLogger("test_assistant_crawler.db_updater"), ALERT_FUNCTION,
    DB_INFO_ALERT_SETTINGS)


class Collector:
    def __init__(self, base_path, sources):
        self.sources = [
            sources[source](
                base_path / source,
                Logger(logging.getLogger(f"test_assistant_crawler.{source}"),
                       ALERT_FUNCTION, ALERT_SETTINGS))
            for source in sources
        ]

    def load_all(self):

        for module in self.sources:
            db_update_logger.info(
                f'Loading data from source {module.__name__}',
Example #8
    def __init__(self, config: Config, logger: Logger):
        self._config = Config() if config is None else config
        self._logger = Logger() if logger is None else logger
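
Both parameters are compared against None, so the annotations could reflect that with Optional hints and None defaults. A minimal sketch (the class name Service is hypothetical, since the original snippet does not show its class; Config and Logger stand for the same classes used in the snippet above):

from typing import Optional

from config import Config, Logger


class Service:  # hypothetical name for illustration only
    def __init__(self, config: Optional[Config] = None, logger: Optional[Logger] = None):
        # Fall back to default instances when nothing is injected.
        self._config = Config() if config is None else config
        self._logger = Logger() if logger is None else logger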
Example #9
import argparse
import logging.config
import sys

from config import ELASTICSEARCH_CONNECTION, BASE_PATH, DATA_SOURCES, ALERT_FUNCTION, DB_INFO_ALERT_SETTINGS
from config import LOG_CONFIG, Logger
from sources.collector import Collector

logging.config.fileConfig(LOG_CONFIG)
logger = Logger(logging.getLogger("test_assistant_crawler.runner"),
                ALERT_FUNCTION,
                DB_INFO_ALERT_SETTINGS)

if __name__ == '__main__':

    parser = argparse.ArgumentParser()
    parser.add_argument('--load', action='store_const', const=True)
    parser.add_argument('--save', action='store_const', const=True)
    parser.add_argument('--drop_elastic', action='store_const', const=True)
    parser.add_argument('--migrate', action='store_const', const=True)

    options = parser.parse_args(sys.argv[1:])

    col = Collector(base_path=BASE_PATH, sources=DATA_SOURCES)

    if options.load:
        logger.info('Action --load triggered', alert=True)

        col.load_all()  # TODO: add some kind of report on load completion with information about the results

        logger.info('Action --load completed', alert=True)
Example #10
import sys

from config import Config, Logger
from utils import IpUtils
from web_connector import GoDaddyConnector

if __name__ == '__main__':
    if len(sys.argv) != 2:
        print('Invalid arguments')
        print('Usage:')
        print('update_dns.py <dev|prod>')
        sys.exit(1)

    environment = sys.argv[1]
    config = Config(environment)
    logger = Logger(config)

    ip_utils = IpUtils(config, logger)
    external_ip = ip_utils.get_external_ip()

    go_daddy_connector = GoDaddyConnector(config, logger)
    if external_ip != go_daddy_connector.fetch_ip_from_dns():
        go_daddy_connector.update_dns(external_ip)