Example No. 1
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        super(EmailTask, self).on_failure(exc, task_id, args, kwargs, einfo)
        logger.info('Task Failed : Task Name - {0}, args - {1}, id - {2}, stack_trace - {3}'.format(
            self.request.task, args, self.request.id, einfo
        ))
        email = (json.loads(args[0])).get('email')

        user_id = session.query(User.id).filter(User.email == email).first()
        if not user_id:
            raise ValueError("Enter-Valid-Email")
        try:
            celery_task_obj = CeleryTask(
                user_id=int(user_id[0]),
                task_module=self.request.task,
                payload_data=email,
                task_status='EXCEPTION',
                started_on=datetime.datetime.now(),
                task_id=self.request.id,
            )
            session.add(celery_task_obj)
            session.commit()
        except Exception as e:
            logger.info("Insertion Failed with - {}".format(str(e)))
            session.rollback()
            session.remove()
Example No. 2
    def send_email(self, url, data):
        sender = self._smtp_settings['user']
        message = MIMEMultipart("alternative")
        message['Subject'] = f'Data from {url}'
        message['From'] = sender

        text_list = [f"Hi. There is a parsed result from {url}:\n"]
        html_list = [
            f"<html><body><p>Hi. There is a parsed result from {url}:</p><br/><ul>"
        ]

        for d in data:
            text = d.get_text()
            href = d.get_href()
            text_list.append(f"{text} - {href}\n")
            html_list.append(f'<li><a href="{href}">{text}</a></li>')

        html_list.append("</ul><br/><body><html>")

        part1 = MIMEText(''.join(text_list), "plain")
        part2 = MIMEText(''.join(html_list), "html")

        message.attach(part1)
        message.attach(part2)

        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(self._smtp_settings['address'],
                              self._smtp_settings['port'],
                              context=context) as server:
            server.login(self._smtp_settings['user'],
                         self._smtp_settings['password'])
            for recipient in self._recipients:
                server.sendmail(sender, recipient, message.as_string())
                logger.info(f'Data was sent to {recipient}')
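For reference, a minimal sketch of the settings dictionary this method reads; the keys are taken from the code above, while the host, port, and credentials are placeholder assumptions:

# Placeholder SMTP settings matching the keys accessed in send_email above
smtp_settings = {
    'address': 'smtp.example.com',  # SMTP server host (assumed)
    'port': 465,                    # implicit-SSL port used with SMTP_SSL (assumed)
    'user': 'sender@example.com',   # login, also used as the From address
    'password': 'app-password',     # account or app password
}
recipients = ['someone@example.com']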
Example No. 3
 def server_status_check(self):
     status = self.svc.request_status and self.svc.connect_status and self.svc.listen_status
     if status:
         logger.info('{} status : OK\n'.format(self.svc.name))
     else:
         warning_request(self.svc.name, 0)
         logger.warning('{} status : FAIL\n'.format(self.svc.name))
Example No. 4
    def on_success(self, retval, task_id, args, kwargs):
        super(EmailTask, self).on_success(retval, task_id, args, kwargs)
        logger.info('Task Completed Successfully : Task Name - {0}, args - {1}, id - {2}'.format(
            self.request.task, args, self.request.id
        ))
        email = (json.loads(args[0])).get('email')

        user_id = session.query(User.id).filter(User.email == email).first()
        if not user_id:
            raise ValueError("Enter-Valid-Email")
        try:
            celery_task_obj = CeleryTask(
                user_id=int(user_id[0]),
                task_module=self.request.task,
                payload_data=email,
                task_status='COMPLETED',
                started_on=datetime.datetime.now(),
                task_id=self.request.id,
            )
            session.add(celery_task_obj)
            session.commit()
        except Exception as e:
            logger.info("Insertion Failed with - {}".format(str(e)))
            session.rollback()
            session.remove()
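A hypothetical sketch of how the on_success/on_failure handlers above could be attached to a Celery task; the app instance (app) and the task function name are assumptions, only the EmailTask base class comes from these examples:

# Hypothetical wiring: Celery calls the handlers after the task body returns or raises
@app.task(base=EmailTask, bind=True)
def send_email_task(self, payload):
    # payload is the JSON string whose 'email' key on_success/on_failure read via args[0]
    ...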
Example No. 5
 def search_question(self, question=''):
     """
     Look up a question in the question bank.
     :param question: the question text to search for
     :return: the matching row, or {} if nothing is found
     """
     try:
         if not self.db:
             logger.info("No active MySQL database connection")
             return {}
         try:
             self.db.close()  # Force-close the connection, otherwise the query may not see the latest data
         except pymysql.err.Error:
             logger.info("Database connection already closed")
         self.db.ping()  # Check that the connection is alive (reconnects if needed)
         sql = self.gen_sql(question)
         # logger.info("sql: {}".format(sql))
         with self.db.cursor() as cursor:
             cursor.execute(sql)
             result = cursor.fetchone() or {}
     except Exception as e:
         logger.info("Error: {}".format(e))
         logger.info("Error details: {}".format(traceback.format_exc()))
         result = {}
     logger.info("Query result for question `{}`: {}".format(question, result))
     return result
Example No. 6
	def __init__(self, config: Config):
		self._scheduler = Scheduler()
		self._email_notification = EmailNotification(config.get('smtp'), config.get('recipients'))
		for site in config.get('sites'):
			self._scheduler.every_minutes(Site(site), self.parse)
		logger.info(f"Will be parsing {len(config.get('sites'))} site(s)")
Example No. 7
def start():
    if validate_properties(config.get_properties()):
        logger.info('Starting application')
        parser = Parser(config)
        parser.run()
    else:
        logger.error('Properties validation failed, application will not start')
Example No. 8
    def train(self):
        params = filter(lambda p: p.requires_grad, self.model.parameters())
        optimizer = Optimizer(params, self.args)
        patient = 0
        best_dev_acc, best_test_acc = 0, 0
        for ep in range(1, self.args.epoch + 1):
            train_loss, train_acc = self.train_iter(ep, self.train_set,
                                                    optimizer)

            dev_acc = self.eval(self.val_set)
            if dev_acc > best_dev_acc:
                best_dev_acc = dev_acc
                test_acc = self.eval(self.test_set)
                if test_acc > best_test_acc:
                    best_test_acc = test_acc
                patient = 0
            else:
                patient += 1

            logger.info(
                '[Epoch %d] train loss: %.4f, lr: %f, Train ACC: %.4f, Dev ACC: %.4f, Best Dev ACC: %.4f, Best Test ACC: %.4f, patient: %d'
                % (ep, train_loss, optimizer.get_lr(), train_acc, dev_acc,
                   best_dev_acc, best_test_acc, patient))

            if patient >= self.args.patient:
                break

        logger.info('Final Test ACC: %.4f' % best_test_acc)
Example No. 9
    def train_iter(self, ep, train_set, optimizer):
        t1 = time.time()
        train_acc, train_loss = 0., 0.
        train_loader = DataLoader(train_set,
                                  batch_size=self.args.batch_size,
                                  shuffle=True)
        self.model.train()
        for i, batcher in enumerate(train_loader):
            batch = batch_variable(batcher, self.vocabs)
            batch.to_device(self.args.device)
            pred = self.model(batch.x, batch.nx, batch.ew)
            loss = F.nll_loss(pred, batch.y)
            loss.backward()
            nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad,
                                            self.model.parameters()),
                                     max_norm=self.args.grad_clip)
            optimizer.step()
            self.model.zero_grad()

            loss_val = loss.data.item()
            train_loss += loss_val
            train_acc += (pred.data.argmax(dim=-1) == batch.y).sum().item()

            logger.info(
                '[Epoch %d] Iter%d time cost: %.2fs, lr: %.6f, train acc: %.4f, train loss: %.4f'
                % (ep, i + 1, (time.time() - t1), optimizer.get_lr(),
                   train_acc / len(train_set), loss_val))

        return train_loss / len(train_set), train_acc / len(train_set)
Example No. 10
 def do_GET(self):
     uuid = self.clean_path()
     logger.info("Searching for DR log with ID [ {} ]".format(uuid))
     self.send_response(202)
     self.send_header(Handler.CONTENT_TYPE_HEADER, Handler.CONTENT_TYPE_JSON)
     self.end_headers()
     self.wfile.write(json.dumps(self.get_server().get_json_with_id(uuid)).encode('utf-8'))
Example No. 11
 def save_dataset_df(self, df):
     df.drop(columns=['id'], inplace=True)
     df.to_csv(cfg['paths']['data_root'] + self.dataset_info['name'] +
               "_dataframe.csv",
               index_label='id')
     logger.info("Generated dataframe and stored at " +
                 cfg['paths']['data_root'])
Example No. 12
 def __init__(self, host='', user='', password='', db='', port=3306):
     logger.info("链接数据库: {}".format((host, port, db, user)))
     self.__host = host
     self.__port = int(port)
     self.__user = user
     self.__db = db
     self.__password = password
     self.reconnect()
Example No. 13
 def sell(self, tick: Tick):
     for key, order in enumerate(self.orders):
         if order["tick"].instrument_id == tick.instrument_id:
             self.balance += tick.price * order["count"]
             self.balance -= self.fee
             self.orders.pop(key)
             logger.info("Sell: {}. Balance: {} Count orders: {}".format(tick, self.balance, len(self.orders)))
             return True
     return False
Example No. 14
 def buy(self, tick: Tick, count: int):
     if tick.price*count > (self.balance - self.fee):
         return False
     self.balance -= self.fee
     self.balance -= tick.price*count
     self.orders.append({"count": count, "tick": tick})
     logger.info("Buy: {}. Balance: {}. Count orders: {}".format(tick, self.balance, len(self.orders)))
     return True
Example No. 15
def main():
    logger.info("Run githubImporter")
    args = checkParams()
    importer = GithubImporter(args.url, args.dateFormat, args.dest)
    importer.setBroker(RabbitMq(conf["broker"]))
    importer.setCallURL()
    importer.work()
    logger.info("End githubImporter")
Example No. 16
 def save_label_text_to_label_id_dict(self, label_text_to_label_id):
     with open(
             cfg['paths']['data_root'] + self.dataset_info['name'] +
             "_label_text_to_label_id.json", "w") as f:
         json_dict = json.dumps(label_text_to_label_id)
         f.write(json_dict)
     logger.info("Generated Label to ID mapping and stored at " +
                 cfg['paths']['data_root'])
Example No. 17
def validate_properties(properties):
    try:
        main_schema(properties)
        logger.info('Properties JSON schema is valid')
        return True
    except MultipleInvalid as ex:
        logger.error(ex)
        return False
Example No. 18
def memory_check():
    try:
        mem = Memory()

        percent_metric = float(config.get("memory", "percent"))
        if mem.percent > percent_metric:
            logger.warning(memory_usage_warning + 'memory usage: {:.2f} %\n'.format(mem.percent))
        logger.info(mem)
    except Exception as e:
        logger.error(traceback.format_exc())
Example No. 19
def io_check():
    try:
        io = InputOutput()
        await_metric = float(config.get("input-output", "await"))
        util_metric = float(config.get("input-output", "util"))
        # 'await' is a reserved word in Python 3.7+, so read the attribute via getattr
        io_await = getattr(io, 'await')
        if io_await > await_metric:
            logger.warning(io_delay_warning + 'time: {} ms\n'.format(io_await))
        if io.util > util_metric:
            logger.warning(io_queue_warning + 'io queue usage : {} %\n'.format(io.util))

        logger.info(io)
    except Exception:
        logger.error(traceback.format_exc())
Example No. 20
    def getEventStatus(self):
        logger.info('Collecting event status')

        slug = 'event-status/'
        formattedSlug = slug
        completedUrl = self.getCompleteUrl(formattedSlug)
        logger.info('Request for event status : {}'.format(completedUrl))

        r = requests.get(completedUrl)
        responseData = r.json()
        return responseData
Example No. 21
    def save_graphs(self, path, graphs, labels_dict=None):

        if labels_dict is not None:
            labels_dict_tensor = {
                "glabel": torch.tensor(labels_dict["glabel"])
            }
        else:
            labels_dict_tensor = None

        graph_utils.save_dgl_graphs(path, graphs, labels_dict_tensor)
        logger.info(f"Saving {path}")
Example No. 22
    def getFixtures(self, gwNum):
        logger.info('Collecting fixture information')

        slug = 'fixtures/?event={}'
        formattedSlug = slug.format(gwNum)
        completedUrl = self.getCompleteUrl(formattedSlug)
        logger.info('Request for fixtures : {}'.format(completedUrl))

        r = requests.get(completedUrl)
        responseData = r.json()
        return responseData
Example No. 23
    def __init__(self, name, address='127.0.0.1'):
        self.name = name
        self.address = address
        self.pid = 1
        self.port = 0
        self.port_check_cmd = "ss -tunlp | grep {}".format(self.name)
        self.process_status = False
        self.listen_status = False
        self.connect_status = False
        self.request_status = False

        logger.info('Checking {} status'.format(self.name))
Example No. 24
    def getGWPlayerPick(self, teamId, gw=1):
        logger.info('Collecting player picks for {}, {}'.format(teamId, gw))

        slug = 'entry/{}/event/{}/picks/'
        formattedSlug = slug.format(teamId, gw)
        completedUrl = self.getCompleteUrl(formattedSlug)
        logger.info('Request for player picks : {}'.format(completedUrl))

        r = requests.get(completedUrl)
        responseData = r.json()
        # logger.info('Collected response for classic league standings : {}'.format(responseData))
        return responseData
Example No. 25
 def port_request_check(self):
     try:
         s = socket.socket()
         # logger.info('Attempting to connect to %s on port %s' % (self.name, self.port))
         try:
             s.connect((self.address, self.port))
             logger.info("Connected to %s on port %s" %
                         (self.name, self.port))
             return True
         except socket.error as e:
             logger.error("Connected to %s on port %s failed: %s" %
                          (self.name, self.port, e))
             return False
     except Exception as e:
         logger.error(traceback.format_exc())
Example No. 26
    def getStaticDataBootstrap(self):
        if self._staticFPLBootstrapData is not None:
            return self._staticFPLBootstrapData

        logger.info('Collecting bootstrap static data')

        slug = 'bootstrap-static/'
        formattedSlug = slug
        completedUrl = self.getCompleteUrl(formattedSlug)
        logger.info('Request for bootstrap static data : {}'.format(completedUrl))

        r = requests.get(completedUrl)
        responseData = r.json()
        # logger.info('Collected response for bootstrap static data : {}'.format(responseData))
        self._staticFPLBootstrapData = responseData
        return self._staticFPLBootstrapData
Example No. 27
 def port_listen_check(self):
     try:
         result = subprocess.Popen(self.port_check_cmd,
                                   shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
         line = result.stdout.readline().decode()  # Popen returns bytes; decode for string matching
         if line and 'LISTEN' in line:
             self.port = int(line.split()[4].split(':')[-1])
             logger.info('{} is listening at {}'.format(
                 self.name, self.port))
             return True
         else:
             logger.warning('{} port state is not listen'.format(self.name))
             return False
     except Exception as e:
         logger.error(traceback.format_exc())
Example No. 28
 def api_check(self):
     try:
         logger.info('sending request to {}'.format(self.name))
         cmd = '`which zkServer.sh` status'
         result = subprocess.Popen(cmd,
                                   shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
         f = result.stdout.read().decode()  # Popen output is bytes; decode before the 'Mode' check
         if 'Mode' in f:
             logger.info('get response from {} successfully'.format(
                 self.name))
             return True
         else:
             return False
     except Exception as e:
         logger.error(traceback.format_exc())
Example No. 29
 def process_check(self):
     try:
         cmd = 'ps -ef | grep {} | grep -v grep '.format(self.name)
         result = subprocess.Popen(cmd,
                                   shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
         line = result.stdout.readline().decode()  # decode bytes so split() yields strings
         if len(line) > 0:
             self.pid = line.split()[1]
             logger.info('{} is running with pid {}'.format(
                 self.name, self.pid))
             return True
         else:
             logger.error('{} is not running'.format(self.name))
             return False
     except Exception as e:
         logger.error(traceback.format_exc())
Example No. 30
 def api_check(self):
     try:
         logger.info('sending request to {}'.format(self.name))
         redis_login = config.redis_auth()
         cmd = "`which redis-cli` -p {} -a {} info".format(
             self.port, redis_login['password'])
         result = subprocess.Popen(cmd,
                                   shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
         f = result.stdout.read().decode()  # decode bytes before the 'Server' check
         if 'Server' in f:
             logger.info('get response from {} successfully'.format(
                 self.name))
             return True
         else:
             return False
     except Exception as e:
         logger.error(traceback.format_exc())
Example No. 31
 def __init__(self, tree=None, input_tries=None):
     """
     :param tree: an existing trie as a dict
     :param input_tries: may be a file path (JSON only, containing a dict in
         trie form or a list of words), a trie dict, or a list/set of words
     """
     # logger.info("Initializing trie")
     if tree is None:
         tree = {}
     self.tree = tree
     if not input_tries:
         pass
     elif isinstance(input_tries, str) and input_tries.startswith("NER_"):
         pass
     elif isinstance(input_tries, str) and input_tries.startswith("RE_PATTERN_"):
         pass
     elif isinstance(input_tries, str) and os.path.isfile(input_tries):
         with open(input_tries, encoding='utf8') as f:
             logger.info("Loading file: {}".format(input_tries))
             tries = json.load(f)
         if isinstance(tries, dict):
             tree = tries
             self.tree = tree
         elif isinstance(tries, (list, set)):
             for word in tries:
                 self.add(word)
         else:
             logger.info("Invalid data in input file")
     elif isinstance(input_tries, dict):
         tree = input_tries
         self.tree = tree
     elif isinstance(input_tries, (list, set)):
         for word in input_tries:
             self.add(word)
     else:
         logger.info("Invalid input argument: {}".format(input_tries))
Example No. 32
import tornado
import tornado.ioloop
import tornado.web
import url_server.router.router_settings as settings
from url_server.router.router import create_application
from logger.logger import logger as logger

# main.py is the main entry point of the tornado app; to run the application, just run "python main.py"

# The app listens on the port defined in settings.py, and can then be reached at
# http://localhost:settings.port in any browser, or with the python requests library
if __name__ == "__main__":
    # Set the application to listen on the configured port (8888 in settings.py)
    application = create_application()
    application.listen(settings.port)

    # Get the current IOLoop
    currentIOLoop = tornado.ioloop.IOLoop.current()

    # Log the port being listened on
    logger.info("Started application on port:" + str(settings.port))

    # Start the IOLoop
    currentIOLoop.start()
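As the comments note, the running app can also be exercised with the python requests library; a minimal sketch, assuming the port from settings.py is 8888 and create_application() registers a root handler (both assumptions):

import requests

# Query the locally running tornado app and print the raw response
response = requests.get("http://localhost:8888/")
print(response.status_code, response.text)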