Exemplo n.º 1
0
def walk():
    """Run a single iteration of the paper-reading pipeline."""
    # Step 1: the most recent record for this user doubles as the
    # paper that should be read during this cycle.
    print(f'Fetching paper-to-read for {USER_EMAIL}')
    current_record = mongo_obj.get_recent(USER_EMAIL)
    current_paper_id = current_record['paper_id']
    print(f'Found paper_id {current_paper_id}')

    # Step 2: analyse the paper and collect the produced artifacts.
    url = compose_paper_url(current_paper_id)
    print(url)
    extraction_args = {
        "filepath": url,
        "ke": True,
        "showke": True,
        "clip_abstract": True
    }
    deliverables = information_extraction.main(extraction_args)
    print('Analysis done')

    # Step 3: e-mail the knowledge-extraction image to the recipient.
    ke_image = deliverables['ke']
    send_mail('*****@*****.**', book_image=get_binary_img(ke_image))

    # Step 4: pick the paper for the next cycle.
    next_paper_id = sample_next_paper(current_paper_id)
    print('Next paper_id {}'.format(next_paper_id))

    # Step 5: persist the upcoming paper so the next run can find it.
    mongo_obj.push(USER_EMAIL,
                   {'paper_id': next_paper_id, 'date': get_current_time()})
Exemplo n.º 2
0
def questanswer(request, page=None):
    """Render the Q&A page; on POST, validate and store a new question."""
    context = RequestContext(request)
    context['title'] = u'Вопрос-ответ'

    if request.method == 'POST':
        # Validate both fields, remembering the submitted values so the
        # form can be re-populated when only one of them is missing.
        author = request.POST.get('q_autor', '')
        message = request.POST.get('q_mes', '')

        context['author_error'] = not author
        context['mes_error'] = not message
        if author:
            context['q_autor'] = author
        if message:
            context['q_mes'] = message

        if not (context['author_error'] or context['mes_error']):
            qa = QuestAnswer(author=context['q_autor'], question=context['q_mes'])
            send_mail(context['q_autor'], context['q_mes'])
            qa.save()
            context['ok'] = True

    # Unanswered (non-public) questions plus a paginated public list.
    context['unanswered'] = QuestAnswer.objects.order_by('-date_publication').filter(is_public=False)
    paginate(
        context, 'questanswer',
        QuestAnswer.objects.order_by('-date_publication').filter(is_public=True),
        count=QA_ON_PAGE, page=page,
        root=context['current_section'].path
    )
    return render_to_response('qa/questanswer.html', context)
Exemplo n.º 3
0
def main():
    """Console menu loop: register a user, log a user in, or quit."""
    repository = UsersRepository()
    while True:
        action = input("Select action: \n 1-register 2-log in 3- quit ")

        if action == "3":
            print("See you soon")
            return

        if action == "1":
            # Registration: unique name, validated password, confirmation.
            name = input("Please enter your username ")
            if repository.contains(name):
                print("Username is already taken. Please try again")
                continue
            mail = input("Please enter your e-mail address ")
            password = input(
                "Please select your password. Password must contains at least 8 character and one number. "
            )
            if not validate_password(password):
                print("Password does not meet specified requirements")
                continue
            if password != input("Confirm your password "):
                print("Wrong password. Try again")
                continue
            if repository.add(name, encode_password(password), mail):
                print("User created successfully")
                send_mail(mail, name)
        elif action == "2":
            # Login: compare the encoded password against the repository.
            name = input("Please enter your username ")
            password = input("Please select your password ")
            if repository.validate(name, encode_password(password)):
                print("You have been logged in successfully")
            else:
                print("Invalid username or password")
        else:
            print("Invalid action. Try again")
Exemplo n.º 4
0
def sender_wrapper(json_config, img_name, count):
	"""Upload *img_name* to Dropbox and send a status mail.

	On upload failure a failure notice is mailed and the function
	returns; a success notice is only mailed when count == 1.
	NOTE(review): the mail address/config comes from sys.argv[1], not
	from json_config — confirm that is intended.
	"""
	try:
		send_to_dropbox (sys.argv[1], img_name)
	except Exception:
		# Narrowed from a bare `except:` so SystemExit and
		# KeyboardInterrupt are no longer swallowed.
		send_mail(sys.argv[1], "Senzor je snimio kretnju, ali slika nije poslana na Dropbox")
		return

	if count == 1:
		send_mail(sys.argv[1], "Senzor je snimio kretnju i slika je poslana na Dropbox")
Exemplo n.º 5
0
def send_mail():
    """HTTP handler: forward the posted form fields to mail_sender.

    Returns "OK" on success, or an error string when sending raises.
    """
    try:
        mail_sender.send_mail(
            receiver_email=request.forms.get("receiver_email"),
            message_subject=request.forms.get("message_subject"),
            message_text=request.forms.get("message_text"),
            message_html=request.forms.get("message_html"))
        return "OK"
    except Exception:
        # Narrowed from a bare `except:`; also fixed the garbled
        # "Sending are fallen" response text.
        return "Sending failed"
Exemplo n.º 6
0
def send_nike_email():
    """Fetch Air Force listings from Nike's search API and e-mail them."""
    # Nike exposes a JSON search endpoint, so no HTML parsing is needed.
    NIKE_SEARCH_ROUTE = 'https://busca.nike.com.br/busca?q=air%20force&origin=autocomplete&common_filter%5B372%5D=3257&sort=5&ajaxSearch=1'
    response = json.loads(get_page(NIKE_SEARCH_ROUTE))

    # Pull out the total hit count and the product list.
    sneaker_count = response["totalProducts"]["totalResults"]
    sneakers = response["productsInfo"]["products"]

    # Render the e-mail body from the template and dispatch it.
    send_mail(fill_email_template(sneakers), sneaker_count, 1)
Exemplo n.º 7
0
def send(name, website=None, to_email=None, verbose=False):
    """Format a message for *name* (optionally with *website*) and mail it.

    Returns True when send_mail succeeded, False otherwise.
    Raises AssertionError when to_email is missing.
    """
    # NOTE(review): assert is stripped under `python -O`; consider
    # raising ValueError instead if no caller relies on AssertionError.
    assert to_email is not None
    # `is not None` replaces the non-idiomatic `!= None` comparison.
    if website is not None:
        msg = format_msg(name=name, website=website)
    else:
        msg = format_msg(name=name)
    if verbose:
        print(name, website, to_email)
    try:
        send_mail(text=msg, to_emails=[to_email], html=None)
        sent = True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        sent = False
    return sent
Exemplo n.º 8
0
def send_authenticFeet_email():
    """Scrape the AuthenticFeet search page and e-mail the results."""
    AF_URL = 'https://www.authenticfeet.com.br/masculino/tenis/41/air%20force?PS=24&map=c,c,specificationFilter_5,ft'

    # This shop has no JSON API, so fetch the raw page for parsing.
    page = get_page(AF_URL)

    # Extract the hit count and the individual sneaker entries.
    qty = af_get_qty_seach(page)
    sneakers = af_get_sneakers_data(page)

    # Render the e-mail body from the template and dispatch it.
    send_mail(fill_email_template(sneakers), qty, 0)
Exemplo n.º 9
0
def send_mails(task_id):
    """Notify every configured recipient that task *task_id* has failed.

    Recipients come from the task's comma-separated 'mail_recipients'
    field; nothing is sent when it is empty.
    """
    task_data = get_task_data(task_id)

    if not task_data['mail_recipients']:
        return

    mail_recipients = [x.strip() for x in task_data['mail_recipients'].split(',')]

    # Subject and body do not depend on the recipient; build them once.
    subject = 'Task {} has failed'.format(task_data['name'])
    # BUG FIX: the original ended the first line with a comma, which made
    # `message` a 2-tuple instead of one concatenated string (and lost
    # the space between "that" and "task").
    message = ('This is a message from pyMonitor to inform you that '
               'task {0} has failed at {1} on {2}'.format(
                   task_data['name'],
                   task_data['url'],
                   time_to_string_mail()))

    for mail in mail_recipients:
        mail_sender.send_mail(message, subject, mail)
Exemplo n.º 10
0
def __emp_mails():
    """Mail the HTML summary produced by __emp() for dept_no = 10.

    Returns False on failure; returns None when there is no data or on
    success.
    """
    try:
        before = "<br /><table border=1 cellpadding=0 cellspacing=0 width=500px><thead bgcolor='#aaaaaa'><tr><th>产品</th><th>人员</th><th>昨日打款金额</th></tr></thead>"
        after = "</table> <br /><hr />IT部<br /> 华镇金控集团"
        content = __emp("and dept_no = 10")
        if content == "null":
            # The helper signals "no rows" with the literal string "null".
            return
        content = before + content + after
        to = ["*****@*****.**"]
        sub = "渠道总监"
        mail_sender.send_mail(to, sub, content)

    except Exception as e:
        # `except Exception, e:` is Python-2-only syntax (a SyntaxError
        # on Python 3); `as` works on both Python 2.6+ and 3.
        print(e)
        return False
Exemplo n.º 11
0
def __dept_mails():
	"""Mail the HTML summary produced by __dept() for dept_no > 11.

	Returns False on failure; returns None when there is no data or on
	success.
	"""
	try:
		before = "<br /><table border=1 cellpadding=0 cellspacing=0 width=500px><thead bgcolor='#aaaaaa'><tr><th>部门</th><th>产品</th><th>昨日打款金额</th></tr></thead>"
		after = "</table> <br /><hr />IT部<br /> 华镇金控集团"
		content = __dept("and dept_no>11")
		if content == "null":
			# The helper signals "no rows" with the literal string "null".
			return
		content = before + content + after
		to = ["*****@*****.**"]
		sub = "副总"
		mail_sender.send_mail(to, sub, content)

	except Exception as e:
		# `except Exception, e:` is Python-2-only syntax (a SyntaxError
		# on Python 3); `as` works on both Python 2.6+ and 3.
		print(e)
		return False
Exemplo n.º 12
0
def __process_mails():
    """Send up to 5 queued e-mails (status = 0) and mark them as sent.

    Pulls pending rows from the `email` table, downloads each attachment
    listed in `email_files`, hands everything to mail_sender.send_mail,
    and flips `status` to 1 on success.
    """
    # Pre-bind so the `finally` block can safely test whether the
    # connection/cursor were ever created.  Previously an early return
    # (get_conn failing) raised NameError inside `finally`.
    cnn_base_info = None
    cursor_base_info = None
    try:
        cnn_base_info = mysql_conn.get_conn(db_config.config_base_info)
        if cnn_base_info is None:
            __write_warn("can't get connection of notify","__process_mails")
            return
        cursor_base_info = cnn_base_info.cursor(dictionary=True)
        sql_top_10_mail="""SELECT
            `email`.`id`,
            `email`.`to`,
            `email`.`cc`,
            `email`.`subject`,
            `email`.`host`,
            `email`.`user`,
            `email`.`password`,
            `email`.`suffix`,
            `email`.`sender_name`,
            `email`.`body`,
            `email`.`from_name`
            FROM
            `email`
            WHERE
            `email`.`status` = 0
            ORDER BY id ASC
            LIMIT 5;"""
        cursor_base_info.execute(sql_top_10_mail)
        dict_mail_list = cursor_base_info.fetchall()
        for mail in dict_mail_list:
            print(mail["id"])  # py3 fix: print is a function
            # Parameterized query instead of %-string interpolation.
            sql_select_email_files = '''
                select email_no,file_url,file_name from email_files where email_no = %s
            '''
            cursor_base_info.execute(sql_select_email_files, (mail["id"],))
            email_files = cursor_base_info.fetchall()
            # Download every attachment locally and collect the names.
            namelist = []
            for email_file in email_files:
                url = "http://192.168.1.236/upload/files"+str(email_file["file_url"])
                name = download(sys.argv[1:],url)
                namelist.append(name)
            if mail_sender.send_mail([str(mail["to"])],str(mail["subject"]),str(mail["body"]),namelist,str(mail["sender_name"]),str(mail["host"]),str(mail["user"]),str(mail["password"]),str(mail["suffix"]),str(mail["from_name"])):
                cursor_base_info.execute(
                    "UPDATE `email` SET `status`=1 WHERE id = %s", (mail["id"],))
                __write_info("send success","__process_mails")
            else:
                __write_info("send failed","__process_mails")
    except mysql.connector.Error as e:
        __write_err('raise exception when sending mail!{}'.format(e))
    finally:
        if cursor_base_info is not None:
            cursor_base_info.close()
        if cnn_base_info is not None:
            cnn_base_info.close()
Exemplo n.º 13
0
def __main():
    """Mail expiry reminders for products ending in 9-10 days.

    Needs to run after product_status_changer.py.
    """
    connection = mysql_conn.get_conn(db_config.config_product)
    if not connection:
        __write_warn("can't get connection of product")
        return
    cursor = connection.cursor(dictionary=True)

    try:
        # Products whose `end` date falls between 9 and 10 days from now.
        query = """
        SELECT
        `name`,
        `end`
        FROM
        product
        WHERE
        `end` > ADDDATE(NOW(), 9)
        AND `end` < ADDDATE(NOW(), 10);"""

        cursor.execute(query)
        expiring = cursor.fetchall()

        subject = "产品到期提醒"
        to_list = ["*****@*****.**", "*****@*****.**"]
        content = "产品%s即将于 %s 到期"

        # One reminder mail per expiring product.
        for row in expiring:
            mail_sender.send_mail(to_list, subject,
                                  content % (row["name"], row["end"]))

    except mysql.connector.Error as e:
        __write_err('send reminder mail error!{}'.format(e))
    finally:
        cursor.close()
        connection.close()
Exemplo n.º 14
0
 def get_tags_from_news(self, news_info):
     """Resolve tags for a news item, preferring the Redis cache.

     Falls back to the remote tags server with a bounded retry loop;
     sends an alert mail and returns (None, '') on a non-200 server
     status or when every retry fails at the transport level.
     """
     # Cache hit: return immediately with the cached model version.
     tags, model_version = self.get_tags_from_redis(news_info['account'])
     if tags is not None:
         return tags, model_version
     # Concatenate every text fragment of the article body.
     contents = ''.join([
         content['text'] for content in news_info['content']
         if 'text' in content
     ])
     # NOTE(review): 'source' copies the subtitle — confirm intended.
     info = {
         'account': news_info['account'],
         'content_tokens': self.split_words(contents),
         'title_tokens': self.split_words(news_info['title']),
         'source': news_info['subtitle'],
         'subtitle': news_info['subtitle'],
     }
     # sort_keys gives a stable payload; encoded to bytes for transport.
     info_json = json.dumps(info, ensure_ascii=False,
                            sort_keys=True).encode('utf8')
     for times in range(self.tags_retry_times):
         try:
             tags_result = json.loads(self.get_tags_from_server(info_json))
         except TTransport.TTransportException:
             # Transport hiccup: wait and retry.
             time.sleep(self.tags_retry_interval)
             continue
         if tags_result['status'] == 200:
             # Each entry is a sequence whose first element is the tag.
             tags = []
             for tags_info in tags_result['tags']:
                 tags.append(tags_info[0])
             return tags, tags_result['model_version']
         else:
             # Non-200 status: alert once and give up (no retry).
             subject = '{Matrix}{feeds}{tags client error}'
             message = 'Invalid Return Code %s' % tags_result['status']
             send_mail(subject, message)
             return None, ''
     # Every retry failed to reach the server: alert and give up.
     subject = '{Matrix}{feeds}{lost tags server}'
     message = 'Server Lost\nSpider Name: %s\nRetry Times: %s\nRetry Interval: %s\n' % \
               ('breaking_news', self.tags_retry_times, self.tags_retry_interval)
     send_mail(subject, message)
     return None, ''
Exemplo n.º 15
0
def __main():
    """Mail expiry reminders for products ending in 9-10 days.

    Needs to run after product_status_changer.py.
    """
    db_conn = mysql_conn.get_conn(db_config.config_product)
    if not db_conn:
        __write_warn("can't get connection of product")
        return
    db_cursor = db_conn.cursor(dictionary=True)

    try:
        # Products whose `end` date falls between 9 and 10 days from now.
        sql = """
        SELECT
        `name`,
        `end`
        FROM
        product
        WHERE
        `end` > ADDDATE(NOW(), 9)
        AND `end` < ADDDATE(NOW(), 10);"""

        db_cursor.execute(sql)
        rows = db_cursor.fetchall()

        subject = "产品到期提醒"
        to_list = ["*****@*****.**", "*****@*****.**"]
        content = "产品%s即将于 %s 到期"

        # One reminder mail per expiring product.
        for row in rows:
            mail_sender.send_mail(to_list, subject,
                                  content % (row["name"], row["end"]))

    except mysql.connector.Error as e:
        __write_err('send reminder mail error!{}'.format(e))
    finally:
        db_cursor.close()
        db_conn.close()
Exemplo n.º 16
0
def lambda_handler(event, context):
    """AWS Lambda entry point: mail the list of currently eligible persons.

    Reads FILE_NAME as CSV, keeps rows whose beginning_date/Frequency
    pass validate_date, then mails the comma-joined names.
    """
    eligible = []
    with open(FILE_NAME, "r") as csv_file:
        for row in csv.DictReader(csv_file):
            print(row.get("beginning_date"))
            if validate_date(row.get("beginning_date"), row.get("Frequency")):
                eligible.append(row.get("Person"))
    print(eligible)

    ret_val = send_mail(",".join(eligible))
    status_body = "Successfully sent the mail" if ret_val else "Some shit!"
    return {"statusCode": 200, "body": json.dumps(status_body)}
Exemplo n.º 17
0
def get_foreign_data(target='', current_timestamp=0):
    """Crawl the per-country (foreign) infection table from *target*.

    Parses the announcement time and each country's certified/dead
    counts, accumulates any parse failures into a textual report, mails
    the report via mail_sender, and returns a list whose first element
    is the announcement time (unix seconds) followed by one dict per
    country: {'country': ..., 'certified': int, 'dead': int}.
    """
    logger.info('get_foreign_data: function started | target=' + target)

    downloaded_html = urlopen(target)
    logger.info('get_foreign_data: html downloaded')
    beautifulsoup_object = BeautifulSoup(downloaded_html, 'html.parser')
    logger.info('get_foreign_data: html parsed to beautifulsoup object')

    # Announcement time: year is hard-coded to '2020'; month, day and
    # hour are scraped from the first 's_descript' paragraph.
    announced_time = [
        '2020',
        re.findall(
            '([0-9]+)[.]',
            beautifulsoup_object.findAll('p', class_='s_descript')[0].text)[0],
        re.findall(
            '[.]([0-9]+)',
            beautifulsoup_object.findAll('p', class_='s_descript')[0].text)[0],
        re.findall(
            '([0-9]+)시',
            beautifulsoup_object.findAll('p', class_='s_descript')[0].text)[0]
    ]
    logger.info('get_foreign_data: get announced time | announced_time=' +
                str(announced_time))

    datetime_object = datetime.datetime.strptime(str(announced_time),
                                                 "['%Y', '%m', '%d', '%H']")
    logger.info(
        'get_foreign_data: convert announced time to datetime object | datetime_object='
        + str(datetime_object))
    # 32400 s = 9 h; presumably shifts local KST to UTC — TODO confirm.
    announced_time_unix = int(time.mktime(datetime_object.timetuple())) - 32400
    logger.info(
        'get_foreign_data: convert datetime object to unix time | announced_time_unix='
        + str(announced_time_unix))

    # The first <tbody> holds the country table; rows are read reversed.
    raw_table = beautifulsoup_object.findAll('tbody')
    logger.info('get_foreign_data: table picked out | raw_table=' +
                str(raw_table))
    raw_table_beautifulsoup_object = BeautifulSoup(str(raw_table[0]),
                                                   'html.parser')
    logger.info(
        'get_foreign_data: convert raw table to beautifulsoup object | raw_table_beautifulsoup_object='
        + str(raw_table_beautifulsoup_object))
    table_data_rows = raw_table_beautifulsoup_object.findAll('tr')
    logger.info(
        'get_foreign_data: export table data from raw_table_beautifulsoup_object | table_data_rows='
        + str(table_data_rows))
    table_data_rows.reverse()
    logger.info('get_foreign_data: reverse exported data | table_data_rows=' +
                str(table_data_rows))

    foreign_data_list = [announced_time_unix]
    logger.info(
        'get_foreign_data: declare foreign_data_list | foreign_data_list=' +
        str(foreign_data_list))

    # Each error list uses index 0 as a "did it happen" flag; subsequent
    # entries are [exception, context] pairs for the mailed report.
    convert_error_list = [0]
    database_error_list = [0]
    dictionary_error_list = [0]

    # report_level: 0 = info, 1 = warn (unknown country name),
    # 2 = error (extract/convert failure), 3 = fatal (no rows at all).
    report_message = '* Dropper API Foreign Crawling Report *\n\n\n'
    report_level = 0

    index_no = 0

    try:
        # First row: the country name sits in a <th>; the combined
        # "certified (dead)" cell is td[0].
        table_data = table_data_rows[0]
        try:
            table_data_beautifulsoup_object = BeautifulSoup(
                str(table_data), 'html.parser')
            logger.info(
                'get_foreign_data: convert table_data to beautifulsoup object | table_data_beautifulsoup_object='
                + str(table_data_beautifulsoup_object))
            try:
                country = table_data_beautifulsoup_object.findAll('th')[0].text
                logger.info(
                    'get_foreign_data: extracting country from table data | country='
                    + str(country))
                try:
                    # Strip the "(사망 N)" suffix and thousands separators
                    # to isolate the certified count.
                    certified = re.sub(
                        '[,명]', '',
                        re.sub(
                            '\(사망[  ][0-9,]+\)', '',
                            table_data_beautifulsoup_object.findAll('td')
                            [0].text))
                    logger.info(
                        'get_foreign_data: extracting certified from table data | certified='
                        + str(certified))
                    dead = re.findall(
                        '\(사망[  ]([0-9,]+)\)',
                        table_data_beautifulsoup_object.findAll('td')[0].text)
                    logger.info(
                        'get_foreign_data: extracting dead from table data | country='
                        + str(dead))

                    # print('|' + foreign_property.country_dictionary[re.sub('[  ]', '', country)] + '|' + country + '|')
                    #  print('[\'foreign_' + foreign_property.country_dictionary[re.sub('[  ]', '', country)], end='\'')

                    # Map the (whitespace-normalized) Korean name to the
                    # canonical key; dead defaults to 0 when absent.
                    foreign_data = {
                        'country':
                        foreign_property.country_dictionary[re.sub(
                            '[  ]', '', re.sub('[가-힣]^', '', country))],
                        'certified':
                        int(certified),
                        'dead':
                        int(re.sub('[,명]', '', dead[0])) if dead != [] else 0
                    }
                    logger.info(
                        'get_foreign_data: declare foreign data | foreign_data='
                        + str(foreign_data))

                    foreign_data_list.append(foreign_data)
                    logger.info(
                        'get_foreign_data: put foreign data into foreign data list | foreign_data_list='
                        + str(foreign_data_list))
                except Exception as ex:
                    # Country name missing from the dictionary: warn-level.
                    if report_level < 1:
                        report_level = 1
                    dictionary_error_list[0] = 1
                    dictionary_error_list.append([ex, table_data])
                    logger.info(
                        'get_foreign_data: unregistered country name was found | ex='
                        + str(ex) + ' | dictionary_error_list=' +
                        str(dictionary_error_list))
            except Exception as ex:
                if report_level < 2:
                    report_level = 2
                database_error_list[0] = 1
                database_error_list.append([ex, index_no])
                logger.info(
                    'get_foreign_data: cannot extract country from table data | ex='
                    + str(ex) + ' | index_no=' + str(index_no))
        except Exception as ex:
            if report_level < 2:
                report_level = 2
            convert_error_list[0] = 1
            convert_error_list.append([ex, table_data])
            logger.info(
                'get_foreign_data: cannot convert table_data to beautifulsoup object | ex='
                + str(ex) + ' | table_data=' + str(table_data))
        # Remaining rows: country in td[0], "certified (dead)" in td[1].
        # Otherwise the same extraction/error-collection logic as above.
        for table_data, index_no in zip(table_data_rows[1:],
                                        range(1, len(table_data_rows))):
            try:
                table_data_beautifulsoup_object = BeautifulSoup(
                    str(table_data), 'html.parser')
                logger.info(
                    'get_foreign_data: convert table_data to beautifulsoup object | table_data_beautifulsoup_object='
                    + str(table_data_beautifulsoup_object))
                try:
                    country = table_data_beautifulsoup_object.findAll(
                        'td')[0].text
                    logger.info(
                        'get_foreign_data: extracting country from table data | country='
                        + str(country))
                    try:
                        certified = re.sub(
                            '[,명]', '',
                            re.sub(
                                '\(사망[  ][0-9,]+\)', '',
                                table_data_beautifulsoup_object.findAll('td')
                                [1].text))
                        logger.info(
                            'get_foreign_data: extracting certified from table data | certified='
                            + str(certified))
                        dead = re.findall(
                            '\(사망[  ]([0-9,]+)\)',
                            table_data_beautifulsoup_object.findAll('td')
                            [1].text)
                        logger.info(
                            'get_foreign_data: extracting dead from table data | country='
                            + str(dead))

                        # print('|' + foreign_property.country_dictionary[re.sub('[  ]', '', country)] + '|' + country + '|')
                        #  print(',\n\'foreign_' + foreign_property.country_dictionary[re.sub('[  ]', '', country)], end='\'')

                        foreign_data = {
                            'country':
                            foreign_property.country_dictionary[re.sub(
                                '[  ]', '', re.sub('[가-힣]^', '', country))],
                            'certified':
                            int(certified),
                            'dead':
                            int(re.sub('[,명]', '', dead[0]))
                            if dead != [] else 0
                        }
                        logger.info(
                            'get_foreign_data: declare foreign data | foreign_data='
                            + str(foreign_data))

                        foreign_data_list.append(foreign_data)
                        logger.info(
                            'get_foreign_data: put foreign data into foreign data list | foreign_data_list='
                            + str(foreign_data_list))
                    except Exception as ex:
                        if report_level < 1:
                            report_level = 1
                        dictionary_error_list[0] = 1
                        dictionary_error_list.append([ex, table_data])
                        logger.info(
                            'get_foreign_data: unregistered country name was found | ex='
                            + str(ex) + ' | dictionary_error_list=' +
                            str(dictionary_error_list))
                except Exception as ex:
                    if report_level < 2:
                        report_level = 2
                    database_error_list[0] = 1
                    database_error_list.append([ex, index_no])
                    logger.info(
                        'get_foreign_data: cannot extract country from table data | ex='
                        + str(ex) + ' | index_no=' + str(index_no))
            except Exception as ex:
                if report_level < 2:
                    report_level = 2
                convert_error_list[0] = 1
                convert_error_list.append([ex, table_data])
                logger.info(
                    'get_foreign_data: cannot convert table_data to beautifulsoup object | ex='
                    + str(ex) + ' | table_data=' + str(table_data))
    except Exception as ex:
        # table_data_rows[0] raised: the table was empty — fatal.
        if report_level < 3:
            report_level = 3
        logger.info('get_foreign_data: table_data_rows is empty | ex=' +
                    str(ex))
        report_message += '- FATAL: table_data_rows is empty -\n\n\n'
        report_message += str(ex) + '\n'
        report_message += '\n'
        report_message += '\nThis report is about table_data_rows ' + str(
            table_data_rows)
        report_message += '\n'
        report_message += '\n\n\n\n\n'

    #  print('],')

    # Append one report section per error category that fired.
    if convert_error_list[0] == 1:
        report_message += '- ERROR: cannot convert table_data to beautifulsoup object -\n\n\n'
        for error in convert_error_list[1:]:
            report_message += '---------------------------\n'
            report_message += f"{error[0]}\n\ntable_data:\n{error[1]}\n"
        report_message += '---------------------------\n'
        report_message += '\n\n\n\n\n'

    if database_error_list[0] == 1:
        report_message += '- ERROR: cannot extract country from table data -\n\n\n'
        for error in database_error_list[1:]:
            report_message += '---------------------------\n'
            report_message += f"{error[0]}\n\nindex_no:\n{error[1]}\n"
        report_message += '---------------------------\n'
        report_message += '\n\n\n\n\n'

    if dictionary_error_list[0] == 1:
        report_message += '- WARN: unregistered country name was found -\n\n\n'
        for error in dictionary_error_list[1:]:
            report_message += '---------------------------\n'
            report_message += f"{error[0]}\n\ncountry_name:\n{error[1]}\n"
        report_message += '---------------------------\n'
        report_message += '\n\n\n\n\n'

    # Mail the report with a subject that reflects the worst level seen.
    if report_level < 2:
        report_message += 'Crawling finished successfully\n'
        report_message += '\nThis report is based on (Unix Time)' + str(
            int(current_timestamp))
        if report_level == 0:
            mail_sender.send_mail(
                subject=f'[Dropper API](foreign_crawler) INFO: task report',
                message=report_message)
        elif report_level == 1:
            mail_sender.send_mail(
                subject=f'[Dropper API](foreign_crawler) WARN: task report',
                message=report_message)
    elif report_level == 2:
        report_message += 'Some error occurred while crawling\n'
        report_message += '\nThis report is based on (Unix Time)' + str(
            int(current_timestamp))
        mail_sender.send_mail(
            subject=f'[Dropper API](foreign_crawler) ERROR: task report',
            message=report_message)
    else:
        report_message += 'Fatal error occurred while crawling\n'
        report_message += '\nThis report is based on (Unix Time)' + str(
            int(current_timestamp))
        mail_sender.send_mail(
            subject=f'[Dropper API](foreign_crawler) FATAL: task report',
            message=report_message)

    logger.info('get_foreign_data: function ended | foreign_data_list=' +
                str(foreign_data_list))
    return foreign_data_list
# Korean country name (as printed on the crawled page) -> API country key.
# Hoisted to module level so the 200-entry dict is built once, not per call.
_FOREIGN_COUNTRY_DICTIONARY = {
    '중국': 'china', '홍콩': 'hongkong', '대만': 'taiwan', '마카오': 'macau',
    '일본': 'japan', '싱가포르': 'singapura', '태국': 'thailand',
    '말레이시아': 'malaysia', '베트남': 'vietnam', '인도': 'india',
    '필리핀': 'philippines', '캄보디아': 'cambodia', '네팔': 'nepal',
    '러시아': 'russia', '스리랑카': 'srilanka', '아프가니스탄': 'afghanistan',
    '인도네시아': 'indonesia', '부탄': 'bhutan', '몰디브': 'maldives',
    '방글라데시': 'bangladesh', '브루나이': 'brunei', '몽골': 'mongolia',
    '카자흐스탄': 'kazakhstan', '파키스탄': 'pakistan',
    '투르크메니스탄': 'turkmenistan', '이란': 'iran', '쿠웨이트': 'kuwait',
    '바레인': 'bahrain', '아랍에미리트': 'uae', '이라크': 'iraq',
    '오만': 'oman', '레바논': 'lebanon', '이스라엘': 'israel',
    '이집트': 'egypt', '알제리': 'algeria', '카타르': 'qatar',
    '요르단': 'jordan', '튀니지': 'tunisia', '사우디아라비아': 'saudiarabia',
    '모로코': 'morocco', '시리아': 'syria', '소말리아': 'somalia',
    '카보베르데': 'capeverde', '짐바브웨': 'zimbabwe',
    '마다가스카르': 'madagascar', '앙골라': 'angola', '에리트레아': 'eritrea',
    '우간다': 'uganda', '모잠비크': 'mozambique', '미국': 'usa',
    '캐나다': 'canada', '브라질': 'brasil', '멕시코': 'mexico',
    '에콰도르': 'ecuador', '도미니카공화국': 'dominican',
    '아르헨티나': 'argentina', '칠레': 'chile', '콜롬비아': 'columbia',
    '페루': 'peru', '코스타리카': 'costarica', '파라과이': 'paraguay',
    '파나마': 'panama', '볼리비아': 'bolivia', '자메이카': 'jamaica',
    '온두라스': 'honduras',
    '세인트빈센트그레나딘': 'stvincentandthegrenadines', '쿠바': 'cuba',
    '가이아나': 'guyana', '베네수엘라': 'venezuela',
    '앤티가바부다': 'antiguaandbarbuda',
    '트리니다드토바고': 'trinidadandtobago', '우루과이': 'uruguay',
    '세인트루시아': 'saintlucia', '수리남': 'suriname',
    '과테말라': 'guatemala', '바하마': 'bahamas', '엘살바도르': 'elsalvador',
    '바베이도스': 'barbados', '니카라구아': 'nicaragua', '아이티': 'haiti',
    '그레나다': 'grenada', '벨리즈': 'belize',
    '도미니카연방': 'dominicanfederation', '이탈리아': 'italiana',
    '독일': 'germany', '프랑스': 'france', '영국': 'england',
    '스페인': 'spain', '오스트리아': 'austria', '크로아티아': 'croatia',
    '핀란드': 'finland', '스웨덴': 'sweden', '스위스': 'swiss',
    '벨기에': 'belgium', '덴마크': 'danmark', '에스토니아': 'eesti',
    '조지아': 'georgia', '그리스': 'greece', '북마케도니아': 'macedonia',
    '노르웨이': 'norway', '루마니아': 'romania', '네덜란드': 'nederlands',
    '벨라루스': 'belarus', '리투아니아': 'lithuania', '산마리노': 'sanmarino',
    '아제르바이잔': 'azerbaijan', '아이슬란드': 'island', '모나코': 'monaco',
    '룩셈부르크': 'luxembourg', '아르메니아': 'armenia',
    '아일랜드': 'ireland', '체코': 'czecho', '포르투갈': 'portugal',
    '라트비아': 'latvia', '안도라': 'andora', '폴란드': 'poland',
    '우크라이나': 'ukraine', '헝가리': 'hungary',
    '보스니아헤르체고비나': 'bosnaihercegovina', '슬로베니아': 'slovenija',
    '리히텐슈타인': 'liechtenstein', '세르비아': 'serbia',
    '슬로바키아': 'slovakia', '불가리아': 'bulgaria', '몰타': 'malta',
    '몰도바': 'moldova', '알바니아': 'albania', '사이프러스': 'cyprus',
    '터키': 'turkey', '몬테네그로': 'montenegro', '코소보': 'kosovo',
    '우즈베키스탄': 'uzbekistan', '키르기스스탄': 'kyrgyzstan',
    '동티모르': 'easttimor', '미얀마': 'myanmar', '호주': 'australia',
    '뉴질랜드': 'newzealand', '피지': 'fiji',
    '파푸아뉴기니': 'papuanewguinea', '나이지리아': 'nigeria',
    '세네갈': 'senegal', '카메룬': 'cameroon',
    '남아프리카공화국': 'republicofsouthafrica',
    '토고': 'republiquetogolaise', '부르키나파소': 'burkinafaso',
    'DR콩고': 'drcongo', '코트디부아르': 'ivorycoast', '수단': 'sudan',
    '에티오피아': 'ethiopia', '가봉': 'gabon', '가나': 'ghana',
    '기니': 'guineaecuatorial', '케냐': 'kenya', '나미비아': 'namibia',
    '중앙아프리카공화국': 'centralafricanrepublic', '콩고': 'congo',
    '적도기니': 'guinea', '에스와티니': 'eswatini',
    '모리타니아': 'mauritania', '르완다': 'rwanda', '세이셸': 'seychelles',
    '베냉': 'benin', '라이베리아': 'liberia', '탄자니아': 'tanzania',
    '지부티': 'djibouti', '모리셔스': 'mauritius', '감비아': 'gambia',
    '잠비아': 'zambia', '차드': 'chad', '니제르': 'niger',
    '일본크루즈': 'japan_cruise', '팔레스타인': 'palestine',
    '지브롤터': 'gibraltar', '세인트마틴': 'saintmartin',
    '생바르텔레미': 'saintbarthelemy', '바티칸': 'vatican',
    '마르티니크': 'martinique', '프랑스령기아나': 'guyane',
    '패로제도': 'faroeislands', '건지섬': 'guernsey',
    '프랑스령폴리네시아': 'polynesia', '저지섬': 'jersey',
    '프랑스령레위니옹': 'regionreunion', '과들루프': 'guadeloupe',
    '케이맨제도': 'cayman', '퀴라소': 'curacao', '마요트': 'mayotte',
    '푸에르토리코': 'puertorico', '괌': 'guam',
    '미국령버진아일랜드': 'unitedstatesvirginislands', '아루바': 'aruba',
    '버뮤다': 'bermuda', '신트마르틴': 'sintmartin',
    '몬트세라트': 'montserrat', '뉴칼레도니아': 'newcaledonia',
    '그린랜드': 'greenland', '맨섬': 'isleofman',
    '터크스케이커스제도': 'turksandcaicosislands', '합계': 'synthesize'
}


def _report_section(title, ex, about_label, about_value):
    """Build one error section of the mail report.

    Reproduces the legacy layout exactly: title, three newlines, the
    exception text, then a "This report is about <label> <value>" line
    followed by the legacy blank-line padding.
    """
    return (title + '\n\n\n' + str(ex) + '\n\n'
            + '\nThis report is about ' + about_label + ' ' + str(about_value)
            + '\n\n\n\n\n\n')


def _parse_foreign_row(state, table_data, country_tag, value_index,
                       index_label):
    """Parse one <tr> of the foreign status table, mutating *state*.

    state keys used:
      foreign_data_list  -- parsed rows are appended here
      error_list         -- [flag, [exception, row], ...] for unknown countries
      report_message     -- accumulated mail body (error sections appended)
      report_level       -- worst severity so far (0 INFO .. 3 FATAL)
      convert_error_reported / extract_error_reported -- once-only flags

    country_tag is 'th' for the first (summary) row and 'td' otherwise;
    value_index is the <td> index holding the certified/dead figures.
    index_label is the row index echoed in the error report.
    """
    try:
        row_soup = BeautifulSoup(str(table_data), 'html.parser')
        logger.info(
            'get_foreign_data: convert table_data to beautifulsoup object | table_data_beautifulsoup_object='
            + str(row_soup))
    except Exception as ex:
        state['report_level'] = max(state['report_level'], 2)
        logger.info(
            'get_foreign_data: cannot convert table_data to beautifulsoup object | ex='
            + str(ex))
        # Only the first occurrence is written into the mail report.
        if not state['convert_error_reported']:
            state['convert_error_reported'] = True
            state['report_message'] += _report_section(
                '* ERROR: cannot convert table_data to beautifulsoup object *',
                ex, 'table_data', table_data)
        return

    try:
        country = row_soup.findAll(country_tag)[0].text
        logger.info(
            'get_foreign_data: extracting country from table data | country='
            + str(country))
    except Exception as ex:
        state['report_level'] = max(state['report_level'], 2)
        logger.info(
            'get_foreign_data: cannot extract country from table data | ex='
            + str(ex))
        if not state['extract_error_reported']:
            state['extract_error_reported'] = True
            state['report_message'] += _report_section(
                '* ERROR: cannot extract country from table data *', ex,
                'country index number', index_label)
        return

    try:
        cell_text = row_soup.findAll('td')[value_index].text
        # Cell looks like "1,234명 (사망 5)": strip the death suffix, then
        # the thousands separators and the '명' counter suffix.
        certified = re.sub('[,명]', '',
                           re.sub(r'\(사망[  ][0-9,]+\)', '', cell_text))
        dead = re.findall(r'\(사망[  ]([0-9,]+)\)', cell_text)
        foreign_data = {
            'country':
            _FOREIGN_COUNTRY_DICTIONARY[re.sub('[  ]', '', country)],
            'certified': int(certified),
            # No "(사망 n)" suffix means no reported deaths.
            'dead': int(re.sub('[,명]', '', dead[0])) if dead else 0
        }
        logger.info(
            'get_foreign_data: declare foreign data | foreign_data=' +
            str(foreign_data))
        state['foreign_data_list'].append(foreign_data)
        logger.info(
            'get_foreign_data: put foreign data into foreign data list | foreign_data_list='
            + str(state['foreign_data_list']))
    except Exception as ex:
        # Covers both unknown country keys and malformed numeric cells,
        # matching the legacy handler; reported as a WARN, not an ERROR.
        state['report_level'] = max(state['report_level'], 1)
        state['error_list'][0] = 1
        state['error_list'].append([ex, table_data])
        logger.info(
            'get_foreign_data: unregistered country name was found | ex=' +
            str(ex) + ' | error_list=' + str(state['error_list']))


def get_foreign_data(target='', current_timestamp=0):
    """Crawl the overseas COVID-19 status table from *target*.

    Parameters:
        target: URL of the page to crawl.
        current_timestamp: Unix time stamped into the mailed report.

    Returns:
        ``[announced_time_unix, {country row dict}, ...]``.

    Side effects: logs progress and always sends a consolidated report
    mail whose severity (INFO/WARN/ERROR/FATAL) reflects the worst
    problem encountered.
    """
    logger.info('get_foreign_data: function started | target=' + target)

    downloaded_html = urlopen(target)
    logger.info('get_foreign_data: html downloaded')
    beautifulsoup_object = BeautifulSoup(downloaded_html, 'html.parser')
    logger.info('get_foreign_data: html parsed to beautifulsoup object')

    # The announcement time is printed like "3.31. 10시 ..." inside the
    # first <p class="s_descript">; the year is fixed to 2020 by design.
    descript_text = beautifulsoup_object.findAll(
        'p', class_='s_descript')[0].text
    announced_time = [
        '2020',
        re.findall('([0-9]+)[.]', descript_text)[0],
        re.findall('[.]([0-9]+)', descript_text)[0],
        re.findall('([0-9]+)시', descript_text)[0]
    ]
    logger.info('get_foreign_data: get announced time | announced_time=' +
                str(announced_time))

    datetime_object = datetime.datetime.strptime(str(announced_time),
                                                 "['%Y', '%m', '%d', '%H']")
    logger.info(
        'get_foreign_data: convert announced time to datetime object | datetime_object='
        + str(datetime_object))
    # mktime() interprets the struct in the local zone; subtracting
    # 32400s (9h) shifts the KST announcement into UTC.
    announced_time_unix = int(time.mktime(datetime_object.timetuple())) - 32400
    logger.info(
        'get_foreign_data: convert datetime object to unix time | announced_time_unix='
        + str(announced_time_unix))

    raw_table = beautifulsoup_object.findAll('tbody')
    logger.info('get_foreign_data: table picked out | raw_table=' +
                str(raw_table))
    raw_table_beautifulsoup_object = BeautifulSoup(str(raw_table[0]),
                                                   'html.parser')
    table_data_rows = raw_table_beautifulsoup_object.findAll('tr')
    logger.info(
        'get_foreign_data: export table data from raw_table_beautifulsoup_object | table_data_rows='
        + str(table_data_rows))
    # Process rows bottom-up, as the legacy crawler did.
    table_data_rows.reverse()
    logger.info('get_foreign_data: reverse exported data | table_data_rows=' +
                str(table_data_rows))

    state = {
        'foreign_data_list': [announced_time_unix],
        'error_list': [0],
        'report_message': '- Dropper API Foreign Crawling Report -\n\n\n',
        'report_level': 0,  # 0 INFO, 1 WARN, 2 ERROR, 3 FATAL
        'convert_error_reported': False,
        'extract_error_reported': False,
    }

    try:
        # After reversal the first row keeps its country name in a <th>
        # and its figures in td[0]; every other row uses td[0] / td[1].
        # The legacy report labelled the first row "index number 1".
        _parse_foreign_row(state, table_data_rows[0], 'th', 0, 1)
        for index_no in range(1, len(table_data_rows)):
            _parse_foreign_row(state, table_data_rows[index_no], 'td', 1,
                               index_no)
    except Exception as ex:
        # Only the table_data_rows[0] access can raise here: empty table.
        state['report_level'] = max(state['report_level'], 3)
        logger.info('get_foreign_data: table_data_rows is empty | ex=' +
                    str(ex))
        state['report_message'] += _report_section(
            '* FATAL: table_data_rows is empty *', ex, 'table_data_rows',
            table_data_rows)

    report_message = state['report_message']
    report_level = state['report_level']

    if state['error_list'][0] == 1:
        report_message += '* WARN: unregistered country name was found *\n\n\n'
        for data in state['error_list'][1:]:
            report_message += '---------------------------\n'
            report_message += f"{data[0]}\n\n{data[1]}\n"
        report_message += '---------------------------\n'
        report_message += '\n\n\n\n\n'

    # Single mail-dispatch tail; severity mirrors the worst report level.
    if report_level < 2:
        outcome = 'Crawling finished successfully\n'
        severity = 'INFO' if report_level == 0 else 'WARN'
    elif report_level == 2:
        outcome = 'Some error occurred while crawling\n'
        severity = 'ERROR'
    else:
        outcome = 'Fatal error occurred while crawling\n'
        severity = 'FATAL'
    report_message += outcome
    report_message += '\nThis report is based on (Unix Time)' + str(
        int(current_timestamp))
    mail_sender.send_mail(
        subject='[Dropper API](foreign_crawler) ' + severity +
        ': consolidated report',
        message=report_message)

    logger.info('get_foreign_data: function ended | foreign_data_list=' +
                str(state['foreign_data_list']))
    return state['foreign_data_list']
Exemplo n.º 19
0
def _folder_size_gb(folder):
    """Total size in GiB of the regular files directly inside *folder*."""
    return sum(
        os.path.getsize(folder + '/' + file)
        for file in os.listdir(folder)
        if os.path.isfile(folder + '/' + file)) / (1024 * 1024 * 1024)


def check_folder(current_timestamp, folders):
    """Verify that each log folder exists, measure sizes, and mail a report.

    Parameters:
        current_timestamp: Unix time stamped into the mailed report.
        folders: iterable of folder paths to inspect (missing ones are
            created on the fly).

    Side effects: may create folders; always sends one report mail whose
    severity is INFO (all green), WARN (auto-fixed / size warning) or
    ERROR (a folder could not be inspected or created).
    """
    # results[0] is the worst status seen (0 OK, 1 auto-fixed, 2 error);
    # each following entry is [status, {details}] for one folder, in order.
    results = [0]

    report_message = '* Dropper API Log Custodian Report *\n\n\n'

    for folder in folders:
        try:
            if os.path.exists(folder):
                results.append([0, {'size': _folder_size_gb(folder)}])
            else:
                results[0] = max(results[0], 1)
                os.makedirs(folder)
                results.append([
                    1, {
                        'message': 'Folder Created',
                        'size': _folder_size_gb(folder)
                    }
                ])
        except Exception as ex:
            results[0] = max(results[0], 2)
            results.append([2, {'message': ex, 'size': 0}])

    for i, result in enumerate(results[1:]):
        report_message += f"[{folders[i]}] {'GREEN' if result[0] == 0 else 'YELLOW' if result[0] == 1 else 'RED'}\n"

        if result[0] == 0:
            report_message += '---------------------------\n'
            report_message += f"size:\n{result[1]['size']}GB\n"
            report_message += '---------------------------\n'
        elif result[0] == 1:
            report_message += '---------------------------\n'
            report_message += f"{result[1]['message']}\n\nsize:\n{result[1]['size']}GB\n"
            report_message += '---------------------------\n'
        elif result[0] == 2:
            report_message += '---------------------------\n'
            report_message += f"{result[1]['message']}\n"
            report_message += '---------------------------\n'
        report_message += '\n'

    total_size = sum(data[1]['size'] for data in results[1:])

    report_message += '\n\n'
    report_message += f"total size: {total_size}GB"
    # NOTE(review): the separator below starts on the same line as the
    # total-size figure (no preceding '\n') — kept as-is to preserve the
    # exact legacy mail body; confirm whether a newline was intended.
    if total_size > 6:
        results[0] = max(results[0], 1)
        report_message += '---------------------------\n'
        report_message += 'Log is too big. Please backup and empty it immediately.'
        report_message += '---------------------------\n'
    elif total_size > 3:
        results[0] = max(results[0], 1)
        report_message += '---------------------------\n'
        report_message += 'Log is too big. Please backup and empty it.'
        report_message += '---------------------------\n'

    if results[0] == 0:
        # NOTE(review): "while crawling" below looks copy-pasted from the
        # crawler reports; wording kept to preserve the mail body.
        summary = 'Operating finished successfully\n'
        severity = 'INFO'
    elif results[0] == 1:
        summary = 'Error has been fixed automatically\n'
        severity = 'WARN'
    else:
        summary = 'Some error occurred while crawling\n'
        severity = 'ERROR'

    report_message += '\n\n\n\n\n'
    report_message += summary
    report_message += '\nThis report is based on (Unix Time)' + str(
        int(current_timestamp))

    mail_sender.send_mail(
        subject='[Dropper API](api-log-custodian) ' + severity +
        ': task report',
        message=report_message)
Exemplo n.º 20
0
def main():
    """Mail the 30-day paid-upgrade SaaS report with a '$' currency marker."""
    report_csv = (
        "../p30-(paid-users-who-upgraded-in-the-first-30-days)-(saas).csv")
    send_mail(report_csv, "$")
def _status_cell_int(cell):
    """Strip every non-digit from a table cell and return it as int.

    The '0' prefix makes empty / dash-only cells parse as 0 instead of
    raising ValueError.
    """
    return int('0' + re.sub('[^0-9]', '', cell.text))


def get_status_data(target='', current_timestamp=0):
    """Crawl the per-region COVID-19 status table from *target*.

    Parameters:
        target: URL of the status page to crawl.
        current_timestamp: Unix time stamped into the mailed report.

    Returns:
        ``[announced_time_unix, {region row dict}, ...]``.

    Side effects: logs progress and always sends a task-report mail whose
    severity (INFO/WARN/ERROR/FATAL) reflects the worst problem seen.
    """
    logger.info('get_status_data: function started | target=' + target)

    downloaded_html = urlopen(target)
    logger.info('get_status_data: html downloaded')
    beautifulsoup_object = BeautifulSoup(downloaded_html, 'html.parser')
    logger.info('get_status_data: html parsed to beautifulsoup object')

    # Announcement time is printed like "3.31. 10시 ..." inside
    # <p class="info">; the year is fixed to 2020 by the page format.
    info_text = beautifulsoup_object.findAll('p', class_='info')[0].text
    announced_time = [
        '2020',
        re.findall('([0-9]+)[.]', info_text)[0],
        re.findall('[.]([0-9]+)', info_text)[0],
        re.findall('([0-9]+)시', info_text)[0]
    ]
    logger.info('get_status_data: get announced time | announced_time=' +
                str(announced_time))

    datetime_object = datetime.datetime.strptime(str(announced_time),
                                                 "['%Y', '%m', '%d', '%H']")
    logger.info(
        'get_status_data: convert announced time to datetime object | datetime_object='
        + str(datetime_object))
    # mktime() uses the local zone; subtracting 32400s (9h) maps KST to UTC.
    announced_time_unix = int(time.mktime(datetime_object.timetuple())) - 32400
    logger.info(
        'get_status_data: convert datetime object to unix time | announced_time_unix='
        + str(announced_time_unix))

    raw_table = beautifulsoup_object.findAll('tbody')
    logger.info('get_status_data: table picked out | raw_table=' +
                str(raw_table))
    raw_table_beautifulsoup_object = BeautifulSoup(str(raw_table[0]),
                                                   'html.parser')
    table_data_rows = raw_table_beautifulsoup_object.findAll('tr')
    logger.info(
        'get_status_data: export table data from raw_table_beautifulsoup_object | table_data_rows='
        + str(table_data_rows))

    status_data_list = [announced_time_unix]

    # Each error list is [flag, [exception, context], ...]; flag 1 marks
    # that at least one error of that kind occurred.
    convert_error_list = [0]
    database_error_list = [0]
    dictionary_error_list = [0]

    report_message = '* Dropper API Status Crawling Report *\n\n\n'
    report_level = 0  # 0 INFO, 1 WARN, 2 ERROR, 3 FATAL

    if len(table_data_rows) == 0:
        report_level = max(report_level, 3)
        logger.info('get_status_data: table_data_rows is empty')
        report_message += '- FATAL: table_data_rows is empty -\n\n\n'
        report_message += '\n'
        report_message += '\nThis report is about table_data_rows ' + str(
            table_data_rows)
        report_message += '\n'
        report_message += '\n\n\n\n\n'
    else:
        for index_no, table_data in enumerate(table_data_rows):
            try:
                logger.info(
                    'get_status_data: extracting table data | table_data=' +
                    str(table_data))
                row_soup = BeautifulSoup(str(table_data), 'html.parser')
                try:
                    region = row_soup.findAll('th')[0].text
                    logger.info(
                        'get_status_data: extracting region from table data | region='
                        + str(region))
                    data = row_soup.findAll('td')
                    logger.info(
                        'get_status_data: extracting data from table data | data='
                        + str(data))
                    try:
                        status_data = {
                            # NOTE(review): '[가-힣]^' places the '^' anchor
                            # after a Hangul char and can never match, so
                            # that inner sub() is a no-op. Kept as-is to
                            # preserve behavior; '$' was likely intended.
                            'region':
                            status_property.region_dictionary[re.sub(
                                '[  ]', '', re.sub('[가-힣]^', '', region))],
                            'increased': _status_cell_int(data[0]),
                            'increased_foreign': _status_cell_int(data[1]),
                            'increased_local': _status_cell_int(data[2]),
                            'certified': _status_cell_int(data[3]),
                            'isolated': _status_cell_int(data[4]),
                            'unisolated': _status_cell_int(data[5]),
                            'dead': _status_cell_int(data[6]),
                            'percentage':
                            float('0' + re.sub('[^0-9.]', '', data[7].text))
                        }
                        logger.info(
                            'get_status_data: declare status data | status_data='
                            + str(status_data))
                        status_data_list.append(status_data)
                        logger.info(
                            'get_status_data: put status data into status data list | status_data_list='
                            + str(status_data_list))
                    except Exception as ex:
                        # Unknown region key (or malformed cell) -> WARN.
                        report_level = max(report_level, 1)
                        dictionary_error_list[0] = 1
                        dictionary_error_list.append([ex, table_data])
                        logger.info(
                            'get_status_data: unregistered region name was found | ex='
                            + str(ex) + ' | dictionary_error_list=' +
                            str(dictionary_error_list))
                except Exception as ex:
                    report_level = max(report_level, 2)
                    database_error_list[0] = 1
                    database_error_list.append([ex, index_no])
                    logger.info(
                        'get_status_data: cannot extract region or data from table data | ex='
                        + str(ex) + ' | index_no=' + str(index_no))
            except Exception as ex:
                report_level = max(report_level, 2)
                convert_error_list[0] = 1
                convert_error_list.append([ex, table_data])
                logger.info(
                    'get_status_data: cannot convert table_data to beautifulsoup object | ex='
                    + str(ex) + ' | table_data=' + str(table_data))

    # One report section per error category, in the legacy order.
    for error_list, title, label in (
        (convert_error_list,
         '- ERROR: cannot convert table_data to beautifulsoup object -',
         'table_data'),
        (database_error_list,
         '- ERROR: cannot extract region from table data -', 'index_no'),
        (dictionary_error_list,
         '- WARN: unregistered region name was found -', 'region_name'),
    ):
        if error_list[0] == 1:
            report_message += title + '\n\n\n'
            for error in error_list[1:]:
                report_message += '---------------------------\n'
                report_message += f"{error[0]}\n\n{label}:\n{error[1]}\n"
            report_message += '---------------------------\n'
            report_message += '\n\n\n\n\n'

    # Single mail-dispatch tail; severity mirrors the worst report level.
    if report_level < 2:
        outcome = 'Crawling finished successfully\n'
        severity = 'INFO' if report_level == 0 else 'WARN'
    elif report_level == 2:
        outcome = 'Some error occurred while crawling\n'
        severity = 'ERROR'
    else:
        outcome = 'Fatal error occurred while crawling\n'
        severity = 'FATAL'
    report_message += outcome
    report_message += '\nThis report is based on (Unix Time)' + str(
        int(current_timestamp))
    mail_sender.send_mail(
        subject='[Dropper API](status_crawler) ' + severity + ': task report',
        message=report_message)

    logger.info('get_status_data: function ended | status_data_list=' +
                str(status_data_list))
    return status_data_list

def autofix():
    """Attempt self-healing by re-running both crawler scripts in sequence."""
    command = 'python3.6 status_crawler.py ; python3.6 foreign_crawler.py'
    os.system(command)


if __name__ == '__main__':
    # Run the data checker, mail the result, and try an automatic fix
    # (re-running both crawlers) when the first check fails.
    timestamp = time.time()
    result, error_list = check_status(timestamp)

    message = '* Dropper API Data Report *\n\n\n'
    message += assemble_message(result, error_list, timestamp)

    if result[0] == 0:
        mail_sender.send_mail(
            subject='[Dropper API](data_checker) INFO: task report',
            message=message)
    else:
        message += "\n\n\n---------EXECUTE AUTOFIX---------\n\n\n"

        autofix()

        # Re-check after the self-healing attempt and append the outcome.
        timestamp = time.time()
        result, error_list = check_status(timestamp)

        message += assemble_message(result, error_list, timestamp)

        if result[0] == 0:
            mail_sender.send_mail(
                subject='[Dropper API](data_checker) WARN: task report',
                message=message)
        else:
            # BUG FIX: previously no mail was sent at all when autofix
            # failed, silently dropping the failure report. Escalate to
            # ERROR so a persistent failure is still delivered.
            mail_sender.send_mail(
                subject='[Dropper API](data_checker) ERROR: task report',
                message=message)
Exemplo n.º 23
0
def send_email():
    """Send the fixed notification mail, flash a confirmation, and return to the tree view."""
    send_mail(app, "", "asdf")
    flash(u"Wysłano wiadomość")  # Polish: "Message sent"
    destination = url_for('show_tree')
    return redirect(destination)
Exemplo n.º 24
0
 def sendMail(self):
     """Mail this object's generated summary to the current OS login user."""
     recipient = os.getlogin()
     summary = self.generateSummary()
     send_mail(recipient, summary)
Exemplo n.º 25
0
def main():
    """Generate a 10-second report, render it, and send it out by mail."""
    window = timedelta(seconds=10)
    rendered = line_char(generate_report(duration=window))
    send_mail(rendered)
Exemplo n.º 26
0
def post_form():
    """Send a test mail to the posted address and report success or failure."""
    email = request.values.get('email')
    delivered = send_mail(email, 'Тестовое письмо', 'Тестовый текст',
                          ['.', 'templates', '.git'])
    if delivered:
        return 'Письмо отправлено успешно на адрес %s' % email
    return 'Во время отправки письма на адрес %s произошла ошибка' % email