Example No. 1
0
def main():
    # Load the default logging configuration
    lc.setup_logging()
    logger = logging.getLogger(__name__)

    logger.info("----- -> START_UP <- "+"-"*50)

    # Initialize the holiday helper
    holiday.init("START HOLIDAY CONFIG LOADER")

    date_for_test = "2017-06-11"

    # 1: log the full list of configured holidays
    logger.info(holiday.holidays)

    # 2: log a single entry from the holiday list
    logger.info(holiday.holidays[3])

    # 3: check whether the test date falls on a Sunday
    logger.info('is_sunday("{}")? [{}]'.format(date_for_test, holiday.is_sunday(date_for_test)))

    # 4: check whether the test date is a configured holiday
    logger.info('is_holiday("{}")? [{}]'.format(date_for_test, holiday.is_holiday(date_for_test)))

    # 5: check whether the test date counts as a day off work
    logger.info('is_work_day_off("{}")? [{}]'.format(date_for_test, holiday.is_work_day_off(date_for_test)))


    print("END OF TESTS")
Example No. 2
0
def tmp_main():
    """."""
    setup_logging()
    logger = logging.getLogger(__name__)
    cik = int('0000000020')
    scrape = Scrape()
    company, urls = scrape.document_list(cik)
    for url in urls:
        doc, files = scrape.file_list(url)
        logger.info('fiscal_year_end_%s' % doc.filing_date_)
        for record in files:
            f = scrape.extract_rawdata(record)
            logger.info('file size: %s, actual file size: %s' % (f.size_, f.msize))
            hintInterest, hintForex = get_hint(f.source_.decode('utf-8'))
            logger.info('hintInterest:%s, hintForex:%s' %
                        (hintInterest, hintForex))
Example No. 3
0
import logging
import os.path as op  # 'op' alias assumed from the op.join() call below

from multiprocessing import Pool

# from paths_usa import era_path

DOWNLOAD_DIR = '/data/users/kgruber/Data/era5/TX'
COUNTRY = 'TX'

YEARS = range(1950, 1979)
MONTHS = list(range(1, 13))

north = 36
south = 25
west = -107
east = -93

setup_logging(op.join(DOWNLOAD_DIR, 'download.log'))




def download_era5(year):
    # API documentation for downloading a subset:
    # https://confluence.ecmwf.int/display/CKB/Global+data%3A+Download+data+from+ECMWF+for+a+particular+area+and+resolution
    # https://retostauffer.org/code/Download-ERA5/

    # Format for downloading ERA5: North/West/South/East
    bounding_box = "{}/{}/{}/{}".format(north, west, south, east)

    logging.info("Downloading bounding_box=%s for years=%s and months=%s",
                 bounding_box, YEARS, MONTHS)
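    # --- sketch of the actual retrieval step, not part of the original snippet ---
    # Assumes the cdsapi client and the ERA5 single-levels dataset; the variable
    # list and output file name below are illustrative only.
    import cdsapi
    client = cdsapi.Client()
    client.retrieve(
        'reanalysis-era5-single-levels',
        {
            'product_type': 'reanalysis',
            'variable': ['100m_u_component_of_wind', '100m_v_component_of_wind'],
            'year': str(year),
            'month': ['{:02d}'.format(m) for m in MONTHS],
            'day': ['{:02d}'.format(d) for d in range(1, 32)],
            'time': ['{:02d}:00'.format(h) for h in range(24)],
            'area': bounding_box,  # North/West/South/East
            'format': 'netcdf',
        },
        op.join(DOWNLOAD_DIR, 'era5_{}_{}.nc'.format(COUNTRY, year)))


if __name__ == '__main__':
    # Fetch the years in parallel with the Pool imported above (sketch).
    with Pool(4) as pool:
        pool.map(download_era5, YEARS)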
Example No. 4
0
import os
import logging
import logging.config
import json
import requests

from flask import escape
from flask import abort

from logging_config import setup_logging

setup_logging()
logger = logging.getLogger(__name__)

from config import CONFIG
from bitbucket2chat import handle_bitbucket_event


def main(request):
    bot_name = CONFIG['bot_name'].get()
    
    if request is None:
        logger.info('Hi, I\'m %s!', bot_name)
        return 'OK'
    
    logger.info('Bot %s is alive!', bot_name)

    logger.info('This request came from: %s', request.remote_addr)

    if request.headers.get('User-Agent') == 'Bitbucket-Webhooks/2.0':
        event = request.get_json(silent=True)
        # hand the parsed payload to the chat bridge imported above
        # (a minimal, assumed completion of the truncated handler)
        return handle_bitbucket_event(event)
Example No. 5
0
        logger.info(time.asctime() + '\t' + 'begin' + '\t' + s)

        if user_urls_num != 0:
            if userSpider_start_urls_num == 0:
                logger.info(time.asctime() + '\t' + 'run_user_spider')
                run_user_spider(r, 50)

        if reply_urls_num != 0:
            if replySpider_start_urls_num == 0:
                logger.info(time.asctime() + '\t' + 'run_reply_spider')
                run_reply_spider(r, 1000)
        elif post_urls_num != 0:
            if postSpider_start_urls_num == 0:
                logger.info(time.asctime() + '\t' + 'run_post_spider')
                run_post_spider(r, 1000)
        else:
            if listSpider_start_urls_num == 0:
                logger.info(time.asctime() + '\t' + 'run_list_spider')
                run_list_spider(r)

        # print(time.asctime() + '\t' + 'sleep')
        time.sleep(60)


if __name__ == '__main__':
    yaml_path = 'logging_config.yaml'
    setup_logging(yaml_path)
    run()
Example No. 6
0
        old_key = set(sqlserver._query(
            'select [KEY] from IT_OPS.XQY_TGT_TEST2')['KEY'])
        add_key = list(execl_key-old_key)
        if len(add_key) > 0:
            logger.info(f"new key:{add_key}")
            new_df = DataFrame(columns=table_column)
            for key in add_key:
                new_df = new_df.append(
                    df.loc[df['KEY'] == key], ignore_index=True)
            logger.info(f'new rows to insert: {len(new_df)}')
            # new_df.to_sql(name='XQY_TGT_TEST2', con=engine,
            #               if_exists='append', schema='IT_OPS', index=False, chunksize=1000)
            sqlserver._insert(new_df, 'XQY_TGT_TEST2', 'IT_OPS')
            logger.info(f'insert successful, rows inserted: {len(new_df)}')
        else:
            logger.info('no new data to insert')
    end = time.time()
    runtime = end-starttime
    logger.info(f"Running time:{runtime}")


if __name__ == "__main__":
    logger = logging.getLogger(__name__)
    try:
        setup_logging(default_path='logging.json',
                      default_level=logging.INFO,
                      env_key='LOG_CFG')
        main()
    except Exception as e:
        logger.error(e)
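The setup_logging call above (default_path, default_level, env_key) matches a widely used recipe that loads a dictConfig-style JSON file; here is a minimal sketch under that assumption (the project's actual implementation may differ):

import json
import logging
import logging.config
import os


def setup_logging(default_path='logging.json',
                  default_level=logging.INFO,
                  env_key='LOG_CFG'):
    """Configure logging from a JSON config file, falling back to basicConfig."""
    path = os.getenv(env_key, default_path)
    if path and os.path.exists(path):
        with open(path, 'rt') as f:
            logging.config.dictConfig(json.load(f))
    else:
        logging.basicConfig(level=default_level)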
Example No. 7
0
def main():
    """保存公司基本信息."""
    setup_logging()
    logger = logging.getLogger(__name__)
    conn, cur = get_conn()
    sql = "SELECT url from sec.tb_file"
    sqlOk = "SELECT id, is_ok from sec.tb_company where cik = %s"
    sqlInsertCompany = """INSERT into sec.tb_company(cik, name, sic, location, state_of_inc,
                        fiscal_year_end, baddr_state, baddr_city,
                        baddr_street, baddr_zip, baddr_phone,
                        maddr_street, maddr_city, maddr_state,
                        maddr_zip, ctime)
                        values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
                        %s,%s,%s,%s,%s)"""
    sqlUpdateCompany = "UPDATE sec.tb_company set is_ok = 1 where id = %s"
    sqlDoc = "SELECT id from sec.tb_document where url = %s"
    sqlInsertDoc = """INSERT into sec.tb_document(acc_no, url, company_id, type, filing_date,
                    period_of_report, accepted, documents, company_name,
                    sic, state_of_inc, fiscal_year_end, baddr_street,
                    baddr_city, baddr_state, baddr_zip, baddr_phone,
                    maddr_street, maddr_city, maddr_state, maddr_zip)
                    values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
                    %s,%s,%s,%s,%s,%s)"""
    sqlInsertFile = "INSERT into sec.tb_file(company_id, doc_id, url, seq, describtion, type, \
                    size) values(%s,%s,%s,%s,%s,%s,%s)"

    fileSet = set()
    if cur.execute(sql):
        fileSet = set(record[0] for record in cur.fetchall())
    scrape = Scrape()
    wb = load_workbook('./demand20180810/Sample_short.xlsx')
    sheet = wb.get_sheet_by_name('Sample_short2')
    for v in sheet.values:
        ctime = time.strftime('%Y-%m-%d %H:%M:%S')
        cik = v[7]
        try:
            int(cik)
        except ValueError as e:
            print("cik格式不符", cik)
            continue
        if len(cik) != 10:
            print("cik长度不符", cik)
            continue
        logger.debug('cik:%s' % cik)

        companyid = None
        isOk = 0
        if cur.execute(sqlOk, cik):
            record = cur.fetchone()
            companyid = record[0]
            isOk = record[1]
        if not isOk:
            print("cik采集", cik)
            try:
                company, urls = scrape.document_list(cik)
            except Exception as e:
                if e.args[0] == 'No matching CIK':
                    logger.debug('No matching CIK:%s' % cik)
                    continue
                else:
                    raise e
        else:
            print("cik已存在", cik)
            continue
        if not companyid:
            print(company)
            cur.execute(sqlInsertCompany,
                        (company.cik_, company.name_, company.sic_,
                         company.location_, company.state_of_inc_,
                         company.fiscal_year_end_, company.baddr_state_,
                         company.baddr_city_, company.baddr_street_,
                         company.baddr_zip_, company.baddr_phone_,
                         company.maddr_street_, company.maddr_city_,
                         company.maddr_state_, company.maddr_zip_, ctime))
            companyid = cur.lastrowid
            conn.commit()
        logger.debug('company id:%s, cik:%s' % (companyid, cik))
        for url in urls:
            doc, files = scrape.file_list(url)
            doc.company_id_ = companyid
            if cur.execute(sqlDoc, url):
                docId = cur.fetchone()[0]
            else:
                cur.execute(
                    sqlInsertDoc,
                    (doc.acc_no_, url, doc.company_id_, doc.type_,
                     doc.filing_date_, doc.period_of_report_, doc.accepted_,
                     doc.documents_, doc.company_name_, doc.sic_,
                     doc.state_of_inc_, doc.fiscal_year_end_,
                     doc.baddr_street_, doc.baddr_city_, doc.baddr_state_,
                     doc.baddr_zip_, doc.baddr_phone_, doc.maddr_street_,
                     doc.maddr_city_, doc.maddr_state_, doc.maddr_zip_))
                docId = cur.lastrowid
                conn.commit()
            for record in files:
                if record[2] not in fileSet:
                    f = File()
                    f.seq_ = record[0]
                    f.describtion_ = record[1]
                    f.url_ = record[2]
                    f.type_ = record[3]
                    f.size_ = record[4]
                    f.company_id_ = companyid
                    f.doc_id_ = docId
                    cur.execute(sqlInsertFile,
                                (f.company_id_, f.doc_id_, f.url_, f.seq_,
                                 f.describtion_, f.type_, f.size_))
                    conn.commit()
                    fileSet.add(f.url_)
        cur.execute(sqlUpdateCompany, companyid)
        conn.commit()
Example No. 8
0
            if alert.failed_alerts:
                if environ.get('SKIP_EMAIL_FOR_SUCCESSFUL_REMEDIATION',
                               'false') == 'true':
                    if not all(
                            failure.get('remediated', False)
                            for failure in alert.failed_alerts):
                        alert.email_results()
                else:
                    alert.email_results()

        log.handlers = [logging_config.get_normal_formatter()]
        log.info(f'Finished processing alerts for Cluster: {cluster}.')


if __name__ == '__main__':
    logger: Logger = logging_config.setup_logging()
    cluster_contexts: List[str] = environ.get('CLUSTER_CONTEXTS',
                                              'current').split(',')

    # Import all modules below the `alerts/` directory
    alert_plugins = (import_module(f'alerts.{alert_module.stem}')
                     for alert_module in Path('./alerts').glob('*.py'))
    # Load all classes from each module that inherits from the BaseAlert abstract class
    alert_classes = (
        cls for plugin in alert_plugins
        for cls in getmembers(plugin,
                              predicate=lambda curr_class: isclass(curr_class)
                              and BaseAlert in curr_class.__bases__) if cls)

    schedule.every(1).hour.do(process_alerts_and_remediations, logger,
                              cluster_contexts, alert_classes)
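    # --- sketch of the dispatch loop, not part of the original snippet ---
    # schedule only registers jobs; they execute when run_pending() is called,
    # so a loop like this is assumed to follow (it also assumes `import time`
    # in the truncated header of the example).
    while True:
        schedule.run_pending()
        time.sleep(60)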