예제 #1
0
파일: app.py 프로젝트: StasEvseev/adminbuy
def init_app(application):
    """Wire configuration, extensions and logging into *application*.

    Returns the same application object for chaining.
    """
    from resources import api
    from assets import assets
    from applications.security.auth import login_manager
    from security import security

    # Core settings; SQLALCHEMY_ECHO stays on (verbose SQL logging).
    settings = {
        'SQLALCHEMY_DATABASE_URI': DATABASE_URI,
        'SECURITY_TOKEN_AUTHENTICATION_HEADER': "AUTHORIZATION",
        'SECURITY_REMEMBER_SALT': "SALT123123123",
        'SQLALCHEMY_ECHO': True,
        'SECRET_KEY': SECRET_KEY,
    }
    for key, value in settings.items():
        application.config[key] = value
    application.permanent_session_lifetime = timedelta(minutes=30)

    # Initialise every extension against this app instance.
    Triangle(application)
    for extension in (assets, api, db, login_manager, security):
        extension.init_app(application)
    api.application = application

    # Expose handles for convenient access elsewhere.
    application.db = db
    application.api = api

    if IS_PROD:
        init_logging(application)

    return application
예제 #2
0
def main(conf):
    print 'server starting, please wait.'
    conf = configure.make_conf(conf)

    #初始化日志
    log.init_logging(conf.log.log_conf)

    #初始化web服务器
    import webserver
    web = webserver.CWebServer(conf)

    def shutdown(signum, frame):
        # shutdown
        web.shutdown()
        log.shutdown()

        print 'server is going down now.'
        import sys
        sys.exit()

    # register signal(SIGINT) handler
    import signal
    signal.signal(signal.SIGINT, shutdown)
    signal.signal(signal.SIGTERM, shutdown)
    signal.signal(signal.SIGQUIT, shutdown)
    signal.signal(signal.SIGABRT, shutdown)

    print 'server started!'
    # finally, start game!
    web.start()
예제 #3
0
파일: app.py 프로젝트: StasEvseev/buy_api
def create_app():
    """Build and fully configure the Flask application instance."""
    application = Flask(__name__)

    # Basic configuration.
    application.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
    # application.config['BABEL_DEFAULT_LOCALE'] = 'ru-ru'
    application.config['SECRET_KEY'] = SECRET_KEY
    application.permanent_session_lifetime = timedelta(minutes=30)

    # Hook every extension up to the app.
    Triangle(application)
    for extension in (assets, api, db, admin, login_manager):
        extension.init_app(application)
    api.application = application
    application.db = db
    application.api = api

    # i18n: always prefer Russian.
    babel = Babel(application)

    @babel.localeselector
    def _select_locale():
        return request.accept_languages.best_match(["ru"])

    init_logging(application)

    return application
예제 #4
0
파일: test_network.py 프로젝트: nido/mis
def main():
    """run the test: start a TCP server, connect a client, issue one RPC"""
    init_logging()
    rpc_server = TCPServer()
    # Serve in the background so the client below can reach it.
    start_new_thread(server, (rpc_server,))
    sleep(1)  # crude wait for the listener to come up
    client = TCPClient('127.0.0.1')
    connection = client.connect()
    connection.rpc_call("help")
예제 #5
0
def main():
    """Configure logging and signals, then serve the IO loop on port 8001."""
    init_logging('./error.log', 'DEBUG')

    # Route termination signals through the shared handler.
    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, signal_handler)

    app_server = MyServer()
    app_server.listen(8001)
    ioloop.IOLoop.instance().start()
예제 #6
0
파일: main.py 프로젝트: nido/mis
def main():
    """starts the program"""
    init_logging()

    if len(argv) > 1:
        print "Usage: " + argv[0] + " [command [option]]"
        if argv[1] == 'help':
            usage()
        if argv[1] == 'index':
            path = '.'
            if len(argv) > 2:
                path = abspath(argv[2])
            index(path)
        if argv[1] == 'batch':
            batch_update()
    else:
        console()
예제 #7
0
import openpyxl
import random
import time
import os
from log import init_logging
logger = init_logging(level='DEBUG')
time_now = time.strftime("%Y%m%d_%H_%M", time.localtime())
# Output path prefix. Backslashes are now escaped explicitly: the original
# literal relied on "\p", "\e", "\d" and "\ " not being recognised escape
# sequences, which raises SyntaxWarning on modern Python (and is slated to
# become an error). The resulting byte value is unchanged.
# NOTE(review): the trailing "\ " (backslash + space) looks accidental and
# yields names like "...data\ <timestamp>test_case.xlsx" -- confirm intent.
file_path = "D:\\python script\\excel\\data\\ " + time_now
'''
if not os.path.exists(file_path):
    os.makedirs(file_path)
    print('Directory created  ' + file_path)
'''
# Create a workbook with two test-case sheets and save it once up front.
wb = openpyxl.Workbook()
wb.create_sheet('test_case1')
wb.create_sheet('test_case2')
# Save as "<file_path>test_case.xlsx"; file_path acts as a prefix, not a dir.
wb.save(file_path + "{}.xlsx".format('test_case'))


# Write a test result into the generated excel workbook.
def write_data(sheet_name, row, col, value):
    """Write *value* into cell (row, col) of sheet *sheet_name* and save.

    The workbook is re-opened and re-saved on every call, so concurrent
    writers would clobber each other -- acceptable for sequential test runs.
    """
    # NOTE(review): removed a stray mis-indented fragment that followed the
    # save() call (a logging line referencing an undefined `e` plus an
    # unreachable `return False`); it was fused in from another snippet and
    # made this function a syntax error.
    workbook1 = openpyxl.load_workbook(file_path + 'test_case.xlsx')
    sheet = workbook1[sheet_name]
    sheet.cell(row, col).value = value
    workbook1.save(file_path + "{}.xlsx".format('test_case'))


def main():
    """Validate every stored proxy item and keep the live ones."""
    for proxy_item in ProxyItemsDB.get_proxy_items():
        if not valid_proxy(proxy_item):
            continue
        AlivebaicDB.upsert_proxy_item(proxy_item)
        # Randomised pause (0-2s) between validations.
        time.sleep(2 * random.random())


def init_defaultencoding():
    # Python 2-only hack: re-expose sys.setdefaultencoding (deleted by
    # site.py at interpreter startup) via reload(), then force UTF-8 as the
    # implicit str<->unicode codec. Unnecessary and unavailable on Python 3.
    import sys
    reload(sys)
    sys.setdefaultencoding("utf-8")
    print "sys default encoding: ", sys.getdefaultencoding()


if __name__ == "__main__":
    init_defaultencoding()
    init_logging("log/check_proxy_to_baic.log",
                 "log/check_proxy_to_baic_2.log")
    logging.info("main begin!!-------")
    main()
    logging.info("success finish!!-------")
    pass
예제 #9
0
# python 3.7.4
# coding = utf-8
# filename api.py
# author [email protected]/www.cdzcit.com,
#        create by VIM at 2019/12/30

import ctypes
from gdapi.data_paser.parser_functions import FromMdarec
from gdapi.data_paser.const import *
from pprint import pprint
import time
import os
import log

LOGGER = log.init_logging('.\\gdlog.txt', 'debug')


def set_OnReceiveData(pUserParam, nDate, nMarketId, sCode, sName, uType,
                      nServiceId, pData, nLen):
    """
    光大接口数据回调函数
    void  __stdcall OnReceiveData(
        void* pUserParam,   # [in]用户自定义参数,由用户调用TDR_Create时传入
        T_I32 nDate,        # [in]日期
        T_I32 nMarketId,    # [in]市场代码,参见tdr.h中对市场进行的宏定义
        const char* sCode,  # [in]证券代码
        const char* sName,  # [in]证券名称
        T_U32 uType,        # [in]证券类型
        T_I32 nServiceId,   # [in]服务数据ID,比如行情、逐笔成交等,参见tdr.h文件定义
        void* pData,        # [in]数据内容
        T_I32 nLen          # [in]数据长度
# -*- coding:utf-8 -*-
__author__ = 'zhaojm'

import pymongo
import logging

# Shared MongoDB handle for the qianzhan database (local instance).
mongo_client = pymongo.MongoClient("localhost:27017")
db = mongo_client["qianzhan"]

from log import init_logging

init_logging('log/merge_detail.log', 'log/merge_detail_2.log')

# Stream every detail document (without _id) and upsert-merge it into the
# base collection, matching on company_name.
cur = db.company_info_items_detail.find({}, {"_id": 0}).batch_size(50)
i = 0  # progress counter, logged once per document
for item in cur:
    i += 1
    logging.info(i)
    # NOTE(review): legacy positional update(spec, doc, upsert, multi) --
    # deprecated in modern pymongo; presumably upsert=True, multi=True.
    db.company_info_items_base.update({'company_name': item['company_name']},
                                      {'$set': item}, True, True)
예제 #11
0
# mongo_client = pymongo.MongoClient(MONGO_URI)
# Default connection (localhost:27017).
mongo_client = pymongo.MongoClient()

qyxy_baic_db = mongo_client["qyxy_baic"]
other_from_excel_db = mongo_client["other_from_excel"]

import sys

# Python 2-only hack: restore sys.setdefaultencoding via reload() and make
# UTF-8 the implicit str<->unicode codec for the rest of the process.
reload(sys)
sys.setdefaultencoding("utf-8")
print "sys default encoding: ", sys.getdefaultencoding()



if __name__ == "__main__":
    init_logging('1.log', '2.log')

    company_list = []

    cur = other_from_excel_db.beijinggaoxin.find()

    for item in cur:
        search_key = item[u'company_name']
        # logging.debug(search_key)
        if qyxy_baic_db.search_key_need.find_one({"search_key": search_key}):
            # logging.debug("pass............")
            print "pass.."
            pass
        elif qyxy_baic_db.search_key_have.find_one({"search_key": search_key}):
            # logging.debug("pass............")
            print "pass.."
예제 #12
0
def mymain(args):
    """Sync quotation/news data from tushare into MySQL using worker threads.

    Spawns one thread per enabled data category (daily/weekly/monthly
    quotations, their qfq/hfq adjusted variants, daily indexes, news and
    adjust factors), waits for all of them to finish, then runs the final
    index sync against the same date range.

    :param args: parsed CLI namespace (logdir, loglevel, startdate, enddate,
                 retry, plus one boolean flag per data category).
    """
    stocks_list = []
    threads_list = []
    today = time.strftime('%Y%m%d', time.localtime(time.time()))

    # Logging goes to <logdir>/<module>_<date>.txt.
    if not os.path.exists(os.path.expanduser(args.logdir)):
        os.mkdir(os.path.expanduser(args.logdir))
    logger = log.init_logging(
        os.path.join(os.path.expanduser(args.logdir),
                     '%s_%s.txt' % (__name__, today)), args.loglevel)

    # tushare API handle; nothing below can run without it.
    pro = tsapi.init_tushare(logger, tsapi.MYTOKEN)
    if pro is None:
        print('Init tushare error, exit')
        exit(1)

    # Dedicated pool for the final sync step; also used to list stock codes.
    conn_sync = dbpool.MyPymysqlPool(logger, 'MysqlDatabaseInfo')
    for d in conn_sync.getAll('select ts_code from t_stocks'):
        stocks_list.append(d['ts_code'])

    logger.info('----%s begin----' %
                time.strftime('%Y%m%d %H%M%S', time.localtime(time.time())))

    # An empty startdate means "today only".
    if args.startdate == '':
        startdate = today
        enddate = today
    else:
        startdate = args.startdate
        enddate = args.enddate

    def _start_worker(desc, target, extra_args, dbinfo='MysqlDatabaseInfo'):
        # Spawn one worker thread with its own DB pool. On failure log the
        # same message the old per-category stanzas produced; on success
        # record the thread so the wait loop below can join on it.
        try:
            conn = dbpool.MyPymysqlPool(logger, dbinfo)
            worker = threading.Thread(target=target,
                                      args=(logger, pro, conn) + extra_args)
            worker.start()
        except Exception as e:
            logger.error('启动%s线程失败: %s' % (desc, e))
        else:
            threads_list.append(worker)

    # Argument tails shared by the worker families.
    plain_args = (startdate, args.retry)
    ranged_args = (stocks_list, startdate, enddate, args.retry)
    # News workers take dash-formatted dates (YYYY-MM-DD).
    news_args = ('%s-%s-%s' % (startdate[:4], startdate[4:6], startdate[6:8]),
                 '%s-%s-%s' % (enddate[:4], enddate[4:6], enddate[6:8]),
                 args.retry)

    if args.daily_quotation:
        _start_worker('日行情', daily, plain_args)
    if args.weekly_quotation:
        _start_worker('周行情', weekly, plain_args)
    if args.monthly_quotation:
        _start_worker('月行情', monthly, plain_args)
    if args.qfq_daily_quotation:
        _start_worker('前复权日行情', qfq_daily, ranged_args)
    if args.qfq_weekly_quotation:
        _start_worker('前复权周行情', qfq_weekly, ranged_args)
    if args.qfq_monthly_quotation:
        _start_worker('前复权月行情', qfq_monthly, ranged_args)
    if args.hfq_daily_quotation:
        _start_worker('后复权日行情', hfq_daily, ranged_args)
    if args.hfq_weekly_quotation:
        _start_worker('后复权周行情', hfq_weekly, ranged_args)
    if args.hfq_monthly_quotation:
        _start_worker('后复权月行情', hfq_monthly, ranged_args)
    if args.daily_index:
        _start_worker('每日指标', tsindex, plain_args)
    if args.flash_news:
        _start_worker('快讯', flash_news, news_args,
                      dbinfo='MysqlDatabaseInfo-news')
    if args.major_news:
        _start_worker('长篇新闻', major_news, news_args,
                      dbinfo='MysqlDatabaseInfo-news')
    if args.adj_factor:
        _start_worker('复权因子', adj_factor, plain_args)

    logger.info('Business thread start completed, all %d threads' %
                len(threads_list))

    # Poll until every worker thread has finished.
    now = time.time()
    while any(t.is_alive() for t in threads_list):
        time.sleep(1)

    logger.info('Sync daily quotation and indexs')
    index_sync(logger, conn_sync, stocks_list, tradedate=startdate)
    conn_sync.dispose()
    logger.info('----Complete in [%d] seconds----' % (time.time() - now))
예제 #13
0
# -*- coding:utf-8 -*-
__author__ = 'zhaojm'

from log import init_logging
from mongo import ProxyItemsDB, ProxyItemsDropDB, ProxyItemsTmpDB

from valid_proxy import valid_proxy
from get_proxy import GetProxy


def main():
    """Drain the tmp proxy collection, sorting items into live/drop DBs."""
    proxy_source = GetProxy(ProxyItemsTmpDB)
    while True:
        candidate = proxy_source.get_proxy()
        validated = valid_proxy(candidate)
        # Keep validated proxies, archive the rest, then drop the tmp copy.
        if validated:
            ProxyItemsDB.upsert_proxy_item(validated)
        else:
            ProxyItemsDropDB.upsert_proxy_item(candidate)
        ProxyItemsTmpDB.remove_proxy_item(candidate)


if __name__ == "__main__":
    init_logging("log/valid_tmp.log", "log/valid_tmp_2.log")
    main()
    pass
예제 #14
0
def init_logging(app_config):
    """Thin wrapper delegating to log.init_logging with the app config."""
    # Alias the import so it does not shadow this wrapper's own name.
    from log import init_logging as _log_init
    _log_init(app_config)
예제 #15
0
파일: upq.py 프로젝트: silentwings/upq
def main(argv=None):
    """Entry point: parse options, daemonize, start the upq socket server.

    Runs until SIGINT/SIGTERM (handled by program_cleanup, which exits 0);
    on any setup failure the traceback is printed and the process exits 1.
    """
    if argv is None:
        argv = sys.argv

    server = None

    # SIGINT signal handler
    def program_cleanup(sig_num, frame):
        # Graceful shutdown: stop the server, close the DB, then exit 0.
        logger = log.getLogger("upq")
        logger.info("Shutting down socket server...")
        server.shutdown()
        logger.info("Disconnecting from DB...")
        upqdb.UpqDB().cleanup()
        log.getLogger("upq").info("Good bye.")
        sys.exit(0)

    usage = "usage: %prog -c CONFIGFILE [options]"
    parser = OptionParser(usage)
    parser.add_option("-c",
                      "--config",
                      dest="configfile",
                      default="",
                      help="path to config file CONFIGFILE")
    #TODO: use this to en/disable daemonization
    #		parser.add_option("-d", "--daemonize",
    #		help="detach from terminal etc")
    parser.add_option("-l",
                      "--logfile",
                      dest="logfile",
                      default="",
                      help="path to logfile LOGFILE")
    (options, argv) = parser.parse_args()

    try:
        # read ini file
        UpqConfig(options.configfile, options.logfile)
        UpqConfig().readConfig()

        #FIXME: remove following line + how does this $%$!" work?
        del UpqConfig().daemon['pidfile']
        #	 if UpqConfig().daemon.has_key('pidfile'):
        #		 lockfile=UpqConfig().daemon['pidfile']
        #		 UpqConfig().daemon['pidfile']=pidlockfile.TimeoutPIDLockFile(lockfile, acquire_timeout=1)
        context = daemon.DaemonContext(**UpqConfig().daemon)
        # daemonize
        context.stdout = sys.stderr
        context.stderr = sys.stderr

        upq = Upq()
        with context:
            # initialize logging
            logger = log.init_logging(UpqConfig().logging)
            logger.info("Starting logging...")
            logger.debug(UpqConfig().config_log)
            # setup and test DB
            logger.info("Connecting to DB...")
            db = upqdb.UpqDB()
            db.connect(UpqConfig().db['url'], UpqConfig().db['debug'])
            db.version()
            # start server
            logger.info("Starting socket server...")
            server = upq.start_server()

        # NOTE(review): the handlers below are installed only after the
        # DaemonContext block is exited; presumably the context is configured
        # not to terminate the process here -- confirm against the daemon
        # settings loaded from UpqConfig().
        # except SIGINT and SIGTERM
        signal.signal(signal.SIGINT, program_cleanup)
        signal.signal(signal.SIGTERM, program_cleanup)

        log.getLogger("upq").info(
            "Server running until receiving SIGTERM or SIGINT / Ctrl+C.")
        signal.pause()

    except Exception:
        traceback.print_exc(file=sys.stderr)
    # Best-effort DB cleanup; this path is reached on failure (a normal
    # shutdown exits inside program_cleanup with status 0).
    try:
        db.cleanup()
    except:
        pass
    sys.exit(1)
예제 #16
0
    if len(src_list) < size:
        new_list.append(src_list)
        return new_list
    else:
        new_list.append(src_list[:size])
        return split_list(src_list[size:], size, new_list)


if __name__ == '__main__':
    # init_deamon()
    today = time.strftime('%Y%m%d', time.localtime(time.time()))
    # Ensure the log directory exists, then log to .\logs\<name>tdx_<date>.txt.
    logdir_exp = os.path.expanduser('.\\logs\\')
    if not os.path.exists(logdir_exp):
        os.makedirs(logdir_exp)
    logger = log.init_logging(
        os.path.join(logdir_exp, '%s_%s.txt' % (__name__ + 'tdx', today)),
        'info')
    threads_list = []

    start = time.time()
    dbpool = MyPymysqlPool(None, 'MysqlDatabaseInfo')
    # Probe for the t_min_qf table, dump the result and stop: the exit()
    # below means everything after it is disabled debug leftovers.
    ret = dbpool.getAll(
        'select table_name from information_schema.tables where table_schema="quantification" and table_type="base table" and table_name="t_min_qf";'
    )
    print(ret)
    exit()
    dbpool.common_execute(
        'CREATE TABLE `t_min_qfq_000001` (`ts_code` varchar(128) NOT NULL COMMENT "tushare代码",`timestamp` '
        'varchar(128) NOT NULL COMMENT "交易日期",`open` varchar(128) DEFAULT NULL COMMENT "开盘价",'
        '`close` varchar(128) DEFAULT NULL,`low` varchar(128) DEFAULT NULL,`high` varchar(128) '
        'DEFAULT NULL,`volume` varchar(128) DEFAULT NULL,`money` varchar(128) DEFAULT NULL,'
예제 #17
0
# -*- coding:utf-8 -*-
__author__ = 'zhaojm'

import pymongo
import logging

# MONGO
MONGO_URI = "localhost:27017"
MONGO_DB = "qianzhan"
# MONGO_NEEQ_DB = "neeq"
# qichacha_db = "qichacha"

# Source (qichacha) and destination (qianzhan) database handles.
mongo_client = pymongo.MongoClient(MONGO_URI)
db = mongo_client[MONGO_DB]
qichacha_db = mongo_client['qichacha']

from log import init_logging

init_logging('log/merge.log', 'log/merge_2.log')

# Merge every qichacha company document (without _id) into the base
# collection, upserting by company_name.
cur = qichacha_db.company_info_items.find({}, {"_id": 0}).batch_size(50)
i = 0  # progress counter, logged once per document
for item in cur:
    i += 1
    logging.info(i)
    # NOTE(review): legacy positional update(spec, doc, upsert, multi) --
    # deprecated in modern pymongo; presumably upsert=True, multi=True.
    db.company_info_items_base.update({'company_name': item['company_name']},
                                      {'$set': item}, True, True)
예제 #18
0
def main():
    """CLI entry point: train or evaluate VideoMatch on a DAVIS dataset.

    All behaviour is driven by parse_args(): 'train' mode splits the pair
    sampler into train/validation loaders and runs train_vm; 'eval' mode
    streams full sequences through eval_vm.
    """

    parsed_args = parse_args()
    init_logging(parsed_args.logger)

    # dataset related
    davis_dir = parsed_args.dataset
    year = parsed_args.year
    dataset_mode = parsed_args.set
    seq_names = parsed_args.sequences
    shuffle = not parsed_args.no_shuffle

    # model related
    mode = parsed_args.mode
    cuda_dev = parsed_args.cuda_device
    model_save_path = parsed_args.model_save
    model_load_path = parsed_args.model_load

    # training related
    batch_size = parsed_args.batch_size
    epochs = parsed_args.epochs
    iters = parsed_args.iters
    augment = not parsed_args.no_augment
    lr = parsed_args.learning_rate
    weight_decay = parsed_args.weight_decay
    validation_size = parsed_args.validation_size
    loss_function = parsed_args.loss_function

    # videomatch related
    img_shape = parsed_args.input_image_shape
    seg_shape = parsed_args.segmentation_shape
    remove_outliers = not parsed_args.leave_outliers
    fg_thresh = parsed_args.fg_thresh
    encoder = parsed_args.encoder
    upsample_fac = parsed_args.upsample_factor

    # misc
    val_report_iter = parsed_args.val_report
    visualize = parsed_args.visualize
    results_dir = parsed_args.results_dir
    loss_visualize = parsed_args.loss_visualization

    # args checks -- warn (not abort) on argument combinations that are
    # legal but almost certainly unintended.
    # NOTE(review): `logger` is presumably a module-level logger configured
    # by init_logging above -- not visible in this chunk.
    if mode == 'train' and batch_size != 1:
        logger.warning("Batch size > 1 is only applicable to 'eval' mode.")

    if iters != -1 and epochs > 1:
        logger.warning(
            "Iters is set to {} and not to -1 (full dataset), but epoch is > 1"
            .format(iters))

    if mode == 'eval' and shuffle:
        logger.warning(
            "Dataset shuffle can't be set to True in 'eval' mode, setting it to False!"
        )
        shuffle = False
    if mode == 'train' and not shuffle:
        logger.warning(
            "Dataset shuffle is off, consider turning it on when training, "
            "to avoid overfitting on starting sequences")

    if mode != 'eval' and visualize:
        logger.warning("Visualize is set to True, but mode isn't 'eval'")

    device = None if cuda_dev is None else "cuda:{:d}".format(cuda_dev)

    dataset = Davis(davis_dir, year, dataset_mode, seq_names)

    vm = VideoMatch(out_shape=seg_shape,
                    device=device,
                    encoder=encoder,
                    upsample_fac=upsample_fac)
    if model_load_path is not None:
        logger.info("Loading model from path {}".format(model_load_path))
        vm.load_model(model_load_path)

    if mode == 'train':
        # Split frame pairs into train/validation subsets by index.
        pair_sampler = PairSampler(dataset, randomize=shuffle)
        indices = np.arange(len(pair_sampler))
        if shuffle:
            np.random.shuffle(indices)

        split = int(np.floor(validation_size * len(pair_sampler)))
        val_indices = indices[:split]
        train_indices = indices[split:]
        train_loader = DataLoader(dataset,
                                  batch_sampler=SubsetRandomSampler(
                                      pair_sampler.get_indexes(train_indices)),
                                  collate_fn=collate_pairs)
        val_loader = DataLoader(dataset,
                                batch_sampler=SubsetRandomSampler(
                                    pair_sampler.get_indexes(val_indices)),
                                collate_fn=collate_pairs)

        logger.debug("Train set size: {}, Validation set size: {}".format(
            len(pair_sampler) - split, split))

        # iters == -1 means "one full pass over the train loader".
        iters = len(train_loader) if iters == -1 else iters
        fp = FrameAugmentor(img_shape, augment)

        train_vm(train_loader, val_loader, vm, fp, device, lr, weight_decay,
                 iters, epochs, val_report_iter, model_save_path,
                 loss_visualize, loss_function)

    elif mode == 'eval':
        # Evaluation streams whole sequences, batched frame-by-frame.
        multiframe_sampler = MultiFrameSampler(dataset)
        data_loader = DataLoader(dataset,
                                 sampler=multiframe_sampler,
                                 collate_fn=collate_multiframes,
                                 batch_size=batch_size,
                                 num_workers=batch_size)

        if results_dir is not None and not isdir(results_dir):
            mkdir(results_dir)

        eval_vm(data_loader, vm, img_shape, fg_thresh, remove_outliers,
                visualize, results_dir)
예제 #19
0
    y_max = max(y_list) + max(y_list) / 2
    # y_max = max(y_list) + max(y_list) / 11
    plt.ylim([0, y_max])
    y_sum = sum(y_list)
    percentage = [x / y_sum for x in y_list]
    # 为每个条形图添加数值标签
    for x, y in enumerate(y_list):
        plt.text(x, y + y_max / 11, str(round(percentage[x], 2)), ha='center')
    # 显示图形
    plt.show()


if __name__ == '__main__':
    today = time.strftime('%Y%m%d', time.localtime(time.time()))
    # Log to ./logs/model_<name>_<date>.txt.
    logger = log.init_logging(
        os.path.join(os.path.expanduser('./logs/'),
                     'model_%s_%s.txt' % (__name__, today)), 'info')
    dbconn = MyPymysqlPool(logger, 'MysqlDatabaseInfo')
    # Collect two-day limit-up stocks, then run each statistic for its side
    # effects (logging/plotting); every tuple element returned is discarded.
    all_limit_up_stocks, turnover_raterange_list, turnover_rate_list = get_limit_up_two_days(
        logger, dbconn)
    _, _, _, _, _, _, _, _ = get_limit_up_amount(all_limit_up_stocks, logger)
    _, _, _, _, _, _, _ = get_limit_up_total_price(all_limit_up_stocks, logger)
    _, _, _, _, _, _, _ = get_limit_up_total_price_circ(
        all_limit_up_stocks, logger)
    _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ = open_changes(
        all_limit_up_stocks, logger)
    _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ = limit_up_times(
        all_limit_up_stocks, logger)
    _, _, _ = open_prices(all_limit_up_stocks, logger)
    exit()
    """
예제 #20
0
from flowview.cleanup_handler import CleanupHandler
from optparse import OptionParser
import logging
import log


# Command-line options selecting the workflow phase and the config file.
parser = OptionParser()
for opt_name, opt_dest in (("--config-file", "config_file"),
                           ("--phase", "phase")):
    parser.add_option(opt_name, dest=opt_dest, action="store")

(options, args) = parser.parse_args()

log.init_logging()
logger = logging.getLogger(__name__)

try:
    if options.phase == "setup":
        handler = SetupHandler(options.config_file)
    elif options.phase == "load":
        handler = LoadHandler(options.config_file)
    elif options.phase == "cleanup":
        handler = CleanupHandler(options.config_file)
    else:
        handler = None
        logger.error("Illegal option phase: %s" % options.phase)
    handler.execute()

except NameError:
예제 #21
0
def main():
    """Download YouTube videos and cut them into validated fragments.

    Accepts either a single --link or a --csv file of links (exactly one of
    the two), downloads each video, runs frame checks, writes fragments to
    --output-dir, and reports progress to mlboard throughout.
    """

    parser = argparse.ArgumentParser()
    parser.add_argument('--link',
                        type=str,
                        default=None,
                        help='Process single YouTube video specified in link')
    parser.add_argument('--csv',
                        type=str,
                        default=None,
                        help='Process several YouTube videos, link in CSV')
    parser.add_argument(
        '--duration',
        type=int,
        default=None,
        help='Processed video duration in seconds, not crop if not set')
    parser.add_argument('--check-each-frame',
                        type=int,
                        default=1,
                        help='Check each N frame for correct')
    parser.add_argument('--output-dir',
                        type=str,
                        default=None,
                        help='Output dir')
    parser.add_argument('--models-dir',
                        type=str,
                        default=None,
                        help='Models dir')
    parser.add_argument('--face-detect-threshold',
                        type=float,
                        default=.5,
                        help='Face detect threshold')
    parser.add_argument('--change-scene-threshold',
                        type=float,
                        default=.5,
                        help='Change scene threshold')
    args = parser.parse_args()

    init_logging()

    # Exactly one of --link / --csv must be given (this is an XOR check;
    # `and` binds tighter than `or`, so the grouping is as intended).
    if not args.link and not args.csv or args.link and args.csv:
        raise RuntimeError("Should be only --youtube or only --csv parameter")

    # Build the list of links: one per non-blank CSV line, or the single link.
    links = []
    if args.csv:
        with open(args.csv) as f:
            for link in f:
                link = link.strip()
                if link:
                    links.append(link)
    else:
        links.append(args.link)

    logging.info("Going to process {} YouTube links".format(len(links)))

    check_frame.initialize(
        models_dir=args.models_dir,
        face_detect_threshold=args.face_detect_threshold,
        change_scene_threshold=args.change_scene_threshold,
    )

    total_fragments = 0

    mlboard.update_task_info({
        "total.count": len(links),
    })

    # Per-link pipeline: download -> process -> report -> delete temp media.
    for n, link in enumerate(links):
        mlboard.update_task_info({
            "youtube.link": link,
            "youtube.downloaded": "false",
        })
        video_file, audio_file = youtube.download(link)
        mlboard.update_task_info({
            "youtube.downloaded": "true",
        })
        fragments = process.process_video(
            video_file,
            audio_file,
            output_dir=args.output_dir,
            duration=args.duration,
            check_each_frame=args.check_each_frame,
        )
        total_fragments += fragments
        mlboard.update_task_info({
            "total.fragments_done": total_fragments,
            "total.processed": n + 1,
        })
        # Remove the downloaded media; audio may share the video file.
        os.remove(video_file)
        if audio_file != video_file:
            os.remove(audio_file)

    mlboard.update_task_info({
        "youtube.processing": "-",
        "youtube.processed": len(links),
    })
예제 #22
0
def mymain(args):
    """Sync stock quotation data from tushare into MySQL.

    Sets up logging, the tushare client and a DB connection, reads the full
    stock TS-code list, then starts one worker thread per data category
    selected on the command line (daily/weekly/monthly quotations, their
    forward/backward adjusted variants, daily indicators and adjustment
    factors) and shows a console spinner until every worker has finished.

    The original body repeated the same thread-start stanza eleven times;
    the logic is factored into the private ``_spawn`` helper below with
    byte-identical log messages and argument order.
    """
    stocks_list = []
    threads_list = []
    # Initialize logging: one file per day under args.logdir.
    if not os.path.exists(os.path.expanduser(args.logdir)):
        os.mkdir(os.path.expanduser(args.logdir))
    logger = log.init_logging(
        os.path.join(
            os.path.expanduser(args.logdir), '%s_%s.txt' %
            (__name__, time.strftime('%Y%m%d', time.localtime(time.time())))),
        args.loglevel)
    # Initialize the tushare client; bail out if it cannot be created.
    pro = tsapi.init_tushare(logger, tsapi.MYTOKEN)
    if pro is None:
        print('Init tushare error, exit')
        exit(1)
    # Fetch the list of stock TS codes from the database.
    conn_tmp = dbpool.MyPymysqlPool(logger, 'MysqlDatabaseInfo')
    for d in conn_tmp.getAll('select ts_code from t_stocks'):
        stocks_list.append(d['ts_code'])
    conn_tmp.dispose()

    # Default to "today only" when no start date was given on the CLI.
    today = time.strftime('%Y%m%d', time.localtime(time.time()))
    if args.startdate == '':
        startdate = today
        enddate = today
    else:
        startdate = args.startdate
        enddate = args.enddate

    def _spawn(name, target, extra_args):
        """Start one worker thread with its own DB connection pool.

        ``name`` is the (Chinese) category label used in the failure log
        message; ``extra_args`` is appended to the common
        ``(logger, pro, conn, stocks_list)`` argument prefix.  Successfully
        started threads are registered in ``threads_list``.
        """
        conn = dbpool.MyPymysqlPool(logger, 'MysqlDatabaseInfo')
        try:
            t = threading.Thread(target=target,
                                 args=(logger, pro, conn, stocks_list) +
                                 extra_args)
            t.start()
        except Exception as e:
            # Same message as the original per-category handlers:
            # "starting <name> thread failed".
            logger.error('启动%s线程失败: %s' % (name, e))
        else:
            threads_list.append(t)

    # All quotation/indicator workers share the same date-range/retry args.
    quote_args = (startdate, enddate, args.retry)
    if args.daily_quotation:        # daily quotations
        _spawn('日行情', daily, quote_args)
    if args.weekly_quotation:       # weekly quotations
        _spawn('周行情', weekly, quote_args)
    if args.monthly_quotation:      # monthly quotations
        _spawn('月行情', monthly, quote_args)
    if args.qfq_daily_quotation:    # forward-adjusted daily quotations
        _spawn('前复权日行情', qfq_daily, quote_args)
    if args.qfq_weekly_quotation:   # forward-adjusted weekly quotations
        _spawn('前复权周行情', qfq_weekly, quote_args)
    if args.qfq_monthly_quotation:  # forward-adjusted monthly quotations
        _spawn('前复权月行情', qfq_monthly, quote_args)
    if args.hfq_daily_quotation:    # backward-adjusted daily quotations
        _spawn('后复权日行情', hfq_daily, quote_args)
    if args.hfq_weekly_quotation:   # backward-adjusted weekly quotations
        _spawn('后复权周行情', hfq_weekly, quote_args)
    if args.hfq_monthly_quotation:  # backward-adjusted monthly quotations
        _spawn('后复权月行情', hfq_monthly, quote_args)
    if args.daily_index:            # daily indicators
        _spawn('每日指标', tsindex, quote_args)
    if args.adj_factor:             # adjustment factors (no date range)
        _spawn('复权因子', adj_factor, (args.retry,))

    logger.info('Business thread start completed, all %d threads' %
                len(threads_list))

    def _continueloop(threads):
        """Return True while at least one worker thread is still alive."""
        for t in threads:
            if t.is_alive():
                return True
        return False

    now = time.time()
    print('\t\t\t\t%s begin' %
          time.strftime('%Y%m%d %H%M%S', time.localtime(now)))
    # Console spinner: alternate \ and / every 0.1 s while workers run.
    while _continueloop(threads_list):
        print('\r<%08d>s [\\], see log to more information' %
              (time.time() - now),
              end='')
        time.sleep(0.1)
        print('\r<%08d>s [/], see log to more information' %
              (time.time() - now),
              end='')
        time.sleep(0.1)
    print('\n\t\t\t\tAll complete, done')
예제 #23
0
import logging
from log import init_logging


def run():
    """Fetch up to 5000 proxies from the mimvp.com paid API and upsert each
    one into the temporary proxy collection.

    Python 2 code (uses the old ``except Exception, e`` syntax).
    """
    try:
        logging.info("===========mc_crawl_proxy_api run===================")
        r = requests.get(
            "http://proxy.mimvp.com/api/fetch.php?orderid=860160922170106912&num=5000&result_fields=1,2,3,4,5,6,7,8,9&result_format=json"
        )
        r_json = json.loads(r.text)
        proxy_list = r_json['result']
        for proxy in proxy_list:
            logging.info(proxy)
            # The API returns the address as a single "ip:port" field.
            ip_port = proxy['ip:port']
            ip, port = ip_port.split(':')
            item = {'ip': ip, 'port': port, 'type': proxy.get('http_type')}
            # Merge the raw API record on top of the normalized fields.
            item.update(proxy)
            ProxyItemsTmpDB.upsert_proxy_item(item)

        logging.info("===========mc_crawl_proxy_api over===================")
    except Exception, e:
        # NOTE(review): broad catch logs and swallows every error (including
        # network failures) -- the process exits successfully either way.
        logging.info(
            "===========mc_crawl_proxy_api exception===================")
        logging.info(e.message)


if __name__ == "__main__":
    # init_logging takes two log file paths -- presumably a primary and a
    # secondary log; confirm against the log module.
    init_logging("log/crawl_proxy_api.log", "log/crawl_proxy_api_2.log")
    run()
예제 #24
0
def initialize():
    """One-time process bootstrap.

    The ordering appears deliberate: sys.path fixes first, then logging,
    then application module initialization.
    """
    init_sys_path()
    log.init_logging()
    modules.init_modules()
예제 #25
0
파일: test_commands.py 프로젝트: nido/mis
 def setUp(self):  # pylint: disable-msg=C0103
     """Initialize logging before each test case runs."""
     init_logging()
예제 #26
0
def main(argv=None):
	"""Entry point for the upq daemon.

	Parses -c/--config and -l/--logfile, reads the ini configuration,
	daemonizes via a DaemonContext, connects to the DB, starts the socket
	server, then sleeps in signal.pause() until SIGINT/SIGTERM triggers
	program_cleanup().  On any startup error the traceback is printed and
	the process exits with status 1.
	"""
	if argv is None:
		argv = sys.argv

	server = None

# SIGINT signal handler
	def program_cleanup(sig_num, frame):
		# Graceful shutdown: stop the server, close the DB, then exit 0.
		logger = log.getLogger("upq")
		logger.info("Shutting down socket server...")
		server.shutdown()
		logger.info("Disconnecting from DB...")
		upqdb.UpqDB().cleanup()
		log.getLogger("upq").info("Good bye.")
		sys.exit(0)

	usage = "usage: %prog -c CONFIGFILE [options]"
	parser = OptionParser(usage)
	parser.add_option("-c", "--config", dest="configfile", default="",
			  help="path to config file CONFIGFILE")
#TODO: use this to en/disable daemonization
#		parser.add_option("-d", "--daemonize",
#		help="detach from terminal etc")
	parser.add_option("-l", "--logfile", dest="logfile", default="",
			  help="path to logfile LOGFILE")
	(options, argv) = parser.parse_args()

	try:
		# read ini file
		UpqConfig(options.configfile, options.logfile)
		UpqConfig().readConfig()

		#FIXME: remove following line + how does this $%$!" work?
		del UpqConfig().daemon['pidfile']
	#	 if UpqConfig().daemon.has_key('pidfile'):
	#		 lockfile=UpqConfig().daemon['pidfile']
	#		 UpqConfig().daemon['pidfile']=pidlockfile.TimeoutPIDLockFile(lockfile, acquire_timeout=1)
		context = daemon.DaemonContext(**UpqConfig().daemon)
		# daemonize; keep stdout/stderr on stderr so startup errors are visible
		context.stdout = sys.stderr
		context.stderr = sys.stderr

		upq = Upq()
		with context:
			# initialize logging
			logger = log.init_logging(UpqConfig().logging)
			logger.info("Starting logging...")
			logger.debug(UpqConfig().config_log)
			# setup and test DB
			logger.info("Connecting to DB...")
			db = upqdb.UpqDB()
			db.connect(UpqConfig().db['url'], UpqConfig().db['debug'])
			db.version()
			# start server
			logger.info("Starting socket server...")
			server = upq.start_server()

		# ignore all signals
		for sig in dir(signal):
			if sig.startswith("SIG"):
				try:
					signal.signal(signal.__getattribute__(sig), signal.SIG_IGN)
				except:
					# some signals cannot be ignored or are unknown on diff platforms
					pass
		# except SIGINT and SIGTERM
		signal.signal(signal.SIGINT, program_cleanup)
		signal.signal(signal.SIGTERM, program_cleanup)

		log.getLogger("upq").info("Server running until receiving SIGTERM or SIGINT / Ctrl+C.")
		signal.pause()

	except Exception:
		traceback.print_exc(file=sys.stderr)
	# Reached only on startup failure: best-effort DB cleanup, then exit 1.
	try:
		db.cleanup()
	except:
		pass
	sys.exit(1)
예제 #27
0
def main():
    """run custom pelican with autoreload"""

    init_logging(LOG_FILE)

    print(f'''\

---------------------------------------------------------------------

 DABI: Digital Analysis of Bibliographical Information  (v{__version__})

---------------------------------------------------------------------



 Build website from templates and local databases.

 Pages will be auto-regenerated while the program is running.

''')

    # set custom pelican settings

    custom_settings = pelican.settings.get_settings_from_module(pelicanconf)

    pelican.settings.DEFAULT_CONFIG.update(custom_settings)

    last_mtime_settings = 0

    # auto-reload observer

    my_handler = FileChangedHandler(
        patterns=['*'],
        ignore_patterns=['*/_website/*', LOG_FILE, '*/*.filepart'],
        ignore_directories=True,
        case_sensitive=False)

    observer = Observer()

    observer.schedule(my_handler, '.', recursive=True)

    observer.start()

    try:

        while True:

            try:

                if FileChangedHandler.new_run:

                    FileChangedHandler.new_run = False

                    # new log

                    for handler in logger.parent.handlers:

                        handler.close()

                    try:

                        mtime_settings = Path(SETTINGS_FILE).stat().st_mtime

                    except FileNotFoundError:

                        mtime_settings = 1

                    if mtime_settings > last_mtime_settings:  # update settings and build pelican class

                        last_mtime_settings = mtime_settings

                        settings = get_settings(SETTINGS_FILE)

                        pelican_cls = pelican.Pelican(settings)

                    settings['MONTH'] = datetime.now().strftime(
                        '%B %Y').title()

                    settings['YEAR'] = datetime.now().strftime("%Y")

                    pelican_cls.run()  # update all pages

                    logger.log(
                        35,
                        f'Done: Run completed at {str(datetime.now().astimezone().isoformat(timespec="seconds"))}.\n'
                    )

            except KeyboardInterrupt:

                raise

            except UserWarning as err:

                logger.critical(err)

            except FileNotFoundError as err:

                logger.debug(f'FileNotFoundError exception: {err}')

            except Exception as err:  # logs any error with Traceback

                logger.critical(err, exc_info=True)

            finally:

                time.sleep(.5)  # sleep to avoid cpu load

    except Exception as err:

        logger.warning(f'Program ended prematurely: {err}')

    except:

        logger.info('Terminating program.')

    observer.stop()

    observer.join()

    logging.shutdown()

    sys.exit(1)

    # TODO: convert pages to UTF automatically

    # TODO: 404, nav, sub-Bibl rename to Bibl
    '''
        logger.error('打开文件失败')
        return RET_FAIL

    # 读取英汉医学大词典的单词
    words = set(dd.deal_medical_dict())
    # print(words)

    for line in inf:
        line = line.strip()
        word_freq_info = line.split(WORD_FREQ_SEPATOR)
        if len(word_freq_info) != 2:
            logger.debug('词频格式不对: %s' % line)
            continue
        word_freq_info[1] = word_freq_info[1].replace(END_WORD_FREQ, '')
        if word_freq_info[1] == '0':
            continue
        if not word_freq_info[0] in words:
            continue
        word_freq[word_freq_info[0]] = int(word_freq_info[1])

    # print(len(word_freq))
    util.mk_dirname(WORD_FREQ_PRE)
    with open(WORD_FREQ_PRE, 'w') as f:
        json.dump(word_freq, f)
    logging.info('词频文件预处理成功')


if __name__ == "__main__":
    init_logging()
    deal_word_frequency_data()
                logger.error('Update error, SQL: %s' % up)

    logger.info('All updated %d datas' % cnt)


if __name__ == '__main__':
    threads_list = []
    cnt = 0
    # Make sure the log directory exists, then set up today's log file.
    log_home = os.path.expanduser(logdir)
    if not os.path.exists(log_home):
        os.mkdir(log_home)
    logger = log.init_logging(
        os.path.join(
            log_home, '%s_%s.txt' %
            ('sync', time.strftime('%Y%m%d', time.localtime(time.time())))),
        'info')
    conn = dbpool.MyPymysqlPool(logger, 'MysqlDatabaseInfo')
    # Collect every stock TS code.
    stocks_list = [row['ts_code']
                   for row in conn.getAll('select ts_code from t_stocks')]
    # Collect the open trading days after 2020-03-29.
    date_list = [row['cal_date'] for row in conn.getAll(
        'select cal_date from t_exchange_trade_cal where cal_date>"20200329" and is_open=1'
    )]
    # Run the index sync once per trading day.
    for trade_date in date_list:
        index_sync(logger, conn, stocks_list, trade_date)
예제 #30
0
import os
import log

# Select environment-specific settings: ENV=pro picks production, anything
# else falls back to local development settings.
# (os.environ.get replaces the deprecated dict.has_key, which was removed in
# Python 3; behavior is identical on Python 2.)
if os.environ.get('ENV', '').lower() == "pro":
    from envs.pro_settings import *
else:
    from envs.local_settings import *

# Start Logging
log.init_logging(LOGGING_DIR)

# Mirror DEBUG (defined by the imported env settings) for templates.
TEMPLATE_DEBUG = DEBUG
INTERNAL_IPS = ('127.0.0.1',)

# Django Default Date Format Override
DATE_FORMAT = 'N j, Y'

# Admins get notified of 500 errors
ADMINS = (
('admin', '*****@*****.**'),
)

# Managers get notified of 404 errors
MANAGERS = ADMINS

# Default email address 500 / 404 emails get sent from
SERVER_EMAIL = '*****@*****.**'

# Email admins when users hit a 404
SEND_BROKEN_LINK_EMAILS = True
예제 #31
0

def run():
    """Fetch up to 5000 proxies from the mimvp.com paid API and upsert each
    one into the temporary proxy collection.

    Python 2 code (uses the old ``except Exception, e`` syntax).
    """
    try:
        logging.info("===========mc_crawl_proxy_api run===================")
        r = requests.get(
            "http://proxy.mimvp.com/api/fetch.php?orderid=860160922170106912&num=5000&result_fields=1,2,3,4,5,6,7,8,9&result_format=json")
        r_json = json.loads(r.text)
        proxy_list = r_json['result']
        for proxy in proxy_list:
            logging.info(proxy)
            # The API returns the address as a single "ip:port" field.
            ip_port = proxy['ip:port']
            ip, port = ip_port.split(':')
            item = {
                'ip': ip,
                'port': port,
                'type': proxy.get('http_type')
            }
            # Merge the raw API record on top of the normalized fields.
            item.update(proxy)
            ProxyItemsTmpDB.upsert_proxy_item(item)

        logging.info("===========mc_crawl_proxy_api over===================")
    except Exception, e:
        # NOTE(review): broad catch logs and swallows every error (including
        # network failures) -- the process exits successfully either way.
        logging.info("===========mc_crawl_proxy_api exception===================")
        logging.info(e.message)


if __name__ == "__main__":
    # init_logging takes two log file paths -- presumably a primary and a
    # secondary log; confirm against the log module.
    init_logging("log/crawl_proxy_api.log", "log/crawl_proxy_api_2.log")
    run()
            {"company_name": company_name}, {"_id": 0})

    @staticmethod
    def upsert_company_export(item):
        """Upsert one company record into the export_from_zhaopin collection,
        keyed by company_name (the trailing True, True flags request
        upsert/multi behavior -- confirm against the Mongo driver version)."""
        logging.info("<MONGO> %s" % item)
        qianzhan_db.export_from_zhaopin.update(
            {'company_name': item['company_name']}, {'$set': item}, True, True)


import sys
# HACK: force the process-wide default encoding to UTF-8 (Python 2 only).
# reload(sys) restores setdefaultencoding, which site.py normally deletes.
reload(sys)
sys.setdefaultencoding("utf-8")
print "sys default encoding: ", sys.getdefaultencoding()

if __name__ == "__main__":
    init_logging('log/export_from_zhaopin.log',
                 'log/export_from_zhaopin_2.log')

    # For every company seen in the zhaopin data, pull its qianzhan "detail"
    # and "base" records (when present) and upsert them into the export
    # collection.
    cur = ZhaopinDB.get_all()
    i = 0  # progress counter, logged per record
    for item in cur:
        i += 1
        company_name = item['company_name']
        logging.info(i)
        company = QianzhanDB.company_detail(company_name)
        if company:
            QianzhanDB.upsert_company_export(company)
        company = QianzhanDB.company_base(company_name)
        if company:
            QianzhanDB.upsert_company_export(company)
예제 #33
0
# -*- coding:utf-8 -*-
__author__ = 'zhaojm'

from log import init_logging
from mongo import ProxyItemsDB, ProxyItemsDropDB

from valid_proxy import valid_proxy


def main():
    """Re-validate every stored proxy: working proxies are refreshed in
    place, dead ones are moved to the drop collection."""
    for item in ProxyItemsDB.get_proxy_items():
        checked = valid_proxy(item)
        # The original entry is removed in either case; what replaces it
        # depends on whether validation succeeded.
        if checked:
            ProxyItemsDB.remove_proxy_item(item)
            ProxyItemsDB.upsert_proxy_item(checked)
        else:
            ProxyItemsDB.remove_proxy_item(item)
            ProxyItemsDropDB.upsert_proxy_item(item)


if __name__ == "__main__":
    # init_logging takes two log file paths -- presumably a primary and a
    # secondary log; confirm in the log module.
    init_logging("log/valid_all_to_drop.log", "log/valid_all_to_drop_2.log")
    main()
    pass
예제 #34
0
파일: run.py 프로젝트: KTH/HowRU
import logging
import asyncio
from threading import Thread
import slack_util
import schedule
import log as log_module

# Configure logging once at import time; module-level logger for this script.
log_module.init_logging()
log = logging.getLogger(__name__)

#schedule.every().day.at("09:00").do(slack_util.post_todays_question)
#schedule.every().day.at("15:00").do(slack_util.post_question_summary)


def main():
    """Connect the Slack bot and post today's question.

    Any failure is logged and re-raised so the supervising process can
    restart the bot.
    """
    try:
        #loop = asyncio.new_event_loop()
        #thread = Thread(target=slack_util.init, args=(loop,))
        #thread.start()
        slack_util.init()
        log.info("Everest Bot connected and running!")
        slack_util.post_todays_question()
    except Exception as err:
        log.error(f'Oh :poop:, I died and had to restart myself. {err}')
        raise


if __name__ == "__main__":
    # Script entry point.
    main()