Code example #1
def run(log_level="INFO"):
    config = configuration.configuration()
    config.fileConfig(myglobal.LOGGINGINI)
    config.setValue("handler_fileHandler", "args", ('dispatcher.log', 'a'))
    config.setValue("handler_fileHandler", "level", log_level)
    config.setValue("handler_consoleHandler", "level", log_level)
    #    logging.config.fileConfig(FILE_FOLDER + configuration.configuration("dispatcher.ini").getValue('Log','file'))
    logging.config.fileConfig(myglobal.LOGGINGINI)
    logger = logging.getLogger('main')

    d = dispatcher.dispatcher()

    while True:
        logger.info("Run periodically build and queue check.")

        config.fileConfig(myglobal.CONFIGURATONINI)
        type = config.getValue("Platform", "type")
        if type == "integration":
            #run scheduled integration necessity check
            d.checkIntegrationNecessity()
        else:
            #run scheduled build check
            d.scheduledCheck()
        #check the queue to see if any new job to do
        d.scanQueue()
        '''
        #add nightly jobs only on designated time
        currentTime=time.strftime('%H%M',time.localtime())
        scheduledTime=scheduled_nightly_time.replace(":","")
        if int(currentTime)>=int(scheduledTime) and int(currentTime)<(int(scheduledTime)+timeout/60):
            #print "It comes to the scheduled time, add nightly jobs."
            logger.info("It comes to the scheduled time, add nightly jobs.")
            d.addNightlyJobs()
        '''
        #wait for a while
        logger.info("Sleep for %i minutes" % (timeout / 60))
        time.sleep(timeout)

        continue
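
The configuration.configuration wrapper and myglobal.LOGGINGINI are not shown above. As a rough, self-contained sketch, this is the kind of logging .ini that logging.config.fileConfig() expects and that the setValue() calls rewrite before it is re-read; the file name, contents, and levels here are illustrative assumptions, not the project's actual files.

# Illustrative only: a minimal .ini in logging.config.fileConfig() format,
# with the handler_fileHandler / handler_consoleHandler sections that the
# snippet above edits via setValue(). Names and levels are assumptions.
import logging
import logging.config

LOGGING_INI = "logging_example.ini"   # stand-in for myglobal.LOGGINGINI

with open(LOGGING_INI, "w") as fp:
    fp.write("""\
[loggers]
keys=root,main

[handlers]
keys=fileHandler,consoleHandler

[formatters]
keys=simpleFormatter

[logger_root]
level=INFO
handlers=consoleHandler

[logger_main]
level=INFO
handlers=fileHandler,consoleHandler
qualname=main
propagate=0

[handler_fileHandler]
class=FileHandler
level=INFO
formatter=simpleFormatter
args=('dispatcher.log', 'a')

[handler_consoleHandler]
class=StreamHandler
level=INFO
formatter=simpleFormatter
args=(sys.stdout,)

[formatter_simpleFormatter]
format=%(asctime)s %(name)s %(levelname)s %(message)s
""")

logging.config.fileConfig(LOGGING_INI)
logging.getLogger("main").info("logging configured from %s", LOGGING_INI)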
Code example #2
            self.__parser.readfp(fp)
            value = self.__parser.get(sec, key)
        except Exception, e:
            wlog.critical("Exception caught, [Exception]: %s" % e)
            return False
        else:
            return value
        finally:
            fp.close()


# define global config object
config = Config()

# define global vars
g_ip_dict_path = CUR_PATH + config.getValue("comment", "ip_count_file")
g_ip_dict = {}
g_user_dict_path = CUR_PATH + config.getValue("comment", "user_count_file")
g_user_dict = {}
g_bayes_dict_path = CUR_PATH + config.getValue("comment", "bayes_dict")
g_bayes_dict = {}
g_fisher_dict_path = CUR_PATH + config.getValue("comment", "fisher_dict")
g_fisher_dict = {}

g_load_interval = int(config.getValue("comment", "load_interval"))
g_backup_interval = int(config.getValue("comment", "backup_interval"))

g_bayes_word_freq = float(config.getValue("comment", "bayes_word_freq"))
g_bayes_spam_rate = float(config.getValue("comment", "bayes_spam_rate"))

g_top_num = int(config.getValue("comment", "default_top_num"))
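
The Config class is only partially visible in the fragment above. As a hedged sketch, a minimal ConfigParser-backed version with the same getValue(sec, key) behaviour might look like the following; the class name and the except/else/finally structure mirror the fragment, while the constructor, config path, and logger name are assumptions.

# Minimal sketch of a ConfigParser-backed Config.getValue(sec, key);
# CONFIG_PATH and the wlog logger are hypothetical stand-ins.
import logging
import ConfigParser  # named 'configparser' in Python 3

wlog = logging.getLogger("wlog")
CONFIG_PATH = "comment.ini"   # hypothetical path, not the project's file

class Config(object):
    def __init__(self, path=CONFIG_PATH):
        self.__path = path
        self.__parser = ConfigParser.RawConfigParser()

    def getValue(self, sec, key):
        # re-read the .ini file on every lookup; return False on any error,
        # mirroring the fragment shown above
        fp = open(self.__path)
        try:
            self.__parser.readfp(fp)
            value = self.__parser.get(sec, key)
        except Exception as e:
            wlog.critical("Exception caught, [Exception]: %s" % e)
            return False
        else:
            return value
        finally:
            fp.close()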
Code example #3
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--configfile",
        default="config.json",
        help="config file to use"
    )
    args = parser.parse_args()

    # config file
    config = ConfigFile(args.configfile)


    # configure the logging
    logging_config = config.getValue(
        "client.logging",
        default=None,
        alternatePaths="logging")


    # debug output: dump the loaded logging config, then stop
    import json
    print repr(logging_config)
    print "\n" * 4
    print json.dumps(logging_config, indent=2)
    quit()

    if logging_config is not None:
        logging.config.dictConfig(logging_config)
    else:
        # set up some default logging options
        logging.basicConfig(
            format="%(asctime)s|%(levelname)s|%(name)s - %(message)s",
Code example #4
File: get_timelines.py  Project: emCOMP/twitter_utils

    # open input file
    with io.open(args.idfile, mode="r", encoding="utf-8") as inputfile:
        # read all ids
        input_ids = []
        try:
            input_ids = [l.strip() for l in inputfile if len(l.strip()) > 0]
        except Exception, e:
            print "error reading input ids from file: ", e
            raise e

        with io.open(args.outfile, mode="w+", encoding="utf-8") as outfile:

            # twitter auth
            twitter_auth = config.getValue("twitter_auth", None)

            # create auth item
            auth = tweepy.auth.OAuthHandler(
                twitter_auth["api_key"],
                twitter_auth["api_secret"])
            auth.set_access_token(
                twitter_auth["access_token"],
                twitter_auth["access_token_secret"])

            # api
            api = tweepy.API(
                auth,
                wait_on_rate_limit=True)

            total = len(input_ids)
    # handle arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--configfile",
        default="config.json",
        help="config file to use"
    )
    args = parser.parse_args()

    # config file
    config = ConfigFile(args.configfile)

    # configure the logging
    logging_config = config.getValue(
        "client.logging",
        default=None,
        alternate_paths=["logging"])

    configure_logging(logging_config)

    # create our log
    log = logging.getLogger("main")


    # create and run client
    try:
        client = MultiprocessClientBase(config)
        client.run()
    except exceptions.KeyboardInterrupt, e:
        log.info("keyboard interrupt")
        client.stop_process()
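
For reference, the getValue("twitter_auth", None) call above implies a config.json section shaped roughly as follows; only the key names come from the snippet, every value is a placeholder, and the nesting for "client.logging" is an assumption about how ConfigFile resolves dotted paths.

# Hypothetical config.json content implied by the lookups above; key names
# are taken from the snippet, all values are placeholders.
example_config = {
    "twitter_auth": {
        "api_key": "<consumer key>",
        "api_secret": "<consumer secret>",
        "access_token": "<access token>",
        "access_token_secret": "<access token secret>"
    },
    "client": {
        # presumably holds the dictConfig-style dict read via "client.logging"
        "logging": {"version": 1}
    }
}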