# Example #1 (score: 0)
def initialize_app():
    """Initialize the elastic-workflows application.

    Parses CLI args, loads configuration, configures logging, ensures the
    job-history copy directory exists, performs Kerberos initialization when
    enabled, and optionally creates the worker thread pool and HDFS client.

    Exits the process with status 1 on unrecoverable setup failures.
    """
    global pool
    args = parse_args_for_config()
    initialize_configuration(args.config)

    # Resolve the logging config next to this module so CWD does not matter.
    log_config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   'loggingelastic.conf')
    configure_logger(log_config_file, logging_config['elasticWorkflows'])

    # Hoist the abspath call instead of recomputing it in every branch.
    jobhistory_dir = os.path.abspath(jobhistory_copy_dir)
    if not os.path.exists(jobhistory_dir):
        logger.debug("Directory {0} does not exist. Creating it".format(
            jobhistory_dir))
        if not mkdir_p(jobhistory_dir):
            logger.error(
                "Failed to create jobhistory files copy dir:{0}. Exiting".
                format(jobhistory_copy_dir))
            exit(1)
        logger.debug(
            "Directory {0} created successfully".format(jobhistory_dir))
    else:
        logger.debug("Directory {0} exists".format(jobhistory_dir))

    if kerberos["enabled"]:
        if kinit_tgt_for_user():
            kerberos_initialize()
        else:
            # Fix: use the configured module logger, not the root logger,
            # so the message goes through the handlers set up above.
            logger.error("Failed to kinit with kerberos enabled. Exiting")
            exit(1)
    if with_threading:
        pool = ThreadPool(THREAD_COUNT)
    if not use_rest_api:
        initialize_hdfs_client(hdfs['url'])
# Example #2 (score: 0)
def initialize_app():
    """Configure namenode logging and perform Kerberos setup when enabled.

    Exits the process with status 1 if kinit fails while Kerberos is on.
    """
    module_dir = os.path.dirname(os.path.abspath(__file__))
    configure_logger(os.path.join(module_dir, "logginghadoop.conf"),
                     logging_config['namenode'])
    if kerberos["enabled"]:
        # Guard-clause form: bail out on kinit failure, then initialize.
        if not kinit_tgt_for_user():
            logging.error("Failed to kinit with kerberos enabled. Exiting")
            exit(1)
        kerberos_initialize()
def initialize_app():
    """Initialize the hadoop-cluster component: config, logging, Kerberos.

    Exits the process with status 1 if kinit fails while Kerberos is enabled.
    """
    args = parse_args_for_config()
    initialize_configuration(args.config)
    # Fix: resolve the logging config relative to this module instead of the
    # current working directory (matches the other initialize_app variants,
    # which all anchor the path on __file__).
    log_config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   "logginghadoop.conf")
    configure_logger(log_config_file, logging_config['hadoopCluster'])
    if kerberos["enabled"]:
        if kinit_tgt_for_user():
            kerberos_initialize()
        else:
            logging.error("Failed to kinit with kerberos enabled. Exiting")
            exit(1)
def initialize_app():
    """Initialize the spark-jobs component: config, logging, Kerberos.

    Exits the process with status 1 if kinit fails while Kerberos is enabled.
    """
    cli_args = parse_args_for_config()
    print("args to the process:{0}".format(cli_args.config))
    initialize_configuration(cli_args.config)
    base_dir = os.path.dirname(os.path.abspath(__file__))
    configure_logger(os.path.join(base_dir, 'loggingspark.conf'),
                     logging_config['sparkJobs'])

    if kerberos["enabled"]:
        # Guard-clause form: exit on kinit failure, then run Kerberos init.
        if not kinit_tgt_for_user():
            logging.error("Failed to kinit with kerberos enabled. Exiting")
            exit(1)
        kerberos_initialize()
# Example #5 (score: 0)
def initialize_app():
    """Initialize the oozie-workflows component: config, logging, Kerberos.

    Exits the process with status 1 if kinit fails while Kerberos is enabled.
    """
    config_args = parse_args_for_config()
    initialize_configuration(config_args.config)

    module_dir = os.path.dirname(os.path.abspath(__file__))
    log_config_file = os.path.join(module_dir, 'loggingoozie.conf')
    # NOTE: 'ozzieWorkflows' (sic) is the key actually present in the
    # logging configuration — do not "fix" the spelling here.
    configure_logger(log_config_file, logging_config['ozzieWorkflows'])
    if kerberos["enabled"]:
        if not kinit_tgt_for_user():
            logging.error("Failed to kinit with kerberos enabled. Exiting")
            exit(1)
        kerberos_initialize()
def initialize_app():
    """Initialize the elastic-workflows component.

    Loads configuration, configures logging, performs Kerberos setup when
    enabled, and optionally creates the worker thread pool and HDFS client.
    Exits with status 1 if kinit fails while Kerberos is enabled.
    """
    global pool
    cli_args = parse_args_for_config()
    initialize_configuration(cli_args.config)

    module_dir = os.path.dirname(os.path.abspath(__file__))
    configure_logger(os.path.join(module_dir, 'loggingelastic.conf'),
                     logging_config['elasticWorkflows'])
    if kerberos["enabled"]:
        # Guard-clause form: exit on kinit failure, then run Kerberos init.
        if not kinit_tgt_for_user():
            logging.error("Failed to kinit with kerberos enabled. Exiting")
            exit(1)
        kerberos_initialize()
    if with_threading:
        pool = ThreadPool(THREAD_COUNT)
    if not use_rest_api:
        initialize_hdfs_client(hdfs['url'])
# Example #7 (score: 0)
def initialize_app():
    """Configure logging for the kafka/ELK monitor component.

    Resolves the logging config file relative to this module's directory
    so startup does not depend on the current working directory.
    """
    # NOTE(review): this definition may continue beyond the visible chunk —
    # confirm whether Kerberos/config setup follows in the full file.
    log_config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                   'loggingmonitorkafkaelk.conf')
    configure_logger(log_config_file, logging_config['monitorkafkaelk'])