import os
import sys
import time
import concurrent.futures
from configparser import ConfigParser

# Read settings from s3_migration_cluster_config.ini located next to this
# script; set_log() and set_env() used below are imported from the project's
# helper library (not shown in this snippet)
config_file_path = os.path.split(
    os.path.abspath(__file__))[0] + '/s3_migration_cluster_config.ini'
cfg = ConfigParser()
try:
    cfg.read(config_file_path, encoding='utf-8-sig')
    table_queue_name = cfg.get('Basic', 'table_queue_name')
    ssm_parameter_bucket = cfg.get('Basic', 'ssm_parameter_bucket')
    ssm_parameter_credentials = cfg.get('Basic', 'ssm_parameter_credentials')
    LocalProfileMode = cfg.getboolean('Debug', 'LocalProfileMode')
    JobType = cfg.get('Basic', 'JobType')
    LoggingLevel = cfg.get('Debug', 'LoggingLevel')
except Exception as e:
    print("s3_migration_cluster_config.ini", str(e))
    sys.exit(0)
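
# For reference, a minimal s3_migration_cluster_config.ini covering the keys
# read above might look like this (illustrative placeholder values):
#
#   [Basic]
#   JobType = PUT
#   table_queue_name = s3_migrate_file_list
#   ssm_parameter_bucket = s3_migrate_bucket_para
#   ssm_parameter_credentials = s3_migrate_credentials
#
#   [Debug]
#   LoggingLevel = INFO
#   LocalProfileMode = False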

# Main
if __name__ == '__main__':

    # Set Logging
    logger, log_file_name = set_log(LoggingLevel, 'jobsender')

    # Get Environment
    sqs, sqs_queue, table, s3_src_client, s3_des_client, instance_id, ssm = \
        set_env(JobType, LocalProfileMode, table_queue_name, ssm_parameter_credentials)

    #######
    # Program start processing here
    #######
    # Get ignore file list
    ignore_list_path = os.path.split(
        os.path.abspath(__file__))[0] + '/s3_migration_ignore_list.txt'
    ignore_list = []
    try:
        with open(ignore_list_path, 'r') as f:
            ignore_list = f.read().splitlines()
    except Exception as e:
        logger.info(f'No ignore list file found: {ignore_list_path}, {str(e)}')

# Example 2

# Read the same s3_migration_cluster_config.ini; this worker also needs the
# SQS queue name and retry limit (the section name for MaxRetry is assumed)
try:
    cfg.read(config_file_path, encoding='utf-8-sig')
    table_queue_name = cfg.get('Basic', 'table_queue_name')
    sqs_queue_name = cfg.get('Basic', 'sqs_queue_name')
    ssm_parameter_credentials = cfg.get('Basic', 'ssm_parameter_credentials')
    JobType = cfg.get('Basic', 'JobType')
    MaxRetry = cfg.getint('Mode', 'MaxRetry')
    LocalProfileMode = cfg.getboolean('Debug', 'LocalProfileMode')
    LoggingLevel = cfg.get('Debug', 'LoggingLevel')
except Exception as e:
    print("ERR loading s3_migration_cluster_config.ini", str(e))
    sys.exit(0)

# If deployed via CDK, read these parameters from environment variables instead
try:
    table_queue_name = os.environ['table_queue_name']
    sqs_queue_name = os.environ['sqs_queue_name']
    ssm_parameter_bucket = os.environ['ssm_parameter_bucket']
except Exception as e:
    print("No Environment Variable from CDK, use the para from config.ini", str(e))

# Main
if __name__ == '__main__':

    # Set Logging
    logger, log_file_name = set_log(LoggingLevel, 'ec2-worker')

    # Get Environment
    sqs, sqs_queue, table, s3_src_client, s3_des_client, instance_id, ssm = \
        set_env(JobType=JobType,
                LocalProfileMode=LocalProfileMode,
                table_queue_name=table_queue_name,
                sqs_queue_name=sqs_queue_name,
                ssm_parameter_credentials=ssm_parameter_credentials,
                MaxRetry=MaxRetry)

# Example 3

# Read the same s3_migration_cluster_config.ini; this variant also reads the
# job concurrency limit and optional destination bucket/prefix defaults
# (the section name for MaxParallelFile is assumed)
try:
    cfg.read(config_file_path, encoding='utf-8-sig')
    table_queue_name = cfg.get('Basic', 'table_queue_name')
    ssm_parameter_credentials = cfg.get('Basic', 'ssm_parameter_credentials')
    JobType = cfg.get('Basic', 'JobType')
    LocalProfileMode = cfg.getboolean('Debug', 'LocalProfileMode')
    LoggingLevel = cfg.get('Debug', 'LoggingLevel')
    MaxParallelFile = cfg.getint('Mode', 'MaxParallelFile')
    try:
        Des_bucket_default = cfg.get('Basic', 'Des_bucket_default')
    except Exception as e:
        Des_bucket_default = 'foo'
    try:
        Des_prefix_default = cfg.get('Basic', 'Des_prefix_default')
    except Exception as e:
        Des_prefix_default = ''
except Exception as e:
    print("ERR loading s3_migration_cluster_config.ini", str(e))
    sys.exit(0)
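
# The optional destination defaults above would come from config entries such
# as the following (placeholders; omit them to use the fallbacks in the
# except blocks above):
#
#   [Basic]
#   Des_bucket_default = my-destination-bucket
#   Des_prefix_default = migrated/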

# Main
if __name__ == '__main__':

    # Set Logging
    logger, log_file_name = set_log(LoggingLevel)

    # Get Environment
    sqs, sqs_queue, table, s3_src_client, s3_des_client, instance_id, ssm = \
        set_env(JobType, LocalProfileMode, table_queue_name, ssm_parameter_credentials)

    #######
    # Program start processing here
    #######

    # Run concurrent jobs (one per file)
    logger.info(f'Start {MaxParallelFile} concurrent jobs.')
    start_time = int(time.time())
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=MaxParallelFile) as job_pool:
        # This loop only controls how many jobs (files) run concurrently;
        # each job's own concurrency and timeout are controlled further in.
        for i in range(MaxParallelFile):