Example 1
0
        account_key=None,
        rmatch=None,
        prefix=prefix)
    list_files.append(list_file)

assert len(list_files) == len(folder_names)

#%% Divide images into chunks for each folder

# This will be a list of lists: one list of chunk files per folder
folder_chunks = []

# list_file = list_files[0]
for list_file in list_files:

    # Split this folder's image list into per-task chunk files
    chunked_files, chunks = prepare_api_submission.divide_files_into_tasks(
        list_file)
    print('Divided images into files:')
    for i_fn, fn in enumerate(chunked_files):
        # Normalize chunk filenames: collapse double underscores and drop
        # the '_all' suffix.  (fn is the same string as chunked_files[i_fn],
        # so read it directly.)
        new_fn = fn.replace('__', '_').replace('_all', '')
        os.rename(fn, new_fn)
        chunked_files[i_fn] = new_fn
        # Print the renamed file; the original path no longer exists on disk
        print(new_fn, len(chunks[i_fn]))
    folder_chunks.append(chunked_files)

assert len(folder_chunks) == len(folder_names)

#%% Copy image lists to blob storage for each job

# Maps job name to a remote path
job_name_to_list_url = {}
job_names_by_task_group = []
Example 2
0
    # Azure Storage enumeration parameters.
    # NOTE(review): these are placeholders to be filled in before running;
    # account_name is presumably assigned earlier in the enclosing scope —
    # confirm.
    sas_token = 'st=...'
    container_name = ''
    rmatch = None  # '^Y53'
    output_file = r'output.json'

    # Enumerate blobs in the container (optionally filtered by the rmatch
    # pattern) and write the resulting file list to output_file
    blobs = prepare_api_submission.enumerate_blobs_to_file(
        output_file=output_file,
        account_name=account_name,
        sas_token=sas_token,
        container_name=container_name,
        rmatch=rmatch)

    #%%

    # Split a previously-enumerated image list into per-task chunk files
    file_list_json = r"D:\temp\idfg_20190801-hddrop_image_list.json"
    task_files = prepare_api_submission.divide_files_into_tasks(file_list_json)

    #%%

    # SAS URLs for the uploaded per-task file lists (placeholders: one
    # entry per task)
    file_list_sas_urls = ['', '', '']

    input_container_sas_url = ''
    request_name_base = ''
    caller = '*****@*****.**'

    # Build one API request string/dict per file-list URL.
    # NOTE(review): generate_api_queries is called unqualified here, unlike
    # the prepare_api_submission.* calls above — verify it is imported into
    # this module's namespace.
    request_strings,request_dicts = \
        generate_api_queries(input_container_sas_url,file_list_sas_urls,request_name_base,caller)

    # Echo the generated request strings for manual submission
    for s in request_strings:
        print(s)