Example 1
def download_cutouts(sbid, username, password, destination_dir, catalogue_query, do_cutouts, cutout_radius_degrees=0.1):
    # 2) Use CASDA VO (secure) to query for the images associated with the given scheduling_block_id
    print ("\n\n** Finding images and image cubes for scheduling block {} ... \n\n".format(sbid))
    data_product_id_query = "select * from ivoa.obscore where obs_id = '" + str(
        sbid) + "' and dataproduct_type = 'cube' and dataproduct_subtype in ('cont.restored.t0', 'spectral.restored.3d')"
    filename = destination_dir + "image_cubes_" + str(sbid) + ".xml"
    casda.sync_tap_query(data_product_id_query, filename, username, password)
    image_cube_votable = parse(filename, pedantic=False)
    results_array = image_cube_votable.get_table_by_id('results').array

    service = 'cutout_service' if do_cutouts else 'async_service'

    # 3) For each of the image cubes, query datalink to get the secure datalink details
    print ("\n\n** Retrieving datalink for each image and image cube...\n\n")
    authenticated_id_tokens = []
    for image_cube_result in results_array:
        image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
        async_url, authenticated_id_token = casda.get_service_link_and_id(image_cube_id, username,
                                                                          password,
                                                                          service=service,
                                                                          destination_dir=destination_dir)
        if authenticated_id_token is not None and len(authenticated_id_tokens) < 10:
            authenticated_id_tokens.append(authenticated_id_token)

    if len(authenticated_id_tokens) == 0:
        print ("No image cubes for scheduling_block_id " + str(sbid))
        return 1

    # Run the catalogue_query to find catalogue entries that are of interest
    if do_cutouts:
        print ("\n\n** Finding components in each image and image cube...\n\n")
        filename = destination_dir + "catalogue_query_" + str(sbid) + ".xml"
        casda.sync_tap_query(catalogue_query, filename, username, password)
        catalogue_vo_table = parse(filename, pedantic=False)
        catalogue_results_array = catalogue_vo_table.get_table_by_id('results').array
        print ("\n\n** Found %d components...\n\n" % (len(catalogue_results_array)))
        if len(catalogue_results_array) == 0:
            print ("No catalogue entries matching the criteria found for scheduling_block_id " + str(sbid))
            return 1


        # For each source found in the catalogue query, create a position filter
        pos_list = []
        for entry in catalogue_results_array:
            ra = entry['ra_deg_cont']
            dec = entry['dec_deg_cont']
            circle = "CIRCLE " + str(ra) + " " + str(dec) + " " + str(cutout_radius_degrees)
            pos_list.append(circle)

    # Generate cutouts from each image around each source.
    # Where there is no overlap, an error file is generated but can be ignored.
    job_location = casda.create_async_soda_job(authenticated_id_tokens)
    if do_cutouts:
        casda.add_params_to_async_job(job_location, 'pos', pos_list)
    job_status = casda.run_async_job(job_location)
    print('\nJob finished with status %s; job address is %s\n\n' % (job_status, job_location))
    if job_status != 'ERROR':
        casda.download_all(job_location, destination_dir)
    return 0
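
A minimal calling sketch for the function above. It assumes the snippet's surrounding module imports the CASDA helper module as casda and parse from astropy.io.votable; the credentials, scheduling block ID and catalogue TAP query are placeholders, and the table name in the query is an assumption based on the ra_deg_cont/dec_deg_cont columns read above.

# Hypothetical driver; every value below is a placeholder, not taken from the original script.
import casda                           # CASDA helper module assumed by the snippet above
from astropy.io.votable import parse   # used inside download_cutouts

sbid = 1234                            # placeholder scheduling block id
username = "user@example.com"          # placeholder OPAL credentials
password = "********"
destination_dir = "./cutouts/"         # keep the trailing slash: the snippet concatenates paths
# Assumed catalogue query; casda.continuum_component provides the ra_deg_cont/dec_deg_cont columns used above.
catalogue_query = "select * from casda.continuum_component where first_sbid = " \
                  + str(sbid) + " and flux_peak > 50"

exit_code = download_cutouts(sbid, username, password, destination_dir,
                             catalogue_query, do_cutouts=True, cutout_radius_degrees=0.1)
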
Example 2
def download_cutouts(sbid, username, password, destination_dir, num_channels, data_product_sub_type):
    print ("\n\n** Finding images and image cubes for scheduling block {} ... \n\n".format(sbid))

    sbid_multi_channel_query = "SELECT TOP 1000 * FROM ivoa.obscore where obs_id='" + str(sbid) \
                               + "' and dataproduct_subtype='" + str(data_product_sub_type) \
                               + "' and em_xel > 1 and dataproduct_type = 'cube'"

    # create async TAP query and wait for query to complete
    result_file_path = casda.async_tap_query(sbid_multi_channel_query, username, password, destination_dir)
    image_cube_votable = parse(result_file_path, pedantic=False)
    results_array = image_cube_votable.get_table_by_id('results').array

    # 3) For each of the image cubes, query datalink to get the secure datalink details
    print ("\n\n** Retrieving datalink for each image and image cube...\n\n")
    authenticated_id_tokens = []
    for image_cube_result in results_array:
        image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
        async_url, authenticated_id_token = casda.get_service_link_and_id(image_cube_id, username,
                                                                          password,
                                                                          service='cutout_service',
                                                                          destination_dir=destination_dir)
        if authenticated_id_token is not None:
            authenticated_id_tokens.append([authenticated_id_token, image_cube_result])

    if len(authenticated_id_tokens) == 0:
        print ("No image cubes for scheduling_block_id " + str(sbid))
        return 1

    # For each image cube, slice by channels using num_channels specified by the user.
    job_locations = []
    for entry in authenticated_id_tokens:
        auth_id_token = entry[0]

        # get the image cube and number of channels
        ic = entry[1]
        channel_count = ic['em_xel']
        channel_list = []

        # cap at the total channel count if the requested block size exceeds it
        if num_channels > channel_count:
            num_channels = channel_count

        # slice up cube into chunks using the number of channels as the size of each chunk
        slices = math.ceil(channel_count / num_channels)
        current_step = 1

        for s in range(slices):
            channel_list.append(str(current_step) + " " + str(num_channels * int(s+1)))
            current_step = (num_channels * int(s+1)) + 1

        # create a job for this cube's channel params
        job_location = casda.create_async_soda_job([auth_id_token])
        casda.add_params_to_async_job(job_location, 'CHANNEL', channel_list)
        job_locations.append(job_location)

    # run all jobs and download
    casda.run_async_jobs_and_download(job_locations, destination_dir)

    return 0
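
The CHANNEL ranges assembled in the loop above can be previewed without any CASDA calls. The sketch below (illustrative values only) reproduces the same arithmetic and shows that the upper bound of the last block can run past the actual channel count, which the original code does not clamp.

import math

def preview_channel_blocks(channel_count, num_channels):
    # Reproduces the channel_list built inside download_cutouts above.
    num_channels = min(num_channels, channel_count)
    channel_list = []
    current_step = 1
    for s in range(math.ceil(channel_count / num_channels)):
        channel_list.append(str(current_step) + " " + str(num_channels * (s + 1)))
        current_step = num_channels * (s + 1) + 1
    return channel_list

print(preview_channel_blocks(300, 64))
# ['1 64', '65 128', '129 192', '193 256', '257 320'] - the final range ends past channel 300
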
Example 3
def main():
    args = parseargs()
    password = casda.get_opal_password(args.opal_password, args.password_file)

    # Change this to choose which environment to use; prod is the default
    casda.use_at()

    start = time.time()
    if args.destination_directory is not None and not os.path.exists(
            args.destination_directory):
        os.makedirs(args.destination_directory)

    # Read cube dimensions
    cube_dim = get_dimensions(args.cubeid)
    # print("DIM=", cube_dim)

    # Generate random locations in the cutout
    pos_params, band_params = generate_random_cutouts(args, cube_dim)
    # print("POS=", pos_params)
    # print("BAND=", band_params)

    # Get access to the cube - sia call then datalink
    async_url, authenticated_id_token = casda.get_service_link_and_id(
        args.cubeid,
        args.opal_username,
        password,
        destination_dir=args.destination_directory)
    print(async_url, authenticated_id_token)

    # Create a job to retrieve the cutouts
    job_location = casda.create_async_soda_job([authenticated_id_token],
                                               soda_url=async_url)
    casda.add_params_to_async_job(job_location, 'POS', pos_params)
    casda.add_params_to_async_job(job_location, 'BAND', band_params)
    print('\n\n Job will have %d cutouts.\n\n' %
          (len(pos_params) * len(band_params)))

    # Run and time the job
    run_start = time.time()
    status = casda.run_async_job(job_location)
    run_end = time.time()
    print('Job finished with status %s in %.02f s\n\n' %
          (status, run_end - run_start))

    # Optionally download
    print("Job result available at ", casda.get_results_page(job_location))
    if args.download:
        casda.download_all(job_location, args.destination_directory)

    # Report
    end = time.time()
    print('#### Cutout processing completed at %s ####' %
          (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end))))
    print('Job was processed in %.02f s' % (run_end - run_start))
    print('Full run took %.02f s' % (end - start))
    return 0
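
parseargs, get_dimensions and generate_random_cutouts are helpers that are not shown in this example. The sketch below is a hypothetical stand-in for generate_random_cutouts, illustrating the shape of the POS and BAND parameter lists that main() passes to the SODA job; all names, bounds and counts are invented for illustration.

import random

def generate_random_cutouts_sketch(num_pos, num_band, ra_range, dec_range, wavelength_range_m):
    # Hypothetical stand-in: the real helper derives these bounds from the cube dimensions.
    pos_params = []
    for _ in range(num_pos):
        ra = random.uniform(*ra_range)
        dec = random.uniform(*dec_range)
        pos_params.append("CIRCLE %f %f 0.05" % (ra, dec))

    band_params = []
    for _ in range(num_band):
        wl_low, wl_high = sorted(random.uniform(*wavelength_range_m) for _ in range(2))
        band_params.append("%.6f %.6f" % (wl_low, wl_high))

    return pos_params, band_params

# e.g. 3 positions x 2 bands -> the "Job will have 6 cutouts" message printed by main()
pos_params, band_params = generate_random_cutouts_sketch(
    3, 2, (210.0, 212.0), (-35.0, -33.0), (0.20, 0.22))
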
Example 4
def produce_cutouts(source_list, image_id, username, password,
                    destination_dir):
    # Use CASDA VO (secure) to query for the image cube matching the given image_id.
    # Note: cutout_radius_degrees (used below) is assumed to be defined at module scope.
    print("\n\n** Retrieving image details for %s ... \n\n" % image_id)
    filename = destination_dir + str(image_id) + ".xml"
    data_product_id_query = "select * from ivoa.obscore where obs_publisher_did = '" + image_id + \
                            "' and dataproduct_type = 'cube'"
    casda.sync_tap_query(data_product_id_query,
                         filename,
                         username=username,
                         password=password)
    image_cube_votable = votable.parse(filename, pedantic=False)
    results_array = image_cube_votable.get_table_by_id('results').array

    # For each of the image cubes, query datalink to get the secure datalink details
    print("\n\n** Retrieving datalink for each image and image cube...\n\n")
    authenticated_id_tokens = []
    for image_cube_result in results_array:
        image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
        async_url, authenticated_id_token = casda.get_service_link_and_id(
            image_cube_id,
            username,
            password,
            service='cutout_service',
            destination_dir=destination_dir)
        if authenticated_id_token is not None:
            authenticated_id_tokens.append(authenticated_id_token)

    if len(authenticated_id_tokens) == 0:
        print("No image cubes found")
        return 1

    # Create the async job
    job_location = casda.create_async_soda_job(authenticated_id_tokens)

    # For each source position in source_list, add a position filter as a parameter to the async job
    cutout_filters = []
    for sky_loc in source_list:
        ra = sky_loc.ra.degree
        dec = sky_loc.dec.degree
        circle = "CIRCLE " + str(ra) + " " + str(dec) + " " + str(
            cutout_radius_degrees)
        cutout_filters.append(circle)
    casda.add_params_to_async_job(job_location, 'pos', cutout_filters)

    # Run the job
    status = casda.run_async_job(job_location)

    # Download all of the files, or alert if it didn't complete
    if status == 'COMPLETED':
        print("\n\n** Downloading results...\n\n")
        casda.download_all(job_location, destination_dir)
    else:
        print("Job did not complete: Status was %s." % status)
        return 1
    return 0
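
A minimal calling sketch for produce_cutouts above, assuming astropy is installed and that cutout_radius_degrees is defined at module scope as noted in the snippet; the image ID and credentials are placeholders.

from astropy.coordinates import SkyCoord
import astropy.units as u

cutout_radius_degrees = 0.1  # module-level value the snippet above relies on

source_list = [
    SkyCoord(ra=201.365 * u.deg, dec=-43.019 * u.deg),  # placeholder positions
    SkyCoord("13h25m27.6s", "-43d01m09s"),
]

produce_cutouts(source_list,
                "cube-12345",          # placeholder obs_publisher_did
                "user@example.com",    # placeholder OPAL credentials
                "********",
                "./cutouts/")          # keep the trailing slash: paths are concatenated
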
Example 5
def main():
    args = parseargs()
    password = casda.get_opal_password(args.opal_password, args.password_file)

    # Change this to choose which environment to use; prod is the default
    casda.use_at()

    start = time.time()
    if args.destination_directory is not None and not os.path.exists(args.destination_directory):
        os.makedirs(args.destination_directory)

    # Read cube dimensions
    cube_dim = get_dimensions(args.cubeid)
    # print("DIM=", cube_dim)

    # Generate random locations in the cutout
    pos_params, band_params = generate_random_cutouts(args, cube_dim)
    # print("POS=", pos_params)
    # print("BAND=", band_params)

    # Get access to the cube - sia call then datalink
    async_url, authenticated_id_token = casda.get_service_link_and_id(
        args.cubeid,
        args.opal_username,
        password,
        destination_dir=args.destination_directory)
    print(async_url, authenticated_id_token)

    # Create a job to retrieve the cutouts
    job_location = casda.create_async_soda_job([authenticated_id_token], soda_url=async_url)
    casda.add_params_to_async_job(job_location, 'POS', pos_params)
    casda.add_params_to_async_job(job_location, 'BAND', band_params)
    print('\n\n Job will have %d cutouts.\n\n' % (len(pos_params) * len(band_params)))

    # Run and time the job
    run_start = time.time()
    status = casda.run_async_job(job_location)
    run_end = time.time()
    print('Job finished with status %s in %.02f s\n\n' % (status, run_end - run_start))

    # Optionally download
    print ("Job result available at ", casda.get_results_page(job_location))
    if args.download:
        casda.download_all(job_location, args.destination_directory)

    # Report
    end = time.time()
    print('#### Cutout processing completed at %s ####'
          % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end))))
    print('Job was processed in %.02f s' % (run_end - run_start))
    print('Full run took %.02f s' % (end - start))
    return 0
Example 6
def produce_cutouts(source_list, image_id, username, password, destination_dir):
    # Use CASDA VO (secure) to query for the image cube matching the given image_id.
    # Note: cutout_radius_degrees (used below) is assumed to be defined at module scope.
    print("\n\n** Retrieving image details for %s ... \n\n" % image_id)
    filename = destination_dir + str(image_id) + ".xml"
    data_product_id_query = "select * from ivoa.obscore where obs_publisher_did = '" + image_id + \
                            "' and dataproduct_type = 'cube'"
    casda.sync_tap_query(data_product_id_query, filename, username=username, password=password)
    image_cube_votable = votable.parse(filename, pedantic=False)
    results_array = image_cube_votable.get_table_by_id('results').array

    # For each of the image cubes, query datalink to get the secure datalink details
    print ("\n\n** Retrieving datalink for each image and image cube...\n\n")
    authenticated_id_tokens = []
    for image_cube_result in results_array:
        image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
        async_url, authenticated_id_token = casda.get_service_link_and_id(image_cube_id, username,
                                                                          password,
                                                                          service='cutout_service',
                                                                          destination_dir=destination_dir)
        if authenticated_id_token is not None:
            authenticated_id_tokens.append(authenticated_id_token)

    if len(authenticated_id_tokens) == 0:
        print ("No image cubes found")
        return 1

    # Create the async job
    job_location = casda.create_async_soda_job(authenticated_id_tokens)

    # For each source position in source_list, add a position filter as a parameter to the async job
    cutout_filters = []
    for sky_loc in source_list:
        ra = sky_loc.ra.degree
        dec = sky_loc.dec.degree
        circle = "CIRCLE " + str(ra) + " " + str(dec) + " " + str(cutout_radius_degrees)
        cutout_filters.append(circle)
    casda.add_params_to_async_job(job_location, 'pos', cutout_filters)

    # Run the job
    status = casda.run_async_job(job_location)

    # Download all of the files, or alert if it didn't complete
    if status == 'COMPLETED':
        print ("\n\n** Downloading results...\n\n")
        casda.download_all(job_location, destination_dir)
    else:
        print ("Job did not complete: Status was %s." % status)
        return 1
    return 0
Example 7
def extract_spectra(source_list, cutout_radius_degrees, opal_username,
                    opal_password, destination_directory):
    """
    Extract spectra at the specified locations from ASKAP image cubes. Only cubes of subtype spectral.restored.3d will be
    used in the extraction. Cubes that are either released or in a project that the opal user has pre-release access to
    will be included.

    :param source_list:  The list of SkyCoord objects specifying source positions
    :param cutout_radius_degrees: The radius, in degrees, around each source position used both to find cubes and to extract spectra.
    :param opal_username: The user's username on the ATNF's online proposal system OPAL (normally an email address)
    :param opal_password: The user's OPAL password
    :param destination_directory: The directory where the resulting files will be stored
    :return: None
    """

    # Build query to produce list of cubes for the sources.
    pos_params = build_pos_criteria(source_list, cutout_radius_degrees)

    # Run an immediate sia2 job to get the list of target cubes
    votable = casda.find_images(pos_params, opal_username, opal_password)
    table = votable.get_first_table()
    authenticated_ids = []
    for row in table.array:
        # We are only interested in the restored spectral line cubes
        if row['dataproduct_subtype'].decode() == 'spectral.restored.3d':
            data_product_id = row['obs_publisher_did'].decode('utf-8')
            async_url, authenticated_id_token = casda.get_service_link_and_id(
                data_product_id,
                opal_username,
                opal_password,
                service='spectrum_generation_service',
                destination_dir=destination_directory)
            authenticated_ids.append(authenticated_id_token)

    if len(authenticated_ids) == 0:
        print(
            '\n\nNo image cubes were found which matched any of your sources.')
        return

    # Generate spectra at each location for each cube - no spectra file is generated where there is no overlap
    job_location = casda.create_async_soda_job(authenticated_ids)
    casda.add_params_to_async_job(job_location, 'pos', pos_params)
    job_status = casda.run_async_job(job_location)
    print('\nJob finished with status %s; job address is %s\n\n' %
          (job_status, job_location))
    if job_status != 'ERROR':
        casda.download_all(job_location, destination_directory)
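
build_pos_criteria is not included with this example. A plausible reconstruction, based on how pos_params is used here (as SIA2 POS values and as SODA 'pos' filters) and on the CIRCLE strings built inline in the other examples on this page, is sketched below; treat it as an assumption rather than the original helper.

def build_pos_criteria_sketch(source_list, cutout_radius_degrees):
    # Hypothetical reconstruction: one "CIRCLE ra dec radius" string per SkyCoord.
    pos_params = []
    for sky_loc in source_list:
        pos_params.append("CIRCLE %f %f %f" % (sky_loc.ra.degree,
                                               sky_loc.dec.degree,
                                               cutout_radius_degrees))
    return pos_params
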
Example 8
def extract_spectra(source_list, cutout_radius_degrees, opal_username, opal_password, destination_directory):
    """
    Extract spectra at the specified locations from ASKAP image cubes. Only cubes of subtype spectral.restored.3d will be
    used in the extraction. Cubes that are either released or in a project that the opal user has pre-release access to
    will be included.

    :param source_list:  The list of SkyCoord objects specifying source positions
    :param cutout_radius_degrees: The radius, in degrees, around each source position used both to find cubes and to extract spectra.
    :param opal_username: The user's username on the ATNF's online proposal system OPAL (normally an email address)
    :param opal_password: The user's OPAL password
    :param destination_directory: The directory where the resulting files will be stored
    :return: None
    """

    # Build query to produce list of cubes for the sources.
    pos_params = build_pos_criteria(source_list, cutout_radius_degrees)

    # Run an immediate sia2 job to get the list of target cubes
    votable = casda.find_images(pos_params, opal_username, opal_password)
    table = votable.get_first_table()
    authenticated_ids = []
    for row in table.array:
        # We are only interested in the restored spectral line cubes
        if row['dataproduct_subtype'].decode() == 'spectral.restored.3d':
            data_product_id = row['obs_publisher_did'].decode('utf-8')
            async_url, authenticated_id_token = casda.get_service_link_and_id(data_product_id, opal_username,
                                                                              opal_password,
                                                                              service='spectrum_generation_service',
                                                                              destination_dir=destination_directory)
            authenticated_ids.append(authenticated_id_token)

    if len(authenticated_ids) == 0:
        print('\n\nNo image cubes were found which matched any of your sources.')
        return

    # Generate spectra at each location for each cube - no spectra file is generated where there is no overlap
    job_location = casda.create_async_soda_job(authenticated_ids)
    casda.add_params_to_async_job(job_location, 'pos', pos_params)
    job_status = casda.run_async_job(job_location)
    print('\nJob finished with status %s; job address is %s\n\n' % (job_status, job_location))
    if job_status != 'ERROR':
        casda.download_all(job_location, destination_directory)
Example 9
def download_cutouts(sbid,
                     username,
                     password,
                     destination_dir,
                     catalogue_query,
                     do_cutouts,
                     cutout_radius_degrees=0.1):
    # 2) Use CASDA VO (secure) to query for the images associated with the given scheduling_block_id
    print(
        "\n\n** Finding images and image cubes for scheduling block {} ... \n\n"
        .format(sbid))
    data_product_id_query = "select * from ivoa.obscore where obs_id = '" + str(
        sbid
    ) + "' and dataproduct_type = 'cube' and dataproduct_subtype in ('cont.restored.t0', 'spectral.restored.3d')"
    filename = destination_dir + "image_cubes_" + str(sbid) + ".xml"
    casda.sync_tap_query(data_product_id_query, filename, username, password)
    image_cube_votable = parse(filename, pedantic=False)
    results_array = image_cube_votable.get_table_by_id('results').array

    service = 'cutout_service' if do_cutouts else 'async_service'

    # 3) For each of the image cubes, query datalink to get the secure datalink details
    print("\n\n** Retrieving datalink for each image and image cube...\n\n")
    authenticated_id_tokens = []
    for image_cube_result in results_array:
        image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
        async_url, authenticated_id_token = casda.get_service_link_and_id(
            image_cube_id,
            username,
            password,
            service=service,
            destination_dir=destination_dir)
        if authenticated_id_token is not None and len(
                authenticated_id_tokens) < 10:
            authenticated_id_tokens.append(authenticated_id_token)

    if len(authenticated_id_tokens) == 0:
        print("No image cubes for scheduling_block_id " + str(sbid))
        return 1

    # Run the catalogue_query to find catalogue entries that are of interest
    if do_cutouts:
        print("\n\n** Finding components in each image and image cube...\n\n")
        filename = destination_dir + "catalogue_query_" + str(sbid) + ".xml"
        casda.sync_tap_query(catalogue_query, filename, username, password)
        catalogue_vo_table = parse(filename, pedantic=False)
        catalogue_results_array = catalogue_vo_table.get_table_by_id(
            'results').array
        print("\n\n** Found %d components...\n\n" %
              (len(catalogue_results_array)))
        if len(catalogue_results_array) == 0:
            print(
                "No catalogue entries matching the criteria found for scheduling_block_id "
                + str(sbid))
            return 1

        # For each source found in the catalogue query, create a position filter
        pos_list = []
        for entry in catalogue_results_array:
            ra = entry['ra_deg_cont']
            dec = entry['dec_deg_cont']
            circle = "CIRCLE " + str(ra) + " " + str(dec) + " " + str(
                cutout_radius_degrees)
            pos_list.append(circle)

    # Generate cutouts from each image around each source.
    # Where there is no overlap, an error file is generated but can be ignored.
    job_location = casda.create_async_soda_job(authenticated_id_tokens)
    if do_cutouts:
        casda.add_params_to_async_job(job_location, 'pos', pos_list)
    job_status = casda.run_async_job(job_location)
    print('\nJob finished with status %s; job address is %s\n\n' %
          (job_status, job_location))
    if job_status != 'ERROR':
        casda.download_all(job_location, destination_dir)
    return 0
Example 10
def produce_cutouts(source_list, proj, username, password, destination_dir,
                    cutout_radius_degrees):
    # Use CASDA VO (secure) to query for the images in the given project that cover each source position
    print("\n\n** Retrieving image details for %s ... \n\n" % proj)
    filename = destination_dir + str(proj) + ".xml"
    # Initial filter of images: continuum restored Stokes I images (each ASKAP image covers ~30 sq deg) whose footprint contains the source position.
    src_num = 0
    returnflag = 0  # overall return status, updated per source below
    for sky_loc in source_list:
        src_num = src_num + 1
        ra = sky_loc.ra.degree
        dec = sky_loc.dec.degree
        data_product_id_query = "select * from ivoa.obscore where obs_collection LIKE '%" + proj + \
                            "%' and dataproduct_subtype = 'cont.restored.t0' and pol_states = '/I/' and 1 = CONTAINS(POINT('ICRS',"+ str(ra) + ","+ str(dec) + "),s_region)"
        casda.sync_tap_query(data_product_id_query,
                             filename,
                             username=username,
                             password=password)
        image_cube_votable = votable.parse(filename, pedantic=False)
        results_array = image_cube_votable.get_table_by_id('results').array

        # For each of the image cubes, query datalink to get the secure datalink details
        print(
            "\n\n** Retrieving datalink for each image containing source number "
            + str(src_num) + " ...\n\n")
        authenticated_id_tokens = []
        for image_cube_result in results_array:
            image_cube_id = image_cube_result['obs_publisher_did'].decode(
                'utf-8')
            async_url, authenticated_id_token = casda.get_service_link_and_id(
                image_cube_id,
                username,
                password,
                service='cutout_service',
                destination_dir=destination_dir)
            if authenticated_id_token is not None:
                authenticated_id_tokens.append(authenticated_id_token)

        if len(authenticated_id_tokens) == 0:
            print("No image cubes found")
            return 1

        # Create the async job
        job_location = casda.create_async_soda_job(authenticated_id_tokens)

        # Add the position filter for this source as a parameter to the async job
        cutout_filters = []
        circle = "CIRCLE " + str(ra) + " " + str(dec) + " " + str(
            cutout_radius_degrees)
        cutout_filters.append(circle)
        casda.add_params_to_async_job(job_location, 'pos', cutout_filters)

        # Run the job
        status = casda.run_async_job(job_location)

        # Download all of the files, or alert if it didn't complete
        if status == 'COMPLETED':
            print("\n\n** Downloading results...\n\n")
            casda.download_all(job_location, destination_dir)
            returnflag = 0
        else:
            print("Job did not complete: Status was %s." % status)
            returnflag = 1

    return returnflag
Example 11
def download_cutouts(sbid, username, password, destination_dir, num_channels, data_product_sub_type):
    print ("\n\n** Finding images and image cubes for scheduling block {} ... \n\n".format(sbid))

    sbid_multi_channel_query = "SELECT TOP 1000 * FROM ivoa.obscore where obs_id='" + str(sbid) \
                               + "' and dataproduct_subtype='" + str(data_product_sub_type) \
                               + "' and em_xel > 1 and dataproduct_type = 'cube'"

    # create async TAP query and wait for query to complete
    result_file_path = casda.async_tap_query(sbid_multi_channel_query, username, password, destination_dir)
    image_cube_votable = parse(result_file_path, pedantic=False)
    results_array = image_cube_votable.get_table_by_id('results').array

    # 3) For each of the image cubes, query datalink to get the secure datalink details
    print ("\n\n** Retrieving datalink for each image and image cube...\n\n")
    authenticated_id_tokens = []
    for image_cube_result in results_array:
        image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
        async_url, authenticated_id_token = casda.get_service_link_and_id(image_cube_id, username,
                                                                          password,
                                                                          service='cutout_service',
                                                                          destination_dir=destination_dir)
        if authenticated_id_token is not None:
            authenticated_id_tokens.append([authenticated_id_token, image_cube_result])

    if len(authenticated_id_tokens) == 0:
        print ("No image cubes for scheduling_block_id " + str(sbid))
        return 1

    # For each image cube, slice by channels using num_channels specified by the user.
    job_locations = []
    for entry in authenticated_id_tokens:
        auth_id_token = entry[0]
        ic = entry[1]
        band_list = []  # bands for this cube only, so each job requests just its own spectral range

        em_xel = ic['em_xel']
        em_min = ic['em_min'] * u.m
        em_max = ic['em_max'] * u.m

        min_freq = em_max.to(u.Hz, equivalencies=u.spectral())
        max_freq = em_min.to(u.Hz, equivalencies=u.spectral())

        step_size = num_channels
        if step_size > em_xel:
            step_size = em_xel

        hz_per_channel = (max_freq - min_freq) / em_xel
        pos = em_xel

        channel_blocks = math.ceil(em_xel / num_channels)

        for b in range(int(channel_blocks)):
            f1 = get_freq_at_pos(pos, min_freq, hz_per_channel)
            pos -= step_size
            f2 = get_freq_at_pos(pos, min_freq, hz_per_channel)
            pos -= 1  # step past the last channel used so adjacent bands do not overlap
            wavelength1 = f1.to(u.m, equivalencies=u.spectral())
            wavelength2 = f2.to(u.m, equivalencies=u.spectral())
            band = str(wavelength1.value) + " " + str(wavelength2.value)
            band_list.append(band)

        # create job for given band params
        job_location = casda.create_async_soda_job([auth_id_token])
        casda.add_params_to_async_job(job_location, 'BAND', band_list)
        job_locations.append(job_location)

    # run all jobs and download
    casda.run_async_jobs_and_download(job_locations, destination_dir)

    return 0
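
get_freq_at_pos is not shown with this example. Since pos counts channels from the top of the cube and hz_per_channel is the per-channel frequency width, a plausible reconstruction is the linear interpolation below; it is an assumption, not the original helper.

def get_freq_at_pos_sketch(pos, min_freq, hz_per_channel):
    # Hypothetical reconstruction: frequency `pos` channels above the cube's
    # low-frequency edge (min_freq and hz_per_channel are astropy Quantities in Hz).
    return min_freq + pos * hz_per_channel
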