Code example #1
    def get_basic_config(evfile, scfile, ra, dec, emin=100.0, emax=100000.0, zmax=100.0, evclass=128, evtype=3,
                         filter='DATA_QUAL>0 && LAT_CONFIG==1'):

        from fermipy.config import ConfigManager

        # Get default config from fermipy
        basic_config = ConfigManager.load(get_path_of_data_file("fermipy_basic_config.yml"))  # type: dict

        evfile = sanitize_filename(evfile)
        scfile = sanitize_filename(scfile)

        assert os.path.exists(evfile), "The provided evfile %s does not exist" % evfile
        assert os.path.exists(scfile), "The provided scfile %s does not exist" % scfile

        basic_config['data']['evfile'] = evfile
        basic_config['data']['scfile'] = scfile

        ra = float(ra)
        dec = float(dec)

        assert 0 <= ra <= 360, "The provided R.A. (%s) is not valid. Should be 0 <= ra <= 360.0" % ra
        assert -90 <= dec <= 90, "The provided Dec (%s) is not valid. Should be -90 <= dec <= 90.0" % dec

        basic_config['selection']['ra'] = ra
        basic_config['selection']['dec'] = dec

        emin = float(emin)
        emax = float(emax)

        basic_config['selection']['emin'] = emin
        basic_config['selection']['emax'] = emax

        zmax = float(zmax)
        assert 0.0 <= zmax <= 180.0, "The provided Zenith angle cut (zmax = %s) is not valid. " \
                                     "Should be 0 <= zmax <= 180.0" % zmax

        basic_config['selection']['zmax'] = zmax

        evclass = int(evclass)
        assert is_power_of_2(evclass), "The provided evclass is not a power of 2."

        basic_config['selection']['evclass'] = evclass

        evtype = int(evtype)

        basic_config['selection']['evtype'] = evtype

        basic_config['selection']['filter'] = filter

        return DictWithPrettyPrint(basic_config)
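
The returned object behaves like a plain dict, so individual selections can still be tweaked after the call. A minimal usage sketch, assuming the FT1/FT2 file names below are placeholders for real files on disk:

    # Hypothetical event and spacecraft files; substitute your own.
    config = get_basic_config("FT1.fits", "FT2.fits", ra=120.7, dec=-2.9,
                              emin=100.0, emax=10000.0, zmax=105.0)

    # DictWithPrettyPrint is still a dict, so entries can be overridden.
    config["selection"]["evclass"] = 128
    print(config)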
Code example #2
    def _get_errors(self):

        # Re-implement this in order to use MINOS

        errors = DictWithPrettyPrint()

        for i, par_name in enumerate(self.parameters):

            err_low = ROOT.Double(0)
            err_up = ROOT.Double(0)

            self.minimizer.GetMinosError(i, err_low, err_up)

            errors[par_name] = (err_low, err_up)

        return errors
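
A side note on this snippet: ROOT.Double was removed in ROOT 6.22 and later, where pass-by-reference doubles are handled with ctypes instead. A hedged sketch of the same loop body under a modern PyROOT, assuming the minimizer is a ROOT::Math::Minimizer:

    import ctypes

    err_low = ctypes.c_double(0.0)
    err_up = ctypes.c_double(0.0)

    # GetMinosError fills the two C doubles in place
    self.minimizer.GetMinosError(i, err_low, err_up)

    errors[par_name] = (err_low.value, err_up.value)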
Code example #3
File: Fermi.py  Project: BjoernBiltzinger/threeML-1
    def get_detector_information(self):
        """
        Return the detectors used for spectral analysis as well as their background
        intervals. Peak flux and fluence intervals are also returned as well as best fit models

        :return: detector information dictionary
        """

        assert (
            self._last_query_results is not None
        ), "You have to run a query before getting detector information"

        # Loop over the table and build a source for each entry
        sources = {}

        for name, row in self._last_query_results.T.items():
            # First we want to get the detectors used in the SCAT file

            idx = np.array(list(map(int, row["scat_detector_mask"])),
                           dtype=bool)
            detector_selection = self._gbm_detector_lookup[idx]

            # get the location

            ra = row["ra"]
            dec = row["dec"]

            # Now we want to know the background intervals

            lo_start = row["back_interval_low_start"]
            lo_stop = row["back_interval_low_stop"]
            hi_start = row["back_interval_high_start"]
            hi_stop = row["back_interval_high_stop"]

            # the GBM plugin accepts these as strings

            pre_bkg = "%f-%f" % (lo_start, lo_stop)
            post_bkg = "%f-%f" % (hi_start, hi_stop)
            full_bkg = "%s,%s" % (pre_bkg, post_bkg)

            background_dict = {
                "pre": pre_bkg,
                "post": post_bkg,
                "full": full_bkg
            }

            # now we want the fluence interval and peak flux intervals

            # first the fluence

            start_flu = row["t90_start"]
            stop_flu = row["t90_start"] + row["t90"]

            interval_fluence = "%f-%f" % (start_flu, stop_flu)

            # peak flux

            start_pk = row["pflx_spectrum_start"]
            stop_pk = row["pflx_spectrum_stop"]

            interval_pk = "%f-%f" % (start_pk, stop_pk)

            # build the dictionary
            spectrum_dict = {"fluence": interval_fluence, "peak": interval_pk}

            trigger = row["trigger_name"]

            # get the best fit model in the fluence and peak intervals

            best_fit_peak = row["pflx_best_fitting_model"].split("_")[-1]

            best_fit_fluence = row["flnc_best_fitting_model"].split("_")[-1]

            best_dict = {"fluence": best_fit_fluence, "peak": best_fit_peak}

            sources[name] = {
                "source": spectrum_dict,
                "background": background_dict,
                "trigger": trigger,
                "detectors": detector_selection,
                "best fit model": best_dict,
                "ra": ra,
                "dec": dec,
            }

        return DictWithPrettyPrint(sources)
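
The returned dictionary is keyed by GRB name, and the interval strings are already in the format the GBM plugin expects. A short sketch of how the result might be consumed, assuming a burst-catalog object named gbm_catalog on which a query has already been run (the GRB key is only illustrative):

    info = gbm_catalog.get_detector_information()

    grb = info["GRB080916009"]          # hypothetical key
    print(grb["detectors"])             # detectors used in the SCAT file
    print(grb["background"]["full"])    # "pre,post" background selection string
    print(grb["source"]["fluence"])     # "start-stop" fluence interval
    print(grb["best fit model"]["peak"])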
Code example #4
    def get_basic_config(
        evfile,
        scfile,
        ra,
        dec,
        emin=100.0,
        emax=100000.0,
        zmax=100.0,
        evclass=128,
        evtype=3,
        filter="DATA_QUAL>0 && LAT_CONFIG==1",
        fermipy_verbosity=2,
        fermitools_chatter=2,
    ):

        from fermipy.config import ConfigManager

        # Get default config from fermipy
        basic_config = ConfigManager.load(
            get_path_of_data_file("fermipy_basic_config.yml"))  # type: dict

        evfile = str(sanitize_filename(evfile))
        scfile = str(sanitize_filename(scfile))

        if not os.path.exists(evfile):
            log.critical("The provided evfile %s does not exist" % evfile)
        if not os.path.exists(scfile):
            log.critical("The provided scfile %s does not exist" % scfile)

        basic_config["data"]["evfile"] = evfile
        basic_config["data"]["scfile"] = scfile

        ra = float(ra)
        dec = float(dec)

        if not ((0 <= ra) and (ra <= 360)):
            log.critical(
                "The provided R.A. (%s) is not valid. Should be 0 <= ra <= 360.0"
                % ra)
        if not ((-90 <= dec) and (dec <= 90)):
            log.critical(
                "The provided Dec (%s) is not valid. Should be -90 <= dec <= 90.0"
                % dec)

        basic_config["selection"]["ra"] = ra
        basic_config["selection"]["dec"] = dec

        emin = float(emin)
        emax = float(emax)

        basic_config["selection"]["emin"] = emin
        basic_config["selection"]["emax"] = emax

        zmax = float(zmax)
        if not ((0.0 <= zmax) and (zmax <= 180.0)):
            log.critical(
                "The provided Zenith angle cut (zmax = %s) is not valid. "
                "Should be 0 <= zmax <= 180.0" % zmax)

        basic_config["selection"]["zmax"] = zmax

        with fits.open(scfile) as ft2_:
            tmin = float(ft2_[0].header["TSTART"])
            tmax = float(ft2_[0].header["TSTOP"])

        basic_config["selection"]["tmin"] = tmin
        basic_config["selection"]["tmax"] = tmax

        evclass = int(evclass)
        if not is_power_of_2(evclass):
            log.critical("The provided evclass is not a power of 2.")

        basic_config["selection"]["evclass"] = evclass

        evtype = int(evtype)

        basic_config["selection"]["evtype"] = evtype

        basic_config["selection"]["filter"] = filter

        basic_config["logging"]["verbosity"] = fermipy_verbosity
        #(In fermipy convention, 0 = critical only, 1 also errors, 2 also warnings, 3 also info, 4 also debug)
        basic_config["logging"][
            "chatter"] = fermitools_chatter  #0 = no screen output. 2 = some output, 4 = lot of output.

        return DictWithPrettyPrint(basic_config)
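
This newer variant also fills tmin/tmax from the FT2 header and wires the fermipy/Fermitools verbosity through, so the returned dictionary is essentially a complete fermipy configuration. A hedged sketch of handing it over to fermipy; writing the dict to YAML and loading it by path is the conservative route:

    import yaml
    from fermipy.gtanalysis import GTAnalysis

    # Hypothetical FT1/FT2 files
    config = get_basic_config("FT1.fits", "FT2.fits", ra=83.63, dec=22.01)

    with open("fermipy_config.yaml", "w") as f:
        yaml.dump(dict(config), f)

    gta = GTAnalysis("fermipy_config.yaml")
    gta.setup()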
Code example #5
File: download_GBM_data.py  Project: giacomov/3ML
def download_GBM_trigger_data(trigger_name, detectors=None, destination_directory='.', compress_tte=True):
    """
    Download the latest GBM TTE and RSP files from the HEASARC server. Will get the
    latest file version and prefer RSP2s over RSPs. If the files already exist in your destination
    directory, they will be skipped in the download process. The output dictionary can be used
    as input to the FermiGBMTTELike class.

    example usage: download_GBM_trigger_data('080916009', detectors=['n0','na','b0'], destination_directory='.')

    :param trigger_name: trigger number (str) e.g. '080916009' or 'bn080916009' or 'GRB080916009'
    :param detectors: list of detectors, default is all detectors
    :param destination_directory: download directory
    :param compress_tte: compress the TTE files via gzip (default True)
    :return: a dictionary with information about the download
    """

    # Let's doctor up the input just in case the user tried something strange

    sanitized_trigger_name_ = _validate_fermi_trigger_name(trigger_name)

    # create output directory if it does not exist
    destination_directory = sanitize_filename(destination_directory, abspath=True)

    if_directory_not_existing_then_make(destination_directory)

    # Sanitize detector list (if any)
    if detectors is not None:

        for det in detectors:

            assert det in _detector_list, "Detector %s in the provided list is not a valid detector. " \
                                          "Valid choices are: %s" % (det, _detector_list)

    else:

        detectors = list(_detector_list)

    # Open heasarc web page

    url = threeML_config['gbm']['public HTTP location']
    year = '20%s' % sanitized_trigger_name_[:2]
    directory = '/triggers/%s/bn%s/current' % (year, sanitized_trigger_name_)

    heasarc_web_page_url = '%s/%s' % (url, directory)

    try:

        downloader = ApacheDirectory(heasarc_web_page_url)

    except RemoteDirectoryNotFound:

        raise TriggerDoesNotExist("Trigger %s does not exist at %s" % (sanitized_trigger_name_, heasarc_web_page_url))

    # Now select the files we want to download, then we will download them later
    # We do it in two steps because we want to be able to choose what to download once we
    # have the complete picture

    # Get the list of remote files
    remote_file_list = downloader.files

    # This is the dictionary to keep track of the classification
    remote_files_info = DictWithPrettyPrint([(det, {}) for det in detectors])

    # Classify the files detector by detector

    for this_file in remote_file_list:

        # this_file is something like glg_tte_n9_bn100101988_v00.fit
        tokens = this_file.split("_")

        if len(tokens) != 5:

            # Not a data file

            continue

        else:

            # The "map" is necessary to transform the tokens to normal string (instead of unicode),
            # because u"b0" != "b0" as a key for a dictionary

            _, file_type, detname, _, version_ext = map(str, tokens)

        version, ext = version_ext.split(".")

        # We do not care here about the other files (tcat, bcat and so on),
        # nor about files which pertain to other detectors

        if file_type not in ['cspec', 'tte'] or ext not in ['rsp','rsp2','pha','fit'] or detname not in detectors:

            continue

        # cspec files can be rsp, rsp2 or pha files. Classify them

        if file_type == 'cspec':

            if ext == 'rsp':

                remote_files_info[detname]['rsp'] = this_file

            elif ext == 'rsp2':

                remote_files_info[detname]['rsp2'] = this_file

            elif ext == 'pha':

                remote_files_info[detname]['cspec'] = this_file

            else:

                raise RuntimeError("Should never get here")

        else:

            remote_files_info[detname][file_type] = this_file

    # Now download the files

    download_info = DictWithPrettyPrint([(det, DictWithPrettyPrint()) for det in detectors])

    for detector in remote_files_info.keys():

        remote_detector_info = remote_files_info[detector]
        local_detector_info = download_info[detector]

        # Get CSPEC file
        local_detector_info['cspec'] = downloader.download(remote_detector_info['cspec'], destination_directory,
                                                           progress=True)

        # Get the RSP2 file if it exists, otherwise get the RSP file
        if 'rsp2' in remote_detector_info:

            local_detector_info['rsp'] = downloader.download(remote_detector_info['rsp2'], destination_directory,
                                                              progress=True)

        else:

            local_detector_info['rsp'] = downloader.download(remote_detector_info['rsp'], destination_directory,
                                                             progress=True)

        # Get TTE file (compressing it if requested)
        local_detector_info['tte'] = downloader.download(remote_detector_info['tte'], destination_directory,
                                                         progress=True, compress=compress_tte)

    return download_info
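
The returned structure is one nested dictionary per detector, each with 'cspec', 'rsp' and 'tte' entries pointing at the downloaded local files. A small sketch of how it is typically inspected (trigger and detector names are only illustrative):

    dl = download_GBM_trigger_data('080916009',
                                   detectors=['n0', 'na', 'b0'],
                                   destination_directory='.')

    # Per-detector local paths, ready to be passed on to FermiGBMTTELike
    print(dl['n0']['tte'])    # TTE event file (gzipped if compress_tte=True)
    print(dl['n0']['rsp'])    # RSP2 if available, otherwise RSP
    print(dl['n0']['cspec'])  # CSPEC PHA file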
Code example #6
    def get_detector_information(self):
        """
        Return the detectors used for spectral analysis as well as their background
        intervals. Peak flux and fluence intervals are also returned as well as best fit models

        :return: detector information dictionary
        """

        assert self._last_query_results is not None, "You have to run a query before getting detector information"

        # Loop over the table and build a source for each entry
        sources = {}

        for name, row in self._last_query_results.T.items():
            # First we want to get the detectors used in the SCAT file

            idx = np.array(list(map(int, row['scat_detector_mask'])), dtype=bool)
            detector_selection = self._gbm_detector_lookup[idx]

            # get the location

            ra = row['ra']
            dec = row['dec']

            # Now we want to know the background intervals

            lo_start = row['back_interval_low_start']
            lo_stop = row['back_interval_low_stop']
            hi_start = row['back_interval_high_start']
            hi_stop = row['back_interval_high_stop']

            # the GBM plugin accepts these as strings

            pre_bkg = "%f-%f" % (lo_start, lo_stop)
            post_bkg = "%f-%f" % (hi_start, hi_stop)
            full_bkg = "%s,%s" % (pre_bkg, post_bkg)

            background_dict = {
                'pre': pre_bkg,
                'post': post_bkg,
                'full': full_bkg
            }

            # now we want the fluence interval and peak flux intervals

            # first the fluence

            start_flu = row['t90_start']
            stop_flu = row['t90_start'] + row['t90']

            interval_fluence = "%f-%f" % (start_flu, stop_flu)

            # peak flux

            start_pk = row['pflx_spectrum_start']
            stop_pk = row['pflx_spectrum_stop']

            interval_pk = "%f-%f" % (start_pk, stop_pk)

            # build the dictionary
            spectrum_dict = {'fluence': interval_fluence, 'peak': interval_pk}

            trigger = row['trigger_name']

            # get the best fit model in the fluence and peak intervals

            best_fit_peak = row['pflx_best_fitting_model'].split('_')[-1]

            best_fit_fluence = row['flnc_best_fitting_model'].split('_')[-1]

            best_dict = {'fluence': best_fit_fluence, 'peak': best_fit_peak}

            sources[name] = {
                'source': spectrum_dict,
                'background': background_dict,
                'trigger': trigger,
                'detectors': detector_selection,
                'best fit model': best_dict,
                'ra': ra,
                'dec': dec
            }

        return DictWithPrettyPrint(sources)
Code example #7
File: download_LLE_data.py  Project: Husky22/threeML
def download_LLE_trigger_data(trigger_name, destination_directory='.'):
    """
    Download the latest Fermi LAT LLE and RSP files from the HEASARC server. Will get the
    latest file versions. If the files already exist in your destination
    directory, they will be skipped in the download process. The output dictionary can be used
    as input to the FermiLATLLELike class.

    example usage: download_LLE_trigger_data('080916009', destination_directory='.')

    :param trigger_name: trigger number (str) with no leading letter e.g. '080916009'
    :param destination_directory: download directory
    :return: a dictionary with information about the download
    """

    sanitized_trigger_name_ = _validate_fermi_trigger_name(trigger_name)

    # create output directory if it does not exist
    destination_directory = sanitize_filename(destination_directory,
                                              abspath=True)
    if_directory_not_existing_then_make(destination_directory)

    # Figure out the directory on the server
    url = threeML_config['LAT']['public HTTP location']

    year = '20%s' % sanitized_trigger_name_[:2]
    directory = 'triggers/%s/bn%s/current' % (year, sanitized_trigger_name_)

    heasarc_web_page_url = '%s/%s' % (url, directory)

    try:

        downloader = ApacheDirectory(heasarc_web_page_url)

    except RemoteDirectoryNotFound:

        raise TriggerDoesNotExist(
            "Trigger %s does not exist at %s" %
            (sanitized_trigger_name_, heasarc_web_page_url))

    # Download only the lle, pt, cspec and rsp file (i.e., do not get all the png, pdf and so on)
    pattern = r'gll_(lle|pt|cspec)_bn.+\.(fit|rsp|pha)'

    destination_directory_sanitized = sanitize_filename(destination_directory)

    downloaded_files = downloader.download_all_files(
        destination_directory_sanitized, progress=True, pattern=pattern)

    # Put the files in a structured dictionary

    download_info = DictWithPrettyPrint()

    for download in downloaded_files:

        file_type = _file_type_match.match(os.path.basename(download)).group(1)

        if file_type == 'cspec':

            # a cspec file can be 2 things: a CSPEC spectral set (with .pha) extension,
            # or a response matrix (with a .rsp extension)

            ext = os.path.splitext(os.path.basename(download))[1]

            if ext == '.rsp':

                file_type = 'rsp'

            elif ext == '.pha':

                file_type = 'cspec'

            else:

                raise RuntimeError("Should never get here")

        # The pt file is really an ft2 file

        if file_type == 'pt':

            file_type = 'ft2'

        download_info[file_type] = download

    return download_info
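
Here the result is a flat dictionary with one entry per file type, and the 'pt' spacecraft file is renamed to 'ft2'. A short usage sketch (the trigger name is only illustrative):

    dl = download_LLE_trigger_data('080916009', destination_directory='.')

    # Expected keys, following the classification above
    for key in ('lle', 'cspec', 'rsp', 'ft2'):
        print(key, dl.get(key))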
Code example #8
def download_GBM_trigger_data(trigger_name,
                              detectors=None,
                              destination_directory='.',
                              compress_tte=True):
    """
    Download the latest GBM TTE and RSP files from the HEASARC server. Will get the
    latest file version and prefer RSP2s over RSPs. If the files already exist in your destination
    directory, they will be skipped in the download process. The output dictionary can be used
    as input to the FermiGBMTTELike class.

    example usage: download_GBM_trigger_data('080916009', detectors=['n0','na','b0'], destination_directory='.')

    :param trigger_name: trigger number (str) e.g. '080916009' or 'bn080916009' or 'GRB080916009'
    :param detectors: list of detectors, default is all detectors
    :param destination_directory: download directory
    :param compress_tte: compress the TTE files via gzip (default True)
    :return: a dictionary with information about the download
    """

    # Let's doctor up the input just in case the user tried something strange

    sanitized_trigger_name_ = _validate_fermi_trigger_name(trigger_name)

    # create output directory if it does not exist
    destination_directory = sanitize_filename(destination_directory,
                                              abspath=True)

    if_directory_not_existing_then_make(destination_directory)

    # Sanitize detector list (if any)
    if detectors is not None:

        for det in detectors:

            assert det in _detector_list, "Detector %s in the provided list is not a valid detector. " \
                                          "Valid choices are: %s" % (det, _detector_list)

    else:

        detectors = list(_detector_list)

    # Open heasarc web page

    url = threeML_config['gbm']['public HTTP location']
    year = '20%s' % sanitized_trigger_name_[:2]
    directory = '/triggers/%s/bn%s/current' % (year, sanitized_trigger_name_)

    heasarc_web_page_url = '%s/%s' % (url, directory)

    try:

        downloader = ApacheDirectory(heasarc_web_page_url)

    except RemoteDirectoryNotFound:

        raise TriggerDoesNotExist(
            "Trigger %s does not exist at %s" %
            (sanitized_trigger_name_, heasarc_web_page_url))

    # Now select the files we want to download, then we will download them later
    # We do it in two steps because we want to be able to choose what to download once we
    # have the complete picture

    # Get the list of remote files
    remote_file_list = downloader.files

    # This is the dictionary to keep track of the classification
    remote_files_info = DictWithPrettyPrint([(det, {}) for det in detectors])

    # Classify the files detector by detector

    for this_file in remote_file_list:

        # this_file is something like glg_tte_n9_bn100101988_v00.fit
        tokens = this_file.split("_")

        if len(tokens) != 5:

            # Not a data file

            continue

        else:

            # The "map" is necessary to transform the tokens to normal string (instead of unicode),
            # because u"b0" != "b0" as a key for a dictionary

            _, file_type, detname, _, version_ext = list(map(str, tokens))

        version, ext = version_ext.split(".")

        # We do not care here about the other files (tcat, bcat and so on),
        # nor about files which pertain to other detectors

        if file_type not in ['cspec', 'tte'] or ext not in [
                'rsp', 'rsp2', 'pha', 'fit'
        ] or detname not in detectors:

            continue

        # cspec files can be rsp, rsp2 or pha files. Classify them

        if file_type == 'cspec':

            if ext == 'rsp':

                remote_files_info[detname]['rsp'] = this_file

            elif ext == 'rsp2':

                remote_files_info[detname]['rsp2'] = this_file

            elif ext == 'pha':

                remote_files_info[detname]['cspec'] = this_file

            else:

                raise RuntimeError("Should never get here")

        else:

            remote_files_info[detname][file_type] = this_file

    # Now download the files

    download_info = DictWithPrettyPrint([(det, DictWithPrettyPrint())
                                         for det in detectors])

    for detector in list(remote_files_info.keys()):

        remote_detector_info = remote_files_info[detector]
        local_detector_info = download_info[detector]

        # Get CSPEC file
        local_detector_info['cspec'] = downloader.download(
            remote_detector_info['cspec'],
            destination_directory,
            progress=True)

        # Get the RSP2 file if it exists, otherwise get the RSP file
        if 'rsp2' in remote_detector_info:

            local_detector_info['rsp'] = downloader.download(
                remote_detector_info['rsp2'],
                destination_directory,
                progress=True)

        else:

            local_detector_info['rsp'] = downloader.download(
                remote_detector_info['rsp'],
                destination_directory,
                progress=True)

        # Get TTE file (compressing it if requested)
        local_detector_info['tte'] = downloader.download(
            remote_detector_info['tte'],
            destination_directory,
            progress=True,
            compress=compress_tte)

    return download_info
Code example #9
def download_GBM_trigger_data(trigger_name: str,
                              detectors: Optional[List[str]] = None,
                              destination_directory: str = ".",
                              compress_tte: bool = True) -> Dict[str, Any]:
    """
    Download the latest GBM TTE and RSP files from the HEASARC server. Will get the
    latest file version and prefer RSP2s over RSPs. If the files already exist in your destination
    directory, they will be skipped in the download process. The output dictionary can be used
    as input to the FermiGBMTTELike class.

    example usage: download_GBM_trigger_data('080916009', detectors=['n0','na','b0'], destination_directory='.')

    :param trigger_name: trigger number (str) e.g. '080916009' or 'bn080916009' or 'GRB080916009'
    :param detectors: list of detectors, default is all detectors
    :param destination_directory: download directory
    :param compress_tte: compress the TTE files via gzip (default True)
    :return: a dictionary with information about the download
    """

    # Let's doctor up the input just in case the user tried something strange

    sanitized_trigger_name_: str = _validate_fermi_trigger_name(trigger_name)

    # create output directory if it does not exist
    destination_directory: Path = sanitize_filename(destination_directory,
                                                    abspath=True)

    if_directory_not_existing_then_make(destination_directory)

    # Sanitize detector list (if any)
    if detectors is not None:

        for det in detectors:

            if det not in _detector_list:
                log.error(
                    f"Detector {det} in the provided list is not a valid detector. "
                    f"Valid choices are: {_detector_list}")
                raise DetDoesNotExist()

    else:

        detectors: List[str] = list(_detector_list)

    # Open heasarc web page

    url = threeML_config.GBM.public_http_location
    year = f"20{sanitized_trigger_name_[:2]}"
    directory = f"/triggers/{year}/bn{sanitized_trigger_name_}/current"

    heasarc_web_page_url = f"{url}/{directory}"

    log.debug(f"going to look in {heasarc_web_page_url}")

    try:

        downloader = ApacheDirectory(heasarc_web_page_url)

    except RemoteDirectoryNotFound:

        log.exception(
            f"Trigger {sanitized_trigger_name_} does not exist at {heasarc_web_page_url}"
        )

        raise TriggerDoesNotExist()

    # Now select the files we want to download, then we will download them later
    # We do it in two steps because we want to be able to choose what to download once we
    # have the complete picture

    # Get the list of remote files
    remote_file_list = downloader.files

    # This is the dictionary to keep track of the classification
    remote_files_info = DictWithPrettyPrint([(det, {}) for det in detectors])

    # Classify the files detector by detector

    for this_file in remote_file_list:

        # this_file is something like glg_tte_n9_bn100101988_v00.fit
        tokens = this_file.split("_")

        if len(tokens) != 5:

            # Not a data file

            continue

        else:

            # The "map" is necessary to transform the tokens to normal string (instead of unicode),
            # because u"b0" != "b0" as a key for a dictionary

            _, file_type, detname, _, version_ext = list(map(str, tokens))

        version, ext = version_ext.split(".")

        # We do not care here about the other files (tcat, bcat and so on),
        # nor about files which pertain to other detectors

        if (file_type not in ["cspec", "tte"]
                or ext not in ["rsp", "rsp2", "pha", "fit"]
                or detname not in detectors):

            continue

        # cspec files can be rsp, rsp2 or pha files. Classify them

        if file_type == "cspec":

            if ext == "rsp":

                remote_files_info[detname]["rsp"] = this_file

            elif ext == "rsp2":

                remote_files_info[detname]["rsp2"] = this_file

            elif ext == "pha":

                remote_files_info[detname]["cspec"] = this_file

            else:

                raise RuntimeError("Should never get here")

        else:

            remote_files_info[detname][file_type] = this_file

    # Now download the files

    download_info = DictWithPrettyPrint([(det, DictWithPrettyPrint())
                                         for det in detectors])

    for detector in list(remote_files_info.keys()):

        log.debug(f"trying to download GBM detector {detector}")

        remote_detector_info = remote_files_info[detector]
        local_detector_info = download_info[detector]

        # Get CSPEC file
        local_detector_info["cspec"] = downloader.download(
            remote_detector_info["cspec"],
            destination_directory,
            progress=True)

        # Get the RSP2 file if it exists, otherwise get the RSP file
        if "rsp2" in remote_detector_info:

            log.debug(f"{detector} has RSP2 responses")

            local_detector_info["rsp"] = downloader.download(
                remote_detector_info["rsp2"],
                destination_directory,
                progress=True)

        else:

            log.debug(f"{detector} has RSP responses")

            local_detector_info["rsp"] = downloader.download(
                remote_detector_info["rsp"],
                destination_directory,
                progress=True)

        # Get TTE file (compressing it if requested)
        local_detector_info["tte"] = downloader.download(
            remote_detector_info["tte"],
            destination_directory,
            progress=True,
            compress=compress_tte,
        )

    return download_info
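
All nine examples return a DictWithPrettyPrint rather than a plain dict. The real class ships with threeML's I/O utilities and adds a human-friendly rendering; the toy sketch below only conveys the idea of a dict subclass with a pretty representation and is not the actual implementation:

    import json

    class PrettyDict(dict):
        """Toy stand-in for DictWithPrettyPrint: a dict whose repr is indented."""

        def __repr__(self):
            # default=str keeps non-JSON values (e.g. numpy arrays, Paths) printable
            return json.dumps(self, indent=4, default=str)

    d = PrettyDict({"selection": {"ra": 120.7, "dec": -2.9}})
    print(d)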