    def run_sampler(self):
        if self.scheduler.lower() == "condor":
            signal.signal(signal.SIGALRM, handler=sighandler)
            signal.alarm(self.periodic_restart_time)

        likelihood, priors = self.get_likelihood_and_priors()

        self.result = bilby.run_sampler(
            likelihood=likelihood,
            priors=priors,
            sampler=self.sampler,
            label=self.label,
            outdir=self.result_directory,
            conversion_function=self.parameter_generation,
            injection_parameters=self.meta_data["injection_parameters"],
            meta_data=self.meta_data,
            result_class=self.result_class,
            exit_code=CHECKPOINT_EXIT_CODE,
            **self.sampler_kwargs,
        )

        if self.convert_to_flat_in_component_mass:
            try:
                result_reweighted = (
                    bilby.gw.prior.convert_to_flat_in_component_mass_prior(self.result)
                )
                result_reweighted.save_to_file()
            except Exception as e:
                logger.warning(
                    f"Unable to convert to the flat in component mass prior due to: {e}"
                )
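    # Sketch of the SIGALRM checkpoint pattern used above (handler name and
    # alarm interval are illustrative): on HTCondor the handler exits with
    # CHECKPOINT_EXIT_CODE so the job is restarted and resumes from the
    # sampler's checkpoint file.
    #
    #     import signal
    #     import sys
    #
    #     def sighandler(signum, frame):
    #         logger.info("Periodic restart: checkpointing and exiting")
    #         sys.exit(CHECKPOINT_EXIT_CODE)
    #
    #     signal.signal(signal.SIGALRM, sighandler)
    #     signal.alarm(14400)  # request SIGALRM after e.g. 4 hours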
    @staticmethod
    def _is_gwpy_data_good(start_time, end_time, det):
        """Check if start-end time is a period when the IFO has quality data.

        Check passes if the IFO has quality data during the time period provided.

        Note: we are using the DMT-SCIENCE channel to check the quality.
        https://labcit.ligo.caltech.edu/~jzweizig/talks/LSC-2009-06-03/DMT-DQ_Stat-2009-06-03.pdf

        This method is slow as it queries GWpy.

        Parameters
        ----------
        start_time, end_time: float
            GPS start and end time of required data.
        det: str
            The string key that represents the detector ("H1", "L1", etc)

        Returns
        -------
        bool or None
            True if the data is good (the IFO has quality data for the whole
            duration), False if it does not, and None if the quality check
            itself failed.

        """
        # Create data quality flag
        channel_num = 1
        quality_flag = f"{det}:DMT-SCIENCE:{channel_num}"
        logger.info(
            f"Checking data quality {quality_flag} {start_time}-{end_time}")
        try:
            flag = gwpy.segments.DataQualityFlag.query(
                quality_flag, gwpy.time.to_gps(start_time),
                gwpy.time.to_gps(end_time))

            # compare active duration from quality flag and total duration
            total_duration = end_time - start_time
            active_duration = float(flag.livetime)
            inactive_duration = total_duration - active_duration

            # data is not good if there is any period when the IFO is inactive
            if inactive_duration > 0:
                data_is_good = False
                logger.warning(
                    "Data quality check: FAILED. \n"
                    "{det} does not have quality data for "
                    "{inactive_duration}s out of {total_duration}s".format(
                        det=det,
                        inactive_duration=inactive_duration,
                        total_duration=total_duration,
                    ))
            else:
                data_is_good = True
                logger.info("Data quality check: PASSED.")
        except Exception as e:
            logger.warning(f"Error in Data Quality Check: {e}.")
            data_is_good = None

        return data_is_good
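    # Illustrative direct use of the gwpy query wrapped above (detector, GPS
    # times, and channel version are examples; requires access to the segment
    # database):
    #
    #     import gwpy.segments
    #
    #     flag = gwpy.segments.DataQualityFlag.query(
    #         "H1:DMT-SCIENCE:1", 1126259446, 1126259478)
    #     print(flag.livetime)  # seconds of active (science-mode) time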
def safe_run(func):
    """Decorator guarding gwpy plotting calls (shell reconstructed; name assumed)."""

    def gwpy_func_wrapper(*args, **kwargs):
        """Gwpy function wrapper."""
        try:
            return func(*args, **kwargs)
        except UnboundLocalError:
            # Issue raised: https://github.com/gwpy/gwpy/issues/1144
            logger.warning("Error with gwpy_plot of time and spectrogram data. "
                           "Skipping plotting.")
            return None

    return gwpy_func_wrapper
    def _gwpy_read(self, det, channel, start_time, end_time, dtype="float64"):
        """Wrapper function to gwpy.timeseries.TimeSeries.read()

        Parameters
        ----------
        det: str
            The detector name corresponding to the key in data-dict
        channel: str
            The name of the channel to read, e.g. 'L1:GDS-CALIB_STRAIN'
        start_time, end_time: float
            GPS start and end time of required data
        dtype: str or np.dtype
            Data type requested

        Returns
        -------
        data: gwpy.timeseries.TimeSeries or None
            The requested data if the read succeeds, otherwise None

        """

        logger.debug("data-dict provided, attempt read of data")

        if det not in self.data_dict:
            logger.info(f"Detector {det} not found in data-dict")
            return None
        else:
            source = self.data_dict[det]
            format_ext = os.path.splitext(source)[1]

        # If the source contains a glob-path, e.g. *gwf, glob it first
        if "*" in source:
            logger.info(f"Globbing {source}")
            source = glob.glob(source)
            logger.info(f"Setting source={source}")

        if "gwf" in format_ext:
            kwargs = dict(source=source,
                          channel=channel,
                          dtype=dtype,
                          format="gwf.lalframe")
        elif "hdf5" in format_ext:
            kwargs = dict(source=source,
                          start=start_time,
                          end=end_time,
                          format="hdf5")
        elif "txt" in format_ext:
            data = kwargs = dict(source=source)
        else:
            # Generic best try
            kwargs = dict(source=source,
                          channel=channel,
                          start=start_time,
                          end=end_time)

        if self.data_format is not None:
            kwargs["format"] = self.data_format

        try:
            kwargs_string = ""
            for key, val in kwargs.items():
                if isinstance(val, str):
                    val = f"'{val}'"
                kwargs_string += f"{key}={val}, "
            logger.info(
                f"Running: gwpy.timeseries.TimeSeries.read({kwargs_string})")
            data = gwpy.timeseries.TimeSeries.read(**kwargs)

            data = data.crop(start=start_time, end=end_time)

            if data.duration.value < end_time - start_time:
                logger.warning(
                    "Unable to read in requested {}s duration of data from {}"
                    " only {}s available: returning None".format(
                        end_time - start_time, source, data.duration.value))
                data = None
            elif data.duration.value > end_time - start_time:
                logger.info(
                    "Read in {}s of data from {}, but {}s requested, truncating"
                    .format(data.duration.value, source,
                            end_time - start_time))
                data = data[data.times.value >= start_time]
                data = data[data.times.value < end_time]

            return data
        except ValueError as e:
            logger.info(f"Reading of data failed with error {e}")
            return None
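    # Illustrative form of the underlying read for a single GWOSC frame file
    # (filename and channel are examples; the "gwf.lalframe" format needs
    # python-lalframe installed):
    #
    #     data = gwpy.timeseries.TimeSeries.read(
    #         "H-H1_GWOSC_4KHZ_R1-1126257415-4096.gwf",
    #         channel="H1:GWOSC-4KHZ_R1_STRAIN",
    #         format="gwf.lalframe",
    #     )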
    def _get_data(self,
                  det,
                  channel_type,
                  start_time,
                  end_time,
                  resample=True):
        """Read in data using gwpy

        If the channel_type is "GWOSC", open data is obtained. Otherwise, we
        try to read in the data first using "read" if a data_dict exists and
        then using "get".

        Parameters
        ----------
        det: str
            The detector name, e.g. "H1"
        channel_type: str
            The full channel name is formed from <det>:<channel_type>, see
            bilby_pipe --help for more information.
        start_time, end_time: float
            GPS start and end time of segment

        Returns
        -------
        data: gwpy.timeseries.TimeSeries
            The loaded data

        Raises
        ------
        BilbyPipeError:
            If there is an issue obtaining the data or with the data itself
        """
        timeslide_val = None
        if hasattr(self, "timeslide_dict"):
            timeslide_val = self.timeslide_dict[det]
            start_time = start_time + timeslide_val
            end_time = end_time + timeslide_val
            logger.info(
                "Applying timeshift of {}. Time range {} - {} => {} - {}".
                format(
                    timeslide_val,
                    start_time - timeslide_val,
                    end_time - timeslide_val,
                    start_time,
                    end_time,
                ))

        if self.ignore_gwpy_data_quality_check is False:
            data_is_good = self._is_gwpy_data_good(start_time, end_time, det)
            if not data_is_good:
                raise BilbyPipeError("Data quality is not good.")

        data = None

        if data is None and channel_type == "GWOSC":
            data = self._gwpy_fetch_open_data(det, start_time, end_time)

        channel = f"{det}:{channel_type}"
        if data is None and self.data_dict is not None:
            data = self._gwpy_read(det, channel, start_time, end_time)
        if data is None:
            data = self._gwpy_get(channel, start_time, end_time)

        if data is None:
            raise BilbyPipeError("Failed to obtain data")
        if np.all(data.value == 0):
            raise BilbyPipeError("Obtained data is all zeros")

        if resample and data.sample_rate.value == self.sampling_frequency:
            logger.info("Sample rate matches data no resampling")
        elif resample:
            message = "Resampling data to sampling_frequency {} using {}"
            if self.resampling_method == "gwpy":
                logger.info(
                    message.format(self.sampling_frequency,
                                   self.resampling_method))
                data = data.resample(self.sampling_frequency)
            elif self.resampling_method == "lal":
                logger.info(
                    message.format(self.sampling_frequency,
                                   self.resampling_method))
                try:
                    lal_timeseries = data.to_lal()
                    lal.ResampleREAL8TimeSeries(
                        lal_timeseries, float(1 / self.sampling_frequency))
                except Exception as e:
                    raise BilbyPipeError(
                        "The lal resampling method has failed with exception {} "
                        "You may wish to try a different resampling method".
                        format(e))
                data = gwpy.timeseries.TimeSeries(
                    lal_timeseries.data.data,
                    epoch=lal_timeseries.epoch,
                    dt=lal_timeseries.deltaT,
                )
            else:
                logger.warning(
                    "Resampling method {} not understood, should be "
                    "'gwpy' or 'lal'.".format(self.resampling_method))
        else:
            logger.info("No data resampling requested")

        if timeslide_val:
            # to match up the time axis for the interferometer network
            data.shift(-timeslide_val)

        return data
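    # The "gwpy" resampling branch above amounts to a single call on any
    # TimeSeries (target rate is an example; gwpy applies an anti-aliasing
    # FIR filter by default):
    #
    #     data = data.resample(4096)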
    @sampling_frequency.setter
    def sampling_frequency(self, sampling_frequency):
        if is_a_power_of_2(sampling_frequency) is False:
            logger.warning(
                "Sampling frequency {} is not a power of 2; this can cause "
                "problems".format(sampling_frequency))
        self._sampling_frequency = sampling_frequency
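# A minimal equivalent of the is_a_power_of_2 utility for positive integers
# (bilby_pipe.utils ships its own implementation; this is illustrative):
#
#     def is_power_of_two(n):
#         return n > 0 and n & (n - 1) == 0
#
#     is_power_of_two(4096)  # True
#     is_power_of_two(1000)  # False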
# NOTE: the opening line of this import is reconstructed; these names are
# provided by bilby_pipe.utils in the upstream package.
from bilby_pipe.utils import (
    get_version_information,
    is_a_power_of_2,
    log_version_information,
    logger,
)

# fmt: off
import matplotlib  # isort:skip
matplotlib.use("agg")
# fmt: on

try:
    import nds2  # noqa
except ImportError:
    logger.warning(
        "You do not have nds2 (python-nds2-client) installed. You may "
        "experience problems accessing interferometer data.")

try:
    import LDAStools.frameCPP  # noqa
except ImportError:
    logger.warning(
        "You do not have LDAStools.frameCPP (python-ldas-tools-framecpp) "
        "installed. You may experience problems accessing interferometer data."
    )


class DataGenerationInput(Input):
    """Handles user-input for the data generation script

    Parameters