Example no. 1
    def find_latest(self, site, frametype, urltype=None, on_missing="warn"):
        """Query for the most recent framefile of a given type.

        @param  site:
            single-character name of site to match
        @param frametype:
            name of frametype to match
        @param urltype:
            file scheme to search for (e.g. 'file')
        @param on_missing:
            what to do when the requested frame isn't found, one of:
                - C{'warn'} (default): print a warning,
                - C{'error'}: raise an L{RuntimeError}, or
                - C{'ignore'}: do nothing

        @type       site: L{str}
        @type  frametype: L{str}
        @type    urltype: L{str}
        @type on_missing: L{str}

        @returns: L{Cache<pycbc_glue.lal.Cache>} with one
                  L{entry<pycbc_glue.lal.CacheEntry>}

        @raises RuntimeError: if no frames are found and C{on_missing='error'}
        """
        if on_missing not in ('warn', 'error', 'ignore'):
            raise ValueError(
                "on_missing must be 'warn', 'error', or 'ignore'.")
        url = "%s/gwf/%s/%s/latest" % (_url_prefix, site, frametype)
        # if a URL type is specified append it to the path
        if urltype:
            url += "/%s" % urltype
        # request JSON output
        url += ".json"
        response = self._requestresponse("GET", url)
        urllist = decode(response.read())
        if len(urllist) == 0:
            if on_missing == "warn":
                sys.stderr.write("No files found!\n")
            elif on_missing == "error":
                raise RuntimeError("No files found!")
        return lal.Cache([
            lal.CacheEntry.from_T050017(x, coltype=self.LIGOTimeGPSType)
            for x in urllist
        ])
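A minimal usage sketch for find_latest as defined above. The connection object conn, the frametype name, and the printed attributes are illustrative assumptions; constructing and opening the datafind connection is not part of this example.

# 'conn' is assumed to be an already-open datafind connection exposing find_latest()
cache = conn.find_latest("H", "H1_HOFT_C00", urltype="file", on_missing="error")
if len(cache):
    latest = cache[0]          # a single pycbc_glue.lal.CacheEntry
    print(latest.url)          # e.g. a file:// URL to the newest frame file
    print(latest.segment)      # GPS [start, stop) interval covered by that frame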
Example no. 2
def get_missing_segs_from_frame_file_cache(datafindcaches):
    """
    This function will use os.path.isfile to determine if all the frame files
    returned by the local datafind server actually exist on the disk. This can
    then be used to update the science times if needed.

    Parameters
    -----------
    datafindcaches : OutGroupList
        List of all the datafind output files.

    Returns
    --------
    missingFrameSegs : dict of glue.segments.segmentlist instances, keyed by ifo
        The times corresponding to missing frames found in datafindOuts.
    missingFrames : dict of lal.Cache instances, keyed by ifo
        The list of missing frames.
    """
    missingFrameSegs = {}
    missingFrames = {}
    for cache in datafindcaches:
        if len(cache) > 0:
            # Don't bother if these are not file:// urls, assume all urls in
            # one cache file must be the same type
            if not cache[0].scheme == 'file':
                warn_msg = "We have %s entries in the " % (cache[0].scheme, )
                warn_msg += "cache file. I do not check if these exist."
                logging.info(warn_msg)
                continue
            _, currMissingFrames = cache.checkfilesexist(on_missing="warn")
            missingSegs = segments.segmentlist(
                e.segment for e in currMissingFrames).coalesce()
            ifo = cache.ifo
            if ifo not in missingFrameSegs:
                missingFrameSegs[ifo] = missingSegs
                missingFrames[ifo] = lal.Cache(currMissingFrames)
            else:
                missingFrameSegs[ifo].extend(missingSegs)
                # NOTE: This .coalesce probably isn't needed as the segments
                # should be disjoint. If speed becomes an issue maybe remove it?
                missingFrameSegs[ifo].coalesce()
                missingFrames[ifo].extend(currMissingFrames)
    return missingFrameSegs, missingFrames
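A short sketch of how the helper above might be used to report science time lost to missing frames. datafindcaches is assumed to have been produced earlier by the datafind step, so only the reporting loop is shown.

missingFrameSegs, missingFrames = \
    get_missing_segs_from_frame_file_cache(datafindcaches)
for ifo, segs in missingFrameSegs.items():
    if abs(segs):
        # abs() of a segmentlist is the total duration it covers
        logging.info("%s: %s s of data covered only by missing frames",
                     ifo, abs(segs))
        for entry in missingFrames[ifo]:
            logging.info("  missing: %s", entry.url)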
Example no. 3
    def find_frame_urls(self,
                        site,
                        frametype,
                        gpsstart,
                        gpsend,
                        match=None,
                        urltype=None,
                        on_gaps="warn"):
        """Find the framefiles for the given type in the [start, end) interval
        frame

        @param site:
            single-character name of site to match
        @param frametype:
            name of frametype to match
        @param gpsstart:
            integer GPS start time of query
        @param gpsend:
            integer GPS end time of query
        @param match:
            regular expression to match against
        @param urltype:
            file scheme to search for (e.g. 'file')
        @param on_gaps:
            what to do when the requested frame isn't found, one of:
                - C{'warn'} (default): print a warning,
                - C{'error'}: raise an L{RuntimeError}, or
                - C{'ignore'}: do nothing

        @type       site: L{str}
        @type  frametype: L{str}
        @type   gpsstart: L{int}
        @type     gpsend: L{int}
        @type      match: L{str}
        @type    urltype: L{str}
        @type    on_gaps: L{str}

        @returns: L{Cache<pycbc_glue.lal.Cache>}

        @raises RuntimeError: if gaps are found and C{on_gaps='error'}
        """
        if on_gaps not in ("warn", "error", "ignore"):
            raise ValueError("on_gaps must be 'warn', 'error', or 'ignore'.")
        url = ("%s/gwf/%s/%s/%s,%s" %
               (_url_prefix, site, frametype, gpsstart, gpsend))
        # if a URL type is specified append it to the path
        if urltype:
            url += "/%s" % urltype
        # request JSON output
        url += ".json"
        # append a regex if input
        if match:
            url += "?match=%s" % match
        # make query
        response = self._requestresponse("GET", url)
        urllist = decode(response.read())

        out = lal.Cache([
            lal.CacheEntry.from_T050017(x, coltype=self.LIGOTimeGPSType)
            for x in urllist
        ])

        if on_gaps == "ignore":
            return out
        else:
            span = segments.segment(gpsstart, gpsend)
            seglist = segments.segmentlist(e.segment for e in out).coalesce()
            missing = (segments.segmentlist([span]) - seglist).coalesce()
            if span in seglist:
                return out
            else:
                msg = "Missing segments: \n%s" % "\n".join(map(str, missing))
                if on_gaps == "warn":
                    sys.stderr.write("%s\n" % msg)
                    return out
                else:
                    raise RuntimeError(msg)
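A usage sketch for find_frame_urls. As above, conn stands for an already-open datafind connection, and the site, frametype, and GPS span are illustrative values only.

# hypothetical open connection 'conn'; arguments are illustrative
cache = conn.find_frame_urls("L", "L1_HOFT_C00", 1126259446, 1126259478,
                             urltype="file", on_gaps="error")
for entry in cache:
    print(entry.url)   # file:// URLs covering the requested span
# with on_gaps="error", reaching this point means the span is fully covered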
Example no. 4
        # parse the site and frame type out of the T050017-style file name
        try:
            site, frametype, _, _ = framefile.split("-")
        except Exception as e:
            raise RuntimeError("Error parsing filename %s: %s" %
                               (framefile, e))
        url = ("%s/gwf/%s/%s/%s.json" %
               (_url_prefix, site, frametype, framefile))
        response = self._requestresponse("GET", url)
        urllist = decode(response.read())
        if len(urllist) == 0:
            if on_missing == "warn":
                sys.stderr.write("No files found!\n")
            elif on_missing == "error":
                raise RuntimeError("No files found!")
        # verify urltype is what we want
        cache = lal.Cache(e for e in [
            lal.CacheEntry.from_T050017(x, coltype=self.LIGOTimeGPSType)
            for x in urllist
        ] if not urltype or e.scheme == urltype)
        return cache
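The split("-") in the try block above relies on the T050017 frame-file naming convention, SITE-FRAMETYPE-GPSSTART-DURATION.gwf. A small standalone illustration with a made-up file name:

framefile = "H-H1_HOFT_C00-1126256640-4096.gwf"   # illustrative name only
site, frametype, gps_start, duration = framefile.split("-")
print(site)        # 'H'
print(frametype)   # 'H1_HOFT_C00'
print(gps_start)   # '1126256640'
print(duration)    # '4096.gwf' (duration plus the .gwf extension)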

    def find_latest(self, site, frametype, urltype=None, on_missing="warn"):
        """Query for the most recent framefile of a given type.

        @param  site:
            single-character name of site to match
        @param frametype:
            name of frametype to match
        @param urltype:
            file scheme to search for (e.g. 'file')
        @param on_missing:
            what to do when the requested frame isn't found, one of:
                - C{'warn'} (default): print a warning,