Example #1
File: bass.py Project: legacysurvey/rapala
                     "--verbose",
                     action="store_true",
                     help="increase verbosity")
 args = parser.parse_args()
 mjds = [None, None]
 if args.mjd is not None:
     mjds = [int(d) if d != '*' else None for d in args.mjd.split(',')]
 elif args.utdate is not None:
     mjds = [
         int(Time(d).mjd) if d != '*' else None
         for d in args.utdate.split(',')
     ]
 elif args.date is not None:
     mjds = [
         int((Time(d) +
              TimeDelta(1, format='jd')).mjd) if d != '*' else None
         for d in args.date.split(',')
     ]
 if args.start2016:
     mjds[0] = 57327
 if len(mjds) == 1:
     mjds = mjds * 2
 if args.obsdb:
     build_obsdb(update=args.update, which=args.tiles, newest=args.newest)
 elif args.summary:
     if args.legacyfield:
         if args.legacyfield.upper() == 'CFHTW3':
             tiles = cfhtw3_tiles(observed=False)
         elif args.legacyfield.upper() == 'NDWFS':
             tiles = ndwfs_tiles(observed=False)
         elif args.legacyfield.upper().startswith('MD'):
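
The pattern above turns calendar-date strings into integer MJD bounds, shifting local dates by one day in 'jd' format. A minimal standalone sketch of the same conversion (the dates are illustrative, not from the original project):

from astropy.time import Time, TimeDelta

# UT date string -> integer MJD, as in the --utdate branch
mjd_utdate = int(Time('2016-01-15').mjd)                               # 57402

# calendar date -> MJD of the following UT day, as in the --date branch
mjd_date = int((Time('2016-01-15') + TimeDelta(1, format='jd')).mjd)  # 57403
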
Example #2
    def fetch(self, qres, path=None, error_callback=None, **kwargs):
        """
        Download a set of results.

        Parameters
        ----------
        qres : `~sunpy.net.dataretriever.QueryResponse`
            Results to download.

        Returns
        -------
        Results Object
        """

        urls = [qrblock.url for qrblock in qres]

        filenames = []
        local_filenames = []

        for i, [url, qre] in enumerate(zip(urls, qres)):
            name = url.split('/')[-1]

            # temporary fix: all QRBs currently share the same start_time value
            day = Time(qre.time.start.strftime('%Y-%m-%d')) + TimeDelta(
                i * u.day)

            if name not in filenames:
                filenames.append(name)

            if name.endswith('.gz'):
                local_filenames.append('{}SRS.txt'.format(
                    day.strftime('%Y%m%d')))
            else:
                local_filenames.append(name)

        # Files to be actually downloaded
        paths = self._get_full_filenames(qres, filenames, path)

        # Those files that will be present after get returns
        local_paths = self._get_full_filenames(qres, local_filenames, path)

        res = Results(lambda x: None, 0, lambda map_: self._link(map_))

        # Remove duplicate URLs. This makes paths and urls have the same
        # number of elements. OrderedDict is required to maintain ordering
        # because it will be zipped with paths later.

        urls = list(OrderedDict.fromkeys(urls))

        dobj = Downloader(max_conn=len(urls), max_total=len(urls))

        # We cast to list here in list(zip... to force execution of
        # res.require([x]) at the start of the loop.
        for aurl, ncall, fname in list(
                zip(urls, map(lambda x: res.require([x]), urls), paths)):
            dobj.download(aurl, fname, ncall, error_callback)

        res.wait()

        res2 = Results(lambda x: None, 0)

        for fname, srs_filename in zip(local_paths, local_filenames):

            fname = fname.args[0]
            name = fname.split('/')[-1]

            past_year = False
            for i, fname2 in enumerate(paths):

                fname2 = fname2.args[0]

                if fname2.endswith('.txt'):
                    continue

                year = fname2.split('/')[-1]
                year = year.split('_SRS')[0]

                if year in name:
                    TarFile = tarfile.open(fname2)
                    filepath = fname.rpartition('/')[0]
                    member = TarFile.getmember('SRS/' + srs_filename)
                    member.name = name
                    TarFile.extract(member, path=filepath)
                    TarFile.close()

                    callback = res2.require([fname])
                    callback({'path': fname})

                    past_year = True
                    break

            if past_year is False:
                callback = res2.require([fname])
                callback({'path': fname})

        return res2
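
The per-file SRS name above is built by stepping a base Time forward one whole day per result and formatting it; that naming step in isolation (start date chosen arbitrarily):

import astropy.units as u
from astropy.time import Time, TimeDelta

start = Time('2017-01-25')
for i in range(3):
    day = start + TimeDelta(i * u.day)
    print('{}SRS.txt'.format(day.strftime('%Y%m%d')))
# 20170125SRS.txt, 20170126SRS.txt, 20170127SRS.txt
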
Example #3
    def meridian_transit(self,
                         t_min: Time = Time.now(),
                         duration: TimeDelta = TimeDelta(86400, format='sec'),
                         precision: TimeDelta = TimeDelta(5, format='sec'),
                         fast_compute: bool = True) -> Time:
        """ Computes the :class:`~nenupy.astro.target.Target` meridian transit time(s).
            This method returns all the transit times found in the time
            window ranging from ``t_min`` to ``t_min + duration``.

            :param t_min:
                Starting time of the temporal window within which
                meridian transits are looked for.
                Default is current time.
            :type t_min:
                :class:`~astropy.time.Time`
            :param duration:
                Width of the temporal window within which
                meridian transits are looked for.
                Default is ``1 day``.
            :type duration:
                :class:`~astropy.time.TimeDelta`
            :param precision:
                Temporal precision of the returned meridian transit values.
                Default is ``5 sec``.
            :type precision:
                :class:`~astropy.time.TimeDelta`
            :param fast_compute:
                If set to ``True``, a fast approximation is used during
                the computation of Local Sidereal Time.
                Default is ``True``.
            :type fast_compute:
                `bool`

            :returns:
                Meridian transit times.
                If no transit times are found (because the requested
                time window doesn't contain any) an empty
                :class:`~astropy.time.Time` object is returned.
            :rtype:
                :class:`~astropy.time.Time`

            :Example:
                >>> from nenupy.astro.target import FixedTarget
                >>> from astropy.time import Time, TimeDelta
                >>> cyg_a = FixedTarget.from_name("Cyg A")
                >>> cyg_a.meridian_transit(
                        t_min=Time("2021-01-01"),
                        duration=TimeDelta(86400*2, format="sec")
                    )
                <Time object: scale='utc' format='iso' value=['2021-01-01 13:05:47.868' '2021-01-02 13:01:51.882']>

            .. seealso::
                :ref:`ephemerides_sec`

        """
        def find_ha_transit(times: Time):
            """ """
            fk5 = self._get_source_coordinates(time=times).transform_to(
                FK5(equinox=times))
            ha = hour_angle(radec=fk5,
                            time=times,
                            observer=self.observer,
                            fast_compute=fast_compute)
            return np.where((np.roll(ha, shift=-1, axis=1) - ha)[:, :-1] < 0)

        return self._find_crossing_times(finding_function=find_ha_transit,
                                         t_min=t_min,
                                         duration=duration,
                                         precision=precision)
Example #4
File: iers.py Project: weaverba137/astropy
    def auto_open(cls, files=None):
        """Attempt to get an up-to-date leap-second list.

        The routine will try the files in sequence until it finds one
        whose expiration date is "good enough" (see below).  If none
        are good enough, it returns the one with the most recent expiration
        date, warning if that file is expired.

        For remote files that are cached already, the cached file is tried
        first before attempting to retrieve it again.

        Parameters
        ----------
        files : list of path-like, optional
            List of files/URLs to attempt to open.  By default, uses
            ``cls._auto_open_files``.

        Returns
        -------
        leap_seconds : `~astropy.utils.iers.LeapSeconds`
            Up to date leap-second table

        Notes
        -----
        Bulletin C is released about 10 days after a possible leap second is
        introduced, i.e., mid-January or mid-July.  Expiration days are thus
        generally at least 150 days after the present.  We look for a file
        that expires more than 180 - `~astropy.utils.iers.Conf.auto_max_age`
        days after the present.
        """
        good_enough = cls._today() + TimeDelta(
            180 - _none_to_float(conf.auto_max_age), format='jd')

        if files is None:
            # Basic files to go over (entries in _auto_open_files can be
            # configuration items, which we want to be sure are up to date).
            files = [getattr(conf, f, f) for f in cls._auto_open_files]

        # Remove empty entries.
        files = [f for f in files if f]

        # Our trials start with normal files and remote ones that are
        # already in cache.  The bools here indicate that the cache
        # should be used.
        trials = [(f, True) for f in files
                  if not urlparse(f).netloc or is_url_in_cache(f)]
        # If we are allowed to download, we try downloading new versions
        # if none of the above worked.
        if conf.auto_download:
            trials += [(f, False) for f in files if urlparse(f).netloc]

        self = None
        err_list = []
        # Go through all entries, and return the first one that
        # is not expired, or the most up to date one.
        for f, allow_cache in trials:
            if not allow_cache:
                clear_download_cache(f)

            try:
                trial = cls.open(f, cache=True)
            except Exception as exc:
                err_list.append(exc)
                continue

            if self is None or trial.expires > self.expires:
                self = trial
                self.meta['data_url'] = str(f)
                if self.expires > good_enough:
                    break

        if self is None:
            raise ValueError('none of the files could be read. The '
                             'following errors were raised:\n' + str(err_list))

        if self.expires < self._today():
            warn('leap-second file is expired.', IERSStaleWarning)

        return self
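
The "good enough" test reduces to an ordinary Time comparison after adding a day-valued TimeDelta. A minimal sketch of that check (the 180-day margin comes from the docstring; the dates and the auto_max_age value are illustrative):

from astropy.time import Time, TimeDelta

today = Time('2024-03-01')
auto_max_age = 30.0                                  # days, stand-in for conf.auto_max_age
good_enough = today + TimeDelta(180 - auto_max_age, format='jd')

expires = Time('2024-12-28')                         # hypothetical file expiry
print(expires > good_enough)                         # True: this file would be accepted
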
Example #5
def makemccd(args=None):
    """Script to generate multi-CCD test data given a set of parameters
    defined in a config file (parsed using configparser). This allows
    things such as a bias frame, flat-field variations, offsets, scale
    factors and rotations between CCDs, and temporal variations.

    Arguments::

       config : string
          file defining the parameters.

       parallel : bool
          True / yes etc to run in parallel. Be warned: it does not
          always make things faster, which I assume is the result of
          overheads when parallelising. It will simply use whatever
          CPUs are available.

    Depending upon the setting in the config file, this could generate a large
    number of different files and so the first time you run it, you may want
    to do so in a clean directory.

    Config file format: see the documentation of configparser for the general
    format of the config files expected by this routine. Essentially there are
    a series of sections, e.g.:

    [ccd 1]
    nxtot = 2048
    nytot = 1048
    .
    .
    .

    which define all the parameters needed. There are many others to simulate
    a bias offset, a flat field; see the example file ??? for a fully-documented
    version.

    """
    import configparser

    global _gframe, _gfield

    command, args = utils.script_args(args)

    # get inputs
    with Cline("HIPERCAM_ENV", ".hipercam", command, args) as cl:

        # Register parameters
        cl.register("config", Cline.LOCAL, Cline.PROMPT)
        cl.register("parallel", Cline.LOCAL, Cline.PROMPT)

        # Prompt for them
        config = cl.get_value("config", "configuration file",
                              cline.Fname("config"))
        parallel = cl.get_value("parallel", "add targets in parallel?", False)

    # Read the config file
    conf = configparser.ConfigParser()
    conf.read(config)

    # Determine whether files get overwritten or not
    overwrite = (conf.getboolean("general", "overwrite")
                 if "overwrite" in conf["general"] else False)
    dtype = conf["general"]["dtype"] if "dtype" in conf["general"] else None

    # Top-level header
    thead = fits.Header()
    thead.add_history("Created by makedata")

    # Store the CCD labels and parameters and their dimensions. Determine
    # maximum dimensions for later use when adding targets
    ccd_pars = Odict()
    maxnx = 0
    maxny = 0
    for key in conf:
        if key.startswith("ccd"):

            # translate parameters
            nxtot = int(conf[key]["nxtot"])
            nytot = int(conf[key]["nytot"])
            xcen = float(conf[key]["xcen"])
            ycen = float(conf[key]["ycen"])
            angle = float(conf[key]["angle"])
            scale = float(conf[key]["scale"])
            xoff = float(conf[key]["xoff"])
            yoff = float(conf[key]["yoff"])
            fscale = float(conf[key]["fscale"])
            toff = float(conf[key]["toff"])

            field = (hcam.Field.rjson(conf[key]["field"])
                     if "field" in conf[key] else None)
            ndiv = int(conf[key]["ndiv"]) if field is not None else None
            back = float(conf[key]["back"])

            # determine maximum total dimension
            maxnx = max(maxnx, nxtot)
            maxny = max(maxny, nytot)

            # store parameters
            ccd_pars[key[3:].strip()] = {
                "nxtot": nxtot,
                "nytot": nytot,
                "xcen": xcen,
                "ycen": ycen,
                "angle": angle,
                "scale": scale,
                "xoff": xoff,
                "yoff": yoff,
                "fscale": fscale,
                "toff": toff,
                "field": field,
                "ndiv": ndiv,
                "back": back,
            }

    if not len(ccd_pars):
        raise ValueError("hipercam.makedata: no CCDs found in " + config)

    # get the timing data
    utc_start = Time(conf["timing"]["utc_start"])
    exposure = float(conf["timing"]["exposure"])
    deadtime = float(conf["timing"]["deadtime"])

    # Generate the CCDs, store the read / gain values
    ccds = hcam.Group(hcam.CCD)
    rgs = {}
    for cnam, pars in ccd_pars.items():

        # Generate header with timing data
        head = fits.Header()
        td = TimeDelta(pars["toff"], format="sec")
        utc = utc_start + td
        head["UTC"] = (utc.isot, "UTC at mid exposure")
        head["MJD"] = (utc.mjd, "MJD at mid exposure")
        head["EXPOSE"] = (exposure, "Exposure time, seconds")
        head["TIMEOK"] = (True, "Time status flag")

        # Generate the Windows
        winds = hcam.Group(hcam.Window)
        rgs[cnam] = {}
        for key in conf:
            if key.startswith("window"):
                iccd, wnam = key[6:].split()
                if iccd == cnam:
                    llx = int(conf[key]["llx"])
                    lly = int(conf[key]["lly"])
                    nx = int(conf[key]["nx"])
                    ny = int(conf[key]["ny"])
                    xbin = int(conf[key]["xbin"])
                    ybin = int(conf[key]["ybin"])
                    if len(winds):
                        wind = hcam.Window(
                            hcam.Winhead(llx, lly, nx, ny, xbin, ybin))
                    else:
                        # store the header in the first Window
                        wind = hcam.Window(
                            hcam.Winhead(llx, lly, nx, ny, xbin, ybin, head))

                    # Store the Window
                    winds[wnam] = wind

                    # Store read / gain value
                    rgs[cnam][wnam] = (
                        float(conf[key]["read"]),
                        float(conf[key]["gain"]),
                    )

        # Accumulate CCDs
        ccds[cnam] = hcam.CCD(winds, pars["nxtot"], pars["nytot"])

    # Make the template MCCD
    mccd = hcam.MCCD(ccds, thead)

    # Make a flat field
    flat = mccd.copy()
    if "flat" in conf:
        rms = float(conf["flat"]["rms"])
        for ccd in flat.values():
            # Generate dust
            nspeck = int(conf["flat"]["nspeck"])
            if nspeck:
                radius = float(conf["flat"]["radius"])
                depth = float(conf["flat"]["depth"])
                specks = []
                for n in range(nspeck):
                    x = np.random.uniform(0.5, ccd.nxtot + 0.5)
                    y = np.random.uniform(0.5, ccd.nytot + 0.5)
                    specks.append(Dust(x, y, radius, depth))

            # Set the flat field values
            for wind in ccd.values():
                wind.data = np.random.normal(1.0, rms, (wind.ny, wind.nx))
                if nspeck:
                    wind.add_fxy(specks)

        flat.head["DATATYPE"] = ("Flat field", "Artificially generated")
        fname = utils.add_extension(conf["flat"]["flat"], hcam.HCAM)

        flat.write(fname, overwrite)
        print("Saved flat field to ", fname)
    else:
        # Set the flat to unity
        flat.set_const(1.0)
        print("No flat field generated")

    # Make a bias frame
    bias = mccd.copy()
    if "bias" in conf:
        mean = float(conf["bias"]["mean"])
        rms = float(conf["bias"]["rms"])
        for ccd in bias.values():
            for wind in ccd.values():
                wind.data = np.random.normal(mean, rms, (wind.ny, wind.nx))

        bias.head["DATATYPE"] = ("Bias frame", "Artificially generated")
        fname = utils.add_extension(conf["bias"]["bias"], hcam.HCAM)
        bias.write(fname, overwrite)
        print("Saved bias frame to ", fname)
    else:
        # Set the bias to zero
        bias.set_const(0.0)
        print("No bias frame generated")

    # Everything is set to go, so now generate data files
    nfiles = int(conf["files"]["nfiles"])
    if nfiles == 0:
        out = mccd * flat + bias
        fname = utils.add_extension(conf["files"]["root"], hcam.HCAM)
        out.write(fname, overwrite)
        print("Written data to", fname)
    else:
        # file naming info
        root = conf["files"]["root"]
        ndigit = int(conf["files"]["ndigit"])

        # movement
        xdrift = float(conf["movement"]["xdrift"])
        ydrift = float(conf["movement"]["ydrift"])
        nreset = int(conf["movement"]["nreset"])
        jitter = float(conf["movement"]["jitter"])

        print("Now generating data")

        tdelta = TimeDelta(exposure + deadtime, format="sec")

        for nfile in range(nfiles):

            # copy over template (into a global variable for multiprocessing speed)
            _gframe = mccd.copy()

            # get x,y offset
            xoff = np.random.normal(xdrift * (nfile % nreset), jitter)
            yoff = np.random.normal(ydrift * (nfile % nreset), jitter)

            # create target fields for each CCD, add background
            _gfield = {}
            for cnam in _gframe.keys():
                p = ccd_pars[cnam]
                _gframe[cnam] += p["back"]

                if p["field"] is not None:
                    # get field modification settings
                    transform = Transform(
                        p["nxtot"],
                        p["nytot"],
                        p["xcen"],
                        p["ycen"],
                        p["angle"],
                        p["scale"],
                        p["xoff"] + xoff,
                        p["yoff"] + yoff,
                    )
                    fscale = p["fscale"]
                    _gfield[cnam] = p["field"].modify(transform, fscale)

            # add the targets in (slow step)
            if parallel:
                # run in parallel on whatever cores are available
                args = [(cnam, ccd_pars[cnam]["ndiv"]) for cnam in _gfield]
                with Pool() as pool:
                    ccds = pool.map(worker, args)
                for cnam in _gfield:
                    _gframe[cnam] = ccds.pop(0)
            else:
                # single core
                for cnam in _gfield:
                    ccd = _gframe[cnam]
                    ndiv = ccd_pars[cnam]["ndiv"]
                    field = _gfield[cnam]
                    for wind in ccd.values():
                        field.add(wind, ndiv)

            # Apply flat
            _gframe *= flat

            # Add noise
            for cnam, ccd in _gframe.items():
                for wnam, wind in ccd.items():
                    readout, gain = rgs[cnam][wnam]
                    wind.add_noise(readout, gain)

            # Apply bias
            _gframe += bias

            # data type on output
            if dtype == "float32":
                _gframe.float32()
            elif dtype == "uint16":
                _gframe.uint16()

            # Save
            fname = "{0:s}{1:0{2:d}d}{3:s}".format(root, nfile + 1, ndigit,
                                                   hcam.HCAM)

            _gframe.write(fname, overwrite)
            print("Written file {0:d} to {1:s}".format(nfile + 1, fname))

            # update times in template
            for ccd in mccd.values():
                head = ccd.head
                utc = Time(head["UTC"]) + tdelta
                head["UTC"] = (utc.isot, "UTC at mid exposure")
                head["MJD"] = (utc.mjd, "MJD at mid exposure")
Example #6
if __name__ == '__main__':

    logger = logging.getLogger('ztf_nights')
    logger.setLevel(logging.INFO)
    logging.basicConfig(filename='multiImage.log', level=logging.INFO)

    numSplits = 24
    ztf_data = '/home/rbisw/data/ZTF'
    scheduler_fname = os.path.join(ztf_data, 'test_schedule_v3.db')
    df = read_scheduler(scheduler_fname)

    dt = datetime(2018, 1, 3, 0, 0, tzinfo=pytz.timezone('US/Pacific'))
    tt = Time(dt)
    mjds = np.array(
        list((tt + TimeDelta(n, format='jd')).mjd for n in range(365)))
    obsHistIDs = list(
        (df['expMJD'] - mjd).abs().argsort()[:1].values[0] for mjd in mjds)
    dftodo = df.loc[obsHistIDs]
    dfs = np.array_split(dftodo, numSplits)

    def make_images(i):
        ztf_data = '/home/rbisw/data/ZTF'
        sim_fname = os.path.join(ztf_data, 'sn_coord_time.dat')
        ztf_fields = '/nfs/brahe/ZTF/Scheduler/NugentSim/tst/data_year'
        ztf_figs = '/nfs/brahe/scratch/rbiswas/ztf_figs_year2/'
        logfname = 'images_{}.log'.format(i)
        logfname = os.path.join(ztf_figs, logfname)
        tsplitstart = time.time()
        with open(logfname, 'w') as f:
            f.write('starting split {0} at time {1}\n'.format(i, tsplitstart))
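
Since TimeDelta accepts arrays, the list comprehension over range(365) can be done in a single vectorized step; a sketch using the same start date (without the timezone handling of the original):

import numpy as np
from astropy.time import Time, TimeDelta

tt = Time('2018-01-03')
mjds = (tt + TimeDelta(np.arange(365), format='jd')).mjd   # ndarray of 365 MJDs
print(mjds[:3])
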
Example #7
def test_timedelta_scales(scale, tmpdir):

    tree = dict(timedelta=TimeDelta(0.125, scale=scale, format="jd"))
    assert_roundtrip_tree(tree, tmpdir)
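
The test above round-trips TimeDelta objects that carry an explicit time scale. Constructing one directly looks like this (values are illustrative):

from astropy.time import TimeDelta

dt = TimeDelta(0.125, scale='tai', format='jd')
print(dt.scale, dt.jd, dt.sec)    # tai 0.125 10800.0
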
Example #8
File: cache.py Project: wtbarnes/sunpy
    def __init__(self, downloader, storage, cache_dir, expiry=10 * u.day):
        self._downloader = downloader
        self._storage = storage
        self._cache_dir = Path(cache_dir)
        self._expiry = expiry if expiry is None else TimeDelta(expiry)
Example #9
import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.time import TimeDelta

import sunpy.coordinates
import sunpy.map
from sunpy.io.special import srs
from sunpy.net import Fido
from sunpy.net import attrs as a
from sunpy.time import parse_time

##############################################################################
# For this example, we will search for and download a single HMI magnetogram using Fido.
start_time = parse_time("2017-01-25")
end_time = start_time + TimeDelta(23 * u.hour + 59 * u.minute + 59 * u.second)
results = Fido.search(a.Time(start_time, end_time),
                      a.Instrument.hmi & a.Physobs.los_magnetic_field,
                      a.Sample(60 * u.second))

##############################################################################
# Let's select only the first file, download it and create a map.
result = results[0, 0]
file_name = Fido.fetch(result)
smap = sunpy.map.Map(file_name)

##############################################################################
# Download the SRS file.
srs_results = Fido.search(a.Time(start_time, end_time), a.Instrument.srs_table)
srs_downloaded_files = Fido.fetch(srs_results)
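
The end_time above is the start of the day plus one second short of 24 hours, built from unit Quantities. The same arithmetic in plain astropy (start date copied from the snippet):

import astropy.units as u
from astropy.time import Time, TimeDelta

start_time = Time('2017-01-25')
end_time = start_time + TimeDelta(23 * u.hour + 59 * u.minute + 59 * u.second)
print(end_time.iso)    # 2017-01-25 23:59:59.000
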
Example #10
File: forms.py Project: obviousrebel/jwql
class MnemonicQueryForm(forms.Form):
    """A triple-field form to query mnemonic records in the DMS EDB."""

    production_mode = False

    if production_mode:
        # times for default query (one day one week ago)
        now = Time.now()
        delta_day = -7.
        range_day = 1.
        default_start_time = now + TimeDelta(delta_day, format='jd')
        default_end_time = now + TimeDelta(delta_day + range_day, format='jd')
    else:
        # example for testing
        default_start_time = Time('2019-01-16 00:00:00.000', format='iso')
        default_end_time = Time('2019-01-16 00:01:00.000', format='iso')

    default_mnemonic_identifier = 'IMIR_HK_ICE_SEC_VOLT4'

    # Define search fields
    search = forms.CharField(label='mnemonic', max_length=500, required=True,
                             initial=default_mnemonic_identifier, empty_value='Search',
                             help_text="Mnemonic identifier")

    start_time = forms.CharField(label='start', max_length=500, required=False,
                                 initial=default_start_time.iso, help_text="Start time")

    end_time = forms.CharField(label='end', max_length=500, required=False,
                               initial=default_end_time.iso, help_text="End time")

    # Initialize attributes
    search_type = None

    def clean_search(self):
        """Validate the "search" field.

        Check that the input is a valid mnemonic identifier.

        Returns
        -------
        str
            The cleaned data input into the "search" field

        """
        # Get the cleaned search data
        search = self.cleaned_data['search']

        if is_valid_mnemonic(search):
            self.search_type = 'mnemonic'
        else:
            raise forms.ValidationError('Invalid search term {}. Please enter a valid DMS EDB '
                                        'mnemonic.'.format(search))

        return self.cleaned_data['search']

    def clean_start_time(self):
        """Validate the start time.

        Returns
        -------
        str
           The cleaned data input into the start_time field

        """
        start_time = self.cleaned_data['start_time']
        try:
            Time(start_time, format='iso')
        except ValueError:
            raise forms.ValidationError('Invalid start time {}. Please enter a time in iso format, '
                                        'e.g. {}'.format(start_time, self.default_start_time))
        return self.cleaned_data['start_time']

    def clean_end_time(self):
        """Validate the end time.

        Returns
        -------
        str
           The cleaned data input into the end_time field

        """
        end_time = self.cleaned_data['end_time']
        try:
            Time(end_time, format='iso')
        except ValueError:
            raise forms.ValidationError('Invalid end time {}. Please enter a time in iso format, '
                                        'e.g. {}.'.format(end_time, self.default_end_time))

        if 'start_time' in self.cleaned_data.keys():
            # verify that end_time is later than start_time
            if self.cleaned_data['end_time'] <= self.cleaned_data['start_time']:
                raise forms.ValidationError('Invalid time inputs. End time is required to be after'
                                            ' Start time.')

        return self.cleaned_data['end_time']
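
The production-mode defaults are built by offsetting Time.now() with day-valued TimeDeltas. A small sketch of that one-day window one week back (a fixed "now" is used so the output is reproducible):

from astropy.time import Time, TimeDelta

now = Time('2019-01-23 12:00:00')            # stand-in for Time.now()
delta_day, range_day = -7.0, 1.0
default_start_time = now + TimeDelta(delta_day, format='jd')
default_end_time = now + TimeDelta(delta_day + range_day, format='jd')
print(default_start_time.iso, default_end_time.iso)
# 2019-01-16 12:00:00.000 2019-01-17 12:00:00.000
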
Example #11
def get_detector_sun_angles_for_date(date, file):
    """
    Get the GBM detector angles vs the Sun as a function of time for a given
    date.

    Parameters
    ----------
    date : {parse_time_types}
        A date specified as a parse_time-compatible
        time string, number, or a datetime object.
    file : `str`
        A filepath to a Fermi/LAT weekly pointing file (e.g. as obtained by the
        download_weekly_pointing_file function).

    Returns
    -------
    `collections.OrderedDict`
        The detector-to-Sun angles as a function of time, keyed by detector
        name ('n0' ... 'n11'), plus a 'time' entry.
    """

    date = parse_time(date)
    tran = TimeRange(date, date + TimeDelta(1 * u.day))
    scx, scz, times = get_scx_scz_in_timerange(tran, file)

    # retrieve the detector angle information in spacecraft coordinates
    detectors = nai_detector_angles()

    detector_to_sun_angles = []
    # get the detector vs Sun angles for each t and store in a list of
    # dictionaries.
    for i in range(len(scx)):
        detector_radecs = nai_detector_radecs(detectors, scx[i], scz[i],
                                              times[i])

        # this gets the sun position with RA in hours in decimal format
        # (e.g. 4.3). DEC is already in degrees
        sunpos_ra_not_in_deg = [
            sun.apparent_rightascension(times[i]),
            sun.apparent_declination(times[i])
        ]
        # now Sun position with RA in degrees
        sun_pos = [sunpos_ra_not_in_deg[0].to('deg'), sunpos_ra_not_in_deg[1]]
        # now get the angle between each detector and the Sun
        detector_to_sun_angles.append(
            get_detector_separation_angles(detector_radecs, sun_pos))

    # slice the list of dictionaries to get the angles for each detector in a
    # list form
    angles = OrderedDict()
    key_list = [
        'n0', 'n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9', 'n10',
        'n11', 'time'
    ]
    for i in range(13):
        if not key_list[i] == 'time':
            angles[key_list[i]] = [
                item[key_list[i]].value for item in detector_to_sun_angles
            ] * u.deg
        else:
            angles[key_list[i]] = [
                item[key_list[i]] for item in detector_to_sun_angles
            ]

    return angles
Example #12
def make_test_observation_table(observatory_name='HESS',
                                n_obs=10,
                                datestart=None,
                                dateend=None,
                                use_abs_time=False,
                                random_state='random-seed'):
    """Make a test observation table.

    For the moment, only random observation tables are created.
    If `datestart` and `dateend` are specified, the starting time
    of the observations will be restricted to the specified interval.
    These parameters are interpreted as dates; the precise hour of the
    day is ignored, unless the end date is within 1 day of the
    starting date, in which case the precise time of day is also
    considered.

    Parameters
    ----------
    observatory_name : str
        Name of the observatory; a list of choices is given in
        `~gammapy.obs.observatory_locations`.
    n_obs : int
        Number of observations for the obs table.
    datestart : `~astropy.time.Time`, optional
        Starting date for random generation of observation start time.
    dateend : `~astropy.time.Time`, optional
        Ending date for random generation of observation start time.
    use_abs_time : bool, optional
        Use absolute UTC times instead of [MET]_ seconds after the reference.
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}, optional
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    obs_table : `~gammapy.obs.ObservationTable`
        Observation table.
    """
    random_state = get_random_state(random_state)

    n_obs_start = 1

    obs_table = ObservationTable()

    # build a time reference as the start of 2010
    dateref = Time('2010-01-01T00:00:00', format='isot', scale='utc')
    dateref_mjd_fra, dateref_mjd_int = np.modf(dateref.mjd)

    # define table header
    obs_table.meta['OBSERVATORY_NAME'] = observatory_name
    obs_table.meta['MJDREFI'] = dateref_mjd_int
    obs_table.meta['MJDREFF'] = dateref_mjd_fra
    if use_abs_time:
        # show the observation times in UTC
        obs_table.meta['TIME_FORMAT'] = 'absolute'
    else:
        # show the observation times in seconds after the reference
        obs_table.meta['TIME_FORMAT'] = 'relative'
    header = obs_table.meta

    # obs id
    obs_id = np.arange(n_obs_start, n_obs_start + n_obs)
    obs_table['OBS_ID'] = obs_id

    # obs time: 30 min
    time_observation = Quantity(30. * np.ones_like(obs_id),
                                'minute').to('second')
    obs_table['TIME_OBSERVATION'] = time_observation

    # livetime: 25 min
    time_live = Quantity(25. * np.ones_like(obs_id), 'minute').to('second')
    obs_table['TIME_LIVE'] = time_live

    # start time
    #  - random points between the start of 2010 and the end of 2014 (unless
    # otherwise specified)
    #  - using the start of 2010 as a reference time for the header of the table
    #  - observations restrict to night time (only if specified time interval is
    # more than 1 day)
    #  - considering start of astronomical day at midday: implicit in setting
    # the start of the night, when generating random night hours
    if datestart is None:
        datestart = Time('2010-01-01T00:00:00', format='isot', scale='utc')
    if dateend is None:
        dateend = Time('2015-01-01T00:00:00', format='isot', scale='utc')
    time_start = random_state.uniform(datestart.mjd, dateend.mjd, len(obs_id))
    time_start = Time(time_start, format='mjd', scale='utc')

    # check if time interval selected is more than 1 day
    if (dateend - datestart).jd > 1.:
        # keep only the integer part (i.e. the day, not the fraction of the day)
        time_start_f, time_start_i = np.modf(time_start.mjd)
        time_start = Time(time_start_i, format='mjd', scale='utc')

        # random generation of night hours: 6 h (from 22 h to 4 h), leaving 1/2 h
        # time for the last run to finish
        night_start = Quantity(22., 'hour')
        night_duration = Quantity(5.5, 'hour')
        hour_start = random_state.uniform(
            night_start.value, night_start.value + night_duration.value,
            len(obs_id))
        hour_start = Quantity(hour_start, 'hour')

        # add night hour to integer part of MJD
        time_start += hour_start

    if use_abs_time:
        # show the observation times in UTC
        time_start = Time(time_start.isot)
    else:
        # show the observation times in seconds after the reference
        time_start = time_relative_to_ref(time_start, header)
        # converting to quantity (better treatment of units)
        time_start = Quantity(time_start.sec, 'second')

    obs_table['TIME_START'] = time_start

    # stop time
    # calculated as TIME_START + TIME_OBSERVATION
    if use_abs_time:
        time_stop = Time(obs_table['TIME_START'])
        time_stop += TimeDelta(obs_table['TIME_OBSERVATION'])
    else:
        time_stop = TimeDelta(obs_table['TIME_START'])
        time_stop += TimeDelta(obs_table['TIME_OBSERVATION'])
        # converting to quantity (better treatment of units)
        time_stop = Quantity(time_stop.sec, 'second')

    obs_table['TIME_STOP'] = time_stop

    # az, alt
    # random points in a sphere above 45 deg altitude
    az, alt = sample_sphere(size=len(obs_id),
                            lon_range=Angle([0, 360], 'degree'),
                            lat_range=Angle([45, 90], 'degree'),
                            random_state=random_state)
    az = Angle(az, 'degree')
    alt = Angle(alt, 'degree')
    obs_table['AZ'] = az
    obs_table['ALT'] = alt

    # RA, dec
    # derive from az, alt taking into account that alt, az represent the values
    # at the middle of the observation, i.e. at time_ref + (TIME_START + TIME_STOP)/2
    # (or better: time_ref + TIME_START + (TIME_OBSERVATION/2))
    # in use_abs_time mode, the time_ref should not be added, since it's already included
    # in TIME_START and TIME_STOP
    az = Angle(obs_table['AZ'])
    alt = Angle(obs_table['ALT'])
    if use_abs_time:
        obstime = Time(obs_table['TIME_START'])
        obstime += TimeDelta(obs_table['TIME_OBSERVATION']) / 2.
    else:
        obstime = time_ref_from_dict(obs_table.meta)
        obstime += TimeDelta(obs_table['TIME_START'])
        obstime += TimeDelta(obs_table['TIME_OBSERVATION']) / 2.
    location = observatory_locations[observatory_name]
    alt_az_coord = AltAz(az=az, alt=alt, obstime=obstime, location=location)
    sky_coord = alt_az_coord.transform_to(FK5)
    obs_table['RA'] = sky_coord.ra
    obs_table['DEC'] = sky_coord.dec

    # positions

    # number of telescopes
    # random integers between 3 and 4
    n_tels_min = 3
    n_tels_max = 4
    n_tels = random_state.randint(n_tels_min, n_tels_max + 1, len(obs_id))
    obs_table['N_TELS'] = n_tels

    # muon efficiency
    # random between 0.6 and 1.0
    muon_efficiency = random_state.uniform(low=0.6, high=1.0, size=len(obs_id))
    obs_table['MUON_EFFICIENCY'] = muon_efficiency

    return obs_table
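
In the relative ("MET") branch the table stores plain second Quantities, which are promoted back to TimeDelta before being added to the reference time. The core of that round trip, in isolation (values are illustrative):

from astropy.time import Time, TimeDelta
from astropy.units import Quantity

time_ref = Time('2010-01-01T00:00:00', scale='utc')
tstart = Quantity(120.0, 'second')             # seconds after the reference
tobs = Quantity(1800.0, 'second')              # 30 min observation

mid = time_ref + TimeDelta(tstart) + TimeDelta(tobs) / 2.0
print(mid.isot)                                # 2010-01-01T00:17:00.000
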
Example #13
}
WCS_NO_COORDS = WCS(header=H_NO_COORDS, naxis=3)

SOURCE_DATA_DN = np.array([[[0.563, 1.132, -1.343], [-0.719, 1.441, 1.566]],
                           [[0.563, 1.132, -1.343], [-0.719, 1.441, 1.566]]])
SOURCE_UNCERTAINTY_DN = np.sqrt(SOURCE_DATA_DN)

TIME_DIM_LEN = SOURCE_DATA_DN.shape[0]
SINGLES_EXPOSURE_TIME = 2.
EXPOSURE_TIME = u.Quantity(np.zeros(TIME_DIM_LEN) + SINGLES_EXPOSURE_TIME,
                           unit=u.s)

# Define sample extra coords
EXTRA_COORDS0 = [
    ("time", 0,
     Time('2017-01-01') + TimeDelta(np.arange(TIME_DIM_LEN), format='sec')),
    ("exposure time", 0, EXPOSURE_TIME)
]
EXTRA_COORDS1 = [
    ("time", 0,
     (Time('2017-01-01') +
      TimeDelta(np.arange(TIME_DIM_LEN, TIME_DIM_LEN * 2), format='sec'))),
    ("exposure time", 0, EXPOSURE_TIME)
]

# Define SpectrogramCubes in various units.
spectrogram_DN0 = SpectrogramCube(SOURCE_DATA_DN, WCS0, EXTRA_COORDS0, u.ct,
                                  SOURCE_UNCERTAINTY_DN)
spectrogram_DN_per_s0 = SpectrogramCube(
    SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME, WCS0, EXTRA_COORDS0, u.ct / u.s,
    SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME)
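
The extra "time" coordinate is an array-valued Time built by adding an arange of seconds to a start epoch; the same construction on its own (smaller array for brevity):

import numpy as np
from astropy.time import Time, TimeDelta

times = Time('2017-01-01') + TimeDelta(np.arange(4), format='sec')
print(times.isot)   # 2017-01-01T00:00:00.000 ... 2017-01-01T00:00:03.000
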
Example #14
    def start_time(self):
        """Start time of the observation."""
        return (Time(self['STT_IMJD'], scale='utc', format='mjd') +
                TimeDelta(self['STT_SMJD'], self['STT_OFFS'], format='sec'))
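
STT_SMJD (integer seconds) and STT_OFFS (fractional seconds) are passed as the two-part value of a single TimeDelta, which preserves precision better than summing the floats first. A sketch with made-up header values:

from astropy.time import Time, TimeDelta

stt_imjd, stt_smjd, stt_offs = 58000, 43200, 0.000123456   # illustrative values
start = (Time(stt_imjd, scale='utc', format='mjd') +
         TimeDelta(stt_smjd, stt_offs, format='sec'))
print(start.isot)
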
Example #15
File: kepler.py Project: vrodgom/astropy
def kepler_fits_reader(filename):
    """
    This serves as the FITS reader for KEPLER or TESS files within
    astropy-timeseries.

    This function should generally not be called directly, and instead this
    time series reader should be accessed with the
    :meth:`~astropy.timeseries.TimeSeries.read` method::

        >>> from astropy.timeseries import TimeSeries
        >>> ts = TimeSeries.read('kplr33122.fits', format='kepler.fits')  # doctest: +SKIP

    Parameters
    ----------
    filename : `str` or `pathlib.Path`
        File to load.

    Returns
    -------
    ts : `~astropy.timeseries.TimeSeries`
        Data converted into a TimeSeries.
    """
    hdulist = fits.open(filename)
    # Get the lightcurve HDU
    telescope = hdulist[0].header['telescop'].lower()

    if telescope == 'tess':
        hdu = hdulist['LIGHTCURVE']
    elif telescope == 'kepler':
        hdu = hdulist[1]
    else:
        raise NotImplementedError(
            "{} is not implemented, only KEPLER or TESS are "
            "supported through this reader".format(
                hdulist[0].header['telescop']))

    if hdu.header['EXTVER'] > 1:
        raise NotImplementedError("Support for {} v{} files not yet "
                                  "implemented".format(hdu.header['TELESCOP'],
                                                       hdu.header['EXTVER']))

    # Check time scale
    if hdu.header['TIMESYS'] != 'TDB':
        raise NotImplementedError("Support for {} time scale not yet "
                                  "implemented in {} reader".format(
                                      hdu.header['TIMESYS'],
                                      hdu.header['TELESCOP']))

    tab = Table.read(hdu, format='fits')

    # Some KEPLER files have a T column instead of TIME.
    if "T" in tab.colnames:
        tab.rename_column("T", "TIME")

    for colname in tab.colnames:
        # Fix units
        if tab[colname].unit == 'e-/s':
            tab[colname].unit = 'electron/s'
        if tab[colname].unit == 'pixels':
            tab[colname].unit = 'pixel'

        # Rename columns to lowercase
        tab.rename_column(colname, colname.lower())

    # Filter out NaN rows
    nans = np.isnan(tab['time'].data)
    if np.any(nans):
        warnings.warn(f'Ignoring {np.sum(nans)} rows with NaN times')
    tab = tab[~nans]

    # Time column is dependent on source and we correct it here
    reference_date = Time(hdu.header['BJDREFI'],
                          hdu.header['BJDREFF'],
                          scale=hdu.header['TIMESYS'].lower(),
                          format='jd')
    time = reference_date + TimeDelta(tab['time'].data)
    time.format = 'isot'

    # Remove original time column
    tab.remove_column('time')

    return TimeSeries(time=time, data=tab)
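
BJDREFI/BJDREFF form a two-part reference epoch and the TIME column holds day offsets from it, so the conversion is a reference Time plus a day-valued TimeDelta. A sketch with stand-in header values (not read from any file):

import numpy as np
from astropy.time import Time, TimeDelta

bjdrefi, bjdreff = 2454833, 0.0               # stand-ins for BJDREFI / BJDREFF
offsets = np.array([120.5, 121.5])            # TIME column, days since reference

reference_date = Time(bjdrefi, bjdreff, scale='tdb', format='jd')
time = reference_date + TimeDelta(offsets, format='jd')
time.format = 'isot'
print(time)
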
Example #16
import os
from astropy.time import Time, TimeDelta
import astropy.units as u

start_time = Time('2020-10-29T00:00:00', format='isot')
ndays = 170

for ii in range(ndays):
    time = start_time + TimeDelta(ii * u.day)
    system_command = "python kp84_slack_bot.py -d --day %s" % time.isot.split(
        "T")[0].replace("-", "")
    os.system(system_command)
Example #17
def search_sentinels(platform_name, df, aoi, dt=2, user=None, pwd=None,
                     proj_string='+init=EPSG:3995', product_type=None,
                     min_cloud_cover=0, max_cloud_cover=100,
                     swath_type=None, f_out=None):
    """
    Search Sentinel-1/2 images overlapping ICESat-2 data within +- dt

    Parameters:
    -----------
    platform_name : str ['Sentinel-1 | Sentinel-2']
        name of the platform for which images will be searched
    df : panda dataframe
        ICESat-2 data
    aoi: str, list
        area of interest as WKT string or bounding box[lllon, lllat, urlon, urlat]
    dt: int, float
        maximum difference in hours between ICESat-2 and Sentinel acquisition times
    user : str
        username to connect to the Copernicus Scientific Hub
    pwd : str
        password to connect to the Copernicus Scientific Hub
    proj_string: str
        projection string to be used with the pyproj module
    product_type : str
        name of the type of product to be searched (more info at https://scihub.copernicus.eu/userguide/)
    swath_type : str
        name of the type of swath to be searched (Sentinel-1 only, more info at https://scihub.copernicus.eu/userguide/)
    min_cloud_cover: int, float
        Minimum cloud coverage in percentage (Sentinel-2 only)
    max_cloud_cover: int, float
        Maximum cloud coverage in percentage (Sentinel-2 only)        
    f_out : str
        path to file where to write results


    Returns:
    --------
    orbit_number, product_name, browse_url, download_url, t_diff, md_out
        Lists of the intersecting ICESat-2 orbit numbers, Sentinel product
        names, browse URLs, download URLs and time differences (hours),
        plus a dict with the metadata of the matching products.

    """

    #==========================================================================
    # Pre-processing
    #==========================================================================

    ### Imports
    import sys
    from sentinelsat import SentinelAPI
    # import wkt
    import pyproj
    import numpy as np
    import shapely.geometry as sg
    from shapely.wkt import dumps, loads
    from astropy.time import Time, TimeDelta
    from tqdm import tqdm

    ### Convert aoi to shapely polygon in projected CRS
    # define projection
    print("Creating AOI polygon...")
    proj        = pyproj.Proj(proj_string)
    # read aoi polygon
    if type(aoi) == str:
        aoi_temp    = loads(aoi)
    elif type(aoi) in (list, tuple):
        aoi_temp    = sg.box(aoi[0], aoi[1], aoi[2], aoi[3])
        aoi         = aoi_temp.wkt
    else:
        print("ERROR: 'aoi' should be provided as a WKT string or bounding box (list)")
        sys.exit(1)
        
    ### Check input parameters
    if product_type is None:
        if platform_name == 'Sentinel-1':
            product_type    = 'GRD'
            print("product_type set to: ", product_type)
        if platform_name == 'Sentinel-2':
            product_type    = 'S2MSI1C'
            print("product_type set to: ", product_type)
    if swath_type is None and platform_name == 'Sentinel-1':
        swath_type      = 'EW'
        print("swath_type set to: ", swath_type)
    
    # project coordinates and convert to shapely polygon
    x, y        = proj(aoi_temp.exterior.xy[0], aoi_temp.exterior.xy[1])
    aoi_poly    = sg.Polygon(list(zip(x, y)))

    ### Convert dt to astropy time object
    dtt         = TimeDelta(3600 * dt, format='sec')

    #==========================================================================
    # Processing
    #==========================================================================

    ### Project IS2 data to desired CRS
    print("Selecting orbit data inside AOI...")
    lon, lat    = np.array(df['lons']), np.array(df['lats'])
    x, y        = proj(lon, lat)
    
    ### Extract IS2 orbit number
    is2_orbits  = np.unique(df['orbit_number'])
    print("N. of orbits/points inside AOI: {}/{}".format(len(is2_orbits),
                                                         len(df)))

    ### Extract time period from IS2 data to query the server
    t_is2       = Time(df['time'], scale='utc')
    t_is2_start = min(t_is2) - dtt
    t_is2_stop  = max(t_is2) + dtt

    ### Read metadata
    print("Query for metadata...")
    api = SentinelAPI(user, pwd,'https://scihub.copernicus.eu/dhus',
                      timeout=600)
    if platform_name == 'Sentinel-1':
        md  = api.query(area=aoi, date=(t_is2_start.datetime, t_is2_stop.datetime),
                        platformname='Sentinel-1', area_relation='Intersects',
                        producttype=product_type, sensoroperationalmode=swath_type)
    elif platform_name == 'Sentinel-2':
        md  = api.query(area=aoi, date=(t_is2_start.datetime, t_is2_stop.datetime),
                        platformname='Sentinel-2', area_relation='Intersects',
                        cloudcoverpercentage=(min_cloud_cover, max_cloud_cover),
                        producttype=product_type)
    print("N. of total images: {}".format(len(md)))
    if len(md) == 0:
        return [], [], [], [], [], []

    ### Convert Sentinel acquisition times to astropy Time objects
    t_sen   = {}
    print("Converting time to astropy objects...")
    for el in md:
        t_sen[el]    = Time(md[el]['beginposition'], format='datetime',
                           scale='utc')

    ### Loop over orbits to find images that satisfy time constraints
    TimeDict    = {}
    t_is2       = []
    print("Looping over orbits to find intersections within {}h...".format(dt))
    for c, o in tqdm(enumerate(is2_orbits)):
        ### select IS2 data
        d_is2   = df[df['orbit_number'] == o]

        ### compute IS2 track central time
        t_temp      = Time(d_is2['time'], scale='utc')
        t_start_is2 = min(t_temp)
        t_stop_is2  = max(t_temp)
        t_is2_o     = t_start_is2 + (t_stop_is2 - t_start_is2) / 2
        t_is2.append(t_is2_o)

        ### save dict keys of images within +-dt from IS2
        i_t         = np.array(
            [el for el in md if np.abs((t_sen[el]  - t_is2_o).sec) <= dtt.sec])
        TimeDict[o] = i_t

    # get unique images within +-dt from all orbit data
    i_sen_t_int  = set(np.concatenate(list(TimeDict.values())).ravel())
    print("N. of images within {}h: {}".format(dt, len(i_sen_t_int)))
    if len(i_sen_t_int) == 0:
        return [], [], [], [], [], []

    ### Project images corner coordinates and convert to shapely polygons
    print("Creating images footprint polygons...")
    # loop over them, project corner coords and create polygons
    SenPolygonsDict  = {}
    for i in i_sen_t_int:
        # load S2 footprint
        aoi_sen      = loads(md[i]['footprint'])

        # check if multipolygon has more than 1 polygon defined
        if len(aoi_sen) > 1:
            print("WARNING: footprint for product {}".format(i),
                  "is defined by more than 1 polygon!!!")
        aoi_sen      = aoi_sen[0]

        # project corner coords
        x_sen, y_sen  = proj(aoi_sen.exterior.xy[0], aoi_sen.exterior.xy[1])

        # add polygon to dictionary
        SenPolygonsDict[i]   = sg.Polygon(list(zip(x_sen, y_sen)))


    ### Loop over orbits to find spatial intersections
    print("Looping over orbits to find intersections...")
    orbit_number    = []
    product_name    = []
    browse_url      = []
    download_url    = []
    t_diff          = []
    md_out          = {}
    for c, o in tqdm(enumerate(is2_orbits)):
        ### select IS2 data
        i       = df['orbit_number'] == o
        # check if track has at least 2 points
        if sum(i) < 2:
            continue
        d_is2    = df[i]
        x_is2    = x[i]
        y_is2    = y[i]

        ### create shapely line from IS2 track
        is2_line = sg.LineString(list(zip(x_is2, y_is2)))

        ### collect Sentinel polygon indices
        i_sen    = TimeDict[o]

        ### Loop over Sentinel polygons
        for i_poly in i_sen:
            ls_poly     = SenPolygonsDict[i_poly]
            if is2_line.intersects(ls_poly):
                orbit_number.append(o)
                t_diff.append((t_sen[i_poly] - t_is2[c]).sec / 3600)
                product_name.append(md[i_poly]['filename'])
                download_url.append(md[i_poly]['link'])
                browse_url.append(md[i_poly]['link_icon'])
                md_out[i_poly]  = md[i_poly]

    print("N. of total intersections: {}".format(len(orbit_number)))

    ### Print to file
    if f_out is not None:
        print("Printing results to {}...".format(f_out))
        with open(f_out, 'w') as fp:
            fp.write("orbit_number,t_diff_(h),product_id,dowload_url,browse_url\n")
            for i in range(len(orbit_number)):
                fp.write("{},{:.2f},{},{},{}\n".format(
                    orbit_number[i], t_diff[i], product_name[i],
                    download_url[i], browse_url[i]))

    return orbit_number, product_name, browse_url, download_url, t_diff, md_out
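
The ±dt filter compares the seconds of a Time difference against the seconds of the window TimeDelta. Reduced to its essentials (times are illustrative):

import numpy as np
from astropy.time import Time, TimeDelta

dt = 2                                         # hours
dtt = TimeDelta(3600 * dt, format='sec')

t_is2 = Time('2020-03-01 10:00:00')            # track central time
t_sen = Time(['2020-03-01 08:30:00', '2020-03-01 13:30:00'])

within = np.abs((t_sen - t_is2).sec) <= dtt.sec
print(within)                                  # [ True False]
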
Example #18
        # Now query the DRMS for the HMI.ME_720s_fd10 datasets. Download the inclination and
        # the field and get the fits header keywords that we need for the coordinates.
        c = drms.Client(email=drms_email, verbose=True)

        res = c.query('HMI.ME_720s_fd10['+query_middle_of_scan+']', \
                        key='T_REC, CRPIX1, CRPIX2, CRVAL1, CRVAL2, CDELT1, CDELT2, CROTA2', \
                        seg='inclination, field')

        # HMI timestamp in fits/isot format
        hmi_timestamp = Time(res[0]['T_REC'][0].replace('.', '-').replace(
            '_TAI', '').replace('_', 'T'),
                             scale='tai')

        # Calculate time difference and give a warning if it is more than 12 minutes.
        time_diff = TimeDelta(hmi_timestamp - middle_of_scan, format='sec')

        print('-----------------------------------------------------------')
        print('Hinode time (TAI, middle of scan): ', middle_of_scan)
        print('HMI time (TAI):                    ', hmi_timestamp)
        if time_diff.value > 720:
            print('*WARNING*: HMI/Hinode time more than 12 minutes apart.')

        export_request = 'HMI.ME_720s_fd10[' + query_middle_of_scan + ']{inclination, field}'

        r = c.export(export_request)

        hmi_inclination_hdu = fits.open(r.urls.url[0])
        hmi_field_hdu = fits.open(r.urls.url[1])

        hmi_inclination = hmi_inclination_hdu[1].data
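
The 12-minute check converts the difference of two Time objects into a seconds-format TimeDelta and inspects its .value. A minimal sketch with made-up timestamps (abs() added so the check also catches HMI times that are earlier):

from astropy.time import Time, TimeDelta

hinode = Time('2014-06-12 12:00:00', scale='tai')
hmi = Time('2014-06-12 12:24:00', scale='tai')

time_diff = TimeDelta(hmi - hinode, format='sec')
print(time_diff.value)               # 1440.0 seconds
if abs(time_diff.value) > 720:       # 12 minutes
    print('HMI/Hinode times more than 12 minutes apart')
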
Example #19
            # get corresponding orbital phases for a range of dates
            #ist_date1 = '2013-06-16 12:00:00'
            #ist_date2 = '2013-07-03 12:00:00'
            #ist_date1 = '2013-07-24 12:00:00'
            #ist_date2 = '2013-07-29 12:00:00'
            ist_date1 = '2014-06-12 12:00:00'
            ist_date2 = '2014-06-18 12:00:00'
            ist_utc = 0 * 5.5 / 24.
            mjd1 = Time(ist_date1, scale='utc').mjd - ist_utc
            mjd2 = Time(ist_date2, scale='utc').mjd - ist_utc
            for mjd in np.arange(mjd1, mjd2 + 1.e-5):
                time = Time(mjd, format='mjd', scale='utc', precision=0)
                ut_start = gmst2time(gmststart, time)
                ut_stop = gmst2time(gmststop, time)
                if ut_stop < ut_start:
                    ut_stop += TimeDelta(1., format='jd')
                ph_start, ph_stop = src.phase(ut_start), src.phase(ut_stop)
                ist_start = ut_start + TimeDelta(ist_utc, format='jd')
                ist_stop = ut_stop + TimeDelta(ist_utc, format='jd')
                print('{}-{}: {:4.2f}-{:4.2f}'.format(ist_start.iso,
                                                      ist_stop.iso[11:],
                                                      ph_start, ph_stop))

# 0834+06 before 1957+20
#
# 1133+16 before J1012+5207
#
#
# Need scintellation data for B1957, J1012
#
# LOFAR how high makes it useful? (elevation > 30?)
Example #20
def make_test_observation_table(
        observatory_name="hess",
        n_obs=10,
        az_range=Angle([0, 360], "deg"),
        alt_range=Angle([45, 90], "deg"),
        date_range=(Time("2010-01-01"), Time("2015-01-01")),
        use_abs_time=False,
        n_tels_range=(3, 4),
        random_state="random-seed",
):
    """Make a test observation table.
    Create an observation table following a specific pattern.
    For the moment, only random observation tables are created.
    The observation table is created according to a specific
    observatory, and randomizing the observation pointingpositions
    in a specified az-alt range.
    If a *date_range* is specified, the starting time
    of the observations will be restricted to the specified interval.
    These parameters are interpreted as date, the precise hour of the
    day is ignored, unless the end date is closer than 1 day to the
    starting date, in which case, the precise time of the day is also
    considered.
    In addition, a range can be specified for the number of telescopes.
    Parameters
    ----------
    observatory_name : str, optional
        Name of the observatory; a list of choices is given in
        `~gammapy.data.observatory_locations`.
    n_obs : int, optional
        Number of observations for the obs table.
    az_range : `~astropy.coordinates.Angle`, optional
        Azimuth angle range (start, end) for random generation of
        observation pointing positions.
    alt_range : `~astropy.coordinates.Angle`, optional
        Altitude angle range (start, end) for random generation of
        observation pointing positions.
    date_range : `~astropy.time.Time`, optional
        Date range (start, end) for random generation of observation
        start time.
    use_abs_time : bool, optional
        Use absolute UTC times instead of [MET]_ seconds after the reference.
    n_tels_range : int, optional
        Range (start, end) of number of telescopes participating in
        the observations.
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}, optional
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    obs_table : `~gammapy.data.ObservationTable`
        Observation table.
    """
    random_state = get_random_state(random_state)

    n_obs_start = 1

    obs_table = ObservationTable()

    # build a time reference as the start of 2010
    dateref = Time("2010-01-01T00:00:00")
    dateref_mjd_fra, dateref_mjd_int = np.modf(dateref.mjd)

    # define table header
    obs_table.meta["OBSERVATORY_NAME"] = observatory_name
    obs_table.meta["MJDREFI"] = dateref_mjd_int
    obs_table.meta["MJDREFF"] = dateref_mjd_fra
    obs_table.meta["TIMESYS"] = "TT"
    obs_table.meta["TIMEUNIT"] = "s"
    obs_table.meta["TIMEREF"] = "LOCAL"
    if use_abs_time:
        # show the observation times in UTC
        obs_table.meta["TIME_FORMAT"] = "absolute"
    else:
        # show the observation times in seconds after the reference
        obs_table.meta["TIME_FORMAT"] = "relative"
    header = obs_table.meta

    # obs id
    obs_id = np.arange(n_obs_start, n_obs_start + n_obs)
    obs_table["OBS_ID"] = obs_id

    # obs time: 30 min
    ontime = Quantity(30.0 * np.ones_like(obs_id), "minute").to("second")
    obs_table["ONTIME"] = ontime

    # livetime: 25 min
    time_live = Quantity(25.0 * np.ones_like(obs_id), "minute").to("second")
    obs_table["LIVETIME"] = time_live

    # start time
    #  - random points between the start of 2010 and the end of 2014 (unless
    # otherwise specified)
    #  - using the start of 2010 as a reference time for the header of the table
    #  - observations restrict to night time (only if specified time interval is
    # more than 1 day)
    #  - considering start of astronomical day at midday: implicit in setting
    # the start of the night, when generating random night hours
    datestart = date_range[0]
    dateend = date_range[1]
    time_start = random_state.uniform(datestart.mjd, dateend.mjd, len(obs_id))
    time_start = Time(time_start, format="mjd", scale="utc")

    # check if time interval selected is more than 1 day
    if (dateend - datestart).jd > 1.0:
        # keep only the integer part (i.e. the day, not the fraction of the day)
        time_start_f, time_start_i = np.modf(time_start.mjd)
        time_start = Time(time_start_i, format="mjd", scale="utc")

        # random generation of night hours: 6 h (from 22 h to 4 h), leaving 1/2 h
        # time for the last run to finish
        night_start = Quantity(22.0, "hour")
        night_duration = Quantity(5.5, "hour")
        hour_start = random_state.uniform(
            night_start.value, night_start.value + night_duration.value,
            len(obs_id))
        hour_start = Quantity(hour_start, "hour")

        # add night hour to integer part of MJD
        time_start += hour_start

    if use_abs_time:
        # show the observation times in UTC
        time_start = Time(time_start.isot)
    else:
        # show the observation times in seconds after the reference
        time_start = time_relative_to_ref(time_start, header)
        # converting to quantity (better treatment of units)
        time_start = Quantity(time_start.sec, "second")

    obs_table["TSTART"] = time_start

    # stop time
    # calculated as TSTART + ONTIME
    if use_abs_time:
        time_stop = Time(obs_table["TSTART"])
        time_stop += TimeDelta(obs_table["ONTIME"])
    else:
        time_stop = TimeDelta(obs_table["TSTART"])
        time_stop += TimeDelta(obs_table["ONTIME"])
        # converting to quantity (better treatment of units)
        time_stop = Quantity(time_stop.sec, "second")

    obs_table["TSTOP"] = time_stop

    # az, alt
    # random points in a portion of sphere; default: above 45 deg altitude
    az, alt = sample_sphere(
        size=len(obs_id),
        lon_range=az_range,
        lat_range=alt_range,
        random_state=random_state,
    )
    az = Angle(az, "deg")
    alt = Angle(alt, "deg")
    obs_table["AZ"] = az
    obs_table["ALT"] = alt

    # RA, dec
    # derive from az, alt taking into account that alt, az represent the values
    # at the middle of the observation, i.e. at time_ref + (TIME_START + TIME_STOP)/2
    # (or better: time_ref + TIME_START + (TIME_OBSERVATION/2))
    # in use_abs_time mode, the time_ref should not be added, since it's already included
    # in TIME_START and TIME_STOP
    az = Angle(obs_table["AZ"])
    alt = Angle(obs_table["ALT"])
    if use_abs_time:
        obstime = Time(obs_table["TSTART"])
        obstime += TimeDelta(obs_table["ONTIME"]) / 2.0
    else:
        obstime = time_ref_from_dict(obs_table.meta)
        obstime += TimeDelta(obs_table["TSTART"])
        obstime += TimeDelta(obs_table["ONTIME"]) / 2.0
    location = observatory_locations[observatory_name]
    altaz_frame = AltAz(obstime=obstime, location=location)
    alt_az_coord = SkyCoord(az, alt, frame=altaz_frame)
    sky_coord = alt_az_coord.transform_to("icrs")
    obs_table["RA_PNT"] = sky_coord.ra
    obs_table["DEC_PNT"] = sky_coord.dec

    # positions

    # number of telescopes
    # random integers in a specified range; default: between 3 and 4
    n_tels = random_state.randint(n_tels_range[0], n_tels_range[1] + 1,
                                  len(obs_id))
    obs_table["N_TELS"] = n_tels

    # muon efficiency
    # random between 0.6 and 1.0
    muoneff = random_state.uniform(low=0.6, high=1.0, size=len(obs_id))
    obs_table["MUONEFF"] = muoneff

    return obs_table
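
A short usage sketch for the function above (gammapy and astropy assumed installed; the printed column selection is illustrative):

from astropy.coordinates import Angle
from astropy.time import Time

# build a small, reproducible random observation table
obs_table = make_test_observation_table(
    observatory_name="hess",
    n_obs=5,
    alt_range=Angle([60, 90], "deg"),
    date_range=(Time("2012-01-01"), Time("2012-12-31")),
    random_state=0,
)
print(obs_table["OBS_ID", "TSTART", "ALT", "N_TELS"])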
Example #21
def test_timedelta_vector(tmpdir):

    tree = dict(timedelta=TimeDelta([1, 2] * u.day))
    assert_roundtrip_tree(tree, tmpdir)
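
The helper assert_roundtrip_tree comes from the asdf test utilities; a minimal hand-rolled equivalent, assuming the asdf and asdf-astropy packages provide the TimeDelta serialization, might look like:

import asdf
import astropy.units as u
from astropy.time import TimeDelta

dt = TimeDelta([1, 2] * u.day)
asdf.AsdfFile({"timedelta": dt}).write_to("timedelta.asdf")  # serialize
with asdf.open("timedelta.asdf") as af:                      # read back
    assert all(af.tree["timedelta"].jd == dt.jd)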
Example #22
crossmatching, you need to have --doWriteDb active")

    if args.doKNFit:
        print('Fitting to kilonova grid...')

        from knfit import do_knfit
        for objid in allids:
            t = tbl_lc[tbl_lc['name'] == objid]
            do_knfit(t.to_pandas().rename(columns={"filter": "filtname"}))

    if args.doLCOStatus:
        print('Checking LCO for existing observations...')

        # LCO sometime over next 2 weeks
        tstart = Time.now()
        tend = Time.now() + TimeDelta(14 * u.day)
        tstart = str(tstart.isot).replace("T", " ")
        tend = str(tend.isot).replace("T", " ")

        #Read the secrets
        lco_secrets = ascii.read('../lco/secrets.csv', format='csv')
        PROPOSAL_ID = lco_secrets['PROPOSAL_ID'][0]
        API_TOKEN = lco_secrets['API_TOKEN'][0]

        lco_programs = args.lco_programs.split(",")

        from lco import check_observations
        obs = check_observations(API_TOKEN, lco_programs=lco_programs)

    if args.doLCOSubmission:
        print('Triggering LCO...')
Example #23
 def setup(self):
     # Identical to what is used in LeapSeconds.auto_open().
     self.good_enough = (iers.LeapSeconds._today() + TimeDelta(
         180 - iers._none_to_float(iers.conf.auto_max_age), format='jd'))
     self._auto_open_files = iers.LeapSeconds._auto_open_files.copy()
Example #24
    def open(self):
        #open file
        filename = self.filename
        scan_file = self.scan_file
        start_loc = self.start_loc

        try:
            if type(filename) is file:
                self._lofasm_file = filename
            else:
                #check file extension
                if not filename.endswith('.lofasm') and \
                   not filename.endswith('.lofasm.gz'):
                    print "Warning: {} file extension not recognized.".format(
                        filename)
                    print "Attempting to open anyway."
                #get file handler
                if self.gz:
                    print "Warning: gzipped files are only supported for header versions 4 or higher."
                    self._lofasm_file = gzip.open(filename, 'rb')
                else:
                    self._lofasm_file = open(filename, 'rb')

        except IOError as err:
            print "Error opening ", filename
            print err.message
            raise IOError('{} does not exist'.format(filename))

        #get header information
        self._file_hdr = parse_file_header(self._lofasm_file)

        #find end of file
        if int(self._file_hdr[2][1]) >= 4:
            self._lofasm_file_end = self._file_hdr[3][1] + int(self._file_hdr[12][1])*INTEGRATION_SIZE_B
        else:
            self._lofasm_file_end = self._get_file_end_loc()

        #get integration/burst size
        self._int_size = INTEGRATION_SIZE_B

        if scan_file:
            self._data_start, errno = check_headers(self._lofasm_file)
        elif start_loc:
            self._data_start, errno = start_loc, 0
        else:
            try:
                self._lofasm_file.seek(self._file_hdr[3][1])
                self._update_ptr()
                self._update_data()
                self._data_start = self._file_hdr[3][1]
            except IntegrationError:
                if self._file_hdr[2][1] == 1 or self._file_hdr[2][1] == 2:
                    self._data_start = 204896
                else:
                    self._data_start = self._find_next_burst(start=0)

        #move file pointer to data location
        self._lofasm_file.seek(self._data_start)

        self._update_ptr()

        #get times
        self.int_time = TimeDelta(float(self._file_hdr[10][1]), format='sec')
        mjd_start = float(self._file_hdr[8][1])
        mjd_start += float(self._file_hdr[9][1])/1000/86400
        self.time_start = Time(mjd_start, format='mjd', scale='utc')
        self.time = self.time_start


        #increment to first integration
        try:
            self._update_data()
        except IntegrationError:
            self.corrupt = True


        #set reference accumulation number
        self._acc_num_ref = self._acc_num

        #update time
        self._update_time()

        self._status_open = True
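
The start time above folds the millisecond header field into the MJD by hand; an equivalent formulation with an explicit TimeDelta (the header values below are hypothetical) would be:

from astropy.time import Time, TimeDelta

mjd_day = 57327         # hypothetical integer MJD from the file header
msec_of_day = 43200000  # hypothetical milliseconds past midnight
time_start = Time(mjd_day, format='mjd', scale='utc') + \
    TimeDelta(msec_of_day / 1000.0, format='sec')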
Example #25
def process_data(m):
    """ 
    Process data from stdin, m is a doppler model
    as obtained from read_esa_predicts
    """

    logging.debug("Sample rate is %0.3f Hz" % (args.fs))

    # data type for binary file
    if args.swapiq:
        dt = [('imag', 'float32'), ('real', 'float32')]
    else:
        dt = [('real', 'float32'), ('imag', 'float32')]

    # Number of samples to work with
    fs = float(args.fs)

    # Starting phase
    phi = 0
    samples_read = 0
    elapsed_sec = 0
    fc = args.fc
    fc_rate = args.fc_rate

    if m:
        if 'utc' in m:
            indarray = args.utc == m['utc']
            ind = np.flatnonzero(indarray)
            if len(ind) != 1:
                logging.error("Couldn't find UTC time %s in doppler file" %
                              args.utc)
                sys.exit(-1)
            ind = ind[0]
        else:
            # Starts at 0 if no UTC time given
            ind = 0

    past_predicts_warning_printed = False

    if args.save_doppler:
        doppler_out_fid = open(args.save_doppler, "w")
    else:
        doppler_out_fid = None

    while 1:
        # Compute number of samples to read
        n = int(round((elapsed_sec + 1.0) * fs - samples_read))
        data = np.fromfile(sys.stdin, dtype=dt, count=n)

        current_utc = args.utc + TimeDelta(elapsed_sec, format='sec')

        logging.debug("%s (%d): Reading %d samples" %
                      (current_utc, elapsed_sec, n))

        if len(data) == 0:
            break

        # Reserve arrays
        iq_out = np.zeros([2 * len(data)], dtype=np.float32)
        iq = np.zeros([len(data)], dtype=complex)

        # Make complex number
        iq.real = data['real']
        iq.imag = data['imag']

        # Predicts provided?
        if m:
            # If we provide a model, use the model doppler and doppler rate
            # Note: if args.fc is given, we apply this args.fc offset to all data
            # in the predicts file. We do NOT apply an args.fc_rate offset, though,
            # and only use the fc_rate from the predicts file.
            try:
                fc = args.fc + m['doppler_hz'][ind + elapsed_sec]
                fc_rate = m['doppler_rate_hz_s'][ind + elapsed_sec]
                if 'utc' in m:
                    delta_time = current_utc - m['utc'][ind + elapsed_sec]
                    # Make sure our model time and current time align
                    delta_sec = (delta_time * 86400).value
                    if delta_sec > 0.1:
                        logging.error(
                            "Model time and current time mismatch by %f seconds"
                            % (delta_sec))
                        sys.exit(-1)

            except IndexError:
                # Just keep using fc, fc_rate and keep propagating
                if not past_predicts_warning_printed:
                    past_predicts_warning_printed = True
                    if 'utc' in m:
                        logging.warning("Past predicts time %s, using model %f Hz %f Hz/s"\
                                %(m['utc'][-1],fc,fc_rate))
                    else:
                        logging.warning("Past predicts time %d, using model %f Hz %f Hz/s"\
                                %(elapsed_sec-1,fc,fc_rate))

        if doppler_out_fid:
            if m and 'utc' in m:
                doppler_out_fid.write("%s %f %f\n" %
                                      (current_utc, fc, fc_rate))
            else:
                doppler_out_fid.write("%d %f %f\n" %
                                      (elapsed_sec, fc, fc_rate))

            doppler_out_fid.flush()

        # Counter rotate
        iq_cr, fc, phi = counterrotate(iq, args.fs, fc, fc_rate, phi)

        # Interleave and dump to disk
        iq_out[::2] = iq_cr.real
        iq_out[1::2] = iq_cr.imag
        iq_out.tofile(sys.stdout, format='float32')

        # Increment counters
        elapsed_sec += 1
        samples_read += n

    if doppler_out_fid:
        doppler_out_fid.close()
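
counterrotate itself is not part of this snippet; a minimal sketch of such a helper, assuming it mixes the samples with a linearly chirped local oscillator and returns the end-of-block frequency and phase for the next call:

import numpy as np

def counterrotate(iq, fs, fc, fc_rate, phi):
    # Remove a Doppler offset fc (Hz), drifting at fc_rate (Hz/s), from complex
    # baseband samples iq taken at fs (Hz), starting from carrier phase phi (rad).
    t = np.arange(len(iq)) / float(fs)
    phase = phi + 2.0 * np.pi * (fc * t + 0.5 * fc_rate * t ** 2)
    iq_cr = iq * np.exp(-1j * phase)
    # carry frequency and phase forward to the start of the next block
    t_end = len(iq) / float(fs)
    fc_next = fc + fc_rate * t_end
    phi_next = (phi + 2.0 * np.pi * (fc * t_end + 0.5 * fc_rate * t_end ** 2)) % (2.0 * np.pi)
    return iq_cr, fc_next, phi_next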
Example #26
    def iterative_det_od(self, index, **kwargs):
        no_cache = kwargs.get('no_cache', False)

        if not no_cache:
            data_path = self.get_path(f'calc/{index}_data.pickle').resolve()
            _data = self.load_pickle(data_path)
            if _data is not None:
                return _data

        obj = self.pop.get_object(index)

        sigmas = []

        print('RUNNING CALC')

        print(obj)

        scheduler = self.sched_cls(
            radar=radar,
            scan=scan,
            epoch=epoch,
            timeslice=0.1,
            end_time=3600.0 * end_hours,
            logger=self.logger,
            profiler=profiler,
        )
        self.scheduler = scheduler

        t, states, passes, data = detect.get_detections(scheduler,
                                                        obj,
                                                        self.logger,
                                                        profiler,
                                                        t_samp=10.0)

        if len(data) == 0:
            return None

        try:
            datas_scan, Sigma_orb_scan = detect.orbit_determination(
                data_select_all,
                scheduler,
                obj,
                passes,
                error_cache_path,
                self.logger,
                profiler,
            )
        except Exception as e:
            self.logger.info('Cannot do IOD')
            self.logger.exception(e)
            return None

        Sigma_orb_scan__ = 0.5 * (Sigma_orb_scan + Sigma_orb_scan.T)
        sigmas.append(Sigma_orb_scan__)

        try:
            datas, Sigma_orb = detect.orbit_determination(
                data_select,
                scheduler,
                obj,
                passes,
                error_cache_path,
                self.logger,
                profiler,
            )
        except Exception as e:
            self.logger.info('Cannot do IOD')
            self.logger.exception(e)
            return None

        Sigma_orb__ = 0.5 * (Sigma_orb + Sigma_orb.T)

        sigmas.append(Sigma_orb__)

        t_iod = datas[0]['t'][datas[0]['data_select']]

        #first detection is IOD state
        init_epoch = epoch + TimeDelta(t_iod.min(), format='sec')

        #sample IOD covariance
        init_orb = np.random.multivariate_normal(datas[0]['states'][:, 0],
                                                 Sigma_orb__)

        init_orb = sorts.frames.convert(
            init_epoch,
            init_orb,
            in_frame='ITRS',
            out_frame='TEME',
        )
        init_object = sorts.SpaceObject(x=init_orb[0],
                                        y=init_orb[1],
                                        z=init_orb[2],
                                        vx=init_orb[3],
                                        vy=init_orb[4],
                                        vz=init_orb[5],
                                        epoch=init_epoch,
                                        **pop_kw)

        true_orb = sorts.frames.convert(
            init_epoch,
            datas[0]['states'][:, 0],
            in_frame='ITRS',
            out_frame='TEME',
        )
        true_object = sorts.SpaceObject(x=true_orb[0],
                                        y=true_orb[1],
                                        z=true_orb[2],
                                        vx=true_orb[3],
                                        vy=true_orb[4],
                                        vz=true_orb[5],
                                        epoch=init_epoch,
                                        **pop_kw)

        chase_schdeule_time = t_iod.max() + tracker_delay

        scheduler.update(init_object, start_track=chase_schdeule_time)

        if update_interval is not None:
            updates = np.floor(
                (passes[0].end() - passes[0].start()) / update_interval)
            for update_num in range(1, int(updates)):

                delta_t = chase_schdeule_time + update_interval * update_num
                scheduler.stop_calc_time = delta_t

                def update_data_select(data):
                    if len(data['t']) == 0:
                        return np.empty((0, ), dtype=int)
                    else:
                        return np.argwhere(
                            data['t'] < data['t'].min() + delta_t).flatten()

                try:
                    datas, Sigma_orb = detect.orbit_determination(
                        update_data_select,
                        scheduler,
                        obj,
                        passes,
                        error_cache_path,
                        self.logger,
                        profiler,
                        Sigma_orb0=Sigma_orb__,
                    )
                except Exception as e:
                    self.logger.info('Cannot do IOD')
                    self.logger.exception(e)
                    return None

                Sigma_orb__ = 0.5 * (Sigma_orb + Sigma_orb.T)

                sigmas.append(Sigma_orb__)

                #sample OD covariance
                update_orb = np.random.multivariate_normal(
                    datas[0]['states'][:, 0], Sigma_orb__)

                update_orb = sorts.frames.convert(
                    init_epoch,
                    update_orb,
                    in_frame='ITRS',
                    out_frame='TEME',
                )
                update_object = sorts.SpaceObject(x=update_orb[0],
                                                  y=update_orb[1],
                                                  z=update_orb[2],
                                                  vx=update_orb[3],
                                                  vy=update_orb[4],
                                                  vz=update_orb[5],
                                                  epoch=init_epoch,
                                                  **pop_kw)

                scheduler.update_tracker(
                    update_object,
                    delta_t,
                    start_track=chase_schdeule_time,
                )

        scheduler.stop_calc_time = None
        scan_and_chase_datas = scheduler.observe_passes(
            passes,
            linear_list=True,
            space_object=obj,
            snr_limit=True,
            epoch=scheduler.epoch,
        )

        _data = Sigma_orb__, scan_and_chase_datas, t, states, passes, data, chase_schdeule_time, init_object.state, true_object.state, sigmas
        if not no_cache:
            self.save_pickle(data_path, _data)

        return _data
Example #27
# Testing HDF5 table read/write with mixins.  This is mostly
# copied from FITS mixin testing.

el = EarthLocation(x=1 * u.km, y=3 * u.km, z=5 * u.km)
el2 = EarthLocation(x=[1, 2] * u.km, y=[3, 4] * u.km, z=[5, 6] * u.km)
sc = SkyCoord([1, 2], [3, 4], unit='deg,deg', frame='fk4', obstime='J1990.5')
scc = sc.copy()
scc.representation_type = 'cartesian'
tm = Time([2450814.5, 2450815.5], format='jd', scale='tai', location=el)

mixin_cols = {
    'tm': tm,
    'dt': TimeDelta([1, 2] * u.day),
    'sc': sc,
    'scc': scc,
    'scd': SkyCoord([1, 2], [3, 4], [5, 6],
                    unit='deg,deg,m',
                    frame='fk4',
                    obstime=['J1990.5', 'J1991.5']),
    'q': [1, 2] * u.m,
    'qdb': [10, 20] * u.dB(u.mW),
    'qdex': [4.5, 5.5] * u.dex(u.cm / u.s**2),
    'qmag': [21, 22] * u.ABmag,
    'lat': Latitude([1, 2] * u.deg),
Example #28
from astropy.utils.exceptions import ErfaWarning

import sunpy.time
from sunpy.time import is_time_equal

tbegin_str = '2012/1/1'
tfin_str = '2012/1/2'
dt = u.Quantity(24 * 60 * 60, 's')

start = sunpy.time.parse_time(tbegin_str)
end = sunpy.time.parse_time(tfin_str)
delta = end - start


@pytest.mark.parametrize("inputs", [(tbegin_str, tfin_str), (tbegin_str, dt),
                                    (tbegin_str, TimeDelta(1 * u.day)),
                                    (tbegin_str, timedelta(days=1))])
def test_timerange_inputs(inputs):
    timerange = sunpy.time.TimeRange(*inputs)
    assert isinstance(timerange, sunpy.time.TimeRange)
    assert timerange.start == start
    assert timerange.end == end
    assert timerange.dt == delta


def test_timerange_invalid_range():
    lower = '2016/01/04 09:30'
    mid = '2016/06/04 09:30'
    upper = '2017/03/04 09:30'

    with pytest.raises(ValueError):
Example #29
    def azimuth_transit(
            self,
            azimuth: u.Quantity = 180 * u.deg,
            t_min: Time = Time.now(),
            duration: TimeDelta = TimeDelta(86400, format='sec'),
            precision: TimeDelta = TimeDelta(5, format='sec'),
    ) -> Time:
        """ Computes the :class:`~nenupy.astro.target.Target` transit time(s) at a given ``azimuth`` value.
            This method returns all the transit times found in the time
            window ranging from ``t_min`` to ``t_min + duration``.

            :param azimuth:
                Azimuth at which the transit is computed.
                Default is ``180 deg`` (i.e. South).
            :type azimuth:
                :class:`~astropy.units.Quantity`
            :param t_min:
                Starting time of the temporal window within which
                azimuth transits are looked for.
                Default is current time.
            :type t_min:
                :class:`~astropy.time.Time`
            :param duration:
                Width of the temporal window within which
                azimuth transits are looked for.
                Default is ``1 day``.
            :type duration:
                :class:`~astropy.time.TimeDelta`
            :param precision:
                Temporal precision of the returned azimuth transit values.
                Default is ``5 sec``.
            :type precision:
                :class:`~astropy.time.TimeDelta`

            :returns:
                Azimuth transit times.
                If no transit times are found (either because the requested
                time window doesn't contain any or because the source apparent
                sky position does not cross the desired ``azimuth``) an empty
                :class:`~astropy.time.Time` object is returned.
            :rtype:
                :class:`~astropy.time.Time`

            :Example:
                >>> from nenupy.astro.target import FixedTarget
                >>> from astropy.time import Time, TimeDelta
                >>> import astropy.units as u
                >>> cyg_a = FixedTarget.from_name("Cyg A")
                >>> cyg_a.azimuth_transit(
                        azimuth=100*u.deg,
                        t_min=Time("2021-01-01"),
                        duration=TimeDelta(86400*2, format="sec")
                    )
                <Time object: scale='utc' format='iso' value=['2021-01-01 11:22:12.463' '2021-01-02 11:18:16.477']>

            .. seealso::
                :ref:`ephemerides_sec`

        """
        def find_az_transit(times: Time):
            """ """
            # altaz_coordinates = radec_to_altaz(
            #     radec=self._get_source_coordinates(time=times),
            #     time=times,
            #     observer=self.observer,
            #     fast_compute=fast_compute
            # ).reshape(times.shape)
            altaz_coordinates = self._get_source_coordinates(
                time=times).transform_to(
                    AltAz(obstime=times, location=self.observer))
            azimuths = altaz_coordinates.az.rad
            az = azimuth.to(u.rad).value
            if self.is_circumpolar:
                az = np.angle(np.cos(az) + 1j * np.sin(az))
                complexAzStarts = np.angle(
                    np.cos(azimuths[:, :-1]) + 1j * np.sin(azimuths[:, :-1]))
                complexAzStops = np.angle(
                    np.cos(azimuths[:, 1:]) + 1j * np.sin(azimuths[:, 1:]))
                mask = (complexAzStarts <= az) &\
                    (complexAzStops >= az)
                mask |= (complexAzStarts >= az) &\
                    (complexAzStops <= az)
            else:
                mask = (azimuths[:, :-1] <= az) &\
                    (azimuths[:, 1:] >= az)
            return np.where(mask)

        return self._find_crossing_times(finding_function=find_az_transit,
                                         t_min=t_min,
                                         duration=duration,
                                         precision=precision)
Example #30
    os.chdir(data_directory[jj])
    for fitsfile in glob.glob('*.fits'):
        # Read the necessary extensions from Hinode fits file
        # and compute the field.
        hdulist_hinode = fits.open(fitsfile)

        # Create astropy time objects of the start and end times of the Hinode scan in UTC.
        start_time = Time(hdulist_hinode[0].header['TSTART'], format='isot', scale='utc')
        end_time   = Time(hdulist_hinode[0].header['TEND'], format='isot', scale='utc')

        # Determine time in the middle of the scan. Convert start_time and end_time to TAI,
        # because that is what HMI uses.
        middle_of_scan = start_time.tai+(end_time.tai-start_time.tai)/2

        # For this Hinode file, find the closest HMI time and index at the given cadence
        tdeltas = TimeDelta(hmi_times_iso - middle_of_scan, format='sec')
        closest_hmi_index = np.abs(tdeltas.value).argmin()
        closest_hmi_time  = hmi_times_iso.value[closest_hmi_index]

        #Here I am sorting out bad coordinates with quantiles, the example below assumes that a
        # maximum of 2% of the coordinates are bad (1% at the lower end, 1% at the upper end). 
        # That fixes most of the maps, but not all of them. For now, I am just adding/subtracting 
        # an arcsec on either end to counteract the (probable) overcompensation, i.e. 
        # throwing away good coordinate values as well. If the HMI cutout needs to be
        # *really* precise, this needs to be done more carefully. Right now, the HMI map
        # might be a bit too small or too large depending on how many coordinate outliers
        # there are.
        hinode_xcoords = [np.quantile(hdulist_hinode[38].data,0.01)-1., np.quantile(hdulist_hinode[38].data,0.99)+1.]
        hinode_ycoords = [np.quantile(hdulist_hinode[39].data,0.01)-1., np.quantile(hdulist_hinode[39].data,0.99)+1.]

        # Take care of some problematic files in the list manually
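
The closest-HMI-time lookup above relies on hmi_times_iso, which is built elsewhere in the script; a self-contained sketch of the same nearest-time technique, with hypothetical times, is:

import numpy as np
from astropy.time import Time, TimeDelta

# hypothetical 12-minute HMI cadence and Hinode scan midpoint
hmi_times = Time(['2014-01-01T00:00:00', '2014-01-01T00:12:00',
                  '2014-01-01T00:24:00'], scale='tai')
middle_of_scan = Time('2014-01-01T00:10:30', scale='tai')

tdeltas = TimeDelta(hmi_times - middle_of_scan, format='sec')
closest_index = np.abs(tdeltas.value).argmin()
closest_time = hmi_times[closest_index]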