Example 1
def carrington_rotation_number(t='now'):
    """
    Return the Carrington rotation number.  Each whole rotation number marks when the Sun's prime
    meridian coincides with the central meridian as seen from Earth, with the first rotation
    starting on 1853 November 9.

    Parameters
    ----------
    t : {parse_time_types}
        Time to use in a parse-time-compatible format
    """
    time = parse_time(t)

    # Estimate the Carrington rotation number by dividing the time that has elapsed since
    # JD 2398167.4 (late in the day on 1853 Nov 9), see Astronomical Algorithms (Meeus 1998, p.191),
    # by the mean synodic period (27.2753 days)
    estimate = (time - constants.first_carrington_rotation
                ) / constants.mean_synodic_period + 1
    estimate_int, estimate_frac = divmod(estimate, 1)

    # The fractional rotation number from the above estimate is inaccurate, so calculate the actual
    # fractional rotation number from the longitude of the central meridian (L0)
    actual_frac = 1 - L0(time).to('deg').value / 360

    # Calculate any adjustment to the integer rotation number due to wrapping
    wrap_adjustment = np.around(estimate_frac - actual_frac)

    actual = estimate_int + actual_frac + wrap_adjustment

    log.debug(
        f"Carrington rotation number: estimate is {estimate}, actual is {actual}"
    )

    return actual.to_value(u.one)
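A minimal usage sketch (not part of the excerpt above), assuming the function is called through its public location in sunpy.coordinates.sun:

from sunpy.coordinates.sun import carrington_rotation_number

# Fractional Carrington rotation number for an arbitrary date; the integer part
# identifies the rotation and the fraction comes from L0, as in the code above.
crn = carrington_rotation_number('2021-01-01')
print(crn)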
Example 2
 def _download(self, data):
     """ Download all data, even if paginated. """
     page = 1
     results = []
     new_data = data.copy()
     # Rename the default operatorX keys, where X is a number, to opX.
     for key in data.keys():
         if "operator" in key:
             new_data[f"op{key.split('operator')[-1]}"] = new_data.pop(key)
     while True:
         new_data['page'] = page
         url = self.url + urllib.parse.urlencode(new_data)
         log.debug(f'Opening {url}')
         fd = urllib.request.urlopen(url)
         try:
             result = codecs.decode(fd.read(),
                                    encoding='utf-8',
                                    errors='replace')
             result = json.loads(result)
         except Exception as e:
             raise IOError(
                 "Failed to load the response from the HEKClient.") from e
         finally:
             fd.close()
         results.extend(result['result'])
         if not result['overmax']:
             if len(results) > 0:
                 return astropy.table.Table(dict_keys_same(results))
             else:
                 return astropy.table.Table()
         page += 1
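For context, a hedged sketch of how this pagination loop is normally reached, via the public HEK client through Fido (the time range and event type are illustrative):

from sunpy.net import Fido, attrs as a

# Search the HEK for flare events; the HEK client's _download() handles the paging.
result = Fido.search(a.Time('2011-08-09 07:00', '2011-08-09 08:00'),
                     a.hek.EventType('FL'))
print(result)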
Example 3
    def is_datasource_for(cls, **kwargs):
        """
        Determines if header corresponds to a GOES lightcurve
        `~sunpy.timeseries.TimeSeries`.
        """
        if "source" in kwargs.keys():
            return kwargs["source"].lower().startswith(cls._source)
        if "meta" in kwargs.keys():
            return kwargs["meta"].get("TELESCOP", "").startswith("GOES")

        if "filepath" in kwargs.keys():
            try:
                if sunpy.io.detect_filetype(kwargs["filepath"]) == "hdf5":
                    with h5netcdf.File(kwargs["filepath"],
                                       mode="r",
                                       **cls._netcdf_read_kw) as f:
                        summary = f.attrs["summary"]
                        if not isinstance(summary, str):
                            # h5netcdf<0.14
                            summary = summary.astype(str)
                        return "XRS" in summary
            except Exception as e:
                log.debug(
                    f'Reading {kwargs["filepath"]} failed with the following exception:\n{e}'
                )
                return False
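A hedged sketch of how this hook is exercised: the TimeSeries factory probes each registered source class with is_datasource_for() when given a file (the netCDF file name below is hypothetical):

from sunpy.timeseries import TimeSeries

# The factory detects a GOES XRS netCDF file via is_datasource_for();
# the source can also be forced explicitly with source='XRS'.
ts = TimeSeries('sci_xrsf-l2-flx1s_g16_d20210101_v2-1-0.nc')  # hypothetical file name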
Example 4
 def observatory(self):
     """
     Retrieves the GOES satellite number by parsing the meta dictionary.
     """
     # Various pattern matches for the meta fields.
     pattern_inst = ("{}GOES 1-{SatelliteNumber:02d} {}")
     pattern_new = (
         "{}sci_gxrs-l2-irrad_g{SatelliteNumber:02d}_d{year:4d}{month:2d}{day:2d}_{}.nc{}"
     )
     pattern_old = ("{}go{SatelliteNumber:02d}{}{month:2d}{day:2d}.fits{}")
     pattern_r = (
         "{}sci_xrsf-l2-flx1s_g{SatelliteNumber:02d}_d{year:4d}{month:2d}{day:2d}_{}.nc{}"
     )
     pattern_telescop = ("GOES {SatelliteNumber:02d}")
     # The ordering of where we get the metadata from is important.
     # We always want to check 'id' first, as that will most likely have the correct information.
     # The other fields are fallbacks and sometimes contain data that is not useful.
     id = (self.meta.metas[0].get("id", "").strip()
           or self.meta.metas[0].get("TELESCOP", "").strip()
           or self.meta.metas[0].get("Instrument", "").strip())
     if not id:
         log.debug(
             "Unable to get a satellite number from 'Instrument', 'TELESCOP' or 'id' "
         )
         return None
     for pattern in [
             pattern_inst, pattern_new, pattern_old, pattern_r,
             pattern_telescop
     ]:
         parsed = parse(pattern, str(id))
         if parsed is not None:
             return f"GOES-{parsed['SatelliteNumber']}"
     log.debug('Satellite Number not found in metadata')
     return None
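The pattern matching above relies on the third-party parse package; a small sketch of that call in isolation (the TELESCOP value is illustrative):

from parse import parse

pattern_telescop = "GOES {SatelliteNumber:02d}"
parsed = parse(pattern_telescop, "GOES 16")
if parsed is not None:
    print(f"GOES-{parsed['SatelliteNumber']}")  # GOES-16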
Example 5
def _retry_sample_data(results, new_url_base):
    # In case we have a broken file on disk, overwrite it.
    dl = Downloader(overwrite=True,
                    progress=True,
                    headers={'Accept-Encoding': 'identity'})

    for err in results.errors:
        file_name = err.url.split("/")[-1]
        log.debug(
            f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}"
        )
        # Update the url to a mirror and requeue the file.
        new_url = urljoin(new_url_base, file_name)
        log.debug(
            f"Attempting redownload of {_SAMPLE_FILES[file_name]} using {new_url}"
        )
        dl.enqueue_file(new_url, filename=err.filepath_partial)

    extra_results = dl.download()

    # Make a new results object which contains all the successful downloads
    # from the previous results object and this retry, and all the errors from
    # this retry.
    new_results = results + extra_results
    new_results._errors = extra_results._errors
    return new_results
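For reference, a hedged sketch of the parfive API that this retry logic builds on (the URL is a placeholder):

from parfive import Downloader

dl = Downloader(overwrite=True, progress=True)
dl.enqueue_file('https://example.com/sample_file.fits', path='.')  # placeholder URL
results = dl.download()
for err in results.errors:
    print(err.url, err.exception)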
Example 6
    def __init__(self, data, header, **kwargs):
        # Assume pixel units are arcsec if not given
        header['cunit1'] = header.get('cunit1', 'arcsec')
        header['cunit2'] = header.get('cunit2', 'arcsec')

        super().__init__(data, header, **kwargs)

        self._nickname = self.detector
        # TODO Currently (8/29/2011), cannot read fits files containing more
        # than one image (schriste)
        # Fix some broken/misapplied keywords
        if self.meta['ctype1'] == 'arcsec':
            self.meta['cunit1'] = 'arcsec'
            self.meta['ctype1'] = 'HPLN-TAN'
        if self.meta['ctype2'] == 'arcsec':
            self.meta['cunit2'] = 'arcsec'
            self.meta['ctype2'] = 'HPLT-TAN'

        self.meta['waveunit'] = 'keV'
        self.meta['wavelnth'] = [self.meta['energy_l'], self.meta['energy_h']]
        self.plot_settings['cmap'] = 'rhessi'

        if ('TIMESYS' in self.meta
                and self.meta['keycomments']['TIMESYS'] == 'Reference Time'):
            log.debug('Moving "TIMESYS" FITS keyword to "DATEREF"')
            self.meta['DATEREF'] = self.meta.pop('TIMESYS')
Example 7
    def _download(self, data):
        """ Download all data, even if paginated. """
        page = 1
        results = []

        while True:
            data['page'] = page
            url = self.url + urllib.parse.urlencode(data)
            log.debug(f'Opening {url}')
            fd = urllib.request.urlopen(url)
            try:
                result = codecs.decode(fd.read(),
                                       encoding='utf-8',
                                       errors='replace')
                result = json.loads(result)
            except Exception as e:
                raise IOError(
                    "Failed to load the response from the HEKClient.") from e
            finally:
                fd.close()
            results.extend(result['result'])

            if not result['overmax']:
                if len(results) > 0:
                    return astropy.table.Table(dict_keys_same(results))
                else:
                    return astropy.table.Table()

            page += 1
Example 8
def link_test(link):
    """
    Check whether a URL is a valid link.

    If it is, return the downloaded contents of that page.

    Parameters
    ----------
    link : str
        A string containing a URL

    Returns
    -------
    webpage : str or None
        The contents of the web page, or `None` if the link could not be opened.

    Examples
    --------
    >>> from sunpy.net.helio import parser
    >>> result = parser.link_test('http://msslkz.mssl.ucl.ac.uk/helio-hec/HelioService')  # doctest: +REMOTE_DATA

    >>> print(parser.link_test('http://rrnx.invalid_url5523.com'))  # doctest: +REMOTE_DATA
        None
    """
    try:
        with closing(urlopen(link, timeout=LINK_TIMEOUT)) as fd:
            return fd.read()
    except Exception as e:
        log.debug(f"Failed to get {link} with {e}")
        return None
Example 9
    def fetch(self, query_results, *, path, downloader, **kwargs):
        """
        Queue a set of results to be downloaded. `BaseClient` does the actual
        downloading, so we just have to queue up the ``downloader``.

        Parameters
        ----------
        query_results : sunpy.net.fido_factory.UnifiedResponse
            Results from a Fido search.
        path : str
            Path to download files to. Must be a format string with a ``file``
            field for the filename.
        downloader : parfive.Downloader
            Downloader instance used to download data.
        kwargs :
            Keyword arguments aren't used by this client.
        """
        base_url = ('http://soar.esac.esa.int/soar-sl-tap/data?'
                    f'retrieval_type=LAST_PRODUCT&product_type=SCIENCE&'
                    'data_item_id=')

        for row in query_results:
            url = base_url + row['Data item ID']
            filepath = str(path).format(file=row['Filename'])
            log.debug(f'Queuing URL: {url}')
            downloader.enqueue_file(url, filename=filepath)
Example 10
    def _read_file(self, fname, **kwargs):
        """
        Read in a file name and return the list of (data, meta) pairs in that file.
        """
        # File gets read here. This needs to be generic enough to seamlessly
        # call a fits file or a jpeg2k file, etc
        # NOTE: use os.fspath so that fname can be either a str or pathlib.Path
        # This can be removed once read_file supports pathlib.Path
        log.debug(f'Reading {fname}')
        try:
            pairs = read_file(os.fspath(fname), **kwargs)
        except Exception as e:
            msg = f"Failed to read {fname}."
            raise IOError(msg) from e

        new_pairs = []
        for pair in pairs:
            filedata, filemeta = pair
            assert isinstance(filemeta, FileHeader)
            # This tests that the data is more than 1D
            if len(np.shape(filedata)) > 1:
                data = filedata
                meta = MetaDict(filemeta)
                new_pairs.append((data, meta))

        if not new_pairs:
            raise NoMapsInFileError(
                f"Found no HDUs with >= 2D data in '{fname}'.")

        return new_pairs
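This helper sits behind the map factory; a minimal sketch of the public entry point that ends up calling it, using the bundled sample data (assumes the sample files have been downloaded):

import sunpy.map
import sunpy.data.sample

# Map() reads the file via _read_file() and builds a map from each (data, meta) pair.
aia = sunpy.map.Map(sunpy.data.sample.AIA_171_IMAGE)
print(aia.measurement)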
Example 11
def transform_with_sun_center():
    """
    Context manager for coordinate transformations to ignore the motion of the center of the Sun.

    Normally, coordinates refer to a point in inertial space (relative to the barycenter of the
    solar system).  Transforming to a different observation time does not move the point at all,
    but rather only updates the coordinate representation as needed for the origin and axis
    orientations at the new observation time.  However, the center of the Sun moves over time.
    Thus, for example, a coordinate that lies on the surface of the Sun at one observation time
    will not continue to lie on the surface of the Sun at other observation times.

    Under this context manager, transformations will instead move the coordinate over time to
    "follow" the translational motion of the center of Sun, thus maintaining the position of the
    coordinate relative to the center of the Sun.

    Examples
    --------
    >>> from astropy.coordinates import SkyCoord
    >>> from sunpy.coordinates import HeliographicStonyhurst, transform_with_sun_center
    >>> import astropy.units as u
    >>> start_frame = HeliographicStonyhurst(obstime="2001-01-01")
    >>> end_frame = HeliographicStonyhurst(obstime="2001-02-01")
    >>> sun_center = SkyCoord(0*u.deg, 0*u.deg, 0*u.AU, frame=start_frame)
    >>> sun_center
    <SkyCoord (HeliographicStonyhurst: obstime=2001-01-01T00:00:00.000): (lon, lat, radius) in (deg, deg, AU)
        (0., 0., 0.)>
    >>> sun_center.transform_to(end_frame)  # transformations do not normally follow Sun center
    <SkyCoord (HeliographicStonyhurst: obstime=2001-02-01T00:00:00.000): (lon, lat, radius) in (deg, deg, AU)
        (-156.66825767, 5.96399877, 0.00027959)>
    >>> with transform_with_sun_center():
    ...     sun_center.transform_to(end_frame)  # now following Sun center
    <SkyCoord (HeliographicStonyhurst: obstime=2001-02-01T00:00:00.000): (lon, lat, radius) in (deg, deg, AU)
        (0., 0., 0.)>

    Notes
    -----
    This context manager accounts only for the motion of the center of the Sun, i.e.,
    translational motion.  The motion of solar features due to any rotation of the Sun about its
    rotational axis is not accounted for.

    Due to the implementation approach, this context manager modifies transformations between only
    these five coordinate frames:
    `~sunpy.coordinates.frames.HeliographicStonyhurst`,
    `~sunpy.coordinates.frames.HeliographicCarrington`,
    `~sunpy.coordinates.frames.HeliocentricInertial`,
    `~sunpy.coordinates.frames.Heliocentric`, and
    `~sunpy.coordinates.frames.Helioprojective`.
    """
    try:
        global _ignore_sun_motion

        old_ignore_sun_motion = _ignore_sun_motion  # nominally False

        log.debug(
            "Ignore the motion of the center of the Sun for transformations")
        _ignore_sun_motion = True
        yield
    finally:
        _ignore_sun_motion = old_ignore_sun_motion
Example 12
    def _do_search(query):
        """
        Query the SOAR server with a single query.

        Parameters
        ----------
        query : list[str]
            List of query items.

        Returns
        -------
        astropy.table.QTable
            Query results.
        """
        base_url = ('http://soar.esac.esa.int/soar-sl-tap/tap/'
                    'sync?REQUEST=doQuery&')
        # Need to manually set the intervals based on a query
        request_dict = {}
        request_dict['LANG'] = 'ADQL'
        request_dict['FORMAT'] = 'json'

        url_query = {}
        url_query['SELECT'] = '*'
        url_query['FROM'] = 'v_sc_data_item'
        url_query['WHERE'] = '+AND+'.join(query)
        request_dict['QUERY'] = '+'.join(
            [f'{item}+{url_query[item]}' for item in url_query])

        request_str = '&'.join(
            [f'{item}={request_dict[item]}' for item in request_dict])

        url = base_url + request_str
        log.debug(f'Getting request from URL: {url}')
        # Get request info
        r = requests.get(url)
        r.raise_for_status()

        # Do some list/dict wrangling
        names = [m['name'] for m in r.json()['metadata']]
        info = {name: [] for name in names}
        for entry in r.json()['data']:
            for i, name in enumerate(names):
                info[name].append(entry[i])

        if len(info['begin_time']):
            info['begin_time'] = parse_time(info['begin_time']).iso
            info['end_time'] = parse_time(info['end_time']).iso

        return astropy.table.QTable({
            'Instrument': info['instrument'],
            'Data product': info['descriptor'],
            'Level': info['level'],
            'Start time': info['begin_time'],
            'End time': info['end_time'],
            'Data item ID': info['data_item_id'],
            'Filename': info['filename'],
            'Filesize': info['filesize']
        })
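To make the string assembly above concrete, a standalone sketch of the request URL produced for a hypothetical query list (this mirrors the logic of the method rather than calling a separate API):

query = ["instrument='EUI'", "level='L2'"]  # hypothetical query items

url_query = {'SELECT': '*',
             'FROM': 'v_sc_data_item',
             'WHERE': '+AND+'.join(query)}
adql = '+'.join(f'{key}+{value}' for key, value in url_query.items())
request_str = '&'.join(['LANG=ADQL', 'FORMAT=json', f'QUERY={adql}'])
print('http://soar.esac.esa.int/soar-sl-tap/tap/sync?REQUEST=doQuery&' + request_str)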
Example 13
 def __del__(self):
     """
     Attempt to close the connection, but if it fails, continue.
     """
     try:
         self.api.transport.session.close()
     except Exception as e:
         log.debug(f"Failed to close VSO API connection with: {e}")
Example 14
def send_to_log(message, kind='INFO'):
    """
    A simple function to demonstrate the logger generating an origin.
    """
    if kind.lower() == 'info':
        log.info(message)
    elif kind.lower() == 'debug':
        log.debug(message)
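A minimal sketch of driving this demo function with the sunpy logger set to show DEBUG output (sunpy's log is a standard logging.Logger subclass, so setLevel applies):

import logging
from sunpy import log

log.setLevel(logging.DEBUG)
send_to_log("This is an info message")           # emitted via log.info()
send_to_log("This is a debug message", 'debug')  # emitted via log.debug()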
Example 15
def _handle_final_errors(results):
    for err in results.errors:
        file_name = err.url.split("/")[-1]
        log.debug(
            f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}"
        )
        log.error(
            f"Failed to download {_SAMPLE_FILES[file_name]} from all mirrors,"
            "the file will not be available.")
Example 16
def check_connection(url):
    try:
        resp = urlopen(urljoin(url, "/v2/getDataSources/"))
        assert resp.getcode() == 200
        assert isinstance(json.loads(resp.read()), dict)
        return url
    except Exception as e:
        log.debug(f"Unable to connect to {url}:\n {e}")
        log.info(f"Connection to {url} failed. Retrying with different url.")
    return None
Example 17
    def _apply_diffrot(self, duration, rotation_model):
        oldrepr = self.spherical

        from sunpy.physics.differential_rotation import diff_rot
        log.debug(f"Applying {duration} of solar rotation")
        newlon = oldrepr.lon + diff_rot(duration,
                                        oldrepr.lat,
                                        rot_type=rotation_model,
                                        frame_time='sidereal')
        newrepr = SphericalRepresentation(newlon, oldrepr.lat, oldrepr.distance)

        return self.realize_frame(newrepr)
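A hedged sketch of the underlying diff_rot call evaluated on its own (the latitude and duration are illustrative; the import path matches the one in the excerpt):

import astropy.units as u
from sunpy.physics.differential_rotation import diff_rot

# Longitude shift accumulated over 4 days at 30 degrees latitude, in the sidereal frame.
dlon = diff_rot(4 * u.day, 30 * u.deg, rot_type='howard', frame_time='sidereal')
print(dlon)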
Example 18
 def rotation_matrix(self):
     # For Helioviewer images, clear rotation metadata, as these have already been rotated.
     # Also check that all CROTAn keywords exist to make sure that it's an untouched
     # Helioviewer file.
     if ('helioviewer' in self.meta and 'crota' in self.meta
             and 'crota1' in self.meta and 'crota2' in self.meta):
         log.debug(
             "LASCOMap: Ignoring CROTAn keywords "
             "because the map has already been rotated by Helioviewer")
         return np.identity(2)
     else:
         return super().rotation_matrix
Example 19
        def wrapped_func(*args, **kwargs):
            global _layer_level

            # Check if the logging level is at least DEBUG (for performance reasons)
            debug_output = log.getEffectiveLevel() <= logging.DEBUG

            if debug_output:
                # Indention for transformation layer
                indention = u"\u2502   " * _layer_level

                # For the input arguments, add indention to any lines after the first line
                from_str = str(args[0]).replace("\n", f"\n       {indention}\u2502       ")
                to_str = str(args[1]).replace("\n", f"\n       {indention}\u2502       ")

                # Log the description and the input arguments
                log.debug(f"{indention}{description}")
                log.debug(f"{indention}\u251c\u2500From: {from_str}")
                log.debug(f"{indention}\u251c\u2500To  : {to_str}")

                # Increment the layer level to increase the indention for nested transformations
                _layer_level += 1

            result = func(*args, **kwargs)

            if debug_output:
                # Decrement the layer level
                _layer_level -= 1

                # For the output, add indentation to any lines after the first line
                out_str = str(result).replace("\n", f"\n       {indention}        ")

                # Log the output
                log.debug(f"{indention}\u2514\u2500Out : {out_str}")

            return result
Example 20
    def rotatedsun_to_reference(rotatedsun_coord, reference_frame):
        # Transform to HCI
        from_coord = rotatedsun_coord.base.realize_frame(rotatedsun_coord.data)
        hci_coord = from_coord.transform_to(HeliocentricInertial(obstime=reference_frame.obstime))
        oldrepr = hci_coord.spherical

        # Rotate the coordinate in HCI
        from sunpy.physics.differential_rotation import diff_rot
        log.debug(f"Applying {rotatedsun_coord.duration} of solar rotation")
        newlon = oldrepr.lon + diff_rot(rotatedsun_coord.duration,
                                        oldrepr.lat,
                                        rot_type=rotatedsun_coord.rotation_model,
                                        frame_time='sidereal')
        newrepr = SphericalRepresentation(newlon, oldrepr.lat, oldrepr.distance)

        # Transform back from HCI
        hci_coord = HeliocentricInertial(newrepr, obstime=reference_frame.obstime)
        return hci_coord.transform_to(reference_frame)
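For context, a hedged sketch of the public RotatedSunFrame metaframe whose transformations invoke this function (the frame parameters are illustrative):

import astropy.units as u
from astropy.coordinates import SkyCoord
from sunpy.coordinates import HeliographicStonyhurst, RotatedSunFrame

base = HeliographicStonyhurst(obstime='2021-09-15')
rs_frame = RotatedSunFrame(base=base, duration=4 * u.day)
point = SkyCoord(0 * u.deg, 20 * u.deg, frame=rs_frame)
# Transforming out of the rotated frame applies the differential rotation shown above.
print(point.transform_to(base))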
Example 21
    def __init__(self, data, header, **kwargs):

        GenericMap.__init__(self, data, header, **kwargs)
        self._nickname = "{}-{}".format(self.detector, self.observatory[-1])
        self.plot_settings['cmap'] = 'sohoeit{wl:d}'.format(
            wl=int(self.wavelength.value))
        self.plot_settings['norm'] = ImageNormalize(stretch=source_stretch(
            self.meta, PowerStretch(0.25)),
                                                    clip=False)
        self.meta['waveunit'] = 'Angstrom'

        # Try to identify when the FITS meta data does not have the correct
        # date FITS keyword
        if ('date_obs' in self.meta) and not ('date-obs' in self.meta):
            self.meta['date-obs'] = self.meta['date_obs']
        # fix CROTA to CROTAn
        if "crota" in self.meta and "crota2" not in self.meta:
            log.debug("EUVIMap: Changing the CROTA keyword to CROTA2")
            self.meta["crota2"] = self.meta.pop("crota")
Example 22
    def __init__(self, data, header, **kwargs):
        super().__init__(data, header, **kwargs)

        if self.meta['cunit1'] == 'Degree':
            self.meta['cunit1'] = 'deg'

        if self.meta['cunit2'] == 'Sine Latitude':
            log.debug("Editing CUNIT2, CDELT1, CDLET2 keywords to the correct "
                      "values for a CEA projection.")
            self.meta['cunit2'] = 'deg'

            # Since this map uses the cylindrical equal-area (CEA) projection,
            # the spacing should be modified to 180/pi times the original value
            # Reference: Section 5.5, Thompson 2006
            self.meta['cdelt2'] = 180 / np.pi * self.meta['cdelt2']
            self.meta['cdelt1'] = np.abs(self.meta['cdelt1'])

        if 'date-obs' not in self.meta and 't_obs' in self.meta:
            log.debug('Setting "DATE-OBS" keyword from "T_OBS"')
            self.meta['date-obs'] = self.meta['t_obs']
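A worked illustration of the CDELT2 rescaling above (the input spacing is hypothetical): for a CEA map whose rows are spaced uniformly in sine latitude, converting that spacing to degrees multiplies it by 180/pi.

import numpy as np

cdelt2_sine_latitude = 0.01                      # hypothetical spacing in sine-latitude units
cdelt2_deg = 180 / np.pi * cdelt2_sine_latitude  # ~0.573, the value stored once CUNIT2 is 'deg'
print(cdelt2_deg)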
Example 23
    def _read_file(self, fname, **kwargs):
        """ Read in a file name and return the list of (data, meta) pairs in
            that file. """

        # File gets read here.  This needs to be generic enough to seamlessly
        # call a fits file or a jpeg2k file, etc
        # NOTE: use os.fspath so that fname can be either a str or pathlib.Path
        # This can be removed once read_file supports pathlib.Path
        log.debug(f'Reading {fname}')
        pairs = read_file(os.fspath(fname), **kwargs)

        new_pairs = []
        for pair in pairs:
            filedata, filemeta = pair
            assert isinstance(filemeta, FileHeader)
            # This tests that the data is more than 1D
            if len(np.shape(filedata)) > 1:
                data = filedata
                meta = MetaDict(filemeta)
                new_pairs.append((data, meta))
        return new_pairs
Example 24
    def _ftpfileslist(self, timerange):
        directories = self.range(timerange)
        filesurls = list()
        ftpurl = urlsplit(directories[0]).netloc
        with FTP(ftpurl, user="******", passwd="*****@*****.**") as ftp:
            for directory in directories:
                try:
                    ftp.cwd(urlsplit(directory).path)
                except Exception as e:
                    log.debug(f"FTP CWD: {e}")
                    continue
                for file_i in ftp.nlst():
                    fullpath = directory + file_i
                    if self._URL_followsPattern(fullpath):
                        if self._check_timerange(fullpath, timerange):
                            filesurls.append(fullpath)

        filesurls = ['ftp://' + "{0.netloc}{0.path}".format(urlsplit(url))
                     for url in filesurls]

        return filesurls
Example 25
def _retry_sample_data(results):
    # In case we have a broken file on disk, overwrite it.
    dl = Downloader(overwrite=True,
                    progress=True,
                    headers={'Accept-Encoding': 'identity'})
    for err in results.errors:
        file_name = err.filepath_partial().name
        log.debug(
            f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}"
        )
        # Update the url to a mirror and requeue the file.
        new_url = urljoin(_BASE_URLS[1], file_name)
        log.debug(
            f"Attempting redownload of {_SAMPLE_FILES[file_name]} using {new_url}"
        )
        dl.enqueue_file(new_url, filename=err.filepath_partial)
    extra_results = dl.download()
    for err in extra_results.errors:
        file_name = err.filepath_partial().name
        log.debug(
            f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}"
        )
        log.error(
            f"Failed to download {_SAMPLE_FILES[file_name]} from all mirrors,"
            "the file will not be available.")
    return results + extra_results
Example 26
    def reference_to_rotatedsun(hgs_coord, rotatedsun_frame):
        int_frame = HeliographicStonyhurst(
            obstime=rotatedsun_frame.base.obstime)
        int_coord = hgs_coord.make_3d().transform_to(
            int_frame)  # obstime change handled here
        oldrepr = int_coord.spherical

        # Rotate the coordinate in HGS
        from sunpy.physics.differential_rotation import diff_rot
        log.debug(f"Applying {rotatedsun_frame.duration} of solar rotation")
        newlon = oldrepr.lon - diff_rot(
            rotatedsun_frame.duration,
            oldrepr.lat,
            rot_type=rotatedsun_frame.rotation_model,
            frame_time='sidereal')
        newrepr = SphericalRepresentation(newlon, oldrepr.lat,
                                          oldrepr.distance)

        # Transform from HGS
        new_coord = int_coord.realize_frame(newrepr).transform_to(
            rotatedsun_frame.base)
        return rotatedsun_frame.realize_frame(new_coord.data)
Example 27
    def __init__(self, data, header, **kwargs):

        header['cunit1'] = header['cunit1'].lower()
        header['cunit2'] = header['cunit2'].lower()

        super().__init__(data, header, **kwargs)

        # Fill in some missing or broken info
        # Test if change has already been applied
        if 'T' not in self.meta['date-obs']:
            datestr = "{date}T{time}".format(
                date=self.meta.get('date-obs', self.meta.get('date_obs')),
                time=self.meta.get('time-obs', self.meta.get('time_obs')))
            self.meta['date-obs'] = parse_time(datestr).isot

        # If non-standard Keyword is present, correct it too, for compatibility.
        if 'date_obs' in self.meta:
            self.meta['date_obs'] = self.meta['date-obs']
        self._nickname = self.instrument + "-" + self.detector
        self.plot_settings['cmap'] = 'soholasco{det!s}'.format(
            det=self.detector[1])
        self.plot_settings['norm'] = ImageNormalize(stretch=source_stretch(
            self.meta, PowerStretch(0.5)),
                                                    clip=False)

        # For Helioviewer images, clear rotation metadata, as these have already been rotated.
        # Also check that all CROTAn keywords exist to make sure that it's an untouched
        # Helioviewer file.
        if ('helioviewer' in self.meta and 'crota' in self.meta
                and 'crota1' in self.meta and 'crota2' in self.meta):
            log.debug(
                "LASCOMap: Cleaning up CROTAn keywords "
                "because the map has already been rotated by Helioviewer")
            self.meta.pop('crota')
            self.meta.pop('crota1')
            self.meta['crota2'] = 0
Example 28
def propagate_with_solar_surface(rotation_model='howard'):
    """
    Context manager for coordinate transformations to automatically apply solar
    differential rotation for any change in observation time.

    Normally, coordinates refer to a point in inertial space (relative to the
    barycenter of the solar system).  Transforming to a different observation time
    does not move the point at all, but rather only updates the coordinate
    representation as needed for the origin and axis orientations at the new
    observation time.

    Under this context manager, transformations will instead treat the coordinate
    as if it were referring to a point on the solar surface instead of a point in
    inertial space.  If a transformation has a change in observation time, the
    heliographic longitude of the point will be updated according to the specified
    rotation model.

    Parameters
    ----------
    rotation_model : `str`
        Accepted model names are ``'howard'`` (default), ``'snodgrass'``,
        ``'allen'``, and ``'rigid'``.  See the documentation for
        :func:`~sunpy.physics.differential_rotation.diff_rot` for the differences
        between these models.

    Notes
    -----
    This context manager also ignores the motion of the center of the Sun (see
    :func:`~sunpy.coordinates.transformations.transform_with_sun_center`).

    Due to the implementation approach, this context manager modifies
    transformations between only these five coordinate frames:
    `~sunpy.coordinates.frames.HeliographicStonyhurst`,
    `~sunpy.coordinates.frames.HeliographicCarrington`,
    `~sunpy.coordinates.frames.HeliocentricInertial`,
    `~sunpy.coordinates.frames.Heliocentric`, and
    `~sunpy.coordinates.frames.Helioprojective`.

    Examples
    --------
    .. minigallery:: sunpy.coordinates.propagate_with_solar_surface

    >>> import astropy.units as u
    >>> from astropy.coordinates import SkyCoord
    >>> from sunpy.coordinates import HeliocentricInertial, propagate_with_solar_surface
    >>> meridian = SkyCoord(0*u.deg, [-60, -30, 0, 30, 60]*u.deg, 1*u.AU,
    ...                     frame=HeliocentricInertial, obstime='2021-09-15')
    >>> out_frame = HeliocentricInertial(obstime='2021-09-21')
    >>> with propagate_with_solar_surface():
    ...     print(meridian.transform_to(out_frame))
    <SkyCoord (HeliocentricInertial: obstime=2021-09-21T00:00:00.000): (lon, lat, distance) in (deg, deg, AU)
        [(70.24182965, -60., 1.),
         (82.09298036, -30., 1.),
         (85.9579703 ,   0., 1.),
         (82.09298036,  30., 1.),
         (70.24182965,  60., 1.)]>
    >>> with propagate_with_solar_surface(rotation_model='rigid'):
    ...     print(meridian.transform_to(out_frame))
    <SkyCoord (HeliocentricInertial: obstime=2021-09-21T00:00:00.000): (lon, lat, distance) in (deg, deg, AU)
        [(85.1064, -60., 1.), (85.1064, -30., 1.),
         (85.1064,   0., 1.), (85.1064,  30., 1.),
         (85.1064,  60., 1.)]>
    """
    with transform_with_sun_center():
        try:
            global _autoapply_diffrot

            old_autoapply_diffrot = _autoapply_diffrot  # nominally False

            log.debug("Enabling automatic solar differential rotation "
                      f"('{rotation_model}') for any changes in obstime")
            _autoapply_diffrot = rotation_model
            yield
        finally:
            if not old_autoapply_diffrot:
                log.debug("Disabling automatic solar differential rotation "
                          "for any changes in obstime")
            _autoapply_diffrot = old_autoapply_diffrot
Example 29
File: cdf.py, Project: hayesla/sunpy
def read_cdf(fname):
    """
    Read a CDF file that follows the ISTP/IACG guidelines.

    Parameters
    ----------
    fname : path-like
        Location of single CDF file to read.

    Returns
    -------
    list[GenericTimeSeries]
        A list of time series objects, one for each unique time index within
        the CDF file.

    References
    ----------
    Space Physics Guidelines for CDF https://spdf.gsfc.nasa.gov/sp_use_of_cdf.html
    """
    cdf = cdflib.CDF(str(fname))

    # Extract the time varying variables
    cdf_info = cdf.cdf_info()
    meta = cdf.globalattsget()
    all_var_keys = cdf_info['rVariables'] + cdf_info['zVariables']
    var_attrs = {key: cdf.varattsget(key) for key in all_var_keys}
    # Get keys that depend on time
    var_keys = [var for var in var_attrs if 'DEPEND_0' in var_attrs[var]]

    # Get unique time index keys
    time_index_keys = sorted(set([var_attrs[var]['DEPEND_0'] for var in var_keys]))

    all_ts = []
    # For each time index, construct a GenericTimeSeries
    for index_key in time_index_keys:
        try:
            index = cdf.varget(index_key)
        except ValueError:
            # Empty index for cdflib >= 0.3.20
            continue
        if index is None:
            # Empty index for cdflib <0.3.20
            continue
        # TODO: use to_astropy_time() instead here when we drop pandas in timeseries
        index = CDFepoch.to_datetime(index)
        df = pd.DataFrame(index=pd.DatetimeIndex(name=index_key, data=index))
        units = {}

        for var_key in sorted(var_keys):
            attrs = var_attrs[var_key]
            if attrs['DEPEND_0'] != index_key:
                continue

            # Get data
            if cdf.varinq(var_key)['Last_Rec'] == -1:
                log.debug(f'Skipping {var_key} in {fname} as it has zero elements')
                continue

            data = cdf.varget(var_key)
            # Get units
            if 'UNITS' in attrs:
                unit_str = attrs['UNITS']
                try:
                    unit = u.Unit(unit_str)
                except ValueError:
                    if unit_str in _known_units:
                        unit = _known_units[unit_str]
                    else:
                        warn_user(f'astropy did not recognize units of "{unit_str}". '
                                  'Assigning dimensionless units. '
                                  'If you think this unit should not be dimensionless, '
                                  'please raise an issue at https://github.com/sunpy/sunpy/issues')
                        unit = u.dimensionless_unscaled
            else:
                warn_user(f'No units provided for variable "{var_key}". '
                          'Assigning dimensionless units.')
                unit = u.dimensionless_unscaled

            if data.ndim == 2:
                # Multiple columns, give each column a unique label
                for i, col in enumerate(data.T):
                    df[var_key + f'_{i}'] = col
                    units[var_key + f'_{i}'] = unit
            else:
                # Single column
                df[var_key] = data
                units[var_key] = unit

        all_ts.append(GenericTimeSeries(data=df, units=units, meta=meta))

    if not len(all_ts):
        log.debug(f'No data found in file {fname}')
    return all_ts
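A hedged usage sketch: the same reader is reachable through the TimeSeries factory, which hands CDF files to read_cdf (the file name is hypothetical; depending on how many time indices the file defines, the factory may return a single series or a list):

from sunpy.timeseries import TimeSeries

ts = TimeSeries('example_istp_file.cdf')  # hypothetical ISTP-compliant CDF file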
Example 30
 def _lookup_records(self, iargs):
     """
     Do a LookData request to JSOC to work out what results the query returns.
     """
     isMeta = iargs.get('meta', False)
     keywords = iargs.get('keys', '**ALL**')
     client = drms.Client()
     if 'series' not in iargs:
         error_message = "Series must be specified for a JSOC Query"
         raise ValueError(error_message)
     if not isinstance(keywords, list) and not isinstance(keywords, str):
         error_message = "Keywords can only be passed as a list or comma-separated strings."
         raise TypeError(error_message)
     # Get a set of the PrimeKeys that exist for the given series, and check
     # whether the passed PrimeKeys is a subset of that.
     primekeys = client.pkeys(iargs['series'])
     primekeys_passed = iargs.get(
         'primekey',
         None)  # primekeys_passed is a dict of key-value pairs.
     if primekeys_passed is not None:
         if not set(list(primekeys_passed.keys())) <= set(primekeys):
             error_message = f"Unexpected PrimeKeys were passed. The series {iargs['series']} supports the following Keywords: {primekeys}"
             raise ValueError(error_message)
     # Raise special error for wavelength (even though the code would ignore it anyway)
     wavelength = iargs.get('wavelength', '')
     if wavelength != '':
         if 'WAVELNTH' not in primekeys:
             error_message = (
                 f"The series {iargs['series']} does not support wavelength attribute. "
                 f"The following primekeys are supported {primekeys}")
             raise TypeError(error_message)
     # Raise errors for segments
     # Get a set of the segments that exist for the given series, and check
     # whether the passed segments is a subset of that.
     series = client.info(iargs['series'])
     # Fetches all valid segment names
     segments = list(series.segments.index.values)
     segments_passed = iargs.get('segment', None)
     if segments_passed is not None:
         if not isinstance(segments_passed, list) and not isinstance(
                 segments_passed, str):
             error_message = "Segments can only be passed as a comma-separated string or a list of strings."
             raise TypeError(error_message)
         elif isinstance(segments_passed, str):
             segments_passed = segments_passed.replace(' ', '').split(',')
         if not set(segments_passed) <= set(segments):
             error_message = f"Unexpected Segments were passed. The series {iargs['series']} contains the following Segments {segments}"
             raise ValueError(error_message)
         iargs['segment'] = segments_passed
     # If Time has been passed as a PrimeKey, convert the Time object into TAI time scale,
     # and then, convert it to datetime object.
     ds = self._make_recordset(**iargs)
     # Convert the list of keywords into comma-separated string.
     if isinstance(keywords, list):
         key = str(keywords)[1:-1].replace(' ', '').replace("'", '')
     else:
         key = keywords
     log.debug(f"Running following query: {ds}")
     log.debug(f"Requesting following keywords: {key}")
     result = client.query(ds, key=key, rec_index=isMeta)
     if result is None or result.empty:
         return astropy.table.Table()
     else:
         return astropy.table.Table.from_pandas(result)
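For context, a hedged sketch of a public Fido query that ends up in this lookup (the series name is illustrative; a JSOC Notify address is only required at fetch time, not for searching):

from sunpy.net import Fido, attrs as a

result = Fido.search(a.Time('2014-01-01T00:00:00', '2014-01-01T01:00:00'),
                     a.jsoc.Series('hmi.m_45s'))
print(result)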