Example #1
def test_angular_radius():
    # Regression-only test
    # The Astronomical Almanac publishes values, but I don't know what physical radius they use
    assert_quantity_allclose(sun.angular_radius("2012/11/11"), 968.871*u.arcsec, atol=1e-3*u.arcsec)
    with pytest.warns(ErfaWarning):
        assert_quantity_allclose(sun.angular_radius("2043/03/01"), 968.326*u.arcsec, atol=1e-3*u.arcsec)
    assert_quantity_allclose(sun.angular_radius("2001/07/21"), 944.039*u.arcsec, atol=1e-3*u.arcsec)
Example #2
def test_angular_radius(t2):
    # Validate against a published value from the Astronomical Almanac (2013, C13)
    # The Astronomical Almanac uses a slightly different radius for the Sun (6.96e5 km)
    # The Astronomical Almanac also uses a small-angle approximation
    # See https://archive.org/details/131123ExplanatorySupplementAstronomicalAlmanac/page/n212/mode/1up
    assert_quantity_allclose(sun.angular_radius(t2),
                             Angle('0d15m44.61s') / (6.96e5*u.km) * radius,  # scale to IAU radius
                             atol=0.005*u.arcsec)

    # Regression-only test
    assert_quantity_allclose(sun.angular_radius("2012/11/11"), 968.875*u.arcsec, atol=1e-3*u.arcsec)
    with pytest.warns(ErfaWarning):
        assert_quantity_allclose(sun.angular_radius("2043/03/01"),
                                 968.330*u.arcsec, atol=1e-3*u.arcsec)
    assert_quantity_allclose(sun.angular_radius("2001/07/21"), 944.042*u.arcsec, atol=1e-3*u.arcsec)
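The comparison above rescales the Almanac value from the photospheric radius it assumes (6.96e5 km) to the IAU radius used by sunpy. As a minimal sketch of the underlying relation (assuming `angular_radius` follows the exact arcsine form rather than the Almanac's small-angle approximation), the apparent radius can be reproduced from the physical radius and the Earth-Sun distance; the date below is an arbitrary illustration:
# Sketch: apparent angular radius from physical radius and distance.
import numpy as np
import astropy.units as u
from sunpy.coordinates import sun
from sunpy.sun import constants

t = "2012/11/11"  # arbitrary date for illustration
d = sun.earth_distance(t).to(u.km)
expected = np.arcsin(constants.radius / d).to(u.arcsec)
print(expected, sun.angular_radius(t))  # should agree closely if the exact relation is used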
Example #3
def test_rsun_missing():
    """Tests output if 'rsun' is missing"""
    euvi_no_rsun = Map(fitspath)
    euvi_no_rsun.meta['rsun'] = None
    with pytest.warns(SunpyUserWarning,
                      match='Missing metadata for solar radius'):
        assert euvi_no_rsun.rsun_obs.value == sun.angular_radius(
            euvi.date).to('arcsec').value
Example #4
def test_angular_radius():
    coord = Helioprojective(0 * u.deg,
                            0 * u.deg,
                            5 * u.km,
                            obstime="2010/01/01T00:00:00",
                            observer="earth")
    assert_quantity_allclose(coord.angular_radius,
                             angular_radius(coord.obstime))
Example #5
def test_angular_radius(t2):
    # Validate against a published value from the Astronomical Almanac (2013, C13)
    # The Astronomical Almanac uses a slightly different radius for the Sun (6.96e5 km)
    assert_quantity_allclose(
        sun.angular_radius(t2),
        Angle('0d15m44.61s') / (6.96e5 * u.km) * radius,  # scale to IAU radius
        atol=0.005 * u.arcsec)

    # Regression-only test
    assert_quantity_allclose(sun.angular_radius("2012/11/11"),
                             968.875 * u.arcsec,
                             atol=1e-3 * u.arcsec)
    with pytest.warns(ErfaWarning):
        assert_quantity_allclose(sun.angular_radius("2043/03/01"),
                                 968.330 * u.arcsec,
                                 atol=1e-3 * u.arcsec)
    assert_quantity_allclose(sun.angular_radius("2001/07/21"),
                             944.042 * u.arcsec,
                             atol=1e-3 * u.arcsec)
Example #6
def viewangle(xy, date=None, heliographic=False):
    """
    Return the viewing angle theta (and mu=cos(theta)) given a set of solar
    coordinates in the helioprojective or heliographic Stonyhurst system

    Parameters
    ----------
    xy : array_like
        2-element list or array with the coordinate values (in arcsec if
        helioprojective, in degrees if heliographic)
    date : str, optional
        date for which to get the viewing angle (default: today's date)
    heliographic : bool, optional
        switch to indicate input coordinates are heliographic Stonyhurst
        (default False, i.e. coordinates are helioprojective)

    Returns
    -------
    viewangle : list
        2-element list with [theta, mu]. Theta is given in degrees.

    Example
    -------
    >>> result = viewangle([240,-380], date='2017-08-02')
    >>> result = viewangle([30,-50], date='2019-01-12', heliographic=True) # S50 W30

    :Author:
        Gregal Vissers (ISP/SU 2019)
    """

    if date is None:
        date = datetime.now().date().strftime('%Y-%m-%d')

    # Convert to helioprojective if input is heliographic Stonyhurst
    if heliographic is True:
        c = SkyCoord(xy[0] * u.deg,
                     xy[1] * u.deg,
                     frame=frames.HeliographicStonyhurst,
                     obstime=date)
        xy_hpc = c.transform_to(frames.Helioprojective)
        xy = [xy_hpc.Tx.value, xy_hpc.Ty.value]

    r_sun = sun.angular_radius(date).value
    rho = np.sqrt(xy[0]**2 + xy[1]**2)
    if (rho > r_sun):
        raise ValueError(
            "viewangle: coordinates ({0},{1}) are not on the solar "
            "disc.".format(xy[0], xy[1]))
    mu = np.sqrt(1 - (rho / r_sun)**2)
    theta = np.degrees(np.arccos(mu))

    return [theta, mu]
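The key step in `viewangle` is mu = cos(theta) = sqrt(1 - (rho / R_sun)^2), where rho is the radial offset from disc centre and R_sun the apparent solar radius, both in arcsec. A small self-contained sketch of that relation with an arbitrary date: mu is 1 at disc centre and falls to 0 at the limb.
# Sketch of the mu(rho) relation used in viewangle; the date is arbitrary.
import numpy as np
from sunpy.coordinates import sun

r_sun = sun.angular_radius('2017-08-02').value   # apparent radius in arcsec
for rho in (0.0, 0.5 * r_sun, r_sun):
    mu = np.sqrt(1 - (rho / r_sun) ** 2)
    theta = np.degrees(np.arccos(mu))
    print(rho, theta, mu)   # centre: theta=0, mu=1; limb: theta=90, mu=0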
Example #7
def make_map_ipa(file):
    """
    Create a `sunpy.map.GenericMap` from a NoRH FITS file.
    This function fixes the header keywords so that the file loads
    correctly as a map.

    Parameters
    ----------
    file : `str`
        path of NoRH fits file to read into a map

    Returns
    -------
    `sunpy.map.GenericMap`

    """
    hdu = fits.open(file)[0]
    header = hdu.header
    data = hdu.data

    header['CUNIT1'], header['CUNIT2'] = 'arcsec', 'arcsec'
    header['CTYPE1'], header['CTYPE2'] = 'HPLN-TAN', 'HPLT-TAN'
    header['date-obs'] = header['date-obs'] + 'T' + header['time-obs']

    # get observer location
    observer = get_earth(header['date-obs'])
    observer = observer.transform_to(
        frames.HeliographicStonyhurst(obstime=observer.obstime))

    header['hgln_obs'] = observer.lon.to_value(u.deg)
    header['hglt_obs'] = observer.lat.to_value(u.deg)
    header['dsun_obs'] = observer.radius.to_value(u.m)

    header['rsun_obs'] = sun.angular_radius(
        header['date-obs']).to('arcsec').value

    norh_map = sunpy.map.Map(data, header)
    norh_map.plot_settings['cmap'] = 'BuPu'
    #norh_map.plot_settings['norm'] = LogNorm()

    return norh_map
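For reference, the observer keywords that `make_map_ipa` writes into the NoRH header can be computed on their own. A minimal sketch with a hypothetical observation time (not taken from any NoRH file):
# Sketch: Earth-observer keywords and RSUN_OBS for a hypothetical time.
import astropy.units as u
from sunpy.coordinates import frames, get_earth, sun

date = '2014-04-09T00:00:00'   # hypothetical date-obs
observer = get_earth(date).transform_to(
    frames.HeliographicStonyhurst(obstime=date))
print(observer.lon.to_value(u.deg),    # hgln_obs
      observer.lat.to_value(u.deg),    # hglt_obs
      observer.radius.to_value(u.m))   # dsun_obs
print(sun.angular_radius(date).to('arcsec').value)  # rsun_obs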
Example #8
def backprojection(calibrated_event_list,
                   pixel_size: u.arcsec = (1., 1.) * u.arcsec,
                   image_dim: u.pix = (64, 64) * u.pix):
    """
    Given a stacked calibrated event list FITS file, create a back
    projection image.

    .. warning:: The image is not in the right orientation!

    Parameters
    ----------
    calibrated_event_list : str
        filename of a RHESSI calibrated event list
    pixel_size : `~astropy.units.Quantity` instance
        the size of the pixels in arcseconds. Default is (1,1).
    image_dim : `~astropy.units.Quantity` instance
        the size of the output image in number of pixels

    Returns
    -------
    out : RHESSImap
        Return a backprojection map.

    Examples
    --------
    This example is broken.
    >>> import sunpy.data
    >>> import sunpy.data.sample # doctest: +REMOTE_DATA
    >>> import sunpy.instr.rhessi as rhessi
    >>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)   # doctest: +SKIP
    >>> map.peek()   # doctest: +SKIP

    """
    # import sunpy.map in here so that net and timeseries don't end up importing map
    import sunpy.map

    pixel_size = pixel_size.to(u.arcsec)
    image_dim = np.array(image_dim.to(u.pix).value, dtype=int)

    afits = sunpy.io.read_file(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(
        info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0], format='utime')

    image = np.zeros(image_dim)

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim)

    dict_header = {
        "DATE-OBS":
        time_range.center.strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1":
        pixel_size[0],
        "NAXIS1":
        image_dim[0],
        "CRVAL1":
        xyoffset[0],
        "CRPIX1":
        image_dim[0] / 2 + 0.5,
        "CUNIT1":
        "arcsec",
        "CTYPE1":
        "HPLN-TAN",
        "CDELT2":
        pixel_size[1],
        "NAXIS2":
        image_dim[1],
        "CRVAL2":
        xyoffset[1],
        "CRPIX2":
        image_dim[0] / 2 + 0.5,
        "CUNIT2":
        "arcsec",
        "CTYPE2":
        "HPLT-TAN",
        "HGLT_OBS":
        0,
        "HGLN_OBS":
        0,
        "RSUN_OBS":
        sun.angular_radius(time_range.center).value,
        "RSUN_REF":
        sunpy.sun.constants.radius.value,
        "DSUN_OBS":
        sun.earth_distance(time_range.center).value *
        sunpy.sun.constants.au.value
    }

    result_map = sunpy.map.Map(image, dict_header)

    return result_map
Example #9
def backprojection(calibrated_event_list,
                   pixel_size: u.arcsec = (1., 1.) * u.arcsec,
                   image_dim: u.pix = (64, 64) * u.pix):
    """
    Given a stacked calibrated event list FITS file, create a back projection
    image.

    .. warning::

        The image will not be in the right orientation.

    Parameters
    ----------
    calibrated_event_list : `str`
        Filename of a RHESSI calibrated event list.
    pixel_size : `tuple`, optional
        A length 2 tuple with the size of the pixels in arcsecond
        `~astropy.units.Quantity`. Defaults to  ``(1, 1) * u.arcsec``.
    image_dim : `tuple`, optional
        A length 2 tuple with the size of the output image in number of pixel
        `~astropy.units.Quantity` Defaults to ``(64, 64) * u.pix``.

    Returns
    -------
    `sunpy.map.sources.RHESSImap`
        A backprojection map.
    """
    # import sunpy.map in here so that net and timeseries don't end up importing map
    import sunpy.map

    pixel_size = pixel_size.to(u.arcsec)
    image_dim = np.array(image_dim.to(u.pix).value, dtype=int)

    afits = sunpy.io.read_file(calibrated_event_list)
    info_parameters = afits[2]
    xyoffset = info_parameters.data.field('USED_XYOFFSET')[0]
    time_range = TimeRange(
        info_parameters.data.field('ABSOLUTE_TIME_RANGE')[0], format='utime')

    image = np.zeros(image_dim)

    # find out what detectors were used
    det_index_mask = afits[1].data.field('det_index_mask')[0]
    detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
    for detector in detector_list:
        if detector > 0:
            image = image + _backproject(calibrated_event_list,
                                         detector=detector,
                                         pixel_size=pixel_size.value,
                                         image_dim=image_dim)

    dict_header = {
        "DATE-OBS":
        time_range.center.strftime("%Y-%m-%d %H:%M:%S"),
        "CDELT1":
        pixel_size[0],
        "NAXIS1":
        image_dim[0],
        "CRVAL1":
        xyoffset[0],
        "CRPIX1":
        image_dim[0] / 2 + 0.5,
        "CUNIT1":
        "arcsec",
        "CTYPE1":
        "HPLN-TAN",
        "CDELT2":
        pixel_size[1],
        "NAXIS2":
        image_dim[1],
        "CRVAL2":
        xyoffset[1],
        "CRPIX2":
        image_dim[0] / 2 + 0.5,
        "CUNIT2":
        "arcsec",
        "CTYPE2":
        "HPLT-TAN",
        "HGLT_OBS":
        0,
        "HGLN_OBS":
        0,
        "RSUN_OBS":
        sun.angular_radius(time_range.center).value,
        "RSUN_REF":
        sunpy.sun.constants.radius.value,
        "DSUN_OBS":
        sun.earth_distance(time_range.center).value *
        sunpy.sun.constants.au.value
    }

    result_map = sunpy.map.Map(image, dict_header)

    return result_map
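The solar ephemeris entries in `dict_header` (RSUN_OBS, RSUN_REF, DSUN_OBS) can be previewed in isolation. A minimal sketch with a hypothetical observation time standing in for `time_range.center`:
# Sketch: ephemeris header values for a hypothetical observation time.
import sunpy.sun.constants
from sunpy.coordinates import sun

t = '2002-02-20T11:06:00'                  # stand-in for time_range.center
print(sun.angular_radius(t).value)         # RSUN_OBS, arcsec
print(sunpy.sun.constants.radius.value)    # RSUN_REF, m
print(sun.earth_distance(t).value
      * sunpy.sun.constants.au.value)      # DSUN_OBS, m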
Example #10
def test_rsun_missing():
    """Tests output if 'rsun' is missing"""
    euvi_no_rsun = Map(fitspath)
    euvi_no_rsun.meta['rsun'] = None
    assert euvi_no_rsun.rsun_obs.value == sun.angular_radius(
        euvi.date).to('arcsec').value
Example #11
def prepData(files, base_dir, prefix, custom_keywords={}, plot=False):
    diffs = {'center_x': [], 'center_y': [], 'radius': [], 'scale': []}
    os.makedirs(os.path.join(base_dir, 'level1'), exist_ok=True)
    os.makedirs(os.path.join(base_dir, 'level1_5'), exist_ok=True)

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        for file in tqdm(files):
            try:
                # load existing file
                hdul = fits.open(file)
                hdu = hdul[0]
                hdu.verify('fix')
                d, h = hdu.data, hdu.header

                # set custom keywords
                h.update(custom_keywords)

                # evaluate center and radius
                imsave("demo.jpg", d)
                myCmd = os.popen(
                    '/home/rja/PythonProjects/SpringProject/spring/limbcenter/sunlimb demo.jpg'
                ).read()
                center_x, center_y, radius, d_radius = map(
                    float, myCmd.splitlines())

                if "EXPTIME" in h:
                    h['EXP_TIME'] = h['EXPTIME']
                    del h['EXPTIME']
                if 'TIME-OBS' in h:
                    obs_date = datetime.strptime(
                        h['DATE-OBS'] + 'T' + h['TIME-OBS'],
                        "%m/%d/1%yT%H:%M:%S")
                    h['DATE-OBS'] = obs_date.isoformat()
                    del h["TIME-OBS"]
                if 'TIME' in h:
                    obs_date = datetime.strptime(
                        h['DATE-OBS'] + 'T' + h['TIME'], "%d/%m/%YT%H:%M:%S")
                    h['DATE-OBS'] = obs_date.isoformat()
                    del h["TIME"]

                obs_time = parse(h["DATE-OBS"])
                rsun = angular_radius(obs_time)
                b0_angle = sun.B0(obs_time)
                l0 = sun.L0(obs_time)
                p_angle = sun.P(obs_time)
                filename = "%s_%s_fi_%s.fits" % (
                    prefix, h["OBS_TYPE"].lower(),
                    obs_time.strftime("%Y%m%d_%H%M%S"))

                # prepare existing header information
                if "ANGLE" not in h:
                    h["ANGLE"] = p_angle.value

                scale = rsun / (radius * u.pix)
                coord = SkyCoord(0 * u.arcsec,
                                 0 * u.arcsec,
                                 obstime=obs_time,
                                 observer='earth',
                                 frame=frames.Helioprojective)

                # create WCS header info
                header = header_helper.make_fitswcs_header(
                    d,
                    coord,
                    rotation_angle=h["ANGLE"] * u.deg,
                    reference_pixel=u.Quantity([center_x, center_y] * u.pixel),
                    scale=u.Quantity([scale, scale]),
                    instrument=h["INSTRUME"],
                    telescope=h["TELESCOP"],
                    observatory=h["OBSVTRY"],
                    exposure=h["EXP_TIME"] * u.ms,
                    wavelength=h["WAVELNTH"] * u.angstrom)

                header["KEYCOMMENTS"] = {
                    "EXPTIME": "[s] exposure time in seconds",
                    "DATE": "file creation date (YYYY-MM-DDThh:mm:ss UT)",
                    "DATE-OBS": "date of observation",
                    "WAVELNTH": "[Angstrom] wavelength",
                    "BANDPASS": "******",
                    "WAVEMIN": "[Angstrom] minimum wavelength",
                    "WAVEMAX": "[Angstrom] maximum wavelength",
                    "BZERO": "offset data range to that of unsigned short",
                    "CDELT1": "[arcsec/pix]",
                    "CDELT2": "[arcsec/pix]",
                    "SOLAR_R": "[pix]",
                    "DSUN_OBS": "[m]",
                    "RSUN_REF": "[m]",
                    "RSUN_ARC": "[%s]" % rsun.unit,
                    "ANGLE": "[deg]",
                    "SOLAR_P": "[%s]" % p_angle.unit,
                    "SOLAR_L0": "[%s]" % l0.unit,
                    "SOLAR_B0": "[%s]" % b0_angle.unit,
                    'SIMPLE': 'file does conform to FITS standard',
                    'BITPIX': 'number of bits per data pixel',
                    'CUNIT1': '[arcsec]',
                    'CUNIT2': '[arcsec]',
                    'CRVAL1': 'coordinate system value at reference pixel',
                    'CRVAL2': 'coordinate system value at reference pixel',
                    'CTYPE1': 'name of the coordinate axis',
                    'CTYPE2': 'name of the coordinate axis',
                    'INSTRUME': 'name of instrument',
                    'TELESCOP': 'name of telescope',
                    'OBSVTRY': 'name of observatory',
                }

                # set constants and default values
                header["FILENAME"] = filename
                header["DATE"] = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")

                header["SOLAR_R"] = radius
                header["RSUN_ARC"] = rsun.value
                header["SOLAR_P"] = p_angle.value
                header["SOLAR_L0"] = l0.value
                header["SOLAR_B0"] = b0_angle.value

                header["DATAMIN"] = np.min(d)
                header["DATAMEAN"] = np.mean(d)
                header["DATAMAX"] = np.max(d)

                # copy existing keys
                for key, value in h.items():
                    if key not in header:
                        header[key] = value

                # copy comments
                for key, value in zip(list(h.keys()), list(h.comments)):
                    if key not in header["KEYCOMMENTS"]:
                        header["KEYCOMMENTS"][key] = value

                # LEVEL 1
                s_map = Map(d.astype(np.float32), header)
                level1_path = os.path.join(base_dir, 'level1', filename)

                h.add_history("unified FITS header")
                s_map.meta["HISTORY"] = h["HISTORY"]
                s_map.meta["LVL_NUM"] = "1.0"
                s_map = Map(s_map.data.astype(np.float32), s_map.meta)
                s_map.save(level1_path, overwrite=True)

                # LEVEL 1.5
                scale = s_map.scale[0].value
                s_map = padScale(s_map)

                s_map = s_map.rotate(
                    recenter=True,
                    scale=scale,
                    missing=s_map.min(),
                )
                center = np.floor(s_map.meta['crpix1'])
                range_side = (center + np.array([-1, 1]) * 2048 / 2) * u.pix
                s_map = s_map.submap(
                    u.Quantity([range_side[0], range_side[0]]),
                    u.Quantity([range_side[1], range_side[1]]))
                level1_5_path = os.path.join(base_dir, 'level1_5', filename)

                h.add_history("recentered and derotated")
                s_map.meta["HISTORY"] = h["HISTORY"]
                s_map.meta["LVL_NUM"] = "1.5"
                s_map = Map(s_map.data.astype(np.float32), s_map.meta)
                s_map.save(level1_5_path, overwrite=True)

                if plot:
                    s_map.plot()
                    s_map.draw_grid()
                    plt.savefig(level1_5_path.replace(".fits", ".jpg"))
                    plt.close()

                # check header
                hdul = fits.open(level1_5_path)
                hdu = hdul[0]
                hdu.verify('exception')

                # evaluate difference
                if 'center_x' in h and not isinstance(h["center_x"], str):
                    diffs['center_x'].append(
                        np.abs(h['center_x'] - header['crpix1']))
                if 'center_y' in h and not isinstance(h["center_y"], str):
                    diffs['center_y'].append(
                        np.abs(h['center_y'] - header['crpix2']))
                if 'SOLAR_R' in h and not isinstance(h["SOLAR_R"], str):
                    diffs['radius'].append(
                        np.abs(h['SOLAR_R'] - header['SOLAR_R']))
                if 'cdelt1' in h and not isinstance(h["cdelt1"], str):
                    diffs['scale'].append(
                        np.abs(h['cdelt1'] - header['cdelt1']))
            except Exception as ex:
                print("INVALID FILE", file)
                print("ERROR:", ex)
        return diffs
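In `prepData` the plate scale comes from the apparent solar radius divided by the limb radius measured in pixels (`scale = rsun / (radius * u.pix)`). A minimal sketch of that step with hypothetical numbers in place of the sunlimb fit:
# Sketch: arcsec-per-pixel plate scale from the apparent solar radius and a
# hypothetical limb radius in pixels (stand-in for the sunlimb measurement).
import astropy.units as u
from sunpy.coordinates.sun import angular_radius

obs_time = '2019-01-12T08:00:00'   # hypothetical DATE-OBS
radius_pix = 880.0 * u.pix         # hypothetical limb radius in pixels
scale = angular_radius(obs_time) / radius_pix
print(scale.to(u.arcsec / u.pix))  # value that feeds CDELT1/CDELT2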
Example #12
def batch_dem_jp2(t_start,
                  cadence,
                  nobs,
                  fits_dir,
                  jp2_dir,
                  get_fits=0,
                  serr_per=10,
                  min_snr=2,
                  fe_min=2,
                  use_fe=False,
                  sat_lvl=1.5e4,
                  mk_jp2=False,
                  plot_out=False,
                  plot_loci=False,
                  mk_fits=False,
                  xp=370,
                  yp=750):
    """
    Batch script for loading (or downloading) synoptic data from JSOC, setting up the AIA degradation
    correction and temperature response, and running demregpy to produce 2-D DEM maps. Finally, the code
    has optional very basic plotting routines and an optional call to dem2jp2 to make JPEG2000 greyscale,
    bytescaled 8-bit images for hv.


    """
    version_number = 1.2
    contact_email = '*****@*****.**'
    location = 'University of Glasgow A+A'
    #we only want optically thin coronal wavelengths
    wavenum = ['94', '131', '171', '193', '211', '335']
    t_start = Time(t_start)
    print(t_start)
    for obs in np.arange(nobs):
        t_obs = (t_start + TimeDelta(obs * cadence, format='sec'))
        print(t_obs)
        t_obs.precision = 0
        #convert our string into a datetime object
        # t=dateutil.parser.parse(TimeString(t_obs))
        t = t_obs.to_datetime()

        #deconstruct the datetime object into a synoptic data filename
        file_str = [('AIA' + str(t.year).zfill(4) + str(t.month).zfill(2) +
                     str(t.day).zfill(2) + '_' + str(t.hour).zfill(2) +
                     str(t.minute).zfill(2) + '_' +
                     "{}".format(wave.zfill(4)) + '.fits')
                    for j, wave in enumerate(wavenum)]
        dir_str = str(t.year).zfill(4) + '/' + str(
            t.month).zfill(2) + '/' + str(t.day).zfill(2) + '/'
        syn_url = 'http://jsoc.stanford.edu/data/aia/synoptic/'
        nf = len(file_str)

        cklist = []

        for file in file_str:
            cklist.append(os.path.isfile(fits_dir + dir_str + file))
        if not os.path.isdir(fits_dir + dir_str):
            os.makedirs(fits_dir + dir_str)
        if not all(cklist):
            # not all the files exist.
            print('Downloading synoptic data')
            url = [(syn_url + dir_str + 'H' + str(t.hour).zfill(2) + '00' +
                    '/' + file_str[jj]) for jj, c in enumerate(file_str)]
            for jj, c in enumerate(file_str):
                # h = httplib2.Http()
                # open the webpage
                # resp = h.request(url[jj], 'HEAD')
                request = requests.get(url[jj])
                if request.status_code < 400:
                    # webpage exists so download
                    wget.download(url[jj], fits_dir + dir_str)
            cklist = []
            for file in file_str:
                # check for files again
                cklist.append(os.path.isfile(fits_dir + dir_str + file))
            if not all(cklist):
                #skipping this observation
                print('\n Missing synoptic data for ' + str(t) +
                      '...Skipping to next...')

                continue

        #find the files in their directory
        fits_files = [
            fits_dir + dir_str + file_str[j] for j in np.arange(len(file_str))
        ]
        #load the fits with sunpy
        aia = Map(fits_files)
        if aia[0].meta['percentd'] < 95.0:
            #check if the percentage of good data is > 95, otherwise skip to the next observation
            print('\n PERCENTAGE GOOD DATA BELOW THRESHOLD = ' +
                  str(aia[0].meta['percentd']) + ' at ' + str(t) +
                  '\n...Skipping to next...')
            dem = Dem()
            continue
        if aia[2].meta['datamax'] < 1e3:
            #check if the data is actually there; sometimes synoptic output is noise-only maps when the observation was not taken, so we just threshold AIA 171 at 1k DN
            print(
                '\n Image looks like noise, possibly broken synoptic maps? Datamax = '
                + str(aia[2].meta['datamax']) + ' at ' + str(t) +
                '\n...Skipping to next...')
            dem = Dem()
            continue
        correction_table = get_correction_table()
        # correction_table=get_correction_table('aia_V8_20171210_050627_response_table.txt')  #for use as a hardcoded response file
        cal_ver = 10
        #correct the images for degradation
        aia = [
            correct_degradation(m,
                                correction_table=correction_table,
                                calibration_version=cal_ver) for m in aia
        ]
        # aia = [update_pointing(m) for m in aia]

        channels = [aia[i].wavelength for i in range(nf)]

        nt = 32
        t_space = 0.05
        t_min = 5.6
        logtemps = np.linspace(t_min, t_min + t_space * nt, num=nt + 1)
        temperatures = 10**logtemps
        logt_bin = np.zeros(nt)
        for i in np.arange(nt):
            logt_bin[i] = (logtemps[i] + logtemps[i + 1]) / 2

        tren = io.readsav('aia_trespv9_en.dat')
        tresp_logt = tren.logt
        tresp_calibrated = np.zeros([tresp_logt.shape[0], nf + 1])
        tresp_calibrated[:, :-1] = tren.tr.T

        #initialise structure
        dem = Dem()
        dem.bitpix = 8
        nx = aia[0].meta['naxis1']
        ny = aia[0].meta['naxis2']
        dem.naxis1 = nx
        dem.naxis2 = ny
        dem.crota2 = 0
        dem.crval1 = aia[0].meta['crval1']
        dem.crval2 = aia[0].meta['crval2']
        dem.crpix1 = 512.5
        dem.crpix2 = 512.5
        dem.cdelt1 = aia[0].meta['cdelt1'] * 4.0
        dem.cdelt2 = aia[0].meta['cdelt2'] * 4.0
        dem.cunit1 = aia[0].meta['cunit1']
        dem.cunit2 = aia[0].meta['cunit2']
        dem.dsun_obs = aia[0].meta['dsun_obs']
        dem.crlt_obs = aia[0].meta['crlt_obs']
        dem.crln_obs = aia[0].meta['crln_obs']
        dem.hglt_obs = B0(t_obs).value
        dem.hgln_obs = 0
        dem.temperatures = temperatures
        dem.minTemp = logtemps[0]
        dem.maxTemp = logtemps[-1]
        dem.t_obs = t_obs
        dem.filt_use = 6
        dem.rsun_ref = 6.957E+08
        dem.rsun_obs = angular_radius(t_obs).value
        dem.hv_zero = np.log10(dem.dem_min)
        dem.hv_scale = (np.log10(dem.dem_max) - np.log10(dem.dem_min)) / 255
        dem.contact = contact_email
        dem.produced = 'Produced at ' + location + ' on: ' + datetime.today(
        ).strftime('%Y-%m-%d')
        dem.dem_ver = version_number

        dem1 = Dem()
        dem1.bitpix = 8
        dem1.naxis1 = nx
        dem1.naxis2 = ny
        dem1.crota2 = aia[0].meta['crota2']
        dem1.crval1 = aia[0].meta['crval1']
        dem1.crval2 = aia[0].meta['crval2']
        dem1.crpix1 = 512.5
        dem1.crpix2 = 512.5
        dem1.cdelt1 = aia[0].meta['cdelt1'] * 4.0
        dem1.cdelt2 = aia[0].meta['cdelt2'] * 4.0
        dem1.cunit1 = aia[0].meta['cunit1']
        dem1.cunit2 = aia[0].meta['cunit2']
        dem1.dsun_obs = aia[0].meta['dsun_obs']
        dem1.crlt_obs = aia[0].meta['crlt_obs']
        dem1.crln_obs = aia[0].meta['crln_obs']
        dem1.hglt_obs = B0(t_obs).value
        dem1.hgln_obs = 0
        dem1.minTemp = logtemps[0]
        dem1.maxTemp = logtemps[-1]
        dem1.filt_use = 7
        dem1.rsun_ref = 6.957E+08
        dem1.rsun_obs = np.rad2deg(np.arctan2(dem1.rsun_ref,
                                              dem1.dsun_obs)) * 3600
        dem1.hv_zero = np.log10(dem1.dem_min)
        dem1.hv_scale = (np.log10(dem1.dem_max) - np.log10(dem1.dem_min)) / 255
        dem1.contact = contact_email
        dem1.produced = 'Produced at ' + location + ' on: ' + datetime.today(
        ).strftime('%Y-%m-%d')
        dem1.dem_ver = version_number

        data = np.zeros([nx, ny, nf + 1])

        #errors in dn/px
        npix = 4096.**2 / (nx * ny)
        edata = np.zeros([nx, ny, nf + 1])
        gains = np.array([18.3, 17.6, 17.7, 18.3, 18.3, 17.6])
        dn2ph = gains * [94, 131, 171, 193, 211, 335] / 3397.0
        rdnse = 1.15 * np.sqrt(npix) / npix
        drknse = 0.17
        qntnse = 0.288819 * np.sqrt(npix) / npix
        for f in range(nf):
            #convert from our list to an array of data
            data[:, :, f] = aia[f].data
            data[data < 0.0] = 0.0
        for j in np.arange(nf):
            shotnoise = (dn2ph[j] * data[:, :, j])**0.5 / dn2ph[j]
            esys = serr_per / 100.0 * data[:, :, j]
            etemp = np.sqrt(rdnse**2. + drknse**2. + qntnse**2. + shotnoise**2)
            edata[:, :, j] = np.sqrt(esys**2 + etemp**2)

        for f in range(nf):
            #convert to values per second
            data[:, :, f] = data[:, :, f] / aia[f].exposure_time.to(u.s).value
            edata[:, :,
                  f] = edata[:, :, f] / aia[f].exposure_time.to(u.s).value

        #calculate the hot component of aia 94
        a94_fe18 = np.zeros([nx, ny])
        a94_warm = np.zeros([nx, ny])
        a94_fe18[:, :] = data[:, :,
                              0] - data[:, :, 4] / 120.0 - data[:, :,
                                                                2] / 450.0
        a94_warm = data[:, :, 0] - a94_fe18[:, :]

        #threshold of fe_min for the hot component

        a94_fe18[a94_fe18 <= 0] = 0.0001
        a94_warm[a94_warm <= 0] = 0.0001
        data[:, :, 6] = a94_fe18

        #now we need fe18 temp response in a94
        trfe = (tresp_calibrated[:, 0] - tresp_calibrated[:, 4] / 120.0 -
                tresp_calibrated[:, 2] / 450.0)
        trfe[tresp_logt <= 6.4] = 1e-38
        #remove low peak

        tresp_calibrated[:, 6] = trfe + 1e-3 * tresp_calibrated[:, 0]
        #errors on fe18 are a little arbitrary: a percentage error plus a flat term of 2
        edata[:, :, 6] = serr_per / 100 * data[:, :, 6] + 2.0
        plt.rcParams.update({'font.size': 10})
        # dem,edem,elogt,chisq,dn_reg=dn2dem_pos(data[x1:x2,y1:y2,:filt_use],edata[x1:x2,y1:y2,:filt_use],tresp_calibrated[:,:filt_use],tresp_logt,temperatures,dem_norm0=dem_norm0[x1:x2,y1:y2,:],max_iter=10)
        x1 = 0
        x2 = nx
        y1 = 0
        y2 = ny
        filt_use = 6
        norm_mean = 6.35
        norm_std = 0.35
        dem_norm0 = np.zeros([nx, ny, nt])
        dem_norm = gaussian(logt_bin, norm_mean, norm_std)
        dem_norm0[:, :, :] = dem_norm[:]
        dem1.data, dem1.edem, dem1.elogt, dem1.chisq, dem1.dn_reg = dn2dem_pos(
            data[x1:x2, y1:y2, :filt_use],
            edata[x1:x2, y1:y2, :filt_use],
            tresp_calibrated[:, :filt_use],
            tresp_logt,
            dem.temperatures,
            max_iter=15,
            dem_norm0=dem_norm0)

        if plot_out == True:
            aia_col = ['#c2c3c0', '#g0r0r0']
            fig = plt.figure(figsize=(8, 7))
            for j in range(int(nt / 2)):
                fig = plt.subplot(4, 4, j + 1)

                em_loci = data[xp, yp, :] / tresp_calibrated
                plt.errorbar(logt_bin,
                             dem1.data[xp, yp + j * 5, :],
                             color='c',
                             xerr=dem1.elogt[xp, yp + j * 5, :],
                             yerr=dem1.edem[xp, yp + j * 5, :],
                             fmt='or',
                             ecolor='gray',
                             elinewidth=3,
                             capsize=0)
                for i in range(7):
                    em_loci[:-1, i] = em_loci[:-1, i] / (10**tresp_logt[1:] -
                                                         10**tresp_logt[:-1])
                if plot_loci == True:
                    plt.plot(tresp_logt[:-1], em_loci[:-1, :6])
                ax = plt.gca()
                plt.ylim([1e19, 1e23])
                plt.xlim([5.7, 7.3])
                plt.xlabel(r'$\mathrm{\log_{10}T\;[K]}$')
                plt.ylabel(r'$\mathrm{DEM\;[cm^{-5}\;K^{-1}]}$')
                plt.yscale('log')
                ax.label_outer()
            plt.gcf().suptitle("6 Filter", fontsize=14)
            plt.gcf().tight_layout(pad=2.0)

            fig = plt.figure(figsize=(8, 7))
            for j in range(int(nt / 2)):
                fig = plt.subplot(4, 4, j + 1)
                plt.imshow(np.log10(dem1.data[:, :, 2 * j] + 1),
                           'inferno',
                           vmin=19,
                           vmax=24,
                           origin='lower')
                ax = plt.gca()
                ax.set_title('%.1f' % (t_min + 2 * j * 0.05))
            plt.gcf().suptitle("dem1", fontsize=14)

        if use_fe == True:
            filt_use = 7

        dem_norm0 = np.zeros([nx, ny, nt])
        mxdem = np.max(dem1.data)
        for ii in np.arange(nx):
            for jj in np.arange(ny):
                dem_norm0[ii,
                          jj, :] = (np.convolve(dem1.data[ii, jj, 1:-1],
                                                np.ones(5) / 5))[1:-1] / mxdem
        dem_norm0[dem_norm0 <= 1e-8] = 1e-8

        if plot_out == True:
            aia_col = ['#c2c3c0', '#g0r0r0']
            fig = plt.figure(figsize=(8, 7))
            for j in range(int(nt / 2)):
                fig = plt.subplot(4, 4, j + 1)

                em_loci = data[xp, yp, :] / tresp_calibrated
                plt.errorbar(logt_bin,
                             dem_norm0[xp, yp + j * 5, :] * mxdem,
                             color='c',
                             xerr=dem1.elogt[xp, yp + j * 5, :],
                             yerr=dem1.edem[xp, yp + j * 5, :],
                             fmt='or',
                             ecolor='gray',
                             elinewidth=3,
                             capsize=0)
                for i in range(7):
                    em_loci[:-1, i] = em_loci[:-1, i] / (10**tresp_logt[1:] -
                                                         10**tresp_logt[:-1])
                if plot_loci == True:
                    plt.plot(tresp_logt[:-1], em_loci[:-1, :6])
                ax = plt.gca()
                plt.ylim([1e19, 1e23])
                plt.xlim([5.7, 7.3])
                plt.xlabel(r'$\mathrm{\log_{10}T\;[K]}$')
                plt.ylabel(r'$\mathrm{DEM\;[cm^{-5}\;K^{-1}]}$')
                plt.yscale('log')
                ax.label_outer()
            plt.gcf().suptitle("DEM NORM", fontsize=14)
            plt.gcf().tight_layout(pad=2.0)

        if use_fe == True:
            data[a94_fe18 < fe_min, :] = 0
        dem.data, dem.edem, dem.elogt, dem.chisq, dem.dn_reg = dn2dem_pos(
            data[x1:x2, y1:y2, :filt_use],
            edata[x1:x2, y1:y2, :filt_use],
            tresp_calibrated[:, :filt_use],
            tresp_logt,
            temperatures,
            dem_norm0=dem_norm0[x1:x2, y1:y2, :],
            max_iter=15)
        if use_fe == True:
            dem.data[a94_fe18 < fe_min, :] = dem1.data[a94_fe18 < fe_min, :]
        dem.data[dem.data <= 0] = 1

        if plot_out == True:
            aia_col = ['#c2c3c0', '#g0r0r0']
            fig = plt.figure(figsize=(8, 7))
            for j in range(int(np.floor(nt / 2))):
                fig = plt.subplot(4, 4, j + 1)

                em_loci = data[xp, yp, :] / tresp_calibrated
                plt.errorbar(logt_bin,
                             dem.data[xp, yp + j * 5, :],
                             color='c',
                             xerr=dem.elogt[xp, yp + j * 5, :],
                             yerr=dem.edem[xp, yp + j * 5, :],
                             fmt='or',
                             ecolor='gray',
                             elinewidth=3,
                             capsize=0)
                for i in range(7):
                    em_loci[:-1, i] = em_loci[:-1, i] / (10**tresp_logt[1:] -
                                                         10**tresp_logt[:-1])
                if plot_loci == True:
                    plt.plot(tresp_logt[:-1], em_loci[:-1, :6])
                ax = plt.gca()
                plt.ylim([1e19, 1e23])
                plt.xlim([5.7, 7.3])
                plt.xlabel(r'$\mathrm{\log_{10}T\;[K]}$')
                plt.ylabel(r'$\mathrm{DEM\;[cm^{-5}\;K^{-1}]}$')
                plt.yscale('log')
                ax.label_outer()
            plt.gcf().suptitle("7", fontsize=14)
            plt.gcf().tight_layout(pad=2.0)

            fig = plt.figure(figsize=(8, 7))
            for j in range(int(nt / 2)):
                fig = plt.subplot(4, 4, j + 1)
                plt.imshow(np.log10(dem.data[:, :, j * 2] + 1),
                           'inferno',
                           vmin=19,
                           vmax=24,
                           origin='lower')
                ax = plt.gca()
                ax.set_title('%.1f' % (t_min + j * 2 * 0.05))
            plt.gcf().suptitle("7", fontsize=14)
            plt.gcf().tight_layout(pad=2.0)

        if plot_out == True:
            aia_col = ['#c2c3c0', '#g0r0r0']
            fig = plt.figure(figsize=(8, 7))
            for j in range(int(np.floor(nt / 2))):
                fig = plt.subplot(4, 4, j + 1)

                em_loci = data[xp, yp, :] / tresp_calibrated
                plt.errorbar(logt_bin,
                             dem.data[xp, yp + j * 5, :],
                             color='c',
                             xerr=dem.elogt[xp, yp + j * 5, :],
                             yerr=dem.edem[xp, yp + j * 5, :],
                             fmt='or',
                             ecolor='gray',
                             elinewidth=3,
                             capsize=0)
                for i in range(7):
                    em_loci[:-1, i] = em_loci[:-1, i] / (10**tresp_logt[1:] -
                                                         10**tresp_logt[:-1])
                if plot_loci == True:
                    plt.plot(tresp_logt[:-1], em_loci[:-1, :6])
                ax = plt.gca()
                plt.ylim([1e19, 1e23])
                plt.xlim([5.7, 7.3])
                plt.xlabel(r'$\mathrm{\log_{10}T\;[K]}$')
                plt.ylabel(r'$\mathrm{DEM\;[cm^{-5}\;K^{-1}]}$')
                plt.yscale('log')
                ax.label_outer()
            plt.gcf().suptitle("Combo", fontsize=14)
            plt.gcf().tight_layout(pad=2.0)

            fig = plt.figure(figsize=(8, 7))
            for j in range(int(nt / 2)):
                fig = plt.subplot(4, 4, j + 1)
                plt.imshow(np.log10(dem.data[:, :, j * 2] + 1),
                           'inferno',
                           vmin=19,
                           vmax=24,
                           origin='lower')
                ax = plt.gca()
                ax.set_title('%.1f' % (t_min + j * 2 * 0.05))
            plt.gcf().suptitle("Combo", fontsize=14)
            plt.gcf().tight_layout(pad=2.0)
            aia[0].peek()
            plt.show()
        dem.nimg = int(np.floor(nt / 4))
        if mk_jp2 == True:
            if not os.path.isdir(jp2_dir + dir_str):
                os.makedirs(jp2_dir + dir_str)
            for i in range(dem.nimg):
                img_data = (dem.data[:, :, i * 2] + dem.data[:, :, i * 2 + 1] +
                            dem.data[:, :, i * 2 + 2] +
                            dem.data[:, :, i * 2 + 3]) / 4
                jp2_fname = (str(t.year).zfill(4) + '_' +
                             str(t.month).zfill(2) + '_' +
                             str(t.day).zfill(2) + '__' +
                             str(t.hour).zfill(2) + '_' +
                             str(t.minute).zfill(2) + '_' +
                             str(t.second).zfill(2) + '_00' +
                             '__DEM_REGINV_T_' + '%.2f_%.2f' %
                             (logtemps[i * 4], logtemps[i * 4 + 4]))
                tmin = logtemps[i * 4]
                tmax = logtemps[(i + 1) * 4]
                print('writing ' + jp2_fname + ' img ' + str(i + 1) + ' of ' +
                      str(dem.nimg))
                dem2jp2(img_data,
                        dem,
                        jp2_dir + dir_str + jp2_fname,
                        i,
                        tmin,
                        tmax,
                        mk_fits=mk_fits)
    return dem
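The script stores the apparent radius two ways: `dem.rsun_obs` from `angular_radius(t_obs)` and `dem1.rsun_obs` from the arctangent of the reference radius over the observer distance. A small sketch comparing the two recipes, with a hypothetical time and an Earth distance standing in for the AIA `dsun_obs` keyword; they should differ by far less than an arcsecond:
# Sketch: compare the two RSUN_OBS recipes used above (hypothetical inputs).
import numpy as np
import astropy.units as u
from sunpy.coordinates import sun

t_obs = '2021-06-01T00:00:00'                        # hypothetical time
rsun_ref = 6.957e8                                   # m, as in the script
dsun_obs = sun.earth_distance(t_obs).to_value(u.m)   # stand-in for dsun_obs
from_ephemeris = sun.angular_radius(t_obs).value
from_geometry = np.rad2deg(np.arctan2(rsun_ref, dsun_obs)) * 3600
print(from_ephemeris, from_geometry)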