Example #1
    def _get_time_mask(self, startdt, enddt):
        """Return mask in data arrays for all
        data in interval [startdt,enddt)"""
        startjd = datetime2jd(startdt)
        endjd = datetime2jd(enddt)

        inrange = np.logical_and(self['jds'].flatten() >= startjd,
                                 self['jds'].flatten() < endjd)
        return inrange
Example #2
def get_f107_ap(dt, silent=False):
    """
    Get F10.7 for the day of interest (noon), for the previous day,
    and as an 81-day centered average.
    Also get the Ap index for the day of interest (noon).
    """
    jd = special_datetime.datetime2jd(dt)

    oi = omnireader.omni_interval(dt - datetime.timedelta(days=41),
                                  dt + datetime.timedelta(days=41), 'hourly')

    odt = oi['Epoch']
    ojd = special_datetime.datetimearr2jd(odt).flatten()
    f107_81 = oi['F10_INDEX']
    ap_81 = oi['AP_INDEX']
    f107a = np.nanmean(f107_81)
    today = np.logical_and(ojd > np.floor(jd), ojd < np.ceil(jd))
    yesterday = np.logical_and(ojd > np.floor(jd - 1), ojd < np.ceil(jd - 1))
    f107p = np.nanmean(f107_81[yesterday])  # Previous day f10.7
    f107 = np.nanmean(f107_81[today])
    ap = np.nanmean(ap_81[today])
    if not silent:
        print(dt.strftime('%Y%m%d %H:%M'))
        print('Yesterday F107 %.2f' % (f107p))
        print('Today F107 %.2f' % (f107))
        print('81-day avg F107: %.2f' % (f107a))
    return f107, ap, f107p, f107a
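A minimal usage sketch (the date is illustrative; the omnireader and geospacepy packages used above must be importable):

import datetime

f107, ap, f107p, f107a = get_f107_ap(datetime.datetime(2013, 3, 16, 12),
                                     silent=True)
print('F107 today/previous/81-day: %.1f / %.1f / %.1f; Ap: %.1f'
      % (f107, f107p, f107a, ap))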
Example #3
def hourly_solarwind_for_average(dt,oi):
    """
    Reads the solar wind OrderedDict for time dt (via
    read_solarwind, which returns omni data at a sub-hourly
    cadence, 5 min or 1 min) and averages the data to an hourly
    cadence relative to time dt (dt must be within the range of
    the data). Returns a new OrderedDict with
    'n_hours_in_average' values for each solar wind parameter.
    Note: the oi argument is not used; the data comes from
    read_solarwind(dt).
    """
    n_hours_in_average = 4 #number of hourly datapoints (4 previous)
    
    target_jd = special_datetime.datetime2jd(dt)
    
    sw = read_solarwind(dt)

    #Julian date in days to time relative to target time in hours
    #with positive values indicating time before the target
    hours_before_target = -1*(sw['jd']-target_jd)*24.
    
    sw4avg = OrderedDict()
    for swkey,swdata in sw.items():
        hourly_swdata = []
        for hour in range(n_hours_in_average)[::-1]:
            hourmask = np.logical_and(hours_before_target>=hour,
                                      hours_before_target<(hour+1))    
            if swkey == 'jd':
                hourly_swdata.append(np.nanmax(swdata[hourmask]))
            else:
                hourly_swdata.append(np.nanmean(swdata[hourmask]))
        sw4avg[swkey]=np.array(hourly_swdata)
    return sw4avg
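A hypothetical call sketch (the date is illustrative; since the oi argument is unused by this function, None is passed):

import datetime

dt = datetime.datetime(2013, 3, 16, 3)
sw4avg = hourly_solarwind_for_average(dt, None)
print(sw4avg['Bz'])  # the 4 hourly-averaged Bz values preceding dt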
Example #4
def greenwich_mean_siderial_time(jds):
    """Calculate the angle in the plane of the equator
    between the vernal equinox direction and the prime meridian (the 
    line of longitude through Greenwich, England).
    
    Parameters
    ----------

    jds : float or np.ndarray
        The julian date(s) of the times for which the GMST should
        be calculated

    Returns
    -------

    theta_GST : float or np.ndarray
        The Greenwich Mean Sidereal Time in radians

    Notes
    -----

    Because this calculation depends on the exact number of earth
    rotations since the J2000 epoch, the time (julian date) should,
    strictly speaking, be in the UT1 system (the time system determined
    from observations of distant stars), which accounts for the small
    changes in earth's rotation speed.
    
    Generally though, UTC is available instead of UT1. UTC is determined
    from atomic clocks, and is kept within +- 1 second of UT1 
    by the periodic insertion of leap seconds.
    
    """
    jd_j2000 = datetime2jd(dt_j2000)
    t_ut1 = (jds -
             jd_j2000) / 36525.  #Julian centuries since the J2000.0 epoch
    #Note this formula can be split into a two-part (hours and seconds)
    #version using a two-part T_UT1 (876600 hours times 3600 s/hour);
    #splitting T_UT1 that way improves the floating point accuracy
    #of the polynomial terms
    theta_GST_s = 67310.54841 + (
        876600. * 3600. +
        8640184.812866) * t_ut1 + .093104 * t_ut1**2 - 6.2e-6 * t_ut1**3

    # NOTE: In Python (and Numpy), the output of modulus is
    # always the same sign as the divisor (360. in the case of angles)
    # so we can treat negative out of bounds the same
    # as positive

    #Wrap theta_GST into [0, 86400) seconds (one earth rotation)
    theta_GST_s = np.mod(theta_GST_s, 86400.)

    #Convert theta_GST to degrees from seconds
    theta_GST = theta_GST_s / 240.

    # Ensure in 0 to 360.
    theta_GST = np.mod(theta_GST, 360.)

    # Radians
    theta_GST = theta_GST * np.pi / 180.
    return theta_GST
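A quick worked check at the J2000.0 epoch itself: t_ut1 is zero, so only the constant term survives, and 67310.54841 s / 240 (seconds per degree) ≈ 280.46 degrees, the well-known GMST at J2000.0.

import numpy as np

gmst_rad = greenwich_mean_siderial_time(2451545.0)  # J2000.0 epoch
print(np.degrees(gmst_rad))  # ~280.46 deg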
Example #5
def test_julian_date_matches_vallado():
    """
    Test that the julian date function matches
    an example from Vallado: Fundamentals of Astrodynamics and Applications
    """
    dt = vallado_3_4_dt
    jd_ut = special_datetime.datetime2jd(dt)
    expected_jd = vallado_3_4_jd
    assert abs(jd_ut - expected_jd) < .000001
Example #6
def get_daily_f107(dt, oi):
    """
    Since OvationPyme uses hourly OMNI data,
    take the mean of all of the 1-hour values for the day
    of dt (used for calculating solar conductance)
    """
    omjd = special_datetime.datetimearr2jd(oi['Epoch']).flatten()
    omf107 = oi['F10_INDEX']
    jd = special_datetime.datetime2jd(dt)
    #Match every hourly epoch on the same calendar day as dt
    #(julian days begin at noon, hence the half-day offset)
    imatch = np.floor(omjd + 0.5) == np.floor(jd + 0.5)
    return np.nanmean(omf107[imatch])
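A minimal usage sketch (the date and interval width are illustrative; omni_interval is the omnireader constructor used in Example #2):

import datetime

dt = datetime.datetime(2013, 3, 16, 3)
oi = omnireader.omni_interval(dt - datetime.timedelta(days=1),
                              dt + datetime.timedelta(days=1), 'hourly')
print(get_daily_f107(dt, oi))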
Example #7
def _almanac_expected(alman_dict):
    dt = alman_dict['dt']
    ra_hms = alman_dict['ra_hrs_mins_secs']
    dec_dms = alman_dict['dec_deg_mins_secs']
    jd = datetime2jd(dt)
    expected_ra_hrs = float(ra_hms[0])\
                        +float(ra_hms[1])/60.\
                        +float(ra_hms[2])/3600.
    expected_ra_rad = expected_ra_hrs/12*np.pi
    expected_dec_deg = float(dec_dms[0])\
                        +float(dec_dms[1])/60.\
                        +float(dec_dms[2])/3600. 
    expected_dec_rad = np.radians(expected_dec_deg)
    return dt,jd,expected_ra_rad,expected_dec_rad
Example #8
def test_approx_lmst_for_boulder(local_hour):
    #In 2010, US daylight saving time ran March 14 to November 7;
    #pick a date outside daylight saving time
    year,month,day = 2010,2,1
    #Mountain standard time is UTC - 7
    #Local mean solar time can differ from zone (clock) time by up to 2 hours
    #http://blog.poormansmath.net/the-time-it-takes-to-change-the-time/
    ut_hour = np.mod(local_hour+7,24)
    dt_utc = datetime.datetime(year,month,day,ut_hour)
    jd_utc = datetime2jd(dt_utc)
    local_mean_solar_rads = local_mean_solar_time(jd_utc,boulder_glon)
    local_mean_solar_hour = local_mean_solar_rads*12/np.pi
    #if local_mean_solar_hour<0:
    #    local_mean_solar_hour+=24
    #local_mean_solar_hour = np.mod(local_mean_solar_hour,24.)
    assert(pytest.approx(local_mean_solar_hour,abs=2) == local_hour)
Example #9
def test_gmst_matches_vallado(shape):
    vallado_3_5_jd = datetime2jd(vallado_3_5_dt)
    if shape is None:
        jds = vallado_3_5_jd
        expected_gmsts_deg = vallado_3_5_gmst
    else:
        jds = np.ones(shape)*vallado_3_5_jd
        expected_gmsts_deg = np.ones(shape)*vallado_3_5_gmst
    gmsts = greenwich_mean_siderial_time(jds)
    gmsts_deg = np.degrees(gmsts)

    tol = 1e-7
    if shape is None:
        assert(np.abs(gmsts_deg-expected_gmsts_deg)<tol)
    else:
        nptest.assert_allclose(gmsts_deg,expected_gmsts_deg,atol=tol,rtol=0.)
Example #10
def calc_avg_solarwind(dt, oi=None):
    """
    Calculates a weighted average of several
    solar wind variables n_hours (4 by default) backward
    in time from the closest hourly OMNIWeb
    datum to datetime dt

    oi is an optional omnireader.omni_interval
    instance from which to read the data. If this
    is None (default), will create a new omni_interval
    """
    n_hours = 4  # hours previous to integrate over
    prev_hour_weight = 0.65  # reduce weighting by this factor each hour back

    if oi is None:
        #Create a new interval wide enough to cover the average
        #(omnireader is assumed imported at module level, as in Example #2)
        oi = omnireader.omni_interval(dt - datetime.timedelta(days=1),
                                      dt + datetime.timedelta(days=1),
                                      'hourly')

    if oi.cadence == 'hourly':
        velvar, densvar = 'V', 'N'
    else:
        velvar, densvar = 'flow_speed', 'proton_density'

    #Find closest time to dt in julian date instead of datetime (comparisons with arrays of datetime are slow)
    jd = special_datetime.datetime2jd(dt)
    om_jd = special_datetime.datetimearr2jd(oi['Epoch'])
    Bx, By, Bz = oi['BX_GSE'], oi['BY_GSM'], oi['BZ_GSM']
    V, Ni = oi[velvar], oi[densvar]
    Ec = calc_coupling(Bx, By, Bz, V)

    i_first = np.nanargmin(np.abs(om_jd - jd))

    #Get indices of all data in the average
    om_in_avg = list(range(i_first - n_hours, i_first + 1))
    #Geometrically decaying weights: the most recent hour gets weight 1,
    #each hour farther back is reduced by a factor of prev_hour_weight
    weights = [
        prev_hour_weight**n_hours_back
        for n_hours_back in reversed(list(range(n_hours + 1)))
    ]

    #Calculate weighted averages (normalize by the sum of the weights)
    sum_weights = np.sum(weights)
    avgsw = dict()
    avgsw['Bx'] = np.nansum(Bx[om_in_avg] * weights) / sum_weights
    avgsw['By'] = np.nansum(By[om_in_avg] * weights) / sum_weights
    avgsw['Bz'] = np.nansum(Bz[om_in_avg] * weights) / sum_weights
    avgsw['V'] = np.nansum(V[om_in_avg] * weights) / sum_weights
    avgsw['Ec'] = np.nansum(Ec[om_in_avg] * weights) / sum_weights

    return avgsw
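A small sketch of the weighting scheme above, pure arithmetic with no data required: with prev_hour_weight = 0.65 and n_hours = 4, the weights run from oldest to newest hour as

prev_hour_weight, n_hours = 0.65, 4
weights = [prev_hour_weight**n for n in reversed(range(n_hours + 1))]
print(weights)  # [0.1785..., 0.2746..., 0.4225, 0.65, 1.0]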
Example #11
def ssm_geo_to_passdata(h5f,ssm_geo_generator):
	code,satnum,dt,sod,glat,glon,dbx,dby,dbz = next(ssm_geo_generator)
	print("Adding Pass Data to HDF5 file for %s %s %s" % (str(code),str(satnum),dt.strftime('%Y%m%d')))

	h5group = ensure_hdf5_group_structure(h5f,code,satnum,dt)

	#Find the pass boundary crossings using only finite latitudes,
	#then map the crossing indices back into the full arrays
	finite_inds = np.flatnonzero(np.isfinite(glat))
	finglat = glat[np.isfinite(glat)]
	xings = satplottools.simple_passes(finglat)
	xings = finite_inds[xings]

	xing_sod = sod[xings]
	xing_glat = glat[xings]
	xing_glon = glon[xings]
	xing_dbx,xing_dby,xing_dbz = dbx[xings],dby[xings],dbz[xings]
	delta = 10 #number of samples on either side of each crossing to average
	xing_dbx_av,xing_dby_av,xing_dbz_av = np.zeros_like(xing_dbx),np.zeros_like(xing_dby),np.zeros_like(xing_dbz)
	for j,xing in enumerate(xings):
		xing_dbx_av[j] = np.nanmean(dbx[xing-delta:xing+delta])
		xing_dby_av[j] = np.nanmean(dby[xing-delta:xing+delta])
		xing_dbz_av[j] = np.nanmean(dbz[xing-delta:xing+delta])
	
	xing_db = np.column_stack((xing_dbx,xing_dby,xing_dbz))
	xing_db_av = np.column_stack((xing_dbx_av,xing_dby_av,xing_dbz_av))

	h5group.attrs['satnum'] = satnum
	h5group.attrs['code'] = str(code)
	h5group.attrs['jd'] = special_datetime.datetime2jd(dt)

	h5group.create_dataset('sod',data=xing_sod)
	h5group.create_dataset('glat',data=xing_glat)
	h5group.create_dataset('glon',data=xing_glon)
	h5group.create_dataset('dbx',data=xing_dbx)
	h5group.create_dataset('dbxav',data=xing_dbx_av)
	h5group.create_dataset('dby',data=xing_dby)
	h5group.create_dataset('dbyav',data=xing_dby_av)
	h5group.create_dataset('dbz',data=xing_dbz)
	h5group.create_dataset('dbzav',data=xing_dbz_av)
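A hypothetical driver sketch (the file name is illustrative, and ssm_geo_generator stands in for a generator yielding the tuples unpacked above; h5py is assumed):

import h5py

with h5py.File('ssm_passes.h5', 'a') as h5f:
    ssm_geo_to_passdata(h5f, ssm_geo_generator)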
Example #12
def calc_avg_solarwind(dt, oi):
    """
    Calculates a weighted average of several
    solar wind variables n_hours (4 by default) backward
    in time from the closest hourly OMNIWeb
    datum to datetime dt

    In this version the data comes from read_solarwind(dt);
    the oi (omnireader.omni_interval) argument is not used
    """
    n_hours = 4  # hours previous to integrate over
    prev_hour_weight = 0.65  # reduce weighting by this factor each hour back

    jd = special_datetime.datetime2jd(dt)

    sw = read_solarwind(dt)
    om_jd = sw['jd']

    i_first = np.nanargmin(np.abs(om_jd - jd))

    #Get indices of all data in the average
    om_in_avg = list(range(i_first - n_hours, i_first + 1))
    #Geometrically decaying weights: the most recent hour gets weight 1,
    #each hour farther back is reduced by a factor of prev_hour_weight
    weights = [
        prev_hour_weight**n_hours_back
        for n_hours_back in reversed(list(range(n_hours + 1)))
    ]

    #Calculate weighted averages (normalize by the sum of the weights)
    sum_weights = np.sum(weights)
    avgsw = OrderedDict()
    avgsw['Bx'] = np.nansum(sw['Bx'][om_in_avg] * weights) / sum_weights
    avgsw['By'] = np.nansum(sw['By'][om_in_avg] * weights) / sum_weights
    avgsw['Bz'] = np.nansum(sw['Bz'][om_in_avg] * weights) / sum_weights
    avgsw['V'] = np.nansum(sw['V'][om_in_avg] * weights) / sum_weights
    avgsw['Ec'] = np.nansum(sw['Ec'][om_in_avg] * weights) / sum_weights

    return avgsw
Example #13
import datetime

import numpy as np
import matplotlib.pyplot as plt

import ovation_utilities
from geospacepy import satplottools, special_datetime

if __name__ == '__main__':
    startdt = datetime.datetime(2013, 3, 16, 2, 0, 0)
    enddt = datetime.datetime(2013, 3, 16, 4, 0, 0)
    dt = datetime.datetime(2013, 3, 16, 3, 0, 0)

    startjd = special_datetime.datetime2jd(startdt)
    endjd = special_datetime.datetime2jd(enddt)
    jd = special_datetime.datetime2jd(dt)

    f = plt.figure(figsize=(8, 11))
    sw = ovation_utilities.read_solarwind(dt)
    avgsw = ovation_utilities.calc_avg_solarwind(dt)

    inwindow = np.logical_and(sw['jd'] > startjd, sw['jd'] < endjd)

    n_plots = len(avgsw.keys())
    axs = [f.add_subplot(n_plots, 1, i) for i in range(1, n_plots + 1)]
    for i, swvar in enumerate(avgsw):
        axs[i].plot(sw['jd'], sw[swvar], 'k-', label=swvar)
        axs[i].plot(jd, avgsw[swvar], 'bo', label='Average')
        axs[i].set_ylabel(swvar)
        axs[i].legend()
    plt.show()
Example #14
# (C) 2020 University of Colorado AES-CCAR-SEDA (Space Environment Data Analysis) Group
# Written by Liam M. Kilcommons
import pytest
import datetime
import numpy as np
import numpy.testing as nptest
from geospacepy.special_datetime import datetime2jd
from geospacepy.terrestrial_spherical import (eci2ecef, ecef2eci,
                                              ecef_cart2spherical,
                                              ecef_spherical2cart, ecef2enu,
                                              enu2ecef)

example_jd = datetime2jd(datetime.datetime(2010, 5, 29, 9, 13, 30))


def _example_vector(x, y, z, n_vectors):
    """Makes a shape=(n_vector,3) array out of floats x,y,z"""
    R = np.array([x, y, z]).reshape(1, 3)
    return np.broadcast_to(R, (n_vectors, 3))


@pytest.mark.parametrize('n_vectors,jds', [(1, example_jd), (4, example_jd),
                                           (4, np.full((4, ), example_jd)),
                                           (4, np.full((1, 4), example_jd))])
def test_eci_to_ecef_round_trip(n_vectors, jds):
    R_ECI_in = _example_vector(1., 0., 0., n_vectors)
    R_ECEF = eci2ecef(R_ECI_in, jds)
    R_ECI_out = ecef2eci(R_ECEF, jds)
    nptest.assert_allclose(R_ECI_in, R_ECI_out, atol=1e-8, rtol=0.)

Example #15
def solar_position_almanac(jds):
    """
    Finds the apparent solar right ascension and 
    declination for any number of julian dates.

    This algorithm is entitled 'Low precision formulas for the Sun'
    and can be found in section C of the Naval Research Laboratory
    Astronomical Almanac (2019). This formula should also be in
    other editions of the Almanac.

    The Almanac describes this formula as yielding a precision
    better than 1' (1/60 degrees) for the years 1950 to 2050

    Parameters
    ----------

    jds : np.ndarray or float
        Julian dates for which to calculate solar positions

    Returns
    -------

    alpha : np.ndarray or float (matches input)
        Solar apparent right ascension (angle in the equatorial
        plane measured counterclockwise, i.e. eastward, from the
        vernal equinox direction), in radians

    delta : np.ndarray or float (matches input)
        Solar declination (equiv. to subsolar latitude), in radians

    """

    jd_j2000_epoch = datetime2jd(dt_j2000)
    jd2000 = jds - jd_j2000_epoch  #J2000 epoch = 2451545.0

    #Solar mean longitude (degrees)
    L = 280.460 + .9856474 * jd2000
    L = np.mod(L, 360.)

    #Solar mean anomaly (degrees)
    g = 357.528 + 0.9856003 * jd2000
    g = np.mod(g, 360.)

    #Solar ecliptic longitude (degrees)
    g_rad = np.radians(g)
    lam = L + 1.915 * np.sin(g_rad) + .020 * np.sin(2 * g_rad)

    #Solar ecliptic latitude (degrees)
    beta = 0.

    #Obliquity of the ecliptic (degrees)
    epsilon = 23.439 - .0000004 * jd2000

    #Right Ascension: the Almanac gives two formulae; this uses the
    #tangent half-angle series expansion (the arctan form is commented below)
    #Similar to subsolar longitude, but measured counterclockwise from the
    #vernal equinox direction instead of from the prime meridian
    epsilon_r = np.radians(epsilon)
    lam_r = np.radians(lam)
    t = np.tan(epsilon_r / 2)**2.
    f = 180. / np.pi
    alpha = lam - f * t * np.sin(2. * lam_r) + (f / 2.) * t**2 * np.sin(
        4 * lam_r)
    #alpha = np.arctan2(np.cos(epsilon_r)*np.sin(lam_r),np.cos(lam_r))
    alpha_r = np.radians(alpha)

    #Declination (equiv. to subsolar latitude, always < 90.)
    delta = np.degrees(np.arcsin(np.sin(epsilon_r) * np.sin(lam_r)))
    delta_r = np.radians(delta)
    return alpha_r, delta_r
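A minimal usage sketch (the date is illustrative; datetime2jd and the module-level dt_j2000 constant used above are assumed in scope):

import datetime
import numpy as np

jd = datetime2jd(datetime.datetime(2013, 3, 16, 12))
alpha_r, delta_r = solar_position_almanac(jd)
print(np.degrees(alpha_r), np.degrees(delta_r))  # apparent RA and declination, deg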
Example #16
def brekke_moen_solar_conductance(dt,glats,glons,f107):
    """
    Estimate the solar conductance using methods from:

    Cousins, E. D. P., T. Matsuo, and A. D. Richmond (2015), Mapping
    high-latitude ionospheric electrodynamics with SuperDARN and AMPERE

    --which cites--

    Asgeir Brekke, Joran Moen,
    Observations of high latitude ionospheric conductances

    INPUTS
    ------
        dt, datetime.datetime
            Universal date and time to calculate solar conductances for

        glats, np.ndarray
            Geodetic (not geocentric) latitudes

        glons, np.ndarray
            Geographic longitudes

        f107, float
            F10.7 index (daily) used to calculate solar conductance

    OUTPUTS
    -------

        sigp, np.ndarray
            Pedersen conductance at locations (glats,glons)

        sigh, np.ndarray
            Hall conductance at locations (glats,glons)
    """
    szas_rad = sun.solar_zenith_angle(special_datetime.datetime2jd(dt), glats, glons)
    szas = np.rad2deg(szas_rad)

    sigp,sigh = np.zeros_like(glats),np.zeros_like(glats)

    cos65 = np.cos(65/180.*np.pi)
    sigp65  = .5*(f107*cos65)**(2./3)
    sigh65  = 1.8*np.sqrt(f107)*cos65
    sigp100 = sigp65-0.22*(100.-65.)

    in_band = szas <= 65.
    #print "%d/%d Zenith Angles < 65" % (np.count_nonzero(in_band),len(in_band))
    sigp[in_band] = .5*(f107*np.cos(szas_rad[in_band]))**(2./3)
    sigh[in_band] = 1.8*np.sqrt(f107)*np.cos(szas_rad[in_band])

    in_band = np.logical_and(szas >= 65.,szas < 100.)
    #print "%d/%d Zenith Angles > 65 and < 100" % (np.count_nonzero(in_band),len(in_band))
    sigp[in_band] = sigp65-.22*(szas[in_band]-65.)
    sigh[in_band] = sigh65-.27*(szas[in_band]-65.)

    in_band = szas >= 100.
    sigp[in_band] = sigp100-.13*(szas[in_band]-100.)
    sigh[in_band] = sigh65-.27*(szas[in_band]-65.)

    sigp[sigp<.4] = .4
    sigh[sigh<.8] = .8

    #correct for inverse relationship with magnetic field from AMIE code
    #(conductance_models.f90)
    theta = np.radians(90.-glats)
    bbp = np.sqrt(1. - 0.99524*np.sin(theta)**2)*(1. + 0.3*np.cos(theta)**2)
    bbh = np.sqrt(1. - 0.01504*(1.-np.cos(theta)) - 0.97986*np.sin(theta)**2)*(1.0+0.5*np.cos(theta)**2)
    sigp = sigp*1.134/bbp
    sigh = sigh*1.285/bbh

    return sigp,sigh
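A minimal usage sketch (the grid and F10.7 value are illustrative; the sun and special_datetime modules used above are assumed importable):

import datetime
import numpy as np

glats, glons = np.meshgrid(np.arange(50., 90., 5.),
                           np.arange(-180., 180., 30.))
sigp, sigh = brekke_moen_solar_conductance(datetime.datetime(2013, 3, 16, 12),
                                           glats, glons, 120.)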
Example #17
import sys
import datetime

import numpy as np
import matplotlib.pyplot as plt

import ovation_utilities
from geospacepy import satplottools, special_datetime

from logbook import StreamHandler

if __name__ == '__main__':
    StreamHandler(sys.stdout).push_application()

    startdt = datetime.datetime(2013, 3, 16, 2, 0, 0)
    enddt = datetime.datetime(2013, 3, 16, 4, 0, 0)
    dt = datetime.datetime(2013, 3, 16, 3, 0, 0)

    jd = special_datetime.datetime2jd(dt)

    f = plt.figure(figsize=(8, 11))
    sw = ovation_utilities.read_solarwind(dt)
    sw4avg = ovation_utilities.hourly_solarwind_for_average(dt)
    avgsw = ovation_utilities.calc_avg_solarwind(dt)

    n_plots = len(avgsw.keys())
    axs = [f.add_subplot(n_plots, 1, i) for i in range(1, n_plots + 1)]
    for i, swvar in enumerate(avgsw):
        axs[i].plot(sw['jd'], sw[swvar], 'k-', label=swvar)
        axs[i].plot(sw4avg['jd'], sw4avg[swvar], 'b.', label='hour average')
        axs[i].plot(jd, avgsw[swvar], 'go', label='5 hour weighted average')
        axs[i].set_ylabel(swvar)
        axs[i].legend()
    plt.show()
Example #18
def mappable_glow(inputs):
    year, month, day, uts, glats, glons, ele_chan_nfluxes, ele_total_fluxes, ele_avg_energies, test, maxwellian = inputs[:]
    import pyglow098

    dt = datetime.datetime(year, month, day)

    doy = special_datetime.datetime2doy(dt)
    jd = special_datetime.datetime2jd(dt)

    f107, ap, f107p, f107a = get_f107_ap(dt)

    idoy = np.floor(doy)
    idate = (year - 2000 if year >= 2000 else year - 1900) * 1000 + idoy

    uts = uts.flatten()
    glats = glats.flatten()
    glons = glons.flatten()
    ncond = int(len(uts))
    idates = np.ones((ncond, )) * idate
    f107as = np.ones((ncond, )) * f107a
    f107s = np.ones((ncond, )) * f107
    f107ps = np.ones((ncond, )) * f107p
    aps = np.ones((ncond, )) * ap
    efs = ele_total_fluxes.flatten()
    ecs = ele_avg_energies.flatten()
    phij = ele_chan_nfluxes

    print("Start GLOW run for %d SSJ points UT %.1f-%.1f" %
          (ncond, uts[0] / 3600., uts[-1] / 3600.))

    usephij = 0 if maxwellian else 1  #Use SSJ fluxes (1) or only Maxwellian (0)
    #The test flag checks the repeatability of GLOW (i.e. the effect of a bug
    #involving Fortran 'save' variables and intent(out) arguments)

    pyglow098.glowssjcond(idates, uts, glats, glons, f107as, f107s, f107ps,
                          aps, efs, ecs, ncond, phij, usephij)

    pedcond_aur = pyglow098.cglow.pedcond.copy()
    hallcond_aur = pyglow098.cglow.hallcond.copy()

    phitop = pyglow098.cglow.phitop.copy()
    ener = pyglow098.cglow.ener.copy()
    z = pyglow098.cglow.zz.copy() / 1.0e5  #cm->km

    difference_method = False
    if difference_method:
        phij_noaur = np.zeros_like(phij)
        pyglow098.glowssjcond(idates, uts, glats, glons, f107as, f107s, f107ps,
                              aps, efs, ecs, ncond, phij_noaur, usephij)

        pedcond_noaur = pyglow098.cglow.pedcond.copy()
        hallcond_noaur = pyglow098.cglow.hallcond.copy()

        pedcond = pedcond_aur - pedcond_noaur
        hallcond = hallcond_aur - hallcond_noaur
    else:
        pedcond = pedcond_aur
        hallcond = hallcond_aur

    print("End GLOW run for %d SSJ points UT %.1f-%.1f" %
          (ncond, uts[0] / 3600., uts[-1] / 3600.))

    #Apply 200 km ceiling (above which GLOW is not so reliable)
    ped = pedcond[:, z < 200.]
    hall = hallcond[:, z < 200.]
    z = z[z < 200.]

    #ped[np.logical_not(np.isfinite(ped))]=np.nanmedian(ped.flatten())
    #hall[np.logical_not(np.isfinite(hall))]=np.nanmedian(hall.flatten())

    intped = np.trapz(ped, x=z * 1000.)  # Conductivity units of S/m?
    inthall = np.trapz(hall, x=z * 1000.)  # Conductivity units of S/m?

    return z, ped, hall, intped, inthall
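Given the 'mappable' name, a hypothetical multiprocessing driver (inputs_list is illustrative and must hold one tuple per GLOW run, matching the unpacking at the top of the function):

from multiprocessing import Pool

with Pool(4) as pool:
    results = pool.map(mappable_glow, inputs_list)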