# In[64]:


new_df["dt"]=False
for i in enumerate(new_df["date"]):
    new_df["dt"].iloc[i[0]] = datetime.datetime.strptime(i[1],fmt)


# In[65]:


import julian
new_df["julian"] = False
for i in enumerate(new_df["dt"]):
    jd = julian.to_jd(i[1] + datetime.timedelta(hours=12), fmt = "jd")
    new_df["julian"].iloc[i[0]] = jd


# In[75]:


#Find the number of unique dates.
#Set an arbitrary value (maybe 1; redo hyperparameter testing) at the index of each date.
unique = set(new_df["julian"])
print(len(unique))
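
# A minimal sketch of the indexing idea above: map each unique Julian date to an
# integer index (the placeholder value to assign is still to be tuned).
date_to_index = {jd: k for k, jd in enumerate(sorted(unique))}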

#-------------------------Setup---------------------------------

# Importing libraries
import numpy as np
import julian
from datetime import datetime, timedelta
from sgp4.earth_gravity import wgs84
from sgp4.io import twoline2rv

# Example ISS (Zarya) TLE
line1 = (
    '1 25544U 98067A   19302.69799672  .00001237  00000-0  29468-4 0  9996')
line2 = (
    '2 25544  51.6449  54.3108 0006382 201.3316 303.7490 15.50231045196128')

# Simulation Parameters
tstart = datetime(2019, 10, 30, 00, 00, 00)
tspan = np.array([0, 8640])  # [sec]
tstep = .1  # [sec] - 10 Hz

# Initialize
ISS_sgp4 = twoline2rv(line1, line2, wgs84)
r_i, v_i = ISS_sgp4.propagate(tstart.year, tstart.month, tstart.day,
                              tstart.hour, tstart.minute, tstart.second)
q_i = np.array([1, 0, 0, 0])
w_i = np.array([.1, .5, -.3])
state_i = np.r_[r_i, q_i, v_i, w_i]

mjd_start = julian.to_jd(tstart, 'mjd')
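
# A sketch of stepping the state forward, assuming the legacy sgp4 1.x API seeded
# above: propagate() takes a calendar date, so convert elapsed seconds to a datetime.
t1 = tstart + timedelta(seconds=tstep)
r_1, v_1 = ISS_sgp4.propagate(t1.year, t1.month, t1.day, t1.hour, t1.minute,
                              t1.second + t1.microsecond * 1e-6)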

#--------------------------------------------------------------
Example 3
import datetime
import julian

def convert_to_jd(datetime_tuple):
    # e.g. datetime_tuple = (2019, 12, 31, 0, 0, 0)
    d = datetime.datetime(*datetime_tuple)
    return julian.to_jd(d)
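
# Usage: convert_to_jd((2019, 12, 31, 0, 0, 0)) returns 2458848.5, since the default
# fmt is 'jd' and Julian days begin at noon.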
Example 4
pickle.dump(fall_data, open('pickles/Fall_OAK_soundings.p', 'wb'))

for i in range(len(seasons)):
    print(seasons[i])

    season_data = pickle.load(
        open('pickles/' + seasons[i] + '_OAK_soundings.p', 'rb'))
    cols = season_data.columns

    months = season_data[cols[1]].values
    days = season_data[cols[2]].values
    years = season_data[cols[0]].values
    hours = season_data[cols[3]].values
    jd = np.zeros(len(months))

    for j in range(len(months)):

        # Wrap a nonstandard hour-24 stamp back to 0 (note: the date is not advanced).
        if int(hours[j]) > 23:
            hours[j] = 0
        dt = datetime.datetime(year=int(years[j]),
                               month=int(months[j]),
                               day=int(days[j]),
                               hour=int(hours[j]),
                               tzinfo=timezone.utc)
        jd[j] = julian.to_jd(dt, fmt='jd')

    season_data['JD'] = pd.Series(jd, index=season_data.index)
    pickle.dump(season_data,
                open('pickles/' + seasons[i] + '_OAK_soundings.p', 'wb'))

print('end of script')
Example 5
def date2julian(gdate=None):
    # Default to the current time when no datetime is supplied.
    if gdate is None:
        gdate = datetime.now()
    jd = julian.to_jd(gdate, fmt='jd')
    return float(jd)
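
# Usage: date2julian(datetime(2000, 1, 1, 12)) returns 2451545.0, the J2000.0 epoch.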
Example 6
def _calculate_current_julian_day(self, dt: datetime) -> float:
    """
    Return the current Julian day, offset by the module-level EPOCH and LEAP constants.
    """
    j = round(julian.to_jd(dt))
    return j - EPOCH + LEAP
Example 7
def dt2julian(dt):
    jd = julian.to_jd(dt) - JULIAN_1970
    return jd
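
# Note: JULIAN_1970 is presumably the Julian date of the Unix epoch, 2440587.5
# (1970-01-01 00:00 UTC), so dt2julian returns days elapsed since 1970.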
Example 8
def centroider(target,
               sources,
               output_plots=False,
               gif=False,
               restore=False,
               box_w=8):
    matplotlib.use('TkAgg')
    plt.ioff()
    t1 = time.time()
    pines_path = pines_dir_check()
    short_name = short_name_creator(target)

    kernel = Gaussian2DKernel(x_stddev=1)  #For fixing nans in cutouts.

    #If restore == True, read in existing output and return.
    if restore:
        centroid_df = pd.read_csv(
            pines_path / ('Objects/' + short_name +
                          '/sources/target_and_references_centroids.csv'),
            converters={
                'X Centroids': eval,
                'Y Centroids': eval
            })
        print('Restoring centroider output from {}.'.format(
            pines_path / ('Objects/' + short_name +
                          '/sources/target_and_references_centroids.csv')))
        print('')
        return centroid_df

    #Create subdirectories in sources folder to contain output plots.
    if output_plots:
        subdirs = glob(
            str(pines_path / ('Objects/' + short_name + '/sources')) + '/*/')
        #Delete any source directories that are already there.
        for name in subdirs:
            shutil.rmtree(name)

        #Create new source directories.
        for name in sources['Name']:
            source_path = (
                pines_path /
                ('Objects/' + short_name + '/sources/' + name + '/'))
            os.mkdir(source_path)

    #Read in extra shifts, in case the master image wasn't used for source detection.
    extra_shift_path = pines_path / ('Objects/' + short_name +
                                     '/sources/extra_shifts.txt')
    extra_shifts = pd.read_csv(extra_shift_path,
                               delimiter=' ',
                               names=['Extra X shift', 'Extra Y shift'])
    extra_x_shift = extra_shifts['Extra X shift'][0]
    extra_y_shift = extra_shifts['Extra Y shift'][0]

    np.seterr(
        divide='ignore', invalid='ignore'
    )  #Suppress some warnings we don't care about in median combining.

    #Get list of reduced files for target.
    reduced_path = pines_path / ('Objects/' + short_name + '/reduced')
    reduced_filenames = natsort.natsorted(
        [x.name for x in reduced_path.glob('*red.fits')])
    reduced_files = np.array([reduced_path / i for i in reduced_filenames])

    #Declare a new dataframe to hold the centroid information for all sources we want to track.
    columns = []
    columns.append('Filename')
    columns.append('Seeing')
    columns.append('Time (JD UTC)')
    columns.append('Airmass')

    #Add x/y positions and centroid flags for every tracked source.
    for i in range(0, len(sources)):
        columns.append(sources['Name'][i] + ' Image X')
        columns.append(sources['Name'][i] + ' Image Y')
        columns.append(sources['Name'][i] + ' Cutout X')
        columns.append(sources['Name'][i] + ' Cutout Y')
        columns.append(sources['Name'][i] + ' Centroid Warning')

    centroid_df = pd.DataFrame(index=range(len(reduced_files)),
                               columns=columns)

    log_path = pines_path / ('Logs/')
    log_dates = np.array(
        natsort.natsorted(
            [x.name.split('_')[0] for x in log_path.glob('*.txt')]))

    #Make sure we have logs for all the nights of these data. Need them to account for image shifts.
    nights = list(set([i.name.split('.')[0] for i in reduced_files]))
    for i in nights:
        if i not in log_dates:
            print('ERROR: {} not in {}. Download it from the PINES server.'.
                  format(i + '_log.txt', log_path))
            pdb.set_trace()

    shift_tolerance = 2.0  #Number of pixels that the measured centroid can be away from the expected position in either x or y before trying other centroiding algorithms.
    for i in range(len(sources)):
        #Get the initial source position.
        x_pos = sources['Source Detect X'][i]
        y_pos = sources['Source Detect Y'][i]
        print('')
        print(
            'Getting centroids for {}, ({:3.1f}, {:3.1f}) in source detection image. Source {} of {}.'
            .format(sources['Name'][i], x_pos, y_pos, i + 1, len(sources)))
        if output_plots:
            print('Saving centroid plots to {}.'.format(
                pines_path / ('Objects/' + short_name + '/sources/' +
                              sources['Name'][i] + '/')))
        pbar = ProgressBar()
        for j in pbar(range(len(reduced_files))):
            centroid_df[sources['Name'][i] + ' Centroid Warning'][j] = 0
            file = reduced_files[j]
            image = fits.open(file)[0].data
            #Get the measured image shift for this image.
            log = pines_log_reader(log_path /
                                   (file.name.split('.')[0] + '_log.txt'))
            log_ind = np.where(log['Filename'] == file.name.split('_')[0] +
                               '.fits')[0][0]

            x_shift = float(log['X shift'][log_ind])
            y_shift = float(log['Y shift'][log_ind])

            #Save the filename for readability. Save the seeing for use in variable aperture photometry. Save the time for diagnostic plots.
            if i == 0:
                centroid_df['Filename'][j] = file.name.split('_')[0] + '.fits'
                centroid_df['Seeing'][j] = log['X seeing'][log_ind]
                time_str = fits.open(file)[0].header['DATE-OBS']

                #Correct some formatting issues that can occur in Mimir time stamps.
                if time_str.split(':')[-1] == '60.00':
                    time_str = time_str[0:14] + str(
                        int(time_str.split(':')[-2]) + 1) + ':00.00'
                elif time_str.split(':')[-1] == '010.00':
                    time_str = time_str[0:17] + time_str.split(':')[-1][1:]

                centroid_df['Time (JD UTC)'][j] = julian.to_jd(
                    datetime.datetime.strptime(time_str,
                                               '%Y-%m-%dT%H:%M:%S.%f'))
                centroid_df['Airmass'][j] = log['Airmass'][log_ind]

            nan_flag = False  #Flag indicating if you should not trust the log's shifts. Set to true if x_shift/y_shift are 'nan' or > 200 pixels.

            #If bad shifts were measured for this image, skip.
            if log['Shift quality flag'][log_ind] == 1:
                continue

            if np.isnan(x_shift) or np.isnan(y_shift):
                x_shift = 0
                y_shift = 0
                nan_flag = True

            #If there are clouds, shifts could have been erroneously high...just zero them?
            if abs(x_shift) > 200:
                #x_shift = 0
                nan_flag = True
            if abs(y_shift) > 200:
                #y_shift = 0
                nan_flag = True

            #Apply the shift. NOTE: This relies on having accurate x_shift and y_shift values from the log.
            #If they're incorrect, the cutout will not be in the right place.
            #x_pos = sources['Source Detect X'][i] - x_shift + extra_x_shift
            #y_pos = sources['Source Detect Y'][i] + y_shift - extra_y_shift

            x_pos = sources['Source Detect X'][i] - (x_shift - extra_x_shift)
            y_pos = sources['Source Detect Y'][i] + (y_shift - extra_y_shift)

            #TODO: Make all this its own function.

            #Cutout around the expected position and interpolate over any NaNs (which screw up source detection).
            cutout = interpolate_replace_nans(
                image[int(y_pos - box_w):int(y_pos + box_w) + 1,
                      int(x_pos - box_w):int(x_pos + box_w) + 1],
                kernel=Gaussian2DKernel(x_stddev=0.5))

            #interpolate_replace_nans struggles with edge pixels, so shave off edge_shave pixels in each direction of the cutout.
            edge_shave = 1
            cutout = cutout[edge_shave:len(cutout) - edge_shave,
                            edge_shave:len(cutout) - edge_shave]

            vals, lower, upper = sigmaclip(
                cutout, low=1.5,
                high=2.5)  #Get sigma clipped stats on the cutout
            med = np.nanmedian(vals)
            std = np.nanstd(vals)

            try:
                centroid_x_cutout, centroid_y_cutout = centroid_2dg(
                    cutout - med)  #Perform centroid detection on the cutout.
            except:
                pdb.set_trace()

            centroid_x = centroid_x_cutout + int(
                x_pos
            ) - box_w + edge_shave  #Translate the detected centroid from the cutout coordinates back to the full-frame coordinates.
            centroid_y = centroid_y_cutout + int(y_pos) - box_w + edge_shave

            # if i == 0:
            #     qp(cutout)
            #     plt.plot(centroid_x_cutout, centroid_y_cutout, 'rx')

            #     # qp(image)
            #     # plt.plot(centroid_x, centroid_y, 'rx')
            #     pdb.set_trace()

            #If the shifts in the log are not 'nan' or > 200 pixels, check whether the measured centroid is within shift_tolerance pixels of the expected position.
            #   If it isn't, try alternate centroiding methods to find it.

            #Otherwise, keep the centroid as measured with centroid_2dg above. PINES_watchdog likely failed while observing, and we don't expect the centroids measured here to actually be at the expected position.
            if not nan_flag:
                #Try a 2D Gaussian detection.
                if (abs(centroid_x - x_pos) > shift_tolerance) or (
                        abs(centroid_y - y_pos) > shift_tolerance):
                    centroid_x_cutout, centroid_y_cutout = centroid_2dg(
                        cutout - med)
                    centroid_x = centroid_x_cutout + int(x_pos) - box_w
                    centroid_y = centroid_y_cutout + int(y_pos) - box_w

                    #If that fails, try a COM detection.
                    if (abs(centroid_x - x_pos) > shift_tolerance) or (
                            abs(centroid_y - y_pos) > shift_tolerance):
                        centroid_x_cutout, centroid_y_cutout = centroid_com(
                            cutout - med)
                        centroid_x = centroid_x_cutout + int(x_pos) - box_w
                        centroid_y = centroid_y_cutout + int(y_pos) - box_w

                        #If that fails, try masking source and interpolate over any bad pixels that aren't in the bad pixel mask, then redo 1D gaussian detection.
                        if (abs(centroid_x - x_pos) > shift_tolerance) or (
                                abs(centroid_y - y_pos) > shift_tolerance):
                            mask = make_source_mask(cutout,
                                                    nsigma=4,
                                                    npixels=5,
                                                    dilate_size=3)
                            vals, lo, hi = sigmaclip(cutout[~mask])
                            bad_locs = np.where((mask == False) & (
                                (cutout > hi) | (cutout < lo)))
                            cutout[bad_locs] = np.nan
                            cutout = interpolate_replace_nans(
                                cutout, kernel=Gaussian2DKernel(x_stddev=0.5))

                            centroid_x_cutout, centroid_y_cutout = centroid_1dg(
                                cutout - med)
                            centroid_x = centroid_x_cutout + int(x_pos) - box_w
                            centroid_y = centroid_y_cutout + int(y_pos) - box_w

                            #Try a 2D Gaussian detection on the interpolated cutout
                            if (abs(centroid_x - x_pos) > shift_tolerance) or (
                                    abs(centroid_y - y_pos) > shift_tolerance):
                                centroid_x_cutout, centroid_y_cutout = centroid_2dg(
                                    cutout - med)
                                centroid_x = centroid_x_cutout + int(
                                    x_pos) - box_w
                                centroid_y = centroid_y_cutout + int(
                                    y_pos) - box_w

                                #Try a COM on the interpolated cutout.
                                if (abs(centroid_x - x_pos) > shift_tolerance
                                    ) or (abs(centroid_y - y_pos) >
                                          shift_tolerance):
                                    centroid_x_cutout, centroid_y_cutout = centroid_com(
                                        cutout)
                                    centroid_x = centroid_x_cutout + int(
                                        x_pos) - box_w
                                    centroid_y = centroid_y_cutout + int(
                                        y_pos) - box_w

                                    #Last resort: try cutting off the edge of the cutout. Edge pixels can experience poor interpolation, and this sometimes helps.
                                    if (abs(centroid_x - x_pos) >
                                            shift_tolerance) or (
                                                abs(centroid_y - y_pos) >
                                                shift_tolerance):
                                        cutout = cutout[1:-1, 1:-1]
                                        centroid_x_cutout, centroid_y_cutout = centroid_1dg(
                                            cutout - med)
                                        centroid_x = centroid_x_cutout + int(
                                            x_pos) - box_w + 1
                                        centroid_y = centroid_y_cutout + int(
                                            y_pos) - box_w + 1

                                        #Try with a 2DG
                                        if (abs(centroid_x - x_pos) >
                                                shift_tolerance) or (
                                                    abs(centroid_y - y_pos) >
                                                    shift_tolerance):
                                            centroid_x_cutout, centroid_y_cutout = centroid_2dg(
                                                cutout - med)
                                            centroid_x = centroid_x_cutout + int(
                                                x_pos) - box_w + 1
                                            centroid_y = centroid_y_cutout + int(
                                                y_pos) - box_w + 1

                                            #If ALL that fails, report the expected position as the centroid.
                                            if (abs(centroid_x - x_pos) >
                                                    shift_tolerance) or (
                                                        abs(centroid_y - y_pos)
                                                        > shift_tolerance):
                                                print(
                                                    'WARNING: large centroid deviation measured, returning predicted position'
                                                )
                                                print('')
                                                centroid_df[
                                                    sources['Name'][i] +
                                                    ' Centroid Warning'][j] = 1
                                                centroid_x = x_pos
                                                centroid_y = y_pos
                                                #pdb.set_trace()

            #Check that your measured position is actually on the detector.
            if (centroid_x < 0) or (centroid_y < 0) or (centroid_x > 1023) or (
                    centroid_y > 1023):
                #Try a quick mask/interpolation of the cutout.
                mask = make_source_mask(cutout,
                                        nsigma=3,
                                        npixels=5,
                                        dilate_size=3)
                vals, lo, hi = sigmaclip(cutout[~mask])
                bad_locs = np.where((mask == False)
                                    & ((cutout > hi) | (cutout < lo)))
                cutout[bad_locs] = np.nan
                cutout = interpolate_replace_nans(
                    cutout, kernel=Gaussian2DKernel(x_stddev=0.5))
                centroid_x, centroid_y = centroid_2dg(cutout - med)
                centroid_x += int(x_pos) - box_w
                centroid_y += int(y_pos) - box_w
                if (centroid_x < 0) or (centroid_y < 0) or (
                        centroid_x > 1023) or (centroid_y > 1023):
                    print(
                        'WARNING: large centroid deviation measured, returning predicted position'
                    )
                    print('')
                    centroid_df[sources['Name'][i] +
                                ' Centroid Warning'][j] = 1
                    centroid_x = x_pos
                    centroid_y = y_pos
                    #pdb.set_trace()

            #Check to make sure you didn't measure NaNs.
            if np.isnan(centroid_x):
                centroid_x = x_pos
                print(
                    'NaN returned from centroid algorithm, defaulting to target position in source_detect_image.'
                )
            if np.isnan(centroid_y):
                centroid_y = y_pos
                print(
                    'NaN returned from centroid algorithm, defaulting to target position in source_detect_image.'
                )

            #Record the image and relative cutout positions.
            centroid_df[sources['Name'][i] + ' Image X'][j] = centroid_x
            centroid_df[sources['Name'][i] + ' Image Y'][j] = centroid_y
            centroid_df[sources['Name'][i] +
                        ' Cutout X'][j] = centroid_x_cutout
            centroid_df[sources['Name'][i] +
                        ' Cutout Y'][j] = centroid_y_cutout

            if output_plots:
                #Plot
                lock_x = int(centroid_df[sources['Name'][i] + ' Image X'][0])
                lock_y = int(centroid_df[sources['Name'][i] + ' Image Y'][0])
                norm = ImageNormalize(data=cutout, interval=ZScaleInterval())
                plt.imshow(image, origin='lower', norm=norm)
                plt.plot(centroid_x, centroid_y, 'rx')
                ap = CircularAperture((centroid_x, centroid_y), r=5)
                ap.plot(lw=2, color='b')
                plt.ylim(lock_y - 30, lock_y + 30 - 1)
                plt.xlim(lock_x - 30, lock_x + 30 - 1)
                plt.title('CENTROID DIAGNOSTIC PLOT\n' + sources['Name'][i] +
                          ', ' + reduced_files[j].name + ' (image ' +
                          str(j + 1) + ' of ' + str(len(reduced_files)) + ')',
                          fontsize=10)
                plt.text(centroid_x,
                         centroid_y + 0.5,
                         '(' + str(np.round(centroid_x, 1)) + ', ' +
                         str(np.round(centroid_y, 1)) + ')',
                         color='r',
                         ha='center')
                plot_output_path = (
                    pines_path /
                    ('Objects/' + short_name + '/sources/' +
                     sources['Name'][i] + '/' + str(j).zfill(4) + '.jpg'))
                plt.gca().set_axis_off()
                plt.subplots_adjust(top=1,
                                    bottom=0,
                                    right=1,
                                    left=0,
                                    hspace=0,
                                    wspace=0)
                plt.margins(0, 0)
                plt.gca().xaxis.set_major_locator(plt.NullLocator())
                plt.gca().yaxis.set_major_locator(plt.NullLocator())
                plt.savefig(plot_output_path,
                            bbox_inches='tight',
                            pad_inches=0,
                            dpi=150)
                plt.close()

        if gif:
            gif_path = (pines_path / ('Objects/' + short_name + '/sources/' +
                                      sources['Name'][i] + '/'))
            gif_maker(path=gif_path, fps=10)

    output_filename = pines_path / (
        'Objects/' + short_name +
        '/sources/target_and_references_centroids.csv')
    #centroid_df.to_csv(pines_path/('Objects/'+short_name+'/sources/target_and_references_centroids.csv'))

    print('Saving centroiding output to {}.'.format(output_filename))
    with open(output_filename, 'w') as f:
        for j in range(len(centroid_df)):
            #Write the header line.
            if j == 0:
                f.write('{:<17s}, '.format('Filename'))
                f.write('{:<15s}, '.format('Time (JD UTC)'))
                f.write('{:<6s}, '.format('Seeing'))
                f.write('{:<7s}, '.format('Airmass'))
                for i in range(len(sources['Name'])):
                    n = sources['Name'][i]
                    if i != len(sources['Name']) - 1:
                        f.write(
                            '{:<23s}, {:<23s}, {:<24s}, {:<24s}, {:<34s}, '.
                            format(n + ' Image X', n + ' Image Y',
                                   n + ' Cutout X', n + ' Cutout Y',
                                   n + ' Centroid Warning'))
                    else:
                        f.write(
                            '{:<23s}, {:<23s}, {:<24s}, {:<24s}, {:<34s}\n'.
                            format(n + ' Image X', n + ' Image Y',
                                   n + ' Cutout X', n + ' Cutout Y',
                                   n + ' Centroid Warning'))

            #Write in the data lines.
            try:
                f.write('{:<17s}, '.format(centroid_df['Filename'][j]))
                f.write('{:<15.7f}, '.format(centroid_df['Time (JD UTC)'][j]))
                f.write('{:<6.1f}, '.format(float(centroid_df['Seeing'][j])))
                f.write('{:<7.2f}, '.format(centroid_df['Airmass'][j]))
            except:
                pdb.set_trace()

            for i in range(len(sources['Name'])):
                n = sources['Name'][i]
                if i != len(sources['Name']) - 1:
                    format_string = '{:<23.4f}, {:<23.4f}, {:<24.4f}, {:<24.4f}, {:<34d}, '
                else:
                    format_string = '{:<23.4f}, {:<23.4f}, {:<24.4f}, {:<24.4f}, {:<34d}\n'

                f.write(
                    format_string.format(
                        centroid_df[n + ' Image X'][j],
                        centroid_df[n + ' Image Y'][j],
                        centroid_df[n + ' Cutout X'][j],
                        centroid_df[n + ' Cutout Y'][j],
                        centroid_df[n + ' Centroid Warning'][j]))
    np.seterr(divide='warn', invalid='warn')
    print('')
    print('centroider runtime: {:.2f} minutes.'.format(
        (time.time() - t1) / 60))
    print('')
    return centroid_df
Example 9
def extrapolate_header(filepath):
    """
    Gleans as much of the information that would normally be in a header from a file that the read_file function has determined to lack one,
    and populates it into that file's dictionary.

    param filepath: The path to that particular file

    returns extrapolated_header: dict with extrapolated header information derived from file name
    """
    extrapolated_header = {}
    
    # Gleaning information from a file that does not contain a header.
    filename = filepath.split("/")[-1]  # split the filepath back down to just the filename
    extrapolated_header.update({"filename": filename})
    # Split the filename into its naming components (there's no header, so we glean info from the filename).
    filename = re.split('[_.]', filename)

    # Getting the date from the file's modification time. (The filename also encodes a date,
    # usually MMDDYY but sometimes DDMMYY, so any file without a header should be viewed with some suspicion.)
    unix_timestamp = os.path.getmtime(filepath)
    date = datetime.datetime.utcfromtimestamp(unix_timestamp)
    extrapolated_header.update({"date": date.strftime('%Y-%m-%d %H:%M:%S')})
    # Calculating MJD (standard relation: MJD = JD - 2400000.5)...
    jd = julian.to_jd(date + datetime.timedelta(hours=12), fmt='jd')
    mjd = jd - 2400000.5
    mjd = Decimal(mjd).quantize(Decimal('0.001'), rounding=ROUND_DOWN)
    extrapolated_header.update({"mjd": mjd})
    # Getting Az and El from their location in the filename again, two numbers (this is pretty standard, so we can trust these values unlike dates)
    extrapolated_header.update({"azimuth (deg)":float(filename[7][2:])})
    extrapolated_header.update({"elevation (deg)":float(filename[8][2:])})
    # We don't have feed, so just add a NaN instead.
    extrapolated_header.update({"feed": "NaN"})
    extrapolated_header.update({"frontend": str(filename[2])})
    extrapolated_header.update({"projid": "NaN"})
    extrapolated_header.update({"frequency_resolution (MHz)": "NaN"})
    extrapolated_header.update({"Window": "NaN"})
    extrapolated_header.update({"exposure": "NaN"})
    # Calculating utc from date (again, to be viewed with some suspicion)
    utc_hr = (float(date.strftime("%H")))
    utc_min = (float(date.strftime("%M"))/60.0 )
    utc_sec = (float(date.strftime("%S"))/3600.0)
    utc = utc_hr+utc_min+utc_sec
    extrapolated_header.update({"utc (hrs)": utc})
    extrapolated_header.update({"number_IF_Windows": "NaN"})
    extrapolated_header.update({"Channel": "NaN"})
    extrapolated_header.update({"backend": "NaN"})
    # View LST with suspicion
    year_formatted = date.strftime('%Y')[2:]
    utc_formatted = date.strftime('%m%d'+year_formatted+' %H%M')
    LSThh,LSTmm,LSTss = rfitrends.LST_calculator.LST_calculator(utc_formatted)
    LST = LSThh + LSTmm/60.0 + LSTss/3600.0
    extrapolated_header.update({"lst (hrs)": LST})

    extrapolated_header.update({"polarization":filename[6]})
    extrapolated_header.update({"source":"NaN"})
    extrapolated_header.update({"tsys":"NaN"})
    extrapolated_header.update({"frequency_type":"NaN"})
    extrapolated_header.update({"Units":"Jy"})
    extrapolated_header.update({"scan_number":"NaN"})

    # We assume that if there's no header we will only have Frequency and Intensity columns.
    # So far this has held, but it is an assumption to acknowledge.
    extrapolated_header['Column names'] = ["Frequency (MHz)","Intensity (Jy)"]
    return extrapolated_header
Example 10
        if row[5][3:] in list_month:
            #print(row[1]+'-'+row[5]+'-'+row[6])
            st = str(list_month.index(row[5][3:]) + 1)
            #print(st.zfill(2))
            ###create the identifier in YYYYMMDDHHMM format from the SEP start time
            identifier = (row[1] + st.zfill(2) + row[5][:2] +
                          row[6][:-3].zfill(2) + row[6][-2:])
            print(identifier)

            dt = datetime(int(identifier[:4]), int(identifier[4:6]),
                          int(identifier[6:8]), int(identifier[8:10]),
                          int(identifier[10:12]), 0, 0)
            print(dt)
            ###convert to a Julian date for an easy search process
            jd = julian.to_jd(dt, fmt='jd')
            print(jd)
            t_array = []
            for i in range(4):  ####array of date folder strings
                t = julian.from_jd(jd - i, fmt='jd')
                month = str(t.month).zfill(2)
                day = str(t.day).zfill(2)
                ar = str(t.year) + '/' + month + '/' + day + '/'
                t_array.append(ar)
            print(t_array)
            for goes in goes_ar:
                path = root + identifier + '/x_ray/' + goes
                for i in range(4):
                    archive_url = url + goes + '/' + t_array[i]
                    r = requests.get(archive_url)
Example 11
def to_mjd(num):
    return julian.to_jd(date(num), fmt='mjd')
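
# Note: `date(num)` is whatever date-construction helper the surrounding module defines;
# julian.to_jd with fmt='mjd' returns the Modified Julian Date directly.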
Example 12
        reference = ""
        for i in dataset.variables["REFERENCE_DATE_TIME"]:
            reference+=str(i)[2]

        psal_qc = dataset.variables["PSAL_QC"][0][:]
        temp_qc = dataset.variables["TEMP_QC"][0][:]
        for index in range(len(psal_qc))[::-1]:
            i = psal_qc[index]
            if psal_qc[index] != b'1' or temp_qc[index] != b'1' :
                #print(num,outDict["cycle"],i,pressuresOut[index])
                pressuresOut.pop(index)
                tempsOut.pop(index)
                densitiesOut.pop(index)
                salinitiesOut.pop(index)

        reference = julian.to_jd(datetime.datetime(int(reference[0:4]),int(reference[4:6]),int(reference[6:8])))
        x = (reference + int(dataset.variables["JULD"][0]))
        dt = julian.from_jd(x)
        #print(dt)
        outDict["date"] = str(dt)
        #print(str(dt))
        output.append(outDict)
        #profilePlotter(densitiesOut,pressuresOut,HolteAndTalley(pressuresOut,tempsOut,salinitiesOut,densitiesOut).densityMLD)
        
with open('profiles.json', 'w') as outfile:
    json.dump(output, outfile)
    #print(len(output))

fig = plt.figure(figsize=(8, 6), edgecolor='w')
m = Basemap(projection='cyl', resolution=None,
            lat_0=0, lon_0=90)
     x_index = row.index('Longitude')
 else:
     inner_count = 0
     matched_rows = []
     matched_rows = find_matched_time(leak_lists, row[origin_index])
     for leak_list in matched_rows:
         lat_index = leak_lists[0].index('Latitude')
         long_index = leak_lists[0].index('Longitude')
         x_block.append(float(leak_list[long_index]))
         y_block.append(float(leak_list[lat_index]))
         id_block.append(leak_list[leak_id])
         methane_block.append(float(leak_list[meth_index]))
         formated_time = (row[origin_index])[:-5].replace('T', ' ')
         formated_time = datetime.datetime.strptime(
             formated_time, '%Y-%m-%d %H:%M:%S')
         jul_time = julian.to_jd(formated_time, fmt='jd')
         db.insert_table(con, curs, leak_list[leak_id],
                         leak_list[meth_index],
                         leak_list[long_index],
                         leak_list[lat_index], jul_time, "leaking")
         inner_count += 1
     x = float(row[x_index])
     y = float(row[y_index])
     formated_time = (row[origin_index])[:-5].replace('T', ' ')
     formated_time = datetime.datetime.strptime(
         formated_time, '%Y-%m-%d %H:%M:%S')
     jul_time = julian.to_jd(formated_time, fmt='jd')
     if bool(wind_activated):
         IDW.get_db_config(db, con, curs)
     ch4 = IDW.run(x, y, x_block, y_block, methane_block, 2,
                   bool(wind_activated), wind_speed, wind_direction,
Example 14
def date_to_eq(date, planet):
    return kernel[0, planet].compute(julian.to_jd(date, fmt='jd'))
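
# Note: `kernel` is presumably a jplephem SPK ephemeris loaded elsewhere;
# kernel[0, planet].compute(jd) evaluates that segment's position at the given Julian date.
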
def home():
    sat_list = Satellite.query.all()
    satellite_dict = dict()
    incorrect_value_flag = False
    for satellite in sat_list:
        satellite_dict[satellite.name] = satellite.norad_number
    
    time_diff = 0
    current_time = j.to_jd(dt.datetime.now(orbits.utc), fmt='jd')
    
    if 'satellite' in session:
        time_diff = (current_time - session['retrival_time'])
    retrival_time_local = dt.datetime.now(orbits.utc)
    if time_diff > 0.0694 and 'satellite' in session:
        session['retrival_time'] = j.to_jd(dt.datetime.now(orbits.utc), fmt='jd')
        while True:
             try:
                 timezone_str = tf.timezone_at(lng = session['user_coord'][1],lat = session['user_coord'][0])
                 timezone = pytz.timezone(timezone_str)
             except:
                 print("invalid timezone")
                 session['user_coord'][0] = session['user_coord'][0] + 1
                 continue
             break
        retrival_time_local= dt.datetime.now(timezone)
    
    if session.get('satellite') is None:
        session['user_coord'] = [40.0,0.0]
        session['satellite_coord'] = [0.0,0.0]
        session['satellite_name'] = "Hubble Space Telescope"
        session['satellite'] = satellite_dict[session['satellite_name']]
        session['tle'] = get_satellite_tle(session['satellite'])
        while True:
             try:
                 timezone_str = tf.timezone_at(lng = session['user_coord'][1],lat = session['user_coord'][0])
                 timezone = pytz.timezone(timezone_str)
             except:
                 print("invalid timezone")
                 session['user_coord'][0] = session['user_coord'][0] + 1
                 continue
             break
        session['retrival_time'] = j.to_jd(dt.datetime.now(orbits.utc), fmt='jd')
        retrival_time_local= dt.datetime.now(timezone)
    
    if request.method == "POST":
        new_coords = [float(request.form["latitude"]), float(request.form["longitude"])]
        satellite_re = '^[0-9]{5}$'
        print(repr(request.form["satellite_id"]))
        match = re.match(satellite_re,request.form["satellite_id"])
        if match:
            print("id")
            try:
                new_id = request.form["satellite_id"]
            except:
                new_id = session['satellite']
                incorrect_value_flag = True
        else:
            print("name")
            try:
                new_id = satellite_dict[request.form["satellite_id"]]
            except:
                new_id = session['satellite']
                incorrect_value_flag = True
        if new_id != session['satellite']  or new_coords != session['user_coord']:
            session['user_coord'] = new_coords
            while True:
                try:
                    timezone_str = tf.timezone_at(lng = session['user_coord'][1],lat = session['user_coord'][0])
                    timezone = pytz.timezone(timezone_str)
                except:
                    print("invalid timezone")
                    session['user_coord'][0] = session['user_coord'][0] + 1
                    continue
                break
            tle = get_satellite_tle(new_id)
            session['tle'] = tle
            session['retrival_time'] = j.to_jd(dt.datetime.now(orbits.utc), fmt='jd')
            session['satellite'] = new_id
            session['satellite_name'] = request.form["satellite_id"]
            retrival_time_local= dt.datetime.now(timezone)
    orbit_propogation = orbits.propogate_orbit(session['tle'], session['user_coord'])
    temp_coords = []
    for coord_array in orbit_propogation:
        temp_coords.extend(coord_array)
    
    sat_passes = orbits.check_passes(temp_coords, session['user_coord'], orbits.getSemiMajorAxis(session['tle']['mean motion']), retrival_time_local)
    startTime = sat_passes['start time']
    startAzimuth = sat_passes['start azimuth']
    endTime = sat_passes['end time']
    endAzimuth = sat_passes['end azimuth']
    date = sat_passes['date']
    passes = sat_passes['passes']
    session['satellite_coord'] = orbits.getFuturePosition(time_diff, session['tle'])
    session['look angle'] = orbits.lookAngle(session['user_coord'], session['satellite_coord'],orbits.getSemiMajorAxis(session['tle']['mean motion']))
    return render_template("index.html",passes = passes,startTime = startTime, date = date,startAzimuth = startAzimuth,
            endTime = endTime,endAzimuth = endAzimuth, elev = round(session['look angle'][0],2), 
            az = round(session['look angle'][1],2), lat = round(session['satellite_coord'][0],2),
            lon = round(session['satellite_coord'][1],2), sat_id = session['satellite'],
            latg = round(session['user_coord'][0],2), longg = round(session['user_coord'][1],2),
            coords = orbit_propogation, inclination = round(session['tle']['inclination']*180/math.pi,2),
            perigee = round(session['tle']['perigee']*180/math.pi,2), eccentricity = round(session['tle']['eccentricity'],5),
            satellite_dict = satellite_dict, sat_name = session['satellite_name'],  mapbox_key=os.getenv("MAP_BOX_KEY"), bad_value=incorrect_value_flag)
Example 16
    def get(self, data):
        data = re.match(
            r"startDate=(?P<date_start>\d+-\d+-\d+) endDate=(?P<date_end>\d+-\d+-\d+) startTime=(?P<time_start>\d+-\d+) endTime=(?P<time_end>\d+-\d+) events=(?P<events>\w) cam=(?P<cam>\w+)",
            data.replace('&', ' '))
        data = data.groupdict()
        conn = psycopg2.connect(host='localhost',
                                dbname='video_analytics',
                                user='******',
                                password='******')
        cur = conn.cursor()
        start_time_database = ' start_time >=' + str(
            (int(data['time_start'][0:2]) * 60 + int(data['time_start'][3:])) *
            60 * 999) + ' and ' if data['time_start'] != '00-00' else ''
        end_time_database = ' end_time <= ' + str(
            (int(data['time_end'][0:2]) * 60 + int(data['time_end'][3:])) *
            60 * 1001) + ' and ' if data['time_end'] != '00-00' else ''
        start_date_database = ' date >= ' + str(
            DateTime(data['date_start'].replace('-', '/') +
                     ' UTC').JulianDay()) + ' and '
        end_date_database = ' date <= ' + str(
            DateTime(data['date_end'].replace('-', '/') +
                     ' UTC').JulianDay()) + ' and '
        cam = "records.cam='{}'".format(data['cam'])
        cur.execute(
            'select start_time,end_time,date, video_archive,cam,id  from records where'
            + start_date_database + end_date_database + start_time_database +
            end_time_database + cam + ';')
        data_out = cur.fetchall()
        result = []
        event_types = "and type = {}".format(
            data['events']) if int(data['events']) > 0 else ''
        print(data_out)
        print("start")
        print(
            str(
                DateTime(data['date_start'].replace('-', '/') +
                         ' UTC').JulianDay()))
        print(
            str(
                DateTime(data['date_end'].replace('-', '/') +
                         ' UTC').JulianDay()))
        print(len(data_out))
        if len(data_out) > 0:
            for el in data_out:
                r = {
                    'date': el[2],
                    'start': get_time(el[0]),
                    'end': get_time(el[1]),
                    'archivePostfix': el[3],
                    'cam': el[4],
                    'id': el[5]
                }
                result.append(r)
            conn.close()
            for el in result:
                conn = psycopg2.connect(host='localhost',
                                        dbname='video_analytics',
                                        user='******',
                                        password='******')
                cur = conn.cursor()
                cur.execute(
                    "select id, cam, archive_file1, archive_file2, start_timestamp, end_timestamp, type, confidence,reaction,file_offset_sec from events where events.cam='{cam}' and date={date} and archive_file1='{archive}'  {events};"
                    .format(cam=el['cam'],
                            date=el['date'],
                            archive=el['archivePostfix'],
                            events=event_types))
                rows = cur.fetchall()
                event_list = []
                for event in rows:
                    event_list.append({
                        'id': event[0],
                        'cam': event[1],
                        'archiveStartHint': event[2],
                        'archiveEndHint': event[3],
                        'startTimeMS': event[4],
                        'endTimeMS': event[5],
                        'eventType': event[6],
                        'confidence': event[7],
                        'reaction': event[8],
                        'offset': event[9]
                    })
                el['events'] = event_list
                conn.close()
        else:
            rec_id = 1
            try:
                dir_data = subprocess.check_output(
                    "ls", cwd="/home/_VideoArchive/{}".format(data['cam']))
                dir_data = dir_data.decode()
                dir_data = dir_data.split('\n')
                dir_data.remove("alertFragments")
                dir_data.remove('') if "" in dir_data else None
                for row in dir_data:
                    print(row)
                    data_dict = re.match(
                        r"cam(?P<cam>\d+)_(?P<date>\d+_\d+_\d+)___(?P<time>\d+_\d+_\d+)",
                        row)
                    data_dict = data_dict.groupdict()
                    juliandate = round(
                        julian.to_jd(
                            datetime.datetime.strptime(data_dict["date"],
                                                       "%d_%m_%Y") +
                            datetime.timedelta(
                                hours=int(data_dict["time"][:2]),
                                minutes=int(data_dict["time"][3:5]),
                                seconds=30)))
                    starttime = int(
                        int(data_dict["time"][:2]) * 60 +
                        int(data_dict["time"][3:5])) * 60 * 1000
                    endtime = (int(
                        int(data_dict["time"][:2]) * 60 +
                        int(data_dict["time"][3:5])) * 60 * 1000) + 600000
                    print(
                        int(
                            int(data['time_end'][0:2]) * 60 +
                            int(data['time_end'][3:])) * 60 * 1000)
                    print(endtime)
                    if (round(
                            DateTime(data['date_start'].replace('-', '/') +
                                     ' UTC').JulianDay()) <= juliandate
                            and round(
                                DateTime(data['date_end'].replace('-', '/') +
                                         ' UTC').JulianDay()) >= juliandate
                            and int(
                                int(data['time_end'][0:2]) * 60 +
                                int(data['time_end'][3:])) * 60 * 1001 >=
                            endtime and int(
                                int(data['time_start'][0:2]) * 60 +
                                int(data['time_start'][3:])) * 60 * 999 <=
                            starttime):
                        result.append({
                            'id': rec_id,
                            'cam': 'cam' + data_dict["cam"],
                            'archivePostfix': '/cam' + data_dict['cam'] + '/' + row,
                            'date': juliandate,
                            'start': data_dict["time"][0:5].replace('_', '-'),
                            'end': endtime,
                            'events': []
                        })
                        rec_id += 1


#                    print(result)
            except:
                pass
        return result
Example 17
alpha = 0
rho = 1 / 1000
s = 1.2
q = 1 / 10
m_1 = 10
m_2 = 1
r_E = 10  #in units of AU
coord = SkyCoord('18:00:00 -30:00:00', unit=(u.hourangle, u.deg))
paral = {'earth_orbital': True, 'satellite': False, 'topocentric': False}

#define gravitational constant
G = 39.478  #AU^3 * yr^-2 * M_solar^-1 (i.e. 4*pi^2 in these units)

#set up the time array for t_0-2t_E to t_0+2t_E
t_gc = julian.from_jd(t_0, 'jd')
t_start = julian.to_jd(t_gc - datetime.timedelta(days=2 * t_E), fmt='jd')
t_end = julian.to_jd(t_gc + datetime.timedelta(days=2 * t_E), fmt='jd')
times = np.linspace(t_start, t_end, 100)

#change the unit for the times array to t_E
times_plot = (times - t_0) / t_E

#Part a
#find period of the orbital motion of the binary axis
T = 2 * np.pi * np.sqrt((s * r_E)**3 / (G * (m_1 + m_2)))
print("The period of the orbital motion is", T, "yr")

#Part b
#find rate of change of alpha in deg/year
dalpha_dt = 360 / T
print(dalpha_dt)
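
# With the numbers above: T = 2*pi*sqrt(12**3 / (39.478 * 11)) ~ 12.5 yr,
# so dalpha_dt ~ 28.7 deg/yr.
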
# -*- coding: utf-8 -*-
import os, sys
import julian
import numpy as np
from datetime import datetime
from math import sqrt

#------------------- Configuration Parameters -------------------

# Simulation Parameters
mjd_start = julian.to_jd(datetime(2020, 1, 30, 00, 00, 00), fmt='mjd')
tstep = 0.1  # seconds - 10 Hz

# Seed Initial Position/Velocity with TLE - BEESAT-1
# (optional) - can instead replace this with r_i, v_i as np.array(3)
line1 = (
    '1 35933U 09051C   19315.45643387  .00000096  00000-0  32767-4 0  9991')
line2 = (
    '2 35933  98.6009 127.6424 0006914  92.0098 268.1890 14.56411486538102')

# Initial Spacecraft State
q_i = np.array(
    [sqrt(4.0) / 4.0,
     sqrt(4.0) / 4.0,
     sqrt(4.0) / 4.0,
     sqrt(4.0) / 4.0])  # unit quaternion (each component sqrt(4)/4 = 0.5)
# w_i = np.array([4.18879, 0, 0])   # radians/sec
w_i = np.array([.03, .03, .03])
T_i = 283  # Kelvin

# Spacecraft Properties