Example #1
def index():
    """ Very simple embedding of a lightcurve chart
    """
    # Flask: grab the input arguments from the URL.
    # These are set by the navigation buttons in the template.
    args = flask.request.args
    _from = str(args.get('_from', str(DEFAULT_TR.start)))
    _to = str(args.get('_to', str(DEFAULT_TR.end)))

    tr = TimeRange(parse_time(_from), parse_time(_to))

    if 'next' in args:
        tr = tr.next()

    if 'prev' in args:
        tr = tr.previous()

    if 'next_hour' in args:
        tr = TimeRange(tr.start + ONE_HOUR, tr.end + ONE_HOUR)

    if 'next_day' in args:
        tr = TimeRange(tr.start + ONE_DAY, tr.end + ONE_DAY)

    if 'prev_hour' in args:
        tr = TimeRange(tr.start - ONE_HOUR, tr.end - ONE_HOUR)

    if 'prev_day' in args:
        tr = TimeRange(tr.start - ONE_DAY, tr.end - ONE_DAY)

    _from = str(tr.start)
    _to = str(tr.end)

    # get the data
    goes = lc.GOESLightCurve.create(tr)
    # resample to one-minute means to reduce the number of points (handy while debugging)
    goes.data = goes.data.resample("1min").mean()
    # add time string for display of hover tool
    goes.data['time_str'] = goes.data.index.strftime('%F %H:%M:%S')
    source = ColumnDataSource(data=goes.data)
    source_static = ColumnDataSource(data=goes.data)

    # now create the bokeh plots
    # XRS-B Plot
    fig1 = figure(title="GOES",
                  tools=TOOLS,
                  plot_height=PLOT_HEIGHT,
                  width=PLOT_WIDTH,
                  x_axis_type='datetime',
                  y_axis_type="log",
                  y_range=(10**-9, 10**-2),
                  toolbar_location="right")
    fig1.xaxis.formatter = formatter
    fig1.line('index',
              'xrsb',
              source=source_static,
              color='red',
              line_width=2,
              legend="xrsb 1-8 Angstrom")

    fig2 = figure(title="GOES",
                  tools=TOOLS,
                  plot_height=PLOT_HEIGHT,
                  width=PLOT_WIDTH,
                  x_axis_type='datetime',
                  y_axis_type="log",
                  y_range=(10**-9, 10**-2))
    fig2.xaxis.formatter = formatter
    fig2.line('index',
              'xrsa',
              source=source_static,
              color='blue',
              line_width=2,
              legend="xrsa 0.5-4.0 Angstrom")

    # link the x-range for common panning
    fig2.x_range = fig1.x_range

    fig = Column(fig1, fig2)

    source_static.callback = CustomJS(code="""
        var inds = cb_obj.selected['1d'].indices;
        var d1 = cb_obj.data;
        var m = 0;

        if (inds.length == 0) { return; }

        for (i = 0; i < inds.length; i++) {
            d1['color'][inds[i]] = "red"
            if (d1['y'][inds[i]] > m) { m = d1['y'][inds[i]] }
        }
        console.log(m);
        cb_obj.trigger('change');
    """)

    hover = HoverTool()
    hover.tooltips = [("time", "@time_str"), ("xrsb", "@xrsb"),
                      ("xrsa", "@xrsa")]

    fig1.add_tools(hover)

    hover2 = HoverTool()
    hover2.tooltips = [("time", "@time_str"), ("xrsb", "@xrsb"),
                       ("xrsa", "@xrsa")]
    fig2.add_tools(hover2)

    # Configure resources to include BokehJS inline in the document.
    # For more details see:
    #   http://bokeh.pydata.org/en/latest/docs/reference/resources_embedding.html#bokeh-embed
    js_resources = INLINE.render_js()
    css_resources = INLINE.render_css()

    # For more details see:
    #   http://bokeh.pydata.org/en/latest/docs/user_guide/embedding.html#components
    script, div = components(fig, INLINE)
    html = flask.render_template(
        'embed.html',
        plot_script=script,
        plot_div=div,
        js_resources=js_resources,
        css_resources=css_resources,
        _from=_from,
        _to=_to,
    )
    return encode_utf8(html)
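
The function above leans on several module-level names that the snippet does not show (DEFAULT_TR, ONE_HOUR, ONE_DAY, TOOLS, PLOT_HEIGHT, PLOT_WIDTH, formatter, plus the imports). Here is a minimal sketch of that setup, assuming the old-style sunpy (< 1.0) lightcurve module and a pre-2.0 Bokeh that the rest of the code implies; the constant values are illustrative guesses, not the original author's.

# Sketch of the module-level setup assumed by index() above.
# Constant values are illustrative; adjust to taste.
import flask
from datetime import timedelta

from sunpy import lightcurve as lc  # sunpy < 1.0 API
from sunpy.time import TimeRange, parse_time

from bokeh.plotting import figure
from bokeh.models import Column, ColumnDataSource, CustomJS, HoverTool
from bokeh.models.formatters import DatetimeTickFormatter
from bokeh.resources import INLINE
from bokeh.embed import components
from bokeh.util.string import encode_utf8  # removed in Bokeh >= 2.0

ONE_HOUR = timedelta(hours=1)
ONE_DAY = timedelta(days=1)
DEFAULT_TR = TimeRange('2011-06-07 00:00', '2011-06-07 12:00')
TOOLS = "pan,wheel_zoom,box_zoom,reset,save"
PLOT_HEIGHT = 300
PLOT_WIDTH = 900
formatter = DatetimeTickFormatter()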
Example #2
def main(date_start, date_finish, image_size_output, time_window, flag,
         home_dir, bases):

    date_time_pre_start = date_start + '-0000'
    date_time_start = parser.parse(date_time_pre_start)
    print('date_time_start:', date_time_start)

    date_time_pre_end = date_finish + '-2359'
    date_time_end = parser.parse(date_time_pre_end)
    print('date_time_end:', date_time_end)

    time_increment = 60

    print('image_size_output:', image_size_output)
    print('flag:', flag)
    print('home_dir:', home_dir)

    url_prefix = 'https://seal.nascom.nasa.gov/'
    print('url_prefix:', url_prefix)

    # look_ahead should sufficiently cover all 7 products based on their cadence
    look_ahead = int(np.ceil(time_window * 60 / 10.))
    print('look_ahead:', look_ahead)

    diff_start_finish_total_sec = (date_time_end -
                                   date_time_start).total_seconds()
    print('diff_start_finish_total_sec:', diff_start_finish_total_sec)

    total_sec = timedelta(days=time_increment).total_seconds()
    print('total_sec:', total_sec)

    # num_loops would be 94 + 1 for '19960101-0000' - '20110501-0000'
    num_loops = np.ceil(diff_start_finish_total_sec / total_sec) + 1
    print('num_loops:', num_loops)

    base_list = bases.split(',')
    for base in tqdm(base_list):
        start_process_time = process_time()  #initialize clock per product type

        base = base.strip(' ')
        holes_list = []
        unreadable_file_ids_product_list_global = []

        print(f'{base}')
        base_dir = home_dir + base
        if not os.path.exists(base_dir):
            os.makedirs(base_dir)

        # time_range is re-initialized here for each new base name
        time_range = TimeRange(date_time_start, timedelta(days=time_increment))

        prev_time, time_range_modified = prev_time_resumer(
            home_dir, base, time_range, date_time_end)
        # main workhorse loop of the program
        for t_value in tqdm(np.arange(num_loops)):
            print('t_value:', t_value)
            print('prev_time:', prev_time)

            if time_range_modified.end > date_time_end:
                time_range_modified = TimeRange(time_range_modified.start,
                                                date_time_end)

            product_results = product_search(base, time_range_modified,
                                             date_time_start)
            product_results_number = product_results.file_num
            if product_results_number != 0:
                ind = index_of_sizes(base, product_results)
                all_size_sieved_times_pre, fetch_indices_product_orig = fetch_indices(
                    base, ind, product_results, time_window, look_ahead,
                    prev_time)
                if len(fetch_indices_product_orig) != 0:

                    all_time_window_sieved_times_product_times_modified, holes_product_list, unreadable_file_ids_product_list_local = product_distiller(
                        fetch_indices_product_orig, base,
                        all_size_sieved_times_pre, ind, product_results,
                        look_ahead, time_window, url_prefix, flag,
                        image_size_output, home_dir)

                    if holes_product_list:  #if image had missing regions (e.g., arising from telemetry errors)
                        holes_list.append(holes_product_list)

                    if unreadable_file_ids_product_list_local:
                        unreadable_file_ids_product_list_global.append(
                            unreadable_file_ids_product_list_local)

                    all_time_window_sieved_times_sorted = np.unique(
                        all_time_window_sieved_times_product_times_modified)

                    #print(f'{base} np.unique(all_size_sieved_times_pre):', np.unique(all_size_sieved_times_pre), len(np.unique(all_size_sieved_times_pre)))
                    #print(f'{base} list(all_time_window_sieved_times_sorted):', list(all_time_window_sieved_times_sorted), len(all_time_window_sieved_times_sorted))

                    prev_time = []  #reset to empty list
                    if len(all_time_window_sieved_times_sorted) != 0:
                        prev_time.append(
                            all_time_window_sieved_times_sorted[-1]
                        )  #append the last good time entry from the previous loop

                    csv_writer(base, home_dir, date_start, date_finish, flag,
                               time_window, image_size_output,
                               all_time_window_sieved_times_sorted)

            # SunPy's TimeRange.next() advances the range in place by its own
            # length; .previous() would step backwards in time instead.
            time_range_modified.next()
            #print('time_range_modified next:', time_range_modified)

        print(f'{base} holes_list', holes_list)
        print(f'{base} unreadable_file_ids_product_list_global:',
              unreadable_file_ids_product_list_global)

        data_cuber(home_dir, base, date_start, date_finish, flag, time_window,
                   image_size_output)

        end_process_time = process_time()
        time_of_process = end_process_time - start_process_time
        print(f'{base} time of process in seconds:', time_of_process)
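
The driver loop above boils down to one SunPy idiom: a fixed-length TimeRange that is advanced in place with .next() until it passes the requested end date. Below is a stripped-down sketch of just that chunking pattern, with the pipeline's product functions omitted and illustrative dates.

# Sketch of the chunked-TimeRange pattern used by main() above.
from datetime import timedelta
from dateutil import parser
import numpy as np
from sunpy.time import TimeRange

date_time_start = parser.parse('19960101-0000')
date_time_end = parser.parse('19970101-2359')
time_increment = 60  # days per chunk

span_sec = (date_time_end - date_time_start).total_seconds()
chunk_sec = timedelta(days=time_increment).total_seconds()
num_loops = int(np.ceil(span_sec / chunk_sec)) + 1

time_range = TimeRange(date_time_start, timedelta(days=time_increment))
for _ in range(num_loops):
    # clip the final chunk so it does not run past the requested end
    if time_range.end > date_time_end:
        time_range = TimeRange(time_range.start, date_time_end)
    print(time_range.start, '->', time_range.end)
    time_range.next()  # shifts the range forward in place by its own length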
Example #3
def returnDataRange(centralDate, delta_min):
    # Build a window of 2 * delta_min centered on centralDate. .previous()
    # shifts a TimeRange back in place by its own length, so date_rangeUp
    # spans [centralDate - delta_min, centralDate]; evaluating .start before
    # calling .next() then yields [centralDate - delta_min, centralDate + delta_min].
    date_rangeUp = TimeRange(centralDate, delta_min * u.min).previous()
    date_rangeFinal = TimeRange(date_rangeUp.start, date_rangeUp.next().end)
    return date_rangeFinal
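
A quick usage sketch for returnDataRange() above; it assumes astropy.units and sunpy's TimeRange are imported in the function's module, and the central time is illustrative.

# Usage sketch: a 30-minute range centered on the given time.
import astropy.units as u
from sunpy.time import TimeRange

tr = returnDataRange('2012-07-01 12:00', 15)  # 15-minute half-width
print(tr.start)    # ~ 2012-07-01 11:45
print(tr.end)      # ~ 2012-07-01 12:15
print(tr.minutes)  # 30 min in total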
Example #4
from astropy.time import Time

# two example instants as astropy Time objects (values are illustrative)
t0 = Time('2012-07-01T00:00:00.000', scale='utc')
t1 = Time('2012-07-01T06:00:00.000', scale='utc')

delta_t = t1 - t0
print()
print('-----------------------')
print(t0)
print(t1)
print(delta_t.sec)

# sunpy time ranges
from sunpy.time import TimeRange
time_range = TimeRange(t0, t1)
print(time_range.center)
tc = Time(time_range.center, scale='utc')
print(tc.mjd)
print(tc.datetime)
print(time_range.seconds)
print(time_range.next())

# example of windowing time ranges at a cadence in sunpy (potentially very useful!)
import astropy.units as u
w = time_range.window(600 * u.second, 60 * u.second)
for win in w:
    print(win.center)
"""
illustration of how I think I will use Astropy and Sunpy time objects
- start with a set of dates that spans your range of interest
- convert these to astropy time objects
- generate a sunpy time range that spans the whole interval
- divide up this interval into entries
"""
print('\n---------------------\n')
date_start = '2012-07-01T00:00:00.000'
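
Following the plan in the docstring above, here is a minimal sketch of the whole workflow under stated assumptions: date_start comes from the line above, date_end is an assumed companion value, and the hourly cadence is illustrative.

# Sketch: dates -> astropy Time -> one spanning sunpy TimeRange -> entries.
import astropy.units as u
from astropy.time import Time
from sunpy.time import TimeRange

date_end = '2012-07-02T00:00:00.000'  # assumed end of the interval of interest

t_start = Time(date_start, scale='utc')
t_end = Time(date_end, scale='utc')

span = TimeRange(t_start, t_end)
# window(cadence, window): with cadence == window the entries are contiguous
# and non-overlapping -- here, hourly entries covering the day.
entries = span.window(3600 * u.second, 3600 * u.second)
for entry in entries:
    print(entry.start, entry.end)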
Example #5
def main(date_start, date_finish, image_size_output, time_window, flag,
         home_dir, bases, fits_headers, lev1_LASCO, email, mission):

    warnings.simplefilter(action='ignore', category=RuntimeWarning)

    client = drms.Client(email=email, verbose=False)  # verbose=True for debug output

    date_time_pre_start = date_start + '-0000'
    date_time_start = parser.parse(date_time_pre_start)
    print('date_time_start:', date_time_start)

    date_time_pre_end = date_finish + '-2359'
    date_time_end = parser.parse(date_time_pre_end)
    print('date_time_end:', date_time_end)

    time_increment = 60
    print(f'time increment {time_increment} days')

    print('image_size_output:', image_size_output)
    print('flag:', flag)
    print('home_dir:', home_dir)

    url_prefix = 'https://seal.nascom.nasa.gov/'
    print('url_prefix:', url_prefix)

    # look_ahead should sufficiently cover all 7 products based on their cadence
    look_ahead = int(np.ceil(time_window * 60 / 10.))
    print('look_ahead:', look_ahead)

    diff_start_finish_total_sec = (date_time_end -
                                   date_time_start).total_seconds()
    print('diff_start_finish_total_sec:', diff_start_finish_total_sec)

    total_sec = timedelta(days=time_increment).total_seconds()
    print('total_sec:', total_sec)

    # num_loops would be 94 + 1 for '19960101-0000' - '20110501-0000'
    num_loops = np.ceil(diff_start_finish_total_sec / total_sec) + 1
    print('num_loops:', num_loops)

    base_list = bases.split(',')
    for base in tqdm(base_list):
        start_process_time = process_time()  #initialize clock per product type

        base = base.strip(' ')

        holes_list = []
        transients_list = []
        blobs_list = []
        unreadable_file_ids_product_list_global = []

        print(f'{base}')
        base_dir = home_dir + base + f'_{mission}'
        if not os.path.exists(base_dir):
            os.makedirs(base_dir)
        '''
        if ('AIA' in base) and (int(base.split('AIA')[1]) != 4500): #AIA4500 is once an hour. So if not AIA4500, then have AIA cadences at 12s and 24s -> time_increment has to be reduced from 60d -> 1d
            time_increment = 1
            print(f'new time increment {time_increment} days:')
            total_sec = timedelta(days = time_increment).total_seconds()
            print('new total_sec:', total_sec)
            num_loops = np.ceil(diff_start_finish_total_sec/total_sec) + 1 #num_loops would be equal to 94 + 1 for 19960101-0000' - '20110501-0000'
            print('new num_loops:', num_loops)            
            time_range = TimeRange(date_time_start, timedelta(days = time_increment))
            
        else:
            time_range = TimeRange(date_time_start, timedelta(days = time_increment)) #time_range re-initialized here for each new base name 
        '''

        time_range = TimeRange(date_time_start, timedelta(days=time_increment))

        prev_time, time_range_modified = prev_time_resumer(
            home_dir, base, time_range, date_time_end, mission
        )  #time_range_modified.next() is the workhorse that advances time at the end of the time for-loop
        # main workhorse loop of the program
        for t_value in tqdm(np.arange(num_loops)):
            print('t_value:', t_value)
            print('prev_time:', prev_time)

            if time_range_modified.end > date_time_end:
                time_range_modified = TimeRange(time_range_modified.start,
                                                date_time_end)

            product_results, client = product_search(base, time_range_modified,
                                                     client, mission,
                                                     time_window)

            if ('MDI' in base) or (mission == 'SDO'):
                client_export_failed = ghost_file_check(product_results)

                if client_export_failed:
                    product_results_number = 0
                else:
                    # one row per staged record in the export's DataFrame
                    # (old query-style object: product_results.count()['T_REC'])
                    product_results_number = product_results.data.count()['record']

            else:
                product_results_number = product_results.file_num
                client_export_failed = False

            # see also product_results.has_failed() for the export case
            if (product_results_number != 0) and not client_export_failed:
                ind, fits_headers = index_of_sizes(
                    base, product_results, fits_headers, lev1_LASCO, mission
                )  #putting fits_headers here to insert it into __init__.py
                all_size_sieved_times_pre, fetch_indices_product_orig = fetch_indices(
                    base, ind, product_results, time_window, look_ahead,
                    prev_time, mission)
                if len(fetch_indices_product_orig) != 0:

                    all_time_window_sieved_times_product_times_modified, holes_product_list, transients_product_list, blobs_product_list, unreadable_file_ids_product_list_local = product_distiller(
                        fetch_indices_product_orig, base,
                        all_size_sieved_times_pre, ind, product_results,
                        look_ahead, time_window, url_prefix, flag,
                        image_size_output, home_dir, email, fits_headers,
                        lev1_LASCO, client, mission)

                    if holes_product_list:  #if image had missing regions (e.g., arising from telemetry errors)
                        holes_list.append(holes_product_list)

                    if transients_product_list:
                        transients_list.append(transients_product_list)

                    if blobs_product_list:
                        blobs_list.append(blobs_product_list)

                    if unreadable_file_ids_product_list_local:
                        unreadable_file_ids_product_list_global.append(
                            unreadable_file_ids_product_list_local)

                    all_time_window_sieved_times_sorted = np.unique(
                        all_time_window_sieved_times_product_times_modified)

                    #print(f'{base} np.unique(all_size_sieved_times_pre):', np.unique(all_size_sieved_times_pre), len(np.unique(all_size_sieved_times_pre)))
                    #print(f'{base} list(all_time_window_sieved_times_sorted):', list(all_time_window_sieved_times_sorted), len(all_time_window_sieved_times_sorted))

                    prev_time = []  #reset to empty list
                    if len(all_time_window_sieved_times_sorted) != 0:
                        prev_time.append(
                            all_time_window_sieved_times_sorted[-1]
                        )  #append the last good time entry from the previous loop

                    csv_writer(base, home_dir, date_start, date_finish, flag,
                               time_window, image_size_output,
                               all_time_window_sieved_times_sorted, lev1_LASCO,
                               mission)

            # SunPy's TimeRange.next() advances the range in place by its own
            # length; .previous() would step backwards in time instead.
            time_range_modified.next()
            #print('time_range_modified next:', time_range_modified)

        print(f'{base} holes_list', holes_list)
        print(f'{base} transients_list', transients_list)
        print(f'{base} blobs_list', blobs_list)
        print(f'{base} unreadable_file_ids_product_list_global:',
              unreadable_file_ids_product_list_global)

        data_cuber(home_dir, base, date_start, date_finish, flag, time_window,
                   image_size_output, lev1_LASCO, mission, fits_headers)

        end_process_time = process_time()
        time_of_process = end_process_time - start_process_time
        print(f'{base} time of process in seconds:', time_of_process)
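
Example #5 differs from Example #2 mainly in routing MDI/SDO queries through a drms export request and counting the staged records. Here is a minimal sketch of that piece in isolation, assuming a JSOC-registered email and an illustrative HMI series (the pipeline's product_search and ghost_file_check wrappers are not reproduced).

# Sketch of the drms export-and-count pattern behind Example #5.
import drms

client = drms.Client(email='registered.user@example.com', verbose=False)

# Ask JSOC to stage one day of HMI 720s magnetograms at 6-hour cadence.
request = client.export('hmi.m_720s[2015.01.01_TAI/1d@6h]{magnetogram}',
                        method='url', protocol='fits')

if request.has_failed():
    product_results_number = 0  # treat a failed export like an empty result
else:
    # request.data holds one row per staged file ('record' and 'url' columns)
    product_results_number = request.data.count()['record']
print('records:', product_results_number)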