Exemplo n.º 1
0
def place_trials_default(expnum, start_time, end_time, verbose=False):
    """Upload processed event data from its proper location into the PEN
    tool's subfolder, one trial per database event.

    As long as the data is organized in the standard format, with metadata on
    the mysql database, this handles all of the uploading: trial folders are
    created locally and on the hub, data files are moved into them, and the
    report/readme/kml documentation files are posted.

    Args:
        expnum: Experiment number; keys into experiment_path / mySQL_sitedef /
            mySQL_stadef configuration mappings.
        start_time: Lower bound (inclusive) of the event-time window queried.
        end_time: Upper bound of the event-time window queried.
        verbose: Accepted for interface compatibility; currently unused.

    WARNING: Currently this will not realize if you've pointed it to a folder
    that it already uploaded.
    """
    destination = experiment_path[expnum]
    # Trials are numbered consecutively after the last one already present.
    current_trial = utils.find_last_trial(expnum) + 1
    existing_evid_dict = caching.load_evid_dictionary(expnum)
    event_data_dicts = smysql.retrieve_event_description(start_time, end_time, list_of_sites=mySQL_sitedef[expnum])
    default_folder = smysql.retrieve_data_folder()

    # Look at every event in the database between time constraints.
    for event in event_data_dicts:
        site_evt_number = event[cfg_evt_siteEvt]
        site_evt_time = event[cfg_evt_time]
        site_event_id = event[cfg_evt_evid]
        site_event_dist = event[cfg_evt_dist]
        site_event_ml = event[cfg_evt_ml]
        file_data_dicts = smysql.retrieve_file_location(site_evt_number, mySQL_stadef[expnum])

        # If this event has already been uploaded, report it and skip this event.
        if site_event_id in existing_evid_dict.values():
            nees_logging.log_existing_evid(site_event_id)
            continue

        # Don't do anything if there's no data for this event.
        if not file_data_dicts:
            continue

        # Generate file structure on shttp and local system.
        description = utils.generate_description(event)
        trialtitle = datetime.datetime.utcfromtimestamp(site_evt_time).strftime(default_time_format)
        trial_doc_folder = "%sTrial-%s/Documentation/" % (destination, current_trial)
        report_source = "%sTrial-%s/Rep-1/%s/" % (destination, current_trial, cfg_hub_ext_fold[".txt"])
        report_name = "report.csv"
        readme_name = "readme.pdf"
        events_kml = "event.kml"
        utils.generate_trial_structure(destination, current_trial)
        shttp.post_full_trial(shttp.experiment_id_dic[expnum], trialtitle, description, current_trial)

        # Find and move every file within an event to the created file structure.
        move_datafiles(file_data_dicts, event, destination, current_trial, trial_doc_folder, default_folder, expnum)
        utils.move_files(report_source, trial_doc_folder, [report_name, readme_name, events_kml])
        snupload.upload_reportfile(expnum, current_trial, trial_doc_folder, report_name)
        snupload.upload_reportfile(expnum, current_trial, trial_doc_folder, readme_name)
        snupload.upload_reportfile(expnum, current_trial, trial_doc_folder, events_kml)
        utils.clean_up(report_source)

        # Move on to next trial for further processing after updating cache.
        nees_logging.log_goto_nextline(neeshub_log_filename)
        caching.update_all_cache_dictionaries(expnum, current_trial, site_event_id, site_event_ml, site_event_dist)
        current_trial += 1
Exemplo n.º 2
0
def place_reports_only(expnum, start_time, end_time):
    """Regenerate the documentation report for every event in the window.

    For each database event between start_time and end_time, look up the
    trial number previously cached for that event id and rebuild its
    report files in the trial's Documentation folder. No data files are
    moved and no new trials are created.
    """
    base_path = experiment_path[expnum]
    events = smysql.retrieve_event_description(start_time, end_time, list_of_sites=mySQL_sitedef[expnum])
    data_folder = smysql.retrieve_data_folder()

    # Rebuild the report set for each event found in the time window.
    for evt in events:
        evt_number = evt[cfg_evt_siteEvt]
        evt_id = evt[cfg_evt_evid]
        evt_files = smysql.retrieve_file_location(evt_number, mySQL_stadef[expnum])
        trial = caching.trial_num_from_evid(expnum, evt_id)
        doc_folder = "%sTrial-%s/Documentation/" % (base_path, trial)
        report.create_report(doc_folder, evt)
        create_filereports(evt_files, evt, base_path, trial, doc_folder, data_folder)
Exemplo n.º 3
0
def generate_evid_report(filepath, event_id, exp_num, evt_type="Earthquake", filename="report.csv"):
    """Creates report based on mySQL database in the requested folder.

    Args:
        filepath: Folder path where you want a report.csv created.
        event_id: Event ID of the requested event.
        exp_num: Experiment Number of the event.
        evt_type: Header event type written into the report.
        filename: Name of the report file to create.
    """
    sites = mySQL_sitedef[exp_num]
    stations = mySQL_stadef[exp_num]
    matches = smysql.retrieve_event_description(list_of_sites=sites, evid=event_id)
    evt = utils.dict_frm_singular_list(matches)
    evt_number = evt[cfg_evt_siteEvt]
    create_report(filepath, evt, evt_type, filename)

    # Append one report line per data file associated with this event.
    for file_entry in smysql.retrieve_file_location(evt_number, stations):
        when = evt[cfg_evt_time]
        channel_name = "%s_%s_%s" % (file_entry[cfg_fl_net], file_entry[cfg_fl_sta], file_entry[cfg_fl_chan])
        channel_info = smysql.retrieve_channel_position(channel_name, when)
        append_report_if_valid(filepath, file_entry, channel_info, filename)
Exemplo n.º 4
0
def generate_CSV(filepath, siteEvt, channel_list="all", time="", evt_type="Earthquake"):
    """Generates CSV file with custom number of channels.
    Args:
        filepath: Folder path or filename path. If filepath is entered as folder 
            path report.csv will be used.
        siteEvt: MYSQL database specific siteEVT assigned to the lookup. Enter 0 
            if you want to create report for non-event type database.
        Channel_List: List of channels you wish to display. Can either be a Python List or a
            string list with comma separated values. Leaving blank or "all" uses all channels.
        time: Time of non-event trials, such as SFSI. If siteEvt is specified, this is ignored.
        evt_type: Header event type for report.csv file. Defaulted at "Earthquake"
    """
    file_parse = utils.parse_file_path(filepath)
    filename = file_parse["filename"]
    folder_path = file_parse["folder"]
    channel_cond = utils.parse_channel_list(channel_list)

    # If this is a standard event type, proceed normally.
    if siteEvt != 0:
        event_dicts = smysql.retrieve_event_description(site_evt=siteEvt)
        event_dict = utils.dict_frm_singular_list(event_dicts)
        create_report(folder_path, event_dict, evt_type, filename)
        file_list = smysql.retrieve_file_location(siteEvt)

        # Look through every file associated with this siteEvt.
        for file_dict in file_list:
            event_time = event_dict[cfg_evt_time]
            pub_chan = "%s_%s_%s" % (file_dict[cfg_fl_net], file_dict[cfg_fl_sta], file_dict[cfg_fl_chan])

            # Check if we are allowed to use this channel for creation.
            if pub_chan in channel_cond or channel_cond == "all":
                channel_dict = smysql.retrieve_channel_position(pub_chan, event_time)
                append_report_if_valid(folder_path, file_dict, channel_dict, filename)

    # None Event report creation begins if the proper key is set.
    elif siteEvt == 0:
        print "VAL YOU NEED TO PROGRAM IN A REPORT CREATION FOR CROSS-HOLES AND SFSI SHAKES"