def place_trials_default(expnum, start_time, end_time, verbose=False):
    """Upload processed trial data from its proper location into the PEN
    tool's subfolder.

    This is the primary way of moving processed data. As long as the data is
    organized in our standard format, with the metadata located on the MySQL
    database, this handles all of the uploading: for every event found
    between the time constraints it creates the trial file structure, posts
    the trial, moves the data/report files into place, and updates the
    local caches.

    Args:
        expnum: Experiment number; keys into ``experiment_path``,
            ``mySQL_sitedef``, ``mySQL_stadef`` and the shttp experiment map.
        start_time: Start of the event window passed to the event query.
        end_time: End of the event window passed to the event query.
        verbose: Currently unused; kept for backward compatibility with
            existing callers.

    WARNING: Currently this will not realize if you've pointed it to a
    folder that it already uploaded; only events whose evid is already in
    the cached evid dictionary are skipped.
    """
    destination = experiment_path[expnum]
    current_trial = utils.find_last_trial(expnum) + 1
    existing_evid_dict = caching.load_evid_dictionary(expnum)
    event_data_dicts = smysql.retrieve_event_description(start_time, end_time, list_of_sites=mySQL_sitedef[expnum])
    default_folder = smysql.retrieve_data_folder()

    # Report file names are the same for every trial; build them once.
    report_name = "report.csv"
    readme_name = "readme.pdf"
    events_kml = "event.kml"

    # Look at every event in the database between time constraints.
    for event in event_data_dicts:
        site_evt_number = event[cfg_evt_siteEvt]
        site_evt_time = event[cfg_evt_time]
        site_event_id = event[cfg_evt_evid]
        site_event_dist = event[cfg_evt_dist]
        site_event_ml = event[cfg_evt_ml]
        file_data_dicts = smysql.retrieve_file_location(site_evt_number, mySQL_stadef[expnum])

        # If this event has already been uploaded, report it and skip this event.
        if site_event_id in existing_evid_dict.values():
            nees_logging.log_existing_evid(site_event_id)
            continue

        # Don't do anything if there's no data.
        if not file_data_dicts:
            continue

        # Generate file structure on shttp and local system.
        description = utils.generate_description(event)
        trialtitle = datetime.datetime.utcfromtimestamp(site_evt_time).strftime(default_time_format)
        trial_doc_folder = "%sTrial-%s/Documentation/" % (destination, current_trial)
        report_source = "%sTrial-%s/Rep-1/%s/" % (destination, current_trial, cfg_hub_ext_fold[".txt"])
        utils.generate_trial_structure(destination, current_trial)
        shttp.post_full_trial(shttp.experiment_id_dic[expnum], trialtitle, description, current_trial)

        # Find and move every file within an event to the created file structure.
        move_datafiles(file_data_dicts, event, destination, current_trial, trial_doc_folder, default_folder, expnum)
        utils.move_files(report_source, trial_doc_folder, [report_name, readme_name, events_kml])
        snupload.upload_reportfile(expnum, current_trial, trial_doc_folder, report_name)
        snupload.upload_reportfile(expnum, current_trial, trial_doc_folder, readme_name)
        snupload.upload_reportfile(expnum, current_trial, trial_doc_folder, events_kml)
        utils.clean_up(report_source)

        # Move on to next trial for further processing after updating cache.
        nees_logging.log_goto_nextline(neeshub_log_filename)
        caching.update_all_cache_dictionaries(expnum, current_trial, site_event_id, site_event_ml, site_event_dist)
        current_trial += 1
def place_reports_only(expnum, start_time, end_time):
    """Rebuild and place report files for previously uploaded trials.

    For every event in the database between the time constraints, look up
    the trial number previously cached for that event id, recreate the
    report in that trial's Documentation folder, and generate the per-file
    reports for the event's data files.
    """
    dest_root = experiment_path[expnum]
    events = smysql.retrieve_event_description(start_time, end_time, list_of_sites=mySQL_sitedef[expnum])
    data_root = smysql.retrieve_data_folder()

    # Walk every event inside the requested window.
    for evt in events:
        evt_files = smysql.retrieve_file_location(evt[cfg_evt_siteEvt], mySQL_stadef[expnum])
        trial = caching.trial_num_from_evid(expnum, evt[cfg_evt_evid])
        doc_folder = "%sTrial-%s/Documentation/" % (dest_root, trial)
        report.create_report(doc_folder, evt)
        create_filereports(evt_files, evt, dest_root, trial, doc_folder, data_root)