LATITUDE_COLUMN_ORIG][:],
                 raw_wind_io.LONGITUDE_COLUMN: netcdf_dataset.variables[
                     LONGITUDE_COLUMN_ORIG][:],
                 raw_wind_io.ELEVATION_COLUMN: netcdf_dataset.variables[
                     ELEVATION_COLUMN_ORIG][:],
                 raw_wind_io.TIME_COLUMN: numpy.array(unix_times_sec).astype(
                     int),
                 raw_wind_io.WIND_SPEED_COLUMN: wind_speeds_m_s01,
                 raw_wind_io.WIND_DIR_COLUMN: wind_directions_deg,
                 raw_wind_io.WIND_GUST_SPEED_COLUMN: wind_gust_speeds_m_s01,
                 raw_wind_io.WIND_GUST_DIR_COLUMN: wind_gust_directions_deg,
                 WIND_SPEED_FLAG_COLUMN: wind_speed_quality_flags,
                 WIND_DIR_FLAG_COLUMN: wind_dir_quality_flags,
                 WIND_GUST_SPEED_FLAG_COLUMN: wind_gust_speed_quality_flags,
                 WIND_GUST_DIR_FLAG_COLUMN: wind_gust_dir_quality_flags}

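    # Close the NetCDF file; build the table and remove invalid or
    # low-quality rows.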
    netcdf_dataset.close()
    wind_table = pandas.DataFrame.from_dict(wind_dict)
    wind_table = _remove_invalid_wind_rows(wind_table)
    return _remove_low_quality_data(wind_table)


if __name__ == '__main__':
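    # Read winds from the raw NetCDF file, convert sustained and gust winds
    # to u- and v-components, and write the processed table to CSV.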
    WIND_TABLE = read_winds_from_raw_file(NETCDF_FILE_NAME)
    print(WIND_TABLE)

    WIND_TABLE = raw_wind_io.sustained_and_gust_to_uv_max(WIND_TABLE)
    print(WIND_TABLE)

    raw_wind_io.write_processed_file(WIND_TABLE, CSV_FILE_NAME)
        end_times_unix_sec,
        tornado_io.START_LAT_COLUMN:
        storm_event_table[START_LATITUDE_COLUMN_ORIG].values,
        tornado_io.START_LNG_COLUMN:
        storm_event_table[START_LONGITUDE_COLUMN_ORIG].values,
        tornado_io.END_LAT_COLUMN:
        storm_event_table[END_LATITUDE_COLUMN_ORIG].values,
        tornado_io.END_LNG_COLUMN:
        storm_event_table[END_LONGITUDE_COLUMN_ORIG].values,
        tornado_io.FUJITA_RATING_COLUMN:
        storm_event_table[TORNADO_RATING_COLUMN_ORIG].values,
        tornado_io.WIDTH_COLUMN:
        FEET_TO_METRES * storm_event_table[TORNADO_WIDTH_COLUMN_ORIG].values
    }

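    # Convert the dictionary to a table and remove invalid tornado reports.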
    tornado_table = pandas.DataFrame.from_dict(tornado_dict)
    return tornado_io.remove_invalid_reports(tornado_table)


if __name__ == '__main__':
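    # Read thunderstorm-wind and tornado reports from the Storm Events file,
    # then write each to its own processed file.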
    WIND_TABLE = read_thunderstorm_wind_reports(STORM_EVENT_FILE_NAME)
    print(WIND_TABLE)

    raw_wind_io.write_processed_file(WIND_TABLE, PROCESSED_WIND_FILE_NAME)
    print('\n')

    TORNADO_TABLE = read_tornado_reports(STORM_EVENT_FILE_NAME)
    print(TORNADO_TABLE)

    tornado_io.write_processed_file(TORNADO_TABLE, PROCESSED_TORNADO_FILE_NAME)
        s == STATION_ID_1MINUTE
        for s in STATION_METADATA_TABLE[raw_wind_io.STATION_ID_COLUMN].values
    ]
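    # Look up the UTC offset for the 1-minute station.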
    THIS_STATION_INDEX = numpy.where(THESE_STATION_FLAGS)[0][0]
    THIS_UTC_OFFSET_HOURS = STATION_METADATA_TABLE[
        raw_wind_io.UTC_OFFSET_COLUMN].values[THIS_STATION_INDEX]

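    # Read 1-minute winds, convert sustained and gust winds to u- and
    # v-components, merge with station metadata, and write to CSV.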
    WIND_TABLE_1MINUTE = read_1minute_winds_from_raw_file(
        ORIG_1MINUTE_FILE_NAME, THIS_UTC_OFFSET_HOURS)
    WIND_TABLE_1MINUTE = raw_wind_io.sustained_and_gust_to_uv_max(
        WIND_TABLE_1MINUTE)
    WIND_TABLE_1MINUTE = merge_winds_and_station_metadata(
        WIND_TABLE_1MINUTE, STATION_METADATA_TABLE, STATION_ID_1MINUTE)
    print(WIND_TABLE_1MINUTE)

    raw_wind_io.write_processed_file(WIND_TABLE_1MINUTE, CSV_FILE_NAME_1MINUTE)

    # Download 5-minute METARs and convert file type.
    ORIG_5MINUTE_FILE_NAME = download_5minute_file(
        station_id=ORIG_STATION_ID_5MINUTE,
        month_unix_sec=MONTH_5MINUTE_UNIX_SEC,
        top_local_directory_name=TOP_LOCAL_DIR_NAME_5MINUTE,
        raise_error_if_fails=True)

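    # Find the UTC offset for the 5-minute station.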
    THESE_STATION_FLAGS = [
        s == STATION_ID_5MINUTE
        for s in STATION_METADATA_TABLE[raw_wind_io.STATION_ID_COLUMN].values
    ]
    THIS_STATION_INDEX = numpy.where(THESE_STATION_FLAGS)[0][0]
    THIS_UTC_OFFSET_HOURS = STATION_METADATA_TABLE[
        raw_wind_io.UTC_OFFSET_COLUMN].values[THIS_STATION_INDEX]