Esempio n. 1
0
def main():
    """Read dives from the meetup CSV, find the nearest slack current for
    each dive, and write the augmented rows to a new "_with_slacks" CSV.

    Relies on project modules: dqo (dive parsing/classification),
    data_collect (path helpers), dive_plan (station lookup), and
    getSlackForDive (slack matching).
    """
    inputFile = 'dive_meetup_data_old_format.csv'
    outputFile = inputFile.replace(".csv", "") + "_with_slacks.csv"
    PRINT_LOCATION_CLASSIFICATION = False

    print('Extracting dives from data file', inputFile)
    dives, slacks = dqo.getDives(inputFile)

    if slacks:
        # BUG FIX: the message had a '{}' placeholder but .format() was never
        # called, so the literal braces were printed instead of the filename.
        print(
            'Slacks already present in file {}, no need to run this program. Exiting now.'
            .format(inputFile))
        exit(0)

    print('Classifying dive sites')
    results = dqo.refineDives(dives)

    # Print how the dives were classified into locations
    if PRINT_LOCATION_CLASSIFICATION:
        for site, vals in results.items():
            print(site)
            for dive in vals:
                print('\t', dive)

    print('Identifying the nearest period of slack current for each dive')
    data = json.loads(open(data_collect.absName('dive_sites.json')).read())
    for site, sitedives in results.items():
        siteData = dqo.getSiteData(None, site, data)  # go through all sites
        if siteData is None:  # identity check is the idiomatic None test
            continue

        # for each dive at this location, find the slack that was dove
        station = dive_plan.getStation(data['stations'], siteData['data'])
        print('{} - {}\n{} - {}'.format(siteData['name'], siteData['data'],
                                        station['url'], station['coords']))
        for dive in sitedives:
            dive.slack = getSlackForDive(dive.date, siteData, station['url'])
            print('\t', dive)
            print('\t\t', dive.slack)

    print('Writing slacks to file', outputFile)
    with open(data_collect.absName(outputFile),
              'w',
              encoding='utf-8',
              newline='\n') as f:
        w = csv.writer(f, delimiter=',')
        for dive in dives:
            w.writerow([
                dt.strftime(dive.date, dqo.MEETUP_TIME_FORMAT), dive.title,
                dive.location, dive.address, dive.descr, dive.url, dive.slack
            ])
    print('Done writing to file', data_collect.absName(outputFile))
Esempio n. 2
0
def getDives(file):
    """Parse the meetup CSV at *file* into Dive objects.

    Returns a ``(dives, slacks)`` tuple where ``slacks`` is True when at
    least one row already carried pre-computed slack information.
    """
    dives = []
    slacks = False
    with open(absName(file), 'r', encoding='utf-8', newline='\n') as f:
        for row in csv.reader(f, delimiter=','):
            dive = Dive()
            # Rows carry 6 mandatory columns plus up to 2 optional ones.
            assert 6 <= len(row) <= 8
            dive.date = dt.strptime(row[0], MEETUP_TIME_FORMAT)
            dive.title = row[1]
            dive.location = row[2]
            dive.address = row[3]
            dive.descr = row[4]
            dive.url = row[5]
            # Optional 7th column: previously-computed slack data.
            if len(row) >= 7 and row[6]:
                dive.slack = parseSlack(row[6])
                slacks = True
            # Optional 8th column: splash (entry) time.
            if len(row) == 8 and row[7]:
                dive.splash = dt.strptime(row[7], MEETUP_TIME_FORMAT)
            dives.append(dive)
    return dives, slacks
Esempio n. 3
0
def main():
    """List slack-current windows at one station for the next 100 days,
    sorted from weakest to strongest surrounding peak currents."""
    # Station to analyze (alternatives kept for quick switching):
    # STATION = "Strait of Juan de Fuca Entrance, Washington Current"
    # STATION = "Juan De Fuca Strait (East), British Columbia Current"
    # STATION = "Rosario Strait, Washington Current"
    STATION = "Deception Pass (narrows), Washington Current"
    # STATION = "Admiralty Inlet (off Bush Point), Washington Current"
    # STATION = "Alki Point, 0.3 mile west of, Puget Sound, Washington Current"
    # STATION = "West end, Rich Passage, Puget Sound, Washington Current"
    # STATION = "Agate Passage, north end, Puget Sound, Washington Current"
    # STATION = "The Narrows, north end (midstream), Washington Current"
    # STATION = "South end (midstream), The Narrows, Puget Sound, Washington Current"
    # STATION = "Hale Passage, west end, Puget Sound, Washington Current"

    NOAA = True
    # NOAA = False

    data = json.loads(open(data_collect.absName('dive_sites.json')).read())
    station = dive_plan.getStation(data['stations'], STATION)

    # Pick the prediction source: NOAA or XTide (TBone).
    if NOAA:
        interp = intp.NoaaInterpreter(station['url_noaa'])
    else:
        interp = intp.TBoneSCInterpreter(station['url_xtide'])

    def speedSum(slack):
        # Combined magnitude of the ebb and flood peaks around the slack.
        return abs(slack.ebbSpeed) + abs(slack.floodSpeed)

    # Collect every slack (night included) over the next 100 days.
    # days = dive_plan.getAllDays(365, dt(2019, 1, 1))
    slacks = []
    for day in dive_plan.getAllDays(100):
        slacks.extend(interp.getSlacks(day, night=True))

    # sort by the sum of the max current speeds from weakest to strongest
    slacks.sort(key=speedSum)

    for s in slacks:
        print('{}\tSpeed sum = {:0.1f}'.format(s, speedSum(s)))
Esempio n. 4
0
def main():
    """Command-line dive planner.

    For each selected dive site, fetch slack-current predictions from both
    XTide and NOAA stations and print the diveable windows over a range of
    days. Site/station metadata comes from dive_sites.json; behavior is
    controlled by CLI flags, several of which are manually overridden below.
    """
    # Dive site and current station data file
    data = json.loads(open(data_collect.absName('dive_sites.json')).read())

    # Command-line Args
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-n',
        '--night',
        action='store_true',
        default=False,
        dest='INCLUDE_NIGHT',
        help='Consider slacks that occur during the night')

    parser.add_argument(
        '-s',
        '--ignorespeed',
        action='store_true',
        default=False,
        dest='IGNORE_MAX_SPEED',
        help='Ignore the max current speeds in dive_sites.json')

    parser.add_argument(
        '-w',
        '--includeworkdays',
        action='store_true',
        default=False,
        dest='INCLUDE_WORKDAYS',
        help=
        'Consider dives on any day, otherwise only considers diving on weekends and holidays'
    )

    parser.add_argument(
        '-i',
        '--ignorenondiveable',
        action='store_true',
        default=False,
        dest='IGNORE_NON_DIVEABLE',
        help=
        'Only print diveable slacks, otherwise non-diveable slack information is printed'
    )

    parser.add_argument(
        "--sort",
        action='store_true',
        default=False,
        dest="SORT",
        help=
        "Sort diveable days from most optimal slack to least optimal slack")

    parser.add_argument(
        "-f",
        "--futuredays",
        dest="DAYS_IN_FUTURE",
        default=7,
        type=int,
        help="Number of days after start date to consider diving")

    # NOTE(review): the default is a datetime while parsed CLI values are
    # dates; both are overwritten by the manual override below, but confirm
    # downstream handling before removing that override.
    parser.add_argument(
        "-d",
        "--start-date",
        dest="START",
        default=dt.now(),
        type=lambda d: dt.strptime(d, '%Y-%m-%d').date(),
        help=
        "Start date to begin considering diveable conditions in the format yyyy-mm-dd"
    )

    parser.add_argument(
        "--sites",
        default='',
        type=str,
        help="Comma-delimited list of dive sites from dive_sites.json "
        "({})".format(listDiveSites(data['sites'])))
    args = parser.parse_args()

    # Parse site list - allow indeterminate whitespace and capitals:
    # capitalize each word and rejoin with single spaces.
    SITES = []
    for item in args.sites.split(','):
        words = item.split()
        if not words:  # skip empty or whitespace-only entries
            continue
        SITES.append(' '.join(word.capitalize() for word in words))

    # ---------------------------------- MANUALLY CONFIGURABLE PARAMETERS -------------------------------------------------------
    if not SITES:
        SITES = None  # Consider all sites
        # SITES = append(SITES, 'Salt Creek')
        # SITES = append(SITES, 'Point Hudson')
        # SITES = append(SITES, 'Lime Kiln Point')
        # SITES = append(SITES, 'Green Point')
        # SITES = append(SITES, 'Skyline Wall Rosario')
        # SITES = append(SITES, 'Skyline Wall Allan Pass')
        SITES = append(SITES, 'Skyline Wall')
        # SITES = append(SITES, 'Deception Pass')
        # SITES = append(SITES, 'Keystone Jetty')
        # SITES = append(SITES, 'Possession Point')
        # SITES = append(SITES, 'Mukilteo')
        # SITES = append(SITES, 'Edmonds Underwater Park')
        # SITES = append(SITES, 'Alki Junkyard')
        # SITES = append(SITES, 'Saltwater State Park')
        # SITES = append(SITES, 'Sunrise Beach')
        # SITES = append(SITES, 'Day Island Wall')
        # SITES = append(SITES, 'Fox Island Bridge')
        # SITES = append(SITES, 'Fox Island Bridge Hale')
        # SITES = append(SITES, 'Fox Island East Wall')
        # SITES = append(SITES, 'Fox Island East Wall Gibson')
        # SITES = append(SITES, 'Titlow')
        # SITES = append(SITES, 'Waterman Wall')
        # SITES = append(SITES, 'Warren Avenue Bridge')
        # SITES = append(SITES, 'Agate Pass')

    possibleDiveDays = [  # Specify dates
        # dt(2020, 2, 17),
    ]

    args.START = dt(2022, 1, 10)
    # args.START = dt.now()
    args.DAYS_IN_FUTURE = 10
    args.IGNORE_MAX_SPEED = False
    args.INCLUDE_WORKDAYS = True
    # args.INCLUDE_NIGHT = True
    # args.SORT = True
    # ------------------------------------------------------------------------------------------------------------------

    # Create list of dates based on given start date
    if not possibleDiveDays:
        if args.INCLUDE_WORKDAYS:
            possibleDiveDays = getAllDays(args.DAYS_IN_FUTURE, args.START)
        else:
            possibleDiveDays = getNonWorkDays(args.DAYS_IN_FUTURE, args.START)

    # Parameter validation
    if not possibleDiveDays:
        print(
            'No dive days possible with current params. Is start date a workday and includeworkdays flag is not set?'
        )
        parser.print_help()
        exit(1)
    if not SITES:
        print('No dive sites were specified')
        parser.print_help()
        exit(2)
    for site in SITES:
        if not isDiveSite(site, data['sites']):
            print('{} is not a valid dive site'.format(site))
            parser.print_help()
            exit(3)

    # Get slacks for each site and each day and print the data and splash times
    for siteData in data['sites']:
        if SITES and siteData['name'] not in SITES:
            continue
        station = getStation(data['stations'], siteData['data'])

        # Two prediction sources: XTide (m) and NOAA (m2), printed side by side.
        m = intp.TBoneSCInterpreter(station['url_xtide'])
        m2 = intp.NoaaInterpreter(station['url_noaa'])

        print('{} - {} - {}'.format(siteData['name'], siteData['data'],
                                    station['coords']))
        print(m.getDayUrl(m.baseUrl, possibleDiveDays[0]))
        print(m2.getDayUrl(m2.baseUrl, possibleDiveDays[0]))

        if args.SORT:
            slacks = []
            for day in possibleDiveDays:
                slacks.extend(m.getSlacks(day, args.INCLUDE_NIGHT))
            # sort by the sum of the max current speeds from weakest to strongest
            slacks.sort(key=lambda x: abs(x.ebbSpeed) + abs(x.floodSpeed))
            printDiveDay(slacks, siteData, not args.IGNORE_NON_DIVEABLE,
                         args.IGNORE_MAX_SPEED, "XTide")

            slacks = []
            for day in possibleDiveDays:
                slacks.extend(m2.getSlacks(day, args.INCLUDE_NIGHT))
            slacks.sort(key=lambda x: abs(x.ebbSpeed) + abs(x.floodSpeed))
            printDiveDay(slacks, siteData, not args.IGNORE_NON_DIVEABLE,
                         args.IGNORE_MAX_SPEED, "NOAA")
        else:
            for day in possibleDiveDays:
                slacks = m.getSlacks(day, args.INCLUDE_NIGHT)
                canDive = printDiveDay(slacks, siteData,
                                       not args.IGNORE_NON_DIVEABLE,
                                       args.IGNORE_MAX_SPEED, "XTide")

                slacks = m2.getSlacks(day, args.INCLUDE_NIGHT)
                canDive |= printDiveDay(slacks, siteData,
                                        not args.IGNORE_NON_DIVEABLE,
                                        args.IGNORE_MAX_SPEED, "NOAA")

                if not canDive:
                    print('\tNot diveable on {}'.format(
                        dt.strftime(day, intp.DATEFMT)))
Esempio n. 5
0
def _printDiffStats(diffs):
    """Print avg/median/min/max/std-dev of station1-to-station2 slack time
    differences (minutes) for a non-empty numpy array of diffs."""
    print(
        "\t{:.2f} avg slack difference from station1 to station2 in minutes"
        .format(np.average(diffs)))
    print(
        "\t{:.2f} median slack difference from station1 to station2 in minutes"
        .format(np.median(diffs)))
    print(
        "\t{:.2f} min slack difference from station1 to station2 in minutes"
        .format(np.min(diffs)))
    print(
        "\t{:.2f} max slack difference from station1 to station2 in minutes"
        .format(np.max(diffs)))
    print(
        "\t{:.2f} std deviation in slack difference from station1 to station2 in minutes"
        .format(np.std(diffs)))


def main():
    """Compare slack-time predictions between two current stations.

    Pairs up slacks from the two configured stations starting at START and
    prints statistics (via _printDiffStats) of the time offsets between
    corresponding slacks, split by slack-before-ebb vs slack-before-flood.
    """
    # START = dt.now()
    START = dt(2020, 11, 1)
    TRIM_NOAA = True

    # STATION1 = "Strait of Juan de Fuca Entrance, Washington Current"
    # STATION1 = "Juan De Fuca Strait (East), British Columbia Current"
    # STATION1 = "Rosario Strait, Washington Current"
    # STATION1 = "Deception Pass (narrows), Washington Current"
    # STATION1 = "Admiralty Inlet (off Bush Point), Washington Current"
    # STATION1 = "Alki Point, 0.3 mile west of, Puget Sound, Washington Current"
    # STATION1 = "West end, Rich Passage, Puget Sound, Washington Current"
    # STATION1 = "Agate Passage, north end, Puget Sound, Washington Current"
    # STATION1 = "The Narrows, north end (midstream), Washington Current"
    STATION1 = "South end (midstream), The Narrows, Puget Sound, Washington Current"
    # STATION1 = "Hale Passage, west end, Puget Sound, Washington Current"

    # NOAA1 = True
    NOAA1 = False

    # STATION2 = "Strait of Juan de Fuca Entrance, Washington Current"
    # STATION2 = "Juan De Fuca Strait (East), British Columbia Current"
    # STATION2 = "Rosario Strait, Washington Current"
    # STATION2 = "Deception Pass (narrows), Washington Current"
    # STATION2 = "Admiralty Inlet (off Bush Point), Washington Current"
    # STATION2 = "Alki Point, 0.3 mile west of, Puget Sound, Washington Current"
    # STATION2 = "West end, Rich Passage, Puget Sound, Washington Current"
    # STATION2 = "Agate Passage, north end, Puget Sound, Washington Current"
    # STATION2 = "The Narrows, north end (midstream), Washington Current"
    # STATION2 = "South end (midstream), The Narrows, Puget Sound, Washington Current"
    # STATION2 = "Hale Passage, west end, Puget Sound, Washington Current"
    STATION2 = "Gibson Point, 0.8 mile east of, Puget Sound, Washington Current"

    # NOAA2 = True
    NOAA2 = False

    data = json.loads(open(data_collect.absName('dive_sites.json')).read())

    station1 = dive_plan.getStation(data['stations'], STATION1)
    if NOAA1:
        m1 = intp.NoaaInterpreter(station1['url_noaa'])
    else:
        m1 = intp.TBoneSCInterpreter(station1['url_xtide'])

    station2 = dive_plan.getStation(data['stations'], STATION2)
    if NOAA2:
        m2 = intp.NoaaInterpreter(station2['url_noaa'])
    else:
        m2 = intp.TBoneSCInterpreter(station2['url_xtide'])

    slacks1 = m1.allSlacks(START)
    slacks2 = m2.allSlacks(START)

    if len(slacks1) != len(slacks2) and (
            NOAA1 or NOAA2
    ) and TRIM_NOAA:  # one source is NOAA and one is MobileGeographics
        print("Trimming excess NOAA slacks")
        if NOAA1:
            slacks1 = slacks1[:len(slacks2)]
        elif NOAA2:
            slacks2 = slacks2[:len(slacks1)]

    if len(slacks1) != len(slacks2):
        print(
            "Pick a different day or add some fancy comparison - number of slacks don't match"
        )
        source = "NOAA" if NOAA1 else "XTide"
        print('{} slacks from {} station for location {}'.format(
            len(slacks1), source, STATION1))
        source = "NOAA" if NOAA2 else "XTide"
        print('{} slacks from {} station for location {}'.format(
            len(slacks2), source, STATION2))
        exit(0)

    # Pairwise time differences in minutes, bucketed by slack direction.
    beforeEbbDiffs = []
    beforeFloodDiffs = []
    for i, s1 in enumerate(slacks1):
        s2 = slacks2[i]
        assert s2.slackBeforeEbb == s1.slackBeforeEbb

        diff = (s1.time - s2.time).total_seconds() / 60

        if s1.slackBeforeEbb:
            beforeEbbDiffs.append(diff)
        else:
            beforeFloodDiffs.append(diff)

    print("Before ebb diffs: ", beforeEbbDiffs)
    print("Before flood diffs: ", beforeFloodDiffs)

    beforeEbbDiffs = np.array(beforeEbbDiffs)
    beforeFloodDiffs = np.array(beforeFloodDiffs)

    print("{} slacks before Ebb".format(len(beforeEbbDiffs)))
    if len(beforeEbbDiffs) > 0:
        _printDiffStats(beforeEbbDiffs)

    print("{} slacks before Flood".format(len(beforeFloodDiffs)))
    if len(beforeFloodDiffs) > 0:
        _printDiffStats(beforeFloodDiffs)
Esempio n. 6
0
def main():
    """Analyze recorded meetup dives (already annotated with slacks).

    Classifies dives by site, then for each selected site prints the dives,
    their slacks, predicted entry times, and aggregate slack metrics.
    """
    FILENAME = 'dive_meetup_data_old_format_with_slacks.csv'

    PRINT_LOCATION_CLASSIFICATION = False
    PRINT_DIVE_DETAILS = True

    SITES = None  # Consider all sites
    SITES = dive_plan.append(SITES, 'Salt Creek')
    # SITES = dive_plan.append(SITES, 'Deception Pass')
    # SITES = dive_plan.append(SITES, 'Skyline Wall')
    # SITES = dive_plan.append(SITES, 'Keystone Jetty')
    # SITES = dive_plan.append(SITES, 'Possession Point')
    # SITES = dive_plan.append(SITES, 'Mukilteo')
    # SITES = dive_plan.append(SITES, 'Edmonds Underwater Park')
    # SITES = dive_plan.append(SITES, 'Three Tree North')
    # SITES = dive_plan.append(SITES, 'Alki Pipeline')
    # SITES = dive_plan.append(SITES, 'Saltwater State Park')
    # SITES = dive_plan.append(SITES, 'Day Island Wall')
    # SITES = dive_plan.append(SITES, 'Sunrise Beach')
    # SITES = dive_plan.append(SITES, 'Fox Island Bridge')
    # SITES = dive_plan.append(SITES, 'Fox Island East Wall')
    # SITES = dive_plan.append(SITES, 'Titlow')
    # SITES = dive_plan.append(SITES, 'Waterman Wall')
    # SITES = dive_plan.append(SITES, 'Agate Pass')

    print('Extracting dives from data file', FILENAME)
    dives, slacks = getDives(FILENAME)

    if not slacks:
        print(
            'No slacks in {}, make sure to run data_add_slacks.py first. Exiting.'
            .format(FILENAME))
        exit(0)

    print('Classifying dive sites')
    results = refineDives(dives)

    # Print how the dives were classified into locations
    if PRINT_LOCATION_CLASSIFICATION:
        for site, vals in results.items():
            print(site)
            for dive in vals:
                print('\t', dive)

    print('Getting slack metrics for each dive site')
    data = json.loads(open(absName('dive_sites.json')).read())
    for site, sitedives in results.items():
        siteData = getSiteData(SITES, site, data)
        if siteData is None:  # site not selected or unknown — skip it
            continue
        station = dive_plan.getStation(data['stations'], siteData['data'])
        print('{} - {}\n{} - {}'.format(siteData['name'], siteData['data'],
                                        station['url'], station['coords']))

        # Print each dive, its corresponding slack, and predicted entry time
        if PRINT_DIVE_DETAILS:
            for dive in sitedives:
                print('\t', dive)
                print('\t\t', dive.slack)
                # Exit time is not shown here, so discard it.
                minCurrentTime, markerBuoyEntryTime, entryTime, _ = dive_plan.getEntryTimes(
                    dive.slack, siteData)
                minCurrentTime = dt.strftime(minCurrentTime,
                                             MEETUP_TIME_FORMAT)
                markerBuoyEntryTime = dt.strftime(markerBuoyEntryTime,
                                                  MEETUP_TIME_FORMAT)
                entryTime = dt.strftime(entryTime, MEETUP_TIME_FORMAT)
                print(
                    '\t\tMarkerBuoyEntryTime = {}   MyEntryTime = {}   MinCurrentTime = {}'
                    .format(markerBuoyEntryTime, entryTime, minCurrentTime))

        printSlackMetrics(sitedives)
Esempio n. 7
0
def main():
    """Find days on which each configured pair of "must do" dive sites is
    diveable — either the same site twice, or both sites back-to-back with
    non-overlapping dive windows (printing the transfer time between them).
    """

    # ---------------------------------- CONFIGURABLE PARAMETERS -----------------------------------------------------------
    # START = dt.now()
    START = dt(2020, 11, 30)  # date to begin considering diveable conditions
    DAYS_IN_FUTURE = 90  # number of days after START to consider

    FILTER_NON_WORKDAYS = False  # only consider diving on weekends and holidays
    FILTER_DAYLIGHT = True  # only consider slacks that occur during daylight hours

    possibleDiveDays = [  # Specify dates
        # dt(2019, 3, 31),
        # dt(2019, 3, 16),
        # dt(2019, 3, 3)
    ]

    NOAA = True
    # ----------------------------------------------------------------------------------------------------------------------

    if not possibleDiveDays:
        if FILTER_NON_WORKDAYS:
            possibleDiveDays = dive_plan.getNonWorkDays(DAYS_IN_FUTURE, START)
        else:
            possibleDiveDays = dive_plan.getAllDays(DAYS_IN_FUTURE, START)

    data = json.loads(open(data_collect.absName('dive_sites.json')).read())

    # Iterate the configured site pairs directly instead of by index.
    for pair in data['must_do_dives']:
        site1 = getSite(data['sites'], pair['name1'])
        site2 = getSite(data['sites'], pair['name2'])

        station1 = dive_plan.getStation(data['stations'], site1['data'])
        station2 = dive_plan.getStation(data['stations'], site2['data'])

        if NOAA:
            m1 = intp.NoaaInterpreter(station1['url_noaa'])
            m2 = intp.NoaaInterpreter(station2['url_noaa'])
        else:
            m1 = intp.TBoneSCInterpreter(station1['url_xtide'])
            m2 = intp.TBoneSCInterpreter(station2['url_xtide'])

        print('{} - {}'.format(site1['name'], site2['name']))

        for day in possibleDiveDays:
            if site1 == site2:
                # Same site listed twice: diveable day needs two slack windows.
                slacks = m1.getSlacks(day, not FILTER_DAYLIGHT)
                diveableSlacks = getDiveable(slacks, site1)
                if len(diveableSlacks) >= 2:
                    for s, info in diveableSlacks:
                        dive_plan.printDive(s, site1, info)
            else:
                slacks1 = m1.getSlacks(day, not FILTER_DAYLIGHT)
                diveableSlacks1 = getDiveable(slacks1, site1)

                slacks2 = m2.getSlacks(day, not FILTER_DAYLIGHT)
                diveableSlacks2 = getDiveable(slacks2, site2)

                if len(diveableSlacks2) >= 2 or len(diveableSlacks1) >= 2:
                    if len(diveableSlacks1) >= 2:
                        print('WOW: {} is diveable twice today!'.format(site1['name']))
                    else:
                        print('WOW: {} is diveable twice today!'.format(site2['name']))
                    for s, info in diveableSlacks1:
                        dive_plan.printDive(s, site1, info)
                    for s, info in diveableSlacks2:
                        dive_plan.printDive(s, site2, info)

                # Both sites must be diveable at least once this day.
                if not diveableSlacks1:
                    continue
                if not diveableSlacks2:
                    continue

                # each site is diveable once this day, print them if they don't overlap with each other
                _, _, entryTime1, exitTime1 = dive_plan.getEntryTimes(diveableSlacks1[0][0], site1)
                _, _, entryTime2, exitTime2 = dive_plan.getEntryTimes(diveableSlacks2[0][0], site2)

                latest_start = max(entryTime1, entryTime2)
                earliest_end = min(exitTime1, exitTime2)

                # If the later entry begins after the earlier exit, the windows
                # don't overlap and both dives fit with a transfer gap.
                if latest_start > earliest_end:
                    print("BOTH SITES DIVEABLE: transfer time = {0:0.2f} minutes".format((latest_start-earliest_end).total_seconds() / 60))
                    print("{} Diveable".format(site1['name']))
                    dive_plan.printDive(diveableSlacks1[0][0], site1, diveableSlacks1[0][1])
                    print("{} Diveable".format(site2['name']))
                    dive_plan.printDive(diveableSlacks2[0][0], site2, diveableSlacks2[0][1])
                else:
                    print("{0}: Both sites diveable but times overlap by {1:0.2f} minutes".format(
                        dt.strftime(day, intp.DATEFMT), (earliest_end-latest_start).total_seconds() / 60))