Example No. 1
def get_predictions_for_stat(stat, startDate, endDate, training=True):
    trainRange = ml.getDateRangeArr(
        "2015-11-04", "2015-11-06"
    )  # initial training: first three days when all data is available
    retrainRange = ml.getDateRangeArr(startDate, endDate)  # last date: 4/18/16

    # create & train model for stat type:
    if training:
        createResponse = google.createAndTrainNewModelWithDateRange(
            trainRange, stat)
        google.waitForModelToRetrain(stat)

    # pull predictions for date range & stat type, retain each iteration
    getAndPostPredictionsForDates(retrainRange, stat)

    return "DONE!"
Example No. 2
def getGamesForDateRange(startDate, endDate):
    dateArr = ml.getDateRangeArr(startDate, endDate)

    allGames = []

    for date in dateArr:
        games = api.getGamesForDate(date)
        allGames.extend(games)

    return allGames
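
A hedged usage sketch, assuming api.getGamesForDate returns a list of game records for a single date (the api module is not shown on this page):

# pull every game in a one-week window and count them
weekOfGames = getGamesForDateRange("2015-11-04", "2015-11-10")
print(len(weekOfGames))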
Example No. 3
        salariesForDate = fc.getSalaryDataForDate(date, site, playerData,
                                                  driver)

        # add salary & missing player data
        allData['allSalaries'].extend(salariesForDate['currentPlayerSalaries'])
        allData['allNewIds'].extend(salariesForDate['missingPlayerIds'])

    newSalariesResponse = api.postSalaries(allData['allSalaries'])
    # UNCOMMENT THE TWO LINES BELOW TO ALSO POST NEW FC IDS
    # dedupedNewIds = [i for n, i in enumerate(allData["allNewIds"]) if i not in allData["allNewIds"][n + 1:]]
    # newIdsResponse = api.postNewIds(dedupedNewIds)

    return allData


salaryDates = ml.getDateRangeArr('2017-03-15', '2017-04-12')
allUpdates = getSalariesForDatesAndPostToApi(salaryDates, "fanduel")
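
The commented-out dedupe above keeps the last occurrence of each id via a quadratic list comprehension. A sketch of an order-preserving alternative that keeps the first occurrence instead, assuming the ids are hashable (this helper is not part of the original code):

def dedupePreservingOrder(items):
    # keep the first occurrence of each item; assumes items are hashable
    seen = set()
    deduped = []
    for item in items:
        if item not in seen:
            seen.add(item)
            deduped.append(item)
    return deduped

# dedupedNewIds = dedupePreservingOrder(allData["allNewIds"])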


# INCOMPLETE: this function is used for manual bulk scraping of position changes
def getPositionChangesForDatesAndWriteToCsv(dateArr):
    driver = webdriver.PhantomJS()
    driver.set_window_size(1124, 850)

    playerData = api.getCurrentPlayerData()

    site = "fanduel"  # fanduel or draftkings?

    allData = {'allPositionUpdates': []}

    # keeps track of dupes
Example No. 4
import nba.ops.mlDataPrep as ml

trainDates = ml.getDateRangeArr('2015-11-04', '2016-01-25')
stat = 'tpt'

ml.pullAzureTrainingDataAndWriteToCsv(trainDates, stat)
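
ml.pullAzureTrainingDataAndWriteToCsv is not shown on this page. A rough sketch of the general shape such a function could take, assuming a per-date fetch callable that returns a list of dicts sharing one schema (fetchRowsForDate and outPath here are hypothetical, not part of the nba.ops module):

import csv

def pullTrainingDataAndWriteToCsv(dateArr, stat, fetchRowsForDate, outPath="training.csv"):
    # fetchRowsForDate(date, stat) is assumed to return a list of dicts with a fixed schema
    allRows = []
    for date in dateArr:
        allRows.extend(fetchRowsForDate(date, stat))

    if not allRows:
        return 0

    with open(outPath, "w", newline="") as csvFile:
        writer = csv.DictWriter(csvFile, fieldnames=list(allRows[0].keys()))
        writer.writeheader()
        writer.writerows(allRows)

    return len(allRows)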



Example No. 5
            # calculate actual pts for team
            actualPoints = getActualPointsForTeam(team)

            # get threshold for date (just double up for now)
            cutoff = cutoffs[dateToTest]["Double Up"]

            # inc totalOver count if team is over threshold
            if predicted_points > 0 and actualPoints > 0:
                print("DATE", dateToTest, "PREDICTED", predicted_points,
                      "ACTUAL", actualPoints, "SALARY", team_salary, "CUTOFF",
                      cutoff)
                totalTested += 1
                if actualPoints > cutoff:
                    totalOver += 1

        except (KeyError, TypeError) as e:
            print("ERROR OPTIMIZING", str(dateToTest))
            print(e)

    return float(totalOver / totalTested)


dates = ml.getDateRangeArr('2017-03-20', '2017-04-12')
source = 'GOOGLE'
cap = 60000
minimum = 285

print(getFinalAnalysisForDates(dates, source, cap, minimum))
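
Note that return float(totalOver / totalTested) above raises ZeroDivisionError if every date falls into the except branch and totalTested is still 0. A small guarded sketch of that final ratio (not part of the original function):

def overRate(totalOver, totalTested):
    # avoid ZeroDivisionError when no dates could be tested
    if totalTested == 0:
        return 0.0
    return totalOver / totalTested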
Example No. 6
def getTournamentAverageScores():

    driver = webdriver.PhantomJS()
    driver.set_window_size(1124, 850)

    baseUrl = "https://www.fantasycruncher.com/lineup-rewind/fanduel/NBA/"
    dates = ml.getDateRangeArr('2017-03-15', '2017-04-12')
    # tournamentTypes = ["Double Up", "Triple Up", "Quintuple Up"]
    bigTerm = "BIG"

    cutoffs = {}

    for date in dates:
        fullUrl = baseUrl + date

        cutoffsForDate = {
            "Double Up": [],
            "Double Up BIG": [],
            "Triple Up": [],
            "Triple Up BIG": [],
            "Quintuple Up": [],
            "Quintuple Up BIG": []
        }

        try:
            driver.get(fullUrl)
            print("Getting..", date)

            closeLogin = waits.byClass('close-login-alert', driver)
            closeLogin.click()

            openTab = waits.byClass("open-tournament-links", driver)
            openTab.click()

            tableCont = waits.byId("tournament-links-table-container", driver)

            rawHtml = driver.page_source

            tableRowsSelector = 'table.tournament-links-table tbody tr'

            tree = html.fromstring(rawHtml)
            rows = tree.cssselect(tableRowsSelector)

            for row in rows:
                tournamentName = row[0].text_content()
                # bucket the cutoff (column 8) by tournament type, splitting out "BIG" contests
                for tournamentType in ("Double Up", "Triple Up", "Quintuple Up"):
                    if tournamentType in tournamentName:
                        if bigTerm in tournamentName:
                            key = tournamentType + " BIG"
                        else:
                            key = tournamentType
                        cutoffsForDate[key].append(float(row[8].text_content()))
                        break

        except Exception as e:
            print("ERROR SCRAPING", date)
            print(e)
            continue

        cutoffs[date] = {}
        for tournamentType, cutoffArr in cutoffsForDate.items():
            try:
                average = sum(cutoffArr) / float(len(cutoffArr))
            except ZeroDivisionError:
                average = None

            cutoffs[date][tournamentType] = average

    filename = jsonData.LOCAL_DATA_PATH + "2017_tournament_results.json"
    with open(filename, 'w') as fp:
        json.dump(cutoffs, fp)

    driver.quit()

    return "DONE"