Example #1
def update_rosters(gsheet_id, sheet_name='nhl_rosters', savefile=False):
    # Pull current rosters, then either save them locally or push them to the Google Sheet.
    rosters = util.get_rosters()

    if savefile:
        util.save_csv(sheet_name + '.csv', rosters)
    else:
        push_update_to_sheet(rosters, gsheet_id, sheet_name)
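
The `push_update_to_sheet` helper is not shown in this example. Below is a minimal sketch of what it could look like, assuming the sheet is written with the gspread and gspread-dataframe packages; the function body is an assumption, not the original implementation.

import gspread
from gspread_dataframe import set_with_dataframe

def push_update_to_sheet(df, gsheet_id, sheet_name):
    # Hypothetical implementation; the original helper is not part of this example.
    client = gspread.service_account()                    # service-account credentials from the default path
    worksheet = client.open_by_key(gsheet_id).worksheet(sheet_name)
    worksheet.clear()                                      # drop stale rows before writing
    set_with_dataframe(worksheet, df)                      # write the DataFrame starting at cell A1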
Example #2
def update_stats(endYearOfSeason, regularSeason, gsheet_id, sheet_name='nhl_leaders', savefile=False):

    # Get skater and goalie stats, combine them in a dataframe.
    skaters = get_skater_stats(endYearOfSeason, regularSeason)
    goalies = get_goalie_stats(endYearOfSeason, regularSeason)
    all_stats = pd.concat([skaters, goalies], axis=0)
    all_stats = all_stats.sort_values('playerName')
    all_stats.fillna(0, inplace=True)

    if savefile:
        util.save_csv(sheet_name + '.csv', all_stats)
    else:
        push_update_to_sheet(all_stats, gsheet_id, sheet_name)
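
A hedged usage sketch: the sheet ID below is a placeholder, and `get_skater_stats` / `get_goalie_stats` are assumed to return pandas DataFrames that share a `playerName` column, since the function concatenates them and sorts on that column.

# Hypothetical invocation; with savefile=True the stats are written to nhl_leaders.csv
# and the placeholder sheet ID is never contacted.
update_stats(endYearOfSeason=2021, regularSeason=True,
             gsheet_id="YOUR_SHEET_ID", sheet_name='nhl_leaders', savefile=True)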
Example #3
def main():
    start_season = 1995
    end_season = 2020
    regular_season = True

    # Drafts only need to be refreshed once a year; flip this to True to re-scrape them.
    if False:
        drafts = util.get_drafts(start_season, end_season)  # Slow: scrapes every draft year in the range
        util.save_csv("drafts.csv", drafts)
    else:
        drafts = util.load_csv("drafts.csv")

    if True:  # Flip to False to reuse the previously saved players.csv and rosters.csv
        players = get_career_stats(start_season, end_season, regular_season)
        util.save_csv("players.csv", players)

        rosters = util.get_rosters()
        util.save_csv("rosters.csv", rosters)
    else:
        players = util.load_csv("players.csv")
        rosters = util.load_csv("rosters.csv")

    drafts = update_team_names(drafts)
    drafts = drafts.sort_values(["team.name", "year", "round"], ascending=[True, False, True])

    # Merge all data into one dataframe
    drafts['name_lower'] = drafts['prospect.fullName'].str.lower()
    players['name_lower'] = players['playerName'].str.lower()
    rosters['name_lower'] = rosters['fullName'].str.lower()
    draft_data = pd.merge(drafts, players, how="left", on="name_lower", sort=False, suffixes=("", "_x"))
    draft_data = pd.merge(draft_data, rosters, how="left", on="name_lower", sort=False, suffixes=("", "_y"))

    # Update positions and set statuses for each filter in the visualization, then drop unneeded columns.
    draft_data = set_statuses(draft_data)
    draft_data = clean_data(draft_data)
    draft_data = reduce_columns(draft_data)

    util.save_csv("draft_data.csv", draft_data)
Example #4
beta = []

# Sound velocities (m/s)
c = {'Water': 1480,
     'Stainless Steel': 5800,
     'Air': 330,
     'Polystyrene': 2400}


if not continue_from_last:
    # Get path to save simulation results
    paths = utilities.get_paths("./results/")
    print("#" * 40)
    print('Saving initial conditions')
    # Saving initial conditions
    utilities.save_csv(paths[2], iteration, {key: particles[key] for key in fluid_array})
    utilities.save_moving_vtk(paths[0], iteration, {key: particles[key] for key in fluid_array})
    try:
        utilities.save_boundary_vtk(paths[0], {key: particles[key] for key in boundary_array})
    except Exception:
        # Boundary particles may be absent; skip the boundary VTK in that case.
        pass
    utilities.add_to_group(paths[0], iteration, time, paths[1])
else:
    # Resuming a previous run: reuse its vtk/ and csv/ output folders.
    paths = [pull_last[2] + "/vtk", pull_last[3], pull_last[2] + "/csv"]

print("#"*40)

# Stop when simulation time reaches final time
while time < final_time:

    # Cleaning Force fields
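
The `utilities.get_paths` helper that sets up the results directories is not shown. Below is a minimal sketch of one possible layout, inferred from how `paths[0]` (VTK output), `paths[1]` (group file) and `paths[2]` (CSV output) are used above; the directory names and the group-file format are assumptions.

import os
import time as _time  # aliased so it does not clash with the simulation-time variable `time` used above

def get_paths(results_root):
    # Hypothetical stand-in for utilities.get_paths: create a timestamped run directory
    # with vtk/ and csv/ subfolders and return (vtk_path, group_file, csv_path).
    run_dir = os.path.join(results_root, _time.strftime("%Y%m%d_%H%M%S"))
    vtk_path = os.path.join(run_dir, "vtk")
    csv_path = os.path.join(run_dir, "csv")
    os.makedirs(vtk_path, exist_ok=True)
    os.makedirs(csv_path, exist_ok=True)
    group_file = os.path.join(vtk_path, "group.pvd")  # ParaView collection file (assumed format)
    return vtk_path, group_file, csv_path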
Example #5
def do_linear_search(test=False, test_dim=32):
    """
    Linearly search over patch dimensions, timing EDSR super-resolution and
    recording GPU memory at each dimension. In normal mode the search covers
    dimensions 1..max_dim (from config.toml) and saves plots plus a summary CSV;
    in test mode only `test_dim` is measured and nothing is saved.

    Returns
    -------
    None.

    """
    logger = ut.get_logger()

    device = "cuda"
    model_name = "EDSR"
    config = toml.load("../config.toml")
    run = config["run"]
    scale = int(config["scale"]) if config["scale"] else 4
    # device information
    _, device_name = ut.get_device_details()
    total, _, _ = ut.get_gpu_details(
        device, "\nDevice info:", logger, print_details=False
    )
    log_message = (
        "\nDevice: "
        + device
        + "\tDevice name: "
        + device_name
        + "\tTotal memory: "
        + str(total)
    )
    logger.info(log_message)

    ut.clear_cuda(None, None)

    state = "Before loading model: "
    total, used, _ = ut.get_gpu_details(device, state, logger, print_details=True)

    model = md.load_edsr(device=device)

    state = "After loading model: "
    total, used, _ = ut.get_gpu_details(device, state, logger, print_details=True)

    # max_dim was previously read from temp_max_dim.txt; it now comes from the config loaded above.
    max_dim = int(config["max_dim"])
    if not test:
        detailed_result, memory_used, memory_free = result_from_dimension_range(
            device, logger, config, model, 1, max_dim
        )
    else:
        detailed_result, memory_used, memory_free = result_from_dimension_range(
            device, logger, config, model, test_dim, test_dim
        )
    if not test:
        # Aggregate mean and std of time and memory across the measured dimensions.
        mean_time, std_time = ut.get_mean_std(detailed_result)
        mean_memory_used, std_memory_used = ut.get_mean_std(memory_used)
        mean_memory_free, std_memory_free = ut.get_mean_std(memory_free)

        # make folder for saving results
        plt_title = "Model: {} | GPU: {} | Memory: {} MB".format(
            model_name, device_name, total
        )
        date = "_".join(str(time.ctime()).split())
        date = "_".join(date.split(":"))
        foldername = date
        os.mkdir("results/" + foldername)
        # plot data
        ut.plot_data(
            foldername,
            "dimension_vs_meantime",
            mean_time,
            "Dimensionn of Patch(nxn)",
            "Mean Processing Time: LR -> SR, Scale: {} ( {} runs )".format(scale, run),
            mode="mean time",
            title=plt_title,
        )
        ut.plot_data(
            foldername,
            "dimension_vs_stdtime",
            std_time,
            "Dimension n of Patch(nxn)",
            "Std of Processing Time: LR -> SR, Scale: {} ( {} runs )".format(
                scale, run
            ),
            mode="std time",
            title=plt_title,
        )
        ut.plot_data(
            foldername,
            "dimension_vs_meanmemoryused",
            mean_memory_used,
            "Dimension n of Patch(nxn)",
            "Mean Memory used: LR -> SR, Scale: {} ( {} runs )".format(scale, run),
            mode="mean memory used",
            title=plt_title,
        )
        ut.plot_data(
            foldername,
            "dimension_vs_stdmemoryused",
            std_memory_used,
            "Dimension n of Patch(nxn)",
            "Std Memory Used: LR -> SR, Scale: {} ( {} runs )".format(scale, run),
            mode="std memory used",
            title=plt_title,
        )
        ut.plot_data(
            foldername,
            "dimension_vs_meanmemoryfree",
            mean_memory_free,
            "Dimension n of Patch(nxn)",
            "Mean Memory Free: LR -> SR, Scale: {} ( {} runs )".format(scale, run),
            mode="mean memory free",
            title=plt_title,
        )
        ut.plot_data(
            foldername,
            "dimension_vs_stdmemoryfree",
            std_memory_free,
            "Dimension n of Patch(nxn)",
            "Std Memory Free: LR -> SR, Scale: {} ( {} runs )".format(scale, run),
            mode="std memory free",
            title=plt_title,
        )
        # save data
        ut.save_csv(
            foldername,
            "total_stat",
            device,
            device_name,
            total,
            mean_time,
            std_time,
            mean_memory_used,
            std_memory_used,
            mean_memory_free,
            std_memory_free,
        )
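
A hedged usage sketch: calling the function in test mode measures a single patch dimension and skips the plotting and CSV branch, which is a quick way to confirm the model and GPU bookkeeping work before a full sweep. The calls below illustrate typical use and are not part of the original example.

# Hypothetical invocation: one 32x32 patch only, no plots or CSV written.
do_linear_search(test=True, test_dim=32)

# Full sweep over dimensions 1..max_dim from ../config.toml, saving plots and the total_stat CSV.
do_linear_search()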