Example #1
def fetch_stats(request):
    if request == "about":
        return discord.Embed(
            title="About this bot",
            description=(
                "A bot made by NSNull#6107, built on the open-source reddit bot "
                "HowStat by @pranavrc. Source code is available at "
                "[jamiebishop/discord-cricket-stat-bot]"
                "(https://github.com/jamiebishop/discord-cricket-stat-bot)"),
            color=discord.Color.green())

    # Create a mapper instance
    init = Mapper()

    # Create an URL mapping using the input request.
    try:
        mapped = init.map_string(request)
    except Exception:
        return build_error_embed("Unable to parse your command.")

    # Find the player using the player name in the request.
    try:
        player_url = PlayerFinder(init.player_name)
    except Exception:
        return build_error_embed(
            "Sorry, the service seems to be unavailable right now.")

    # Scrape and parse the statistics for the corresponding player.
    try:
        zeroed_in = player_url.zero_in()
        if not player_url.test_player:
            base_url = zeroed_in.replace("class=11;", "")
        else:
            base_url = zeroed_in.replace("class=1;", "")
    except Exception:
        return build_error_embed("I couldn't find that, sorry.")

    # Create a Prettifier instance if it's a valid stats url.
    try:
        if base_url[-1] == ";":
            base_url += mapped
            if init.has_type_override:
                base_url = base_url.replace("type=allround;", "")
            prettifier = Prettifier(base_url, player_url.test_player)
        else:
            return build_error_embed("I couldn't find that, sorry.")
    except Exception:
        return build_error_embed("I couldn't find that, sorry.")

    try:
        stat_dict = prettifier.parse_into_dict(init.class_allround)
    except Exception as e:
        print(e)
        return build_error_embed(
            "An unknown error occured. Contact <@507975140567416843> with the command which caused this error."
        )

    return build_embed_fields_from_stats(stat_dict, base_url, init)
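For context, here is a minimal hypothetical sketch (not part of the original bot) of wiring an embed-returning fetch_stats into a discord.py command; the prefix, command name, and token handling are assumptions.

import discord
from discord.ext import commands

# Hypothetical wiring; the message_content intent may also be needed for
# prefix commands, depending on the discord.py version.
bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())

@bot.command(name="howstat")
async def howstat(ctx, *, request: str):
    # fetch_stats is synchronous; fine for quick lookups, otherwise run it
    # in an executor to avoid blocking the event loop.
    await ctx.send(embed=fetch_stats(request))

# bot.run(TOKEN)  # token supplied by the deployer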
Example #2
class MainHandler(object):
    """

    """
    def __init__(self, *apps):
        self.mapper = Mapper()
        self.apps = []
        for app in apps:
            self.add_app(app)

    @dec.wsgify
    def __call__(self, req):
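        # match() is expected to return ((app, func_name), url_kwargs); a falsy
        # first element means no registered route matched the request.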
        _app, _kwargs = self.mapper.match(req)
        if not _app:
            return exc.HTTPNotFound()
        _app, _func_name = _app
        req.environ['_func_name'] = _func_name
        req.environ['_kwargs'] = _kwargs
        return req.get_response(_app)

    def add_app(self, app):
        """

        :param app:
        :return:
        """
        if not is_app(app):
            logger.error('not an app: %s', app)
            raise Exception('not an app: %s' % app)
        app(self)
Example #3
def fetch_stats(request):
    # Create a mapper instance
    init = Mapper()

    # Create an URL mapping using the input request.
    try:
        mapped = init.map_string(request)
    except Exception:
        return "Uh, like..I mean, like..really..like, y'know...What?"

    # Find the player using the player name in the request.
    try:
        player_url = PlayerFinder(init.player_name)
    except Exception:
        return "Sorry, the service seems to be unavailable right now."

    # Scrape and parse the statistics for the corresponding player.
    try:
        zeroed_in = player_url.zero_in()
        if not player_url.test_player:
            base_url = zeroed_in.replace("class=11;", "")
        else:
            base_url = zeroed_in.replace("class=1;", "")
    except Exception:
        return "I couldn't find that, sorry."

    # Create a Prettifier instance if it's a valid stats url.
    try:
        if base_url[-1] == ";":
            base_url += mapped
            prettifier = Prettifier(base_url, player_url.test_player)
        else:
            return base_url
    except Exception:
        return base_url

    # Format the content for a reddit comment.
    try:
        final = prettifier.prettify(init.class_allround)
    except Exception:
        return request + ":\n\n" + "Ouch, nothing to see here, I think. " + \
                "You can check out the [records](%s)." % base_url

    # Url for complete stats.
    elaborate = "Detailed Stats [here.](%s)" % base_url

    return request + ':\n\n' + final + '\n\n' + elaborate
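For context, a minimal hypothetical sketch of driving this comment-string variant from reddit mentions with PRAW; the "howstat" praw.ini section and the bot username are assumptions.

import praw

reddit = praw.Reddit("howstat")  # credentials read from a [howstat] praw.ini section
for mention in reddit.inbox.mentions(limit=25):
    # Assume the request text follows the (hypothetical) bot username in the mention.
    request = mention.body.split("u/HowStatBot", 1)[-1].strip()
    mention.reply(body=fetch_stats(request))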
Example #4
def __init__(self, *apps):
    self.mapper = Mapper()
    self.apps = []
    for app in apps:
        self.add_app(app)
Example #5
import json
import numpy as np
from utils import Mapper, LinearLearning, LowRankLearning, Saver, PendulumEnv

parameters_file = "experiments/exp_1_linear_learning.json"
with open(parameters_file) as j:
    parameters = json.loads(j.read())

mapping = Mapper()
env = PendulumEnv()
saver = Saver()

state_map, state_reverse_map = mapping.get_state_map(
    parameters["step_state"], parameters["decimal_state"])
action_map, action_reverse_map = mapping.get_action_map(
    parameters["step_action"], parameters["decimal_action"])

steps = []
rewards = []
final_mean_reward = []

for i in range(parameters["n_simulations"]):
    lr_learner = LowRankLearning(env=env,
                                 state_set=parameters["state_set"],
                                 state_map=state_map,
                                 action_map=action_map,
                                 state_reverse_map=state_reverse_map,
                                 action_reverse_map=action_reverse_map,
                                 decimal_state=parameters["decimal_state"],
                                 decimal_action=parameters["decimal_action"],
                                 step_state=parameters["step_state"],
Example #6
import gym
import json
import numpy as np
from utils import Mapper, LowRankTD, Saver

parameters_file = "experiments/exp_lr_learning.json"
with open(parameters_file) as j:
    parameters = json.loads(j.read())

mapper = Mapper()
env = gym.make('Acrobot-v1')
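# Lift gym's TimeLimit cap so episode length is governed by max_steps below.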
env._max_episode_steps = np.inf
saver = Saver()

rewards = []
steps = []

for _ in range(parameters["n_simulations"]):

    low_rank_learner = LowRankTD(
        env=env,
        k=parameters["k"],
        mapper=mapper,
        episodes=parameters["episodes"],  # 50000,
        max_steps=parameters["max_steps"],
        epsilon=parameters["epsilon"],
        decay=parameters["decay"],
        alpha=parameters["alpha"],
        gamma=parameters["gamma"])

    low_rank_learner.train()
Example #7
def main():

    input_NPZNB_file_name = "npz-nb.csv"
    input_NBNB_file_name = "nb-nb.csv"
    input_NBAZS_file_name = "nb-azs.csv"
    input_NPZAZS_file_name = "npz-azs.csv"
    input_supply_demand_file_name = "sup-dem.csv"
    out_csv_file_name = "out.csv"
    out_info_file_name = "info.txt"
    
    edgesNPZNB = edges_from_csv(input_NPZNB_file_name)
    edgesNBNB = edges_from_csv(input_NBNB_file_name)
    edgesNBAZS = edges_from_csv(input_NBAZS_file_name)
    edgesNPZAZS = edges_from_csv(input_NPZAZS_file_name)
    supply, demand, capacity = sup_dem_from_csv(input_supply_demand_file_name)

    print("Solving for: numEdges={}".format(len(edgesNPZNB+edgesNBAZS+edgesNPZAZS+edgesNBNB)))
    print("supply ={}, demand ={}".format(supply, demand))

    # Map node labels to integers from 1 to n.
    # The MCMF implementation requires node labels in the range 0..n so that
    # plain matrices can be used for weights, flows, and so on.
    # When writing out the answer, the node-label mapping is inverted back.
    
    mapper = Mapper(1)

    edgesNPZ_NBmapped = [(mapper.mapQ(e[0]), mapper.mapP(e[1]), e[2])
                                                              for e in edgesNPZNB]
    edgesNB_NBmapped = [(mapper.mapP(e[0]), mapper.mapP(e[1]), e[2])
                                                              for e in edgesNBNB]
    edgesNB_AZSmapped = [(mapper.mapP(e[0]), mapper.mapU(e[1]), e[2])
                                                              for e in edgesNBAZS]
    edgesNPZ_AZSmapped = [(mapper.mapQ(e[0]), mapper.mapU(e[1]), e[2])
                                                              for e in edgesNPZAZS]
    
    unique_qs = set(mapper.qs.values())
    all_qs = list(unique_qs)
    unique_us = set(mapper.us.values())
    all_us = list(unique_us)
    all_us = sorted(all_us)  # sort for a deterministic order; unsorted output exposed a hard-to-pin-down bug
    unique_bs = set(mapper.bs.values())
    all_bs = list(unique_bs)
    n = len(all_qs) + len(all_us) + len(all_bs) + 1 + 1
    # The first and last node labels are assigned to the source and the sink.
    src_label = 0
    snk_label = n - 1

    desired_flow = float(sum(demand))
    possible_flow = float(sum(supply))
         
    edgesNPZ_AZSdesired_flow = [(e[0], e[1], desired_flow, e[2])
                                                    for e in edgesNPZ_AZSmapped]
    edgesNPZ_NBdesired_flow = [(e[0], e[1], desired_flow, e[2])
                                                    for e in edgesNPZ_NBmapped]
    edgesNB_AZSdesired_flow = [(e[0], e[1], desired_flow, e[2])
                                                    for e in edgesNB_AZSmapped]

    edgesNB_NBdesired_flow = [
        (e[0], e[1], capacity[i], e[2])
        for i, e in enumerate(edgesNB_NBmapped)
    ]

    edges = edgesNPZ_NBdesired_flow + edgesNB_NBdesired_flow \
            + edgesNB_AZSdesired_flow + edgesNPZ_AZSdesired_flow
    num_edges_in_the_middle = len(edges)
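    # Only these "middle" edges (everything before the source/sink edges that
    # are appended next) are inspected later when the assignments are read
    # back from mcf.edges.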

    for i, q in enumerate(all_qs):
        edges.append((src_label, q, supply[i], 0))  # src -> q (cap: supply[i], cost: 0)
    for i, u in enumerate(all_us):
        edges.append((u, snk_label, demand[i], 0))  # u -> snk (cap: demand[i], cost: 0)

    edge_list = [Edge(e[0], e[1], e[2], e[3]) for e in edges]

    # Minimum-cost maximum flow
    mcf = MCF(n, edge_list)
    flow, min_cost = mcf.min_cost_flow(desired_flow, src_label, snk_label)

    assigned_u_grouped_by_q = {}

    for i in range(num_edges_in_the_middle):
        e = mcf.edges[i]
        q = e.fr
        u = e.to
        c = mcf.capacity[q][u]

        # mcf.capacity appears to hold residual capacities after the run: the
        # flow pushed along q -> u is desired_flow minus what remains, so a
        # value strictly between 0 and desired_flow marks a used edge.
        if 0.0 < c < desired_flow:
            assigned_u_grouped_by_q.setdefault(q, []).append(u)
    
    with open(out_csv_file_name, "w") as f:
        for q in assigned_u_grouped_by_q.keys():
            assigned_u_grouped_by_q[q] = sorted(assigned_u_grouped_by_q[q])
            for u in assigned_u_grouped_by_q[q]:
                f.write("{} --> {}".format(mapper.invQBU(q), mapper.invQBU(u)))
                f.write("   passed flow= {}\n".format(desired_flow - mcf.capacity[q][u]))
    
    with open(out_info_file_name, "w") as f:
        total_benefit = min_cost
        f.write("Value of left cut: |Q|.h = {}\n".format(possible_flow))
        f.write("Value of right cut: |U|.l = {}\n".format(desired_flow))
        f.write("Total flow sent: {}\n".format(flow))
        f.write("Min cost: {}\n".format(total_benefit))
        if flow > 0:
            f.write("Min cost per flow: {}\n".format(total_benefit/flow))
Example #8
from utils import Mapper, QLearning, LowRankLearning, Saver, PendulumEnv

mapping = Mapper()
env = PendulumEnv()
saver = Saver()

step = .1
decimal = 1
episodes = 30000
max_steps = 100
alpha_q = .1
alpha_lr = .005
gamma = .9
epsilon = .2
k = 5
lambda_l = .1
lambda_r = .1

state_map, state_reverse_map = mapping.get_state_map(step, decimal)
action_map, action_reverse_map = mapping.get_action_map(step, decimal)
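# The state/action maps presumably discretise the pendulum's continuous spaces
# onto a grid of resolution `step`, rounded to `decimal` places.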

n_states = len(state_map)
n_actions = len(action_map)

q_learner = QLearning(env=env,
                      state_map=state_map,
                      action_map=action_map,
                      state_reverse_map=state_reverse_map,
                      action_reverse_map=action_reverse_map,
                      n_states=n_states,
                      n_actions=n_actions,