Example #1
def fetch_public_players(region, league=None):
    # Uplink consumer backed by aiohttp so the crawler can issue requests concurrently.
    api_as = SC2PublicAPI(BASE_URL, client=uplink.AiohttpClient())
    crawler = LadderCrawler(api_as, max_requests=12)
    loop = asyncio.get_event_loop()
    bundle_list = []
    league_dict = {
        "grandmaster": Rank.GRANDMASTER.value,
        "master": Rank.MASTER.value,
        "diamond": Rank.DIAMOND.value,
        "platinum": Rank.PLATINUM.value,
        "gold": Rank.GOLD.value,
        "silver": Rank.SILVER.value,
        "bronze": Rank.BRONZE.value,
    }
    # Restrict the queryset to a single league when one is requested.
    if league is not None:
        player_filter = Q(rank=league_dict[league], region=region)
    else:
        player_filter = Q(region=region)

    # Collect the (region, realm, profile_id, ladder_id) tuples the crawler needs.
    for ladder in Ladder.objects.filter(player_filter):
        bundle_list.append(
            (ladder.region, ladder.realm, ladder.profile_id, ladder.ladder_id)
        )

    # Fetch player data for every bundle concurrently and block until the crawl completes.
    players = loop.run_until_complete(crawler.get_player_dump(bundle_list))
    return players
Example #2
def fetch_public_ladders(region, league=None):
    league_dict = {
        "grandmaster": Rank.GRANDMASTER.value,
        "master": Rank.MASTER.value,
        "diamond": Rank.DIAMOND.value,
        "platinum": Rank.PLATINUM.value,
        "gold": Rank.GOLD.value,
        "silver": Rank.SILVER.value,
        "bronze": Rank.BRONZE.value,
    }
    # Restrict the queryset to a single league when one is requested.
    if league is not None:
        ladder_filter = Q(rank=league_dict[league], region=region)
    else:
        ladder_filter = Q(region=region)
    # Pick a random matching ladder as the crawl's starting point.
    ladder = Ladder.objects.filter(ladder_filter).order_by("?").first()
    realm, profile_id, ladder_id = None, None, None
    if ladder:
        realm, profile_id, ladder_id = ladder.realm, ladder.profile_id, ladder.ladder_id

    api_as = SC2PublicAPI(BASE_URL, client=uplink.AiohttpClient())
    crawler = LadderCrawler(api_as, max_requests=512)
    loop = asyncio.get_event_loop()

    # TODO: Can filter out recently updated ladders or players
    #  ladder_ids = [ladder.ladder_id for ladder in Ladder.objects.filter(ladder_filter)]
    #  player_filter = Q(rank=league_dict[league], region=Region.EU.value)
    #  profile_ids = [str(player.profile_id) for player in Player.players.filter(player_filter)]
    #  crawler.ladder_ids = ladder_ids
    #  crawler.profile_ids = profile_ids

    # Starting from the sampled ladder, crawl outward to adjacent ladders in the region/league.
    ladder_infos = loop.run_until_complete(
        crawler.get_adjacent_ladders(
            region=region,
            league=league,
            realm=realm,
            profile_id=profile_id,
            ladder_id=ladder_id,
        )
    )
    return ladder_infos
Example #3
import uplink

from .api import Kraken

kraken = Kraken(base_url="https://api.kraken.com/",
                client=uplink.AiohttpClient())
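
The Kraken class imported from .api is not shown in this excerpt. Purely as an illustration, a minimal sketch of what such an uplink consumer could look like; the endpoint paths assume Kraken's public REST API and are not taken from the project's actual api.py:

import uplink


class Kraken(uplink.Consumer):
    """Hypothetical consumer exposing a couple of public Kraken endpoints."""

    @uplink.get("0/public/Time")
    def get_server_time(self):
        """Fetch the exchange server time."""

    @uplink.get("0/public/Assets")
    def get_assets(self):
        """Fetch the assets available for trading."""

Because the consumer above is constructed with uplink.AiohttpClient(), each call returns an awaitable aiohttp response instead of a blocking one.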
Example #4
        pass

    @get("/repos/{user}/{repo_name}/commits")
    def commits_for_repo(
        self,
        user,
        repo_name,
        since: Query,
        client_id: Query = CLIENT_ID,
        client_secret: Query = CLIENT_SECRET,
    ):
        """ Get a list of commits in a repo since some start date """
        pass


github = Github(BASE_URL, client=uplink.AiohttpClient())
loop = asyncio.get_event_loop()

# Helpers


async def _repos_for_keyword(keyword):
    """ Get repos which match the keyword search """
    r = await github.repos_for_keyword(keyword)
    r_json = await r.json()
    return [item["full_name"] for item in r_json["items"]]


async def _users_for_repo(user, repo_name, oldest_age=55):
    """ Returns users that have commited in a repo in the last N weeks """
Example #5
import asyncio

import uplink

# Local imports
from github import BASE_URL, GitHub


# Legacy generator-based coroutine syntax; an async/await equivalent is sketched after this example.
@asyncio.coroutine
def get_contributors(full_name):
    print("Getting GitHub repository `{}`".format(full_name))
    response = yield from gh_async.get_contributors(*full_name.split("/"))
    json = yield from response.json()
    print("response for {}: {}".format(full_name, json))
    return json


if __name__ == "__main__":
    # This consumer instance uses Requests to make blocking requests.
    gh_sync = GitHub(base_url=BASE_URL)

    # This uses aiohttp, an HTTP client for asyncio.
    gh_async = GitHub(base_url=BASE_URL, client=uplink.AiohttpClient())

    # First, let's fetch a list of all public repositories.
    repos = gh_sync.get_repos().json()

    # Use only the first 10 results to avoid hitting the rate limit.
    repos = repos[:10]

    # Concurrently fetch the contributors for those repositories.
    futures = [get_contributors(repo["full_name"]) for repo in repos]
    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.wait(futures))
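
The asyncio.coroutine decorator used above is legacy and was removed in Python 3.11. On current Python the same fan-out can be written with native coroutines; a minimal sketch reusing the gh_async consumer from above:

async def get_contributors(full_name):
    response = await gh_async.get_contributors(*full_name.split("/"))
    return await response.json()


async def fetch_all(repos):
    # Run every contributor request concurrently and gather the JSON results.
    return await asyncio.gather(
        *(get_contributors(repo["full_name"]) for repo in repos)
    )

# results = asyncio.run(fetch_all(repos))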