Example #1
    async def bestiary_from_critterdb(self, url):
        log.info(f"Getting bestiary ID {url}...")
        index = 1
        creatures = []
        async with aiohttp.ClientSession() as session:
            for _ in range(100):  # 100 pages max
                log.info(f"Getting page {index} of {url}...")
                async with session.get(
                        f"http://critterdb.com/api/publishedbestiaries/{url}/creatures/{index}"
                ) as resp:
                    if not 199 < resp.status < 300:
                        raise ExternalImportError(
                            "Error importing bestiary. Are you sure the link is right?"
                        )
                    raw = await resp.json()
                    if not raw:  # an empty page means every creature has been fetched
                        break
                    creatures.extend(raw)
                    index += 1
            # one final request for the bestiary's metadata (its name)
            async with session.get(
                    f"http://critterdb.com/api/publishedbestiaries/{url}"
            ) as resp:
                raw = await resp.json()
                name = raw['name']
        parsed_creatures = [Monster.from_critterdb(c) for c in creatures]
        return Bestiary(url, name, parsed_creatures)
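
Example #1 pages through CritterDB's published-bestiary API: it requests numbered creature pages until one comes back empty (with a 100-page safety cap), then makes one final request for the bestiary's name. The same pattern works for any 1-indexed paginated JSON endpoint; here is a minimal standalone sketch (the fetch_all_pages name, base_url parameter, and RuntimeError are illustrative, not part of the original code):

import aiohttp

async def fetch_all_pages(base_url, max_pages=100):
    """Collect results from a 1-indexed paginated JSON API until an empty page."""
    results = []
    async with aiohttp.ClientSession() as session:
        for page in range(1, max_pages + 1):  # hard cap, mirroring the loop above
            async with session.get(f"{base_url}/{page}") as resp:
                if not 200 <= resp.status < 300:
                    raise RuntimeError(f"HTTP {resp.status} fetching page {page}")
                batch = await resp.json()
            if not batch:  # an empty page means we've read everything
                break
            results.extend(batch)
    return results
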
Example #2
    @classmethod
    async def from_critterdb(cls, ctx, url):
        log.info(f"Getting bestiary ID {url}...")
        index = 1
        creatures = []
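        # running digest of every byte fetched, used below to detect an identical prior import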
        sha256_hash = hashlib.sha256()
        async with aiohttp.ClientSession() as session:
            for _ in range(100):  # 100 pages max
                log.info(f"Getting page {index} of {url}...")
                async with session.get(
                        f"http://critterdb.com/api/publishedbestiaries/{url}/creatures/{index}"
                ) as resp:
                    if not 199 < resp.status < 300:
                        raise ExternalImportError(
                            "Error importing bestiary: HTTP error. Are you sure the link is right?"
                        )
                    try:
                        raw_creatures = await resp.json()
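                        # resp.read() returns the cached body bytes, so hashing after .json() adds no extra request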
                        sha256_hash.update(await resp.read())
                    except (ValueError, aiohttp.ContentTypeError):
                        raise ExternalImportError(
                            "Error importing bestiary: bad data. Are you sure the link is right?"
                        )
                    if not raw_creatures:
                        break
                    creatures.extend(raw_creatures)
                    index += 1
            async with session.get(
                    f"http://critterdb.com/api/publishedbestiaries/{url}"
            ) as resp:
                try:
                    raw = await resp.json()
                except (ValueError, aiohttp.ContentTypeError):
                    raise ExternalImportError(
                        "Error importing bestiary metadata. Are you sure the link is right?"
                    )
                name = raw['name']
                desc = raw['description']
                sha256_hash.update(name.encode() + desc.encode())

        # try and find a bestiary by looking up upstream|hash
        # if it exists, return it
        # otherwise commit a new one to the db and return that
        sha256 = sha256_hash.hexdigest()
        log.debug(f"Bestiary hash: {sha256}")
        existing_bestiary = await ctx.bot.mdb.bestiaries.find_one(
            {"upstream": url, "sha256": sha256}
        )
        if existing_bestiary:
            log.info("This bestiary already exists, subscribing")
            existing_bestiary = Bestiary.from_dict(existing_bestiary)
            await existing_bestiary.subscribe(ctx)
            return existing_bestiary

        parsed_creatures = [Monster.from_critterdb(c) for c in creatures]
        b = cls(None, sha256, url, [], [], [], name, parsed_creatures, desc)
        await b.write_to_db(ctx)
        return b
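
Example #2 adds two refinements: distinct error messages for HTTP failures versus malformed JSON, and deduplication by content hash. Each raw page body plus the bestiary's name and description feed a single SHA-256 digest, so an unchanged upstream bestiary hashes to the same value, and the find_one lookup on upstream and sha256 can simply subscribe the user to the already-imported copy instead of parsing and storing a duplicate. A minimal sketch of the fingerprinting idea in isolation (the bestiary_fingerprint name and its arguments are illustrative):

import hashlib

def bestiary_fingerprint(raw_pages, name, desc):
    """Digest raw page bytes plus metadata; identical upstream content yields an identical hash."""
    h = hashlib.sha256()
    for page_bytes in raw_pages:  # the raw HTTP body of each creatures page, in order
        h.update(page_bytes)
    h.update(name.encode() + desc.encode())
    return h.hexdigest()

Because the digest covers the exact bytes served, even cosmetic upstream changes (reordered keys, whitespace) produce a new hash and trigger a fresh import; that is conservative, but it never serves stale data.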