async def _get_character(self):
    """
    Fetch the raw character JSON from D&D Beyond, authenticating with the
    author's linked DDB account when one exists.

    Stores the payload on ``self.character_data`` (with ``_id`` set to the
    character ID) and returns it.

    :raises ExternalImportError: on 403/404/429 or any other non-200 response.
    """
    char_id = self.url
    auth_headers = {}
    ddb_user = await self.ctx.bot.ddb.get_ddb_user(self.ctx, self.ctx.author.id)
    if ddb_user is not None:
        auth_headers = {"Authorization": f"Bearer {ddb_user.token}"}
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{ENDPOINT}?charId={char_id}", headers=auth_headers) as resp:
            log.debug(f"DDB returned {resp.status}")
            status = resp.status
            if status == 403:
                # a 403 with no linked account usually means the sheet is private
                if ddb_user is None:
                    raise ExternalImportError("This character is private. Link your D&D Beyond and Discord accounts"
                                              " to import it!")
                raise ExternalImportError("You do not have permission to view this character.")
            if status == 404:
                raise ExternalImportError("This character does not exist. Are you using the right link?")
            if status == 429:
                raise ExternalImportError("Too many people are trying to import characters! Please try again in "
                                          "a few minutes.")
            if status != 200:
                raise ExternalImportError(f"Beyond returned an error: {status} - {resp.reason}")
            character = await resp.json()
    character['_id'] = char_id
    self.character_data = character
    log.debug(character)
    return character
async def from_critterdb(cls, ctx, url):
    """
    Import a published CritterDB bestiary by its ID.

    Pages through the bestiary's creatures (up to 100 pages), hashing the raw
    response bytes plus the bestiary name/description into a SHA-256 digest.
    If a bestiary with the same (upstream, sha256) pair already exists in the
    db, the author is subscribed to it instead of re-importing; otherwise a new
    Bestiary is parsed, written to the db, and returned.

    :raises ExternalImportError: on HTTP errors or undecodable responses.
    """
    log.info(f"Getting bestiary ID {url}...")
    index = 1
    creatures = []
    sha256_hash = hashlib.sha256()
    async with aiohttp.ClientSession() as session:
        for _ in range(100):  # 100 pages max
            log.info(f"Getting page {index} of {url}...")
            async with session.get(
                    f"http://critterdb.com/api/publishedbestiaries/{url}/creatures/{index}") as resp:
                if not 199 < resp.status < 300:
                    raise ExternalImportError(
                        "Error importing bestiary: HTTP error. Are you sure the link is right?")
                try:
                    raw_creatures = await resp.json()
                    # hash the raw page bytes so any upstream content change yields a new digest
                    sha256_hash.update(await resp.read())
                except (ValueError, aiohttp.ContentTypeError):
                    raise ExternalImportError(
                        "Error importing bestiary: bad data. Are you sure the link is right?")
                # an empty page means we've run out of creatures
                if not raw_creatures:
                    break
                creatures.extend(raw_creatures)
                index += 1
        async with session.get(f"http://critterdb.com/api/publishedbestiaries/{url}") as resp:
            try:
                raw = await resp.json()
            except (ValueError, aiohttp.ContentTypeError):
                raise ExternalImportError(
                    "Error importing bestiary metadata. Are you sure the link is right?")
            name = raw['name']
            desc = raw['description']
            # the name and description also contribute to the bestiary's identity hash
            sha256_hash.update(name.encode() + desc.encode())
    # try and find a bestiary by looking up upstream|hash
    # if it exists, return it
    # otherwise commit a new one to the db and return that
    sha256 = sha256_hash.hexdigest()
    log.debug(f"Bestiary hash: {sha256}")
    existing_bestiary = await ctx.bot.mdb.bestiaries.find_one({
        "upstream": url,
        "sha256": sha256
    })
    if existing_bestiary:
        log.info("This bestiary already exists, subscribing")
        existing_bestiary = Bestiary.from_dict(existing_bestiary)
        await existing_bestiary.subscribe(ctx)
        return existing_bestiary
    parsed_creatures = [Monster.from_critterdb(c) for c in creatures]
    b = cls(None, sha256, url, [], [], [], name, parsed_creatures, desc)
    await b.write_to_db(ctx)
    return b
async def generate_token(img_url, is_subscriber=False):
    """
    Download an image and render it as a circular character token.

    :param img_url: URL of the source image (normalized by preprocess_url first).
    :param is_subscriber: if True, uses the subscriber token frame instead of the free one.
    :returns: a BytesIO containing the finished PNG, seeked to offset 0.
    :raises ExternalImportError: if the download fails or the payload isn't an image.
    """
    img_url = preprocess_url(img_url)

    def process_img(the_img_bytes, template_fp='res/template-f.png'):
        # Pure-PIL work; runs in an executor so it doesn't block the event loop.
        img = Image.open(BytesIO(the_img_bytes)).convert('RGBA')
        template_img = Image.open(template_fp)
        mask_img = Image.open('res/alphatemplate.tif')
        try:
            # crop to a centered square, then scale to the token size
            width, height = img.size
            if height >= width:
                box = (0, 0, width, width)
            else:
                box = (width / 2 - height / 2, 0, width / 2 + height / 2, height)
            img = img.crop(box)
            # fix: Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter
            img = img.resize(TOKEN_SIZE, Image.LANCZOS)
            # combine the circular alpha mask with the image's own alpha channel
            mask_img = ImageChops.darker(mask_img, img.getchannel('A'))
            img.putalpha(mask_img)
            # overlay the token frame on top
            img.paste(template_img, mask=template_img)
            out_bytes = BytesIO()
            img.save(out_bytes, "PNG")
        finally:
            # fix: previously these handles leaked if any processing step raised
            template_img.close()
            mask_img.close()
            img.close()
        out_bytes.seek(0)
        return out_bytes

    async with aiohttp.ClientSession() as session:
        async with session.get(img_url) as resp:
            if not 199 < resp.status < 300:
                raise ExternalImportError(
                    f"I was unable to download the image to tokenize. "
                    f"({resp.status} {resp.reason})")
            # reject non-image payloads before handing the bytes to PIL
            content_type = resp.headers.get("Content-Type", '')
            if not content_type.startswith('image/'):
                raise ExternalImportError(
                    f"This does not look like an image file (content type {content_type})."
                )
            img_bytes = await resp.read()
    # note: the old `try: ... except Exception: raise` wrapper was a no-op and was removed
    template = 'res/template-s.png' if is_subscriber else 'res/template-f.png'
    return await asyncio.get_event_loop().run_in_executor(
        None, process_img, img_bytes, template)
async def get_character(self):
    """
    Download the character JSON from D&D Beyond, store it on
    ``self.character_data``, and run stat/feature post-processing.

    :raises ExternalImportError: on 404, 429, or any other non-200 response.
    """
    char_id = self.url
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{API_BASE}{char_id}/json", headers=HEADERS) as resp:
            log.debug(f"{API_BASE}{char_id}/json")
            log.debug(f"HEADERS:{HEADERS}")
            log.debug(f"DDB returned {resp.status}")
            # DDB answers 404 for sheets that exist but aren't shared
            if resp.status == 404:
                raise ExternalImportError(
                    "Error: I do not have permission to view this character sheet. "
                    "Make sure you've generated a sharable link for your character."
                )
            if resp.status == 429:
                raise ExternalImportError(
                    "Too many people are trying to import characters! Please try again in "
                    "a few minutes.")
            if resp.status != 200:
                raise ExternalImportError(
                    f"Beyond returned an error: {resp.status} - {resp.reason}"
                )
            character = await resp.json()
    character['_id'] = char_id
    self.character_data = character
    self._calculate_stats()
    self._load_features()
    return character
async def get_character(self):
    """
    Download the character JSON from Dicecloud, retrying up to 10 times on
    rate limits, and store it on ``self.character``.

    :raises ExternalImportError: on 403, any other HTTP error, or if every
        retry was rate-limited.
    """
    url = self.url
    character = None
    async with aiohttp.ClientSession() as session:
        for _ in range(10):  # 10 retries
            async with session.get(
                    f"{API_BASE}{url}/json?key={KEY}") as resp:
                log.debug(f"Dicecloud returned {resp.status}")
                if resp.status == 200:
                    character = await resp.json(encoding='utf-8')
                    break
                elif resp.status == 429:
                    timeout = await resp.json()
                    log.info(
                        f"Ratelimit hit getting character - resets in {timeout}ms"
                    )
                    await asyncio.sleep(timeout['timeToReset'] / 1000
                                        )  # rate-limited, just wait
                elif resp.status == 403:
                    raise ExternalImportError(
                        "Error: I do not have permission to view this character sheet. Make "
                        "sure it's either shared with `avrae` on Dicecloud or set so "
                        "anyone with link can view.")
                else:
                    raise ExternalImportError(
                        f"Dicecloud returned an error: {resp.status} - {resp.reason}"
                    )
    if character is None:
        # fix: previously fell through to character['_id'] and crashed with a
        # TypeError when all 10 attempts were rate-limited
        raise ExternalImportError(
            "Dicecloud is rate limiting requests. Please try again in a few minutes.")
    character['_id'] = url
    self.character = character
    return character
def parse_critterdb_traits(data, key):
    # NOTE(review): this definition is truncated in the visible source — the body of
    # the final `if` (and anything after it) is missing from this chunk. Comments only;
    # no code changed.
    #
    # Parses one CritterDB stat section (e.g. 'actions') into (traits, attacks):
    # avrae override annotations take precedence, then plain attack syntax,
    # then bare damage expressions.
    traits = []
    attacks = []
    for trait in data['stats'][key]:
        name = trait['name']
        raw = trait['description']
        # three ways an attack can be embedded in the description text
        overrides = list(AVRAE_ATTACK_OVERRIDES_RE.finditer(raw))
        raw_atks = list(ATTACK_RE.finditer(raw))
        raw_damage = list(JUST_DAMAGE_RE.finditer(raw))
        # strip the override annotations out of the user-visible description
        filtered = AVRAE_ATTACK_OVERRIDES_RE.sub('', raw)
        desc = markdownify(filtered).strip()
        if overrides:
            for override in overrides:
                if override.group('simple'):
                    # simple form: name|attackBonus|damage
                    attacks.append(
                        Attack.from_dict({
                            'name': override.group(2) or name,
                            'attackBonus': override.group(3) or None,
                            'damage': override.group(4) or None,
                            'details': desc
                        }))
                elif (freeform_override := override.group('freeform')):
                    # freeform form: a YAML automation block (one attack or a list)
                    try:
                        attack_yaml = yaml.safe_load(freeform_override)
                    except yaml.YAMLError:
                        raise ExternalImportError(
                            f"Monster had an invalid automation YAML ({data['name']}: {name})"
                        )
                    if not isinstance(attack_yaml, list):
                        attack_yaml = [attack_yaml]
                    for atk in attack_yaml:
                        if isinstance(atk, dict):
                            atk['name'] = atk_name = atk.get('name') or name
                            try:
                                attacks.append(Attack.from_dict(atk))
                            except Exception:
                                raise ExternalImportError(
                                    f"An automation YAML contained an invalid attack ({data['name']}: {atk_name})"
                                )
                        else:
                            raise ExternalImportError(
                                f"An automation YAML contained an invalid attack ({data['name']}: {name})"
                            )
                # else: empty override, so skip this attack.
        elif raw_atks:
            for atk in raw_atks:
                attack_bonus = atk.group('attackBonus').lstrip('+')
                # Bonus damage
                bonus = ""
                if (bonus_damage_type := atk.group('damageTypeBonus')) and \
                        (bonus_damage := atk.group('damageBonusInt') or atk.group('damageBonusDice')):
async def get_link_shared_bestiary_creatures(url, session, api_base, sha256_hash):
    """
    Fetch every creature in a link-shared CritterDB bestiary (one request,
    no paging), folding the raw response into *sha256_hash*.

    :raises ExternalImportError: if link sharing is disabled (400) or on any
        other HTTP error.
    """
    log.info(f"Getting link shared bestiary {url}...")
    async with session.get(f"{api_base}/{url}/creatures") as resp:
        status = resp.status
        # CritterDB answers 400 specifically when link sharing is off
        if status == 400:
            raise ExternalImportError(
                "Error importing bestiary: Cannot access bestiary. Please ensure link sharing is enabled!")
        if not 199 < status < 300:
            raise ExternalImportError(
                "Error importing bestiary: HTTP error. Are you sure the link is right?")
        return await parse_critterdb_response(resp, sha256_hash)
async def bestiary_from_critterdb(self, url):
    """
    Import a published CritterDB bestiary by its ID and return a Bestiary.

    Pages through the creature list (up to 100 pages) and then fetches the
    bestiary metadata for its name.

    :raises ExternalImportError: on HTTP errors or undecodable responses.
    """
    log.info(f"Getting bestiary ID {url}...")
    index = 1
    creatures = []
    async with aiohttp.ClientSession() as session:
        for _ in range(100):  # 100 pages max
            log.info(f"Getting page {index} of {url}...")
            async with session.get(
                    f"http://critterdb.com/api/publishedbestiaries/{url}/creatures/{index}"
            ) as resp:
                if not 199 < resp.status < 300:
                    raise ExternalImportError(
                        "Error importing bestiary. Are you sure the link is right?"
                    )
                try:
                    raw = await resp.json()
                except (ValueError, aiohttp.ContentTypeError):
                    # fix: a non-JSON response (e.g. an HTML error page) used to
                    # bubble up as an unhandled exception; matches the sibling importers
                    raise ExternalImportError(
                        "Error importing bestiary. Are you sure the link is right?"
                    )
                # an empty page means we've reached the end
                if not raw:
                    break
                creatures.extend(raw)
                index += 1
        async with session.get(
                f"http://critterdb.com/api/publishedbestiaries/{url}"
        ) as resp:
            try:
                raw = await resp.json()
            except (ValueError, aiohttp.ContentTypeError):
                raise ExternalImportError(
                    "Error importing bestiary. Are you sure the link is right?"
                )
            name = raw['name']
    parsed_creatures = [Monster.from_critterdb(c) for c in creatures]
    return Bestiary(url, name, parsed_creatures)
async def commit(self, ctx, do_live_integrations=True):
    """Writes a character object to the database, under the contextual author."""
    data = self.to_dict()
    # #1472 - active/active_guilds are managed separately; only seed them on
    # first insert. May regress when doing atomic commits, be careful.
    for volatile_key in ('active', 'active_guilds'):
        data.pop(volatile_key)
    try:
        await ctx.bot.mdb.characters.update_one(
            {"owner": self._owner, "upstream": self._upstream},
            {
                "$set": data,
                "$setOnInsert": {
                    'active': self._active,
                    'active_guilds': self._active_guilds
                }  # also #1472
            },
            upsert=True)
    except OverflowError:
        raise ExternalImportError(
            "A number on the character sheet is too large to store.")
    if do_live_integrations and self._live_integration is not None and self.options.sync_outbound:
        # creates a task to commit eventually
        self._live_integration.commit_soon(ctx)
def _client_lock():
    """
    Generator guard around the google client initialization flag (presumably
    wrapped with contextlib.contextmanager at the call/decoration site —
    confirm against the rest of the file).

    :raises ExternalImportError: if another initialization is already in flight.
    """
    if GoogleSheet._client_initializing:
        raise ExternalImportError(
            "I am still connecting to google. Try again in a few seconds.")
    GoogleSheet._client_initializing = True
    try:
        yield
    finally:
        # fix: reset the flag even if the guarded body raises; previously an
        # error during init left _client_initializing stuck at True, blocking
        # every future import with a "still connecting" error
        GoogleSheet._client_initializing = False
async def get_character(self):
    """
    Download the character JSON from D&D Beyond and store it on ``self.character``.

    :raises ExternalImportError: on 404 or any other non-200 response.
    """
    char_id = self.url
    async with aiohttp.ClientSession(headers=CUSTOM_HEADERS) as session:
        async with session.get(f"{API_BASE}{char_id}/json") as resp:
            log.debug(f"DDB returned {resp.status}")
            # DDB answers 404 for sheets that exist but aren't shared
            if resp.status == 404:
                raise ExternalImportError("Error: I do not have permission to view this character sheet. "
                                          "Make sure you've generated a sharable link for your character.")
            if resp.status != 200:
                raise ExternalImportError(f"Beyond returned an error: {resp.status} - {resp.reason}")
            character = await resp.json()
    character['_id'] = char_id
    self.character = character
    return character
async def fetch_monster_image(img_url: str):
    """
    Fetches a monster token image from the given URL, caching it until the bot restarts.

    :returns: A file-like object (file or bytesio) containing the monster token,
              or a path to the existing cached image.
    :rtype: BytesIO or str
    """
    # ensure cache dir exists; files are keyed by the sha1 of the URL
    os.makedirs(".cache/monster-tokens", exist_ok=True)
    digest = hashlib.sha1(img_url.encode()).hexdigest()
    cache_path = f'.cache/monster-tokens/{digest}.png'

    # cache hit: hand back the path instead of re-downloading
    if os.path.exists(cache_path):
        return cache_path

    async with aiohttp.ClientSession() as session:
        async with session.get(img_url) as resp:
            if not 199 < resp.status < 300:
                raise ExternalImportError(
                    f"I was unable to retrieve the monster token. "
                    f"({resp.status} {resp.reason})")
            img_bytes = await resp.read()

    # populate the cache for subsequent calls
    with open(cache_path, 'wb') as f:
        f.write(img_bytes)
    return BytesIO(img_bytes)
async def parse_critterdb_response(resp, sha256_hash):
    """
    Decode a CritterDB JSON response and fold its raw bytes into *sha256_hash*.

    :returns: the decoded JSON payload.
    :raises ExternalImportError: if the body isn't valid JSON.
    """
    try:
        payload = await resp.json()
        raw_bytes = await resp.read()
    except (ValueError, aiohttp.ContentTypeError):
        raise ExternalImportError("Error importing bestiary: bad data. Are you sure the link is right?")
    sha256_hash.update(raw_bytes)
    return payload
async def load_character(self, owner_id: str, args):
    """
    Downloads and parses the character data, returning a fully-formed Character object.

    :raises ExternalImportError if something went wrong during the import that we can expect
    :raises Exception if something weirder happened
    """
    try:
        await self.get_character()
    except (KeyError, SpreadsheetNotFound):
        raise ExternalImportError("Invalid character sheet. Make sure you've shared it with me at "
                                  "`[email protected]`!")
    except HttpError:
        raise ExternalImportError("Google returned an error. Please ensure your sheet is shared with "
                                  "`[email protected]` and try again in a few minutes.")
    # fix: removed the old `except Exception: raise` clause — it was a no-op;
    # unexpected errors still propagate unchanged
    return await asyncio.get_event_loop().run_in_executor(None, self._load_character, owner_id, args)
def extract_gsheet_id_from_url(url):
    """
    Pull the spreadsheet key out of a Google Sheets URL, trying the new-style
    pattern first and then the old-style one.

    :raises ExternalImportError: if neither pattern matches.
    """
    for pattern in (URL_KEY_V2_RE, URL_KEY_V1_RE):
        match = pattern.search(url)
        if match:
            return match.group(1)
    raise ExternalImportError("This is not a valid Google Sheets link.")
async def init_gsheet_client():
    """
    Authorize the shared pygsheets client in an executor, guarding against
    concurrent initialization via ``GoogleSheet._client_initializing``.

    :raises ExternalImportError: if another initialization is already in flight.
    """
    if GoogleSheet._client_initializing:
        raise ExternalImportError("I am still connecting to google. Try again in a few seconds.")
    GoogleSheet._client_initializing = True
    try:
        def _():
            return pygsheets.authorize(service_account_file='avrae-google.json', no_cache=True)

        GoogleSheet.g_client = await asyncio.get_event_loop().run_in_executor(None, _)
    finally:
        # fix: clear the flag even if authorization raises; previously a failed
        # auth left _client_initializing stuck at True, permanently blocking
        # all future imports with "still connecting" errors
        GoogleSheet._client_initializing = False
    log.info("Logged in to google")
async def load_character(self, owner_id: str, args):
    """
    Downloads and parses the character data, returning a fully-formed Character object.

    :raises ExternalImportError if something went wrong during the import that we can expect
    :raises Exception if something weirder happened
    """
    try:
        await self.get_character()
    except DicecloudException as e:
        raise ExternalImportError(f"Dicecloud returned an error: {e}")

    # all top-level character fields live on the first entry of 'characters'
    main_char = self.character_data['characters'][0]

    upstream = f"dicecloud-{self.url}"
    active = False
    sheet_type = "dicecloud"
    import_version = 15

    name = main_char['name'].strip()
    description = main_char['description']
    image = main_char['picture']

    stats = self.get_stats().to_dict()
    levels = self.get_levels().to_dict()
    attacks = self.get_attacks()

    skls, svs = self.get_skills_and_saves()
    skills = skls.to_dict()
    saves = svs.to_dict()

    resistances = self.get_resistances().to_dict()
    ac = self.get_ac()
    max_hp = int(self.calculate_stat('hitPoints'))
    hp = max_hp
    temp_hp = 0

    cvars = {}
    options = {}
    overrides = {}
    death_saves = {}

    # custom counters are only imported when the user passes -cc
    consumables = self.get_custom_counters() if args.last('cc') else []

    spellbook = self.get_spellbook().to_dict()
    live = self.is_live()
    race = main_char['race'].strip()
    background = main_char['backstory'].strip()

    return Character(
        owner_id, upstream, active, sheet_type, import_version, name,
        description, image, stats, levels, attacks, skills, resistances,
        saves, ac, max_hp, hp, temp_hp, cvars, options, overrides,
        consumables, death_saves, spellbook, live, race, background)
async def commit(self, ctx):
    """Writes a character object to the database, under the contextual author."""
    query = {"owner": self._owner, "upstream": self._upstream}
    update = {"$set": self.to_dict()}
    try:
        await ctx.bot.mdb.characters.update_one(query, update, upsert=True)
    except OverflowError:
        # Mongo can't store ints beyond 64 bits
        raise ExternalImportError(
            "A number on the character sheet is too large to store.")
async def get_published_bestiary_creatures(url, session, api_base, sha256_hash):
    """
    Page through a published CritterDB bestiary (up to 100 pages), collecting
    every creature and folding each raw response into *sha256_hash*.

    :raises ExternalImportError: on any HTTP error.
    """
    creatures = []
    for page in range(1, 101):  # 100 pages max
        log.info(f"Getting page {page} of {url}...")
        async with session.get(f"{api_base}/{url}/creatures/{page}") as resp:
            if not 199 < resp.status < 300:
                raise ExternalImportError(
                    "Error importing bestiary: HTTP error. Are you sure the link is right?")
            page_creatures = await parse_critterdb_response(resp, sha256_hash)
        # an empty page means we've run out of creatures
        if not page_creatures:
            break
        creatures.extend(page_creatures)
    return creatures
async def load_character(self, ctx, args):
    """
    Downloads and parses the character data, returning a fully-formed Character object.

    :raises ExternalImportError if something went wrong during the import that we can expect
    :raises Exception if something weirder happened
    """
    owner_id = str(ctx.author.id)
    try:
        await self.get_character()
    except (KeyError, SpreadsheetNotFound, APIError):
        raise ExternalImportError("Invalid character sheet. Make sure you've shared it with me at "
                                  "`[email protected]`!")
    # fix: removed the old `except Exception: raise` clause — it was a no-op;
    # unexpected errors still propagate unchanged
    return await asyncio.get_event_loop().run_in_executor(None, self._load_character, owner_id, args)
async def from_critterdb(cls, ctx, url, published=True):
    """
    Import a CritterDB bestiary by ID, deduping by content hash.

    :param published: True for published bestiaries (paged endpoint), False for
        link-shared ones (single-request endpoint).

    The SHA-256 digest covers the schema version, the raw creature responses,
    and the bestiary name/description. If a bestiary with the same
    (upstream, sha256) pair exists in the db, the author is subscribed to it;
    otherwise a new Bestiary is parsed, written to the db, and subscribed.

    :raises ExternalImportError: on HTTP errors or undecodable responses.
    """
    log.info(f"Getting bestiary ID {url}...")
    api_base = "https://critterdb.com:443/api/publishedbestiaries" if published \
        else "https://critterdb.com:443/api/bestiaries"
    sha256_hash = hashlib.sha256()
    # bumping the schema version changes every digest, forcing re-imports
    sha256_hash.update(BESTIARY_SCHEMA_VERSION)
    async with aiohttp.ClientSession() as session:
        if published:
            creatures = await get_published_bestiary_creatures(
                url, session, api_base, sha256_hash)
        else:
            creatures = await get_link_shared_bestiary_creatures(
                url, session, api_base, sha256_hash)
        async with session.get(f"{api_base}/{url}") as resp:
            try:
                raw = await resp.json()
            except (ValueError, aiohttp.ContentTypeError):
                raise ExternalImportError(
                    "Error importing bestiary metadata. Are you sure the link is right?"
                )
            name = raw['name']
            desc = raw['description']
            # name/description also contribute to the identity hash
            sha256_hash.update(name.encode() + desc.encode())
    # try and find a bestiary by looking up upstream|hash
    # if it exists, return it
    # otherwise commit a new one to the db and return that
    sha256 = sha256_hash.hexdigest()
    log.debug(f"Bestiary hash: {sha256}")
    existing_bestiary = await ctx.bot.mdb.bestiaries.find_one({
        "upstream": url,
        "sha256": sha256
    })
    if existing_bestiary:
        log.info("This bestiary already exists, subscribing")
        existing_bestiary = Bestiary.from_dict(existing_bestiary)
        await existing_bestiary.subscribe(ctx)
        return existing_bestiary
    # NOTE(review): _monster_factory elsewhere in this file takes
    # (data, bestiary_name) — confirm the single-argument call is the right
    # version for this importer
    parsed_creatures = [_monster_factory(c) for c in creatures]
    b = cls(None, sha256, url, published, name, parsed_creatures, desc)
    await b.write_to_db(ctx)
    await b.subscribe(ctx)
    return b
def get_custom_counters(self):
    """
    Build custom-counter dicts for this character: one per positive class
    resource, plus one per enabled limited-use feature.

    :returns: a list of counter dicts (name/value/minv/maxv/reset/display_type/live_id).
    :raises ExternalImportError: if a feature's max-uses expression fails to evaluate.
    """
    counters = []
    # class resources (rages, ki, etc.) — only those the character actually has
    for res in CLASS_RESOURCES:
        res_value = self.calculate_stat(res)
        if res_value > 0:
            # small pools render as bubbles; larger ones use the default display
            display_type = 'bubble' if res_value < 6 else None
            co = {  # we have to initialize counters this way, which is meh
                "name": CLASS_RESOURCE_NAMES.get(res, 'Unknown'),
                "value": res_value,
                "minv": '0',
                "maxv": str(res_value),
                "reset": CLASS_RESOURCE_RESETS.get(res),
                "display_type": display_type,
                "live_id": res
            }
            counters.append(co)
    # limited-use features: must be enabled, not removed, and have a uses expression
    for f in self.character_data.get('features', []):
        if not f.get('enabled'):
            continue
        if f.get('removed'):
            continue
        if not ('uses' in f and f['uses']):
            continue
        # infer the reset condition from the feature's description text
        reset = None
        desc = f.get('description', '').lower()
        if 'short rest' in desc or 'short or long rest' in desc:
            reset = 'short'
        elif 'long rest' in desc:
            reset = 'long'
        try:
            initial_value = self.evaluator.eval(f['uses'])
        except draconic.DraconicException:
            raise ExternalImportError(
                f"Invalid max uses on limited use feature {f['name']}: {f['uses']}"
            )
        # NOTE(review): bubble threshold is < 7 here but < 6 for class
        # resources above — confirm whether that asymmetry is intentional
        display_type = 'bubble' if initial_value < 7 else None
        co = {
            "name": f['name'],
            "value": initial_value,
            "minv": '0',
            "maxv": str(initial_value),
            "reset": reset,
            "display_type": display_type,
            "live_id": f['_id']
        }
        counters.append(co)
    return counters
async def commit(self, ctx):
    """Writes a character object to the database, under the contextual author."""
    data = self.to_dict()
    # #1472 - 'active' is managed separately; only seed it on first insert.
    # May regress when doing atomic commits, be careful.
    data.pop('active')
    query = {"owner": self._owner, "upstream": self._upstream}
    update = {
        "$set": data,
        "$setOnInsert": {'active': self._active}  # also #1472
    }
    try:
        await ctx.bot.mdb.characters.update_one(query, update, upsert=True)
    except OverflowError:
        raise ExternalImportError(
            "A number on the character sheet is too large to store.")
def _monster_factory(data, bestiary_name):
    """
    Build a homebrew Monster from one CritterDB creature dict.

    :param data: the raw creature dict (expects 'stats' and 'flavor' keys).
    :param bestiary_name: used as the monster's source attribution.
    :raises ExternalImportError: if the proficiency bonus is missing or a
        trait contains invalid attack automation.
    """
    # missing ability scores default to 10; missing proficiency bonus to 0 here
    # (a hard error is raised further down if it's truly None)
    ability_scores = BaseStats(data['stats']['proficiencyBonus'] or 0,
                               data['stats']['abilityScores']['strength'] or 10,
                               data['stats']['abilityScores']['dexterity'] or 10,
                               data['stats']['abilityScores']['constitution'] or 10,
                               data['stats']['abilityScores']['intelligence'] or 10,
                               data['stats']['abilityScores']['wisdom'] or 10,
                               data['stats']['abilityScores']['charisma'] or 10)
    # fractional CRs render as the conventional fraction strings
    cr = {0.125: '1/8', 0.25: '1/4', 0.5: '1/2'}.get(
        data['stats']['challengeRating'], str(data['stats']['challengeRating']))
    # average HP: floor(average die roll * dice count) + con mod per die
    num_hit_die = data['stats']['numHitDie']
    hit_die_size = data['stats']['hitDieSize']
    con_by_level = num_hit_die * ability_scores.get_mod('con')
    hp = floor(((hit_die_size + 1) / 2) * num_hit_die) + con_by_level
    hitdice = f"{num_hit_die}d{hit_die_size} + {con_by_level}"
    proficiency = data['stats']['proficiencyBonus']
    if proficiency is None:
        raise ExternalImportError(f"Monster's proficiency bonus is nonexistent ({data['name']}).")
    # skills: start from ability-based defaults, then overlay proficient
    # skills (default + PB) or explicit overridden values
    skills = Skills.default(ability_scores)
    skill_updates = {}
    for skill in data['stats']['skills']:
        name = spaced_to_camel(skill['name'])
        if skill['proficient']:
            mod = skills[name].value + proficiency
        else:
            mod = skill.get('value')
        if mod is not None:
            skill_updates[name] = mod
    skills.update(skill_updates)
    # saving throws: same scheme as skills
    saves = Saves.default(ability_scores)
    save_updates = {}
    for save in data['stats']['savingThrows']:
        name = save['ability'].lower() + 'Save'
        if save['proficient']:
            mod = saves.get(name).value + proficiency
        else:
            mod = save.get('value')
        if mod is not None:
            save_updates[name] = mod
    saves.update(save_updates)
    # each trait section yields both display traits and parsed attacks
    attacks = []
    traits, atks = parse_critterdb_traits(data, 'additionalAbilities')
    attacks.extend(atks)
    actions, atks = parse_critterdb_traits(data, 'actions')
    attacks.extend(atks)
    reactions, atks = parse_critterdb_traits(data, 'reactions')
    attacks.extend(atks)
    legactions, atks = parse_critterdb_traits(data, 'legendaryActions')
    attacks.extend(atks)
    attacks = AttackList.from_dict(attacks)
    # spellcasting is parsed from the additionalAbilities traits only
    spellcasting = parse_critterdb_spellcasting(traits, ability_scores)
    resistances = Resistances.from_dict(dict(vuln=data['stats']['damageVulnerabilities'],
                                             resist=data['stats']['damageResistances'],
                                             immune=data['stats']['damageImmunities']))
    return Monster(name=data['name'], size=data['stats']['size'], race=data['stats']['race'],
                   alignment=data['stats']['alignment'], ac=data['stats']['armorClass'],
                   armortype=data['stats']['armorType'], hp=hp, hitdice=hitdice,
                   speed=data['stats']['speed'], ability_scores=ability_scores,
                   saves=saves, skills=skills, senses=', '.join(data['stats']['senses']),
                   resistances=resistances, display_resists=resistances,
                   condition_immune=data['stats']['conditionImmunities'],
                   languages=data['stats']['languages'], cr=cr,
                   xp=data['stats']['experiencePoints'], traits=traits, actions=actions,
                   reactions=reactions, legactions=legactions,
                   la_per_round=data['stats']['legendaryActionsPerRound'], attacks=attacks,
                   proper=data['flavor']['nameIsProper'], image_url=data['flavor']['imageUrl'],
                   spellcasting=spellcasting, homebrew=True, source=bestiary_name)