def command():
    """Flush the redis item/color caches, then purge CDN endpoint and static caches."""
    click.echo("Purging redis caches...")
    # All three key sets are invalidated together so they stay consistent.
    for cache_keys in (ITEM_COLOR_IDS_KEYS, WORLD_ITEM_COLOR_IDS_KEYS,
                       ITEM_METAL_IDS_KEYS):
        cache.delete(cache_keys)

    click.echo("Purging endpoints...")
    purge_cache(all_paths=True)

    click.echo("Purging static files...")
    purge_static_cache()
def command(dropbox_url):
    """Download a world-atlas zip from ``dropbox_url``, extract and process it.

    Downloads the archive, extracts it into ``BASE_DIR``, processes each
    world's atlas image (``.png``) and beacons file (``.beacons.gz``), then
    removes the temporary files and purges the CDN cache.

    Args:
        dropbox_url: direct-download URL of the atlas zip archive.

    Raises:
        requests.HTTPError: if the download returns an error status.
    """
    click.echo("Downloading zip...")
    response = requests.get(dropbox_url)
    response.raise_for_status()

    click.echo("Writing zip...")
    atlas_zip_file = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
        delete=False)
    atlas_zip_file.write(response.content)
    atlas_zip_file.close()

    # BUG FIX: exist_ok=True — a previous partial run can leave BASE_DIR
    # behind (cleanup below only runs on success), and a bare makedirs()
    # would then crash with FileExistsError on every retry.
    os.makedirs(BASE_DIR, exist_ok=True)
    with zipfile.ZipFile(atlas_zip_file.name, "r") as zip_file:
        zip_file.extractall(BASE_DIR)

    click.echo("Processing data...")
    for root, _, files in os.walk(BASE_DIR):
        with click.progressbar(files, show_percent=True, show_pos=True) as pbar:
            for name in pbar:
                pbar.label = name
                pbar.render_progress()

                # Filenames encode the world id as the second "_" field;
                # skip files for worlds we do not know about.
                world_id = int(name.split("_")[1])
                world = World.objects.filter(id=world_id).first()
                if world is None:
                    continue

                if name.endswith(".png"):
                    _process_image(world, root, name)
                elif name.endswith(".beacons.gz"):
                    _process_beacons(world, root, name)

    click.echo("Cleaning up...")
    os.remove(atlas_zip_file.name)
    shutil.rmtree(BASE_DIR)

    click.echo("Purging CDN cache...")
    purge_static_cache(["worlds", "atlas"])
def command():
    """Export every recipe to a multi-sheet XLSX file and publish it.

    Sheets: "Single"/"Bulk"/"Mass" for machine recipes, "Furnace" for
    furnace recipes, and "Groups" listing each recipe group's members.
    The finished workbook is stored via ``create_export_file`` and the
    exports CDN path is purged.
    """
    queryset = Recipe.objects.all().prefetch_related(
        "tints",
        "tints__localizedname_set",
        "requirements",
        "requirements__skill",
        "levels",
        "levels__inputs",
        "levels__inputs__group",
        "levels__inputs__item",
        "levels__inputs__item__localizedname_set",
    )

    groups = RecipeGroup.objects.all().prefetch_related(
        "members",
        "members__localizedname_set",
    )

    workbook = Workbook()
    workbook.active.title = "Single"
    workbook.create_sheet("Bulk")
    workbook.create_sheet("Mass")
    workbook.create_sheet("Furnace")
    workbook.create_sheet("Groups")

    group_sheet = workbook["Groups"]
    group_sheet.append(["Name", "Members"])
    for group in groups:
        group_sheet.append([group.name] +
                           [i.english for i in group.members.all()])

    single = workbook["Single"]
    bulk = workbook["Bulk"]
    mass = workbook["Mass"]
    furnace = workbook["Furnace"]

    crafting_headers = HEADERS.copy()
    crafting_headers.pop(3)  # Heat
    single.append(crafting_headers)
    bulk.append(crafting_headers)
    mass.append(crafting_headers)

    furnace_headers = HEADERS.copy()
    furnace_headers.pop(11)  # Spark
    # BUG FIX: furnace_headers was computed but never written, leaving the
    # Furnace sheet without a header row.
    furnace.append(furnace_headers)

    click.echo("Creating recipes...")
    with click.progressbar(queryset) as pbar:
        for recipe in pbar:
            single_only, rows = _get_levels(recipe)

            if recipe.machine == "FURNACE":
                furnace.append(rows[0])
                continue

            # If the single and bulk rows agree at column 12, treat the
            # recipe as single-only (presumably an output/quantity column —
            # TODO confirm against HEADERS).
            if rows[0][12] == rows[1][12]:
                single_only = True

            single.append(rows[0])
            if not single_only:
                bulk.append(rows[1])
                mass.append(rows[2])

    click.echo("Resizing columns...")
    set_column_widths(group_sheet)
    set_column_widths(single)
    set_column_widths(bulk)
    set_column_widths(mass)
    # BUG FIX: the Furnace sheet was never resized.
    set_column_widths(furnace)

    click.echo("Creating file...")
    create_export_file(FILENAME, "xlsx", DESCRIPTION,
                       save_virtual_workbook(workbook))

    click.echo("Purging CDN cache...")
    purge_static_cache(["exports"])
def command():
    """Regenerate world thumbnails and missing-world data, then purge the CDN.

    Delegates the actual work to the module helpers ``_thumbs`` and
    ``_missing``; this entry point only sequences them and invalidates
    the "worlds" static cache afterwards.
    """
    _thumbs()
    _missing()

    click.echo("Purging CDN cache...")
    purge_static_cache(["worlds"])
def run(  # pylint: disable=too-many-locals
    force=False,
    start_id=None,
    end_id=None,
    color_variants=True,
    **kwargs,
):
    """Attach localization/image data to Items, create AltItems, and import
    block and liquid data from the compiled game archetypes.

    Args:
        force: passed through to ``_create_icons`` (presumably forces icon
            regeneration — confirm against that helper).
        start_id / end_id: optional inclusive game_id range filter.
        color_variants: passed through to ``_create_icons``.
    """
    known_items = {}
    compiled_items = GameFile.objects.get(
        folder="assets/archetypes", filename="compileditems.msgpack").content

    item_queryset = Item.objects.all()
    if start_id is not None:
        item_queryset = item_queryset.filter(game_id__gte=start_id)
    if end_id is not None:
        item_queryset = item_queryset.filter(game_id__lte=end_id)

    click.echo("Attaching localization and images data to items...")
    with click.progressbar(
            item_queryset.iterator(),
            show_percent=True,
            show_pos=True,
            length=item_queryset.count(),
    ) as pbar:
        for item in pbar:
            pbar.label = str(item.game_id)
            pbar.render_progress()

            known_items[item.game_id] = item

            list_type = compiled_items[str(item.game_id)].get("listTypeName")
            if list_type:
                item.list_type = LocalizedString.objects.filter(
                    string_id=list_type).first()
            item.description = LocalizedString.objects.filter(
                string_id=f"{item.string_id}_DESCRIPTION").first()

            _create_icons(item, pbar, force, color_variants)
            item.save()

    click.echo("Purging CDN cache...")
    purge_static_cache(["items"])

    click.echo("Creating AltItems...")
    with click.progressbar(compiled_items.items()) as pbar:
        for raw_game_id, item_data in pbar:
            game_id = int(raw_game_id)
            # Already a real Item — nothing to alias.
            if game_id in known_items:
                continue

            string_id = item_data["stringID"]
            # Ash reclaim variants all alias the default ash base item.
            if "ITEM_TYPE_ASH_RECLAIM" in string_id:
                string_id = "ITEM_TYPE_ASH_DEFAULT_BASE"

            base_item = Item.objects.filter(string_id=string_id).first()
            if base_item is not None:
                alt_item, _ = AltItem.objects.get_or_create(
                    game_id=int(item_data["id"]),
                    name=item_data["name"],
                    base_item=base_item)
                known_items[alt_item.game_id] = base_item

    compiled_blocks = GameFile.objects.get(
        folder="assets/archetypes", filename="compiledblocks.msgpack").content

    _blocks(compiled_blocks, known_items)
    _liquids(compiled_blocks, known_items)
def run(**kwargs):  # pylint: disable=too-many-locals
    """Import emoji images from the game's layer data and name table.

    Rebuilds each emoji's image and thumbnail, categorizes it (hex-named
    emoji are looked up in the unicode emoji list; others are treated as
    Boundless-specific), syncs its alternate names, and reports how many
    new Emoji rows were created.
    """
    emoji_list = _get_emoji_list()
    emoji_nametable = GameFile.objects.get(folder="assets/gui/emoji",
                                           filename="emoji.json").content

    emoji_created = 0
    click.echo("Importing emojis...")
    # emoji_layer_data is a flat [name, layers, name, layers, ...] sequence,
    # hence the stride-2 indexing below.
    with click.progressbar(range(len(emoji_layer_data) // 2),
                           show_pos=True,
                           show_percent=True) as pbar:
        for index in pbar:
            name = emoji_layer_data[2 * index]
            layers = emoji_layer_data[2 * index + 1]

            image = get_emoji(name, layers)
            emoji_image = get_django_image(image, f"{name}.png")

            emoji, created = Emoji.objects.get_or_create(
                name=name,
                defaults={"image": emoji_image},
            )
            if not created:
                # Drop the previously stored files before re-attaching so
                # stale media does not pile up.
                if emoji.image is not None and emoji.image.name:
                    emoji.image.delete()
                emoji.image = emoji_image
                if emoji.image_small is not None and emoji.image_small.name:
                    emoji.image_small.delete()
            emoji.image_small = make_thumbnail(emoji_image)

            alt_names = emoji_nametable.get(name)

            try:
                int(emoji.name, 16)
            except ValueError:
                # Non-hex names are Boundless-specific emoji.
                emoji.category = "BOUNDLESS"
            else:
                lookup = emoji.name.upper()
                for emoji_dict in emoji_list:
                    if lookup in emoji_dict["codePoint"].split(" "):
                        emoji.category = GROUP_TO_CATEGORY[emoji_dict["group"]]
            if emoji.category is None:
                emoji.category = Emoji.EmojiCategory.UNCATEGORIZED

            # Only emoji present in the name table are active.
            emoji.active = alt_names is not None
            emoji.save()

            if alt_names is not None:
                for alt_name in alt_names:
                    EmojiAltName.objects.get_or_create(emoji=emoji,
                                                       name=alt_name)

            if created:
                emoji_created += 1

    print_result("emojis", emoji_created)

    click.echo("Purging CDN cache...")
    purge_static_cache(["emoji"])
def run(**kwargs):  # pylint: disable=too-many-locals
    """Import skill groups and skills from the game's skilltrees archetype.

    Creates each SkillGroup and Skill if missing, otherwise refreshes its
    attributes from the game data, then reports counts and purges the
    skills CDN path.
    """
    skilltrees = GameFile.objects.get(folder="assets/archetypes",
                                      filename="skilltrees.msgpack").content

    click.echo("Importing skills...")
    with click.progressbar(skilltrees.items()) as pbar:
        skill_groups_created = 0
        skills_created = 0

        for group_name, group_data in pbar:
            group_attrs = {
                "skill_type": group_data["type"],
                "display_name": LocalizedString.objects.get(
                    string_id=group_data["displayName"]),
                "unlock_level": group_data["unlockAtLevel"],
            }

            skill_group, group_was_created = SkillGroup.objects.get_or_create(
                name=group_name, defaults=group_attrs)
            if group_was_created:
                skill_groups_created += 1
            else:
                # Refresh an existing group in place from the game data.
                for key, value in group_attrs.items():
                    setattr(skill_group, key, value)
                skill_group.save()

            for skill_data in group_data["skills"]:
                icon_name = f"{skill_data['icon']}.png"
                icon_image = get_image(
                    f"gui/sprites/distance_maps_bw/icons/skills/{icon_name}")

                skill_attrs = {
                    "group": skill_group,
                    "number_unlocks": skill_data["num"],
                    "cost": skill_data["cost"],
                    "order": skill_data["order"],
                    "category": skill_data["category"],
                    "link_type": skill_data["linkType"],
                    "description": LocalizedString.objects.get(
                        string_id=skill_data["description"]),
                    "display_name": LocalizedString.objects.get(
                        string_id=skill_data["displayName"]),
                    "bundle_prefix": skill_data["bundlePrefix"],
                    "affected_by_other_skills":
                        skill_data["affectedByOtherSkills"],
                    "icon": get_django_image(icon_image, icon_name),
                }

                skill, skill_was_created = Skill.objects.get_or_create(
                    name=skill_data["name"], defaults=skill_attrs)
                if skill_was_created:
                    skills_created += 1
                else:
                    _set_skill_attrs(skill, skill_attrs)
                    skill.save()

    print_result("skill groups", skill_groups_created)
    print_result("skills", skills_created)

    click.echo("Purging CDN cache...")
    purge_static_cache(["skills"])