def command(name=None, emoji=None, emojis=False, images=False, **kwargs):
    if name is not None:
        _save_image(name)
        return

    if emoji is not None:
        _save_emoji(emoji)
        return

    if not images and not emojis:
        images = True
        emojis = True

    if images:
        click.echo("Saving images...")
        with click.progressbar(
            atlas_data["sprites"], show_pos=True, show_percent=True
        ) as pbar:
            for sprite_data in pbar:
                _save_image(sprite_data["name"].lower(), sprite_data=sprite_data)

    if emojis:
        click.echo("Saving emojis...")
        with click.progressbar(
            range(len(emoji_layer_data) // 2), show_pos=True, show_percent=True
        ) as pbar:
            for index in pbar:
                name, layers = (
                    emoji_layer_data[2 * index],
                    emoji_layer_data[(2 * index) + 1],
                )
                _save_emoji(name, layers)

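# NOTE: `emoji_layer_data` is evidently a flat list alternating names and
# layer payloads ([name0, layers0, name1, layers1, ...]). The index
# arithmetic above can be replaced with slice-based pairing; a
# behavior-equivalent sketch, assuming only that data shape:
pairs = list(zip(emoji_layer_data[::2], emoji_layer_data[1::2]))

with click.progressbar(pairs, show_pos=True, show_percent=True) as pbar:
    for name, layers in pbar:
        _save_emoji(name, layers)
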
def command(initial, index):
    if not index.last_update_serial or initial:
        chunk_size = 150  # Number of packages to update per task
        concurrency = 30  # Number of concurrent tasks

        # As we are syncing everything, get the current serial.
        index.last_update_serial = index.client.changelog_last_serial()

        # Get the set of all existing packages. We will discard IDs of updated
        # packages from it and then remove all the remaining packages.
        all_package_ids = set(index.package_set.values_list('id', flat=True))

        # Get all the names of the packages on the selected index.
        click.secho('Fetching list of packages from {}...'.format(index.url),
                    fg='yellow')
        all_packages = index.client.list_packages()

        # Import all packages' metadata in separate chunks and tasks.
        click.secho('Importing {} packages...'.format(len(all_packages)),
                    fg='yellow')

        # Create a generator of (index.pk, all_packages[i:i+chunk_size]) tuples.
        args = iterzip(
            itertools.repeat(index.pk),
            utils.iter_chunks(all_packages, chunk_size),
        )

        # Submit each tuple in args to the workers, but limit it to at most
        # `concurrency` running tasks.
        results_iterator = utils.bounded_submitter(
            import_packages,
            concurrency,
            args,
        )

        with click.progressbar(length=len(all_packages), show_pos=True) as bar:
            for succeeded, ignored, failed in results_iterator:
                bar.update(len(succeeded) + len(ignored) + len(failed))
                all_package_ids -= set(succeeded.values())
                if failed:
                    click.echo('')
                    for k, v in six.iteritems(failed):
                        click.secho('Failed to import {} ({})'.format(k, v),
                                    fg='red')

        # Remove the packages that were not updated (i.e. not found on the
        # index anymore) from the database.
        click.secho('Removing {} outdated packages...'.format(
            len(all_package_ids)), fg='yellow')
        index.package_set.filter(pk__in=all_package_ids).delete()

        index.save(update_fields=['last_update_serial'])

    # Sync everything since the last serial, even when initial == True, as
    # something might have changed in the meantime.
    events = index.client.changelog_last_serial() - index.last_update_serial
    if events:
        click.secho('Syncing remaining updates...', fg='yellow')
        sync_iter = index.itersync()
        with click.progressbar(sync_iter, length=events, show_pos=True) as bar:
            for event in bar:
                pass

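# NOTE: `iterzip`, `utils.iter_chunks`, and `utils.bounded_submitter` are not
# shown in this snippet. Below is a minimal sketch of what the two helpers
# could look like, assuming `bounded_submitter(func, concurrency, args)` fans
# each argument tuple out to a worker pool while capping the number of
# in-flight tasks and yielding results as they complete. These are
# illustrative implementations, not the project's actual code.
import itertools
from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, as_completed, wait


def iter_chunks(iterable, size):
    """Yield successive lists of up to `size` items from `iterable`."""
    iterator = iter(iterable)
    while True:
        chunk = list(itertools.islice(iterator, size))
        if not chunk:
            return
        yield chunk


def bounded_submitter(func, concurrency, args):
    """Call `func(*arg_tuple)` for each tuple in `args`, keeping at most
    `concurrency` tasks in flight, and yield results as they complete."""
    with ThreadPoolExecutor(max_workers=concurrency) as executor:
        pending = set()
        for arg_tuple in args:
            if len(pending) >= concurrency:
                done, pending = wait(pending, return_when=FIRST_COMPLETED)
                for future in done:
                    yield future.result()
            pending.add(executor.submit(func, *arg_tuple))
        for future in as_completed(pending):
            yield future.result()
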
def _blocks(compiled_blocks, items):
    click.echo("Generating block mappings...")

    blocks_created = 0
    with click.progressbar(compiled_blocks["BlockTypesData"]) as pbar:
        for block_data in pbar:
            if block_data is None:
                continue

            item = None
            item_id = (block_data["inventoryRemap"]
                       or block_data["rootType"]
                       or block_data["id"])
            if item_id != 0:
                item = items.get(item_id)

            if item is None:
                item = Item.objects.filter(
                    string_id=f"ITEM_TYPE_{block_data['name']}").first()

            if item is not None:
                item.is_block = True
                item.prestige = block_data.get("prestige", 0)
                item.mine_xp = block_data.get("mineXP", 0)
                item.build_xp = block_data.get("buildXP", 0)
                item.save()

            _, created = Block.objects.get_or_create(
                game_id=block_data["id"],
                name=block_data["name"],
                block_item=item,
            )

            if created:
                blocks_created += 1

    click.echo(f"{blocks_created} Block(s) created")

def _liquids(compiled_blocks, items):
    click.echo("Generating liquid mappings...")

    liquids_created = 0
    with click.progressbar(
            compiled_blocks["LiquidTypesData"].values()) as pbar:
        for liquid_data in pbar:
            if liquid_data is None:
                continue

            item = None
            item_id = liquid_data["itemType"]
            if item_id != 0:
                item = items.get(item_id)

            if item is None:
                item = Item.objects.filter(
                    string_id=f"ITEM_TYPE_{liquid_data['name']}").first()

            if item is not None:
                item.is_liquid = True
                item.prestige = liquid_data.get("prestige", 0)
                item.mine_xp = liquid_data.get("mineXP", 0)
                item.build_xp = liquid_data.get("buildXP", 0)
                item.save()

            _, created = Liquid.objects.get_or_create(
                game_id=liquid_data["id"],
                name=liquid_data["name"],
                block_item=item,
            )

            if created:
                liquids_created += 1

    click.echo(f"{liquids_created} Liquid(s) created")

def generate_master_hashes():
    """Generate missing sha1 hashes."""
    # mysql does not support remote/streaming cursors,
    # so to save memory items are loaded from the db individually
    values = Media.objects.filter(
        master_sha1__isnull=True).order_by('pk').values('id').nocache()
    item_ids = [i['id'] for i in values]

    with click.progressbar(item_ids, show_pos=True, width=48,
                           label='Reprocessing {} hashes'.format(
                               len(item_ids))) as bar:
        for item_pk in bar:
            close_old_connections()
            item = Media.objects.get(pk=item_pk)
            if item.master and item.master.path:
                master_sha1 = item.generate_sha1()
                # update stored media hash
                Media.objects.filter(pk=item.pk).update(
                    master_sha1=master_sha1)

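# NOTE: the per-PK reload above works around MySQL's lack of streaming
# cursors. On a backend with server-side cursor support (e.g. PostgreSQL),
# the same job could stream rows with QuerySet.iterator(). A sketch of that
# alternative, not what the code above does:
queryset = Media.objects.filter(master_sha1__isnull=True).order_by('pk')
with click.progressbar(queryset.iterator(chunk_size=500),
                       length=queryset.count(),
                       show_pos=True, width=48,
                       label='Reprocessing hashes') as bar:
    for item in bar:
        if item.master and item.master.path:
            Media.objects.filter(pk=item.pk).update(
                master_sha1=item.generate_sha1())
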
def _create_colors(color_list):
    color_palettes = GameFile.objects.get(
        folder="assets/archetypes",
        filename="compiledcolorpalettelists.msgpack",
    ).content

    colors = _create_generic("Colors", color_list, Color)

    click.echo("Creating Color Values...")
    color_values_created = 0
    with click.progressbar(color_palettes) as pbar:
        for color_palette in pbar:
            for color_variations, color_id in color_palette["colorVariations"]:
                _, was_created = ColorValue.objects.get_or_create(
                    color=colors[color_id],
                    color_type=color_palette["name"],
                    defaults={
                        "shade": color_variations[0],
                        "base": color_variations[1],
                        "hlight": color_variations[2],
                    },
                )

                if was_created:
                    color_values_created += 1

    print_result("color values", color_values_created)

    return colors

def _create_color_group_data(colors: list[Color]):
    shades = {s: {s.value.lower()} for s in list(Color.ColorShade)}
    bases = {b: {b.value.lower()} for b in list(Color.ColorBase)}
    groups = {
        Color.ColorGroup.BLUE: {"azure", "cerulean", "cobalt", "blue"},
        Color.ColorGroup.VIOLET: {"lavender", "lilac", "magenta", "violet", "berry"},
        Color.ColorGroup.RED: {"fuchsia", "cherry", "red", "rose"},
        Color.ColorGroup.ORANGE: {"orange"},
        Color.ColorGroup.YELLOW: {"sepia", "taupe", "mustard", "tan", "yellow"},
        Color.ColorGroup.GREEN: {
            "lime",
            "moss",
            "green",
            "mint",
            "teal",
            "viridian",
            "turquoise",
            "slate",
        },
        Color.ColorGroup.BLACK: {"black", "grey", "white"},
    }

    bases[Color.ColorBase.BLACK].add("grey")
    bases[Color.ColorBase.BLACK].add("white")

    click.echo("Calculating color groups...")
    with click.progressbar(colors) as pbar:
        for color in pbar:
            parts = set(color.default_name.lower().split(" "))

            for shade, values in shades.items():
                if len(parts & values) > 0:
                    color.shade = shade
                    break

                if len(parts) == 1:
                    color.shade = Color.ColorShade.PURE
                    break

            for base, values in bases.items():
                if len(parts & values) > 0:
                    color.base = base
                    break

            if color.base is None:
                click.echo(f"Could not find base for {color}")

            for group, values in groups.items():
                if len(parts & values) > 0:
                    color.group = group
                    break

            if color.group is None:
                click.echo(f"Could not find group for {color}")

            color.save()

def _thumbs():
    click.echo("Adding thumbs/renaming images...")

    duplicates = []
    worlds = World.objects.filter(image__isnull=False)
    with click.progressbar(
        worlds.iterator(),
        show_percent=True,
        show_pos=True,
        length=worlds.count(),
    ) as pbar:
        for world in pbar:
            if world.image is not None and world.image.name:
                expected_name = f"{world.id}.png"
                if world.image.name != expected_name:
                    try:
                        temp_file = download_image(world.image.url)
                    except (AzureMissingResourceHttpError, HTTPError):
                        world.image = None
                        world.save()
                        continue
                    else:
                        world.image.delete()
                        world.image = get_django_image_from_file(
                            temp_file.name, expected_name)
                        world.save()
                        world.refresh_from_db()

                        if world.image.name != expected_name:
                            duplicates.append(world.image.name)
                            continue

                if world.image_small is None or not world.image_small.name:
                    try:
                        world.image_small = make_thumbnail(world.image)
                    except AzureMissingResourceHttpError:
                        world.image = None

                expected_thumb_name = f"{world.id}_small.png"
                if world.image_small.name != expected_thumb_name:
                    try:
                        temp_file = download_image(world.image_small.url)
                    except (AzureMissingResourceHttpError, HTTPError):
                        world.image_small = None
                        world.save()
                        continue
                    else:
                        world.image_small.delete()
                        world.image_small = get_django_image_from_file(
                            temp_file.name, expected_thumb_name)
                        world.save()
                        world.refresh_from_db()

                        if world.image_small.name != expected_thumb_name:
                            duplicates.append(world.image_small.name)
                            continue

    click.echo("-----duplicates")
    click.echo(duplicates)

def read_stream_with_progress(
    stream: TextIOWrapper, progress_label: str, length: int = None, reader=None
):
    # Count lines if no length was given; this consumes the stream,
    # so rewind before handing it to the progress bar.
    length = length or sum(1 for _ in stream)
    reader = reader or stream
    stream.seek(0)

    click.secho(f"Found {length} lines")
    with click.progressbar(
        reader, length=length, label=progress_label
    ) as progress_reader:
        yield progress_reader

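# NOTE: since the function yields the wrapped reader exactly once from inside
# the `with` block, a caller consumes it as a one-item generator. A plausible
# call site (the file name and row handling here are illustrative):
import csv

with open("data.csv") as stream:
    reader = csv.reader(stream)
    for progress_reader in read_stream_with_progress(
        stream, "Importing rows", reader=reader
    ):
        for row in progress_reader:
            pass  # process one row here
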
def clean_assets(age):
    """Delete (cached) media assets (encoded versions, waveforms) that have
    not been accessed for x days."""
    from media_asset.models import Format
    from media_asset.models import Waveform

    format_qs = Format.objects.filter(
        accessed__lte=datetime.now() - timedelta(days=age)
    ).nocache()
    waveform_qs = Waveform.objects.filter(
        accessed__lte=datetime.now() - timedelta(days=age)
    ).nocache()

    with click.progressbar(format_qs,
                           label='Deleting {} media format versions'.format(
                               format_qs.count())) as bar:
        for item in bar:
            item.delete()

    with click.progressbar(waveform_qs,
                           label='Deleting {} waveforms'.format(
                               waveform_qs.count())) as bar:
        for item in bar:
            item.delete()

def command(force):
    if force:
        LocalizedStringText.objects.all().update(_plain_text=None)

    click.echo("Getting localizations that need processing...")
    texts = (
        LocalizedStringText.objects.filter(
            Q(_plain_text__isnull=True)
            | Q(_plain_text__contains="ATTRIBUTE")
            | Q(_plain_text__contains="BUNDLE")
            | Q(_plain_text__contains="ACTION")
        )
        .select_related("string")
        .prefetch_related("string__skill_set")
    )

    total = texts.count()
    changed_count = 0
    with click.progressbar(
        texts.iterator(), length=total, show_pos=True, show_percent=True
    ) as pbar:
        for localized_text in pbar:
            changed = False
            if localized_text._plain_text is None:
                localized_text._plain_text = re.sub(
                    LocalizedStringText.STYLE_REGEX,
                    r"\g<1>",
                    localized_text.text,
                )
                localized_text._plain_text = re.sub(
                    LocalizedStringText.POST_REL_REGEX,
                    r"",
                    localized_text._plain_text,
                )
                changed = True

            matches = re.findall(
                LocalizedStringText.ATTRIBUTE_REGEX, localized_text._plain_text
            )
            if len(matches) > 0:
                changed = _replace_lookups(localized_text, matches)

            if changed:
                changed_count += 1
                localized_text.save()

    click.echo(f"{changed_count} of {total} updated")

def warm_cache(content_types):
    """Warm cache for given types."""
    click.secho('Warming cache for: {}'.format(', '.join(content_types)))

    from alibrary.models import Artist

    if 'artist' in content_types or 'all' in content_types:
        artist_qs = Artist.objects.order_by('-updated').all()
        with click.progressbar(artist_qs,
                               label='Warming cache for {} items'.format(
                                   artist_qs.count())) as bar:
            for item in bar:
                item.get_releases()
                item.get_media()

def _create_generic(name, index_list, klass):
    click.echo(f"Creating {name}...")

    created = 0
    objects = {}
    with click.progressbar(index_list) as pbar:
        for index in pbar:
            obj, was_created = klass.objects.get_or_create(game_id=index)
            objects[obj.game_id] = obj

            if was_created:
                created += 1

    print_result(name, created)

    return objects

def delete_orphaned_tags():
    """Delete tags that are not assigned to any objects anymore
    (e.g. because they were 'merged')."""
    from tagging.models import TaggedItem

    to_delete = []
    tagged_items = TaggedItem.objects.all().nocache()

    with click.progressbar(tagged_items,
                           label='Scanning {} tagged items'.format(
                               tagged_items.count())) as bar:
        for ti in bar:
            if not ti.object:
                to_delete.append(ti.pk)

    click.echo('Total tagged items: {}'.format(tagged_items.count()))
    click.echo('Orphaned tagged items: {}'.format(len(to_delete)))

    TaggedItem.objects.filter(pk__in=to_delete).delete()

def _beacons(force, colors):
    click.echo("Doing Beacon Scans...")
    if force:
        BeaconScan.objects.all().update(text_name=None, html_name=None)

    beacons = BeaconScan.objects.all()
    with click.progressbar(beacons.iterator(),
                           length=beacons.count(),
                           show_percent=True,
                           show_pos=True) as pbar:
        for beacon in pbar:
            if beacon.name is not None and beacon.text_name is None:
                beacon.text_name = html_name(beacon.name, strip=True,
                                             colors=colors)
                beacon.html_name = html_name(beacon.name, colors=colors)
                beacon.save()

def _worlds(force, colors):
    click.echo("Doing Worlds...")
    if force:
        World.objects.all().update(text_name=None, html_name=None,
                                   sort_name=None)

    worlds = World.objects.all()
    with click.progressbar(worlds.iterator(),
                           length=worlds.count(),
                           show_percent=True,
                           show_pos=True) as pbar:
        for world in pbar:
            world = calculate_extra_names(world, world.display_name,
                                          colors=colors)
            world.save()

def _missing():
    click.echo("Fixing abandoned images...")

    not_found = []
    multiple = []
    for dirpath, _, filenames in os.walk(settings.BOUNDLESS_WORLDS_LOCATIONS):
        with click.progressbar(filenames, show_percent=True,
                               show_pos=True) as pbar:
            for filename in pbar:
                world_name = filename.split(".")[0]
                if world_name.endswith("_small"):
                    os.remove(os.path.join(dirpath, filename))
                    continue

                world_name = world_name.replace("_", " ").title()
                worlds = World.objects.filter(
                    display_name__icontains=world_name)

                if worlds.count() == 0:  # pylint: disable=no-else-continue
                    not_found.append(filename)
                    continue
                elif worlds.count() > 1:
                    multiple.append(filename)
                    continue

                world = worlds.get()
                image_path = os.path.join(dirpath, filename)
                if world.image is not None and world.image.name:
                    os.remove(image_path)
                else:
                    world.image = get_django_image_from_file(
                        image_path, f"{world.id}.png")

                    if world.image_small is not None and world.image_small.name:
                        world.image_small.delete()

                    world.image_small = make_thumbnail(world.image)
                    world.save()
                    os.remove(image_path)

    click.echo("-----not_found")
    click.echo(not_found)
    click.echo("-----multiple")
    click.echo(multiple)

def _create_items(items_list, subtitles):
    compiled_items = GameFile.objects.get(
        folder="assets/archetypes", filename="compileditems.msgpack"
    ).content

    click.echo("Creating Items...")
    items_created = 0
    items_disabled = 0
    items = {}
    with click.progressbar(items_list, show_pos=True) as pbar:
        for item in pbar:
            string_item_id = str(item["item_id"])

            item_obj, was_created = Item.objects.get_or_create(
                game_id=item["item_id"],
                string_id=compiled_items[string_item_id]["stringID"],
            )

            item_obj.name = compiled_items[string_item_id]["name"]
            item_obj.item_subtitle = subtitles[item["subtitle_id"]]
            item_obj.mint_value = compiled_items[string_item_id]["coinValue"]
            item_obj.max_stack = compiled_items[string_item_id]["maxStackSize"]
            item_obj.can_be_sold = item_obj.game_id not in settings.BOUNDLESS_NO_SELL

            # items that cannot be dropped or minted are not normally obtainable
            can_drop = compiled_items[string_item_id]["canDrop"]
            is_active = (
                can_drop
                and item_obj.mint_value is not None
                and item_obj.game_id not in settings.BOUNDLESS_BLACKLISTED_ITEMS
            )

            if not was_created and (not is_active and item_obj.active):
                items_disabled += 1

            item_obj.active = is_active
            item_obj.save()
            items[item_obj.game_id] = item_obj

            if was_created:
                items_created += 1

    print_result("item", items_created)
    print_result("item", items_disabled, "disabled")

    return items

def _settlements(force, colors):
    click.echo("Doing Settlements...")
    if force:
        Settlement.objects.all().update(text_name=None, html_name=None)

    settlements = Settlement.objects.all()
    with click.progressbar(
        settlements.iterator(),
        length=settlements.count(),
        show_percent=True,
        show_pos=True,
    ) as pbar:
        for settlement in pbar:
            if settlement.name is not None and settlement.text_name is None:
                settlement.text_name = html_name(settlement.name, strip=True,
                                                 colors=colors)
                settlement.html_name = html_name(settlement.name,
                                                 colors=colors)
                settlement.save()

def command(dropbox_url):
    click.echo("Downloading zip...")
    response = requests.get(dropbox_url)
    response.raise_for_status()

    click.echo("Writing zip...")
    atlas_zip_file = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
        delete=False)
    atlas_zip_file.write(response.content)
    atlas_zip_file.close()

    os.makedirs(BASE_DIR)
    with zipfile.ZipFile(atlas_zip_file.name, "r") as zip_file:
        zip_file.extractall(BASE_DIR)

    click.echo("Processing data...")
    for root, _, files in os.walk(BASE_DIR):
        with click.progressbar(files, show_percent=True,
                               show_pos=True) as pbar:
            for name in pbar:
                pbar.label = name
                pbar.render_progress()

                world_id = int(name.split("_")[1])
                world = World.objects.filter(id=world_id).first()

                if world is None:
                    continue

                if name.endswith(".png"):
                    _process_image(world, root, name)
                elif name.endswith(".beacons.gz"):
                    _process_beacons(world, root, name)

    click.echo("Cleaning up...")
    os.remove(atlas_zip_file.name)
    shutil.rmtree(BASE_DIR)

    click.echo("Purging CDN cache...")
    purge_static_cache(["worlds", "atlas"])

def _create_recipe_groups(recipes):
    recipe_groups_created = 0
    click.echo("Importing recipe groups...")
    with click.progressbar(recipes["groups"]) as pbar:
        for group in pbar:
            display_name = LocalizedString.objects.get(
                string_id=group["groupDisplayName"])
            recipe_group, created = RecipeGroup.objects.get_or_create(
                name=group["groupName"],
                defaults={"display_name": display_name})

            members = []
            for member_id in group["groupMembers"]:
                members.append(Item.objects.get(game_id=member_id))
            recipe_group.members.set(members)

            if created:
                recipe_groups_created += 1
            else:
                recipe_group.display_name = display_name
                recipe_group.save()

    print_result("recipe groups", recipe_groups_created)

def clean_orphaned_tagged_items():
    """
    usage: ./manage.py tagging_extra_cli clean_orphaned_tagged_items
    """
    click.echo(u'cleaning orphaned tag assignments')

    qs = TaggedItem.objects.all().prefetch_related('object')
    to_be_deleted = []
    with click.progressbar(qs) as bar:
        for item in bar:
            if not item.object:
                to_be_deleted.append(item.pk)

    # print(to_be_deleted)
    click.echo(u'{} assignments marked for deletion'.format(
        len(to_be_deleted)))

    TaggedItem.objects.filter(pk__in=to_be_deleted).delete()

def _create_localization_data(strings, data):
    click.echo("Processing localization data...")
    for lang_name, lang_data in strings.items():
        _create_localized_names(lang_name, lang_data, data)

        click.echo(f"Creating localized strings for {lang_name}...")
        strings_content = GameFile.objects.get(
            folder="assets/archetypes/strings", filename=f"{lang_name}.msgpack"
        ).content

        strings_created = 0
        with click.progressbar(strings_content.items()) as pbar:
            for string_id, text in pbar:
                string, _ = LocalizedString.objects.get_or_create(
                    string_id=string_id)

                string_text, created = LocalizedStringText.objects.get_or_create(
                    string=string, lang=lang_name, defaults={"text": text}
                )
                string_text.text = text
                string_text.save()

                if created:
                    strings_created += 1

        print_result("localized strings", strings_created)

def run(force=False, **kwargs):
    recipes = GameFile.objects.get(
        folder="assets/archetypes", filename="recipes.msgpack").content

    _create_recipe_groups(recipes)

    recipes_created = 0
    click.echo("Importing crafting recipes...")
    with click.progressbar(recipes["recipes"]) as pbar:
        for recipe_dict in pbar:
            attrs = {
                "heat": recipe_dict["heat"],
                "craft_xp": recipe_dict["craftXP"],
                "machine": recipe_dict.get("machine"),
                "output": Item.objects.get(game_id=recipe_dict["outputItem"]),
                "can_hand_craft": recipe_dict["canHandCraft"],
                "machine_level": recipe_dict["machineLevel"],
                "power": recipe_dict["powerRequired"],
                "group_name": recipe_dict["recipeGroupName"],
                "knowledge_unlock_level": recipe_dict["knowledgeUnlockLevel"],
                "required_event": recipe_dict.get("onlyDuringTimeLimitedEvents"),
                "required_backer_tier": recipe_dict.get("minBackerTierNeeded")
                or recipe_dict.get("accountFlagBitMaskNeeded"),
            }

            recipe, created = Recipe.objects.get_or_create(
                game_id=recipe_dict["ID"], defaults=attrs)

            recipe.tints.set(_get_tints(recipe_dict["tintTakenFrom"]))
            recipe.requirements.set(
                _get_requirements(recipe_dict.get("prerequisites", [])))
            recipe.levels.set(_get_levels(recipe_dict))

            if created:
                recipes_created += 1
            else:
                for attr_name, attr_value in attrs.items():
                    setattr(recipe, attr_name, attr_value)
                recipe.save()

    print_result("recipes", recipes_created)

    cleaned_up = 0
    cleaned_up += RecipeRequirement.objects.filter(
        recipe__isnull=True).delete()[0]
    cleaned_up += RecipeLevel.objects.filter(recipe__isnull=True).delete()[0]
    cleaned_up += RecipeInput.objects.filter(
        recipelevel__isnull=True).delete()[0]

    if cleaned_up > 0:
        click.echo(f"Cleaned up {cleaned_up} dangling recipe models")

def command():
    queryset = Recipe.objects.all().prefetch_related(
        "tints",
        "tints__localizedname_set",
        "requirements",
        "requirements__skill",
        "levels",
        "levels__inputs",
        "levels__inputs__group",
        "levels__inputs__item",
        "levels__inputs__item__localizedname_set",
    )
    groups = RecipeGroup.objects.all().prefetch_related(
        "members",
        "members__localizedname_set",
    )

    workbook = Workbook()
    workbook.active.title = "Single"
    workbook.create_sheet("Bulk")
    workbook.create_sheet("Mass")
    workbook.create_sheet("Furnace")
    workbook.create_sheet("Groups")

    group_sheet = workbook["Groups"]
    group_sheet.append(["Name", "Members"])
    for group in groups:
        group_sheet.append(
            [group.name] + [i.english for i in group.members.all()])

    single = workbook["Single"]
    bulk = workbook["Bulk"]
    mass = workbook["Mass"]
    furnace = workbook["Furnace"]

    crafting_headers = HEADERS.copy()
    crafting_headers.pop(3)  # Heat

    single.append(crafting_headers)
    bulk.append(crafting_headers)
    mass.append(crafting_headers)

    furnace_headers = HEADERS.copy()
    furnace_headers.pop(11)  # Spark
    furnace.append(furnace_headers)

    click.echo("Creating recipes...")
    with click.progressbar(queryset) as pbar:
        for recipe in pbar:
            single_only, rows = _get_levels(recipe)

            if recipe.machine == "FURNACE":
                furnace.append(rows[0])
                continue

            if rows[0][12] == rows[1][12]:
                single_only = True

            single.append(rows[0])
            if not single_only:
                bulk.append(rows[1])
                mass.append(rows[2])

    click.echo("Resizing columns...")
    set_column_widths(group_sheet)
    set_column_widths(single)
    set_column_widths(bulk)
    set_column_widths(mass)

    click.echo("Creating file...")
    create_export_file(FILENAME, "xlsx", DESCRIPTION,
                       save_virtual_workbook(workbook))

    click.echo("Purging CDN cache...")
    purge_static_cache(["exports"])

def repair_durations(limit_range, dump_to, load_from, tolerance, log_file):
    """Repair/reprocess master durations."""
    from base.audio.fileinfo import FileInfoProcessor

    items_to_reprocess = []
    affected_playlists = []
    affected_playlist_ids = []

    # invalidate cache for Media
    invalidate_model(Media)

    if load_from:
        if limit_range:
            raise NotImplementedError(
                '--limit-range option not allowed in combination with --load-from')

        # using `set` to remove duplicate ids
        item_ids = set([
            int(line.strip().split(',')[0]) for line in load_from.readlines()
            if float(line.strip().split(',')[1]) > tolerance
        ])

        click.echo('loaded {} ids from dump file'.format(len(item_ids)))

        items_to_reprocess = Media.objects.filter(pk__in=item_ids)

    else:
        # mysql does not support remote/streaming cursors
        # to save memory items are loaded from db individually
        values = Media.objects.order_by('pk').values('id').nocache()

        if limit_range:
            _limits = limit_range.split(':')
            values = values[int(_limits[0]):int(_limits[1])]

        item_ids = [i['id'] for i in values]

        with click.progressbar(item_ids, show_pos=True, width=48,
                               label='Reprocessing {} tracks'.format(
                                   len(item_ids))) as bar:
            for item_pk in bar:
                close_old_connections()
                item = Media.objects.get(pk=item_pk)
                if item.master and item.master.path:
                    p = FileInfoProcessor(item.master.path)
                    current_duration = item.master_duration
                    new_duration = p.duration
                    try:
                        diff = abs(current_duration - new_duration)
                    except TypeError:
                        diff = 100.0

                    if diff > tolerance:
                        items_to_reprocess.append(item)

                    # add to csv log
                    if diff > tolerance and dump_to:
                        dump_to.write('{pk},{diff}\n'.format(pk=item.pk,
                                                             diff=diff))
                        dump_to.flush()

    click.echo('{} tracks have differences in duration'.format(
        len(items_to_reprocess)))

    if click.confirm('Do you want to update/repair the durations on {} tracks?'.format(
            len(items_to_reprocess))):

        base_url = 'http://{}'.format(Site.objects.get_current().domain)

        tpl = u'''id: {id} - "{name}"
{url}
old: {current_duration}
new: {new_duration}
diff: {diff}
'''

        tpl_log = u'{ct},{pk},{type},{current_duration},{new_duration},{diff},{url}\n'

        # write column header
        if log_file:
            log_file.write(tpl_log.format(
                ct='content-type',
                pk='id',
                url='url',
                type='type',
                current_duration='old_duration',
                new_duration='new_duration',
                diff='diff',
            ))

        # loop affected media, fix durations, get playlist appearances & print/log info
        for item in items_to_reprocess:
            p = FileInfoProcessor(item.master.path)
            current_duration = item.master_duration
            new_duration = p.duration
            try:
                diff = current_duration - new_duration
            except TypeError:
                diff = '-'

            click.echo(tpl.format(
                id=item.id,
                name=item.name,
                url=base_url + item.get_absolute_url(),
                current_duration=current_duration,
                new_duration=new_duration,
                diff=diff,
            ))

            if log_file:
                log_file.write(tpl_log.format(
                    ct='media',
                    pk=item.pk,
                    url=base_url + item.get_absolute_url(),
                    type=item.get_mediatype_display(),
                    current_duration=current_duration,
                    new_duration=new_duration,
                    diff=diff,
                ))
                log_file.flush()

            for p in item.get_appearances():
                if p.pk not in affected_playlist_ids:
                    affected_playlist_ids.append(p.pk)
                    # we need to store the 'current' value of the duration
                    affected_playlists.append({
                        'obj': p,
                        'current_duration': p.get_duration(),
                    })

            # update media duration
            Media.objects.filter(pk=item.pk).update(
                master_duration=new_duration)
            invalidate_obj(item)

        # loop playlists & print/log info
        for item in affected_playlists:
            invalidate_obj(item['obj'])
            current_duration = float(item['current_duration']) / 1000
            new_duration = float(item['obj'].get_duration()) / 1000
            try:
                diff = current_duration - new_duration
            except TypeError:
                diff = '-'

            click.echo(tpl.format(
                id=item['obj'].id,
                name=item['obj'].name,
                url=base_url + item['obj'].get_absolute_url(),
                current_duration=current_duration,
                new_duration=new_duration,
                diff=diff,
            ))

            if log_file:
                log_file.write(tpl_log.format(
                    ct='playlist',
                    pk=item['obj'].pk,
                    url=base_url + item['obj'].get_absolute_url(),
                    type=item['obj'].get_type_display(),
                    current_duration=current_duration,
                    new_duration=new_duration,
                    diff=diff,
                ))
                log_file.flush()

            # update playlist duration
            Playlist.objects.filter(pk=item['obj'].pk).update(
                duration=new_duration * 1000)
            invalidate_obj(item['obj'])

def _create_localized_names(lang_name, lang_data, data):
    click.echo(f"Creating localized names for {lang_name}...")

    total = (
        len(lang_data["items"])
        + len(lang_data["colors"])
        + len(lang_data["metals"])
        + len(lang_data["subtitles"])
    )

    with click.progressbar(length=total) as pbar:
        localizations_created = 0

        for index, name in lang_data["colors"].items():
            localized, was_created = LocalizedName.objects.get_or_create(
                game_obj=data["colors"][int(index)], lang=lang_name
            )
            localized.name = name
            localized.save()
            if was_created:
                localizations_created += 1
            pbar.update(1)
            pbar.render_progress()

        for index, name in lang_data["metals"].items():
            localized, was_created = LocalizedName.objects.get_or_create(
                game_obj=data["metals"][int(index)], lang=lang_name
            )
            localized.name = name
            localized.save()
            if was_created:
                localizations_created += 1
            pbar.update(1)
            pbar.render_progress()

        for index, name in lang_data["items"].items():
            localized, was_created = LocalizedName.objects.get_or_create(
                game_obj=data["items"][int(index)], lang=lang_name
            )
            localized.name = name
            localized.save()
            if was_created:
                localizations_created += 1
            pbar.update(1)
            pbar.render_progress()

        for index, name in lang_data["subtitles"].items():
            localized, was_created = LocalizedName.objects.get_or_create(
                game_obj=data["subtitles"][int(index)], lang=lang_name
            )
            localized.name = name
            localized.save()
            if was_created:
                localizations_created += 1
            pbar.update(1)
            pbar.render_progress()

    print_result("localized names", localizations_created)

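# NOTE: the four loops above differ only in the key they read from
# `lang_data` and `data`, so they could collapse into one helper without
# changing behavior. A sketch, using the same key names as above:
def _upsert_localized_names(lang_name, lang_data, data, key, pbar):
    """Create/update LocalizedName rows for one category; return how
    many rows were newly created."""
    created = 0
    for index, name in lang_data[key].items():
        localized, was_created = LocalizedName.objects.get_or_create(
            game_obj=data[key][int(index)], lang=lang_name
        )
        localized.name = name
        localized.save()
        if was_created:
            created += 1
        pbar.update(1)
        pbar.render_progress()
    return created


# Inside the progress bar context this replaces all four loops:
# localizations_created = sum(
#     _upsert_localized_names(lang_name, lang_data, data, key, pbar)
#     for key in ("colors", "metals", "items", "subtitles")
# )
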
def run(**kwargs):
    resourcetiers = GameFile.objects.get(
        folder="assets/archetypes", filename="resourcetiers.json").content
    compiled_resource_profiles = GameFile.objects.get(
        folder="server/assets/archetypes",
        filename="compiledresourceprofiles.msgpack",
    ).content["resourceData"]

    _create_resource_liquids()

    data_created = 0
    click.echo("Creating Resource Data...")
    # 0 = Live universe, 1 = Multiverse?
    with click.progressbar(resourcetiers[0].items()) as pbar:
        for block_name, resource_data in pbar:
            block = Block.objects.select_related("block_item").get(
                name=block_name)

            resource_profile = None
            is_embedded = False
            if str(block.game_id) in compiled_resource_profiles["blockResources"]:
                is_embedded = True
                resource_profile = compiled_resource_profiles[
                    "blockResources"][str(block.game_id)]
            else:
                resource_profile = compiled_resource_profiles[
                    "surfaceResources"][str(block.game_id)]

            if not block.block_item.is_resource:
                block.block_item.is_resource = True
                block.block_item.save()

            args = {
                "is_embedded": is_embedded,
                "exo_only": resource_data.get("exoOnly", False),
                "max_tier": resource_data["maxTier"],
                "min_tier": resource_data["minTier"],
                "best_max_tier": resource_data["bestType"]["maxTier"],
                "best_min_tier": resource_data["bestType"]["minTier"],
                "shape": resource_profile["shape"],
                "size_max": resource_profile["sizeMax"],
                "size_min": resource_profile["sizeMin"],
                "altitude_max": resource_profile["altitudeMax"],
                "altitude_min": resource_profile["altitudeMin"],
                "distance_max": resource_profile.get("distanceMax"),
                "distance_min": resource_profile.get("distanceMin"),
                "cave_weighting": resource_profile["caveWeighting"],
                "size_skew_to_min": resource_profile["sizeSkewToMin"],
                "blocks_above_max": resource_profile["blocksAboveMax"],
                "blocks_above_min": resource_profile["blocksAboveMin"],
                "liquid_above_max": resource_profile["liquidAboveMax"],
                "liquid_above_min": resource_profile["liquidAboveMin"],
                "noise_frequency": resource_profile.get("noiseFrequency"),
                "noise_threshold": resource_profile.get("noiseThreshold"),
                "liquid_favorite": _get_liquid(resource_profile["liquidFavourite"]),
                "three_d_weighting": resource_profile["threeDWeighting"],
                "surface_favorite": _get_block(resource_profile["surfaceFavourite"]),
                "surface_weighting": resource_profile["surfaceWeighting"],
                "altitude_best_lower": resource_profile["altitudeBestLower"],
                "altitude_best_upper": resource_profile["altitudeBestUpper"],
                "distance_best_lower": resource_profile.get("distanceBestLower"),
                "distance_best_upper": resource_profile.get("distanceBestUpper"),
                "blocks_above_best_lower": resource_profile["blocksAboveBestLower"],
                "blocks_above_best_upper": resource_profile["blocksAboveBestUpper"],
                "liquid_above_best_upper": resource_profile["liquidAboveBestUpper"],
                "liquid_above_best_lower": resource_profile["liquidAboveBestLower"],
                "liquid_second_favorite": _get_liquid(
                    resource_profile["liquidSecondFavourite"]),
                "surface_second_favorite": _get_block(
                    resource_profile["surfaceSecondFavourite"]),
            }

            data, created = ResourceData.objects.get_or_create(
                item=block.block_item, defaults=args)

            _create_world_types(resource_data, created, data)

            for attr, value in args.items():
                setattr(data, attr, value)
            data.save()

            if created:
                data_created += 1

    click.echo(f"{data_created} Resource data created")

def recalculate_colors(  # pylint: disable=too-many-branches
        world_ids=None, log=None, max_age=None):
    if log is None:
        log = logger.info

    wbcs = _get_block_colors(world_ids)
    if max_age is not None:
        wbcs = wbcs.filter(time__gte=max_age)

    log("Updating timing for all world block colors...")
    with click.progressbar(wbcs.iterator(),
                           length=wbcs.count(),
                           show_percent=True,
                           show_pos=True) as pbar:
        for block_color in pbar:
            if block_color.world is not None and block_color.world.start:
                block_color.time = block_color.world.start
                block_color.save()

    wbcs = _get_block_colors(world_ids)
    if max_age is not None:
        wbcs = wbcs.filter(time__gte=max_age)

    log("Recalculating dynamic properties...")
    with click.progressbar(wbcs.iterator(),
                           length=wbcs.count(),
                           show_percent=True,
                           show_pos=True) as pbar:
        for block_color in pbar:
            block_color.is_new = False
            block_color.first_world = None
            block_color.last_exo = None
            block_color.is_new_transform = False
            block_color.transform_first_world = None
            block_color.transform_last_exo = None

            if block_color.world is not None and block_color.world.is_creative:
                block_color.save()
                continue

            base_compare = WorldBlockColor.objects.filter(
                item=block_color.item,
                color=block_color.color,
                is_default=True,
                time__lt=block_color.time,
            ).filter(Q(world__isnull=True) | Q(world__is_creative=False))

            wbc = base_compare.filter(NON_EXO).order_by("world__start").first()
            block_color.is_new = wbc is None
            if wbc is not None:
                block_color.first_world = wbc.world

            wbc = base_compare.filter(**EXO).order_by("-world__start").first()
            if wbc is not None:
                block_color.last_exo = wbc.world

            if block_color.transform_group is None:
                block_color.save()
                continue

            base_transform = WorldBlockColor.objects.filter(
                item_id__in=block_color.transform_group,
                color=block_color.color,
                is_default=True,
                time__lt=block_color.time,
            ).filter(Q(world__isnull=True) | Q(world__is_creative=False))

            if block_color.is_new:
                wbc = base_transform.filter(NON_EXO).first()
                block_color.is_new_transform = wbc is None
                if wbc is not None and wbc.world is not None:
                    block_color.transform_first_world = wbc.world

                wbc = base_compare.filter(**EXO).first()
                if wbc is not None:
                    block_color.transform_last_exo = wbc.world
            else:
                block_color.is_new_transform = False

            block_color.save()

def run(  # pylint: disable=too-many-locals
    force=False,
    start_id=None,
    end_id=None,
    color_variants=True,
    **kwargs,
):
    items = {}
    compiled_items = GameFile.objects.get(
        folder="assets/archetypes", filename="compileditems.msgpack").content

    items_query = Item.objects.all()
    if start_id is not None:
        items_query = items_query.filter(game_id__gte=start_id)
    if end_id is not None:
        items_query = items_query.filter(game_id__lte=end_id)

    click.echo("Attaching localization and image data to items...")
    with click.progressbar(
        items_query.iterator(),
        show_percent=True,
        show_pos=True,
        length=items_query.count(),
    ) as pbar:
        for item in pbar:
            pbar.label = str(item.game_id)
            pbar.render_progress()

            items[item.game_id] = item

            list_type = compiled_items[str(item.game_id)].get("listTypeName")
            if list_type:
                item.list_type = LocalizedString.objects.filter(
                    string_id=list_type).first()
            item.description = LocalizedString.objects.filter(
                string_id=f"{item.string_id}_DESCRIPTION").first()

            _create_icons(item, pbar, force, color_variants)
            item.save()

    click.echo("Purging CDN cache...")
    purge_static_cache(["items"])

    click.echo("Creating AltItems...")
    with click.progressbar(compiled_items.items()) as pbar:
        for item_id, item_data in pbar:
            item_id = int(item_id)
            if item_id in items:
                continue

            string_id = item_data["stringID"]
            if "ITEM_TYPE_ASH_RECLAIM" in string_id:
                string_id = "ITEM_TYPE_ASH_DEFAULT_BASE"

            item = Item.objects.filter(string_id=string_id).first()
            if item is not None:
                alt_item, _ = AltItem.objects.get_or_create(
                    game_id=int(item_data["id"]),
                    name=item_data["name"],
                    base_item=item,
                )
                items[alt_item.game_id] = item

    compiled_blocks = GameFile.objects.get(
        folder="assets/archetypes", filename="compiledblocks.msgpack").content

    _blocks(compiled_blocks, items)
    _liquids(compiled_blocks, items)

def run(**kwargs):  # pylint: disable=too-many-locals
    emoji_list = _get_emoji_list()
    emoji_nametable = GameFile.objects.get(
        folder="assets/gui/emoji", filename="emoji.json").content

    emoji_created = 0
    click.echo("Importing emojis...")
    with click.progressbar(range(len(emoji_layer_data) // 2),
                           show_pos=True,
                           show_percent=True) as pbar:
        for index in pbar:
            name, layers = (
                emoji_layer_data[2 * index],
                emoji_layer_data[(2 * index) + 1],
            )

            image = get_emoji(name, layers)
            emoji_image = get_django_image(image, f"{name}.png")

            emoji, created = Emoji.objects.get_or_create(
                name=name,
                defaults={"image": emoji_image},
            )

            if not created:
                if emoji.image is not None and emoji.image.name:
                    emoji.image.delete()
                emoji.image = emoji_image

            if emoji.image_small is not None and emoji.image_small.name:
                emoji.image_small.delete()
            emoji.image_small = make_thumbnail(emoji_image)

            alt_names = emoji_nametable.get(name)

            try:
                int(emoji.name, 16)
            except ValueError:
                emoji.category = "BOUNDLESS"
            else:
                lookup = emoji.name.upper()
                for emoji_dict in emoji_list:
                    if lookup in emoji_dict["codePoint"].split(" "):
                        emoji.category = GROUP_TO_CATEGORY[emoji_dict["group"]]

            if emoji.category is None:
                emoji.category = Emoji.EmojiCategory.UNCATEGORIZED

            emoji.active = alt_names is not None
            emoji.save()

            if alt_names is not None:
                for alt_name in alt_names:
                    EmojiAltName.objects.get_or_create(emoji=emoji,
                                                       name=alt_name)

            if created:
                emoji_created += 1

    print_result("emojis", emoji_created)

    click.echo("Purging CDN cache...")
    purge_static_cache(["emoji"])