Example #1
def mberecord_overwrite(result,
                        filepath,
                        id_len,
                        softcodes=None,
                        softcode_lookup=None,
                        encoding=default_encoding):
    mbetable_to_dict(result, filepath, id_len, softcodes, softcode_lookup,
                     encoding)
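From the calls above, mbetable_to_dict appears to parse an MBE table and merge its keyed rows straight into the dict passed as its first argument, so the overwrite rule amounts to a plain dict update: colliding keys take the newer file's row wholesale. A minimal illustration with plain dicts standing in for parsed rows (all values made up):

result = {('001',): ['Agumon', '10']}
incoming = {('001',): ['Agumon', '99'], ('002',): ['Gabumon', '12']}
result.update(incoming)  # what the overwrite rule amounts to
print(result)  # {('001',): ['Agumon', '99'], ('002',): ['Gabumon', '12']}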
Example #2
def mberecord_merge(result,
                    filepath,
                    id_len,
                    softcodes=None,
                    softcode_lookup=None,
                    encoding=default_encoding):
    data = {}
    mbetable_to_dict(data, filepath, id_len, softcodes, softcode_lookup,
                     encoding)

    for key, value in data.items():
        if key in result:
            result[key] = [(result[key][i] if subval == "" else subval)
                           for i, subval in enumerate(value)]
        else:
            result[key] = value
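The merge rule only replaces subvalues that the newer table actually sets; empty strings leave the existing cell untouched. A standalone trace of that loop (table contents made up):

result = {('001',): ['Agumon', '10']}
data = {('001',): ['', '99'], ('002',): ['Gabumon', '12']}
for key, value in data.items():
    if key in result:
        result[key] = [(result[key][i] if subval == "" else subval)
                       for i, subval in enumerate(value)]
    else:
        result[key] = value
print(result)  # {('001',): ['Agumon', '99'], ('002',): ['Gabumon', '12']}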
Example #3
def mberecord_append(result,
                     filepath,
                     id_len,
                     softcodes=None,
                     softcode_lookup=None,
                     encoding=default_encoding,
                     fill_value='0'):
    header, data = mbetable_to_dict({}, filepath, id_len, softcodes,
                                    softcode_lookup, encoding)

    max_records = len(header) - id_len
    for key, value in data.items():
        nonzero_data = [
            elem for elem in result.get(key, []) if elem != fill_value
        ]
        new_data = [elem for elem in value if elem not in nonzero_data]
        nonzero_data.extend(new_data)
        nonzero_data = nonzero_data[:max_records]

        nonzero_data.extend([fill_value] * (max_records - len(nonzero_data)))
        result[key] = nonzero_data[:max_records]
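The append rule treats fill_value entries as empty slots: it strips them, appends any genuinely new values, then truncates or pads back to the column count. Traced by hand with made-up values:

fill_value, max_records = '0', 4
existing = ['5', '7', '0', '0']
incoming = ['7', '9']
nonzero = [e for e in existing if e != fill_value]    # ['5', '7']
nonzero += [e for e in incoming if e not in nonzero]  # ['5', '7', '9']
nonzero = (nonzero + [fill_value] * max_records)[:max_records]
print(nonzero)  # ['5', '7', '9', '0']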
Example #4
    def execute(self):
        file_targets = set(self.filepack.get_file_targets())
        source_tables = set()

        # Copy all tables that don't need to be built
        mbe_resource = os.path.join(self.paths.base_resources_loc,
                                    self.filepack.get_resource_targets()[0])
        # dst needs to have the archive name inserted
        dst = os.path.join(self.paths.patch_build_loc,
                           self.filepack.get_resource_targets()[0])

        os.makedirs(dst, exist_ok=True)
        if os.path.isdir(mbe_resource):
            for file in os.listdir(mbe_resource):
                rel_path = os.path.join(
                    self.filepack.get_resource_targets()[0], file)
                source_tables.add(rel_path)
                if rel_path not in file_targets:
                    table_path = os.path.join(mbe_resource, file)
                    file_dst = os.path.join(dst, file)
                    shutil.copy2(table_path, file_dst)

        # Iterate over targets; build each target
        for file_target, pipeline in zip(self.filepack.get_file_targets(),
                                         self.filepack.build_pipelines):

            id_len = id_lengths.get(file_target.replace(os.sep, "/"), 1)
            # Load the table to be patched into memory
            if file_target in source_tables:
                table_source = os.path.join(self.paths.base_resources_loc,
                                            file_target)
                header, working_table = mbetable_to_dict({}, table_source,
                                                         id_len, None, None)
            else:
                step_1 = pipeline[0]
                table_source = os.path.join(self.paths.mm_root, step_1.mod,
                                            step_1.src)
                header, working_table = mbetable_to_dict({}, table_source,
                                                         id_len, None, None)
                # Discard the rows: the first build step re-reads this file
                # with its softcodes applied; we only needed the header
                working_table = {}

            # Now iterate over build steps
            for build_step in pipeline:
                table_source = os.path.join(self.paths.mm_root, build_step.mod,
                                            build_step.src)
                rules[build_step.rule](working_table, table_source, id_len,
                                       build_step.softcodes,
                                       self.softcode_lookup)

            abs_filepath = os.path.join(dst, os.path.split(file_target)[1])
            dict_to_mbetable(abs_filepath, header, working_table)

        # Pack into the pack target
        cached_file = os.path.join(self.paths.patch_cache_loc,
                                   self.path_prefix,
                                   self.filepack.get_pack_targets()[0])
        os.makedirs(os.path.split(cached_file)[0], exist_ok=True)
        self.filepack.pack(dst, cached_file)
        self.filepack.set_build_pipelines(None)

        # Do any post actions, such as compressing the file
        if self.post_action is not None:
            self.post_action(cached_file, cached_file)
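The rules mapping dispatched in the build-step loop above is not shown in this example; presumably it binds rule names to the mberecord_* functions from Examples #1-#3 (the exact key strings here are an assumption):

rules = {
    'overwrite': mberecord_overwrite,  # newer rows replace colliding keys wholesale
    'merge': mberecord_merge,          # "" subvalues keep the existing cell
    'append': mberecord_append,        # add new entries, pad/trim to column count
}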
Example #5
        # Fall back through three sources: the doboz-compressed cache, the
        # unpacked base resources, and finally direct extraction from the
        # game's MVGL archive
        if os.path.exists(
                cached_file := os.path.join(cache_loc, "DSDBP", *table_path)):
            DSCSTools.dobozDecompress(cached_file, build_file)
            mbe_filepack.unpack(build_file, working_loc)
        elif os.path.exists(
                resource_file := os.path.join(resource_loc, *table_path)):
            shutil.copytree(resource_file, build_file)
        else:
            DSCSTools.extractMDB1File(
                os.path.join(self.ops.paths.game_resources_loc,
                             f"{archive}.steam.mvgl"), build_loc,
                "/".join(table_path))

        os.rmdir(working_loc)

        build_subtable = os.path.join(build_file, subtable)
        return mbetable_to_dict({}, build_subtable, 1, None, None)

    def name_getter(self, id_, charnames, lang):
        name_id = '1' + str(id_).rjust(3, '0')
        names = charnames[(name_id, )]
        jp_name = names[0]
        lang_name = names[lang]
        return (jp_name, lang_name)
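    # Worked example (values made up, not from the source):
    # name_getter(42, charnames, 1) builds name_id = '1' + '042' = '1042',
    # looks up charnames[('1042',)], and returns (japanese_name,
    # localised_name) as (names[0], names[1]).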

    def sortmode_compress_keygen(self, digi_id, build_common_para_digimon,
                                 build_charname):
        # Remember that digi_id is a tuple, despite having one element
        stage = build_common_para_digimon[digi_id][0]
        field_guide_id = build_common_para_digimon[digi_id][20]
        name = self.name_getter(digi_id[0], build_charname, 1)[1]
        return (int(field_guide_id), int(stage), name.encode('utf8'))
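The (field_guide_id, stage, name) tuples produced above sort lexicographically, i.e. by field guide number first, then evolution stage, then the UTF-8 name bytes. A quick check with made-up keys:

keys = [(12, 3, b'Agumon'), (12, 2, b'Koromon'), (5, 4, b'Gabumon')]
print(sorted(keys))  # [(5, 4, b'Gabumon'), (12, 2, b'Koromon'), (12, 3, b'Agumon')]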
Example #6
    def execute(self):
        # Copy all tables that don't need to be built
        csv_resource = os.path.join(self.paths.base_resources_loc,
                                    self.filepack.get_resource_targets()[0])

        file_target = self.filepack.get_file_targets()[0]
        pipeline = self.filepack.build_pipeline
        # Load the table to be patched into memory
        has_base_resource = os.path.exists(csv_resource)
        if has_base_resource:
            table_source = csv_resource
        else:
            step_1 = pipeline[0]
            table_source = os.path.join(self.paths.mm_root, step_1.mod,
                                        step_1.src)

        id_len = id_lengths.get(
            os.path.join(self.filepack.get_resource_targets()[0],
                         file_target), 1)
        header, working_table = mbetable_to_dict(
            {},
            table_source,
            id_len,
            None,
            None,
            encoding=default_csv_encoding)
        if not has_base_resource:
            # Discard the rows: the first build step re-reads this file with
            # its softcodes applied; we only needed the header
            working_table = {}

        # Now iterate over build steps
        id_len = id_lengths.get(file_target, 1)
        for build_step in pipeline:
            table_source = os.path.join(self.paths.mm_root, build_step.mod,
                                        build_step.src)
            rules[build_step.rule](working_table,
                                   table_source,
                                   id_len,
                                   encoding=default_csv_encoding)

        cached_file = os.path.join(self.paths.patch_cache_loc,
                                   self.path_prefix,
                                   self.filepack.get_pack_targets()[0])
        os.makedirs(os.path.split(cached_file)[0], exist_ok=True)
        dict_to_mbetable(cached_file,
                         header,
                         working_table,
                         encoding=default_csv_encoding)

        # Pack into the pack target
        self.filepack.wipe_pipelines()

        # Do any post actions, such as compressing the file
        if self.post_action is not None:
            self.post_action(cached_file, cached_file)
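post_action receives the cached file as both source and destination, so it is expected to transform the file in place. A hypothetical hook, assuming DSCSTools also exposes dobozCompress as the counterpart to the dobozDecompress call seen in Example #5:

def compress_post_action(src, dst):
    # Hypothetical: doboz-compress the built file in place
    DSCSTools.dobozCompress(src, dst)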