Example #1
def init_column_names_and_make_rec_structs():
    inline_column_names = {}

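    # Look for a WoWDBDefs checkout next to this script and pull in its
    # Python .dbd parser; without it there are no column names to apply.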
    try:
        likely_wowdefs_path = os.path.dirname(
            os.path.realpath(__file__)) + '/WoWDBDefs'
        sys.path += [likely_wowdefs_path + '/code/Python']
        import dbd
    except Exception:
        print(
            'WARNING: NOT getting column names: unable to find WoWDBDefs directory'
        )
        return inline_column_names

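    # The client binary embeds its build number right after this user-agent
    # prefix; read it out to select the matching DBD definition below.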
    user_agent_prefix = 'Mozilla/5.0 (Windows; U; %s) WorldOfWarcraft/'
    build = butil.get_cstring(
        butil.find_string(user_agent_prefix) +
        len(user_agent_prefix)).decode("utf-8")

    print(
        F"Parsing DBD directory {likely_wowdefs_path + '/definitions'} with build {build}"
    )
    dbds = dbd.parse_dbd_directory(likely_wowdefs_path + '/definitions')

    for name, parsed in dbds.items():
        inline_column_names[name] = []

        # Index the table's columns by name; names are expected to be unique.
        columns = {column.name: column for column in parsed.columns}
        assert len(columns) == len(parsed.columns)

        for definition in parsed.definitions:
            if not has_build(build, definition.builds):
                continue

            lines = []
            has_string = False
            for entry in definition.entries:
                # Non-inline fields do not occupy space in the record itself,
                # so skip them entirely.
                if 'noninline' in entry.annotation:
                    continue

                meta = columns[entry.column]

                # Map the DBD type onto a C type: ints get an explicit width
                # (e.g. uint32_t), strings use the dbc_string wrapper types.
                type_str = meta.type
                if type_str in ['uint', 'int']:
                    type_str = '{}{}_t'.format(
                        'uint' if entry.is_unsigned else meta.type,
                        entry.int_width if entry.int_width else 32)
                elif type_str in ['string', 'locstring']:
                    type_str = 'dbc_' + type_str
                    has_string = True
                else:
                    # Remaining types (e.g. float) carry no width and are
                    # never foreign keys.
                    assert not entry.int_width
                    assert not meta.foreign

                inline_column_names[name] += [entry.column]
                array_str = '[{}]'.format(
                    entry.array_size) if entry.array_size else ''
                lines += ['{} {}{};'.format(type_str, entry.column, array_str)]

            if 'table is sparse' in definition.comments and has_string:
                print(
                    'WARNING: omitting rec struct for {}: table is sparse and has string, the layout would be wrong!'
                    .format(name))
            else:
                tutil.add_packed_type(name + 'Rec', ''.join(lines),
                                      tutil.ADD_TYPE.REPLACE)

    return inline_column_names
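
Both this example and Example #3 call a has_build helper that is not shown in the excerpts. A minimal sketch of what such a check could look like, assuming builds arrive either as dotted version strings like '1.12.1.5875' or as (low, high) range tuples (the real WoWDBDefs parser has its own build types, so treat this as illustrative):

def parse_build(s):
    # '1.12.1.5875' -> (1, 12, 1, 5875)
    return tuple(int(part) for part in s.split('.'))

def has_build(build, builds):
    for b in builds:
        if isinstance(b, tuple):  # assumed (low, high) range of builds
            low, high = parse_build(b[0]), parse_build(b[1])
            if low <= parse_build(build) <= high:
                return True
        elif parse_build(str(b)) == parse_build(build):  # single build
            return True
    return False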
Example #2
parser.add_argument( '--definitions', dest="definitions", type=str, required=True
                   , help="location of .dbd files")
parser.add_argument( '--output', dest="output", type=str, required=True
                   , help="directory to dump wiki pages to")
parser.add_argument( '--only', dest="only", action='append'
                   , help='if given, a list of tables to dump')
args = parser.parse_args()

os.makedirs(args.output, exist_ok=True)

dbds = {}
if args.only:
  for table in args.only:
    dbds[table] = dbd.parse_dbd_file(os.path.join(args.definitions, "{}{}".format(table, dbd.file_suffix)))
else:
  dbds = dbd.parse_dbd_directory(args.definitions)

file_data = {}

for name, parsed in dbds.items():
  file_data[name] = ""

  columns = {column.name: column for column in parsed.columns}
  assert len(columns) == len(parsed.columns)

  for definition in sorted(parsed.definitions, key=operator.attrgetter('builds')):
    def wiki_format_template(templ, *args):
      templates = { "Type": ("{{{{Type|{}}}}}", "{}ⁱ")
                  , "Unverified": ("{{{{Unverified|{}}}}}", "{}ᵘ")
Example #3
                    required=True,
                    help="location of .dbd files")
parser.add_argument('--only',
                    dest="only",
                    action='append',
                    help='if given, a list of tables to dump')
args = parser.parse_args()

dbds = {}
if args.only:
    for table in args.only:
        dbds[table] = dbd.parse_dbd_file(
            os.path.join(args.definitions,
                         "{}{}".format(table, dbd.file_suffix)))
else:
    dbds = dbd.parse_dbd_directory(args.definitions)

print('digraph G {')
print('rankdir=LR;rank=same;splines=ortho;node[shape=underline]')

needed = {}

for name, parsed in dbds.items():
    for column in parsed.columns:
        if column.foreign:
            t = str(column.foreign.table)
            if t not in needed:
                needed[t] = []
            needed[t] += [str(column.foreign.column)]
            if name not in needed:
                needed[name] = []
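
The fragment above opens a digraph and records foreign-key references in needed, but is cut off before emitting any nodes or edges. A minimal sketch of a continuation, assuming one edge per foreign-key column (the label format is illustrative):

for name, parsed in dbds.items():
    for column in parsed.columns:
        if column.foreign:
            print('{} -> {} [label="{}"];'.format(
                name, column.foreign.table, column.name))
print('}')

The remainder of this example is a separate script: a main() that builds the same packed record structs as Example #1, either inside IDA or as a standalone C-header generator.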
def main():
    args = None
    if is_in_ida:
        import butil

        class FakeArgs:
            def __init__(self):
                self.definitions = likely_wowdefs_path + '/definitions'
                user_agent_prefix = 'Mozilla/5.0 (Windows; U; %s) WorldOfWarcraft/'
                # idc.GetString is the pre-IDA-7 API; get_strlit_contents
                # returns bytes, so decode to match the CLI --build string.
                self.build = idc.get_strlit_contents(
                    butil.find_string(user_agent_prefix) +
                    len(user_agent_prefix)).decode('utf-8')

        args = FakeArgs()
    else:
        import argparse
        parser = argparse.ArgumentParser()
        parser.add_argument('--definitions',
                            dest="definitions",
                            type=str,
                            required=True,
                            help="location of .dbd files")
        parser.add_argument('--build',
                            dest="build",
                            type=str,
                            required=True,
                            help="WoW build to select")
        args = parser.parse_args()

    #dbds = {}
    #dbds['AreaPOI'] = dbd.parse_dbd_file(os.path.join(args.definitions, "AreaPOI{}".format(dbd.file_suffix)))
    dbds = dbd.parse_dbd_directory(args.definitions)

    file_data = {}
    inline_column_names = {}

    for name, parsed in dbds.items():
        file_data[name] = ""
        inline_column_names[name] = []

        columns = {column.name: column for column in parsed.columns}
        assert len(columns) == len(parsed.columns)

        for definition in parsed.definitions:
            if not has_build(args.build, definition.builds):
                continue

            lines = []
            has_string = False
            for entry in definition.entries:
                meta = columns[entry.column]

                type_str = meta.type
                if type_str in ['uint', 'int']:
                    type_str = '{}{}_t'.format(
                        'uint' if entry.is_unsigned else meta.type,
                        entry.int_width if entry.int_width else 32)
                elif type_str in ['string', 'locstring']:
                    type_str = 'dbc_' + type_str
                    has_string = True
                else:
                    assert not entry.int_width
                    assert not meta.foreign

                array_str = "[{}]".format(
                    entry.array_size) if entry.array_size else ""

                name_str = entry.column

                comments = []

                merged_str_pattern = "  {} {}{}; {}"
                for annotation in entry.annotation:
                    if annotation == "noninline":
                        merged_str_pattern = "  // {} {}{}; {}"
                        comments += ["non-inline field"]
                    elif annotation == "id":
                        pass
                    else:
                        comments += ["{}".format(annotation)]

                if 'noninline' not in entry.annotation:
                    inline_column_names[name] += [entry.column]

                comments += [entry.comment] if entry.comment else []
                comments += [meta.comment] if meta.comment else []
                comments_str = ''
                if comments:
                    # decode() after encode(): formatting raw bytes would
                    # embed "b'...'" in the generated comment.
                    comments_str = '// {}'.format(';'.join(comments).encode(
                        'ascii', 'backslashreplace').decode('ascii'))

                lines += [
                    merged_str_pattern.format(type_str, name_str, array_str,
                                              comments_str)
                ]

            if 'table is sparse' in definition.comments and has_string:
                file_data[name] += ('// omitting: is sparse and has string, '
                                    'the layout would be wrong!')
            else:
                for comment in definition.comments:
                    file_data[name] += '// ' + comment.encode(
                        'ascii', 'backslashreplace').decode('ascii') + "\n\n"
                if not is_in_ida:
                    file_data[name] += "struct {}Rec {{\n".format(name)
                for line in lines:
                    file_data[name] += line + "\n"
                if not is_in_ida:
                    file_data[name] += "};\n"

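    # Two output modes: inside IDA, install the structs and rename the
    # per-column globals; standalone, emit a self-contained C header.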
    if is_in_ida:
        import tutil
        import tdbc
        for name, data in file_data.items():
            tutil.add_packed_type(name + 'Rec', data, tutil.ADD_TYPE.REPLACE)
            # Rename the generated column_N globals to their real DBD names.
            for col, col_name in enumerate(inline_column_names[name]):
                ea = idc.get_name_ea_simple('{}::column_{}'.format(name, col))
                if ea != idc.BADADDR:
                    # idc.MakeName is the pre-IDA-7 spelling; use set_name to
                    # match get_name_ea_simple above.
                    idc.set_name(ea, '{}::{}'.format(name, col_name))
    else:
        print('struct dbc_string { uint32_t _; };')
        print('typedef dbc_string dbc_locstring;')

        for name, data in file_data.items():
            print('//' + name)

            print('#pragma pack (push, 1)')
            print(data)
            print('#pragma pack (pop)')
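
Run outside IDA, main() writes a complete C header to stdout, so a standalone invocation might look like this (the file name is illustrative, and this assumes the script ends with the usual if __name__ == '__main__': main() guard):

python dbd_to_structs.py --definitions WoWDBDefs/definitions --build 1.12.1.5875 > dbc_structs.h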