Example #1
def create_relational_reader(ggpk):
    opt = {
        'use_dat_value': False,
        'auto_build_index': True,
    }
    return RelationalReader(path_or_ggpk=ggpk,
                            files=['Stats.dat'],
                            read_options=opt)
Example #2
def create_relational_reader(file_system):
    opt = {
        "use_dat_value": False,
        "auto_build_index": True,
    }
    return RelationalReader(path_or_file_system=file_system,
                            files=["Stats.dat"],
                            read_options=opt)
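
A minimal usage sketch building on Example #2, assuming the game data has already been extracted to a local directory; the FileSystem import path, its constructor argument and the 'Id' column of Stats.dat are assumptions and not part of the original examples.

from PyPoE.poe.file.file_system import FileSystem  # import path assumed

# Point a FileSystem at an extracted copy of the game data (Data/, Metadata/)
fs = FileSystem(root_path='D:/Temp/')

# Reuse the factory from Example #2 and iterate the rows of Stats.dat
reader = create_relational_reader(fs)
for row in reader['Stats.dat']:
    print(row['Id'])  # 'Id' is an assumed column name
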
Example #3
    def __init__(self, ggpk_path):
        reader_opts = {'use_dat_value': False, 'auto_build_index': True}

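        # All of the readers and caches below are built from the same GGPK path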
        self.dat_reader = RelationalReader(path_or_ggpk=ggpk_path,
                                           read_options=reader_opts)
        self.ot_files = OTFileCache(path_or_ggpk=ggpk_path)
        self.translations = TranslationFileCache(path_or_ggpk=ggpk_path,
                                                 merge_with_custom_file=True)
Example #4
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Set this up as early as possible so no processing time is wasted
        if not self.parsed_args.user or not self.parsed_args.password:
            raise ValueError(
                'User login to target wiki is required for this operation.')

        self.site_english = mwclient.Site(WIKIS['English'],
                                          path='/',
                                          scheme='https')
        self.site_english.login(self.parsed_args.en_user,
                                self.parsed_args.en_password)
        self.site_other = mwclient.Site(WIKIS[self.lang],
                                        path='/',
                                        scheme='https')
        self.site_other.login(self.parsed_args.user, self.parsed_args.password)

        if self.lang == 'English':
            raise ValueError("Can't export unique items TO English wiki")

        self.rr_english = RelationalReader(
            path_or_ggpk=self.base_path,
            raise_error_on_missing_relation=False,
            read_options={
                'use_dat_value': False,
                'auto_build_index': True,
            },
            language='English')

        console('Creating lookup cache...')
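        # Collect the Words.dat rows that belong to the unique item word list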
        self.words = []
        for row in self.rr_english['Words.dat']:
            if row['WordlistsKey'] == WORDLISTS.UNIQUE_ITEM:
                self.words.append(row)

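        # Group base items by item class id and index each group by item name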
        self.cache = defaultdict(BaseItemCacheInstance)
        for row in self.rr_english['BaseItemTypes.dat']:
            self.cache[row['ItemClassesKey']['Id']].append(row)
            self.cache[row['ItemClassesKey']['Id']].index['Name'][
                row['Name']].append(row)
Example #5

maps = []
pantheon = []

# Path the game data was extracted to; only Metadata/ and Data/ are needed
path = 'D:/Temp/'

# read options that speed things up
opt = {
    'use_dat_value': False,
    'auto_build_index': True,
}

r = RelationalReader(
    path_or_ggpk=path,
    read_options=opt,
)

tc = TranslationFileCache(path_or_ggpk=path)

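# Build indexes on these key columns so rows can be looked up by them below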
r['Maps.dat'].build_index('Regular_WorldAreasKey')
r['Maps.dat'].build_index('Unique_WorldAreasKey')
r['PantheonSouls.dat'].build_index('BaseItemTypesKey')

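# Match each atlas node to its map row; unique maps are looked up separately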
for node in r['AtlasNode.dat']:
    map = r['Maps.dat'].index['Regular_WorldAreasKey'].get(
        node['WorldAreasKey'])
    unique = False
    if map is None:
        map = r['Maps.dat'].index['Unique_WorldAreasKey'].get(
            node['WorldAreasKey'])