Example #1
    def test_parse_file_source(self):
        self.assertEqual(vdf.parse(StringIO(" ")), {})

        class CustomDict(dict):
            pass

        self.assertEqual(vdf.parse(StringIO(" "), mapper=CustomDict), {})
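A minimal round-trip sketch with the same package's vdf.loads and vdf.dumps, as used in later examples; the sample data here is made up:

import vdf

sample = """
"root"
{
    "key"    "value"
}
"""
parsed = vdf.loads(sample)            # -> {'root': {'key': 'value'}}
print(vdf.dumps(parsed, pretty=True))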
Example #3
def get_installed_games():

    """
        Finds Steam games installed on the system,
        by searching each Steam library.

        Returns:
            A list of dictionaries, one entry per installed game.
            For example:
            {
                appid: 228980
                name: Steamworks Common Redistributables
            }
    """

    games = []

    for library in get_libraries(get_steam_install_path()):
        # Add \steamapps to the library folder
        library += "\\steamapps\\"

        # Search for manifest files (.acf)
        files = os.listdir(library)
        for file in files:
            if file.endswith(".acf"):
                # Get the game details from the manifest file
                with open(library + file) as manifest:
                    f = vdf.parse(manifest)
                appid = f["AppState"]["appid"]
                name = f["AppState"]["name"]

                games.append({"name": name, "appid": appid})

    return games
Example #4
def get_current_steam_user():
    """
    Get the current AccountName with saved login credentials.

    If successful this returns the AccountName of the user with saved credentials.
    Otherwise this returns None.

    This function depends on the package "vdf".
    """
    for path in File.loginusers_paths:
        try:
            with open(path) as f:
                login_vdf = vdf.parse(f)

            for info in login_vdf["users"].values():
                remember = "RememberPassword" in info and info[
                    "RememberPassword"] == "1"
                recent_uc = "MostRecent" in info and info["MostRecent"] == "1"
                recent_lc = "mostrecent" in info and info["mostrecent"] == "1"
                if remember and (recent_lc
                                 or recent_uc) and "AccountName" in info:
                    return info["AccountName"]
        except Exception:
            pass
    return None
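For reference, a sketch of the loginusers.vdf layout the function above expects; the SteamID and field values are made up:

import vdf

sample = vdf.loads('''
"users"
{
    "76561198000000000"
    {
        "AccountName"        "exampleuser"
        "RememberPassword"   "1"
        "MostRecent"         "1"
    }
}
''')
print(sample["users"]["76561198000000000"]["AccountName"])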
Example #5
def get_libraries(install_dir, including_install=True):

    """
        Gets the steam libary locations installed on the system
        and returns them as a list

        Parameters:
            install_dir - Steam installation directory
            including_install - Whether to include the installation directory as a library or not.
                                Defaults to True.
    """

    # Parse the vdf file into JSON
    try:
        f = vdf.parse(open(install_dir + "\\steamapps\\libraryfolders.vdf"))
    except:
        print("Could not find libraryfoldes.vdf")
        return
    
    libraries = []

    # Include the installation directory as a library, if wanted
    if including_install:
        libraries.append(install_dir)

    # Loop through the LibraryFolders entry
    for i in range(0, len(f["LibraryFolders"])):
        # Skip the first two entries, which are metadata rather than library paths
        if i == 0 or i == 1:
            continue

        libraries.append(f["LibraryFolders"][str(i - 1)])

    return libraries
Example #6
def auto():

    # READ REGISTRY TO LOCATE STEAM INSTALLATION

    if Is64Windows() is True:
        key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                             "SOFTWARE\\Wow6432Node\\Valve\\Steam")
    else:
        key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                             "SOFTWARE\\Valve\\Steam")

    steampath = winreg.QueryValueEx(key, "InstallPath")[0]

    # LOCATE THE ACTUAL PUBG INSTALLATION FILES

    global folderpath
    acfpath = steampath + "\\SteamApps\\appmanifest_578080.acf"

    if os.path.isfile(acfpath):
        folderpath = steampath + "\\SteamApps\\common\\PUBG\\TslGame\\Content\\Movies"
        print("Game location detected")
    else:
        vdffile = vdf.parse(open(steampath +
                                 "\\SteamApps\\LibraryFolders.vdf"))
        vdflocations = len(vdffile['LibraryFolders']) - 2
        for a in range(vdflocations):
            b = a + 1
            steampath2 = vdffile['LibraryFolders'][str(b)]
            acfpath2 = steampath2 + "\\SteamApps\\appmanifest_578080.acf"
            if os.path.isfile(acfpath2):
                folderpath = steampath2 + "\\SteamApps\\common\\PUBG\\TslGame\\Content\\Movies"
                print("Game location detected at " + folderpath)
                break
Example #7
def get_current_steam_user():
    """
    Get the current AccountName with saved login credentials.

    If successful this returns the AccountName of the user with saved credentials.
    Otherwise this returns None.

    This function depends on the package "vdf".
    """
    loginvdf_paths = File.loginusers_paths.copy()
    # try Wine Steam directory first when Wine is used
    if Args.wine:
        loginvdf_paths.insert(
            0, os.path.join(Args.wine_steam_dir, File.loginvdf_inner))
    for path in loginvdf_paths:
        try:
            with open(path, encoding="utf-8") as f_in:
                login_vdf = vdf.parse(f_in)

            for info in login_vdf["users"].values():
                remember = "RememberPassword" in info and info[
                    "RememberPassword"] == "1"
                recent_uc = "MostRecent" in info and info["MostRecent"] == "1"
                recent_lc = "mostrecent" in info and info["mostrecent"] == "1"
                if remember and (recent_lc
                                 or recent_uc) and "AccountName" in info:
                    return info["AccountName"]
        except (KeyError, OSError, TypeError, ValueError):
            pass
    return None
Example #8
def GetGamePath(appid):
    for acfpath in GetSteamLibraryPaths():
        appmanifestpath = acfpath + "/SteamApps/appmanifest_%d.acf" % (appid)
        if os.path.isfile(appmanifestpath):
            appmanifest = vdf.parse(open(appmanifestpath))
            return Path(acfpath + "\\SteamApps\\common\\" +
                        appmanifest['AppState']['installdir'] + "\\")
Example #9
def do_map(path=None, filename=None):
    pprint(path)
    pprint(filename)
    if filename is None:
        filename = os.path.join(base_path, "maps", "src", "comacchio_d.vmf")
    d = vdf.parse(open(filename), mapper=vdf.VDFDict)
    print(d)
    print(vdf.dumps(d, pretty=True))
Example #10
def do_map(path = None, filename = None):
	pprint(path)
	pprint(filename)
	if filename is None:
		filename = os.path.join(base_path, "maps", "src", "comacchio_d.vmf")
	d = vdf.parse(open(filename), mapper=vdf.VDFDict)
	print(d)
	print(vdf.dumps(d, pretty=True))
Example #11
def __init__(self, data=None):
    if not isinstance(data, vdf.VDFDict):  # checks if data is already a VDFDict
        data = vdf.parse(open(data),
                         mapper=vdf.VDFDict,
                         merge_duplicate_keys=False)
        self.d = vdf.VDFDict(data)
    else:
        self.d = data
Example #12
def GetSteamLibraryPaths():
    with open(GetSteamPath() + "/SteamApps/LibraryFolders.vdf") as lf:
        vdffile = vdf.parse(lf)
        vdflocations = [
            val
            for key, val in vdffile['LibraryFolders'].items() if key.isdigit()
        ] + [steampath]
        for path in vdflocations:
            print("\tFound Library path: ", path)
        return vdflocations
Example #13
def run():
    ret = {}
    userdata_path = utils.getInstallPath() + '/userdata'
    uids = os.listdir(userdata_path)

    first = True
    for uid in uids:
        if not first:
            print('=' * 50)
        else:
            first = False

        filepath = userdata_path + '/' + uid + '/config/localconfig.vdf'
        data = {}

        with open(filepath) as f:
            alldata = vdf.parse(f)
        alldata = alldata['UserLocalConfigStore']['Software']['Valve'][
            'Steam']['Apps']
        for gameID in alldata:
            if gameID == '':
                continue
            data[gameID] = {'GameID': gameID}
            data[gameID].update(alldata[gameID])

            #===
            if 'LastPlayed' in data[gameID]:
                data[gameID]['LastPlayed'] += ' (' + datetime.utcfromtimestamp(
                    int(data[gameID]['LastPlayed'])).strftime(
                        '%Y-%m-%d %H:%M:%S') + ')'
            if 'Playtime' in data[gameID]:
                playtime = int(data[gameID]['Playtime'])
                data[gameID]['Playtime'] += ' ( {0}h {1}m )'.format(
                    playtime // 60, playtime % 60)
            if 'GameID' in data[gameID]:
                title = utils.fetchGameName(gameID)
                if title != None:
                    data[gameID]['GameID'] += f' ( {title} )'

            #===

        ret[uid] = data
        #utils.printAdvancedTable(data)
        '''first = True
        for gameID in data:
            if not first:
                print('='*25)
            else:
                first = False
            utils.printAsTable(data[gameID])'''

    return ret
Example #14
def run():
    data = []
    dataCanWrite = False
    filepath = utils.getInstallPath() + '/config/loginusers.vdf'

    with open(filepath) as f:
        data = vdf.parse(f)
    for u in data['users']:
        #add converted timestamp
        if 'Timestamp' in data['users'][u]:
            data['users'][u]['Timestamp'] += ' (' + datetime.utcfromtimestamp(
                int(data['users'][u]['Timestamp'])).strftime(
                    '%Y-%m-%d %H:%M:%S') + ')'
    #return results
    return data
Example #15
	def load_file(self, filename=None):
		"""Load file into VDFDict
			Args:
				filename (str): Filename to load. Should be fully qualified.
			Returns:
				VDFDict of data				
		"""
		if filename is None:
			filename = self.filename
		filename = self.find_file(filename=filename)
		logger.debug("load_file({})".format(filename))
		if not os.path.exists(filename):
			return False
		bn = os.path.basename(filename)
		if filename in self.files.keys():
			return self.files[filename]
		if bn in self.files.keys():
			return self.files[bn]
		self.add_path(os.path.dirname(filename))
		self.files[bn] = vdf.parse(open(filename), mapper=vdf.VDFDict)
		return self.files[bn]
Example #16
def find_steam_executables():
    librarydirfile = home + "/.local/share/Steam/SteamApps/libraryfolders.vdf"
    defaultlibrary = home + "/.local/share/Steam/SteamApps/"
    if os.path.isfile(librarydirfile) and os.access(librarydirfile, os.R_OK):
        import vdf
        import fnmatch
        with open(librarydirfile) as f:
            lf = vdf.parse(f)
        librarydirs = list(lf["LibraryFolders"].values())
        librarydirs.append(defaultlibrary)
        execs = []
        for ld in librarydirs:
            print("Searching executables in " + ld)
            rootPath = ld

            for root, dirs, files in os.walk(rootPath):
                for filename in fnmatch.filter(files, "*"):
                    fn = os.path.join(root, filename)
                    if os.access(fn, os.X_OK):
                        execs.append(fn)
        print(execs)
        return execs
Example #17
def run():
    data = {}
    dataCanWrite = False
    userdata_path = utils.getInstallPath() + '/userdata'
    uids = os.listdir(userdata_path)

    first = True
    for uid in uids:
        if not first:
            print('=' * 25)
        else:
            first = False

        filepath = userdata_path + '/' + uid + '/config/localconfig.vdf'
        uids_data = {}

        with open(filepath) as f:
            alldata = vdf.parse(f)
        uids_data.update(alldata['UserLocalConfigStore']['streaming_v2'])
        data[uid] = uids_data

    return data
Example #18
def run():
    data = {}
    dataCanWrite = False
    userdata_path = utils.getInstallPath() + '/userdata'
    uids = os.listdir(userdata_path)
    
    first = True
    for uid in uids:
        if not first:
            print('='*25)
        else:
            first = False
        
        filepath = userdata_path + '/' + uid + '/config/localconfig.vdf'
        uids_data = {}

        with open(filepath) as f:
            alldata = vdf.parse(f)
        alldata = alldata['UserLocalConfigStore']['friends']

        for friend in alldata:
            if type(alldata[friend]) is dict:
                toAdd = alldata[friend]
                if '' in toAdd:
                    toAdd = toAdd['']
                uids_data[friend] = {
                    'UID': friend
                }
                uids_data[friend].update(toAdd)
        
        data[uid] = uids_data
        '''first = True
        for friend in uids_data:
            if not first:
                print('='*25)
            else:
                first = False
            utils.printAsTable(uids_data[friend])'''
        #utils.printAdvancedTable(uids_data)
    return data
Example #19
    def __init__(self, storefile):
        if not isinstance(storefile, six.string_types):
            raise ValueError('Needs string as a storefile!')

        self.base = storefile

        with open(storefile, 'rb') as handle:
            rawdata = handle.read()

            # First let's try the default codec, which is the one normally used by Valve games
            try:
                self.content = rawdata.decode(defaultCodec)
            except UnicodeDecodeError:  # if it fails, let's find out what codec it has
                result = chardet.detect(rawdata)
                self.codec = result['encoding']
                if self.codec.lower() == 'ascii':  # If it's ASCII, decode as UTF-8, which is a strict superset of ASCII
                    self.codec = 'utf-8'
                self.content = rawdata.decode(self.codec)

            self.parsed = vdf.parse(io.StringIO(self.content))

        self.units = list(
            VDFItem(itemKey, itemValue) for itemKey, itemValue in self.parsed['lang']['Tokens'].items()
        )
Example #20
import time

def resolve_prefabs(item):
	prefabs = item.get('prefab')
	if prefabs:
		prefab_aggregate = {}
		for prefab in prefabs.split():
			prefab_data = data['prefabs'][prefab]
			prefab_data = resolve_prefabs(prefab_data.copy())
			prefab_aggregate.update(prefab_data)
		prefab_aggregate.update(item)
		item = prefab_aggregate
	return item

with open(ITEMS_GAME) as f:
	data = vdf.parse(f)
	data = data['items_game']

db = sqlite3.connect(DB_FILE)
dbc = db.cursor()

dbc.execute('DROP TABLE IF EXISTS new_tf2idb_class')
dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item_attributes')
dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item')
dbc.execute('DROP TABLE IF EXISTS new_tf2idb_particles')
dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_conflicts')
dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_regions')
dbc.execute('DROP TABLE IF EXISTS new_tf2idb_capabilities')

dbc.execute('CREATE TABLE "new_tf2idb_class" ("id" INTEGER NOT NULL , "class" TEXT NOT NULL , PRIMARY KEY ("id", "class"))')
dbc.execute('CREATE TABLE "new_tf2idb_item_attributes" ("id" INTEGER NOT NULL , "attribute" INTEGER NOT NULL , "value" TEXT NOT NULL, PRIMARY KEY ("id", "attribute") )')
Example #21
def do_map(path = None, filename = None):
	#pprint(path)
	#pprint(filename)
	if filename is None:
		filename = os.path.join(base_path, "maps", "src", "comacchio_d.vmf")
	d = vdf.parse(open(filename), mapper=vdf.VDFDict)
Example #22
def parse(items_game: str, db: sqlite3.Connection, merge_allclass=True):
    """
    Parses items_game.txt into a database format usable by TF2IDB.
    
    :param items_game:  Path to the items_game.txt file from TF2.
    :param db:  An SQLite3 connection.
    :param merge_allclass:  Whether or not items designated as usable by every class should use
    the 'all' keyword.  Defaults to True.  Set to false if using a different branch of TF2IDB.
    """
    data = None
    with open(items_game) as f:
        data = vdf.parse(f)
        data = data['items_game']

    dbc = db.cursor()

    created_tables = {}

    def init_table(name: str, columns: list, primary_key=None):
        c = ', '.join(('"{}" {}'.format(k, v) for k, v in columns))

        if primary_key:
            column_names = (column for column, *_ in columns)
            if not all(key in column_names for key in primary_key):
                raise ValueError(
                    "Primary key not a valid column in table '{}'".format(
                        name))
            c += ', PRIMARY KEY ({})'.format(', '.join(
                ('"{}"'.format(k)) for k in primary_key))

        query = 'CREATE TABLE "new_{}" ({})'.format(name, c)

        dbc.execute('DROP TABLE IF EXISTS new_{}'.format(name))
        dbc.execute(query)

        created_tables[name] = [column for column, *_ in columns]

    def insert_dict(name: str, item: dict, prop_remap: dict = {}):
        if name not in created_tables:
            raise ValueError("Table '{}' does not exist".format(name))

        dbc.execute(
            'INSERT INTO new_{name} ({cols}) VALUES ({args})'.format(
                name=name,
                cols=','.join(created_tables[name]),
                args=','.join(':' + prop_remap.get(col, col)
                              for col in created_tables[name])), item)

    init_table('tf2idb_class', [('id', 'INTEGER NOT NULL'),
                                ('class', 'TEXT NOT NULL'), ('slot', 'TEXT')],
               primary_key=('id', 'class'))

    init_table('tf2idb_item_attributes', [('id', 'INTEGER NOT NULL'),
                                          ('attribute', 'INTEGER NOT NULL'),
                                          ('value', 'TEXT NOT NULL'),
                                          ('static', 'INTEGER')],
               primary_key=('id', 'attribute'))

    init_table('tf2idb_item',
               [('id', 'INTEGER PRIMARY KEY NOT NULL'),
                ('name', 'TEXT NOT NULL'), ('item_name', 'TEXT'),
                ('class', 'TEXT NOT NULL'), ('slot', 'TEXT'),
                ('quality', 'TEXT NOT NULL'), ('tool_type', 'TEXT'),
                ('min_ilevel', 'INTEGER'), ('max_ilevel', 'INTEGER'),
                ('baseitem', 'INTEGER'), ('holiday_restriction', 'TEXT'),
                ('has_string_attribute', 'INTEGER'), ('propername', 'INTEGER'),
                ('model_player', 'TEXT')])

    init_table('tf2idb_particles', [('id', 'INTEGER PRIMARY KEY NOT NULL'),
                                    ('name', 'TEXT NOT NULL'),
                                    ('type', 'TEXT NOT NULL')])

    init_table('tf2idb_equip_conflicts', [
        ('name', 'TEXT NOT NULL'),
        ('region', 'TEXT NOT NULL'),
    ],
               primary_key=('name', 'region'))

    init_table('tf2idb_equip_regions', [('id', 'INTEGER NOT NULL'),
                                        ('region', 'TEXT NOT NULL')],
               primary_key=('id', 'region'))

    init_table('tf2idb_capabilities', [('id', 'INTEGER NOT NULL'),
                                       ('capability', 'TEXT NOT NULL')],
               primary_key=('id', 'capability'))

    init_table('tf2idb_attributes',
               [('id', 'INTEGER PRIMARY KEY NOT NULL'),
                ('name', 'TEXT NOT NULL'), ('attribute_class', 'TEXT'),
                ('attribute_type', 'TEXT'), ('description_string', 'TEXT'),
                ('description_format', 'TEXT'), ('effect_type', 'TEXT'),
                ('hidden', 'INTEGER'), ('stored_as_integer', 'INTEGER'),
                ('armory_desc', 'TEXT'), ('is_set_bonus', 'INTEGER'),
                ('is_user_generated', 'INTEGER'),
                ('can_affect_recipe_component_name', 'INTEGER'),
                ('apply_tag_to_item_definition', 'TEXT')])

    init_table('tf2idb_qualities', [('name', 'TEXT PRIMARY KEY NOT NULL'),
                                    ('value', 'INTEGER NOT NULL')])

    init_table('tf2idb_rarities', [('name', 'TEXT PRIMARY KEY NOT NULL'),
                                   ('value', 'INTEGER NOT NULL')])

    init_table('tf2idb_item_rarities', [('id', 'INTEGER PRIMARY KEY NOT NULL'),
                                        ('rarity', 'INTEGER'),
                                        ('collection', 'TEXT')])

    nonce = int(time.time())
    dbc.execute(
        'CREATE INDEX "tf2idb_item_attributes_%i" ON "new_tf2idb_item_attributes" ("attribute" ASC)'
        % nonce)
    dbc.execute(
        'CREATE INDEX "tf2idb_class_%i" ON "new_tf2idb_class" ("class" ASC)' %
        nonce)
    dbc.execute(
        'CREATE INDEX "tf2idb_item_%i" ON "new_tf2idb_item" ("slot" ASC)' %
        nonce)

    # qualities
    dbc.executemany(
        'INSERT INTO new_tf2idb_qualities (name, value) VALUES (?,?)',
        ((qname, qdata['value'])
         for qname, qdata in data['qualities'].items()))

    # particles
    for particle_type, particle_list in data[
            'attribute_controlled_attached_particles'].items():
        dbc.executemany(
            'INSERT INTO new_tf2idb_particles (id, name, type) VALUES (?,?,?)',
            ((id, property['system'], particle_type)
             for id, property in particle_list.items()))

    # attributes
    attribute_type = {}
    for k, v in data['attributes'].items():
        atype = v.get('attribute_type',
                      'integer' if v.get('stored_as_integer') else 'float')
        attribute_type[v['name'].lower()] = (k, atype)
        insert_dict('tf2idb_attributes',
                    defaultdict(lambda: None, {
                        **{
                            'id': k
                        },
                        **v
                    }))

    # conflicts
    for k, v in data['equip_conflicts'].items():
        dbc.executemany(
            'INSERT INTO new_tf2idb_equip_conflicts (name,region) VALUES (?,?)',
            ((k, region) for region in v.keys()))

    # rarities
    db.executemany(
        'INSERT INTO new_tf2idb_rarities (name, value) VALUES (?, ?)',
        ((rname, rdata['value']) for rname, rdata in data['rarities'].items()))

    # item / rarity mapping
    item_rarity = {}
    for collection, collection_desc in data['item_collections'].items():
        for rarity, itemlist in collection_desc['items'].items():
            if rarity in data['rarities']:
                for item in itemlist:
                    item_rarity[item] = (
                        collection, int(data['rarities'][rarity]['value']))

    # items
    item_defaults = {'propername': 0, 'item_quality': ''}

    for id, v in data['items'].items():
        if id == 'default':
            continue

        i, prefabs_used = resolve_prefabs(v, data['prefabs'])
        baseitem = 'baseitem' in i

        try:
            has_string_attribute = False
            for name, value in i.get('static_attrs', {}).items():
                aid, atype = attribute_type[name.lower()]
                if atype == 'string':
                    has_string_attribute = True
                dbc.execute(
                    'INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (?,?,?,?)',
                    (id, aid, value, 1))

            for name, info in i.get('attributes', {}).items():
                aid, atype = attribute_type[name.lower()]
                if atype == 'string':
                    has_string_attribute = True
                dbc.execute(
                    'INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (?,?,?,?)',
                    (id, aid, info['value'], 0))

            tool = i.get('tool', {}).get('type')
            model_player = i.get('model_player', None)
            item_insert_values = {
                'id': id,
                'tool_type': tool,
                'baseitem': baseitem,
                'has_string_attribute': has_string_attribute,
                'model_player': model_player
            }

            insert_dict('tf2idb_item',
                        defaultdict(lambda: None, {
                            **item_defaults,
                            **item_insert_values,
                            **i
                        }),
                        prop_remap={
                            'class': 'item_class',
                            'slot': 'item_slot',
                            'quality': 'item_quality'
                        })

            default_slot = i.get('item_slot', None)
            used_classes = i.get('used_by_classes', {})
            if merge_allclass and all(c in used_classes
                                      for c in CLASSES_USABLE):
                # insert the 'all' keyword into tf2idb_class instead of a row for each class
                dbc.execute(
                    'INSERT INTO new_tf2idb_class (id, class, slot) VALUES (?, ?, ?)',
                    (id, 'all', default_slot))
            else:
                dbc.executemany(
                    'INSERT INTO new_tf2idb_class (id,class,slot) VALUES (?,?,?)',
                    ((id, prof.lower(), val if val != '1' else default_slot)
                     for prof, val in used_classes.items()))

            region_field = i.get('equip_region') or i.get('equip_regions')
            if region_field:
                if type(region_field) is str:
                    region_field = {region_field: 1}
                dbc.executemany(
                    'INSERT INTO new_tf2idb_equip_regions (id,region) VALUES (?,?)',
                    ((id, region) for region in region_field.keys()))

            # capabilities
            dbc.executemany(
                'INSERT INTO new_tf2idb_capabilities (id,capability) VALUES (?,?)',
                ((id, (capability if val != '0' else '!' + capability))
                 for capability, val in i.get('capabilities', {}).items()))

            # custom extended capabilities
            if item_has_australium_support(int(id), i):
                dbc.execute(
                    'INSERT INTO new_tf2idb_capabilities (id, capability) VALUES (?, ?)',
                    (id, 'supports_australium'))
            if item_has_paintkit_support(int(id), i):
                dbc.execute(
                    'INSERT INTO new_tf2idb_capabilities (id, capability) VALUES (?, ?)',
                    (id, 'can_apply_paintkit'))

            # item rarity
            if i['name'] in item_rarity:
                dbc.execute(
                    'INSERT INTO new_tf2idb_item_rarities (id, collection, rarity) VALUES (?, ?, ?)',
                    (id, ) + item_rarity[i['name']])
        except Exception as e:
            raise ItemParseError(id) from e

    # finalize tables
    for table in created_tables.keys():
        dbc.execute('DROP TABLE IF EXISTS %s' % table)
        dbc.execute('ALTER TABLE new_%s RENAME TO %s' % (table, table))

    db.commit()
    dbc.execute('VACUUM')
Example #23
def main():
    data = None
    with open(ITEMS_GAME) as f:
        data = vdf.parse(f)
        data = data['items_game']

    db = sqlite3.connect(DB_FILE)
    dbc = db.cursor()

    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_class')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item_attributes')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_particles')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_conflicts')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_regions')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_capabilities')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_attributes')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_qualities')

    dbc.execute(
        'CREATE TABLE "new_tf2idb_class" ("id" INTEGER NOT NULL , "class" TEXT NOT NULL , "slot" TEXT , PRIMARY KEY ("id", "class"))'
    )
    dbc.execute('CREATE TABLE "new_tf2idb_item_attributes" ('
                '"id" INTEGER NOT NULL,'
                '"attribute" INTEGER NOT NULL,'
                '"value" TEXT NOT NULL,'
                '"static" INTEGER,'
                'PRIMARY KEY ("id", "attribute")'
                ')')
    dbc.execute('CREATE TABLE "new_tf2idb_item" ('
                '"id" INTEGER PRIMARY KEY NOT NULL,'
                '"name" TEXT NOT NULL,'
                '"item_name" TEXT NOT NULL,'
                '"class" TEXT NOT NULL,'
                '"slot" TEXT,'
                '"quality" TEXT NOT NULL,'
                '"tool_type" TEXT,'
                '"min_ilevel" INTEGER,'
                '"max_ilevel" INTEGER,'
                '"baseitem" INTEGER,'
                '"holiday_restriction" TEXT,'
                '"has_string_attribute" INTEGER,'
                '"propername" INTEGER'
                ')')
    dbc.execute(
        'CREATE TABLE "new_tf2idb_particles" ("id" INTEGER PRIMARY KEY  NOT NULL , "name" TEXT NOT NULL )'
    )
    dbc.execute(
        'CREATE TABLE "new_tf2idb_equip_conflicts" ("name" TEXT NOT NULL , "region" TEXT NOT NULL , PRIMARY KEY ("name", "region"))'
    )
    dbc.execute(
        'CREATE TABLE "new_tf2idb_equip_regions" ("id" INTEGER NOT NULL , "region" TEXT NOT NULL , PRIMARY KEY ("id", "region"))'
    )
    dbc.execute(
        'CREATE TABLE "new_tf2idb_capabilities"  ("id" INTEGER NOT NULL , "capability" TEXT NOT NULL )'
    )
    dbc.execute('CREATE TABLE "new_tf2idb_attributes" ('
                '"id" INTEGER PRIMARY KEY NOT NULL,'
                '"name" TEXT NOT NULL,'
                '"attribute_class" TEXT,'
                '"attribute_type" TEXT,'
                '"description_string" TEXT,'
                '"description_format" TEXT,'
                '"effect_type" TEXT,'
                '"hidden" INTEGER,'
                '"stored_as_integer" INTEGER,'
                '"armory_desc" TEXT,'
                '"is_set_bonus" INTEGER,'
                '"is_user_generated" INTEGER,'
                '"can_affect_recipe_component_name" INTEGER,'
                '"apply_tag_to_item_definition" TEXT'
                ')')
    dbc.execute(
        'CREATE TABLE "new_tf2idb_qualities" ("name" TEXT PRIMARY KEY  NOT NULL , "value" INTEGER NOT NULL )'
    )

    nonce = int(time.time())
    dbc.execute(
        'CREATE INDEX "tf2idb_item_attributes_%i" ON "new_tf2idb_item_attributes" ("attribute" ASC)'
        % nonce)
    dbc.execute(
        'CREATE INDEX "tf2idb_class_%i" ON "new_tf2idb_class" ("class" ASC)' %
        nonce)
    dbc.execute(
        'CREATE INDEX "tf2idb_item_%i" ON "new_tf2idb_item" ("slot" ASC)' %
        nonce)

    # qualities
    for qname, qdata in data['qualities'].items():
        dbc.execute(
            'INSERT INTO new_tf2idb_qualities (name, value) VALUES (?,?)',
            (qname, qdata['value']))

    # particles
    for particle_type, particle_list in data[
            'attribute_controlled_attached_particles'].items():
        for k, v in particle_list.items():
            dbc.execute(
                'INSERT INTO new_tf2idb_particles (id,name) VALUES (?,?)',
                (k, v['system']))  #TODO add the other fields too

    # attributes
    attribute_type = {}
    for k, v in data['attributes'].items():
        at = v.get('attribute_type')
        if at:
            atype = at
        else:
            if v.get('stored_as_integer'):
                atype = 'integer'
            else:
                atype = 'float'
        attribute_type[v['name'].lower()] = (k, atype)
        dbc.execute(
            'INSERT INTO new_tf2idb_attributes '
            '(id,name,attribute_class,attribute_type,description_string,description_format,effect_type,hidden,stored_as_integer,armory_desc,is_set_bonus,'
            'is_user_generated,can_affect_recipe_component_name,apply_tag_to_item_definition) '
            'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
            (k, v.get('name'), v.get('attribute_class'),
             v.get('attribute_type'), v.get('description_string'),
             v.get('description_format'), v.get('effect_type'),
             v.get('hidden'), v.get('stored_as_integer'), v.get('armory_desc'),
             v.get('is_set_bonus'), v.get('is_user_generated'),
             v.get('can_affect_recipe_component_name'),
             v.get('apply_tag_to_item_definition')))

    # conflicts
    for k, v in data['equip_conflicts'].items():
        for region in v.keys():
            dbc.execute(
                'INSERT INTO new_tf2idb_equip_conflicts (name,region) VALUES (?,?)',
                (k, region))

    # items
    for id, v in data['items'].items():
        if id == 'default':
            continue
        i = resolve_prefabs(v, data)
        baseitem = 'baseitem' in i

        try:
            tool = None
            if 'tool' in i:
                tool = i['tool'].get('type')

            has_string_attribute = False
            if 'static_attrs' in i:
                for name, value in i['static_attrs'].items():
                    aid, atype = attribute_type[name.lower()]
                    if atype == 'string':
                        has_string_attribute = True
                    dbc.execute(
                        'INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (?,?,?,?)',
                        (id, aid, value, 1))

            if 'attributes' in i:
                for name, info in i['attributes'].items():
                    aid, atype = attribute_type[name.lower()]
                    if atype == 'string':
                        has_string_attribute = True
                    dbc.execute(
                        'INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (?,?,?,?)',
                        (id, aid, info['value'], 0))

            dbc.execute(
                'INSERT INTO new_tf2idb_item '
                '(id,name,item_name,class,slot,quality,tool_type,min_ilevel,max_ilevel,baseitem,holiday_restriction,has_string_attribute,propername) '
                'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)',
                (id, i['name'], i.get('item_name'), i['item_class'],
                 i.get('item_slot'), i.get('item_quality',
                                           ''), tool, i.get('min_ilevel'),
                 i.get('max_ilevel'), baseitem, i.get('holiday_restriction'),
                 has_string_attribute, i.get('propername')))

            if 'used_by_classes' in i:
                for prof, val in i['used_by_classes'].items():
                    dbc.execute(
                        'INSERT INTO new_tf2idb_class (id,class,slot) VALUES (?,?,?)',
                        (id, prof.lower(), val if val != '1' else None))

            region_field = i.get('equip_region') or i.get('equip_regions')
            if region_field:
                if type(region_field) is str:
                    region_field = {region_field: 1}
                for region in region_field.keys():
                    dbc.execute(
                        'INSERT INTO new_tf2idb_equip_regions (id,region) VALUES (?,?)',
                        (id, region))

            # capabilities
            for capability, val in i.get('capabilities', {}).items():
                dbc.execute(
                    'INSERT INTO new_tf2idb_capabilities (id,capability) VALUES (?,?)',
                    (id, (capability if val != '0' else '!' + capability)))

        except Exception:
            traceback.print_exc()
            print(id)
            raise

    def replace_table(name):
        dbc.execute('DROP TABLE IF EXISTS %s' % name)
        dbc.execute('ALTER TABLE new_%s RENAME TO %s' % (name, name))

    replace_table('tf2idb_class')
    replace_table('tf2idb_item_attributes')
    replace_table('tf2idb_item')
    replace_table('tf2idb_particles')
    replace_table('tf2idb_equip_conflicts')
    replace_table('tf2idb_equip_regions')
    replace_table('tf2idb_capabilities')
    replace_table('tf2idb_attributes')
    replace_table('tf2idb_qualities')

    db.commit()
    dbc.execute('VACUUM')
Example #24
if Is64Windows():
    key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                         "SOFTWARE\\Wow6432Node\\Valve\\Steam")
else:
    key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Valve\\Steam")

steampath = winreg.QueryValueEx(key, "InstallPath")[0]

# LOCATE THE ACTUAL PUBG INSTALLATION FILES

acfpath = steampath + "\\SteamApps\\appmanifest_578080.acf"

if os.path.isfile(acfpath):
    folderpath = steampath + "\\SteamApps\\common\\PUBG\\TslGame\\Content\\Movies"
    print("Game location detected")
else:
    vdffile = vdf.parse(open(steampath + "\\SteamApps\\LibraryFolders.vdf"))
    vdflocations = len(vdffile['LibraryFolders']) - 2
    for a in range(vdflocations):
        b = a + 1
        steampath2 = vdffile['LibraryFolders'][str(b)]
        acfpath2 = steampath2 + "\\SteamApps\\appmanifest_578080.acf"
        if os.path.isfile(acfpath2):
            folderpath = steampath2 + "\\SteamApps\\common\\PUBG\\TslGame\\Content\\Movies"
            print("Game location detected at " + folderpath)
            break

# MOVIE FILENAMES

i = 0
filename = ["LicenseScreen.mp4", "LoadingScreen.mp4", "LoadingScreen_Xbox.mp4"]
Example #25
def load_vdf(path: Path, **kwargs) -> dict:
    with open(path, **kwargs) as f:
        return vdf.parse(f)
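A possible call site for the helper above; the manifest path is hypothetical and the keyword arguments are simply forwarded to open():

from pathlib import Path

manifest = load_vdf(Path("steamapps/appmanifest_440.acf"), encoding="utf-8")
print(manifest["AppState"]["name"])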
Example #26
import vdf

d = vdf.parse(open("C:\\Steam\\SteamApps\\libraryfolders.vdf"))

print(d['LibraryFolders']['1'])

print(d['LibraryFolders']['2'])

print(len(d['LibraryFolders']) - 2)
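Newer Steam clients write libraryfolders.vdf with a lowercase "libraryfolders" key whose numbered entries are nested blocks containing a "path" field; a sketch that handles both layouts (the file location here is an assumption):

import vdf

with open("C:\\Steam\\steamapps\\libraryfolders.vdf") as f:
    d = vdf.parse(f)

folders = d.get("libraryfolders") or d.get("LibraryFolders", {})
paths = []
for key, value in folders.items():
    if not key.isdigit():
        continue  # skip metadata entries such as "TimeNextStatsReport"
    # old layout: value is the path string; new layout: value is a dict with a "path" key
    paths.append(value["path"] if isinstance(value, dict) else value)

print(paths)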
Example #27
fill_blue = "#1f496f"
fill_green = "#26721b"
font = ImageFont.truetype('Exo-SemiBold.ttf', 13)
src_image_root = "./src/"
src_preimages_root = "./prerendered_items/"
out_image_root = "./out/"

# clear output directory
if os.path.exists("./out"):
    shutil.rmtree("./out")
os.mkdir("./out")
os.mkdir("./out/items")
os.mkdir("./out/spellicons")

# parse items.txt
items = vdf.parse(open("./src/items.txt"))['DOTAAbilities']

# find all items with mana or health requirements
for item_name in items:

    if type(items[item_name]) is not dict:
        continue

    item = items[item_name]

    if 'AbilityManaCost' not in item:
        continue

    cost = item['AbilityManaCost']
    color = fill_blue
Example #28
def parse_schema():
    with open(client_schema_file) as client_schema, open(main_schema_file) as main_schema:
        client_schema = vdf.parse(client_schema)

        # Parsing the prefabs' equip regions
        schema_prefabs = {}
        for pf_name, pf in client_schema['items_game']['prefabs'].iteritems():
            pf_equip_region = prop(pf, 'equip_region')
            if len(pf_equip_region)>0:
                schema_prefabs[pf_name] = list(pf_equip_region)

        # Parsing the items
        items_tmp = {}
        for obj_id, obj in client_schema['items_game']['items'].iteritems():
            item = {}
            item_name = obj['name']

            """
                Short strings are used for the fields (saving ~32KiB):
                c - classes
                e - equip_regions
                i - image
            """

            # Character Classes
            item['c'] = prop(obj, 'used_by_classes')

            # Equip regions
            equip_regions = set()
            equip_regions |= prop(obj, 'equip_regions')
            equip_regions |= prop(obj, 'equip_region')

            # Prefab also affects the equip regions.
            # We already parsed the prefabs earlier.
            # For each of the item's prefabs, get that
            # prefab's equip regions and add them to the item's.
            if 'prefab' in obj:
                item_prefabs = set(obj['prefab'].split(' '))
                for ipf in item_prefabs:
                    equip_regions |= prop(schema_prefabs, ipf)
            item['e'] = set(equip_regions)    
            
            # add equip_regions from the conflicts_table dict
            for eq in equip_regions:
                if eq in conflicts_table:
                    item['e'] |= set(conflicts_table[eq])
                    
            items_tmp[item_name] = item

        """
            Going over the schema (not the client one) we do two things:
            1. Add an image URL
            2. Recreate the item list, this time using the item's real (in-game) name.
            Some items (like the Professor Speks) feature in the client schema under
            an unfamiliar internal name. The name which is put here is instead the one
            people see in-game.
        """
        items = {}
        schema = json.loads(main_schema.read())
        schema_items = schema['result']['items']
        for schema_item in schema_items:
            # if schema_item['item_class'] in ignore_item_classes: continue
            if schema_item['item_class'] not in accept_item_classes: continue
            
            # item is referred to in both schemas by its internal 'name'
            item = items_tmp[schema_item['name']]

            # sometimes the classes list is only present in the main schema
            item['c'] |= prop(schema_item, 'used_by_classes')
            if len(item['c'])==9: item['c'] = []
            # deleting this element if empty saves only ~2KiB, so don't bother
            
            # the common image location prefix is 45 characters long (saving ~65KiB)
            # http://media.steampowered.com/apps/440/icons/
            item['i'] = schema_item['image_url'][45:]
            # the actual in-game name of the item
            ingame_name = schema_item['item_name']
            items[ingame_name] = item
        
        return items
Example #29
def main():
    data = None
    with open(ITEMS_GAME) as f:
        data = vdf.parse(f)
        data = data['items_game']

    db = sqlite3.connect(DB_FILE)
    dbc = db.cursor()

    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_class')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item_attributes')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_item')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_particles')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_conflicts')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_equip_regions')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_capabilities')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_attributes')
    dbc.execute('DROP TABLE IF EXISTS new_tf2idb_qualities')

    dbc.execute('CREATE TABLE "new_tf2idb_class" ("id" INTEGER NOT NULL , "class" TEXT NOT NULL , "slot" TEXT , PRIMARY KEY ("id", "class"))')
    dbc.execute('CREATE TABLE "new_tf2idb_item_attributes" ('
        '"id" INTEGER NOT NULL,'
        '"attribute" INTEGER NOT NULL,'
        '"value" TEXT NOT NULL,'
        '"static" INTEGER,'
        'PRIMARY KEY ("id", "attribute")'
        ')'
    )
    dbc.execute('CREATE TABLE "new_tf2idb_item" ('
        '"id" INTEGER PRIMARY KEY NOT NULL,'
        '"name" TEXT NOT NULL,'
        '"item_name" TEXT,'
        '"class" TEXT NOT NULL,'
        '"slot" TEXT,'
        '"quality" TEXT NOT NULL,'
        '"tool_type" TEXT,'
        '"min_ilevel" INTEGER,'
        '"max_ilevel" INTEGER,'
        '"baseitem" INTEGER,'
        '"holiday_restriction" TEXT,'
        '"has_string_attribute" INTEGER,'
        '"propername" INTEGER'
        ')'
    )
    dbc.execute('CREATE TABLE "new_tf2idb_particles" ("id" INTEGER PRIMARY KEY  NOT NULL , "name" TEXT NOT NULL )')
    dbc.execute('CREATE TABLE "new_tf2idb_equip_conflicts" ("name" TEXT NOT NULL , "region" TEXT NOT NULL , PRIMARY KEY ("name", "region"))')
    dbc.execute('CREATE TABLE "new_tf2idb_equip_regions" ("id" INTEGER NOT NULL , "region" TEXT NOT NULL , PRIMARY KEY ("id", "region"))')
    dbc.execute('CREATE TABLE "new_tf2idb_capabilities"  ("id" INTEGER NOT NULL , "capability" TEXT NOT NULL )')
    dbc.execute('CREATE TABLE "new_tf2idb_attributes" ('
        '"id" INTEGER PRIMARY KEY NOT NULL,'
        '"name" TEXT NOT NULL,'
        '"attribute_class" TEXT,'
        '"attribute_type" TEXT,'
        '"description_string" TEXT,'
        '"description_format" TEXT,'
        '"effect_type" TEXT,'
        '"hidden" INTEGER,'
        '"stored_as_integer" INTEGER,'
        '"armory_desc" TEXT,'
        '"is_set_bonus" INTEGER,'
        '"is_user_generated" INTEGER,'
        '"can_affect_recipe_component_name" INTEGER,'
        '"apply_tag_to_item_definition" TEXT'
        ')'
    )
    dbc.execute('CREATE TABLE "new_tf2idb_qualities" ("name" TEXT PRIMARY KEY  NOT NULL , "value" INTEGER NOT NULL )')

    nonce = int(time.time())
    dbc.execute('CREATE INDEX "tf2idb_item_attributes_%i" ON "new_tf2idb_item_attributes" ("attribute" ASC)' % nonce)
    dbc.execute('CREATE INDEX "tf2idb_class_%i" ON "new_tf2idb_class" ("class" ASC)' % nonce)
    dbc.execute('CREATE INDEX "tf2idb_item_%i" ON "new_tf2idb_item" ("slot" ASC)' % nonce)


    # qualities
    for qname,qdata in data['qualities'].items():
        dbc.execute('INSERT INTO new_tf2idb_qualities (name, value) VALUES (?,?)', (qname, qdata['value']))

    # particles
    for particle_type,particle_list in data['attribute_controlled_attached_particles'].items():
        for k,v in particle_list.items():
            dbc.execute('INSERT INTO new_tf2idb_particles (id,name) VALUES (?,?)', (k, v['system']) )   #TODO add the other fields too

    # attributes
    attribute_type = {}
    for k,v in data['attributes'].items():
        at = v.get('attribute_type')
        if at:
            atype = at
        else:
            if v.get('stored_as_integer'):
                atype = 'integer'
            else:
                atype = 'float'
        attribute_type[v['name'].lower()] = (k, atype)
        dbc.execute('INSERT INTO new_tf2idb_attributes '
            '(id,name,attribute_class,attribute_type,description_string,description_format,effect_type,hidden,stored_as_integer,armory_desc,is_set_bonus,'
                'is_user_generated,can_affect_recipe_component_name,apply_tag_to_item_definition) '
            'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
            (k,v.get('name'),v.get('attribute_class'),v.get('attribute_type'),v.get('description_string'),v.get('description_format'),
                v.get('effect_type'),v.get('hidden'),v.get('stored_as_integer'),v.get('armory_desc'),v.get('is_set_bonus'),
                v.get('is_user_generated'),v.get('can_affect_recipe_component_name'),v.get('apply_tag_to_item_definition')
            )
        )

    # conflicts
    for k,v in data['equip_conflicts'].items():
        for region in v.keys():
            dbc.execute('INSERT INTO new_tf2idb_equip_conflicts (name,region) VALUES (?,?)', (k, region))

    # items
    for id,v in data['items'].items():
        if id == 'default':
            continue
        i, prefabs_used = resolve_prefabs(v, data['prefabs'])
        baseitem = 'baseitem' in i

        try:
            tool = None
            if 'tool' in i:
                tool = i['tool'].get('type')

            has_string_attribute = False
            if 'static_attrs' in i:
                for name,value in i['static_attrs'].items():
                    aid,atype = attribute_type[name.lower()]
                    if atype == 'string':
                        has_string_attribute = True
                    dbc.execute('INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (?,?,?,?)', (id,aid,value,1))

            if 'attributes' in i:
                for name,info in i['attributes'].items():
                    aid,atype = attribute_type[name.lower()]
                    if atype == 'string':
                        has_string_attribute = True
                    dbc.execute('INSERT INTO new_tf2idb_item_attributes (id,attribute,value,static) VALUES (?,?,?,?)', (id,aid,info['value'],0))

            dbc.execute('INSERT INTO new_tf2idb_item '
                '(id,name,item_name,class,slot,quality,tool_type,min_ilevel,max_ilevel,baseitem,holiday_restriction,has_string_attribute,propername) '
                'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)', 
                (id,i['name'],i.get('item_name'),i['item_class'],i.get('item_slot'),i.get('item_quality', ''), tool, i.get('min_ilevel'), i.get('max_ilevel'),baseitem,
                    i.get('holiday_restriction'), has_string_attribute, i.get('propername'))
            )

            if 'used_by_classes' in i:
                for prof, val in i['used_by_classes'].items():
                    dbc.execute('INSERT INTO new_tf2idb_class (id,class,slot) VALUES (?,?,?)', (id, prof.lower(), val if val != '1' else None))

            region_field = i.get('equip_region') or i.get('equip_regions')
            if region_field:
                if type(region_field) is str:
                    region_field = {region_field: 1}
                for region in region_field.keys():
                    dbc.execute('INSERT INTO new_tf2idb_equip_regions (id,region) VALUES (?,?)', (id, region))

            # capabilities
            for capability,val in i.get('capabilities', {}).items():
                dbc.execute('INSERT INTO new_tf2idb_capabilities (id,capability) VALUES (?,?)', (id, (capability if val != '0' else '!'+capability)))

        except Exception:
            traceback.print_exc()
            print(id)
            raise

    def replace_table(name):
        dbc.execute('DROP TABLE IF EXISTS %s' % name)
        dbc.execute('ALTER TABLE new_%s RENAME TO %s' % (name,name))

    replace_table('tf2idb_class')
    replace_table('tf2idb_item_attributes')
    replace_table('tf2idb_item')
    replace_table('tf2idb_particles')
    replace_table('tf2idb_equip_conflicts')
    replace_table('tf2idb_equip_regions')
    replace_table('tf2idb_capabilities')
    replace_table('tf2idb_attributes')
    replace_table('tf2idb_qualities')

    db.commit()
    dbc.execute('VACUUM')
Example #30
def _parse_vdf(self, vdf_path=None, vdf_data=None):
    if vdf_path and os.path.isfile(vdf_path):
        return vdf.parse(open(vdf_path))
    if vdf_data:
        return vdf.parse(vdf_data)
    return None
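Note that vdf.parse expects a file-like object, so if vdf_data above were a raw VDF string it would need vdf.loads or an io.StringIO wrapper instead; a small sketch of that assumption:

import io
import vdf

raw = '"AppState"\n{\n    "appid"    "440"\n}\n'
print(vdf.parse(io.StringIO(raw)))   # file-like source
print(vdf.loads(raw))                # plain string source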
Example #31
import vdf

new = vdf.parse(open("items_game.txt"))
old = vdf.parse(open("items_game_old.txt"))

f = open('changed_models.txt', 'a')
f.truncate(0)

for key, value in old['items_game']['items'].items():
    if 'model_player' in value:
        old_model = value['model_player']
        if 'model_player' in new['items_game']['items'][key]:
            new_model = new['items_game']['items'][key]['model_player']
            if old_model != new_model:
                f.write("%s %s\n" % (old_model, new_model))
f.close()
Example #32
dbc.execute('DROP TABLE IF EXISTS localizations')

# Prepare table
dbc.execute('CREATE TABLE IF NOT EXISTS "localizations" ('
	'"language" TEXT NOT NULL,'
	'"token" TEXT NOT NULL,'
	'"string" TEXT,'
	'PRIMARY KEY ("language", "token"))'
)

total_local_strings = 0

for localization_file in LANGUAGE_FILES:
	# Decode VDF.  It has UCS2 encoding, so decode it as such
	tokens_included = 0
	data = vdf.parse(open(localization_file, 'r', encoding='UTF-16LE'))
	data = data['lang']
	
	language = data['Language'].lower()
	for k, v in data['Tokens'].items():
		if not (k.startswith('[english]') and DROP_LOCALIZED_ENGLISH_TOKEN):
			dbc.execute('INSERT OR REPLACE INTO localizations (language,token,string) VALUES (?,?,?)', (language, k, v) )
			tokens_included += 1

	db.commit()
	print('Localization file for {} ({}) has {} string entries (inserted {})'.format(data['Language'], os.path.basename(localization_file), len(data['Tokens']), tokens_included))
	total_local_strings += tokens_included

# Just do some housekeeping for size
print('Performing a VACUUM on the database.')
dbc.execute('VACUUM')
Example #33
def parse_schema():
    with open(client_schema_file) as client_schema, open(
            main_schema_file) as main_schema:
        client_schema = vdf.parse(client_schema)

        # Parsing the prefabs' equip regions
        schema_prefabs = {}
        for pf_name, pf in client_schema['items_game']['prefabs'].iteritems():
            pf_equip_region = prop(pf, 'equip_region')
            if len(pf_equip_region) > 0:
                schema_prefabs[pf_name] = list(pf_equip_region)

        # Parsing the items
        items_tmp = {}
        for obj_id, obj in client_schema['items_game']['items'].iteritems():
            item = {}
            item_name = obj['name']
            """
                Short strings are used for the fields (saving ~32KiB):
                c - classes
                e - equip_regions
                i - image
            """

            # Character Classes
            item['c'] = prop(obj, 'used_by_classes')

            # Equip regions
            equip_regions = set()
            equip_regions |= prop(obj, 'equip_regions')
            equip_regions |= prop(obj, 'equip_region')

            # Prefab also affects the equip regions.
            # We already parsed the prefabs earlier.
            # For each of the item's prefabs, get that
            # prefab's equip regions and add them to the item's.
            if 'prefab' in obj:
                item_prefabs = set(obj['prefab'].split(' '))
                for ipf in item_prefabs:
                    equip_regions |= prop(schema_prefabs, ipf)
            item['e'] = set(equip_regions)

            # add equip_regions from the conflicts_table dict
            for eq in equip_regions:
                if eq in conflicts_table:
                    item['e'] |= set(conflicts_table[eq])

            items_tmp[item_name] = item
        """
            Going over the schema (not the client one) we do two things:
            1. Add an image URL
            2. Recreate the item list, this time using the item's real (in-game) name.
            Some items (like the Professor Speks) feature in the client schema under
            an unfamiliar internal name. The name which is put here is instead the one
            people see in-game.
        """
        items = {}
        schema = json.loads(main_schema.read())
        schema_items = schema['result']['items']
        for schema_item in schema_items:
            # if schema_item['item_class'] in ignore_item_classes: continue
            if schema_item['item_class'] not in accept_item_classes: continue

            # item is referred to in both schemas by its internal 'name'
            item = items_tmp[schema_item['name']]

            # sometimes the classes list is only present in the main schema
            item['c'] |= prop(schema_item, 'used_by_classes')
            if len(item['c']) == 9: item['c'] = []
            # deleting this element if empty saves only ~2KiB, so don't bother

            # the common image location prefix is 45 characters long (saving ~65KiB)
            # http://media.steampowered.com/apps/440/icons/
            item['i'] = schema_item['image_url'][45:]
            # the actual in-game name of the item
            ingame_name = schema_item['item_name']
            items[ingame_name] = item

        return items