def setup_resource_packs(): terrains = {} try: os.mkdir("terrain-textures") except OSError: pass terrains["Default"] = DefaultResourcePack() if os.path.exists(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks")): zipResourcePacks = directories.getAllOfAFile(unicode(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks")), ".zip") folderResourcePacks = os.listdir(unicode(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks"))) for zip_tex_pack in zipResourcePacks: zrp = ZipResourcePack(zip_tex_pack) if not zrp.isEmpty: if not zrp.tooBig: terrains[zrp.pack_name] = zrp for folder_tex_pack in folderResourcePacks: if os.path.isdir(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks", folder_tex_pack)): frp = FolderResourcePack(folder_tex_pack) if not frp.isEmpty: if not frp.tooBig: terrains[frp.pack_name] = frp for tex in terrains.keys(): pack = terrains[tex] if not os.path.exists(pack.terrain_path()): del terrains[tex] try: shutil.rmtree(os.path.join(directories.parentDir, "textures")) except: print "Could not remove \"textures\" directory" pass return terrains
def setup_resource_packs(): ''' Handles parsing of Resource Packs and removing ones that are either have to0 high of a resolution, or don't replace any textures ''' log.debug("Setting up the resource packs.") terrains = {} try: os.mkdir("terrain-textures") except OSError: pass terrains["Default Resource Pack"] = DefaultResourcePack() if os.path.exists( os.path.join( directories.getMinecraftProfileDirectory( directories.getSelectedProfile()), "resourcepacks")): log.debug("Gathering zipped packs...") zipResourcePacks = directories.getAllOfAFile( unicode( os.path.join( directories.getMinecraftProfileDirectory( directories.getSelectedProfile()), "resourcepacks")), ".zip") log.debug("Gatering folder packs...") folderResourcePacks = os.listdir( unicode( os.path.join( directories.getMinecraftProfileDirectory( directories.getSelectedProfile()), "resourcepacks"))) log.debug("Processing zipped packs...") for zip_tex_pack in zipResourcePacks: zrp = ZipResourcePack(zip_tex_pack) if not zrp.isEmpty: if not zrp.tooBig: terrains[zrp.pack_name] = zrp log.debug("Processing folder packs...") for folder_tex_pack in folderResourcePacks: if os.path.isdir( os.path.join( directories.getMinecraftProfileDirectory( directories.getSelectedProfile()), "resourcepacks", folder_tex_pack)): frp = FolderResourcePack(folder_tex_pack) if not frp.isEmpty: if not frp.tooBig: terrains[frp.pack_name] = frp for tex in terrains.keys(): pack = terrains[tex] if not os.path.exists(pack.terrain_path()): del terrains[tex] try: shutil.rmtree(os.path.join(directories.parentDir, "textures")) except: print "Could not remove \"textures\" directory" pass return terrains
def compareMD5Hashes(found_filters): ''' Compares the MD5 Hashes of filters :param found_filters: A list of filter paths ''' ff = {} for filter in found_filters: ff[os.path.split(filter)[-1]] = filter try: if not os.path.exists(os.path.join(directories.getDataDir(), "filters.json")): filterDict = {"filter-md5s": {}} with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as j: json.dump(filterDict, j) else: convert = json.load(open(os.path.join(directories.getDataDir(), "filters.json"), 'rb')) if "filters" in convert: convert["filter-md5s"] = convert["filters"] del convert["filters"] with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as done: json.dump(convert, done) filterInBundledFolder = directories.getAllOfAFile(os.path.join(directories.getDataDir(), "stock-filters"), ".py") filterBundle = {} for bundled in filterInBundledFolder: filterBundle[os.path.split(bundled)[-1]] = bundled hashJSON = json.load(open(os.path.join(directories.getDataDir(), "filters.json"), 'rb')) for filt in ff.keys(): realName = filt if realName in filterBundle.keys(): with open(ff[filt], 'r') as filtr: filterData = filtr.read() if realName in hashJSON["filter-md5s"]: old_hash = hashJSON["filter-md5s"][realName] bundledData = None with open(filterBundle[realName]) as bundledFilter: bundledData = bundledFilter.read() if old_hash != hashlib.md5(bundledData).hexdigest() and bundledData is not None: shutil.copy(filterBundle[realName], directories.filtersDir) hashJSON["filter-md5s"][realName] = hashlib.md5(bundledData).hexdigest() if old_hash != hashlib.md5(filterData).hexdigest() and hashlib.md5(filterData).hexdigest() != hashlib.md5(bundledData).hexdigest(): shutil.copy(filterBundle[realName], directories.filtersDir) hashJSON["filter-md5s"][realName] = hashlib.md5(bundledData).hexdigest() else: hashJSON["filter-md5s"][realName] = hashlib.md5(filterData).hexdigest() for bundled in filterBundle.keys(): if bundled not in ff.keys(): 
shutil.copy(filterBundle[bundled], directories.filtersDir) data = None with open(filterBundle[bundled], 'r') as f: data = f.read() if data is not None: hashJSON["filter-md5s"][bundled] = hashlib.md5(data).hexdigest() with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as done: json.dump(hashJSON, done) except Exception, e: print ('Error: {}'.format(e))
def reloadFilters(self):
    # Reload the filter modules from the filters directory, first re-syncing
    # their MD5 hashes against the bundled stock filters.
    # NOTE(review): the original source was collapsed onto one line; the
    # nesting reconstructed here (tryImport local to reloadFilters, the
    # compareMD5Hashes call at method level) is the most plausible reading —
    # confirm against the project's history.
    if self.filterModules:
        for k, m in self.filterModules.iteritems():
            name = m.__name__
            # NOTE(review): 'del m' only removes the local loop binding; it
            # does not unload the module or remove it from self.filterModules.
            del m
    mceutils.compareMD5Hashes(directories.getAllOfAFile(directories.filtersDir, ".py"))

    def tryImport(name):
        # Import a filter module by name; on failure, log the traceback,
        # alert the user, and return a dummy object placeholder.
        try:
            return __import__(name)
        except Exception, e:
            print traceback.format_exc()
            alert(_(u"Exception while importing filter module {}. See console for details.\n\n{}").format(name, e))
            return object()
def compareMD5Hashes(found_filters): ''' Compares the MD5 Hashes of filters :param found_filters: A list of filter paths ''' ff = {} for filter in found_filters: ff[os.path.split(filter)[-1]] = filter try: if not os.path.exists(os.path.join(directories.getDataDir(), "filters.json")): filterDict = {} filterDict["filters"] = {} with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as j: json.dump(filterDict, j) filterInBundledFolder = directories.getAllOfAFile(os.path.join(directories.getDataDir(), "stock-filters"), ".py") filterBundle = {} for bundled in filterInBundledFolder: filterBundle[os.path.split(bundled)[-1]] = bundled hashJSON = json.load(open(os.path.join(directories.getDataDir(), "filters.json"), 'rb')) for filt in ff.keys(): realName = filt if realName in filterBundle.keys(): with open(ff[filt], 'r') as filtr: filterData = filtr.read() if realName in hashJSON["filters"]: old_hash = hashJSON["filters"][realName] bundledData = None with open(filterBundle[realName]) as bundledFilter: bundledData = bundledFilter.read() if old_hash != hashlib.md5(bundledData).hexdigest() and bundledData != None: shutil.copy(filterBundle[realName], directories.filtersDir) hashJSON["filters"][realName] = hashlib.md5(bundledData).hexdigest() if old_hash != hashlib.md5(filterData).hexdigest() and hashlib.md5(filterData).hexdigest() != hashlib.md5(bundledData).hexdigest(): shutil.copy(filterBundle[realName], directories.filtersDir) hashJSON["filters"][realName] = hashlib.md5(bundledData).hexdigest() else: hashJSON["filters"][realName] = hashlib.md5(filterData).hexdigest() for bundled in filterBundle.keys(): if bundled not in ff.keys(): shutil.copy(filterBundle[bundled], directories.filtersDir) data = None with open(filterBundle[bundled], 'r') as f: data = f.read() if data != None: hashJSON[bundled] = hashlib.md5(data).hexdigest() with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as done: json.dump(hashJSON, done) except Exception, e: print 
('Error: {}'.format(e))
def setup_resource_packs(): terrains = {} try: os.mkdir("terrain-textures") except OSError: pass terrains["Default"] = DefaultResourcePack() resourcePacks = directories.getAllOfAFile(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks"), ".zip") for tex_pack in resourcePacks: rp = ResourcePack(tex_pack) if not rp.isEmpty: if not rp.tooBig: terrains[rp.pack_name] = rp try: shutil.rmtree(directories.parentDir+os.path.sep+"textures") except: print "Could not remove \"textures\" directory" pass return terrains
def setup_resource_packs(): ''' Handles parsing of Resource Packs and removing ones that are either have to0 high of a resolution, or don't replace any textures ''' log.debug("Setting up the resource packs.") terrains = {} try: os.mkdir("terrain-textures") except OSError: pass terrains["Default Resource Pack"] = DefaultResourcePack() if os.path.exists(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks")): log.debug("Gathering zipped packs...") zipResourcePacks = directories.getAllOfAFile(unicode(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks")), ".zip") log.debug("Gatering folder packs...") folderResourcePacks = os.listdir(unicode(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks"))) log.debug("Processing zipped packs...") for zip_tex_pack in zipResourcePacks: zrp = ZipResourcePack(zip_tex_pack) if not zrp.isEmpty: if not zrp.tooBig: terrains[zrp.pack_name] = zrp log.debug("Processing folder packs...") for folder_tex_pack in folderResourcePacks: if os.path.isdir(os.path.join(directories.getMinecraftProfileDirectory(directories.getSelectedProfile()), "resourcepacks", folder_tex_pack)): frp = FolderResourcePack(folder_tex_pack) if not frp.isEmpty: if not frp.tooBig: terrains[frp.pack_name] = frp for tex in terrains.keys(): pack = terrains[tex] if not os.path.exists(pack.terrain_path()): del terrains[tex] try: shutil.rmtree(os.path.join(directories.parentDir, "textures")) except: print "Could not remove \"textures\" directory" pass return terrains
# NOTE(review): truncated fragment of a top-level startup sequence — the
# 'try:' matching the first 'except' and the body of the trailing
# 'try: ServerJarStorage()' lie outside this chunk, so the code is kept
# exactly as-is (only this comment was added).  It best-effort creates the
# Brushes/filters folders, copies or re-hashes the bundled filters, and
# appends the filters dir to sys.path (filesystem-encoded) if missing.
except Exception, e: logging.warning('Error copying bundled Brushes: {0!r}'.format(e)) try: os.mkdir(directories.brushesDir) except Exception, e: logging.warning('Error creating Brushes folder: {0!r}'.format(e)) try: if not os.path.exists(directories.filtersDir): shutil.copytree( os.path.join(directories.getDataDir(), u'stock-filters'), directories.filtersDir ) else: # Start hashing the filter dir mceutils.compareMD5Hashes(directories.getAllOfAFile(directories.filtersDir, ".py")) except Exception, e: logging.warning('Error copying bundled filters: {0!r}'.format(e)) try: os.mkdir(directories.filtersDir) except Exception, e: logging.warning('Error creating filters folder: {0!r}'.format(e)) if directories.filtersDir not in [s.decode(sys.getfilesystemencoding()) if isinstance(s, str) else s for s in sys.path]: sys.path.append(directories.filtersDir.encode(sys.getfilesystemencoding())) try: ServerJarStorage()
# NOTE(review): truncated fragment of a top-level startup sequence — the
# 'try:' matching the first 'except' and the body of the final dangling
# 'try:' lie outside this chunk, so the code is kept exactly as-is (only
# this comment was added).  Same logic as the sibling fragment above it in
# the file: best-effort folder creation, bundled-filter copy/re-hash, and
# sys.path registration of the filters directory.
except Exception, e: logging.warning('Error copying bundled Brushes: {0!r}'.format(e)) try: os.mkdir(directories.brushesDir) except Exception, e: logging.warning('Error creating Brushes folder: {0!r}'.format(e)) try: if not os.path.exists(directories.filtersDir): shutil.copytree( os.path.join(directories.getDataDir(), u'stock-filters'), directories.filtersDir) else: # Start hashing the filter dir mceutils.compareMD5Hashes( directories.getAllOfAFile(directories.filtersDir, ".py")) except Exception, e: logging.warning('Error copying bundled filters: {0!r}'.format(e)) try: os.mkdir(directories.filtersDir) except Exception, e: logging.warning('Error creating filters folder: {0!r}'.format(e)) if directories.filtersDir not in [ s.decode(sys.getfilesystemencoding()) if isinstance(s, str) else s for s in sys.path ]: sys.path.append( directories.filtersDir.encode(sys.getfilesystemencoding())) try: