def importFitFromBuffer(self, bufferStr, activeFit=None):
    """Import one or more fits from a clipboard-style text buffer.

    Applies this service's current character, damage pattern and target
    resists to every imported fit. Fits are returned unsaved.
    """
    _, importedFits = Port.importAuto(bufferStr, activeFit=activeFit)
    char = self.character
    pattern = self.pattern
    resists = self.targetResists
    for newFit in importedFits:
        newFit.character = char
        newFit.damagePattern = pattern
        newFit.targetResists = resists
    return importedFits
def importFitFromBuffer(self, bufferStr, activeFit=None):
    """Import fits from a text buffer, apply service defaults, and persist.

    Each imported fit receives the current character, damage pattern and
    target resists, then is saved to the database before being returned.
    """
    _, importedFits = Port.importAuto(bufferStr, activeFit=activeFit)
    char = self.character
    pattern = self.pattern
    resists = self.targetResists
    for newFit in importedFits:
        newFit.character = char
        newFit.damagePattern = pattern
        newFit.targetResists = resists
        eos.db.save(newFit)
    return importedFits
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.

    paths    -- iterable of file paths to import from
    callback -- optional GUI progress callable, invoked via wx.CallAfter
    returns the list of imported (and saved) fits
    """
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, "Processing file:\n%s" % path)

        # Context manager closes the handle even if decoding or import
        # raises (the original leaked the file handle and shadowed the
        # `file` builtin).
        with open(path, "r") as srcFile:
            srcString = srcFile.read()

        codec_found = None
        # If file had ANSI encoding, convert it to unicode using system
        # default codepage, or use fallbacks UTF-16, then cp1252 on any
        # encoding errors
        if isinstance(srcString, str):
            attempt_codecs = (defcodepage, "utf-16", "cp1252")
            for page in attempt_codecs:
                try:
                    srcString = unicode(srcString, page)
                    codec_found = page
                except UnicodeDecodeError:
                    logger.warn("Error unicode decoding %s from page %s, trying next codec", path, page)
                else:
                    break
        else:
            # nasty hack to detect other transparent utf-16 loading
            if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
                codec_found = "utf-16"
            else:
                codec_found = "utf-8"

        _, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
        fits += fitsImport

    IDs = []
    numFits = len(fits)
    for i, fit in enumerate(fits):
        # Set some more fit attributes and save
        fit.character = self.character
        fit.damagePattern = self.pattern
        fit.targetResists = self.targetResists
        eos.db.save(fit)
        IDs.append(fit.ID)  # NOTE(review): IDs is collected but never used here
        if callback:  # Pulse
            wx.CallAfter(
                callback,
                "Processing complete, saving fits to database\n(%d/%d)" % (i + 1, numFits)
            )
    return fits
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.

    paths    -- iterable of file paths to import from
    callback -- optional GUI progress callable, invoked via wx.CallAfter
    returns the list of imported (and saved) fits
    """
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, "Processing file:\n%s" % path)

        # Context manager closes the handle even if decoding or import
        # raises (the original leaked the file handle and shadowed the
        # `file` builtin).
        with open(path, "r") as srcFile:
            srcString = srcFile.read()

        codec_found = None
        # If file had ANSI encoding, convert it to unicode using system
        # default codepage, or use fallbacks UTF-16, then cp1252 on any
        # encoding errors
        if isinstance(srcString, str):
            attempt_codecs = (defcodepage, "utf-16", "cp1252")
            for page in attempt_codecs:
                try:
                    srcString = unicode(srcString, page)
                    codec_found = page
                except UnicodeDecodeError:
                    logger.warn("Error unicode decoding %s from page %s, trying next codec", path, page)
                else:
                    break
        else:
            # nasty hack to detect other transparent utf-16 loading
            if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
                codec_found = "utf-16"
            else:
                codec_found = "utf-8"

        _, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
        fits += fitsImport

    IDs = []
    numFits = len(fits)
    for i, fit in enumerate(fits):
        # Set some more fit attributes and save
        fit.character = self.character
        fit.damagePattern = self.pattern
        fit.targetResists = self.targetResists
        eos.db.save(fit)
        IDs.append(fit.ID)  # NOTE(review): IDs is collected but never used here
        if callback:  # Pulse
            wx.CallAfter(
                callback,
                "Processing complete, saving fits to database\n(%d/%d)" % (i + 1, numFits)
            )
    return fits
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.

    paths    -- iterable of file paths to import from
    callback -- optional GUI progress callable, invoked via wx.CallAfter
    returns the list of imported (and saved) fits
    """
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, "Processing file:\n%s" % path)

        # Context manager closes the handle even on error (the original
        # leaked the file handle and shadowed the `file` builtin).
        with open(path, "r") as srcFile:
            srcString = srcFile.read()

        # If file had ANSI encoding, convert it to unicode using system
        # default codepage, or use fallback cp1252 on any encoding errors
        if isinstance(srcString, str):
            try:
                srcString = unicode(srcString, defcodepage)
            except UnicodeDecodeError:
                srcString = unicode(srcString, "cp1252")

        _, fitsImport = Port.importAuto(srcString, path, callback=callback)
        fits += fitsImport

    IDs = []
    numFits = len(fits)
    for i, fit in enumerate(fits):
        # Set some more fit attributes and save
        fit.character = self.character
        fit.damagePattern = self.pattern
        fit.targetResists = self.targetResists
        eos.db.save(fit)
        IDs.append(fit.ID)  # NOTE(review): IDs is collected but never used here
        if callback:  # Pulse
            wx.CallAfter(
                callback,
                "Processing complete, saving fits to database\n(%d/%d)" % (i + 1, numFits)
            )
    return fits
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.

    paths    -- iterable of file paths to import from
    callback -- optional GUI progress callable, invoked via wx.CallAfter
    returns the list of imported (and saved) fits
    """
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, "Processing file:\n%s" % path)

        # Context manager closes the handle even on error (the original
        # leaked the file handle and shadowed the `file` builtin).
        with open(path, "r") as srcFile:
            srcString = srcFile.read()

        # If file had ANSI encoding, convert it to unicode using system
        # default codepage, or use fallback cp1252 on any encoding errors
        if isinstance(srcString, str):
            try:
                srcString = unicode(srcString, defcodepage)
            except UnicodeDecodeError:
                srcString = unicode(srcString, "cp1252")

        _, fitsImport = Port.importAuto(srcString, path, callback=callback)
        fits += fitsImport

    IDs = []
    numFits = len(fits)
    for i, fit in enumerate(fits):
        # Set some more fit attributes and save
        fit.character = self.character
        fit.damagePattern = self.pattern
        fit.targetResists = self.targetResists
        eos.db.save(fit)
        IDs.append(fit.ID)  # NOTE(review): IDs is collected but never used here
        if callback:  # Pulse
            wx.CallAfter(
                callback,
                "Processing complete, saving fits to database\n(%d/%d)" % (i + 1, numFits))
    return fits
def importFit(self, path):
    """
    Import fits from a single file.

    path -- path of the file to import from
    returns the list of imported fits with this service's character and
    damage pattern applied (fits are not saved here)
    """
    filename = os.path.split(path)[1]
    defcodepage = locale.getpreferredencoding()

    # Context manager closes the handle deterministically (the original
    # leaked the file handle and shadowed the `file` builtin).
    with open(path, "r") as srcFile:
        srcString = srcFile.read()

    # If file had ANSI encoding, convert it to unicode using system
    # default codepage, or use fallback cp1252 on any encoding errors
    if isinstance(srcString, str):
        try:
            srcString = unicode(srcString, defcodepage)
        except UnicodeDecodeError:
            srcString = unicode(srcString, "cp1252")

    _, fits = Port.importAuto(srcString, filename)
    for fit in fits:
        fit.character = self.character
        fit.damagePattern = self.pattern
    return fits
def importFit(self, path):
    """
    Import fits from a single file.

    path -- path of the file to import from
    returns the list of imported fits with this service's character and
    damage pattern applied (fits are not saved here)
    """
    filename = os.path.split(path)[1]
    defcodepage = locale.getpreferredencoding()

    # Context manager closes the handle deterministically (the original
    # leaked the file handle and shadowed the `file` builtin).
    with open(path, "r") as srcFile:
        srcString = srcFile.read()

    # If file had ANSI encoding, convert it to unicode using system
    # default codepage, or use fallback cp1252 on any encoding errors
    if isinstance(srcString, str):
        try:
            srcString = unicode(srcString, defcodepage)
        except UnicodeDecodeError:
            srcString = unicode(srcString, "cp1252")

    _, fits = Port.importAuto(srcString, filename)
    for fit in fits:
        fit.character = self.character
        fit.damagePattern = self.pattern
    return fits
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.
    returns
    """
    # System default codepage, tried first when decoding ANSI files.
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, 1, "Processing file:\n%s" % path)
        # NOTE(review): handle is never closed and `file` shadows the
        # Python 2 builtin of the same name.
        file = open(path, "r")
        srcString = file.read()
        if len(srcString) == 0:  # ignore blank files
            continue
        codec_found = None
        # If file had ANSI encoding, decode it to unicode using detection
        # of BOM header or if there is no header try default
        # codepage then fallback to utf-16, cp1252
        if isinstance(srcString, str):
            # (BOM bytes, codec name) pairs.
            encoding_map = (('\xef\xbb\xbf', 'utf-8'),
                            ('\xff\xfe\0\0', 'utf-32'),
                            ('\0\0\xfe\xff', 'UTF-32BE'),
                            ('\xff\xfe', 'utf-16'),
                            ('\xfe\xff', 'UTF-16BE'))
            for bom, encoding in encoding_map:
                if srcString.startswith(bom):
                    # NOTE(review): no break — a UTF-32 LE BOM also matches
                    # the shorter utf-16 prefix later in the map and is
                    # overwritten; confirm this is intended.
                    codec_found = encoding
                    savebom = bom
            if codec_found is None:
                logger.info("Unicode BOM not found in file %s.", path)
                attempt_codecs = (defcodepage, "utf-8", "utf-16", "cp1252")
                for page in attempt_codecs:
                    try:
                        logger.info(
                            "Attempting to decode file %s using %s page.",
                            path,
                            page)
                        srcString = unicode(srcString, page)
                        codec_found = page
                        logger.info("File %s decoded using %s page.", path, page)
                    except UnicodeDecodeError:
                        logger.info(
                            "Error unicode decoding %s from page %s, trying next codec",
                            path,
                            page)
                    else:
                        # Decode succeeded; stop trying further codecs.
                        break
            else:
                logger.info("Unicode BOM detected in %s, using %s page.", path, codec_found)
                # Strip the BOM before decoding with the detected codec.
                srcString = unicode(srcString[len(savebom):], codec_found)
        else:
            # nasty hack to detect other transparent utf-16 loading
            if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
                codec_found = "utf-16"
            else:
                codec_found = "utf-8"
        if codec_found is None:
            # All decode attempts failed; return an error tuple to caller.
            return False, "Proper codec could not be established for %s" % path
        try:
            _, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
            fits += fitsImport
        # Python 2 except syntax; ExpatError means the XML was malformed.
        except xml.parsers.expat.ExpatError, e:
            return False, "Malformed XML in %s" % path
        except Exception, e:
            logger.exception("Unknown exception processing: %s", path)
            return False, "Unknown Error while processing %s" % path
    # NOTE(review): this chunk appears truncated — the post-loop fit
    # attribute/save logic and the final return are not visible here.
def importFitFromBuffer(self, bufferStr, activeFit=None):
    """Import fits from a text buffer and apply service defaults.

    Sets the current character and damage pattern on every imported fit;
    fits are returned unsaved.
    """
    _, importedFits = Port.importAuto(bufferStr, activeFit=activeFit)
    char = self.character
    pattern = self.pattern
    for newFit in importedFits:
        newFit.character = char
        newFit.damagePattern = pattern
    return importedFits
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.
    returns
    """
    # System default codepage, tried first when decoding ANSI files.
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, 1, "Processing file:\n%s" % path)
        # NOTE(review): handle is never closed and `file` shadows the
        # Python 2 builtin of the same name.
        file = open(path, "r")
        srcString = file.read()
        if len(srcString) == 0:  # ignore blank files
            continue
        codec_found = None
        # If file had ANSI encoding, decode it to unicode using detection
        # of BOM header or if there is no header try default
        # codepage then fallback to utf-16, cp1252
        if isinstance(srcString, str):
            # (BOM bytes, codec name) pairs.
            encoding_map = (
                ('\xef\xbb\xbf', 'utf-8'),
                ('\xff\xfe\0\0', 'utf-32'),
                ('\0\0\xfe\xff', 'UTF-32BE'),
                ('\xff\xfe', 'utf-16'),
                ('\xfe\xff', 'UTF-16BE'))
            for bom, encoding in encoding_map:
                if srcString.startswith(bom):
                    # NOTE(review): no break — a UTF-32 LE BOM also matches
                    # the shorter utf-16 prefix later in the map and is
                    # overwritten; confirm this is intended.
                    codec_found = encoding
                    savebom = bom
            if codec_found is None:
                logger.info("Unicode BOM not found in file %s.", path)
                attempt_codecs = (defcodepage, "utf-8", "utf-16", "cp1252")
                for page in attempt_codecs:
                    try:
                        logger.info("Attempting to decode file %s using %s page.", path, page)
                        srcString = unicode(srcString, page)
                        codec_found = page
                        logger.info("File %s decoded using %s page.", path, page)
                    except UnicodeDecodeError:
                        logger.info("Error unicode decoding %s from page %s, trying next codec", path, page)
                    else:
                        # Decode succeeded; stop trying further codecs.
                        break
            else:
                logger.info("Unicode BOM detected in %s, using %s page.", path, codec_found)
                # Strip the BOM before decoding with the detected codec.
                srcString = unicode(srcString[len(savebom):], codec_found)
        else:
            # nasty hack to detect other transparent utf-16 loading
            if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
                codec_found = "utf-16"
            else:
                codec_found = "utf-8"
        if codec_found is None:
            # All decode attempts failed; return an error tuple to caller.
            return False, "Proper codec could not be established for %s" % path
        try:
            _, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
            fits += fitsImport
        # Python 2 except syntax; ExpatError means the XML was malformed.
        except xml.parsers.expat.ExpatError, e:
            return False, "Malformed XML in %s" % path
        except Exception, e:
            logger.exception("Unknown exception processing: %s", path)
            return False, "Unknown Error while processing %s" % path
    # NOTE(review): this chunk appears truncated — the post-loop fit
    # attribute/save logic and the final return are not visible here.
def importFitFromFiles(self, paths, callback=None):
    """
    Imports fits from file(s). First processes all provided paths and stores
    assembled fits into a list. This allows us to call back to the GUI as
    fits are processed as well as when fits are being saved.

    paths    -- iterable of file paths to import from
    callback -- optional GUI progress callable, invoked via wx.CallAfter
    returns the list of imported (and saved) fits
    """
    defcodepage = locale.getpreferredencoding()
    fits = []
    for path in paths:
        if callback:  # Pulse
            wx.CallAfter(callback, "Processing file:\n%s" % path)

        # Context manager closes the handle even on error (the original
        # leaked the file handle and shadowed the `file` builtin).
        with open(path, "r") as srcFile:
            srcString = srcFile.read()

        codec_found = None
        # If file had ANSI encoding, decode it to unicode using detection
        # of BOM header or if there is no header try default
        # codepage then fallback to utf-16, cp1252
        if isinstance(srcString, str):
            # (BOM bytes, codec name) pairs, longest BOMs before their
            # shorter prefixes so utf-32 is not misread as utf-16.
            encoding_map = (('\xef\xbb\xbf', 'utf-8'),
                            ('\xff\xfe\0\0', 'utf-32'),
                            ('\0\0\xfe\xff', 'UTF-32BE'),
                            ('\xff\xfe', 'utf-16'),
                            ('\xfe\xff', 'UTF-16BE'))
            for bom, encoding in encoding_map:
                if srcString.startswith(bom):
                    codec_found = encoding
                    savebom = bom
                    # Stop at first match: without this break a UTF-32 LE
                    # BOM ('\xff\xfe\0\0') also matched the shorter utf-16
                    # prefix and was decoded with the wrong codec.
                    break
            if codec_found is None:
                logger.warn("Unicode BOM not found in file %s.", path)
                attempt_codecs = (defcodepage, "utf-16", "cp1252")
                for page in attempt_codecs:
                    try:
                        logger.warn("Attempting to decode file %s using %s page.", path, page)
                        srcString = unicode(srcString, page)
                        codec_found = page
                        logger.warn("File %s decoded using %s page.", path, page)
                    except UnicodeDecodeError:
                        logger.warn("Error unicode decoding %s from page %s, trying next codec", path, page)
                    else:
                        break
            else:
                logger.debug("Unicode BOM detected in %s, using %s page.", path, codec_found)
                # Strip the BOM before decoding with the detected codec.
                srcString = unicode(srcString[len(savebom):], codec_found)
        else:
            # nasty hack to detect other transparent utf-16 loading
            if srcString[0] == '<' and 'utf-16' in srcString[:128].lower():
                codec_found = "utf-16"
            else:
                codec_found = "utf-8"

        # NOTE(review): if every codec attempt fails, codec_found stays
        # None and is passed through to Port.importAuto — confirm that is
        # handled downstream.
        _, fitsImport = Port.importAuto(srcString, path, callback=callback, encoding=codec_found)
        fits += fitsImport

    IDs = []
    numFits = len(fits)
    for i, fit in enumerate(fits):
        # Set some more fit attributes and save
        fit.character = self.character
        fit.damagePattern = self.pattern
        fit.targetResists = self.targetResists
        eos.db.save(fit)
        IDs.append(fit.ID)  # NOTE(review): IDs is collected but never used here
        if callback:  # Pulse
            wx.CallAfter(
                callback,
                "Processing complete, saving fits to database\n(%d/%d)" % (i + 1, numFits)
            )
    return fits