def getPrefix(self, *args, **kwargs):
    """Builds the log-line prefix: a timestamp, the current thread id,
    optional pyramid request info, and an optional source-location suffix.

    NOTE(review): assumes self._app.environ is a WSGI environ dict when
    pyramidApp is truthy — confirm against the app wrapper.
    """
    if self._locationPrefix:
        # Most recent stack frame identifies where the log call originated.
        item = self.getStackData()[-1]
        loc = ' -> %s #%s]' % (item['file'], StringUtils.toUnicode(item['line']))
    else:
        loc = ']'
    if self._app and self._app.pyramidApp:
        wsgi = self._app.environ
        # Strip the remote user's name down via the initials pattern.
        initials = self._INITIALS_RX.sub('', ArgsUtils.get('REMOTE_USER', '', wsgi))
        if initials:
            initials += ' | '
        domainName = ArgsUtils.get('SERVER_NAME', '', wsgi)
        # REQUEST_URI with HTTP_REQUEST_URI as the fallback key.
        uriPath = ArgsUtils.get(
            'REQUEST_URI', ArgsUtils.get('HTTP_REQUEST_URI', '', wsgi), wsgi)
        info = ' <' + initials + domainName + uriPath + '>'
    else:
        info = ''
    threadID = ThreadUtils.getCurrentID()
    return StringUtils.toUnicode(
        TimeUtils.toFormat('[%a %H:%M <%S.%f>') + '<' + threadID + '>' + info + loc)
def prettyPrint(target, indentLevel =1):
    """Renders *target* as an indented multi-line debug string.

    Lists render as ``[index]: value`` lines, dicts as ``key: value`` lines,
    and any other object as one line per ``dir()`` attribute. Values that
    cannot be converted to unicode render as ``<UNPRINTABLE>``.

    BUG FIX: the original never incremented ``index`` in the list branch,
    so every element was labelled ``[0]``.
    """
    indent = '\n' + (indentLevel*' ')
    s = '\n'
    if isinstance(target, list):
        for index, t in enumerate(target):
            try:
                v = StringUtils.toUnicode(t)
            except Exception:
                v = '<UNPRINTABLE>'
            s += '%s[%s]: %s' % (indent, index, v)
        return s
    if isinstance(target, dict):
        for n, v in target.items():
            try:
                v = StringUtils.toUnicode(v)
            except Exception:
                v = '<UNPRINTABLE>'
            s += '%s%s: %s' % (indent, n, v)
        return s
    # Fallback: dump every attribute name reported by dir().
    for n in dir(target):
        v = getattr(target, n)
        try:
            v = StringUtils.toUnicode(v)
        except Exception:
            v = '<UNPRINTABLE>'
        s += '%s%s: %s' % (indent, n, v)
    return s
def insertColumn(self, sheetname, columnname, columnnumber):
    """Inserts a new empty column into the current doc.

    @param sheetname: The name of the sheet to be added to.
    @type sheetname: string
    @param columnname: The name of the new column to be added
    @type columnname: string
    @param columnnumber: Where to insert the new column (= how many come before it?)
    @type columnnumber: int

    NOTE(review): rows with fewer than columnnumber+1 cells receive no
    insertion at all — confirm rows are pre-padded by openSpreadsheet.
    """
    sheets = self._doc.spreadsheet.getElementsByType(Table)
    for sheet in sheets:
        if sheet.getAttribute('name') == sheetname:
            rownum = 0
            rows = sheet.getElementsByType(TableRow)
            for row in rows:
                colNum = 0
                cells = row.getElementsByType(TableCell)
                for cell in cells:
                    if colNum == columnnumber:
                        newCell = TableCell()
                        if rownum == 0:
                            # Header row: the new cell carries the column name.
                            p = P()
                            p.addText(StringUtils.toUnicode(columnname))
                            newCell.addElement(p)
                        else:
                            # Data rows: insert an empty paragraph.
                            p = P()
                            p.addText(StringUtils.toUnicode(''))
                            newCell.addElement(p)
                        row.insertBefore(newCell, cell)
                    colNum += 1
                rownum += 1
def prettyPrint(target, indentLevel=1):
    """Renders *target* as an indented multi-line debug string.

    Lists render as ``[index]: value`` lines, dicts as ``key: value`` lines,
    and any other object as one line per ``dir()`` attribute. Unconvertible
    values render as ``<UNPRINTABLE>``.

    BUG FIX: the original never incremented ``index`` in the list branch,
    so every element was labelled ``[0]``.
    """
    indent = '\n' + (indentLevel * ' ')
    s = '\n'
    if isinstance(target, list):
        for index, t in enumerate(target):
            try:
                v = StringUtils.toUnicode(t)
            except Exception:
                v = '<UNPRINTABLE>'
            s += '%s[%s]: %s' % (indent, index, v)
        return s
    if isinstance(target, dict):
        for n, v in target.items():
            try:
                v = StringUtils.toUnicode(v)
            except Exception:
                v = '<UNPRINTABLE>'
            s += '%s%s: %s' % (indent, n, v)
        return s
    # Fallback: dump every attribute name reported by dir().
    for n in dir(target):
        v = getattr(target, n)
        try:
            v = StringUtils.toUnicode(v)
        except Exception:
            v = '<UNPRINTABLE>'
        s += '%s%s: %s' % (indent, n, v)
    return s
def getUidTimecode(cls, prefix=None, suffix=None):
    """ Creates a timecode down to the microsecond for use in creating
        unique UIDs.

        BUG FIX: the original read ``datetime.microsecond`` directly on the
        datetime class (a descriptor, not an int); the current time's
        microsecond field is used instead. Assumes the module does
        ``from datetime import datetime`` — TODO confirm the import form.
    """
    out = Base64.to64(cls.getNowSeconds()) + '-' + Base64.to64(
        datetime.now().microsecond)
    return ((StringUtils.toUnicode(prefix) + '-') if prefix else '') + out \
        + (('-' + StringUtils.toUnicode(suffix)) if suffix else '')
def createErrorMessage(cls, message, error):
    """Builds a multi-line error description combining *message*, the active
    exception info from sys.exc_info(), and the supplied *error* object.

    NOTE(review): the str() fallbacks re-read sys.exc_info() from inside an
    except block, where it reflects the newly raised conversion failure
    rather than the original exception — behavior preserved as-is.
    """
    # Exception type, with a plain-str fallback, then a sentinel.
    try:
        errorType = StringUtils.toUnicode(sys.exc_info()[0])
    except Exception:
        try:
            errorType = str(sys.exc_info()[0])
        except Exception:
            errorType = '[[UNABLE TO PARSE]]'
    # Exception value, same fallback chain.
    try:
        errorValue = StringUtils.toUnicode(sys.exc_info()[1])
    except Exception:
        try:
            errorValue = str(sys.exc_info()[1])
        except Exception:
            errorValue = '[[UNABLE TO PARSE]]'
    # Caller-supplied error object; on failure this stringifies the
    # conversion exception itself, not the original error.
    try:
        error = StringUtils.toUnicode(error)
    except Exception as err:
        try:
            error = str(err)
        except Exception:
            error = '[[UNABLE TO PARSE]]'
    try:
        es = '%s\n TYPE: %s\n VALUE: %s\nERROR: %s\n' % (
            cls.formatAsString(message), errorType, errorValue, error)
    except Exception:
        try:
            es = '%s\n [[ERROR ATTRIBUTE PARSING FAILURE]]' % cls.formatAsString(message)
        except Exception:
            es = 'FAILED TO PARSE EXCEPTION'
    return es
def asWebRgbOpacity(self, opacity =None):
    """Returns this color as a CSS ``rgba(r, g, b, p%)`` string.

    When *opacity* is None the color's own stored opacity is used; the
    opacity is rendered as a percentage (0.0-1.0 scaled by 100).
    """
    rgb = self.asRgb(output=tuple)
    if opacity is None:
        opacity = self._opacity
    alphaText = StringUtils.toUnicode(100.0*opacity) + '%'
    return 'rgba(%s, %s, %s, %s)' % (
        StringUtils.toUnicode(rgb[0]),
        StringUtils.toUnicode(rgb[1]),
        StringUtils.toUnicode(rgb[2]),
        alphaText)
def formatAsString(cls, src, indentLevel=0):
    """Formats *src* for display.

    A list/tuple renders as a header (first element) followed by one line
    per remaining item; items whose ``key:`` prefix is shorter than 12
    characters are padded so the colons align. Anything else is returned
    as a single indented unicode string.
    """
    indents = ' ' * indentLevel
    if not isinstance(src, (list, tuple)):
        # Scalar source: just indent and convert.
        return StringUtils.toUnicode(indents + src)

    out = [StringUtils.toUnicode('%s%s' % (indents, src[0]))]
    indents += ' '
    entries = []
    maxIndex = 0
    for raw in src[1:]:
        text = StringUtils.toUnicode(raw)
        colon = text.find(':')
        # Only align on colons that appear within the first 12 characters.
        colon = 0 if (colon == -1 or colon >= 12) else colon
        maxIndex = max(colon, maxIndex)
        entries.append((colon, text))
    for colon, text in entries:
        pad = (' ' * max(0, maxIndex - colon)) if colon > 0 else ''
        out.append(indents + pad + text)
    return StringUtils.toUnicode('\n'.join(out))
def formatAsString(cls, src, indentLevel =0):
    """Formats *src* for display, aligning short ``key:`` prefixes.

    List/tuple sources render as a header line (the first element) plus one
    line per remaining item; other sources return a single indented string.
    """
    indents = ' '*indentLevel
    if isinstance(src, (list, tuple)):
        out = [StringUtils.toUnicode('%s%s' % (indents, src[0]))]
        indents += ' '
        parsed = []
        for raw in src[1:]:
            text = StringUtils.toUnicode(raw)
            mark = text.find(':')
            # Colons beyond column 12 (or absent) do not participate in alignment.
            if mark == -1 or mark >= 12:
                mark = 0
            parsed.append((mark, text))
        widest = max([mark for mark, _ in parsed]) if parsed else 0
        for mark, text in parsed:
            if mark > 0:
                out.append(indents + (' '*max(0, widest - mark)) + text)
            else:
                out.append(indents + text)
        return StringUtils.toUnicode('\n'.join(out))
    return StringUtils.toUnicode(indents + src)
def __str__(self):
    """Debug representation: class name, id, creation/update timestamps,
    and any extra model info supplied by _getReprString()."""
    stamp = '%m-%d-%y %H:%M:%S'
    created = self.cts.strftime(stamp) if self.cts else 'None'
    updated = self.upts.strftime(stamp) if self.upts else 'None'
    modelInfo = self._getReprString()
    suffix = (' %s' % modelInfo) if modelInfo else ''
    return '<%s[%s] cts[%s] upts[%s]%s>' % (
        self.__class__.__name__,
        StringUtils.toUnicode(self.i),
        StringUtils.toUnicode(created),
        StringUtils.toUnicode(updated),
        suffix)
def createUploadPolicy(self, key, durationSeconds, maxSizeBytes):
    """Returns a S3 upload policy and signature for this bucket with the
    specified key.

    The policy is private, expires after *durationSeconds*, and caps the
    upload size at *maxSizeBytes*.
    """
    bucket = StringUtils.toUnicode(self.bucketName)
    uploadKey = StringUtils.toUnicode(key)
    return self._conn.build_post_form_args(
        bucket_name=bucket,
        key=uploadKey,
        expires_in=durationSeconds,
        acl=StringUtils.toUnicode('private'),
        max_content_length=maxSizeBytes)
def getPrefix(self, *args, **kwargs):
    """Builds the timestamped log prefix, optionally suffixed with the
    source file and line where the log call originated."""
    if not self._locationPrefix:
        loc = ']'
    else:
        # Last stack frame identifies the call site.
        frame = self.getStackData()[-1]
        loc = ' -> %s #%s]' % (frame['file'], StringUtils.toUnicode(frame['line']))
    stamp = self.getTime(self.timezone).strftime('[%a %H:%M <%S.%f>')
    return StringUtils.toUnicode(stamp + loc)
def _compileUiFile(self, path, filename):
    """Compiles a Qt Designer .ui file to Python via pyside-uic, rewrites
    the generated class to the PySideUiFileSetup convention, and
    byte-compiles the result.

    Returns True on success, False (with a logged error) on any failure.

    BUG FIX: the regex replacement strings used '\\g<parentName>' in
    non-raw string literals — an invalid escape sequence that is a
    SyntaxWarning today and an error in future Python versions; they are
    now raw strings. Output files are also written via ``with`` so the
    handle cannot leak.
    """
    source = FileUtils.createPath(path, filename, isFile=True)
    if self._verbose:
        self._log.write('COMPILING: ' + source)
    if PyGlassEnvironment.isWindows:
        uicCommand = FileUtils.createPath(self._pythonPath, 'Scripts', 'pyside-uic.exe')
    else:
        uicCommand = 'pyside-uic'
    cmd = '%s %s' % (uicCommand, source)
    pipe = subprocess.Popen(
        cmd, shell=True,
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, error = pipe.communicate()
    if pipe.returncode or error:
        self._log.write('ERROR: Failed to compile %s widget: %s' % (str(source), str(error)))
        return False
    out = StringUtils.toUnicode(out)

    # Rename the generated class to the PySideUiFileSetup convention.
    res = WidgetUiCompiler._CLASS_NAME_RE.search(out)
    if not res:
        self._log.write('ERROR: Failed to find widget class name for ' + str(source))
        return False
    out = WidgetUiCompiler._CLASS_NAME_RE.sub('PySideUiFileSetup', out, 1)

    # Strip the setupUi wrapper, retaining the parent target name.
    res = WidgetUiCompiler._SETUP_UI_RE.search(out)
    if not res:
        self._log.write('ERROR: Failed to find widget setupUi method for ' + str(source))
        return False
    targetName = res.groupdict().get('parentName')
    out = WidgetUiCompiler._SETUP_UI_RE.sub(r'\g<parentName>', out, 1)

    # Strip the retranslateUi wrapper the same way.
    res = WidgetUiCompiler._RETRANSLATE_RE.search(out)
    if not res:
        self._log.write('ERROR: Failed to find widget retranslateUi method for ' + str(source))
        return False
    out = WidgetUiCompiler._RETRANSLATE_RE.sub(r'\g<parentName>', out, 1)

    out = StringUtils.toUnicode(out)
    # Redirect all self.* references at the parent target object.
    out = WidgetUiCompiler._SELF_RE.sub(targetName + '.', out)

    dest = FileUtils.createPath(path, filename[:-3] + '.py', isFile=True)
    if os.path.exists(dest):
        os.remove(dest)
    with open(dest, 'w+') as f:
        f.write(out)
    py_compile.compile(dest)
    return True
def getUidTimecode(cls, prefix=None, suffix=None):
    """Creates a timecode down to the microsecond for use in creating
    unique UIDs.

    BUG FIX: the original read ``datetime.microsecond`` directly on the
    datetime class (a descriptor, not an int); the current time's
    microsecond field is used instead. Assumes the module does
    ``from datetime import datetime`` — TODO confirm the import form.
    """
    out = Base64.to64(cls.getNowSeconds()) + "-" + Base64.to64(
        datetime.now().microsecond)
    return (
        ((StringUtils.toUnicode(prefix) + "-") if prefix else "")
        + out
        + (("-" + StringUtils.toUnicode(suffix)) if suffix else "")
    )
def openSpreadsheet(self):
    """(Re)Loads the spreadsheet.

    Normalizes the ODF table in place: measures the data width from the
    header row, expands compressed repeated cells into individual cells,
    trims trailing cells beyond the data width, and appends three blank
    columns to every row.
    """
    self._doc = load(self._filepath)
    rows = self._doc.spreadsheet.getElementsByType(TableRow)
    dataWidth = 1
    # Determine data-width (as opposed to trailing blank cells)
    cells = rows[0].getElementsByType(TableCell)
    for cell in cells[1:]:
        pl = cell.getElementsByType(P)
        # A header cell counts only if it holds non-empty text.
        if len(pl) > 0 and (pl[0].firstChild) and len(
                StringUtils.toUnicode(pl[0].firstChild)) > 0:
            dataWidth += 1
        else:
            break
    # Expand out / decompress repeated cells (e.g. number-columns-repeated="2")
    for row in rows:
        cells = row.getElementsByType(TableCell)
        colNum = 0
        for cell in cells:
            if colNum < dataWidth:
                repeated = int(
                    cell.getAttribute('numbercolumnsrepeated') or 0)
                pl = cell.getElementsByType(P)
                if repeated > 1:
                    if len(pl) > 0 and pl[0].firstChild and len(
                            StringUtils.toUnicode(pl[0].firstChild)) > 0:
                        # Non-empty repeated cell: clone its text N times.
                        for i in range(repeated):
                            c = TableCell()
                            p = P()
                            p.addText(
                                StringUtils.toUnicode(pl[0].firstChild))
                            c.addElement(p)
                            row.insertBefore(c, cell)
                        row.removeChild(cell)
                    else:
                        # Empty repeated cell: only pad up to the data width.
                        for i in range(min(repeated, dataWidth - colNum)):
                            c = TableCell()
                            p = P()
                            p.addText(StringUtils.toUnicode(''))
                            c.addElement(p)
                            row.insertBefore(c, cell)
                        row.removeChild(cell)
            else:
                # Beyond the data width: drop the (blank filler) cell.
                row.removeChild(cell)
            colNum += 1
        # Add a constant 3 trailing columns
        for i in range(3):
            c = TableCell()
            p = P()
            p.addText(StringUtils.toUnicode(''))
            c.addElement(p)
            row.addElement(c)
def secondsToDurationTimecode(cls, seconds):
    """ Turns the specified number of seconds (including fractional
        seconds) into a durational timecode of the format HH:MM:SS.000 """
    time = cls.explodeElapsedTime(seconds)
    secs = int(time['seconds'])
    millis = int(round(1000.0*(time['seconds'] - float(secs))))
    # Zero-pad each field to the fixed HH:MM:SS.mmm width.
    parts = [
        StringUtils.toUnicode(time['hours']).zfill(2),
        StringUtils.toUnicode(time['minutes']).zfill(2),
        StringUtils.toUnicode(secs).zfill(2)]
    return ':'.join(parts) + '.' + StringUtils.toUnicode(millis).zfill(3)
def openSpreadsheet(self):
    """(Re)Loads the spreadsheet.

    Normalizes the ODF table in place: measures the data width from the
    header row, expands compressed repeated cells into individual cells,
    trims trailing cells beyond the data width, and appends three blank
    columns to every row.
    """
    self._doc = load(self._filepath)
    rows = self._doc.spreadsheet.getElementsByType(TableRow)
    dataWidth = 1
    # Determine data-width (as opposed to trailing blank cells)
    cells = rows[0].getElementsByType(TableCell)
    for cell in cells[1:]:
        pl = cell.getElementsByType(P)
        # A header cell counts only if it holds non-empty text.
        if len(pl) > 0 and (pl[0].firstChild) and len(StringUtils.toUnicode(pl[0].firstChild)) > 0:
            dataWidth += 1
        else:
            break
    # Expand out / decompress repeated cells (e.g. number-columns-repeated="2")
    for row in rows:
        cells = row.getElementsByType(TableCell)
        colNum = 0
        for cell in cells:
            if colNum < dataWidth:
                repeated = int(cell.getAttribute('numbercolumnsrepeated') or 0)
                pl = cell.getElementsByType(P)
                if repeated > 1:
                    if len(pl) > 0 and pl[0].firstChild and len(StringUtils.toUnicode(pl[0].firstChild)) > 0:
                        # Non-empty repeated cell: clone its text N times.
                        for i in range(repeated):
                            c = TableCell()
                            p = P()
                            p.addText(StringUtils.toUnicode(pl[0].firstChild))
                            c.addElement(p)
                            row.insertBefore(c, cell)
                        row.removeChild(cell)
                    else:
                        # Empty repeated cell: only pad up to the data width.
                        for i in range(min(repeated, dataWidth-colNum)):
                            c = TableCell()
                            p = P()
                            p.addText(StringUtils.toUnicode(''))
                            c.addElement(p)
                            row.insertBefore(c, cell)
                        row.removeChild(cell)
            else:
                # Beyond the data width: drop the (blank filler) cell.
                row.removeChild(cell)
            colNum += 1
        # Add a constant 3 trailing columns
        for i in range(3):
            c = TableCell()
            p = P()
            p.addText(StringUtils.toUnicode(''))
            c.addElement(p)
            row.addElement(c)
def unpack(self, dataType, length):
    """Reads *length* bytes from the stream and unpacks a single value of
    *dataType* using this reader's endianess. Raises AssertionError on a
    short read; re-raises struct.error after printing diagnostics."""
    data = StringUtils.unicodeToStr(self.read(length))
    actual = StringUtils.toUnicode(len(data))
    expected = StringUtils.toUnicode(length)
    assert len(data) == length, \
        u"[UNPACK ERROR]: Unexpected end of stream [%s | %s]" % (actual, expected)
    fmt = StringUtils.unicodeToStr(self.endianess + dataType)
    try:
        return struct.unpack(fmt, data)[0]
    except struct.error:
        # Dump what was read to aid debugging before propagating.
        print(len(data))
        print(u"Unable to unpack '%r'" % data)
        raise
def logMessageToString(
        cls, logMessage, includePrefix =True, includeStack =True,
        prefixSeparator ='\n ', stackSeparator ='\n'
):
    """Flattens a structured log message dict into a single string,
    optionally including its prefix and stack-trace entries."""
    result = ''
    if includePrefix and 'prefix' in logMessage:
        result += StringUtils.toUnicode(logMessage['prefix']) + prefixSeparator
    result += StringUtils.toUnicode(logMessage['log'])
    if includeStack and 'stack' in logMessage:
        result += stackSeparator + StringUtils.toUnicode(logMessage['stack'])
    return result
def unpack(self, dataType, length):
    """Reads *length* bytes and unpacks one value of *dataType* using the
    reader's endianess. AssertionError on short read; struct.error is
    re-raised after printing diagnostics."""
    raw = StringUtils.unicodeToStr(self.read(length))
    assert len(raw) == length, \
        u"[UNPACK ERROR]: Unexpected end of stream [%s | %s]" % (
            StringUtils.toUnicode(len(raw)), StringUtils.toUnicode(length))
    try:
        result = struct.unpack(
            StringUtils.unicodeToStr(self.endianess + dataType), raw)
        return result[0]
    except struct.error:
        # Dump the offending bytes before propagating the error.
        print(len(raw))
        print(u"Unable to unpack '%r'" % raw)
        raise
def _buildHttpsReply(self, parent, request, url, operation, data, page):
    """Dispatches an HTTPS network request onto a worker thread and wires
    its completion signal back to _handleHttpsResult.

    NOTE(review): the parent and page parameters are accepted but unused
    here — confirm whether callers rely on that signature.
    """
    headers = dict()
    for header in request.rawHeaderList():
        # Qt raw headers are byte arrays; normalize names and values to unicode.
        headers[StringUtils.toUnicode(header)] = StringUtils.toUnicode(request.rawHeader(header))
    if data:
        data = data.readAll()
    thread = HttpsRemoteExecutionThread(
        parent=self,
        operation=operation,
        data=data,
        headers=headers,
        url=url.toString())
    thread.completeSignal.signal.connect(self._handleHttpsResult)
    thread.start()
def executeCommand(cls, cmd, remote=False, shell=True, wait=False, background=False, resultObj=False):
    """Runs *cmd* via subprocess and returns its output/exit code.

    background: fire-and-forget (no pipes, returns immediately).
    remote: inherit stdio (no pipes); *wait* controls blocking.
    Otherwise output is captured; returns a dict, or a CMD_RESULT_NT
    namedtuple when resultObj is True.

    BUG FIX: the original called pipe.wait() before communicate() on a
    PIPE-backed process, which can deadlock once the OS pipe buffer fills;
    communicate() already waits for the process to finish.
    """
    if shell and not StringUtils.isStringType(cmd):
        from pyaid.list.ListUtils import ListUtils
        cmd = ' '.join(ListUtils.itemsToString(cmd))

    # Background nohup processes shouldn't PIPE and once run should immediately return
    if background:
        subprocess.Popen(cmd, shell=shell)
        return {'error': '', 'out': '', 'code': 0, 'command': cmd}

    if remote:
        pipe = subprocess.Popen(cmd, shell=shell, stdout=None, stderr=None, stdin=None,
                                close_fds=False)
        if wait:
            pipe.wait()
        return {'error': '', 'out': '', 'code': 0, 'command': cmd}

    pipe = subprocess.Popen(cmd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # communicate() drains both pipes and waits for exit — no explicit wait().
    out, error = pipe.communicate()

    if resultObj:
        return cls.CMD_RESULT_NT(error=StringUtils.toUnicode(error),
                                 output=StringUtils.toUnicode(out),
                                 code=pipe.returncode,
                                 success=pipe.returncode == 0,
                                 command=cmd)
    return {
        'error': StringUtils.toUnicode(error),
        'out': StringUtils.toUnicode(out),
        'code': pipe.returncode,
        'command': cmd
    }
def put(
        self, key, contents, zipContents =False, maxAge =-1, eTag =None,
        expires =None, newerThanDate =None, policy =None
):
    """Uploads *contents* to this bucket at *key*.

    Returns False when newerThanDate indicates the remote copy is already
    current; otherwise uploads (optionally gzip-compressed with a
    Content-Encoding header) and returns True.
    """
    k = self.getKey(key)
    if not self._localIsNewer(k, newerThanDate):
        # Remote copy is at least as new; skip the upload.
        return False
    headers = self._generateHeaders(k.name, expires=expires, eTag=eTag, maxAge=maxAge)
    contents = StringUtils.toUnicode(contents)
    if zipContents:
        # Write a gzip temp file and upload it with the gzip encoding header.
        fd, tempPath = tempfile.mkstemp()
        f = gzip.open(tempPath, 'w+b')
        f.write(contents.encode('utf-8', 'ignore'))
        f.close()
        headers['Content-Encoding'] = 'gzip'
        k.set_contents_from_filename(filename=tempPath, headers=headers, policy=policy)
        os.close(fd)
        if os.path.exists(tempPath):
            os.remove(tempPath)
        return True
    k.set_contents_from_string(contents, headers=headers, policy=policy)
    return True
def __init__(self):
    """Creates a new instance of UniqueObject."""
    # NOTE: += on the class-level counter rebinds it as an instance attribute.
    self._INSTANCE_INDEX += 1
    indexText = StringUtils.toUnicode(self._INSTANCE_INDEX)
    randomTail = StringUtils.getRandomString(8)
    self._instanceUid = TimeUtils.getUidTimecode(
        prefix=self.__class__.__name__,
        suffix=indexText + '-' + randomTail)
def _handleResponseReady(self, request, response):
    """Event handler for the response object being ready for use."""
    if self._cacheControlPublic:
        response.cache_control = "public"

    #-------------------------------------------------------------------------------------------
    # Cache Expiration: Set the caching values according to the _expires property
    rep = self._explicitResponse
    if rep is None or (isinstance(rep, ViewResponse) and rep.allowCaching):
        # Fall back to 0 (no caching) when no expiration is configured.
        response.cache_control.max_age = self.expires if not self.expires is None else 0
    else:
        response.cache_control.max_age = 0

    #-------------------------------------------------------------------------------------------
    # Cache Validators
    if self._etag is not None:
        response.etag = StringUtils.toUnicode(self._etag)
    if self._lastModified is not None:
        response.last_modified = self._lastModified

    # If required encode the response headers as strings to prevent unicode errors. This is
    # necessary for certain WSGI server applications, e.g. flup.
    if self.ziggurat.strEncodeEnviron:
        for n, v in DictUtils.iter(response.headers):
            if StringUtils.isStringType(v):
                response.headers[n] = StringUtils.toStr2(v)

    # Clean up per-thread sessions.
    ConcreteModelsMeta.cleanupSessions()
def open(self):
    """Loads, caches, and returns the raw template source.

    On first call the file is read, any FLAGS_PATTERN matches are parsed
    into normalized flag names appended to self._flags, and compile-time
    variable replacements are applied. Subsequent calls return the cached
    source (which includes a trailing newline; the first call returns the
    source without it).
    """
    if self._source:
        return self._source

    f = open(self.path, 'r')
    raw = f.read()
    f.close()

    res = self._cls.FLAGS_PATTERN.finditer(raw)
    if res:
        for r in res:
            # NOTE(review): the loop variable f shadows the (already closed)
            # file handle above — harmless but confusing.
            for f in r.group('flags').strip().split(','):
                # Normalize: lowercase, strip spaces/hyphens/underscores.
                f = f.strip().lower().replace(' ', '').replace('-', '').replace('_', '')
                if f and not f in self._flags:
                    self._flags.append(f)

    #-------------------------------------------------------------------------------------------
    # REPLACE COMPILE VARIABLES
    reps = {} # NONE AT THE MOMENT
    for n, v in reps.items():
        raw = raw.replace(n, StringUtils.toUnicode(v))

    self._source = raw + '\n'
    return raw
def moveColumn(self, sheetname, oldcolumn, newcolumn):
    """Replaces the column oldcolumn with newcolumn and deletes newcolumn.
    This function assumes: oldcolumn > newcolumn.

    @param sheetname: The name of the sheet to be operated on.
    @type sheetname: string
    @param oldcolumn: The column to move data from.
    @type oldcolumn: int
    @param newcolumn: The column to move data to.
    @type newcolumn: int

    NOTE(review): if the oldcolumn > newcolumn assumption is violated,
    newcolumncell is referenced before assignment and raises.
    """
    sheets = self._doc.spreadsheet.getElementsByType(Table)
    for sheet in sheets:
        if sheet.getAttribute('name') == sheetname:
            rows = sheet.getElementsByType(TableRow)
            for row in rows:
                colNum = 0
                cells = row.getElementsByType(TableCell)
                for cell in cells:
                    if colNum == newcolumn:
                        # Remember the destination cell and clear its content.
                        newcolumncell = cell
                        pl = cell.getElementsByType(P)
                        for p in pl:
                            cell.removeChild(p)
                    elif colNum == oldcolumn:
                        # Copy the source cell's text into the destination.
                        pl = cell.getElementsByType(P)
                        if len(pl) > 0:
                            p = P()
                            if pl[0].firstChild:
                                p.addText(StringUtils.toUnicode(pl[0].firstChild))
                            newcolumncell.addElement(p)
                    colNum += 1
def moveColumn(self, sheetname, oldcolumn, newcolumn):
    """Replaces the column oldcolumn with newcolumn and deletes newcolumn.
    This function assumes: oldcolumn > newcolumn.

    @param sheetname: The name of the sheet to be operated on.
    @type sheetname: string
    @param oldcolumn: The column to move data from.
    @type oldcolumn: int
    @param newcolumn: The column to move data to.
    @type newcolumn: int

    NOTE(review): if the oldcolumn > newcolumn assumption is violated,
    newcolumncell is referenced before assignment and raises.
    """
    sheets = self._doc.spreadsheet.getElementsByType(Table)
    for sheet in sheets:
        if sheet.getAttribute('name') == sheetname:
            rows = sheet.getElementsByType(TableRow)
            for row in rows:
                colNum = 0
                cells = row.getElementsByType(TableCell)
                for cell in cells:
                    if colNum == newcolumn:
                        # Remember the destination cell and clear its content.
                        newcolumncell = cell
                        pl = cell.getElementsByType(P)
                        for p in pl:
                            cell.removeChild(p)
                    elif colNum == oldcolumn:
                        # Copy the source cell's text into the destination.
                        pl = cell.getElementsByType(P)
                        if len(pl) > 0:
                            p = P()
                            if pl[0].firstChild:
                                p.addText(
                                    StringUtils.toUnicode(
                                        pl[0].firstChild))
                            newcolumncell.addElement(p)
                    colNum += 1
def __init__(self, src ='', debug =False, blockDefs =None, debugData =None, **kwargs):
    """Creates a new instance of ClassTemplate."""
    self._log = ArgsUtils.getLogger(self, kwargs)
    self._debugData = debugData
    self._debug = debug
    src = StringUtils.toUnicode(src)
    # Normalize line endings; optionally strip leading/trailing blank lines.
    self._raw = src.replace('\r','')
    if ArgsUtils.get('stripSource', True, kwargs):
        self._raw = self._raw.strip('\n')
    self._analyzed = False
    self._errors = []
    self._blocks = []
    self._bookmarks = []
    self._initialBlock = ArgsUtils.get('initialBlock', None, kwargs)
    # blockDefs may be a single definition, a ready-made mapping, or a list;
    # anything else falls back to the default root definition set.
    if isinstance(blockDefs, BlockDefinition):
        self._blockDefs = {'root':blockDefs}
    elif isinstance(blockDefs, dict):
        self._blockDefs = blockDefs
    elif isinstance(blockDefs, list):
        self._blockDefs = {'root':blockDefs}
    else:
        self._blockDefs = {
            'root':[
                BlockDefinition.createQuoteDef(BlockDefinition.BLOCKED),
                BlockDefinition.createLiteralDef(BlockDefinition.BLOCKED),
                BlockDefinition.createParensDef(),
                BlockDefinition.createBracketsDef(),
                BlockDefinition.createBracesDef(),
            ],
        }
def __init__(self):
    """Creates a new instance of UniqueObject."""
    # NOTE: += on the class-level counter rebinds it as an instance attribute.
    self._INSTANCE_INDEX += 1
    suffix = '%s-%s' % (
        StringUtils.toUnicode(self._INSTANCE_INDEX),
        StringUtils.getRandomString(8))
    self._instanceUid = TimeUtils.getUidTimecode(
        prefix=self.__class__.__name__, suffix=suffix)
def executeCommand(
        cls, cmd, remote =False, shell =True, wait =False, background =False,
        resultObj =False
):
    """Runs *cmd* via subprocess and returns its output/exit code.

    background: fire-and-forget (no pipes, returns immediately).
    remote: inherit stdio (no pipes); *wait* controls blocking.
    Otherwise output is captured; returns a dict, or a CMD_RESULT_NT
    namedtuple when resultObj is True.

    BUG FIX: the original called pipe.wait() before communicate() on a
    PIPE-backed process, which can deadlock once the OS pipe buffer fills;
    communicate() already waits for the process to finish.
    """
    if shell and not StringUtils.isStringType(cmd):
        from pyaid.list.ListUtils import ListUtils
        cmd = ' '.join(ListUtils.itemsToString(cmd))

    # Background nohup processes shouldn't PIPE and once run should immediately return
    if background:
        subprocess.Popen(cmd, shell=shell)
        return {'error':'', 'out':'', 'code':0, 'command':cmd}

    if remote:
        pipe = subprocess.Popen(
            cmd, shell=shell, stdout=None, stderr=None, stdin=None, close_fds=False)
        if wait:
            pipe.wait()
        return {'error':'', 'out':'', 'code':0, 'command':cmd}

    pipe = subprocess.Popen(
        cmd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # communicate() drains both pipes and waits for exit — no explicit wait().
    out, error = pipe.communicate()

    if resultObj:
        return cls.CMD_RESULT_NT(
            error=StringUtils.toUnicode(error),
            output=StringUtils.toUnicode(out),
            code=pipe.returncode,
            success=pipe.returncode == 0,
            command=cmd)
    return {
        'error':StringUtils.toUnicode(error),
        'out':StringUtils.toUnicode(out),
        'code':pipe.returncode,
        'command':cmd}
def getStackData(stackSource =None):
    """Converts raw stack frames (path, line, function, code tuples) into a
    list of dicts with unicode-normalized fields."""
    if not stackSource:
        stackSource = Logger.getRawStack()
    result = []
    for frame in stackSource:
        fullPath = StringUtils.toUnicode(frame[0])
        result.append({
            'path':fullPath,
            'internal':True,
            'dir':StringUtils.toUnicode(os.path.dirname(fullPath)),
            'file':StringUtils.toUnicode(os.path.basename(fullPath).replace('.py','')),
            'line':frame[1],
            'function':StringUtils.toUnicode(frame[2]),
            'code':StringUtils.toUnicode(frame[3])})
    return result
def secondsToDurationTimecode(cls, seconds):
    """ Turns the specified number of seconds (including fractional
        seconds) into a durational timecode of the format HH:MM:SS.000 """
    time = cls.explodeElapsedTime(seconds)
    wholeSeconds = int(time["seconds"])
    fraction = time["seconds"] - float(wholeSeconds)
    millis = int(round(1000.0 * fraction))
    # Assemble the zero-padded HH:MM:SS.mmm string in one formatting pass.
    return "%s:%s:%s.%s" % (
        StringUtils.toUnicode(time["hours"]).zfill(2),
        StringUtils.toUnicode(time["minutes"]).zfill(2),
        StringUtils.toUnicode(wholeSeconds).zfill(2),
        StringUtils.toUnicode(millis).zfill(3))
def printBucketContents(self, path, fileFilter, logger=None):
    """Lists keys under *path* matching *fileFilter* and writes a summary
    line plus one indexed line per key to the logger (or stdout)."""
    keys = self.listKeys(path, fileFilter)

    def emit(text):
        # Route output through the logger when one was supplied.
        if logger:
            logger.write(text)
        else:
            print(text)

    emit('Displaying %s results for %s/%s.' % (
        StringUtils.toUnicode(len(keys)),
        self._bucketName,
        StringUtils.toUnicode(path)))
    for index, obj in enumerate(keys):
        emit(' ' + StringUtils.toUnicode(index) + ' - ' + obj.name)
def name(self):
    """ Human-readable display name for the track, based of its properties. """
    side = 'L' if self.left else 'R'
    kind = 'P' if self.pes else 'M'
    if self.number:
        number = StringUtils.toUnicode(int(self.number))
    else:
        number = '*'
    return '%s%s%s' % (side, kind, number)
def toPrettyElapsedTime(cls, elapsedMilliseconds):
    """ Returns a pretty elapsed time based on the number of milliseconds
        elapsed argument.

        Formats: "N min(s)" / "N sec(s)" / "N ms" for exact values, else a
        zero-padded "MM:SS.mm"-style composite.

        FIX: unit extraction now uses exact integer divmod instead of
        int(float(t) / 60000.0), which could drift for very large values
        due to float precision.
    """
    t = int(elapsedMilliseconds)
    if t == 0:
        return "0"
    hasMinutes = False
    hasSeconds = False
    out = ""
    if t >= 60000:
        hasMinutes = True
        cVal, t = divmod(t, 60000)
        s = StringUtils.toUnicode(cVal)
        if t == 0:
            # Whole minutes: short "N min(s)" form.
            return s + " min" + ("s" if cVal > 1 else "")
        out += s.zfill(2) + ":"
    if t >= 1000:
        hasSeconds = True
        cVal, t = divmod(t, 1000)
        s = StringUtils.toUnicode(cVal)
        if t == 0 and not hasMinutes:
            # Whole seconds with no minute part: short "N sec(s)" form.
            return s + " sec" + ("s" if cVal > 1 else "")
        out += s.zfill(2)
    elif hasMinutes:
        # Minutes present but under a second remaining: pad the seconds field.
        out += "00"
    if t == 0:
        return out
    s = StringUtils.toUnicode(int(round(t)))
    if not hasMinutes and not hasSeconds:
        return s + " ms"
    return out + "." + s.zfill(2)
def toPrettyElapsedTime(cls, elapsedMilliseconds):
    """ Returns a pretty elapsed time based on the number of milliseconds
        elapsed argument: "N min(s)" / "N sec(s)" / "N ms" for exact
        values, else a zero-padded composite timecode. """
    millis = int(elapsedMilliseconds)
    if millis == 0:
        return '0'

    def plural(count):
        return 's' if count > 1 else ''

    result = ''
    hasMinutes = False
    hasSeconds = False
    if millis >= 60000:
        hasMinutes = True
        count = int(float(millis) / 60000.0)
        text = StringUtils.toUnicode(count)
        millis -= count * 60000
        if millis == 0:
            # Exact minutes: short-form result.
            return text + ' min' + plural(count)
        result += text.zfill(2) + ':'
    if millis >= 1000:
        hasSeconds = True
        count = int(float(millis) / 1000.0)
        text = StringUtils.toUnicode(count)
        millis -= count * 1000
        if millis == 0 and not hasMinutes:
            # Exact seconds with no minutes: short-form result.
            return text + ' sec' + plural(count)
        result += text.zfill(2)
    elif hasMinutes:
        # Minutes present but under a second remaining: pad seconds field.
        result += '00'
    if millis == 0:
        return result
    text = StringUtils.toUnicode(int(round(millis)))
    if not hasMinutes and not hasSeconds:
        return text + ' ms'
    return result + '.' + text.zfill(2)
def getStackData(stackSource=None):
    """Converts raw stack frames (path, line, function, code tuples) into a
    list of dicts with unicode-normalized fields."""
    frames = stackSource if stackSource else Logger.getRawStack()
    entries = []
    for frame in frames:
        fullPath = StringUtils.toUnicode(frame[0])
        entries.append(dict(
            path=fullPath,
            internal=True,
            dir=StringUtils.toUnicode(os.path.dirname(fullPath)),
            file=StringUtils.toUnicode(os.path.basename(fullPath).replace('.py', '')),
            line=frame[1],
            function=StringUtils.toUnicode(frame[2]),
            code=StringUtils.toUnicode(frame[3])))
    return entries
def logMessageToString(cls, logMessage, includePrefix=True, includeStack=True,
                       prefixSeparator='\n ', stackSeparator='\n'):
    """Flattens a structured log message dict into one string, optionally
    including its prefix and stack-trace entries."""
    pieces = []
    if includePrefix and 'prefix' in logMessage:
        pieces.append(StringUtils.toUnicode(logMessage['prefix']))
        pieces.append(prefixSeparator)
    pieces.append(StringUtils.toUnicode(logMessage['log']))
    if includeStack and 'stack' in logMessage:
        pieces.append(stackSeparator)
        pieces.append(StringUtils.toUnicode(logMessage['stack']))
    return ''.join(pieces)
def flush(self):
    """Serializes buffered report entries to a lock-protected report file.

    Each buffered entry is merged with the shared meta data and JSON
    encoded; encoding failures are recorded inline rather than raised.
    One of _fileCount report slots is chosen at random and locked before
    appending. No-op on Windows or when the buffer is empty. Returns True
    on a successful write, False (implicitly None early) otherwise.
    """
    if not self._buffer:
        return

    # File locking below is POSIX-oriented; skip entirely on Windows.
    if sys.platform.startswith('win'):
        return

    items = []
    for b in self._buffer:
        try:
            d = DictUtils.merge(self._meta, b['data'])
            item = b['prefix'] + ' ' + JSON.asString(d)
        except Exception as err:
            item = '>> EXCEPTION: JSON ENCODING FAILED >> ' + str(err).replace('\n', '\t')
        try:
            item = item.encode('utf8', 'ignore')
        except Exception as err:
            item = '>> EXCEPTION: UNICODE DECODING FAILED >> ' + str(err).replace('\n', '\t')
        items.append(item)

    # Start at a random slot to spread contention across report files.
    count = self._fileCount
    offset = random.randint(0, count - 1)
    success = False
    path = self.getReportFolder() + self._timeCode + '/'
    if not os.path.exists(path):
        os.makedirs(path)

    for i in range(count):
        index = (i + offset) % count
        p = path + str(index) + '.report'
        lock = FileLock(p)
        # Skip slots we already hold unless this is the last candidate.
        if lock.i_am_locking() and i < count - 1:
            continue
        try:
            lock.acquire()
        except Exception:
            continue
        try:
            out = StringUtils.toUnicode('\n'.join(items) + '\n')
            f = open(p, 'a+')
            f.write(out.encode('utf8'))
            f.close()
            success = True
        except Exception as err:
            print("REPORTER ERROR: Unable to write report file.")
            print(err)
        lock.release()
        if success:
            break

    self.clear()
    return success
def printBucketContents(self, path, fileFilter, logger =None):
    """Lists keys under *path* matching *fileFilter* and writes a summary
    line plus one indexed line per key to the logger (or stdout)."""
    keys = self.listKeys(path, fileFilter)
    # Bind the output sink once: logger.write when available, else print.
    writer = logger.write if logger else print
    writer('Displaying %s results for %s/%s.' % (
        StringUtils.toUnicode(len(keys)),
        self._bucketName,
        StringUtils.toUnicode(path)))
    for index, obj in enumerate(keys):
        writer(' ' + StringUtils.toUnicode(index) + ' - ' + obj.name)
def write(self, folder=None, name=None):
    """ Writes the Cadence data to an encoded string and returns that
        string. If a path is specified, it will also write the data to
        that file before returning the string. If the writing process
        fails, None is returned instead.

        @@@param folder:string
            The folder where the file should be written relative to
            Cadence's root data directory.

        @@@param name:string
            If specified this value will override the CadenceData
            instance's name for the file to be written.

        BUG FIX: a failed file write previously fell through and returned
        the data string despite the docstring promising None; it now
        returns None. The file handle is also managed with ``with``.
    """
    data = {"version": CadenceData.VERSION}
    if self._configs:
        data[CadenceData._CONFIGS_KEY] = self._configs.toDict()
    if self._name:
        data["name"] = self._name
    if self._channels:
        channels = []
        for c in self._channels:
            channels.append(c.toDict())
        data["channels"] = channels

    try:
        data = json.dumps(data)
    except Exception as err:
        print("FAILED: Writing Cadence data.", err)
        return None

    if folder:
        # Resolve the output filename, enforcing the Cadence extension.
        name = name if name else (self._name if self._name else "data")
        if not name.endswith(CadenceData.EXTENSION):
            name += CadenceData.EXTENSION
        path = os.path.join(CadenceData.ROOT_DATA_PATH, folder, name)
        outDir = os.path.dirname(path)
        try:
            if not os.path.exists(outDir):
                os.makedirs(outDir)
        except Exception as err:
            print("FAILED: Unable to create output directory: " + str(outDir))
            return None
        try:
            with open(StringUtils.toUnicode(path), "w+") as f:
                f.write(data)
        except Exception as err:
            print("FAILED: Writing Cadence file.", err)
            return None
    return data
def createRevision(
        cls, databaseUrl, message, resourcesPath =None, localResourcesPath =None,
        info =None
):
    """Creates a new alembic migration revision for the given database.

    The revision message is prefixed with its sequence number. When *info*
    is provided, the generated revision script's docstring is extended
    with it by rewriting the file in place. Returns True.
    """
    config = cls.getConfig(
        databaseUrl=databaseUrl,
        resourcesPath=resourcesPath,
        localResourcesPath=localResourcesPath)
    previousRevisions = cls.getRevisionList(
        databaseUrl=databaseUrl,
        resourcesPath=resourcesPath,
        config=config)
    alembicCmd.revision(
        config=config,
        message=StringUtils.toUnicode(len(previousRevisions)) + ': ' + message)
    if not info:
        return True

    # Locate the just-generated revision script (named after the new head).
    scriptInfo = alembicScript.ScriptDirectory.from_config(config)
    scriptPath = None
    for item in os.listdir(scriptInfo.versions):
        if item.startswith(scriptInfo.get_current_head()):
            scriptPath = os.path.join(scriptInfo.versions, item)
            break
    if not scriptPath:
        return True

    # Inject the info text just before the docstring's closing quotes.
    info = StringUtils.toUnicode(info)
    f = open(scriptPath, 'r+')
    script = StringUtils.toUnicode(f.read())
    f.close()
    index = script.find('"""')
    index = script.find('"""', index + 1)
    script = script[:index] + info + '\n' + script[index:]
    f = open(scriptPath, 'w+')
    f.write(StringUtils.toStr2(script))
    f.close()
    return True
def _reformatValue(cls, value):
    """Recursively normalizes *value*: dicts are reformatted via
    cls._reformat, strings become unicode, and list/tuple elements are
    each reformatted (always returning a list). Other types pass through
    unchanged."""
    if isinstance(value, dict):
        return cls._reformat(value)
    if StringUtils.isStringType(value):
        return StringUtils.toUnicode(value)
    if isinstance(value, (list, tuple)):
        return [cls._reformatValue(item) for item in value]
    return value
def prettyPrint(cls, source, delimiter = ' | ', separator = ': '):
    """Renders a dict-like *source* as sorted ``key: value`` entries joined
    by *delimiter*; nested dicts/lists are recursively pretty-printed."""
    if not source:
        return '[EMPTY]'
    from pyaid.list.ListUtils import ListUtils
    entries = []
    for key, value in cls.iter(source):
        key = StringUtils.toUnicode(key)
        if isinstance(value, dict):
            rendered = '{ ' + cls.prettyPrint(
                value, delimiter=delimiter, separator=separator) + ' }'
        elif isinstance(value, StringUtils.BINARY_TYPE):
            rendered = StringUtils.strToUnicode(value)
        elif isinstance(value, (list, tuple)):
            rendered = ListUtils.prettyPrint(value)
        else:
            rendered = StringUtils.toUnicode(value)
        entries.append(key + separator + rendered)
    # Case-insensitive ordering for stable, readable output.
    entries.sort(key=StringUtils.TEXT_TYPE.lower)
    return delimiter.join(entries)
def getFormattedStackTrace(cls, skipStackLevels=0, maxLevels=0, stackSource=None):
    """ Get the exception stack trace if it exists, otherwise extract the
        generic stack trace instead.

        skipStackLevels trims frames from the end; maxLevels (when > 0)
        caps how many frames are rendered.
    """
    stack = Logger.getStackData(stackSource)
    stop = len(stack) - skipStackLevels
    start = max(0, stop - maxLevels) if maxLevels > 0 else 0
    lines = []
    index = start
    for item in stack[start:stop]:
        index += 1
        if item['internal']:
            lines.append('\n [%s]: %s.%s [#%s]\n code: %s' % (
                StringUtils.toUnicode(index),
                item['file'],
                item['function'],
                StringUtils.toUnicode(item['line']),
                item['code'][:100]))
        else:
            # External (non-project) frame: terser format.
            lines.append('\n [%s] EXT: %s {line: %s}' % (
                StringUtils.toUnicode(index),
                item['file'],
                StringUtils.toUnicode(item['line'])))
    return ''.join(lines)
def createErrorMessage(cls, message, error):
    """Builds a multi-line error description combining *message*, the active
    exception info from sys.exc_info(), and the supplied *error* object.

    NOTE(review): the str() fallbacks re-read sys.exc_info() from inside an
    except block, where it reflects the newly raised conversion failure
    rather than the original exception — behavior preserved as-is.
    """
    # Exception type, with a plain-str fallback, then a sentinel.
    try:
        errorType = StringUtils.toUnicode(sys.exc_info()[0])
    except Exception:
        try:
            errorType = str(sys.exc_info()[0])
        except Exception:
            errorType = '[[UNABLE TO PARSE]]'
    # Exception value, same fallback chain.
    try:
        errorValue = StringUtils.toUnicode(sys.exc_info()[1])
    except Exception:
        try:
            errorValue = str(sys.exc_info()[1])
        except Exception:
            errorValue = '[[UNABLE TO PARSE]]'
    # Caller-supplied error object; on failure this stringifies the
    # conversion exception itself, not the original error.
    try:
        error = StringUtils.toUnicode(error)
    except Exception as err:
        try:
            error = str(err)
        except Exception:
            error = '[[UNABLE TO PARSE]]'
    try:
        es = '%s\n TYPE: %s\n VALUE: %s\nERROR: %s\n' % (
            cls.formatAsString(message), errorType, errorValue, error)
    except Exception:
        try:
            es = '%s\n [[ERROR ATTRIBUTE PARSING FAILURE]]' % cls.formatAsString(
                message)
        except Exception:
            es = 'FAILED TO PARSE EXCEPTION'
    return es
def asAscii(cls, string):
    """Return *string* in a form safe for ASCII-limited log output.

    On Python 3 unicode strings are returned as-is (via toUnicode). On
    Python 2 the string is encoded to utf8, falling back to an NFKD
    ASCII-normalized form, and finally to a placeholder if both fail.
    Non-string values are returned unchanged.
    """
    # BUGFIX: 'sys.version > "3"' compared version *strings*
    # lexicographically; sys.version_info compares real version tuples.
    if sys.version_info[0] > 2:
        return StringUtils.toUnicode(string)
    if StringUtils.isStringType(string):
        try:
            return string.encode('utf8', 'ignore')
        except Exception:
            try:
                return unicodedata.normalize('NFKD', string).encode(
                    'ascii', 'ignore')
            except Exception:
                return '[[UNABLE TO DISPLAY LOG ENTRY IN ASCII CHARS]]'
    else:
        return string
def getContents(cls, path, raiseErrors=False, gzipped=False):
    """Read and return the (unicode) contents of the file at *path*.

    Returns None when the path does not exist or reading fails (unless
    *raiseErrors* is True, in which case the exception propagates). Set
    *gzipped* to True to transparently decompress a gzip file.
    """
    if not os.path.exists(path):
        return None
    try:
        # BUGFIX: files were opened 'r+' (read/write), which fails on
        # read-only files, and were leaked if read() raised. Open
        # read-only inside a context manager instead.
        if gzipped:
            with gzip.open(path, 'rb') as f:
                source = f.read()
        else:
            with open(path, 'r') as f:
                source = f.read()
        return StringUtils.toUnicode(source)
    except Exception as err:
        if raiseErrors:
            raise
        print(err)
        return None
def itemsToUnicode(cls, target, inPlace=False):
    """ Iterates through the elements of the target list and converts each of
        them to unicode strings, including decoding byte strings to unicode
        strings.

        When *inPlace* is True the target list is modified and returned;
        otherwise a new list is built and returned.

        BUGFIX: the original loop never advanced its index (an infinite
        loop), and when inPlace was False it assigned by index into an
        empty output list, which would have raised IndexError.
    """
    from pyaid.string.StringUtils import StringUtils

    output = target if inPlace else []
    for index in range(len(target)):
        source = target[index]
        if StringUtils.isStringType(source):
            converted = StringUtils.strToUnicode(source)
        else:
            converted = StringUtils.toUnicode(source)
        if inPlace:
            output[index] = converted
        else:
            output.append(converted)
    return output
def prettyPrint(cls, source, separator=', '):
    """Render the elements of *source* as a bracketed, separator-joined
    string. Nested lists/tuples recurse with a ',' separator, dicts are
    delegated to DictUtils.prettyPrint, and byte strings are decoded to
    unicode before joining."""
    from pyaid.dict.DictUtils import DictUtils

    rendered = []
    for item in source:
        if isinstance(item, (list, tuple)):
            item = cls.prettyPrint(item, separator=',')
        if isinstance(item, dict):
            item = DictUtils.prettyPrint(item)
        elif isinstance(item, StringUtils.BINARY_TYPE):
            item = StringUtils.strToUnicode(item)
        else:
            item = StringUtils.toUnicode(item)
        rendered.append(item)
    return '[%s]' % separator.join(rendered)
def put(self, key, contents, zipContents=False, maxAge=-1, eTag=None,
        expires=None, newerThanDate=None, policy=None):
    """Upload *contents* to the bucket under *key*.

    Returns False without uploading when *newerThanDate* is set and the
    remote copy is not older than it; returns True after a successful
    upload. When *zipContents* is True the contents are gzip-compressed
    through a temporary file and uploaded with a gzip Content-Encoding
    header; otherwise they are uploaded directly as a string.
    """
    k = self.getKey(key)
    if not self._localIsNewer(k, newerThanDate):
        return False
    headers = self._generateHeaders(
        k.name, expires=expires, eTag=eTag, maxAge=maxAge)
    contents = StringUtils.toUnicode(contents)

    if zipContents:
        fd, tempPath = tempfile.mkstemp()
        try:
            f = gzip.open(tempPath, 'w+b')
            try:
                f.write(contents.encode('utf-8', 'ignore'))
            finally:
                f.close()
            headers['Content-Encoding'] = 'gzip'
            k.set_contents_from_filename(
                filename=tempPath, headers=headers, policy=policy)
        finally:
            # BUGFIX: the mkstemp descriptor and temp file were leaked if
            # the gzip write or the upload raised.
            os.close(fd)
            if os.path.exists(tempPath):
                os.remove(tempPath)
        return True

    k.set_contents_from_string(contents, headers=headers, policy=policy)
    return True
def __init__(self, src='', debug=False, blockDefs=None, debugData=None, **kwargs):
    """Creates a new instance of ClassTemplate.

    The template source is normalized to unicode with carriage returns
    removed (and surrounding newlines stripped unless the 'stripSource'
    kwarg is False). *blockDefs* may be a single BlockDefinition, a dict
    of definition lists keyed by name, or a plain list; anything else
    falls back to the default root definitions.
    """
    self._log = ArgsUtils.getLogger(self, kwargs)
    self._debugData = debugData
    self._debug = debug

    source = StringUtils.toUnicode(src).replace('\r', '')
    if ArgsUtils.get('stripSource', True, kwargs):
        source = source.strip('\n')
    self._raw = source

    self._analyzed = False
    self._errors = []
    self._blocks = []
    self._bookmarks = []
    self._initialBlock = ArgsUtils.get('initialBlock', None, kwargs)

    # Branch order matters: a BlockDefinition instance is wrapped before
    # the dict/list checks are attempted.
    if isinstance(blockDefs, BlockDefinition):
        self._blockDefs = {'root': blockDefs}
    elif isinstance(blockDefs, dict):
        self._blockDefs = blockDefs
    elif isinstance(blockDefs, list):
        self._blockDefs = {'root': blockDefs}
    else:
        self._blockDefs = {'root': [
            BlockDefinition.createQuoteDef(BlockDefinition.BLOCKED),
            BlockDefinition.createLiteralDef(BlockDefinition.BLOCKED),
            BlockDefinition.createParensDef(),
            BlockDefinition.createBracketsDef(),
            BlockDefinition.createBracesDef()]}
def clearColumn(self, sheetname, column):
    """Clears a column of all data.

    @param sheetname: The name of the sheet to be operated on.
    @type sheetname: string

    @param column: The column to clear.
    @type column: int
    """
    for sheet in self._doc.spreadsheet.getElementsByType(Table):
        if sheet.getAttribute('name') != sheetname:
            continue
        for row in sheet.getElementsByType(TableRow):
            for colIndex, cell in enumerate(row.getElementsByType(TableCell)):
                if colIndex != column:
                    continue
                # Remove every paragraph in the cell, then leave a single
                # empty paragraph so the cell remains well-formed.
                for paragraph in cell.getElementsByType(P):
                    cell.removeChild(paragraph)
                blank = P()
                blank.addText(StringUtils.toUnicode(''))
                cell.addElement(blank)