def _writeError(self, data):
    """ Writes import error data to the logger, formatting it for human readable display. """
    source = {}
    if 'data' in data:
        for n, v in DictUtils.iter(data['data']):
            source[' '.join(n.split('_')).title()] = v

    indexPrefix = ''
    if 'index' in data:
        indexPrefix = ' [INDEX: %s]:' % data.get('index', 'Unknown')

    result = [
        'IMPORT ERROR%s: %s' % (indexPrefix, data['message']),
        'DATA: ' + DictUtils.prettyPrint(source)]

    if 'existing' in data:
        source = {}
        snapshot = data['existing'].snapshot
        if snapshot:
            snapshot = JSON.fromString(snapshot)
        if snapshot:
            for n, v in DictUtils.iter(snapshot):
                source[' '.join(n.split('_')).title()] = v
            result.append('CONFLICT: ' + DictUtils.prettyPrint(source))

    if 'error' in data:
        self._logger.writeError(result, data['error'])
    else:
        self._logger.write(result)
def __call__(self):
    super(ZigguratDataView, self).__call__()
    if isinstance(self._response, Response) or StringUtils.isStringType(self._response):
        return self._response

    DictUtils.cleanBytesToText(self._response, inPlace=True)
    return render_to_response('json', self._response, self._request)
def test_closestPointOnLine(self):
    """ doc... """
    count = 5000
    bound = 10000.0

    for i in range(count):
        start = PositionValue2D(
            x=random.uniform(-bound, bound),
            y=random.uniform(-bound, bound),
            xUnc=0.1, yUnc=0.1)
        end = PositionValue2D(
            x=random.uniform(-bound, bound) + start.x,
            y=random.uniform(-bound, bound) + start.y,
            xUnc=0.1, yUnc=0.1)

        line = LineSegment2D(start, end)
        if not line.isValid:
            continue

        target = line.getParametricPosition(random.uniform(0.0, 1.0))
        offset = random.uniform(1.0, bound)
        point = line.adjustPointAlongLine(target, offset, inPlace=False)

        debug = {
            'POINT':point,
            'TARGET':target,
            'OFFSET':offset,
            'DISTANCE':point.distanceTo(target) }

        self.assertAlmostEqual(
            offset, point.distanceTo(target).raw,
            msg='Invalid offset distance:\n' + DictUtils.prettyPrint(debug, delimiter='\n'))

        point.rotate(Angle(degrees=90.0*random.choice([1.0, -1.0])), target)
        self.assertAlmostEqual(
            offset, point.distanceTo(target).raw,
            msg='Invalid rotated offset distance:\n' + DictUtils.prettyPrint(debug, delimiter='\n'))

        pointOnLine = line.closestPointOnLine(point)
        xUnc = math.sqrt(pointOnLine.xUnc*pointOnLine.xUnc + target.xUnc*target.xUnc)
        yUnc = math.sqrt(pointOnLine.yUnc*pointOnLine.yUnc + target.yUnc*target.yUnc)

        debug = {
            'POINT':point,
            'RESULT':pointOnLine,
            'INDEX':i,
            'TARGET':target,
            'START':start,
            'END':end,
            'X_UNC':xUnc,
            'Y_UNC':yUnc }

        self.assertAlmostEqual(
            pointOnLine.x, target.x, delta=2.0*xUnc,
            msg='BAD RESULT [X]:\n' + DictUtils.prettyPrint(debug, delimiter='\n'))
        self.assertAlmostEqual(
            pointOnLine.y, target.y, delta=2.0*yUnc,
            msg='BAD RESULT [Y]:\n' + DictUtils.prettyPrint(debug, delimiter='\n'))
def _postAnalyze(self):
    self.logger.write('TRACKWAY COUNT: %s' % self._weightedStats.count)

    self._weightedStats.save()
    self._unweightedStats.save()

    for key, csv in DictUtils.iter(self._quartileStats):
        csv.save()

    for label, paths in DictUtils.iter(self._densityPlots):
        self.mergePdfs(paths, '%s-Densities.pdf' % label.replace(' ', '-'))
def getChannel(self, kind):
    """Doc..."""
    for n, v in DictUtils.iter(self._channels):
        if n == kind:
            return v
    return None
def addChannels(self, channels):
    if isinstance(channels, list):
        for v in channels:
            self.addChannel(v)
    elif isinstance(channels, dict):
        for n, v in DictUtils.iter(channels):
            self.addChannel(v)
def __init__(self, **kwargs):
    """Creates a new instance of ConfigReader."""
    self._configs = ArgsUtils.get('configs', dict(), kwargs)
    self._filenames = ArgsUtils.get('filenames', None, kwargs)
    self._configPath = ArgsUtils.get(
        'rootConfigPath', CadenceEnvironment.getConfigPath(), kwargs)

    if self._filenames:
        for n, v in DictUtils.iter(self._filenames):
            if not v:
                continue

            path = os.path.join(self._configPath, v)
            if not path.endswith('.cfg'):
                path += '.cfg'

            parser = ConfigParser.ConfigParser()
            if os.path.exists(path):
                parser.read(path)
            else:
                raise Exception(path + ' config file does not exist!')

            self._configs[n] = self._configParserToDict(parser)

    self._overrides = dict()
    self.setOverrides(ArgsUtils.get('overrides', None, kwargs))
def fromMessage(cls, message):
    if not message:
        return None

    try:
        if NimbleEnvironment.ENABLE_COMPRESSION:
            message = zlib.decompress(message)
        data = json.loads(message.replace(NimbleData._NEWLINE_ESCAPE, '\n').strip())
    except Exception as err:
        print('Corrupt Nimble Data:')
        print(str(message))
        print(err)
        return None

    data = DictUtils.cleanDictKeys(data)
    className = data['class']

    if className == cls.__name__:
        return NimbleData(**data)

    module = ''
    try:
        module = '.'.join(cls.__module__.split('.')[:-1]) + '.' + className
        res = __import__(module, globals(), locals(), [className])
        Source = getattr(res, className)
        return Source(**data)
    except Exception as err:
        print('Invalid Nimble data:')
        print('ERROR: ', err)
        print('MESSAGE:', message)
        print('DATA:', data)
        print('CLASS:', className)
        print('MODULE:', module)
        return None
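# Illustrative sketch (not part of the source): a standalone round-trip of the message framing
# that fromMessage appears to expect -- JSON with a 'class' key, an escaped-newline token, and
# optional zlib compression. The escape token and compression flag below are hypothetical
# stand-ins for NimbleData._NEWLINE_ESCAPE and NimbleEnvironment.ENABLE_COMPRESSION.
import json
import zlib

NEWLINE_ESCAPE = '##NEWLINE##'   # hypothetical escape token
ENABLE_COMPRESSION = True        # hypothetical environment flag

def encode_message(data):
    """Serializes a payload dict the way a matching toMessage() presumably would."""
    raw = json.dumps(data).replace('\n', NEWLINE_ESCAPE)
    return zlib.compress(raw.encode('utf-8')) if ENABLE_COMPRESSION else raw

def decode_message(message):
    """Mirrors the parsing half of fromMessage without the dynamic class lookup."""
    if ENABLE_COMPRESSION:
        message = zlib.decompress(message).decode('utf-8')
    return json.loads(message.replace(NEWLINE_ESCAPE, '\n').strip())

payload = {'class': 'NimbleData', 'kind': 'GENERAL', 'payload': {'value': 42}}
assert decode_message(encode_message(payload)) == payload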
def print_track(track, aSession):
    """
    @param track:
    @param aSession:
    @return:
    """
    limb_id = "{}{}".format(
        "l" if track.left else "r",
        "p" if track.pes else "m")

    print(track.echoForVerification())
    print(" size: (%s, %s) | field (%s, %s)" % (
        track.width, track.length, track.widthMeasured, track.lengthMeasured))

    aTrack = track.getAnalysisPair(aSession)
    print(" curve[#%s -> %s]: %s (%s)" % (
        aTrack.curveIndex, aTrack.curveSegment,
        NumericUtils.roundToSigFigs(aTrack.segmentPosition, 4),
        NumericUtils.roundToSigFigs(aTrack.curvePosition, 4)))

    print(" snapshot: {}\n".format(DictUtils.prettyPrint(track.snapshotData)))
    return dict(limb_id=limb_id, track=track, aTrack=aTrack)
def compileAllOnPath(path, rootPath=None, recursive=False, debug=False, trace=False,
                     force=False, compress=False):
    CoffeescriptBuilder._results = ""
    CoffeescriptBuilder._missing = {}

    if recursive:
        print("RECURSIVE COMPILE AT: " + path)

        def walker(paths, dirName, names):
            out = CoffeescriptBuilder._compileAllInDirectory(
                os.path.join(paths[0], dirName), paths[1],
                debug=debug, trace=trace, force=force, compress=compress)
            CoffeescriptBuilder._results += out["res"]
            for n, v in DictUtils.iter(out["missing"]):
                if n in CoffeescriptBuilder._missing:
                    continue
                CoffeescriptBuilder._missing[n] = v

        FileUtils.walkPath(path, walker, [path, rootPath])
        print("\n\nCOMPILATION RESULTS:" + CoffeescriptBuilder._results)

        if CoffeescriptBuilder._missing:
            print("\n\nMISSING IMPORTS:" + "\n\n")
            for n, v in DictUtils.iter(CoffeescriptBuilder._missing):
                print(v["class"] + " [LINE: #" + str(v["line"]) + " | " + v["package"] + "]")
    else:
        print("COMPILING DIRECTORY: " + path)
        CoffeescriptBuilder._compileAllInDirectory(
            path, rootPath, debug=debug, trace=trace, force=force, compress=compress)
def save(self, path=None):
    """ Saves the CSV file data to the specified path """
    if path is None:
        path = self.path

    if self.removeIfSavedEmpty and not self.rows:
        self.remove()
        return

    index = 0
    names = self.fieldNames
    if self.autoIndexFieldName:
        names.insert(0, self.autoIndexFieldName)

    try:
        with open(path, 'wb') as f:
            writer = csv.DictWriter(f, fieldnames=names, dialect=csv.excel)
            writer.writeheader()

            for row in self.rows:
                result = dict()
                if self.autoIndexFieldName:
                    index += 1
                    result[self.autoIndexFieldName] = index

                for key, spec in DictUtils.iter(self._fields):
                    value = row.get(key, spec.get('empty', ''))
                    name = spec.get('name', key)
                    if StringUtils.isTextType(value):
                        value = value.encode('latin-1')
                    result[name] = value

                writer.writerow(result)
        return True
    except Exception:
        return False
def echoModel(self):
    """ An example using a Ziggurat database model. Here a new entry of the ZigguratTest_Test
        model is created and added to the database, and its index in differing radices is
        returned in the response.

        NOTE: The model class is imported in-line in this example simply to allow use of the
        Hello Ziggurat examples for those in an environment without the required database
        support. """

    try:
        from ziggHello.models.zigguratTest.ZigguratTest_Test import ZigguratTest_Test
        model = ZigguratTest_Test.MASTER

        out = dict()
        for name, value in DictUtils.iter(self._router.ziggurat.environ):
            if name.upper() == name:
                out[name] = StringUtils.toUnicode(value)

        entry = model()
        entry.infoData = out
        model.session.add(entry)
        model.session.flush()
    except Exception as err:
        self._router.response['error'] = str(err)
        self._router.logger.writeError(u'MODEL ERROR', err)
        return

    self._router.response['index'] = [entry.i, entry.i16, entry.i36, entry.i64]
def _postAnalyze(self):
    """_postAnalyze doc..."""
    ratios = []

    for name, curve in DictUtils.iter(self.data):
        segments = curve.segments

        for i in ListUtils.rangeOn(segments):
            segment = segments[i]
            segmentLine = segment.line

            # If this is an extrapolated segment, use the length from the neighboring segment
            # instead of the artificial length of this segment.
            if segment == segments[0]:
                segmentLine = segments[i + 1].line
            elif segment == segments[-1]:
                segmentLine = segments[i - 1].line

            for pairData in segment.pairs:
                projectionLine = pairData["line"]
                ratios.append(100.0*projectionLine.length.raw/segmentLine.length.raw)

    h = Histogram(
        data=ratios,
        binCount=50,
        xLabel="Projection/Stride Ratio (%)",
        title="Relative Stride to Projection Length Ratios")
    h.shaveDataToXLimits()
    self._paths.append(h.save(path=self.getTempFilePath(extension="pdf")))

    self.mergePdfs(self._paths, "Curve-Projection.pdf")
def _handleResponseReady(self, request, response):
    """Event handler for the response object being ready for use."""
    if self._cacheControlPublic:
        response.cache_control = "public"

    #-------------------------------------------------------------------------------------------
    # Cache Expiration: Set the caching values according to the _expires property
    rep = self._explicitResponse
    if rep is None or (isinstance(rep, ViewResponse) and rep.allowCaching):
        response.cache_control.max_age = self.expires if self.expires is not None else 0
    else:
        response.cache_control.max_age = 0

    #-------------------------------------------------------------------------------------------
    # Cache Validators
    if self._etag is not None:
        response.etag = StringUtils.toUnicode(self._etag)
    if self._lastModified is not None:
        response.last_modified = self._lastModified

    # If required encode the response headers as strings to prevent unicode errors. This is
    # necessary for certain WSGI server applications, e.g. flup.
    if self.ziggurat.strEncodeEnviron:
        for n, v in DictUtils.iter(response.headers):
            if StringUtils.isStringType(v):
                response.headers[n] = StringUtils.toStr2(v)

    # Clean up per-thread sessions.
    ConcreteModelsMeta.cleanupSessions()
def echo(self):
    print('TARGET:', self.target)
    print('GAIT PHASE OFFSET:', self._phaseOffset)
    print('DUTY FACTOR:', self._dutyFactor)
    print('CHANNELS:')
    for n, v in DictUtils.iter(self._channels):
        print(v.toString())
def getColorNameAndValue(self):
    """ Finds the nearest named color by comparing all named colors """
    if self._rawColor == 0:
        return {
            'name':'Black',
            'value':0,
            'key':'black',
            'residual':0.0 }

    maxRange = 560.0
    nearestValue = None
    nearestName = None
    nearestRange = 360
    myColor = self.asHsl(output=list)
    poolColor = self.__class__(0)

    for name, value in DictUtils.iter(ColorNames.NAMES):
        poolColor.load(value)
        color = poolColor.asHsl(output=list)
        test = (myColor[0] - color[0])*(myColor[0] - color[0]) \
            + (myColor[1] - color[1])*(myColor[1] - color[1]) \
            + (myColor[2] - color[2])*(myColor[2] - color[2])

        if test < nearestRange:
            nearestValue = value
            nearestName = name
            nearestRange = test

        if nearestRange < 1:
            break

    return {
        'name':StringUtils.capitalizeWords(nearestName.replace('_', ' ')),
        'value':nearestValue,
        'key':nearestName,
        'residual':100.0*nearestRange/maxRange }
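# Illustrative sketch (not part of the source): the same nearest-match idea reduced to plain
# data -- squared component distance against a small pool of named colors. The pool, the HSL
# tuples, and the 560.0 normalization constant mirror the method above but are simplified,
# hypothetical stand-ins rather than the real ColorNames table.
NAMED_HSL = {
    'black': (0.0, 0.0, 0.0),
    'white': (0.0, 0.0, 100.0),
    'red': (0.0, 100.0, 50.0),
    'navy': (240.0, 100.0, 25.0)}

def nearest_named_color(hsl):
    best_name, best_value, best_range = None, None, float('inf')
    for name, value in NAMED_HSL.items():
        test = sum((a - b)*(a - b) for a, b in zip(hsl, value))
        if test < best_range:
            best_name, best_value, best_range = name, value, test
    return {'key': best_name, 'value': best_value, 'residual': 100.0*best_range/560.0}

print(nearest_named_color((5.0, 95.0, 48.0)))   # -> nearest key is 'red'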
def currentChildWidgetID(self):
    if not self._currentWidget:
        return None
    for key, widget in DictUtils.iter(self._widgets):
        if widget == self._currentWidget:
            return key
    return None
def _instantiateClass(self, Target, command):
    k = 'constructorArgs'
    conArgs = command[k] if k in command else None
    k = 'constructorKwargs'
    conKwargs = command[k] if k in command else None

    if conArgs and conKwargs:
        targetObject = Target(*conArgs, **DictUtils.cleanDictKeys(conKwargs))
    elif conArgs:
        targetObject = Target(*conArgs)
    elif conKwargs:
        targetObject = Target(**DictUtils.cleanDictKeys(conKwargs))
    else:
        targetObject = Target()
    return targetObject
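# Illustrative sketch (not part of the source): the constructor-argument dispatch above,
# exercised with a toy class and plain dicts. 'Widget' and the sample command dicts are
# hypothetical; cleanDictKeys is approximated here by coercing keys to str, which appears to be
# its role when kwargs arrive from decoded JSON.
class Widget(object):
    def __init__(self, label='unnamed', size=1):
        self.label = label
        self.size = size

def instantiate(Target, command):
    conArgs = command.get('constructorArgs')
    conKwargs = command.get('constructorKwargs')
    if conKwargs:
        conKwargs = {str(k): v for k, v in conKwargs.items()}

    if conArgs and conKwargs:
        return Target(*conArgs, **conKwargs)
    elif conArgs:
        return Target(*conArgs)
    elif conKwargs:
        return Target(**conKwargs)
    return Target()

w = instantiate(Widget, {'constructorArgs': ['button'], 'constructorKwargs': {'size': 3}})
assert (w.label, w.size) == ('button', 3)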
def echoAnalysisFlags(self, separator=' | '):
    """echoAnalysisFlags doc..."""
    out = []
    enums = Reflection.getReflectionDict(AnalysisFlagsEnum)
    for key, value in DictUtils.iter(enums):
        if value & self.analysisFlags:
            out.append(key)
    return ('[%s]' % separator.join(out)) if out else '--'
def echoImportFlags(self, separator=' | '):
    """echoImportFlags doc..."""
    out = []
    d = Reflection.getReflectionDict(ImportFlagsEnum)
    for key, value in DictUtils.iter(d):
        if value & self.importFlags:
            out.append(key)
    return ('[%s]' % separator.join(out)) if out else '--'
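# Illustrative sketch (not part of the source): the flag-echo pattern shared by
# echoAnalysisFlags and echoImportFlags, using an ordinary class in place of the
# Reflection.getReflectionDict enum lookup. The flag names and values are hypothetical.
class ImportFlagsDemo(object):
    HIGH_PRIORITY = 1
    NEEDS_REVIEW = 2
    AUTO_GENERATED = 4

def echo_flags(flags, enum_class, separator=' | '):
    out = []
    for key, value in vars(enum_class).items():
        if key.startswith('_') or not isinstance(value, int):
            continue
        if value & flags:
            out.append(key)
    return ('[%s]' % separator.join(out)) if out else '--'

print(echo_flags(5, ImportFlagsDemo))   # -> '[HIGH_PRIORITY | AUTO_GENERATED]'
print(echo_flags(0, ImportFlagsDemo))   # -> '--'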
def _convertDataToText(self):
    """Doc..."""
    out = []
    for agent in self._data:
        agent = DictUtils.lowerDictKeys(agent)
        out.append(u'user-agent: ' + agent.get('user_agent', u'*'))

    out.append(u'sitemap: ' + self.site.sitemap.targetUrl)
    return u'\n'.join(out)
def setFromDict(self, keysAndValues):
    if not keysAndValues:
        return

    self._loadSettings()
    for key, value in DictUtils.iter(keysAndValues):
        self._updateSetting(key, value)
    self._saveSettings()
def CLEAN_NAMES(cls):
    """ doc..."""
    if cls._CLEAN_NAMES:
        return cls._CLEAN_NAMES

    cls._CLEAN_NAMES = dict()
    for name, value in DictUtils.iter(cls.NAMES):
        cls._CLEAN_NAMES[name.replace('_', '').lower()] = value
    return cls._CLEAN_NAMES
def getByProperties(cls, session, **kwargs):
    """ Loads based on the current values set for the track. This form of loading is useful
        when the uid is not available, e.g. when importing data from the spreadsheet. """
    query = session.query(cls)
    for key, value in DictUtils.iter(kwargs):
        query = query.filter(getattr(cls, key) == value)
    return query.all()
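# Illustrative sketch (not part of the source): the same kwargs-to-filter pattern against a
# throwaway in-memory SQLAlchemy model. 'DemoTrack' and its columns are hypothetical; only the
# query-building loop mirrors getByProperties above.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class DemoTrack(Base):
    __tablename__ = 'demo_tracks'
    id = Column(Integer, primary_key=True)
    site = Column(String)
    number = Column(String)

def get_by_properties(session, cls, **kwargs):
    query = session.query(cls)
    for key, value in kwargs.items():
        query = query.filter(getattr(cls, key) == value)
    return query.all()

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([DemoTrack(site='BEB', number='1'), DemoTrack(site='CRO', number='1')])
session.commit()
assert len(get_by_properties(session, DemoTrack, site='BEB', number='1')) == 1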
def _executeCommand(cls, payload):
    cmd = payload['command']
    if cmd is None or (StringUtils.isStringType(cmd) and cmd not in globals()):
        return NimbleResponseData(
            kind=DataKindEnum.COMMAND,
            response=NimbleResponseData.FAILED_RESPONSE,
            error=DataErrorEnum.INVALID_COMMAND)

    if StringUtils.isStringType(cmd):
        targetObject = globals().get(cmd)
    else:
        if isinstance(cmd, dict):
            module = str(cmd['module'])
            target = str(cmd['target'])
            method = str(cmd['method']) if 'method' in cmd else None
        else:
            target = str(cmd[0])
            module = str(cmd[1]) if len(cmd) > 1 else None
            method = str(cmd[2]) if len(cmd) > 2 else None

        try:
            res = __import__(module, globals(), locals(), [target])
            Target = getattr(res, target)
            if method:
                m = getattr(Target, method)
                if m is None:
                    raise Exception(
                        '%s not found on %s. Unable to execute command.' %
                        (str(method), str(target)))
        except Exception as err:
            return NimbleResponseData(
                kind=DataKindEnum.COMMAND,
                response=NimbleResponseData.FAILED_RESPONSE,
                error=cls._getDetailedError('Failed to import remote command module', err))

        if method:
            targetObject = getattr(Target, method)
            if inspect.ismethod(targetObject) and targetObject.__self__ is None:
                targetObject = getattr(cls._instantiateClass(Target, cmd), method)
        elif inspect.isclass(Target):
            targetObject = cls._instantiateClass(Target, cmd)
        else:
            targetObject = Target

    try:
        result = targetObject(*payload['args'], **DictUtils.cleanDictKeys(payload['kwargs']))
        return cls.createReply(DataKindEnum.COMMAND, result)
    except Exception as err:
        return NimbleResponseData(
            kind=DataKindEnum.COMMAND,
            response=NimbleResponseData.FAILED_RESPONSE,
            error=cls._getDetailedError('Failed to execute command', err))
def _instantiateClass(cls, Target, command):
    k = 'constructorArgs'
    conArgs = command[k] if k in command else None
    k = 'constructorKwargs'
    conKwargs = command[k] if k in command else None

    if conArgs and conKwargs:
        targetObject = Target(*conArgs, **DictUtils.cleanDictKeys(conKwargs))
    elif conArgs:
        targetObject = Target(*conArgs)
    elif conKwargs:
        targetObject = Target(**DictUtils.cleanDictKeys(conKwargs))
    else:
        targetObject = Target()
    return targetObject
def equivalentProps(self, **kwargs):
    """ Iterates through the kwargs and checks whether the value of each specified property
        matches the corresponding value on this track instance. """
    for n, v in DictUtils.iter(kwargs):
        if getattr(self, n) != v:
            return False
    return True
def flush(self):
    if not self._buffer:
        return
    if sys.platform.startswith('win'):
        return

    items = []
    for b in self._buffer:
        try:
            d = DictUtils.merge(self._meta, b['data'])
            item = b['prefix'] + ' ' + JSON.asString(d)
        except Exception as err:
            item = '>> EXCEPTION: JSON ENCODING FAILED >> ' + str(err).replace('\n', '\t')

        try:
            item = item.encode('utf8', 'ignore')
        except Exception as err:
            item = '>> EXCEPTION: UNICODE DECODING FAILED >> ' + str(err).replace('\n', '\t')

        items.append(item)

    count = self._fileCount
    offset = random.randint(0, count - 1)
    success = False

    path = self.getReportFolder() + self._timeCode + '/'
    if not os.path.exists(path):
        os.makedirs(path)

    for i in range(count):
        index = (i + offset) % count
        p = path + str(index) + '.report'
        lock = FileLock(p)
        if lock.i_am_locking() and i < count - 1:
            continue

        try:
            lock.acquire()
        except Exception:
            continue

        try:
            out = StringUtils.toUnicode('\n'.join(items) + '\n')
            f = open(p, 'a+')
            f.write(out.encode('utf8'))
            f.close()
            success = True
        except Exception as err:
            print("REPORTER ERROR: Unable to write report file.")
            print(err)

        lock.release()
        if success:
            break

    self.clear()
    return success
def walker(paths, dirName, names):
    out = CoffeescriptBuilder._compileAllInDirectory(
        os.path.join(paths[0], dirName), paths[1],
        debug=debug, trace=trace, force=force, compress=compress)
    CoffeescriptBuilder._results += out["res"]
    for n, v in DictUtils.iter(out["missing"]):
        if n in CoffeescriptBuilder._missing:
            continue
        CoffeescriptBuilder._missing[n] = v
def _cleanupSettings(self, target=None):
    if not target:
        target = self._settings

    # Deleting entries while looping assumes DictUtils.iter returns a materialized list of
    # items rather than a live view of the dictionary.
    for n, v in DictUtils.iter(target):
        if isinstance(v, dict):
            self._cleanupSettings(target=v)
            if not v:
                del target[n]

    return True
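# Illustrative sketch (not part of the source): the same prune-empty-branches idea on plain
# dicts. Iterating over a list() copy sidesteps the mutate-while-iterating concern noted in the
# method above.
def prune_empty(settings):
    for key, value in list(settings.items()):
        if isinstance(value, dict):
            prune_empty(value)
            if not value:
                del settings[key]
    return settings

data = {'ui': {'theme': 'dark', 'stale': {}}, 'cache': {'old': {}}}
assert prune_empty(data) == {'ui': {'theme': 'dark'}}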
def _toSerializedDict(cls, src):
    out = dict()
    for n, v in DictUtils.iter(src):
        if isinstance(v, Vector3D):
            v = v.toSerialDict()
        elif isinstance(v, dict):
            v = cls._toSerializedDict(v)
        out[n] = v
    return out
def _executeMayaCommand(cls, payload, createReply=True):
    cmd = getattr(mc, str(payload['command']), None)
    if cmd is None:
        return NimbleResponseData(
            kind=DataKindEnum.MAYA_COMMAND,
            error=DataErrorEnum.UNRECOGNIZED_MAYA_COMMAND,
            response=NimbleResponseData.FAILED_RESPONSE)

    args = None
    kwargs = None
    try:
        kwargs = DictUtils.cleanDictKeys(payload['kwargs'], True)
        args = payload['args']

        try:
            result = cmd(*args, **kwargs)
        except Exception:
            # Attempts to remove an empty key if one is somehow created
            if '' in kwargs:
                del kwargs['']
            else:
                raise
            result = cmd(*args, **kwargs)

        if createReply:
            return cls.createReply(DataKindEnum.MAYA_COMMAND, result)
        else:
            return result
    except Exception as err:
        print('ERROR:', cmd, args, kwargs)
        message = '\n'.join([
            'Failed to execute maya command with payload:',
            'CMD {}'.format(cmd),
            'PAYLOAD: {}'.format(DictUtils.prettyPrint(payload)),
            'ARGS: {}'.format(args),
            'KWARGS: {}'.format(DictUtils.prettyPrint(kwargs))])
        return NimbleResponseData(
            kind=DataKindEnum.MAYA_COMMAND,
            error=cls._getDetailedError(message, err),
            response=NimbleResponseData.FAILED_RESPONSE)
def _parseElement(name, value, configData):
    if isinstance(value, list):
        configData.setItem(name, value[0], value[1])
    elif isinstance(value, str):
        configData.setItem(name, 's', value)
    elif isinstance(value, (int, float)):
        configData.setItem(name, 'n', value)
    elif isinstance(value, dict):
        cd = ConfigData()
        for n, v in DictUtils.iter(value):
            JSONConfigParser._parseElement(n, v, cd)
        configData.setItem(name, 'o', cd)
def _compileAllInDirectory(path, rootPath=None, debug=False, trace=False, force=False,
                           compress=False):
    results = ''
    missing = {}
    count = 0

    for f in CoffeescriptBuilder.getScriptsInPath(path):
        target = CoffeescriptDependency(f, rootPath)
        if not (target.exists and (target.isExec or target.isLib)):
            continue

        c = CoffeescriptBuilder(
            target, rootPath, debug=debug, trace=trace, force=force, compress=compress)
        c.construct()
        count += 1

        for n, v in DictUtils.iter(c.report):
            num = max(0, 60 - len(n))
            results += '\n' + n + ':' + ('.'*num)
            if v == 0:
                results += 'SUCCESS'
            elif v > 0:
                results += 'COMPILATION FAILED'
            else:
                results += 'ASSEMBLY FAILED'

        if len(c.warnings) > 0:
            results += '[' + str(len(c.warnings)) + ' WARNINGS]'
            for v in c.warnings:
                if not v['id'] == CoffeescriptBuilder._WARN_ID_MISSING_IMPORT:
                    continue

                key = v['package'] + '-' + v['class'] + '-' + str(v['line'])
                if key in missing:
                    continue
                missing[key] = v

    if len(results) > 0:
        print('\nDIRECTORY ' + path + ' COMPILE RESULTS [' + str(count) + ']:' + results)

    return {'res': results, 'missing': missing}
def echo(self, verbose=False, pretty=False):
    msg = self._createMessage()
    header = 'RESPONSE' if hasattr(self, 'response') else 'REQUEST'

    if verbose:
        if pretty:
            s = '\n' + 100*'-' + '\n' + header + ':\n' + (len(header) + 1)*'-' + '\n'
            for n, v in DictUtils.iter(msg):
                s += ' ' + str(n).upper() + ': ' + str(v) + '\n'
            return s
        return header + ': ' + str(msg)

    return '<NIMBLE %s | %s>' % (header, self.kind)
def clone(cls, item):
    out = []
    for value in item:
        if isinstance(value, dict):
            from pyaid.dict.DictUtils import DictUtils
            out.append(DictUtils.clone(value))
        elif isinstance(value, list) or isinstance(value, tuple):
            out.append(ListUtils.clone(value))
        else:
            out.append(value)

    if isinstance(item, tuple):
        return tuple(out)
    return out
def parse(data, target=None, parseToInterchangeFormat=False):
    d = json.loads(data)
    if target is None:
        target = {}

    cd = ConfigData()
    for n, v in DictUtils.iter(d):
        JSONConfigParser._parseElement(n, v, cd)

    if parseToInterchangeFormat:
        cd.writeToInterchangeDict(target)
    else:
        cd.writeToDict(target)

    return target
def prettyPrint(cls, source, separator=', '):
    """prettyPrint doc..."""
    out = []
    from pyaid.dict.DictUtils import DictUtils

    for v in source:
        if isinstance(v, (list, tuple)):
            v = cls.prettyPrint(v, separator=',')
        if isinstance(v, dict):
            v = DictUtils.prettyPrint(v)
        elif isinstance(v, StringUtils.BINARY_TYPE):
            v = StringUtils.strToUnicode(v)
        else:
            v = StringUtils.toUnicode(v)
        out.append(v)

    return '[%s]' % separator.join(out)
def _createElement(data):
    if isinstance(data, list):
        if isinstance(data[1], list):
            out = []
            for v in data[1]:
                out.append(str(v))
            d = '|'.join(out)
        else:
            d = data[1]
        return [data[0], d]
    elif isinstance(data, dict):
        d = {}
        for n, v in DictUtils.iter(data):
            d[n] = JSONConfigParser._createElement(v)
        return d

    return data
def _writeNode(name, data, depth=1):
    indent = (' '*4*depth)
    target = indent + '<'

    if isinstance(data, list):
        d = '|'.join(data[1]) if isinstance(data[1], list) else str(data[1])
        target += data[0] + ' n="' + name + '" v="' + d + '" />\n'
    elif isinstance(data, dict):
        target += 'o n="' + name + '">\n'
        for n, v in DictUtils.iter(data):
            target += XMLConfigParser._writeNode(n, v, depth + 1)
        target += indent + '</o>\n'
    elif isinstance(data, str):
        target += 's n="' + name + '" v="' + data + '" />\n'
    elif isinstance(data, (int, float)):
        target += 'n n="' + name + '" v="' + str(data) + '" />\n'
    else:
        target += 'unknown n="' + name + '" />\n'

    return target
def cleanBytesToText(cls, source, inPlace=True):
    """cleanBytesToText doc..."""
    out = source if inPlace else []
    from pyaid.dict.DictUtils import DictUtils

    for i in range(len(source)):
        v = source[i]

        if isinstance(v, (tuple, list)):
            v = cls.cleanBytesToText(v, inPlace=inPlace)
        elif isinstance(v, dict):
            v = DictUtils.cleanBytesToText(v, inPlace=inPlace)
        else:
            v = StringUtils.strToUnicode(v, force=False)

        if inPlace:
            out[i] = v
        else:
            out.append(v)

    return out
def compare(cls, a, b):
    if a == b:
        return True
    if a is None or b is None:
        return False
    if len(a) != len(b):
        return False

    for i in range(len(a)):
        # Compare dict values
        if isinstance(a[i], dict) and isinstance(b[i], dict):
            from pyaid.dict.DictUtils import DictUtils
            if not DictUtils.compare(a[i], b[i]):
                return False
            continue

        # Compare list values
        if isinstance(a[i], list) or isinstance(a[i], tuple):
            if not cls.compare(a[i], b[i]):
                return False
            continue

        if a[i] != b[i]:
            return False

    return True
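# Illustrative sketch (not part of the source): a plain-Python version of the element-wise
# comparison above, using recursion for nested lists/tuples and direct equality in place of the
# DictUtils.compare helper.
def sequences_equal(a, b):
    if a is b:
        return True
    if a is None or b is None or len(a) != len(b):
        return False
    for x, y in zip(a, b):
        if isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if not sequences_equal(x, y):
                return False
        elif x != y:
            return False
    return True

assert sequences_equal([1, (2, 3)], [1, [2, 3]])    # mixed list/tuple nesting still matches
assert not sequences_equal([1, 2], [1, 2, 3])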
def render(self, **kwargs):
    """Doc..."""

    # ADD KWARGS TO TEMPLATE RENDER PROPERTIES
    if kwargs:
        data = DictUtils.merge(self._data, kwargs)
    else:
        data = self._data

    td = [self._rootDir] if StringUtils.isStringType(self._rootDir) else self._rootDir
    lookup = TemplateLookup(
        directories=td,
        input_encoding='utf-8',
        output_encoding='utf-8',
        encoding_errors='replace')

    template = self._template
    if template:
        if not template.startswith('/'):
            template = '/' + template

        try:
            target = lookup.get_template(template)
        except Exception as err:
            self._result = None
            self._error = err
            self._errorMsg = 'Failed to get template (%s):\n%s' % (
                template,
                exceptions.text_error_template().render().replace('%', '%%'))
            self._log.writeError(self._errorMsg, self._error)
            return self.dom
    else:
        target = Template(self._source if self._source else '', lookup=lookup)

    mr = MakoDataTransporter(data=data, logger=self._log)
    try:
        self._result = target.render_unicode(mr=mr).replace('\r', '')
    except Exception:
        d = []
        if data:
            for n, v in data.items():
                d.append(StringUtils.toUnicode(n) + ': ' + StringUtils.toUnicode(v))

        try:
            stack = exceptions.text_error_template().render().replace('%', '%%')
        except Exception as err2:
            stack = ''
            self._log.writeError('Unable to build mako exception stack', err2)

        traces = mr.getTraces()
        self._errorMsg = 'Failed to render (%s):\n%s\n%sDATA:\n\t%s' % (
            str(template),
            str(stack),
            ('TRACES:\n\t' + '\n\t'.join(traces) if traces else ''),
            '\n\t'.join(d) if d else '')
        self._log.write(self._errorMsg)

    if self._minify:
        return self.minifyResult()

    return self.dom
def run(self):
    """Doc..."""

    #-------------------------------------------------------------------------------------------
    # GET SELECTED OBJECTS
    #    Get a list of selected objects. If no objects are selected then return an error.
    #    Because objects are listed based on components, shape nodes are generally returned
    #    instead of transform nodes. In those cases the transform node must be found from
    #    the shape node name.
    objectSelection = cmds.ls(selection=True, objectsOnly=True)
    if not objectSelection:
        self.putErrorResult(u'Nothing selected')
        return

    targets = dict()
    for obj in objectSelection:
        # Check for shape nodes, and get transform node name if a shape is found
        nodeTypes = cmds.nodeType(obj, inherited=True)
        if u'shape' in nodeTypes:
            obj = obj.rsplit(u'|', 1)[0]
        targets[obj] = []

    #-------------------------------------------------------------------------------------------
    # SORT SELECTED FACES
    #    Use a component selection to get the selected faces and add them to the target
    #    list for their object.
    for comp in cmds.ls(selection=True, flatten=True):
        parts = comp.split(u'.')
        if len(parts) < 2 or parts[0] not in targets:
            continue
        targets[parts[0]].append(int(parts[1].lstrip(u'f[').rstrip(u']')))

    #-------------------------------------------------------------------------------------------
    # EXTRACT & SEPARATE
    #    For each object in the targets list extract the selected faces by chipping them off
    #    and then separating the mesh into the separated pieces.
    results = dict()
    selects = []
    for obj, faces in DictUtils.iter(targets):
        if not faces:
            continue
        faces.sort()

        comps = []
        for f in faces:
            comps.append(u'%s.f[%s]' % (obj, f))

        cmds.polyChipOff(*comps, duplicate=False, keepFacesTogether=True)
        separateOut = cmds.polySeparate(obj)

        out = []
        for node in separateOut:
            if MayaNodeUtils.isTransformNode(node):
                out.append(node)
                selects.append(node)
        results[obj] = out

    cmds.select(*selects, replace=True)
    self.put('extracts', results)
def serialize(interchangeData):
    xml = '<vm>\n'
    for n, v in DictUtils.iter(interchangeData):
        xml += XMLConfigParser._writeNode(n, v)
    return (xml + '</vm>').decode('unicode_escape')
def puts(self, **kwargs):
    for key, value in DictUtils.iter(kwargs):
        self.put(key, value)
def runPythonImport(cls, payload):
    try:
        kwargs = payload.get('kwargs', {})
        targetModule = StringUtils.toStr2(payload.get('module'))
        targetMethod = StringUtils.toStr2(payload.get('method'))
        targetClass = StringUtils.toStr2(payload.get('class'))
        target = targetClass if targetClass is not None else targetMethod
        if target is None:
            parts = targetModule.rsplit('.', 1)
            targetModule = parts[0]
            target = parts[1]
    except Exception as err:
        NimbleEnvironment.logError([
            'ERROR: Failed to parse python import payload',
            'PAYLOAD: ' + DictUtils.prettyPrint(payload)], err)
        return NimbleResponseData(
            kind=DataKindEnum.PYTHON_IMPORT,
            error=cls._getDetailedError('\n'.join([
                'ERROR: Failed to parse python import payload',
                'PAYLOAD: ' + DictUtils.prettyPrint(payload)]), err),
            response=NimbleResponseData.FAILED_RESPONSE)

    # Dynamically import the specified module and reload it to make sure any changes have
    # been updated
    try:
        module = __import__(
            StringUtils.toStr2(targetModule), globals(), locals(),
            [StringUtils.toStr2(target)] if target else [])
        reload(module)
        target = getattr(module, target)
    except Exception as err:
        NimbleEnvironment.logError([
            'ERROR: Failed to import python target',
            'MODULE: %s' % targetModule,
            'TARGET: %s' % target,
            'PAYLOAD: ' + DictUtils.prettyPrint(payload)], err)
        return NimbleResponseData(
            kind=DataKindEnum.PYTHON_IMPORT,
            error=cls._getDetailedError('Failed to import python module', err),
            response=NimbleResponseData.FAILED_RESPONSE)

    try:
        result = dict()
        if targetClass is not None:
            tc = target()
            result = getattr(tc, targetMethod)(**kwargs) \
                if targetMethod else \
                tc(**kwargs)
        elif targetMethod is not None:
            result = target(**kwargs)
        else:
            # Find a NimbleScriptBase derived class definition and if it exists, run it to
            # populate the results
            for name, value in DictUtils.iter(Reflection.getReflectionDict(target)):
                if not inspect.isclass(value):
                    continue
                if NimbleScriptBase in value.__bases__:
                    result = getattr(target, name)()(**kwargs)
                    found = True

        # If a result dictionary contains an error key format the response as a failure
        errorMessage = None
        try:
            errorMessage = ArgsUtils.extract(
                NimbleEnvironment.REMOTE_RESULT_ERROR_KEY, None, result)
        except Exception as err:
            pass

        return cls.createReply(DataKindEnum.PYTHON_IMPORT, result, errorMessage=errorMessage)
    except Exception as err:
        msg = 'ERROR: Failed to execute remote script'
        NimbleEnvironment.logError([
            msg,
            'PAYLOAD: ' + DictUtils.prettyPrint(payload),
            'TARGET: ' + str(target)], err)
        return NimbleResponseData(
            kind=DataKindEnum.PYTHON_IMPORT,
            error=cls._getDetailedError(msg, err),
            response=NimbleResponseData.FAILED_RESPONSE)
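# Illustrative sketch (not part of the source): the dynamic import-and-dispatch core of
# runPythonImport using stdlib importlib against a module that is certain to exist. The payload
# keys mirror the method above; everything else is a simplified stand-in with none of the
# Nimble error handling.
import importlib

def run_python_import(payload):
    module = importlib.import_module(payload['module'])
    importlib.reload(module)                    # pick up any edits since the first import
    target = getattr(module, payload['method'])
    return target(**payload.get('kwargs', {}))

result = run_python_import({
    'module': 'fnmatch',
    'method': 'fnmatch',
    'kwargs': {'name': 'report.csv', 'pat': '*.csv'}})
assert result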
def serialize(interchangeData):
    data = {}
    for n, v in DictUtils.iter(interchangeData):
        data[n] = JSONConfigParser._createElement(v)
    return json.dumps(data, separators=(',', ':')).decode('unicode_escape')
def _reformat(cls, src):
    out = dict()
    for n, v in DictUtils.iter(src):
        n = StringUtils.strToUnicode(n)
        out[n] = cls._reformatValue(v)
    return out