def _createSetupFile(self, binPath):
    path = FileUtils.createPath(binPath, 'setup.py', isFile=True)
    scriptPath = inspect.getabsfile(self.applicationClass)

    try:
        sourcePath = PyGlassEnvironment.getPyGlassResourcePath(
            '..', 'setupSource.txt', isFile=True)
        f = open(sourcePath, 'r+')
        source = f.read()
        f.close()
    except Exception as err:
        print(err)
        return None

    try:
        f = open(path, 'w+')
        f.write(source.replace(
            '##SCRIPT_PATH##', StringUtils.escapeBackSlashes(scriptPath)
        ).replace(
            '##RESOURCES##', StringUtils.escapeBackSlashes(JSON.asString(self.resources))
        ).replace(
            '##INCLUDES##', StringUtils.escapeBackSlashes(JSON.asString(self.siteLibraries))
        ).replace(
            '##ICON_PATH##', StringUtils.escapeBackSlashes(self._createIcon(binPath))
        ).replace(
            '##APP_NAME##', self.appDisplayName
        ).replace(
            '##SAFE_APP_NAME##', self.appDisplayName.replace(' ', '_')
        ))
        f.close()
    except Exception as err:
        print(err)
        return None

    return path

def add(self, *args, **kwargs):
    """Adds a value to the existing item, replacing any existing entry.

    @@@param value:string
        The value argument can be a single value.

    @@@param group:string
        The name of the group in which to add the value. Default of None adds the value to
        the root group.
    """
    value = ArgsUtils.get('value', None, kwargs, args, 0)
    if value is None:
        value = u''
    elif isinstance(value, (dict, list)):
        value = JSON.asString(value)
    else:
        value = unicode(value)

    group = ArgsUtils.get('group', None, kwargs, args, 1)
    once = ArgsUtils.get('once', False, kwargs)

    if group:
        target = self._tempSubs if once else self._subs
        target[group] = value
    else:
        if once:
            self._tempRoot = value
        else:
            self._root = value

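# --- Usage sketch (not from the source) ---------------------------------------
# A minimal illustration of the add() semantics documented above. The `item`
# owner object and the 'settings' group name are hypothetical stand-ins; only
# the add() signature itself comes from the method above.
#
#   item.add('hello world')                        # replaces the root value
#   item.add({'a': 1}, 'settings')                 # dict is serialized via JSON.asString
#   item.add('temp', group='settings', once=True)  # stored in the temporary ('once') bucket
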
def _setEnvValue(cls, key, value):
    settings = cls._getEnvValue(None) if cls._ENV_SETTINGS is None else cls._ENV_SETTINGS
    if settings is None:
        settings = dict()
        cls._ENV_SETTINGS = settings

    if isinstance(key, basestring):
        key = [key]

    src = settings
    for k in key[:-1]:
        src = src[k]
    src[key[-1]] = value

    envPath = cls.getRootLocalResourcePath(cls._GLOBAL_SETTINGS_FILE, isFile=True)
    envDir = os.path.dirname(envPath)
    if not os.path.exists(envDir):
        os.makedirs(envDir)

    f = open(envPath, 'w+')
    try:
        f.write(JSON.asString(cls._ENV_SETTINGS))
    except Exception as err:
        print('ERROR: Unable to write environmental settings file at: ' + envPath)
        return False
    finally:
        f.close()

    return True

def _writeImpl(self, value, *args, **kwargs):
    if not value:
        return u''
    elif isinstance(value, (dict, list)):
        value = JSON.asString(value)
    elif not isinstance(value, basestring):
        value = str(value)

    value = value.replace("'", "\'").replace('\n', ' ')

    # Escape any single quotes that are not already escaped
    offset = value.find('\'')
    while offset != -1:
        if offset == 0 or value[offset - 1] != '\\':
            value = value[:offset] + '\\' + value[offset:]
        offset = value.find('\'', offset + 1)

    if not value:
        kwargs['writeEmpty'] = False

    for j in self._joins:
        v = j.write(*args, **kwargs)
        if v:
            return v

    if not ArgsUtils.get('writeEmpty', True, kwargs):
        return None

    return u'%s=\'%s\'' % (self._name, value)

def flush(self):
    if not self._buffer:
        return
    if sys.platform.startswith('win'):
        return

    items = []
    for b in self._buffer:
        try:
            d = DictUtils.merge(self._meta, b['data'])
            item = b['prefix'] + ' ' + JSON.asString(d)
        except Exception as err:
            item = '>> EXCEPTION: JSON ENCODING FAILED >> ' + str(err).replace('\n', '\t')

        try:
            item = item.encode('utf8', 'ignore')
        except Exception as err:
            item = '>> EXCEPTION: UNICODE DECODING FAILED >> ' + str(err).replace('\n', '\t')

        items.append(item)

    count = self._fileCount
    offset = random.randint(0, count - 1)
    success = False

    path = self.getReportFolder() + self._timeCode + '/'
    if not os.path.exists(path):
        os.makedirs(path)

    for i in range(count):
        index = (i + offset) % count
        p = path + str(index) + '.report'
        lock = FileLock(p)
        if lock.i_am_locking() and i < count - 1:
            continue

        try:
            lock.acquire()
        except Exception:
            continue

        try:
            out = StringUtils.toUnicode('\n'.join(items) + '\n')
            f = open(p, 'a+')
            f.write(out.encode('utf8'))
            f.close()
            success = True
        except Exception as err:
            print("REPORTER ERROR: Unable to write report file.")
            print(err)

        lock.release()
        if success:
            break

    self.clear()
    return success

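# --- Design note (not from the source) ----------------------------------------
# flush() shards report output across self._fileCount files. Each writer starts
# at a random shard index, skips shards this process is already locking (except
# on the final attempt), and moves on if a lock cannot be acquired, which spreads
# concurrent writers across the shard files instead of contending for one file.
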
def _storeBuildSnapshot(self):
    if not self._buildSnapshot:
        return

    snap = dict()
    for n, v in self._buildSnapshot.iteritems():
        if n in ['parent']:
            continue
        snap[n] = v

    settings = SettingsConfig(CompilerDeckEnvironment.projectSettingsPath, pretty=True)
    settings.set(['BUILD', 'LAST_SNAPSHOT'], JSON.asString(snap))

def _createErrorResult(self, code =None, info =None, data =None):
    out = dict(
        success=False,
        error=True,
        code=code if code else 'COMMUNICATOR_ERROR',
        info=info if info else 'Unknown error occurred.',
        data=data)

    # Keep errors to the 50 most recent to prevent memory overloads on long sessions.
    while len(self._errors) > 49:
        self._errors.pop(0)
    self._errors.append(out)

    return JSON.asString(out)

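# --- Example result (not from the source) -------------------------------------
# _createErrorResult() returns a JSON string roughly of the form below; the code
# and info values shown are the defaults from the method above, and data echoes
# whatever was passed in (null when omitted):
#
#   {"success": false, "error": true, "code": "COMMUNICATOR_ERROR",
#    "info": "Unknown error occurred.", "data": null}
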
def _createAttr(self, name, value):
    if not value:
        return u''
    elif isinstance(value, (dict, list)):
        value = JSON.asString(value)
    elif not isinstance(value, basestring):
        value = str(value)

    value = value.replace("'", "\'").replace('\n', ' ')

    # Escape any single quotes that are not already escaped
    offset = value.find('\'')
    while offset != -1:
        if offset == 0 or value[offset - 1] != '\\':
            value = value[:offset] + '\\' + value[offset:]
        offset = value.find('\'', offset + 1)

    return u'%s%s=\'%s\'' % (self._prefix, name, value)

def flush(self):
    if not self._buffer:
        return
    if sys.platform.startswith('win'):
        return

    items = []
    for b in self._buffer:
        try:
            d = dict(self._meta.items() + b['data'].items())
            item = b['prefix'] + u' ' + JSON.asString(d)
        except Exception as err:
            item = '>> EXCEPTION: JSON ENCODING FAILED >> ' + str(err).replace('\n', '\t')

        try:
            item = item.encode('utf8', 'ignore')
        except Exception as err:
            item = '>> EXCEPTION: UNICODE DECODING FAILED >> ' + str(err).replace('\n', '\t')

def handle(self):
    try:
        data = self.rfile.readline().strip()
        self._log.write('HANDLE: ' + str(data))

        try:
            result = self._respondImpl(JSON.fromString(unquote(data)))
        except Exception as err:
            self._log.writeError('RESPOND FAILURE', err)
            if self.returnResponse:
                self.wfile.write(JSON.asString({'error':1}))
            return

        if self.returnResponse:
            out = {'error':0}
            if result:
                out['payload'] = result
            self.wfile.write(JSON.asString(out))
    except Exception as err:
        self._log.writeError('HANDLE FAILURE', err)
        return

def fromSpreadsheetEntry(self, csvRowData, session):
    """ From the spreadsheet data dictionary representing raw track data, this method creates
        a track entry in the database. """

    #-------------------------------------------------------------------------------------------
    # MISSING
    #       Try to determine if the missing value has been set for this row data. If so and it
    #       has been marked missing, skip the track during import to prevent importing tracks
    #       with no data.
    try:
        missingValue = csvRowData[TrackCsvColumnEnum.MISSING.name].strip()
        if missingValue:
            return False
    except Exception:
        pass

    try:
        csvIndex = int(csvRowData[TrackCsvColumnEnum.INDEX.name])
    except Exception:
        self._writeError({
            'message':'Missing spreadsheet index',
            'data':csvRowData })
        return False

    model = Tracks_Track.MASTER
    t = model()
    t.importFlags = 0
    t.index = csvIndex

    #-------------------------------------------------------------------------------------------
    # SITE
    try:
        t.site = csvRowData.get(TrackCsvColumnEnum.TRACKSITE.name).strip().upper()
    except Exception:
        self._writeError({
            'message':'Missing track site',
            'data':csvRowData,
            'index':csvIndex })
        return False

    #-------------------------------------------------------------------------------------------
    # SECTOR
    try:
        t.sector = csvRowData.get(TrackCsvColumnEnum.SECTOR.name).strip().upper()
    except Exception:
        self._writeError({
            'message':'Missing sector',
            'data':csvRowData,
            'index':csvIndex })
        return False

    #-------------------------------------------------------------------------------------------
    # LEVEL
    try:
        t.level = csvRowData.get(TrackCsvColumnEnum.LEVEL.name)
    except Exception:
        self._writeError({
            'message':'Missing level',
            'data':csvRowData,
            'index':csvIndex })
        return False

    #-------------------------------------------------------------------------------------------
    # TRACKWAY
    #       Parse the trackway entry into type and number values. In the process illegal
    #       characters are removed to keep the format something that can be handled correctly
    #       within the database.
    try:
        test = csvRowData.get(TrackCsvColumnEnum.TRACKWAY.name).strip().upper()
    except Exception:
        self._writeError({
            'message':'Missing trackway',
            'data':csvRowData,
            'index':csvIndex })
        return False

    # If the trackway contains an ignore pattern then return without creating the track.
    # This is used for tracks in the record that are actually under-prints from a higher
    # level recorded in the spreadsheet only for catalog reference.
    testIndexes = [
        test.find(self._UNDERPRINT_IGNORE_TRACKWAY_STR),
        test.find(self._OVERPRINT_IGNORE_TRACKWAY_STR) ]
    testParensIndex = test.find('(')
    for testIndex in testIndexes:
        if testIndex != -1 and (testParensIndex == -1 or testParensIndex > testIndex):
            return False

    result = self._TRACKWAY_PATTERN.search(test)
    try:
        t.trackwayType = result.groupdict()['type'].upper().strip()
        t.trackwayNumber = result.groupdict()['number'].upper().strip()
    except Exception:
        self._writeError({
            'message':'Invalid trackway value: %s' % test,
            'data':csvRowData,
            'result':result,
            'match':result.groupdict() if result else 'N/A',
            'index':csvIndex })
        return False

    #-------------------------------------------------------------------------------------------
    # NAME
    #       Parse the name value into left, pes, and number attributes
    try:
        t.name = csvRowData.get(TrackCsvColumnEnum.TRACK_NAME.name).strip()
    except Exception:
        self._writeError({
            'message':'Missing track name',
            'data':csvRowData,
            'index':csvIndex })
        return False

    #-------------------------------------------------------------------------------------------
    # YEAR
    try:
        year = csvRowData.get(TrackCsvColumnEnum.MEASURED_DATE.name)

        if not year:
            year = '2014'
        else:
            try:
                y = StringUtils.toText(year).split(';')[-1].strip().replace(
                    '/', '_').replace(
                    ' ', '').replace(
                    '-', '_').split('_')[-1]
                year = int(re.compile('[^0-9]+').sub('', y))
            except Exception:
                year = 2014

            if year > 2999:
                # When multiple year entries combine into a single large number
                year = int(StringUtils.toUnicode(year)[-4:])
            elif year < 2000:
                # When two digit years (e.g. 09) are used instead of four digit years
                year += 2000

            year = StringUtils.toUnicode(year)

        t.year = year
    except Exception:
        self._writeError({
            'message':'Missing cast date',
            'data':csvRowData,
            'index':csvIndex })
        return False

    #-------------------------------------------------------------------------------------------
    # FIND EXISTING
    #       Use data set above to attempt to load the track database entry
    fingerprint = t.fingerprint

    for uid, fp in DictUtils.iter(self.remainingTracks):
        # Remove the fingerprint from the list of fingerprints found in the database, which at
        # the end will leave only those fingerprints that exist in the database but were not
        # touched by the importer. These values can be used to identify tracks that should
        # have been "touched" but were not.
        if fp == fingerprint:
            del self.remainingTracks[uid]
            break

    existing = t.findExistingTracks(session)
    if existing and not isinstance(existing, Tracks_Track):
        existing = existing[0]

    if fingerprint in self.fingerprints:
        if not existing:
            existing = self.fingerprints[fingerprint]

        self._writeError({
            'message':'Ambiguous track entry "%s" [%s -> %s]' % (
                fingerprint, csvIndex, existing.index),
            'data':csvRowData,
            'existing':existing,
            'index':csvIndex })
        return False

    self.fingerprints[fingerprint] = t

    if existing:
        t = existing
    else:
        session.add(t)
        session.flush()

    TCCE = TrackCsvColumnEnum
    IFE = ImportFlagsEnum

    #-------------------------------------------------------------------------------------------
    # CSV PROPERTY CLEANUP
    #       Cleanup and format additional CSV values before saving the csv data to the track's
    #       snapshot.
    removeNonColumns = [
        TrackCsvColumnEnum.PRESERVED.name,
        TrackCsvColumnEnum.CAST.name,
        TrackCsvColumnEnum.OUTLINE_DRAWING.name ]
    for columnName in removeNonColumns:
        if columnName in csvRowData:
            testValue = StringUtils.toText(csvRowData[columnName]).strip().upper()
            if testValue.startswith('NON'):
                del csvRowData[columnName]

    # Create a snapshot that only includes a subset of properties that are flagged to be
    # included in the database snapshot entry
    snapshot = dict()
    for column in Reflection.getReflectionList(TrackCsvColumnEnum):
        # Include only values that are marked in the enumeration as to be included
        if not column.snapshot or column.name not in csvRowData:
            continue

        value = csvRowData.get(column.name)
        if value is None:
            continue
        elif not StringUtils.isStringType(value):
            value = StringUtils.toText(value)

        value = StringUtils.toText(value).strip()
        if value in ['-', b'\xd0'.decode(b'MacRoman')]:
            continue

        snapshot[column.name] = value

    #-------------------------------------------------------------------------------------------
    # WIDTH
    #       Parse the width into a numerical value and assign appropriate default uncertainty
    try:
        t.widthMeasured = 0.01*float(self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.PES_WIDTH, TCCE.PES_WIDTH_GUESS,
            TCCE.MANUS_WIDTH, TCCE.MANUS_WIDTH_GUESS, '0',
            IFE.HIGH_WIDTH_UNCERTAINTY, IFE.NO_WIDTH ))

        if not existing or t.widthUncertainty == 0:
            t.widthUncertainty = 0.05 if (t.importFlags & IFE.HIGH_WIDTH_UNCERTAINTY) else 0.03
    except Exception as err:
        print(Logger().echoError('WIDTH PARSE ERROR:', err))
        self._writeError({
            'message':'Width parse error',
            'data':csvRowData,
            'error':err,
            'index':csvIndex })

        t.widthMeasured = 0.0
        if not existing:
            t.widthUncertainty = 0.05

    #-------------------------------------------------------------------------------------------
    # LENGTH
    #       Parse the length into a numerical value and assign appropriate default uncertainty
    try:
        t.lengthMeasured = 0.01*float(self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.PES_LENGTH, TCCE.PES_LENGTH_GUESS,
            TCCE.MANUS_LENGTH, TCCE.MANUS_LENGTH_GUESS, '0',
            IFE.HIGH_LENGTH_UNCERTAINTY, IFE.NO_LENGTH ))

        if not existing or t.lengthUncertainty == 0:
            t.lengthUncertainty = 0.05 if (t.importFlags & IFE.HIGH_LENGTH_UNCERTAINTY) else 0.03
    except Exception as err:
        print(Logger().echoError('LENGTH PARSE ERROR:', err))
        self._writeError({
            'message':'Length parse error',
            'data':csvRowData,
            'error':err,
            'index':csvIndex })

        t.lengthMeasured = 0.0
        if not existing:
            t.lengthUncertainty = 0.05

    #-------------------------------------------------------------------------------------------
    # DEPTH
    #       Parse the depth into a numerical value and assign appropriate default uncertainty
    try:
        t.depthMeasured = 0.01*float(self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.PES_DEPTH, TCCE.PES_DEPTH_GUESS,
            TCCE.MANUS_DEPTH, TCCE.MANUS_DEPTH_GUESS, '0',
            IFE.HIGH_DEPTH_UNCERTAINTY, 0 ))

        if not existing or t.depthUncertainty == 0:
            t.depthUncertainty = 0.05 if (t.importFlags & IFE.HIGH_DEPTH_UNCERTAINTY) else 0.03
    except Exception as err:
        print(Logger().echoError('DEPTH PARSE ERROR:', err))
        t.depthMeasured = 0.0
        if not existing:
            t.depthUncertainty = 0.05

    #-------------------------------------------------------------------------------------------
    # ROTATION
    #       Parse the rotation into a numerical value and assign appropriate default uncertainty
    try:
        t.rotationMeasured = float(self._collapseLimbProperty(
            t, csvRowData,
            TCCE.LEFT_PES_ROTATION, TCCE.LEFT_PES_ROTATION_GUESS,
            TCCE.RIGHT_PES_ROTATION, TCCE.RIGHT_PES_ROTATION_GUESS,
            TCCE.LEFT_MANUS_ROTATION, TCCE.LEFT_MANUS_ROTATION_GUESS,
            TCCE.RIGHT_MANUS_ROTATION, TCCE.RIGHT_MANUS_ROTATION_GUESS, '0',
            IFE.HIGH_ROTATION_UNCERTAINTY, 0 ))

        if not existing or t.rotationUncertainty == 0:
            t.rotationUncertainty = \
                10.0 if (t.importFlags & IFE.HIGH_ROTATION_UNCERTAINTY) else 45.0
    except Exception as err:
        print(Logger().echoError('ROTATION PARSE ERROR:', err))
        self._writeError({
            'message':'Rotation parse error',
            'error':err,
            'data':csvRowData,
            'index':csvIndex })

        t.rotationMeasured = 0.0
        if not existing:
            t.rotationUncertainty = 45.0

    #-------------------------------------------------------------------------------------------
    # STRIDE
    try:
        strideLength = self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.PES_STRIDE, TCCE.PES_STRIDE_GUESS,
            TCCE.MANUS_STRIDE, TCCE.MANUS_STRIDE_GUESS, None,
            IFE.HIGH_STRIDE_UNCERTAINTY )

        strideFactor = self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.PES_STRIDE_FACTOR, None,
            TCCE.MANUS_STRIDE_FACTOR, None, 1.0)

        if strideLength:
            snapshot[SnapshotDataEnum.STRIDE_LENGTH] = 0.01*float(strideLength)*float(strideFactor)
    except Exception as err:
        print(Logger().echoError('STRIDE PARSE ERROR:', err))

    #-------------------------------------------------------------------------------------------
    # WIDTH ANGULATION PATTERN
    try:
        widthAngulation = self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.WIDTH_PES_ANGULATION_PATTERN, TCCE.WIDTH_PES_ANGULATION_PATTERN_GUESS,
            TCCE.WIDTH_MANUS_ANGULATION_PATTERN, TCCE.WIDTH_MANUS_ANGULATION_PATTERN_GUESS, None,
            IFE.HIGH_WIDTH_ANGULATION_UNCERTAINTY )

        if widthAngulation:
            snapshot[SnapshotDataEnum.WIDTH_ANGULATION_PATTERN] = 0.01*float(widthAngulation)
    except Exception as err:
        print(Logger().echoError('WIDTH ANGULATION PARSE ERROR:', err))

    #-------------------------------------------------------------------------------------------
    # PACE
    try:
        pace = self._collapseLimbProperty(
            t, csvRowData,
            TCCE.LEFT_PES_PACE, TCCE.LEFT_PES_PACE_GUESS,
            TCCE.RIGHT_PES_PACE, TCCE.RIGHT_PES_PACE_GUESS,
            TCCE.LEFT_MANUS_PACE, TCCE.LEFT_MANUS_PACE_GUESS,
            TCCE.RIGHT_MANUS_PACE, TCCE.RIGHT_MANUS_PACE_GUESS, None,
            IFE.HIGH_PACE_UNCERTAINTY )

        if pace:
            snapshot[SnapshotDataEnum.PACE] = 0.01*float(pace)
    except Exception as err:
        print(Logger().echoError('PACE PARSE ERROR:', err))

    #-------------------------------------------------------------------------------------------
    # PACE ANGULATION PATTERN
    try:
        paceAngulation = self._collapseManusPesProperty(
            t, csvRowData,
            TCCE.PES_PACE_ANGULATION, TCCE.PES_PACE_ANGULATION_GUESS,
            TCCE.MANUS_PACE_ANGULATION, TCCE.MANUS_PACE_ANGULATION_GUESS, None,
            IFE.HIGH_WIDTH_ANGULATION_UNCERTAINTY )

        if paceAngulation:
            snapshot[SnapshotDataEnum.PACE_ANGULATION_PATTERN] = float(paceAngulation)
    except Exception as err:
        print(Logger().echoError('PACE ANGULATION PARSE ERROR:', err))

    #-------------------------------------------------------------------------------------------
    # PROGRESSION
    try:
        progression = self._collapseLimbProperty(
            t, csvRowData,
            TCCE.LEFT_PES_PROGRESSION, TCCE.LEFT_PES_PROGRESSION_GUESS,
            TCCE.RIGHT_PES_PROGRESSION, TCCE.RIGHT_PES_PROGRESSION_GUESS,
            TCCE.LEFT_MANUS_PROGRESSION, TCCE.LEFT_MANUS_PROGRESSION_GUESS,
            TCCE.RIGHT_MANUS_PROGRESSION, TCCE.RIGHT_MANUS_PROGRESSION_GUESS, None,
            IFE.HIGH_PROGRESSION_UNCERTAINTY )

        if progression:
            snapshot[SnapshotDataEnum.PROGRESSION] = 0.01*float(progression)
    except Exception as err:
        print(Logger().echoError('PROGRESSION PARSE ERROR:', err))
    #-------------------------------------------------------------------------------------------
    # GLENO-ACETABULAR DISTANCE
    try:
        gad = self._collapseGuessProperty(
            t, csvRowData,
            TCCE.GLENO_ACETABULAR_DISTANCE, TCCE.GLENO_ACETABULAR_DISTANCE_GUESS, None,
            IFE.HIGH_GLENO_ACETABULAR_UNCERTAINTY )

        if gad:
            snapshot[SnapshotDataEnum.GLENO_ACETABULAR_LENGTH] = 0.01*float(gad)
    except Exception as err:
        print(Logger().echoError('GLENO-ACETABULAR DISTANCE PARSE ERROR:', err))

    # Save the snapshot
    try:
        t.snapshot = JSON.asString(snapshot)
    except Exception:
        raise

    if TrackCsvColumnEnum.MEASURED_BY.name not in snapshot:
        # Mark entries that have no field measurements with a flag for future reference
        t.importFlags |= ImportFlagsEnum.NO_FIELD_MEASUREMENTS

    if existing:
        self.modified.append(t)
    else:
        self.created.append(t)

    return t

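# --- Worked example (not from the source) -------------------------------------
# The YEAR block in fromSpreadsheetEntry() above normalizes free-form measured
# date entries to a four digit year string. Traced against that logic:
#
#   '2009/2011' -> last '_' delimited token '2011'         -> '2011'
#   '09'        -> 9, which is < 2000, so 2000 is added    -> '2009'
#   '20102011'  -> 20102011 > 2999, keep last four digits  -> '2011'
#   ''          -> empty entries fall back to the default  -> '2014'
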
def _createSuccessResult(self, payload):
    return JSON.asString(dict(
        success=True,
        error=False,
        payload=payload ))

def json_data(self, value):
    if value is None:
        self.json_data = ""
        return
    self.json_data = JSON.asString(value)

def _writeImpl(self, value, kwargs):
    return JSON.asString(value)

def infoData(self, value):
    self.info = JSON.asString(value) if value else u''

def _writeImpl(self, value, params):
    if len(value) == 0:
        return u''
    return JSON.asString(value)

def sha256hmacSignObject(cls, key, obj):
    return cls.sha256hmac(key, JSON.asString(obj))

def _renderImpl(self, **kwargs):
    if self._processor.globalVars:
        self._processor.globalVars.includeTwitterWidgetAPI = True

    a = self.attrs
    q = a.get(TagAttributesEnum.SEARCH, '@vizme', kwargs)
    start = a.get(TagAttributesEnum.START, None, kwargs)
    stop = a.get(TagAttributesEnum.STOP, None, kwargs)
    skips = a.get(TagAttributesEnum.IGNORE, None, kwargs)
    height = a.getAsEnumerated(TagAttributesEnum.HEIGHT, GeneralSizeEnum, GeneralSizeEnum.medium)
    title = a.get(TagAttributesEnum.TITLE, '', kwargs)
    subtitle = a.get(TagAttributesEnum.SUBTITLE, '', kwargs)
    count = a.get(TagAttributesEnum.COUNT + TagAttributesEnum.TWEETS, 10, kwargs)
    scrollbar = a.getAsBool(TagAttributesEnum.SCROLL, count > 5, kwargs)
    interval = a.getAsInt(TagAttributesEnum.TIME, 5, kwargs)
    loop = a.getAsBool(TagAttributesEnum.LOOP, interval > 0, kwargs)

    if not isinstance(q, list):
        q = [q]
    user = len(q) == 1 and q[0].startswith('@') and not StringUtils.contains(q[0], [' ', ','])
    q = u' OR '.join(q)

    if height in ['none', 'm']:
        height = 300
    elif height == 'xxs':
        height = 100
    elif height == 'xs':
        height = 175
    elif height == 's':
        height = 250
    elif height == 'l':
        height = 375
    elif height == 'xl':
        height = 450
    elif height == 'xxl':
        height = 525
    else:
        height = 300

    if skips:
        user = False
        q += u' ' + (u'-' + skips if isinstance(skips, basestring) else u' -'.join(skips))

    if start or stop:
        user = False
        if start:
            q += u' since:' + start
        if stop:
            q += u' until:' + stop

    data = {
        'id':a.id.get(),
        'version':2,
        'width':'auto',
        'height':height,
        'interval':1000*interval,
        'theme': {
            'shell': {
                'background': a.backColors.baseColor.web,
                'color': a.focalColors.highlightColor.web },
            'tweets': {
                'background': a.backColors.baseColor.web,
                'color': a.focalColors.baseColor.web,
                'links': a.focalColors.linkColor.web } },
        'features': {
            'scrollbar':scrollbar,
            'loop':loop,
            'live':interval > 0,
            'behavior': u'all' if user else u'default' },
        'type': 'profile' if user else 'search' }

    if user:
        a.render['setUser'] = u'.setUser("' + q + u'")'
        data['rpp'] = count
    else:
        a.render['setUser'] = u''
        data['search'] = q

    data['title'] = subtitle.capitalize() if subtitle else string.capwords(q)
    data['subject'] = title.capitalize() if title else string.capwords(q.split(' ')[0])

    a.render['twitterData'] = JSON.asString(data).replace("'", "\\'")

def callJavascript(self, function, data =None):
    frame = self._webView.page().mainFrame()
    frame.addToJavaScriptWindowObject(self.javaScriptID, self)
    frame.evaluateJavaScript(
        u'try{ window.%s(%s); } catch (e) {}' % (
            function, JSON.asString(data) if data else u''))

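# --- Usage sketch (not from the source) ---------------------------------------
# callJavascript() serializes the optional data argument with JSON.asString and
# invokes the named function on the page inside a try/catch. The function name
# and payload below are illustrative only:
#
#   communicator.callJavascript('updateState', {'ready': True, 'count': 3})
#   # roughly evaluates: try{ window.updateState({"ready": true, "count": 3}); } catch (e) {}
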
def snapshotData(self, value):
    if not value:
        self.snapshot = ''
    else:
        self.snapshot = JSON.asString(value)

def sha256hmacSign(cls, key, **kwargs):
    return cls.sha256hmac(key, JSON.asString(kwargs))

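# --- Usage sketch (not from the source) ---------------------------------------
# Both signing helpers reduce their input to a JSON string via JSON.asString
# before handing it to sha256hmac, so keyword arguments and an equivalent dict
# should yield the same signature provided JSON.asString serializes dicts
# deterministically (e.g. sorted keys). The class name below is a hypothetical
# stand-in for whichever utility class defines these classmethods:
#
#   sig1 = CryptoUtils.sha256hmacSign('secret-key', user='bob', index=42)
#   sig2 = CryptoUtils.sha256hmacSignObject('secret-key', {'user': 'bob', 'index': 42})
#   # sig1 == sig2 under the deterministic-serialization assumption above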