def flush(self):
    """Flush buffered report entries to one of the rotating report files.

    Each buffered entry is JSON-encoded (merged with self._meta) and the
    whole batch is appended to a randomly chosen, lockable report file
    inside the time-coded report folder. The buffer is cleared afterwards
    regardless of outcome.

    Returns True when the batch was written, False when every candidate
    file failed, and None when flushing was skipped (empty buffer or
    Windows platform).
    """
    if not self._buffer:
        return
    # Reporting is skipped entirely on Windows (file locking behavior
    # differs there) — presumably intentional; confirm before changing.
    if sys.platform.startswith('win'):
        return

    items = []
    for b in self._buffer:
        try:
            d = DictUtils.merge(self._meta, b['data'])
            item = b['prefix'] + ' ' + JSON.asString(d)
        except Exception as err:
            item = '>> EXCEPTION: JSON ENCODING FAILED >> ' + str(err).replace('\n', '\t')
        try:
            # NOTE(review): encoding items to bytes and later joining with a
            # str separator only works on Python 2 — confirm runtime before
            # porting this module to Python 3.
            item = item.encode('utf8', 'ignore')
        except Exception as err:
            item = '>> EXCEPTION: UNICODE DECODING FAILED >> ' + str(err).replace('\n', '\t')
        items.append(item)

    count = self._fileCount
    # Randomize the starting file so concurrent reporters spread their
    # writes across the available report files.
    offset = random.randint(0, count - 1)
    success = False
    path = self.getReportFolder() + self._timeCode + '/'
    if not os.path.exists(path):
        os.makedirs(path)

    for i in range(count):
        index = (i + offset) % count
        p = path + str(index) + '.report'
        lock = FileLock(p)
        # Skip files this process already holds, unless it is the last
        # remaining candidate.
        if lock.i_am_locking() and i < count - 1:
            continue
        try:
            lock.acquire()
        except Exception:
            continue
        try:
            try:
                out = StringUtils.toUnicode('\n'.join(items) + '\n')
                # BUG FIX: 'with' guarantees the file handle is closed even
                # when the write raises (previously it leaked on failure).
                with open(p, 'a+') as f:
                    f.write(out.encode('utf8'))
                success = True
            except Exception as err:
                print("REPORTER ERROR: Unable to write report file.")
                print(err)
        finally:
            # BUG FIX: release the lock unconditionally so a failure in the
            # error-reporting path cannot leave the file locked.
            lock.release()
        if success:
            break

    self.clear()
    return success
def _handleUploadResult(self, event): self._toggleInteractivity(True) if not event.target.success: return # Add the upload urls to the build snapshot output = event.target.output if 'urls' in output: self._buildSnapshot['platformUploads'] = DictUtils.merge( self._buildSnapshot['platformUploads'], output['urls']) self._storeBuildSnapshot()
def _handleDeployResult(self, event): if event.target.success: # Any new Urls added by uploads during the deployment should be stored in the build # snapshot to save the information for future reference self._buildSnapshot['platformUploads'] = DictUtils.merge( self._buildSnapshot['platformUploads'], event.target.output['urls']) self._storeBuildSnapshot() settings = SettingsConfig(CompilerDeckEnvironment.projectSettingsPath, pretty=True) settings.set(['DEPLOY', 'LAST', 'SUMMARY'], self.summaryText.toPlainText()) settings.set(['DEPLOY', 'LAST', 'ADDITIONS'], self.additionsText.toPlainText()) settings.set(['DEPLOY', 'LAST', 'FIXES'], self.fixesText.toPlainText()) settings.set(['DEPLOY', 'LAST', 'REMOVALS'], self.removalsText.toPlainText()) settings.set(['DEPLOY', 'LAST', 'INFO'], self.releaseInfoText.toPlainText()) self._handleRemoteThreadComplete(event)
def _handleCompilationComplete(self, event):
    """Handle completion of a compilation run.

    For a successful packaged build: resolves the effective platforms
    (preferring the accumulated 'combinedPlatforms' over this run's
    'platforms'), merges any upload urls into the snapshot, persists it,
    appends a one-line entry to the build log, and refreshes the settings
    editor. Always signals remote-thread completion and clears the
    package flag.
    """
    snap = self._buildSnapshot
    if self._package and event.target.success:
        # If this was an appended package then prior to storing the snapshot the combined
        # platforms should be stored as the result instead of the platforms stored in this
        # particular case
        if 'combinedPlatforms' in snap:
            platforms = snap['combinedPlatforms']
            snap['platforms'] = platforms
            del snap['combinedPlatforms']
        else:
            platforms = snap['platforms']

        # Any package uploads conducted as part of the compilation process should be included
        # in the build snapshot for reference to prevent uploading them again in the future
        output = event.target.output
        if 'urls' in output:
            snap['platformUploads'] = DictUtils.merge(
                snap['platformUploads'], output['urls'])
        self._storeBuildSnapshot()

        # Append a tab-separated summary line (timestamp, per-platform
        # 3-letter flag or '---', and the version number/label) to the
        # build log file.
        FileUtils.putContents('\t'.join([
            TimeUtils.getNowDatetime().strftime('[%a %m-%d %H:%M]'),
            'DSK' if platforms.get(FlexProjectData.AIR_PLATFORM, False) else '---',
            'AND' if platforms.get(FlexProjectData.ANDROID_PLATFORM, False) else '---',
            'IOS' if platforms.get(FlexProjectData.IOS_PLATFORM, False) else '---',
            'WIN' if platforms.get(FlexProjectData.WINDOWS_PLATFORM, False) else '---',
            'MAC' if platforms.get(FlexProjectData.MAC_PLATFORM, False) else '---',
            '<<' + snap['versionInfo']['number'] + '>>',
            '<<' + snap['versionInfo']['label'] + '>>'
        ]) + '\n',
            self._settingsEditor.buildLogFilePath,
            True
        )
        # Refresh the editor so it reflects the post-build state.
        self._settingsEditor.reset()
        self._settingsEditor.populate()
        self._updateSettings()

    self._handleRemoteThreadComplete(event)
    # The package flag is one-shot: cleared after every compilation result.
    self._package = False
def getMerged(self, key, defaultValue =None, localOnly =False):
    """Return the value for key merged across all available data sources.

    Collects the key's value from each non-None data source (local-only
    sources when localOnly is True). Returns defaultValue when no source
    has the key, a clone when exactly one does, and otherwise the values
    merged together from the last-found source backwards.
    """
    pool = self.localDataSources if localOnly else self.dataSources
    candidates = [
        src.get(key, self.DATA_GET_NULL)
        for src in pool if src is not None]
    found = [value for value in candidates if value != self.DATA_GET_NULL]

    if not found:
        return defaultValue
    if len(found) == 1:
        # Clone so callers cannot mutate the source's stored value
        return DictUtils.clone(found[0])

    merged = found.pop()
    while found:
        merged = DictUtils.merge(merged, found.pop())
    return merged
def render(self, **kwargs):
    """Render the mako template and return the resulting dom.

    Keyword arguments are merged over the instance's stored render data.
    On success the rendered unicode (carriage returns stripped) is stored
    in self._result; on any failure self._result is cleared, self._error
    and self._errorMsg are populated, and the error is logged. Returns
    self.dom, or the minified result when self._minify is set.
    """
    # ADD KWARGS TO TEMPLATE RENDER PROPERTIES
    if kwargs:
        data = DictUtils.merge(self._data, kwargs)
    else:
        data = self._data

    td = [self._rootDir] if StringUtils.isStringType(self._rootDir) else self._rootDir
    lookup = TemplateLookup(
        directories=td, input_encoding='utf-8', output_encoding='utf-8',
        encoding_errors='replace')

    template = self._template
    if template:
        # Mako lookups expect an absolute (leading-slash) template path
        if not template.startswith('/'):
            template = '/' + template
        try:
            target = lookup.get_template(template)
        except Exception as err:
            self._result = None
            self._error = err
            self._errorMsg = 'Failed to get template (%s):\n%s' % (
                template,
                exceptions.text_error_template().render().replace('%', '%%'))
            self._log.writeError(self._errorMsg, self._error)
            return self.dom
    else:
        # No template path: render the raw source string instead
        target = Template(self._source if self._source else '', lookup=lookup)

    mr = MakoDataTransporter(data=data, logger=self._log)
    try:
        self._result = target.render_unicode(mr=mr).replace('\r', '')
    except Exception as renderErr:
        # BUG FIX: the render failure was previously not captured, leaving
        # self._error unset and self._result stale; mirror the error state
        # set by the template-lookup failure path above.
        self._result = None
        self._error = renderErr
        d = []
        if data:
            for n, v in data.items():
                d.append(StringUtils.toUnicode(n) + ': ' + StringUtils.toUnicode(v))
        try:
            stack = exceptions.text_error_template().render().replace('%', '%%')
        except Exception as err2:
            stack = ''
            self._log.writeError('Unable to build mako exception stack', err2)
        traces = mr.getTraces()
        self._errorMsg = 'Failed to render (%s):\n%s\n%sDATA:\n\t%s' % (
            str(template), str(stack),
            ('TRACES:\n\t' + '\n\t'.join(traces) if traces else ''),
            '\n\t'.join(d) if d else '')
        self._log.write(self._errorMsg)

    if self._minify:
        return self.minifyResult()
    return self.dom
def render(self, **kwargs):
    """Render the mako template and return the resulting dom.

    Keyword arguments are merged over the instance's stored render data.
    On success the rendered unicode (carriage returns stripped) is stored
    in self._result; on any failure self._result is cleared, self._error
    and self._errorMsg are populated, and the error is logged. Returns
    self.dom, or the minified result when self._minify is set.
    """
    # ADD KWARGS TO TEMPLATE RENDER PROPERTIES
    if kwargs:
        data = DictUtils.merge(self._data, kwargs)
    else:
        data = self._data

    td = [self._rootDir] if StringUtils.isStringType(self._rootDir) else self._rootDir
    lookup = TemplateLookup(
        directories=td,
        input_encoding="utf-8",
        output_encoding="utf-8",
        encoding_errors="replace"
    )

    template = self._template
    if template:
        # Mako lookups expect an absolute (leading-slash) template path
        if not template.startswith("/"):
            template = "/" + template
        try:
            target = lookup.get_template(template)
        except Exception as err:
            self._result = None
            self._error = err
            self._errorMsg = "Failed to get template (%s):\n%s" % (
                template,
                exceptions.text_error_template().render().replace("%", "%%"),
            )
            self._log.writeError(self._errorMsg, self._error)
            return self.dom
    else:
        # No template path: render the raw source string instead
        target = Template(self._source if self._source else "", lookup=lookup)

    mr = MakoDataTransporter(data=data, logger=self._log)
    try:
        self._result = target.render_unicode(mr=mr).replace("\r", "")
    except Exception as renderErr:
        # BUG FIX: the render failure was previously not captured, leaving
        # self._error unset and self._result stale; mirror the error state
        # set by the template-lookup failure path above.
        self._result = None
        self._error = renderErr
        d = []
        if data:
            for n, v in data.items():
                d.append(StringUtils.toUnicode(n) + ": " + StringUtils.toUnicode(v))
        try:
            stack = exceptions.text_error_template().render().replace("%", "%%")
        except Exception as err2:
            stack = ""
            self._log.writeError("Unable to build mako exception stack", err2)
        traces = mr.getTraces()
        self._errorMsg = "Failed to render (%s):\n%s\n%sDATA:\n\t%s" % (
            str(template),
            str(stack),
            ("TRACES:\n\t" + "\n\t".join(traces) if traces else ""),
            "\n\t".join(d) if d else "",
        )
        self._log.write(self._errorMsg)

    if self._minify:
        return self.minifyResult()
    return self.dom