def elapsedTime(self):
    """Milliseconds elapsed since this analyzer began running, or 0 when it
    has not been started yet."""
    startedAt = self._startTime
    if not startedAt:
        return 0
    return TimeUtils.getElapsedTime(
        startDateTime=startedAt,
        endDateTime=TimeUtils.getNowDatetime(),
        toUnit=TimeUtils.MILLISECONDS)
def open(self):
    """ Opens a TCP connection to the Nimble server and registers this
    connection in the shared pool.

    Returns True when a new connection was established, False when an active
    connection is still within its lifetime or when connecting failed.
    """
    if self._active:
        nowTime = TimeUtils.getNowSeconds()
        # Stale connections are closed and reopened below; fresh ones reused.
        if nowTime - self._activatedTime > NimbleEnvironment.CONNECTION_LIFETIME:
            self.close()
        else:
            return False
    self._activatedTime = TimeUtils.getNowSeconds()
    try:
        target = (NimbleEnvironment.getConnectionHost(), NimbleEnvironment.getConnectionPort())
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Sets socket option to prevent connection being refused by TCP reconnecting
        # to the same socket after a recent closure.
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._socket.setblocking(1)
        self._socket.connect(target)
    except Exception as err:
        NimbleEnvironment.logError(
            '[ERROR | NIMBLE COMMUNICATION] Failed to open Nimble connection', err)
        return False
    # Track the connection in the class-level pool so it can be reused/closed globally.
    if self not in NimbleConnection._CONNECTION_POOL:
        NimbleConnection._CONNECTION_POOL.append(self)
    self._active = True
    return True
def open(self):
    """ Opens a TCP connection to the Nimble server and registers this
    connection in the shared pool.

    Returns True when a new connection was established, False when an active
    connection is still within its lifetime or when connecting failed.
    """
    if self._active:
        nowTime = TimeUtils.getNowSeconds()
        # Expired connections are closed and replaced; live ones are reused.
        if nowTime - self._activatedTime > NimbleEnvironment.CONNECTION_LIFETIME:
            self.close()
        else:
            return False
    self._activatedTime = TimeUtils.getNowSeconds()
    try:
        target = (
            NimbleEnvironment.getConnectionHost(),
            NimbleEnvironment.getConnectionPort() )
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Sets socket option to prevent connection being refused by TCP reconnecting
        # to the same socket after a recent closure.
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._socket.setblocking(1)
        self._socket.connect(target)
    except Exception as err:
        NimbleEnvironment.logError(
            '[ERROR | NIMBLE COMMUNICATION] Failed to open Nimble connection', err)
        return False
    # Track the connection in the class-level pool so it can be reused/closed globally.
    if self not in NimbleConnection._CONNECTION_POOL:
        NimbleConnection._CONNECTION_POOL.append(self)
    self._active = True
    return True
def run(self):
    """ Executes the analysis process, iterating through each of the analysis
    stages before cleaning up and exiting. Results (success flag, error
    message) are stored on the instance rather than returned. """
    print('[OUTPUT PATH]: %s' % self.analysisRootPath)
    print(analysisStamp)
    print(tracksStamp)
    self._startTime = TimeUtils.getNowDatetime()
    # Start from an empty output folder for this analyzer.
    myRootPath = self.getPath(isDir=True)
    if os.path.exists(myRootPath):
        FileUtils.emptyFolder(myRootPath)
    if not os.path.exists(myRootPath):
        os.makedirs(myRootPath)
    # Recreate the temporary working folder from scratch.
    tempPath = self.tempPath
    if os.path.exists(tempPath):
        SystemUtils.remove(tempPath)
    os.makedirs(tempPath)
    if not self.logger.loggingPath:
        self.logger.loggingPath = myRootPath
    try:
        session = self.getAnalysisSession()
        self._preAnalyze()
        for stage in self._stages:
            self._currentStage = stage
            stage.analyze()
        self._currentStage = None
        self._postAnalyze()
        session.commit()
        session.close()
        self._success = True
    except Exception as err:
        # Re-fetch the session to close it; the original may be in a bad state.
        session = self.getAnalysisSession()
        session.close()
        msg = [
            '[ERROR]: Failed to execute analysis',
            'STAGE: %s' % self._currentStage]
        self._errorMessage = Logger.createErrorMessage(msg, err)
        self.logger.writeError(msg, err)
    # NOTE(review): collapsed source formatting makes the exact block boundary
    # ambiguous; the cleanup below is assumed to run on both success and
    # failure paths — confirm against the original file.
    session = self.getTracksSession()
    session.close()
    self._cleanup()
    SystemUtils.remove(tempPath)
    self.logger.write('\n\n[%s]: %s (%s)' % (
        'SUCCESS' if self._success else 'FAILED',
        self.__class__.__name__,
        TimeUtils.toPrettyElapsedTime(self.elapsedTime)
    ), indent=False)
def _writeFooter(self):
    """Writes the closing log entry for this analysis stage, including the
    total elapsed run time and any stage-specific footer lines."""
    elapsedMs = TimeUtils.getElapsedTime(
        startDateTime=self._startTime,
        endDateTime=TimeUtils.getNowDatetime(),
        toUnit=TimeUtils.MILLISECONDS)
    footerLines = [
        '\n' + 80*'_',
        '[COMPLETE]: %s ANALYSIS STAGE' % self._label.upper(),
        ' * Elapsed Time: %s' % TimeUtils.toPrettyElapsedTime(elapsedMs)]
    self.logger.write(footerLines + self._getFooterArgs(), indent=False)
def _generateHeaders(self, keyName, expires =None, eTag =None, maxAge =-1):
    """ Builds the HTTP header dictionary (Expires or ETag, Cache-Control and
    Content-Type) used when uploading the file stored under keyName.

    NOTE(review): relies on the Python 2 `unicode` builtin; this block is not
    Python 3 compatible as written.
    """
    headers = dict()
    if expires:
        # Accept a pre-formatted timestamp string or a datetime object.
        if isinstance(expires, unicode):
            headers['Expires'] = expires.encode('utf-8', 'ignore')
        elif isinstance(expires, str):
            headers['Expires'] = expires
        else:
            headers['Expires'] = TimeUtils.dateTimeToWebTimestamp(expires)
    elif eTag:
        headers['ETag'] = unicode(eTag)
    if maxAge > -1:
        headers['Cache-Control'] = 'public, max-age=' + unicode(maxAge)
    # Common image types are mapped explicitly; everything else is guessed
    # from the file name.
    if keyName.endswith('.jpg'):
        contentType = MIME_TYPES.JPEG_IMAGE
    elif keyName.endswith('.png'):
        contentType = MIME_TYPES.PNG_IMAGE
    elif keyName.endswith('.gif'):
        contentType = MIME_TYPES.GIF_IMAGE
    else:
        contentType = FileUtils.getMimeType(keyName)
    # Text-like content is served as UTF-8.
    if StringUtils.begins(contentType, ('text/', 'application/')):
        headers['Content-Type'] = contentType + '; charset=UTF-8'
    else:
        headers['Content-Type'] = contentType
    return headers
def __init__(self, parent=None, **kwargs):
    """Creates a new instance of VisibilityElement."""
    super(VisibilityElement, self).__init__(parent=parent)
    # Unique id: class name + time code + random suffix to avoid collisions.
    randomSuffix = StringUtils.getRandomString(8)
    self._instanceUid = TimeUtils.getUidTimecode(
        prefix=self.__class__.__name__,
        suffix=randomSuffix)
    self._visibility = VisibilityManager(target=self)
def saveFigure(self, key, path =None, close =True, **kwargs):
    """ Saves the specified figure to a file at the specified path.

    key :: String
        The key for the figure to be saved. If no such key exists, the method
        will return false.

    path :: String :: None
        The absolute file location to where the figure should be saved. If no
        path is specified the file will be saved as a pdf in this Analyzer's
        temporary folder.

    close :: Boolean :: True
        If true, the figure will be closed upon successful completion of the
        save process.

    [kwargs]
        Data to be passed directly to the PyPlot Figure.savefig() method,
        which can be used to further customize how the figure is saved.
    """
    # Abort when matplotlib is unavailable or no figure exists for this key.
    if not plt or key not in self._plotFigures:
        return False
    if not path:
        # Random suffix keeps repeated saves of the same key from colliding.
        path = self.getTempPath('%s-%s.pdf' % (
            key, TimeUtils.getUidTimecode(suffix=StringUtils.getRandomString(16))), isFile=True)
    figure = self._plotFigures[key]
    if 'orientation' not in kwargs:
        kwargs['orientation'] = 'landscape'
    figure.savefig(path, **kwargs)
    if close:
        self.closeFigure(key)
    return path
def getPrefix(self, *args, **kwargs):
    """ Builds the prefix string for a log entry: a timestamp and thread id,
    plus optional web-request info (when running inside a pyramid app) and an
    optional source file/line suffix. """
    if self._locationPrefix:
        # Most recent stack entry identifies the file/line that logged.
        item = self.getStackData()[-1]
        loc = ' -> %s #%s]' % (item['file'], StringUtils.toUnicode(item['line']))
    else:
        loc = ']'
    if self._app and self._app.pyramidApp:
        wsgi = self._app.environ
        # NOTE(review): ArgsUtils.get argument order appears to be
        # (name, default, source) — confirm against ArgsUtils.
        initials = self._INITIALS_RX.sub('', ArgsUtils.get('REMOTE_USER', '', wsgi))
        if initials:
            initials += ' | '
        domainName = ArgsUtils.get('SERVER_NAME', '', wsgi)
        uriPath = ArgsUtils.get(
            'REQUEST_URI', ArgsUtils.get('HTTP_REQUEST_URI', '', wsgi), wsgi)
        info = ' <' + initials + domainName + uriPath + '>'
    else:
        info = ''
    threadID = ThreadUtils.getCurrentID()
    # The opening '[' here is balanced by the ']' carried in `loc`.
    return StringUtils.toUnicode(
        TimeUtils.toFormat('[%a %H:%M <%S.%f>') + '<' + threadID + '>' + info + loc)
def _handleAddApp(self):
    """ Prompts the user for a PyGlass application root path and display name,
    registers the application in the app configuration, and selects it in the
    applications list widget. """
    defaultPath = self.appConfig.get('LAST_APP_PATH', OsUtils.getDocumentsPath())
    path = PyGlassBasicDialogManager.browseForDirectory(
        parent=self,
        caption=StringUtils.dedent(""" Specify the root path to a PyGlass application, in which a resource folder resides"""),
        defaultPath=defaultPath)
    # User cancelled the directory dialog.
    if not path:
        return
    label = PyGlassBasicDialogManager.openTextQuery(
        parent=self,
        header='Enter Application Name',
        message='Specify the name of this application for display within Alembic Migrator',
        defaultText=os.path.basename(path.rstrip(os.sep)) )
    apps = self.appConfig.get('APPLICATIONS', dict())
    appData = {
        'label':label,
        'path':path,
        'databases':dict(),
        'id':TimeUtils.getUidTimecode('App', StringUtils.slugify(label))}
    apps[appData['id']] = appData
    self.appConfig.set('APPLICATIONS', apps)
    self.refresh()
    # Highlight the newly added application in the list.
    resultItem = self.appsListWidget.findItems(appData['id'], QtCore.Qt.MatchExactly)
    if resultItem:
        resultItem[0].setSelected(True)
def run(self):
    """ Collects resource files for an installation target: copies project
    resources, per-application resources and PyGlass framework resources,
    writes an install stamp file, then removes unwanted destination files and
    copies the python static resources. Returns True. """
    resources = self._compiler.resources
    #-------------------------------------------------------------------------
    # RESOURCES
    # If no resource folders were specified copy the entire contents of the resources
    # folder. Make sure to skip the local resources path in the process.
    if not resources:
        for item in os.listdir(PyGlassEnvironment.getRootResourcePath(isDir=True)):
            itemPath = PyGlassEnvironment.getRootResourcePath(item)
            if os.path.isdir(itemPath) and not item in ['local', 'apps']:
                resources.append(item)
    for container in resources:
        # Containers may be nested paths using either slash style.
        parts = container.replace('\\', '/').split('/')
        self._copyResourceFolder(
            PyGlassEnvironment.getRootResourcePath(*parts, isDir=True), parts)
    #-------------------------------------------------------------------------
    # APP RESOURCES
    appResources = self._compiler.resourceAppIds
    if not appResources:
        appResources = []
    for appResource in appResources:
        itemPath = PyGlassEnvironment.getRootResourcePath('apps', appResource, isDir=True)
        if not os.path.exists(itemPath):
            self._log.write('[WARNING]: No such app resource path found: %s' % appResource)
            continue
        self._copyResourceFolder(itemPath, ['apps', appResource])
    #-------------------------------------------------------------------------
    # PYGLASS RESOURCES
    # Copy the resources from the PyGlass framework itself.
    resources = []
    for item in os.listdir(PyGlassEnvironment.getPyGlassResourcePath('..', isDir=True)):
        itemPath = PyGlassEnvironment.getPyGlassResourcePath('..', item)
        if os.path.isdir(itemPath):
            resources.append(item)
    for container in resources:
        self._copyResourceFolder(
            PyGlassEnvironment.getPyGlassResourcePath('..', container), [container])
    # Create a stamp file in resources for comparing on future installations
    creationStampFile = FileUtils.makeFilePath(self._targetPath, 'install.stamp')
    JSON.toFile(creationStampFile, {'CTS':TimeUtils.toZuluPreciseTimestamp()})
    #-------------------------------------------------------------------------
    # CLEANUP
    if self._verbose:
        self._log.write('CLEANUP: Removing unwanted destination files.')
    self._cleanupFiles(self._targetPath)
    self._copyPythonStaticResources()
    if self._verbose:
        self._log.write('COMPLETE: Resource Collection')
    return True
def __init__(self):
    """Creates a new instance of UniqueObject, assigning it a unique
    instance uid built from the class name, a time code, the class-level
    instance counter and a random suffix."""
    cls = type(self)
    # Fix: the original `self._INSTANCE_INDEX += 1` read the class attribute
    # but assigned an *instance* attribute, so the class counter never
    # advanced and every instance received the same index. Increment the
    # counter on the class itself so each instance gets a distinct index.
    cls._INSTANCE_INDEX += 1
    self._instanceUid = TimeUtils.getUidTimecode(
        prefix=cls.__name__,
        suffix=StringUtils.toUnicode(
            cls._INSTANCE_INDEX) + '-' + StringUtils.getRandomString(8) )
def getTempFilePath(self, name =None, extension =None, *args):
    """ Used to create a temporary file path within this instance's temporary
    folder. Any file on this path will be automatically removed at the end of
    the analysis process.

    [name] :: String :: None
        The desired file name for the desired file within the temporary
        directory. If no name is specified, a name will be created
        automatically using the current time (microsecond) and a 16 digit
        random code for a very low probability of collisions.

    [extension] :: String :: None
        Specifies the extension to add to this file. The file name is not
        altered if no extension is specified.

    [*args] :: [String] :: []
        A list of relative folder prefixes in which the file should reside.
        For example, if you wish to have a file 'bar' in a folder 'foo' then
        you would specify 'foo' as the single arg to get a file located at
        'foo/bar' within the temporary file. No directory prefixes will be
        created within this method.
    """
    fileName = name
    if not fileName:
        # Auto-generate a collision-resistant name from time + random code.
        fileName = TimeUtils.getUidTimecode(suffix=StringUtils.getRandomString(16))
    if extension:
        suffix = '.' + extension.strip('.')
        if not fileName.endswith(suffix):
            fileName += suffix
    pathParts = list(args) + [fileName]
    return FileUtils.makeFilePath(self.tempPath, *pathParts)
def __init__(self):
    """Creates a new instance of UniqueObject with a unique instance uid
    derived from the class name, a time code, a per-class counter and a
    random suffix."""
    cls = type(self)
    # Fix: `self._INSTANCE_INDEX += 1` previously shadowed the class counter
    # with an instance attribute, so the shared counter never incremented and
    # all instances shared the same index. Increment on the class instead.
    cls._INSTANCE_INDEX += 1
    self._instanceUid = TimeUtils.getUidTimecode(
        prefix=cls.__name__,
        suffix=StringUtils.toUnicode(cls._INSTANCE_INDEX) + '-' + StringUtils.getRandomString(8))
def _writeHeader(self):
    """ Method for writing the logging header for this stage. This is the
    first method called during the analysis process to denote in the log file
    that the following output was created by this stage. """
    headerLines = [
        '\n' + 80*'*',
        '[STARTED]: %s STAGE' % self._label.upper(),
        ' * Run on %s' % TimeUtils.toZuluFormat(self._startTime).replace('T', ' at ')]
    self.logger.write(headerLines + self._getHeaderArgs(), indent=False)
def __init__(self, sock):
    """ Creates a new handler for the accepted socket, recording the creation
    time used elsewhere for connection lifetime checks and initializing the
    message/flag state. """
    asynchat.async_chat.__init__(self, sock=sock)
    # Creation time in seconds; compared against the connection lifetime.
    self._createTime = TimeUtils.getNowSeconds()
    self._data = None
    self._message = None
    self.handling = False
    self._requestFlags = 0
    self._responseFlags = 0
    # Big-endian byte buffer for the binary wire protocol.
    self._chunk = ByteChunk(endianess=ByteChunk.BIG_ENDIAN)
    self._resetRouterState()
def createUniqueId(cls, prefix = u''):
    """ Creates a universally unique identifier string based on current time,
    active application instance state, and a randomized hash """
    cls._UID_INDEX += 1
    timecode = TimeUtils.getNowTimecode(cls.BASE_UNIX_TIME)
    indexCode = Base64.to64(cls._UID_INDEX)
    randomCode = StringUtils.getRandomString(12)
    return '%s%s-%s-%s' % (prefix, timecode, indexCode, randomCode)
def lastModifiedTimestamp(self):
    """ Timestamp of the last time a change was made to one or more of the
    entries in the RSS file being generated. Falls back to the compiled
    timestamp when there are no entries. """
    if not self._entries:
        return self.compiledTimestamp
    newestDate = max(entry.date for entry in self._entries)
    return TimeUtils.dateTimeToWebTimestamp(newestDate)
def getTimecodeFromDatetime(cls, time =None, zeroTime =None, rotationInterval =None):
    """Converts a datetime to a base-64 timecode, quantized down to the
    rotation interval (in minutes) relative to the zero time. All arguments
    default to the class-level configuration / current UTC time."""
    zeroTime = cls._ZERO_TIME if zeroTime is None else zeroTime
    rotationInterval = cls._ROTATION_INTERVAL if rotationInterval is None else rotationInterval
    time = datetime.datetime.utcnow() if time is None else time
    # Minutes elapsed since the zero time, floored to the rotation interval.
    minutes = float(TimeUtils.datetimeToSeconds(time) - zeroTime)/60.0
    quantized = float(rotationInterval)*math.floor(minutes/float(rotationInterval))
    return Base64.to64(int(quantized))
def analyze(self): """ Executes the analysis process for this stage, which consists largely of calling the analysis hook methods in their specified order. """ # resets the cache self.cache.unload() self._startTime = TimeUtils.getNowDatetime() self._writeHeader() self._preAnalyze() self._analyze() self._postAnalyze() self._writeFooter()
def _putFile(self, sourcePath, key):
    """Uploads the file at sourcePath to the bucket under key and returns a
    URL for it: a 30-day expiring URL for debug builds (private policy) or a
    plain public URL otherwise."""
    isDebug = self._flexData.debug
    self._bucket.putFile(
        key=key,
        filename=sourcePath,
        policy=S3Bucket.PRIVATE if isDebug else S3Bucket.PUBLIC_READ)
    if isDebug:
        expires = TimeUtils.getNowDatetime() + datetime.timedelta(days=30)
        return self._bucket.generateExpiresUrl(key, expires)
    return 'http://' + self._bucket.bucketName + '/' + key
def __init__(self, request, **kwargs):
    """Creates a new instance of ZigguratBaseView bound to the given request,
    initializing response caching state and registering response/finished
    callbacks."""
    self._request = request
    self._response = None
    # Timecode for the outgoing response, offset by the app's timecode base.
    self._outgoingTimecode = TimeUtils.getNowTimecode(request.ziggurat.timecodeOffset)
    self._logger = ArgsUtils.get("logger", self._request.ziggurat.logger, kwargs)
    self._expires = ArgsUtils.get("expires", 0, kwargs)
    self._lastModified = None
    self._cacheControlPublic = False
    self._etag = None
    self._explicitResponse = None
    # Event called when the response object is ready.
    self._request.add_response_callback(self._handleResponseReady)
    self._request.add_finished_callback(self._handleFinishedCallback)
def __init__(self, projectData, bucket =None, uploadFolder =None, **kwargs):
    """Creates a new instance of BuildPackageUploader."""
    self._flexData = projectData
    if uploadFolder is None:
        # Default folder: downloads/<debug|release>/<Mon-DD-YY>
        dayStamp = TimeUtils.getNowDatetime().strftime('%b-%d-%y')
        buildType = 'debug' if self._flexData.debug else 'release'
        self._uploadFolder = '/'.join(['downloads', buildType, dayStamp])
    else:
        self._uploadFolder = uploadFolder
    if not self._uploadFolder.endswith('/'):
        self._uploadFolder += '/'
    self._bucket = bucket or self._flexData.createBucket()
def createHeaderFile(cls, path, lastModified =None, headers =None):
    """Writes a '<path>.headers' JSON sidecar file containing the given
    headers plus a '_LAST_MODIFIED' web timestamp.

    lastModified may be a datetime, a tuple/list of datetimes (the newest
    non-None entry is used), or None (current UTC time is used).
    Returns the result of JSON.toFile.
    """
    if isinstance(lastModified, (tuple, list)):
        # Fix: the original compared candidates against a possibly-None first
        # element (`newTime > modTime`), which raises TypeError on Python 3
        # and could leave None selected. Filter falsy entries before taking
        # the newest.
        candidates = [item for item in lastModified if item]
        lastModified = max(candidates) if candidates else None
    if not lastModified:
        lastModified = datetime.datetime.utcnow()
    if not headers:
        headers = dict()
    headers['_LAST_MODIFIED'] = TimeUtils.dateTimeToWebTimestamp(lastModified)
    return JSON.toFile(path + '.headers', headers)
def run(self):
    """Instantiates and runs each analyzer class in sequence, recording its
    elapsed time, then prints a summary line for every analyzer."""
    for analyzerClass in self.ANALYZERS:
        analyzer = analyzerClass()
        analyzer.run()
        self.analyzers.append([analyzer.elapsedTime, analyzer])
    print('\n\n%s\n%s\n\n' % (80*'#', 80*'#'))
    print('%s\nANALYSIS COMPLETE:' % (80*'-'))
    for entry in self.analyzers:
        elapsed = entry[0]
        analyzer = entry[-1]
        print(' [%s]: %s (%s)' % (
            'SUCCESS' if analyzer.success else 'FAILED',
            TimeUtils.toPrettyElapsedTime(elapsed),
            analyzer.__class__.__name__))
def _deployWalker(self, args, path, names): """Doc...""" # Skip CDN file uploads when not walking the CDN root path explicitly if not args['cdn'] and path.find(StaticFlowEnvironment.CDN_ROOT_PREFIX) != -1: return for name in names: namePath = FileUtils.createPath(path, name) if os.path.isdir(namePath) or StringUtils.ends(name, self._SKIP_EXTENSIONS): continue headersPath = namePath + '.headers' if os.path.exists(headersPath): headers = JSON.fromFile(headersPath) else: headers = dict() if self._forceAll: lastModified = None elif self._forceHtml and StringUtils.ends(name, self._FORCE_HTML_EXTENSIONS): lastModified = None else: lastModified = ArgsUtils.extract('_LAST_MODIFIED', None, headers) if lastModified: lastModified = TimeUtils.webTimestampToDateTime(lastModified) kwargs = dict( key=u'/' + namePath[len(self._localRootPath):].replace(u'\\', u'/').strip(u'/'), maxAge=headers.get('max-age', -1), eTag=headers.get('eTag', None), expires=headers.get('Expires'), newerThanDate=lastModified, policy=S3Bucket.PUBLIC_READ) if StringUtils.ends(name, self._STRING_EXTENSIONS): result = self._bucket.put( contents=FileUtils.getContents(namePath), zipContents=True, **kwargs) else: result = self._bucket.putFile(filename=namePath, **kwargs) if result: self._logger.write(u'DEPLOYED: ' + unicode(namePath) + u'->' + unicode(kwargs['key']))
def _handleCompilationComplete(self, event):
    """ Callback fired when a compilation thread completes: on a successful
    package build, records the platform results and upload urls in the build
    snapshot and appends a line to the build log; always resets the settings
    editor and forwards to the generic remote-thread-complete handler. """
    snap = self._buildSnapshot
    if self._package and event.target.success:
        # If this was an appended package then prior to storing the snapshot the combined
        # platforms should be stored as the result instead of the platforms stored in this
        # particular case
        if 'combinedPlatforms' in snap:
            platforms = snap['combinedPlatforms']
            snap['platforms'] = platforms
            del snap['combinedPlatforms']
        else:
            platforms = snap['platforms']
        # Any package uploads conducted as part of the compilation process should be included
        # in the build snapshot for reference to prevent uploading them again in the future
        output = event.target.output
        if 'urls' in output:
            snap['platformUploads'] = DictUtils.merge(
                snap['platformUploads'], output['urls'])
        self._storeBuildSnapshot()
        # Append a tab-separated line flagging which platforms were built.
        FileUtils.putContents('\t'.join([
            TimeUtils.getNowDatetime().strftime('[%a %m-%d %H:%M]'),
            'DSK' if platforms.get(FlexProjectData.AIR_PLATFORM, False) else '---',
            'AND' if platforms.get(FlexProjectData.ANDROID_PLATFORM, False) else '---',
            'IOS' if platforms.get(FlexProjectData.IOS_PLATFORM, False) else '---',
            'WIN' if platforms.get(FlexProjectData.WINDOWS_PLATFORM, False) else '---',
            'MAC' if platforms.get(FlexProjectData.MAC_PLATFORM, False) else '---',
            '<<' + snap['versionInfo']['number'] + '>>',
            '<<' + snap['versionInfo']['label'] + '>>'
        ]) + '\n',
            self._settingsEditor.buildLogFilePath,
            True )
    # NOTE(review): collapsed source formatting makes the indentation here
    # ambiguous; the remainder is assumed to run for failed builds as well
    # (it resets UI state and clears the package flag) — confirm against the
    # original file.
    self._settingsEditor.reset()
    self._settingsEditor.populate()
    self._updateSettings()
    self._handleRemoteThreadComplete(event)
    self._package = False
def copyToCdnFolder(cls, targetPath, processor, lastModified =None, headers =None):
    """ Copies a processed file into the CDN folder under the web root and
    writes a matching '.headers' sidecar file with a far-future Expires
    header. Returns False for local processors, True once copied. """
    if processor.isLocal:
        return False
    # Path segments of the file relative to the web root.
    folder = targetPath[len(processor.targetWebRootPath):].replace('\\', '/').strip('/').split('/')
    # NOTE(review): `folder` is a list passed as a single argument —
    # presumably FileUtils.createPath flattens list arguments; confirm.
    destPath = FileUtils.createPath(
        processor.targetWebRootPath, processor.cdnRootFolder, folder, isFile=True)
    destFolder = FileUtils.getDirectoryOf(destPath)
    if not os.path.exists(destFolder):
        os.makedirs(destFolder)
    shutil.copy(targetPath, destPath)
    if not headers:
        headers = dict()
    # Default to a far-future Expires header (~1 year) unless already set.
    if 'Expires' not in headers:
        headers['Expires'] = TimeUtils.dateTimeToWebTimestamp(
            datetime.datetime.utcnow() + datetime.timedelta(days=360))
    cls.createHeaderFile(destPath, lastModified=lastModified, headers=headers)
    return True
def _generateHeaders(cls, keyName, expires=None, eTag=None, maxAge=-1, gzipped=False):
    """Builds the HTTP header dictionary (Expires or ETag, Cache-Control,
    Content-Type and optional Content-Encoding) for the file stored under
    keyName."""
    headers = dict()
    if expires:
        # Accept a text timestamp, a byte string, or a datetime object.
        if StringUtils.isStringType(expires):
            expiresValue = StringUtils.toBytes(expires)
        elif StringUtils.isBinaryType(expires):
            expiresValue = expires
        else:
            expiresValue = StringUtils.toBytes(
                TimeUtils.dateTimeToWebTimestamp(expires))
        headers['Expires'] = expiresValue
    elif eTag:
        headers['ETag'] = StringUtils.toBytes(eTag)
    if maxAge > -1:
        headers['Cache-Control'] = StringUtils.toBytes(
            'max-age=%s; public' % maxAge)
    # Common image types map explicitly; anything else is guessed by name.
    knownImageTypes = (
        ('.jpg', MIME_TYPES.JPEG_IMAGE),
        ('.png', MIME_TYPES.PNG_IMAGE),
        ('.gif', MIME_TYPES.GIF_IMAGE))
    for suffix, imageType in knownImageTypes:
        if keyName.endswith(suffix):
            contentType = imageType
            break
    else:
        contentType = FileUtils.getMimeType(keyName)
    if StringUtils.begins(contentType, ('text/', 'application/')):
        contentType = '%s; charset=UTF-8' % contentType
    headers['Content-Type'] = contentType
    if gzipped:
        headers['Content-Encoding'] = 'gzip'
    return headers
def _handleAddDatabase(self):
    """ Prompts the user for a database name, registers it under the currently
    selected application in the app configuration, and selects the new entry
    in the databases list widget. """
    result = PyGlassBasicDialogManager.openTextQuery(
        parent=self,
        header='Enter Database Name',
        message='Enter the name of the database as it would appear in the Database URL, e.g. '
            +'"activity" or "employees/artists"')
    # User cancelled or entered nothing.
    if not result:
        return
    data = {
        'id':TimeUtils.getUidTimecode('DATABASE', StringUtils.slugify(result)),
        'label':StringUtils.toText(result).title(),
        'name':result }
    apps = self.appConfig.get('APPLICATIONS')
    app = apps[self.currentAppID]
    app['databases'][data['id']] = data
    self.appConfig.set('APPLICATIONS', apps)
    self._refreshAppDisplay()
    # Highlight the newly added database entry.
    resultItem = self.databasesListWidget.findItems(result, QtCore.Qt.MatchExactly)
    if resultItem:
        resultItem[0].setSelected(True)
def _generateHeaders(cls, keyName, expires =None, eTag =None, maxAge =-1, gzipped =False):
    """Assembles the HTTP headers (Expires or ETag, Cache-Control,
    Content-Type and optional gzip Content-Encoding) for uploading the file
    stored under keyName."""
    out = dict()
    if expires:
        # expires may be text, bytes, or a datetime — normalize to bytes.
        if StringUtils.isStringType(expires):
            out['Expires'] = StringUtils.toBytes(expires)
        elif StringUtils.isBinaryType(expires):
            out['Expires'] = expires
        else:
            out['Expires'] = StringUtils.toBytes(
                TimeUtils.dateTimeToWebTimestamp(expires))
    elif eTag:
        out['ETag'] = StringUtils.toBytes(eTag)
    if maxAge > -1:
        out['Cache-Control'] = StringUtils.toBytes(
            'max-age=%s; public' % maxAge)
    # Explicit mapping for common image extensions; fall back to a guess.
    for suffix, imageType in (
            ('.jpg', MIME_TYPES.JPEG_IMAGE),
            ('.png', MIME_TYPES.PNG_IMAGE),
            ('.gif', MIME_TYPES.GIF_IMAGE)):
        if keyName.endswith(suffix):
            contentType = imageType
            break
    else:
        contentType = FileUtils.getMimeType(keyName)
    if StringUtils.begins(contentType, ('text/', 'application/')):
        contentType = '%s; charset=UTF-8' % contentType
    out['Content-Type'] = contentType
    if gzipped:
        out['Content-Encoding'] = 'gzip'
    return out
def generateExpiresUrl(self, key, expiresAtDateTime, secure=True):
    """Generates a signed URL for the given key that expires at the specified
    datetime; uses HTTP instead of HTTPS when secure is False."""
    secondsRemaining = TimeUtils.datetimeToSeconds(expiresAtDateTime) - TimeUtils.getNowSeconds()
    keyObject = self._bucket.get_key(key_name=key)
    return keyObject.generate_url(expires_in=secondsRemaining, force_http=not secure)
def keepAlive(self):
    """True when the request asked for keep-alive AND the connection is still
    within its configured lifetime window.

    Fix: the original expression returned the raw flag bitmask (an int) on
    the truthy path; this returns an explicit bool, which is truthiness-
    compatible for all existing callers.
    """
    requestedKeepAlive = bool(self._requestFlags & ConnectionFlags.KEEP_ALIVE)
    withinLifetime = (
        TimeUtils.getNowSeconds() - self._createTime < NimbleEnvironment.CONNECTION_LIFETIME)
    return requestedKeepAlive and withinLifetime
def _localIsNewer(cls, key, newerThanDate):
    """Returns True when the local file should be uploaded: either no
    comparison is possible (missing local date or remote timestamp) or the
    remote copy was modified before the local date."""
    if not (newerThanDate and key.last_modified):
        return True
    remoteModified = TimeUtils.webTimestampToDateTime(key.last_modified)
    return remoteModified < newerThanDate
def _parseTimestamp(cls, value):
    """Normalizes a timestamp value: None becomes the current UTC time, a
    base-64 timecode string is decoded to a datetime, and anything else is
    returned unchanged."""
    if value is None:
        return datetime.datetime.utcnow()
    if StringUtils.isStringType(value):
        seconds = Base64.from64(value) + PyGlassEnvironment.BASE_UNIX_TIME
        return TimeUtils.secondsToDatetime(seconds)
    return value
def getPrefix(self):
    """Returns the seconds elapsed since this instance's zero time, encoded
    as a base-64 string."""
    elapsedSeconds = TimeUtils.getNowSeconds() - self._zeroTime
    return Base64.to64(elapsedSeconds)