def getConfig(cls, databaseUrl, writeCallback =None):
    """Builds the Alembic Config for the database at *databaseUrl*.

    A storage-buffered Logger, named from a sanitized form of the URL, is
    attached to the config as its stdout target. The migration script location
    and both URL options are resolved through PyGlassModelUtils.

    @@@param databaseUrl:string The database URL to configure migrations for.
    @@@param writeCallback:function Optional callback attached to the logger.

    @@@return Config The populated Alembic configuration object.
    """
    # Sanitize the URL into an identifier that is safe to use as a log name
    logName = databaseUrl.replace(u'://', u'~').replace(u'/', u'--').replace(u'.vdb', u'')
    logger = Logger(logName, useStorageBuffer=True)
    if writeCallback is not None:
        logger.addWriteCallback(writeCallback)

    migrationPath = PyGlassModelUtils.getMigrationPathFromDatabaseUrl(databaseUrl)
    engineUrl = PyGlassModelUtils.getEngineUrl(databaseUrl)

    config = alembicConfig.Config(os.path.join(migrationPath, 'alembic.ini'), stdout=logger)
    config.set_main_option('script_location', migrationPath)
    config.set_main_option('sqlalchemy.url', engineUrl)
    config.set_main_option('url', engineUrl)
    return config
def getConfig(cls, databaseUrl, resourcesPath, localResourcesPath, writeCallback =None):
    """ Retrieves the Alembic configuration for the specified database URL stored within the
        resources and local resources path for the target application.

    @@@param databaseUrl:string The database URL to configure migrations for.
    @@@param resourcesPath:string Application resources path used to resolve the
        migration folder.
    @@@param localResourcesPath:string Local resources path used to resolve the
        engine URL.
    @@@param writeCallback:function Optional callback attached to the logger.

    @@@return Config The populated Alembic configuration object.
    """
    logName = databaseUrl.replace('://', '~').replace('/', '--').replace('.vdb', '')
    logger = Logger(logName, useStorageBuffer=True)
    if writeCallback is not None:
        logger.addWriteCallback(writeCallback)

    migrationPath = PyGlassModelUtils.getMigrationPathFromDatabaseUrl(
        databaseUrl, resourcesPath=resourcesPath)
    config = alembicConfig.Config(os.path.join(migrationPath, 'alembic.ini'), stdout=logger)
    engineUrl = PyGlassModelUtils.getEngineUrl(
        databaseUrl=databaseUrl, localResourcesPath=localResourcesPath)

    # These options are overridden during loading to prevent issues of absolute path corruption
    # when running in different deployment modes and when installed on different computers
    config.set_main_option('url', engineUrl)
    config.set_section_option('alembic', 'script_location', migrationPath)
    config.set_section_option('alembic', 'sqlalchemy.url', engineUrl)
    # BUGFIX: this option was previously set to migrationPath, which clobbered the engine
    # URL written by set_main_option above (set_main_option targets the same [alembic]
    # section), leaving 'url' pointing at a directory instead of the database
    config.set_section_option('alembic', 'url', engineUrl)
    return config
def runPythonExec(script, kwargs =None):
    """Executes *script* inside a temporary module wrapped in a single Maya undo chunk.

    The script receives its inputs through the module attribute named by
    NimbleEnvironment.REMOTE_KWARGS_KEY and reports its outputs through the attribute
    named by NimbleEnvironment.REMOTE_RESULT_KEY. If the executed module defines a
    NimbleScriptBase subclass, that class is instantiated and run to populate results.

    @@@param script:string The Python source code to execute.
    @@@param kwargs:dict Optional inputs made available to the script.

    @@@return dict|NimbleResponseData The result dictionary set by the script, a
        failure response when execution fails or the result carries an error key,
        or False when the Maya undo chunk cannot be opened or closed.
    """
    from nimble.NimbleEnvironment import NimbleEnvironment
    from nimble.data.NimbleResponseData import NimbleResponseData
    from nimble.data.enum.DataKindEnum import DataKindEnum

    try:
        nimble.cmds.undoInfo(openChunk=True)
    except Exception:
        return False

    try:
        # Create a new, temporary module in which to run the script
        module = imp.new_module('runExecTempModule')

        # Initialize the script with script inputs
        setattr(module, NimbleEnvironment.REMOTE_KWARGS_KEY,
                kwargs if kwargs is not None else dict())
        setattr(module, NimbleEnvironment.REMOTE_RESULT_KEY, dict())

        # Executes the script in the new module
        exec_(script, module.__dict__)

        # Find a NimbleScriptBase derived class definition and if it exists, run it to
        # populate the results.
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() is equivalent
        # here on both runtimes (the loop breaks at the first match anyway)
        for name, value in Reflection.getReflectionDict(module).items():
            if not inspect.isclass(value):
                continue
            if NimbleScriptBase in value.__bases__:
                getattr(module, name)().run()
                break

        # Retrieve the results object that contains all results set by the execution
        result = getattr(module, NimbleEnvironment.REMOTE_RESULT_KEY)
    except Exception as err:
        logger = Logger('runPythonExec', printOut=True)
        logger.writeError('ERROR: Failed Remote Script Execution', err)
        result = NimbleResponseData(
            kind=DataKindEnum.PYTHON_SCRIPT,
            response=NimbleResponseData.FAILED_RESPONSE,
            error=str(err))

    # If a result dictionary contains an error key format the response as a failure
    try:
        errorMessage = ArgsUtils.extract(
            NimbleEnvironment.REMOTE_RESULT_ERROR_KEY, None, result)
        if errorMessage:
            return NimbleResponseData(
                kind=DataKindEnum.PYTHON_SCRIPT,
                response=NimbleResponseData.FAILED_RESPONSE,
                error=errorMessage,
                payload=result)
    except Exception:
        # Best-effort: result may already be a NimbleResponseData failure object
        # rather than a dict, in which case it is returned below unchanged
        pass

    try:
        nimble.cmds.undoInfo(closeChunk=True)
    except Exception:
        return False

    return result
def _getDetailedError(cls, message, error):
    """Formats *message* and *error* into a multi-line report that includes a
    full stack trace and the running Python version string."""
    logMessage = Logger.createLogMessage(
        logValue=Logger.createErrorMessage(message, error),
        traceStack=True,
        shaveStackTrace=0,
        htmlEscape=False)
    sections = [
        Logger.logMessageToString(logMessage),
        'VERSION: {}'.format(sys.version)]
    return '\n'.join(sections)
def __init__(self, targetPackageOrPath, rootPath, verbose=True, debug=False, trace=False, force=False, compress=False, buildOnly=False):
    """Creates a new instance of CoffeescriptBuilder, resolving the compilation
    targets from the given package or path."""
    self.buildOnly = buildOnly

    self._imports = dict()
    self._requires = dict()
    self._includes = dict()
    self._report = dict()
    self._dependencyReport = dict()
    self._warnings = []
    self._targets = []

    self._verbose = verbose
    self._trace = trace
    self._debug = debug
    self._force = force
    self._compress = compress
    self._rootPath = rootPath
    self._log = Logger(self, printOut=True)

    if isinstance(targetPackageOrPath, CoffeescriptDependency):
        target = targetPackageOrPath
    else:
        target = CoffeescriptDependency(targetPackageOrPath, rootPath, None)

    if target.exists:
        self._targets.append(target)
    else:
        csFiles = CoffeescriptBuilder.getScriptsInPath(target.packagePath)

        # Exec matches are collected in a first pass; lib matches in a second.
        # Constructing all exec files first potentially optimizes the import
        # process for the libraries.
        for kindAttr in ('isExec', 'isLib'):
            for scriptPath in csFiles:
                dependency = CoffeescriptDependency(scriptPath, rootPath, None)
                if getattr(dependency, kindAttr):
                    self._targets.append(dependency)

    if not self._targets:
        print('\n\n')
        self._log.write('No targets exist for: %s. Compilation aborted.' % targetPackageOrPath)
        print('\n')
def _getDetailedError(cls, message, error):
    """Builds a detailed error report string from *message* and *error*,
    appending the interpreter version for diagnostics."""
    errorText = Logger.createErrorMessage(message, error)
    rendered = Logger.logMessageToString(
        Logger.createLogMessage(
            logValue=errorText,
            traceStack=True,
            shaveStackTrace=0,
            htmlEscape=False))
    return '\n'.join([rendered, 'VERSION: {}'.format(sys.version)])
class SocketHandler(SocketServer.StreamRequestHandler):
    """Stream request handler that reads one URL-quoted JSON line from the
    socket, delegates it to _respondImpl, and (when RETURN_RESPONSE is set on
    the class) writes a JSON response back to the client."""

#===================================================================================================
#                                                                                       C L A S S

    SERVICE_UID = 'test'
    VERBOSE = False
    WORK_PATH = '/var/lib/'
    RUN_PATH = '/var/run/'
    LOG_PATH = '/var/log/'

#___________________________________________________________________________________________________ __init__
    def __init__(self, request, client_address, server):
        """Creates the handler and logs its creation before delegating to the
        base initializer (which dispatches to handle())."""
        self._log = Logger(self)
        self._log.write('Socket handler created')
        SocketServer.StreamRequestHandler.__init__(self, request, client_address, server)

#===================================================================================================
#                                                                                   G E T / S E T

#___________________________________________________________________________________________________ GS: returnResponse
    @property
    def returnResponse(self):
        # Defaults to False when a subclass does not define RETURN_RESPONSE
        return getattr(self.__class__, 'RETURN_RESPONSE', False)

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ handle
    def handle(self):
        """Reads, decodes, and dispatches a single request line."""
        try:
            data = self.rfile.readline().strip()
            self._log.write('HANDLE: ' + str(data))
            try:
                result = self._respondImpl(JSON.fromString(unquote(data)))
            except Exception as err:
                self._log.writeError('RESPOND FAILURE', err)
                if self.returnResponse:
                    self.wfile.write(JSON.asString({'error':1}))
                return

            if self.returnResponse:
                out = {'error':0}
                if result:
                    out['payload'] = result
                # BUGFIX: the response must be serialized before writing; previously the
                # raw dict was passed to wfile.write, unlike the error path above
                self.wfile.write(JSON.asString(out))
        except Exception as err:
            # BUGFIX: Logger.write does not accept an error argument; use writeError,
            # consistent with the RESPOND FAILURE handling above
            self._log.writeError('HANDLE FAILURE', err)
            return

#===================================================================================================
#                                                                               P R O T E C T E D

#___________________________________________________________________________________________________ _respondImpl
    def _respondImpl(self, data):
        """Subclass hook: process the decoded request payload and return an
        optional result to send back to the client."""
        pass
def __init__(self, parent, **kwargs):
    """Creates the remote execution thread, wiring its logger and the Qt
    signal wrapper objects used to communicate back to the main thread."""
    QtCore.QThread.__init__(self, parent)
    self._events = dict()

    log = Logger(self)
    log.trace = True
    log.addPrintCallback(self._handleLogWritten)
    self._log = log

    self._response = None
    self._output = None
    self._error = None
    self._explicitComplete = ArgsUtils.get('explicitComplete', False, kwargs)

    # Qt signals must live on QObject subclasses, so each signal gets a small
    # wrapper class instantiated per-thread
    class RETCompleteSignal(QtCore.QObject):
        signal = QtCore.Signal(dict)

    class RETLogSignal(QtCore.QObject):
        signal = QtCore.Signal(str)

    class RETProgressSignal(QtCore.QObject):
        signal = QtCore.Signal(dict)

    class RETEventSignal(QtCore.QObject):
        signal = QtCore.Signal(dict)

    self._completeSignal = RETCompleteSignal()
    self._logSignal = RETLogSignal()
    self._progressSignal = RETProgressSignal()
    self._eventSignal = RETEventSignal()

    # Add the thread to the static active thread storage so that it won't be garbage
    # collected until the thread completes.
    self.__class__._ACTIVE_THREAD_STORAGE.append(self)
    self._connectSignals(**kwargs)
def __call__(self, *args, **kwargs):
    """Runs the remote script, converting any raised exception into an error
    result on the response, and returns the response result dictionary."""
    if self.response is None:
        self.response = RemoteScriptResponse()
    if self.kwargs is None:
        self.kwargs = kwargs

    try:
        self.run()
    except Exception as err:
        message = "Nimble remote script run failure"
        NimbleEnvironment.logError(message, err)
        failureMessage = Logger.createErrorMessage(message, err)
        self.putErrorResult(
            Logger.logMessageToString(logMessage=failureMessage, includePrefix=False))

    out = self.response.result
    if out:
        return out
    return dict()
def __init__(self, logger =None):
    """Creates a new instance of TrackExporter, building a print-out logger
    when none is supplied."""
    self.results = None
    self.modifications = 0
    self.logger = logger if logger else Logger(self, printOut=True)
def __init__(self, data, logger=None):
    """Creates a new instance of MakoDataTransporter."""
    if logger:
        self._log = logger
    else:
        self._log = Logger(self)

    if data:
        self._data = data
    else:
        self._data = dict()

    self._traces = []
    self._warns = []
    self._rootData = None
def __init__(self, **kwargs):
    """Creates a new instance of MarkupError, resolving its definition, tag,
    and message text (with #TAG# style replacements applied)."""
    self._thrown = Logger.getFormattedStackTrace(2, 3)
    self._definition = ArgsUtils.get('errorDef', None, kwargs)
    self._tag = ArgsUtils.get('tag', None, kwargs)
    self._block = ArgsUtils.get('block', self._tag.block if self._tag else None, kwargs)
    self._processor = ArgsUtils.get(
        'processor', self._tag.processor if self._tag else None, kwargs)
    self._code = ArgsUtils.get('code', self._definition.code, kwargs, allowNone=False)
    self.label = ArgsUtils.get('label', self._definition.label, kwargs, allowNone=False)
    self.message = ArgsUtils.get('message', self._definition.message, kwargs, allowNone=False)
    self._critical = ArgsUtils.get('critical', False, kwargs)

    # The tag name replacement is always appended after any caller-supplied pairs
    replacements = ArgsUtils.getAsList('replacements', kwargs)
    replacements.append([u'#TAG#', unicode(self._tag.tagName if self._tag else u'???')])
    for pair in replacements:
        token = unicode(pair[0])
        value = unicode(pair[1])
        if self.message:
            self.message = self.message.replace(token, value)
        if self.label:
            self.label = self.label.replace(token, value)

    self._verbose = ArgsUtils.get('verbose', False, kwargs)
    self._line = None
    self._character = None
    self._source = None
    self._logSource = None
    self._populateData()
def getLogger(
        logIdentifier, kwargs =None, args =None, index =None, name ='logger',
        extract =False, trace =None
):
    """Fetches a Logger from the provided argument sources, creating a new one
    named *logIdentifier* when none is found. When *trace* is given it is
    applied to the logger before returning."""
    getter = ArgsUtils.extract if extract else ArgsUtils.get
    res = getter(name, None, kwargs, args, index)

    if res is None:
        from pyaid.debug.Logger import Logger
        res = Logger(logIdentifier)

    if trace is not None:
        res.trace = trace
    return res
def __init__(
        self,
        targetPackageOrPath,
        rootPath,
        verbose=True,
        debug=False,
        trace=False,
        force=False,
        compress=False,
        buildOnly=False,
):
    """Creates a new instance of CoffeescriptBuilder and resolves the set of
    compilation targets for the given package or path."""
    self.buildOnly = buildOnly

    self._imports = dict()
    self._requires = dict()
    self._includes = dict()
    self._report = dict()
    self._dependencyReport = dict()
    self._warnings = []
    self._targets = []

    self._verbose = verbose
    self._trace = trace
    self._debug = debug
    self._force = force
    self._compress = compress
    self._rootPath = rootPath
    self._log = Logger(self, printOut=True)

    if isinstance(targetPackageOrPath, CoffeescriptDependency):
        target = targetPackageOrPath
    else:
        target = CoffeescriptDependency(targetPackageOrPath, rootPath, None)

    if target.exists:
        self._targets.append(target)
    else:
        csFiles = CoffeescriptBuilder.getScriptsInPath(target.packagePath)

        # First pass: exec matches. Constructing all exec files first
        # potentially optimizes the import process for the libraries.
        for scriptPath in csFiles:
            dependency = CoffeescriptDependency(scriptPath, rootPath, None)
            if dependency.isExec:
                self._targets.append(dependency)

        # Second pass: lib matches.
        for scriptPath in csFiles:
            dependency = CoffeescriptDependency(scriptPath, rootPath, None)
            if dependency.isLib:
                self._targets.append(dependency)

    if not self._targets:
        print("\n\n")
        self._log.write("No targets exist for: %s. Compilation aborted." % targetPackageOrPath)
        print("\n")
def __call__(self, *args, **kwargs):
    """Executes the remote script run, recording any failure on the response,
    and returns the response's result dictionary (never None)."""
    if self.response is None:
        self.response = RemoteScriptResponse()
    if self.kwargs is None:
        self.kwargs = kwargs

    try:
        self.run()
    except Exception as err:
        message = u'Nimble remote script run failure'
        NimbleEnvironment.logError(message, err)
        failureMessage = Logger.createErrorMessage(message, err)
        self.putErrorResult(
            Logger.logMessageToString(logMessage=failureMessage, includePrefix=False))

    out = self.response.result
    return out if out else dict()
def run(self):
    """ Executes the analysis process, iterating through each of the analysis stages before
        cleaning up and exiting. """
    print('[OUTPUT PATH]: %s' % self.analysisRootPath)
    print(analysisStamp)
    print(tracksStamp)
    self._startTime = TimeUtils.getNowDatetime()

    # Start from a clean output folder for this run
    myRootPath = self.getPath(isDir=True)
    if os.path.exists(myRootPath):
        FileUtils.emptyFolder(myRootPath)
    if not os.path.exists(myRootPath):
        os.makedirs(myRootPath)

    # Recreate the scratch folder from scratch as well
    tempPath = self.tempPath
    if os.path.exists(tempPath):
        SystemUtils.remove(tempPath)
    os.makedirs(tempPath)

    # Default the log output location to this run's root folder
    if not self.logger.loggingPath:
        self.logger.loggingPath = myRootPath

    try:
        session = self.getAnalysisSession()
        self._preAnalyze()
        # Each stage runs in order; _currentStage is tracked so a failure can
        # report which stage was active
        for stage in self._stages:
            self._currentStage = stage
            stage.analyze()
        self._currentStage = None
        self._postAnalyze()
        session.commit()
        session.close()
        self._success = True
    except Exception as err:
        # NOTE(review): the analysis session is re-fetched and closed without an
        # explicit rollback here — confirm getAnalysisSession returns the same
        # session instance and that close() discards uncommitted changes
        session = self.getAnalysisSession()
        session.close()
        msg = [
            '[ERROR]: Failed to execute analysis',
            'STAGE: %s' % self._currentStage]
        self._errorMessage = Logger.createErrorMessage(msg, err)
        self.logger.writeError(msg, err)

    # The tracks session is closed unconditionally, on success and failure alike
    session = self.getTracksSession()
    session.close()

    self._cleanup()
    SystemUtils.remove(tempPath)

    self.logger.write('\n\n[%s]: %s (%s)' % (
        'SUCCESS' if self._success else 'FAILED',
        self.__class__.__name__,
        TimeUtils.toPrettyElapsedTime(self.elapsedTime)
    ), indent=False)
class DataFormatConverter(object):
    """A class for converting between various data interchange formats, e.g. XML and JSON."""

#===================================================================================================
#                                                                                       C L A S S

#___________________________________________________________________________________________________ __init__
    def __init__(self):
        """Creates a new instance of DataFormatConverter."""
        self._type = None
        self._src = None
        self._log = Logger('DataFormatConverter')
        self._path = None

#===================================================================================================
#                                                                                   G E T / S E T

#___________________________________________________________________________________________________ GS: source
    @property
    def source(self):
        """The most recently loaded source data, or None."""
        return self._src

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ load
    def load(self, path, fileType):
        """Loads the file at *path* as *fileType* source data.

        @@@param path:string Filesystem path of the source file.
        @@@param fileType:string Format identifier passed through to loads().

        @@@return bool True on success, False when the path is missing or
            reading/parsing fails.
        """
        if not os.path.exists(path):
            self._log.write('ERROR: Path does not exist [%s]. Unable to load.' % path)
            return False

        try:
            fh = codecs.open(path, 'r', 'utf-8')
            res = fh.read()
            fh.close()
            enc = res.encode('utf-8')
            self.loads(enc, fileType)
        # BUGFIX: 'except Exception, err' is Python 2-only syntax and a SyntaxError
        # on Python 3; the 'as' form works on Python 2.6+ and Python 3
        except Exception as err:
            self._log.writeError('Failed to load source file [%s].' % path, err)
            return False

        self._path = path
        return True
class IncludeCompressor(object):
    """Compresses JS/CSS include files, optionally compiling coffeescript sources first."""

#===================================================================================================
#                                                                                       C L A S S

    # Raw string literals avoid invalid-escape-sequence warnings on modern Python;
    # the compiled patterns are byte-identical to the originals
    _REMOVE_COMMENT_RE = re.compile(r'/\*.+\*/', re.DOTALL)
    _REMOVE_COMMENT_LINE_RE = re.compile(r'(^|\n)[\s\t]*//.+(\n|$)')

    JS_TYPE = 'js'
    CSS_TYPE = 'css'

#___________________________________________________________________________________________________ __init__
    def __init__(self, compileCoffee =False):
        """Creates a new instance of IncludeCompressor.

        @@@param compileCoffee:bool When True, coffeescript files are compiled
            before compression is attempted.
        """
        self._log = Logger('IncludeCompressor')
        self._compileCoffee = compileCoffee

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ compress
    def compress(self, rootPath):
        """Compresses the file or directory at *rootPath*.

        @@@return bool False when the path does not exist; otherwise the result
            of compressFile() or compressPath().
        """
        if not self._fileExists(rootPath):
            return False
        elif os.path.isfile(rootPath):
            return self.compressFile(rootPath)
        else:
            return self.compressPath(rootPath)

#___________________________________________________________________________________________________ compressFile
    def compressFile(self, rootPath, directory =None):
        """Compresses a single file, first compiling coffeescript on its path
        when the instance was created with compileCoffee=True.

        @@@return bool False when the file is missing or compilation fails;
            otherwise the result of _compressFile().
        """
        if not self._fileExists(rootPath):
            return False

        if self._compileCoffee:
            try:
                from pyaid.web.coffeescript.CoffeescriptBuilder import CoffeescriptBuilder
                CoffeescriptBuilder.compileAllOnPath(rootPath, os.path.dirname(rootPath), True)
                self._log.write('Coffeescript compiled.')
            # BUGFIX: 'except Exception, err' is Python 2-only syntax and a
            # SyntaxError on Python 3; the 'as' form works on both
            except Exception as err:
                self._log.writeError('Failed to compile coffeescript file.', err)
                return False

        return self._compressFile(rootPath, directory)
def _createMeshPointNode(self, shapeData):
    """Creates a closestPointOnMesh node and connects it to the shape named in
    *shapeData*, returning True on success. Any previous node is removed first,
    and removed again on failure."""
    self._removeMeshPointNode()

    try:
        self._meshPointNode = cmds.createNode('closestPointOnMesh', skipSelect=True)
    except Exception as err:
        print(Logger.createErrorMessage(u'ERROR: Unable to create mesh point node', err))
        self._removeMeshPointNode()
        return False

    node = self._meshPointNode
    try:
        cmds.connectAttr(shapeData['name'] + '.message', node + '.inMesh', force=True)
    except Exception as err:
        print(Logger.createErrorMessage(u'ERROR: Unable to connect mesh point node to shape', err))
        self._removeMeshPointNode()
        return False

    return True
def __init__(
        self,
        fileName,
        siteMap,
        labelTracks=True,
        labelColor="black",
        session=None,
        showUncertainty=True,
        showCenters=True,
        **kwargs
):
    """ Creates a new instance of CadenceDrawing.

        Calls to the public functions line(), rect(), and others result in objects being
        added to the SVG canvas, with the file written by the save() method to specified
        fileName.

        The second argument, the siteMap is provided as an argument to establish the
        correspondence between the Maya scene and the site siteMap coordinates.
    """
    # An injected logger (kwargs['logger']) is used when provided; otherwise a
    # print-out logger is created for this drawing
    self._logger = kwargs.get("logger")
    if not self._logger:
        self._logger = Logger(self, printOut=True)

    # Bail out early when the site map has not finished loading; the drawing is
    # unusable in that state
    self.siteMapReady = siteMap.isReady
    if not self.siteMapReady:
        self._logger.write('[ERROR|CadenceDrawing]: Sitemap "%s-%s" not ready' % (
            siteMap.name, siteMap.level))
        return

    self.fileName = fileName
    self.siteMap = siteMap
    self.siteName = siteMap.name
    self.siteLevel = siteMap.level

    # Generally units can be specified in millimeters. In a few cases, however, (e.g.,
    # PolyLine) the coordinates must be unqualified integers (as px). The site maps, however
    # are in 'scaled mm'. Hence the need for a conversion factor pxPerMm. Unfortunately, the
    # conversion between px and mm is OS-dependent. The conversion from px to inches is 72 for
    # Apple but 90 more generally.
    ppi = 72 if OsUtils.isMac() else 90
    self.pxPerMm = ppi / 25.4

    # specify the width and height explicitly in mm, and likewise for the background rect
    left = siteMap.left * mm
    top = siteMap.top * mm
    width = siteMap.width * mm
    height = siteMap.height * mm

    self._drawing = svgwrite.Drawing(
        fileName, profile="tiny", size=(width, height), stroke=svgwrite.rgb(0, 0, 0))
    # Transparent background rect covering the full map extent
    self._drawing.add(self._drawing.rect((left, top), (width, height), opacity="0"))

    # Named SVG groups created later on demand
    self.groups = dict()

    if labelTracks:
        self.labelTracks(
            color=labelColor,
            session=session,
            showUncertainty=showUncertainty,
            showCenters=showCenters)
def _getResult(self, function, passErrors =None):
    """Executes *function* (a query callable), retrying on failure.

    SLAVE models are read-only, so the table is locked in read mode only to
    prevent lock collisions. MASTER models query for updates, so the stronger
    update lockmode is used to prevent read collisions.

    @@@param function:callable The cursor action to execute.
    @@@param passErrors:tuple Exception types that should propagate to the
        caller instead of being retried.

    @@@return The (possibly lock-moded) query result, or None when every
        attempt fails.
    """
    # BUGFIX: a None passErrors previously made the `except passErrors` clause
    # raise "catching classes that do not inherit from BaseException is not
    # allowed" whenever any exception occurred, defeating the retry logic
    if passErrors is None:
        passErrors = ()

    def attempt():
        # Single cursor attempt with the lockmode appropriate to the model role
        result = function()
        if self._lock:
            if self._modelClass.IS_MASTER:
                result = result.with_lockmode("update")
            else:
                result = result.with_lockmode("read")
        return result

    # 3 iterations is significant to SQLAlchemy.
    for i in range(3):
        try:
            return attempt()
        except passErrors:
            raise
        except Exception as err:
            # noinspection PyProtectedMember
            self._modelClass._log.writeError(
                '[%s] BAD CURSOR ACTION: %s' % (str(i), str(function)), err)
        # Sleeps away collisions.
        time.sleep(1)

    # Final attempt: failure here is logged and reported as None
    try:
        return attempt()
    except passErrors:
        raise
    except Exception as err:
        # noinspection PyProtectedMember
        self._modelClass._log.writeError('FAILED CURSOR ACTION: %s' % str(function), err)
        return None
def __init__(self, template, rootPath, data =None, logger =None, minify =False, source =None):
    """Creates a new instance of the template renderer."""
    self._template = template
    self._rootDir = rootPath
    self._minify = minify
    self._source = source

    self._error = None
    self._errorMsg = ''
    self._result = None

    self._data = data if data else dict()
    self._log = logger if logger else Logger(self)
def unloadGeneralPlugin(force =False):
    """Unloads the Elixir General Plugin from Maya.

    @@@param force:bool Passed through to cmds.unloadPlugin.
    @@@return bool True when the plugin unloaded; False on failure.
    """
    try:
        #from elixir.plugins.general import ElixirGeneralPlugin
        if not cmds.unloadPlugin('ElixirGeneralPlugin', force=force):
            print('ERROR: Failed to unload the Elixir General Plugin')
            return False
    except Exception as err:
        print(Logger.createErrorMessage('ERROR: Failed to unload the Elixir General Plugin', err))
        return False
    return True
def getLogger(logIdentifier, kwargs=None, args=None, index=None, name='logger',
              extract=False, trace=None):
    """Retrieves a Logger from the supplied argument sources, falling back to a
    newly constructed Logger named *logIdentifier* when none is present."""
    lookup = ArgsUtils.extract if extract else ArgsUtils.get
    found = lookup(name, None, kwargs, args, index)

    if found is None:
        from pyaid.debug.Logger import Logger
        found = Logger(logIdentifier)

    if trace is not None:
        found.trace = trace
    return found
def __init__(self, path =None, logger =None):
    """Creates a new instance of TrackCsvImporter."""
    self._path = path
    self.created = []
    self.modified = []
    self.fingerprints = dict()
    self.remainingTracks = dict()
    if logger:
        self._logger = logger
    else:
        self._logger = Logger(self, printOut=True)
def __init__(self, logger =None):
    """Creates a new instance of TrackLinkConnector."""
    self.logger = logger if logger else Logger(self, printOut=True)

    # Search directions enabled by default; existing links are preserved
    self.searchNext = True
    self.searchPrev = True
    self.overrideExisting = False

    self.operatedTracks = []
    self.modifiedTracks = []
    self.trackLinkages = []
def __init__(self, contextRunner, socketHandler):
    """Creates a new instance of SystemSocketDaemon.

    @@@param contextRunner:DaemonRunner
        The running context that wraps this execution within a systemd service
        or orphan daemon.
    @@@param socketHandler:class
        The handler class used to service socket requests; its name is also
        used as the daemon's log identifier.
    """
    SystemDaemon.__init__(self, contextRunner, Logger(socketHandler.__name__))
    self._server = None
    self._socketHandler = socketHandler

    socketFile = contextRunner.uid + '.sock'
    self._socket = os.path.join(self.SOCKET_ROOT_PATH, socketFile)
def __init__(self, rootPath =None, recursive =True, **kwargs):
    """Creates a new instance of WidgetUiCompiler, resolving its root path from
    an absolute path, a relative resource path, or the environment default."""
    self._log = Logger(self)
    self._verbose = ArgsUtils.get('verbose', False, kwargs)
    self._recursive = recursive
    self._pythonPath = os.path.normpath(sys.exec_prefix)

    if not rootPath:
        self._rootPath = PyGlassEnvironment.getRootResourcePath()
    elif os.path.isabs(rootPath):
        self._rootPath = FileUtils.cleanupPath(rootPath, isDir=True)
    else:
        # Relative paths may use either the OS separator or '/'
        separator = os.sep if rootPath.find(os.sep) != -1 else '/'
        parts = rootPath.split(separator)
        self._rootPath = PyGlassEnvironment.getRootResourcePath(*parts, isDir=True)
def __init__(self, *args, **kwargs):
    """ Creates a new instance of MarkupTag.

        @@@param attributeSource:string
            If specified this will be used as the source attribute data for the tag. For
            parsed tags this will override the attribute data that was supplied in the
            actual tag definition text. However, in the procedural and/or independent cases
            where no attribute definition existed, this will take its place.
    """
    # Positional args[0..5] are fallbacks for the corresponding keyword names
    self._processor = ArgsUtils.get('processor', None, kwargs, args, 0)
    self._block = ArgsUtils.get('block', None, kwargs, args, 1)
    self._index = ArgsUtils.get('index', 0, kwargs, args, 2)
    tagName = ArgsUtils.get('tagName', None, kwargs, args, 3)
    self._procedural = ArgsUtils.get('procedural', False, kwargs, args, 4)
    attributes = ArgsUtils.get('attributes', None, kwargs, args, 5)
    self._independent = ArgsUtils.get('independent', False, kwargs)
    self._attrData = None
    self._attrsReady = False
    # Per-instance behavior overrides; None means "defer to the class default"
    self._voidTag = ArgsUtils.get('void', None, kwargs)
    self._leafTag = ArgsUtils.get('leaf', None, kwargs)
    self._isInsertsTag = ArgsUtils.get('inserts', None, kwargs)
    self._passthruTag = ArgsUtils.get('passthru', None, kwargs)
    self._renderOverride = ArgsUtils.get('renderOverride', None, kwargs)
    self._renderTemplate = ArgsUtils.get('renderTemplate', None, kwargs)
    self._replacementName = ArgsUtils.get('replacementName', None, kwargs)
    self._classMetadata = {}
    self._errors = []
    self._parent = ArgsUtils.get('parent', None, kwargs)
    self._replacement = ''
    self._offset = 0
    # The tag name is normalized to lowercase; the class TAG attribute is the fallback
    self._name = self.getClassAttr('TAG', '') if tagName is None else tagName.lower()
    if self._independent:
        # Independent tags have no processor, so they own their logger and take the
        # attribute source solely from the keyword argument
        self._log = Logger(self)
        self._attrs = AttributeData(
            self,
            ArgsUtils.get('attributeSource', u'', kwargs),
            attributes=attributes)
    else:
        self._log = self._processor.logger
        # Skip past the tag-name prefix in the source for parsed (non-procedural) tags;
        # the +3 presumably accounts for the tag delimiter characters — TODO confirm
        start = self._block.start + (0 if self._procedural else len(self.tagName) + 3)
        self._attrs = AttributeData(
            self,
            ArgsUtils.get('attributeSource', u'', kwargs) if self._procedural else
                self._processor.source[start:self._block.end-1],
            attributes=attributes)
def loadGeneralPlugin():
    """ Loads the elixir general plugin. Must be called within Maya. """
    # Short-circuit when the plugin is already loaded; pluginInfo may itself
    # fail, in which case loading is attempted anyway
    try:
        alreadyLoaded = cmds.pluginInfo('ElixirGeneralPlugin', loaded=True)
    except Exception:
        alreadyLoaded = False

    if alreadyLoaded:
        return True

    try:
        from elixir.plugins.general import ElixirGeneralPlugin
        cmds.loadPlugin(ElixirGeneralPlugin.__file__)
        return True
    except Exception as err:
        print(Logger.createErrorMessage('ERROR: Failed to load Elixir General Plugin', err))
        return False
def __init__(self, compiler, **kwargs):
    """Creates a new instance of ResourceCollector.

    Resolves the platform-specific resource target path from the compiler and
    recreates it empty.

    @@@param compiler The application compiler providing getBinPath().
    @@@raise EnvironmentError When running on an unsupported platform.
    """
    self._log = Logger(self)
    self._verbose = ArgsUtils.get('verbose', False, kwargs)
    self._compiler = compiler

    if OsUtils.isWindows():
        self._targetPath = self._compiler.getBinPath('resources', isDir=True)
    elif OsUtils.isMac():
        self._targetPath = self._compiler.getBinPath('resources', 'resources', isDir=True)
    else:
        # BUGFIX: on any other platform _targetPath was previously left unbound,
        # causing an obscure AttributeError/NameError at the os.path.exists call
        # below; fail fast with a clear message instead
        raise EnvironmentError('ResourceCollector supports only Windows and Mac platforms')

    # Recreate the target path empty for a clean collection run
    if os.path.exists(self._targetPath):
        shutil.rmtree(self._targetPath)
    if not os.path.exists(self._targetPath):
        os.makedirs(self._targetPath)
def __init__(self, compiler, **kwargs):
    """Creates a new instance of ResourceCollector.

    Resolves the platform-specific resource target path from the compiler and
    recreates it empty before collection begins.

    @@@param compiler The application compiler providing getBinPath().
    @@@raise EnvironmentError When running on an unsupported platform.
    """
    self._log = Logger(self, printOut=True)
    self._verbose = ArgsUtils.get('verbose', False, kwargs)
    self._compiler = compiler

    if OsUtils.isWindows():
        self._targetPath = self._compiler.getBinPath('resources', isDir=True)
    elif OsUtils.isMac():
        # Resource folder resides inside another resource folder so that the copying retains
        # the original directory structure
        self._targetPath = self._compiler.getBinPath('resources', 'resources', isDir=True)
        #self._targetPath = self._compiler.getBinPath('resources', isDir=True)
    else:
        # BUGFIX: on any other platform _targetPath was previously left unbound,
        # producing an obscure error at the os.path.exists call below; fail fast
        # with a clear message instead
        raise EnvironmentError('ResourceCollector supports only Windows and Mac platforms')

    # Recreate the target path empty for a clean collection run
    if os.path.exists(self._targetPath):
        shutil.rmtree(self._targetPath)
    if not os.path.exists(self._targetPath):
        os.makedirs(self._targetPath)
def __init__(self, parent, **kwargs):
    """Creates the thread, wiring its logger and registering the instance so it
    is not garbage collected before it completes."""
    QtCore.QThread.__init__(self, parent)
    self.userData = ArgsUtils.get('userData', None, kwargs)
    self._events = dict()

    log = Logger(self)
    log.trace = True
    log.addPrintCallback(self._handleLogWritten)
    self._log = log
    self._logger = log

    self._maxLogBufferSize = 0
    self._logBuffer = []
    self._returnCode = None
    self._output = None
    self._error = None
    self._explicitComplete = ArgsUtils.get('explicitComplete', False, kwargs)

    # Add the thread to the static active thread storage so that it won't be garbage
    # collected until the thread completes.
    self.__class__._ACTIVE_THREAD_STORAGE.append(self)
    self._connectSignals(**kwargs)
def __init__(self, request, client_address, server):
    """Creates the socket handler for a single incoming request.

    The logger is set up before delegating to the base class because
    StreamRequestHandler.__init__ runs the full request lifecycle (including
    handle()) as part of construction.
    """
    self._log = Logger(self)
    self._log.write('Socket handler created')
    SocketServer.StreamRequestHandler.__init__(self, request, client_address, server)
def __init__(self):
    """Creates a new instance of DataFormatConverter."""
    self._log = Logger('DataFormatConverter')
    self._type = None
    self._src = None
    self._path = None
class CoffeescriptBuilder(object):
    """Assembles Coffeescript build targets (exec and lib files) by resolving their
    import/require/include declarations, concatenating the dependencies into an
    intermediate .ccs assembly file, and compiling that file to Javascript with the
    external `coffee` compiler. Also rewrites class references to go through the
    VIZME registry and warns about possible missing imports."""

#===================================================================================================
#                                                                                       C L A S S

    # Matches a Coffeescript class declaration and captures the class name
    CLASS_PATTERN = '^[\s\t]*class[\s\t]+(?P<class>[^\s\t\r\n]+)[\s\t]*'

    # Heuristic: a capitalized identifier in expression position that may be an
    # unimported class reference
    MISSING_CLASS_PATTERN = '[\s\t\(\[\{\!]+(?=[A-Z])(?P<class>[A-Za-z0-9_]+)(?P<next>[^A-Za-z0-9_]+)'

    _WARN_ID_MISSING_IMPORT = 'MISSING-IMPORT'

    # Identifiers that are globally available in the runtime environment and are
    # therefore excluded from missing-import warnings
    _GLOBAL_CLASSES = [
        'SFLOW', 'PAGE', 'FB', 'Math', 'JSON', 'String', 'ActiveXObject', 'Date',
        'DOMParser', 'RegExp', 'Object', 'Number', 'Array', 'Function', 'XMLHttpRequest']

    # Class-level accumulators used by the static compileAllOnPath() entry point
    _results = None
    _missing = None

#___________________________________________________________________________________________________ __init__
    def __init__(self, targetPackageOrPath, rootPath, verbose=True, debug=False, trace=False,
                 force=False, compress=False, buildOnly=False):
        """Creates a new instance of CoffeescriptBuilder.

        @param targetPackageOrPath: Either a CoffeescriptDependency or a package
            name/path string identifying the target(s) to build. When the target
            does not resolve to a single existing file, every exec and lib script
            found in the target's package path becomes a build target.
        @param rootPath: Root path against which dependency packages are resolved.
        @param verbose: When True, progress information is printed/logged.
        @param debug: When True, per-line debug callbacks are injected into output.
        @param trace: When True, very verbose per-line trace logging is emitted.
        @param force: When True, dependency caches are ignored and recompiled.
        @param compress: When True, compiled output is compressed after build.
        @param buildOnly: When True, assembly occurs but the coffee compiler is not run.
        """
        self.buildOnly = buildOnly

        self._imports = dict()
        self._requires = dict()
        self._includes = dict()
        self._report = dict()
        self._warnings = []
        self._dependencyReport = dict()
        self._verbose = verbose
        self._log = Logger(self, printOut=True)
        self._trace = trace
        self._debug = debug
        self._targets = []
        self._force = force
        self._compress = compress
        self._rootPath = rootPath

        # Normalize the input to a CoffeescriptDependency
        if not isinstance(targetPackageOrPath, CoffeescriptDependency):
            target = CoffeescriptDependency(targetPackageOrPath, rootPath, None)
        else:
            target = targetPackageOrPath

        if target.exists:
            self._targets.append(target)
        else:
            csFiles = CoffeescriptBuilder.getScriptsInPath(target.packagePath)

            # Look for exec matches first
            for f in csFiles:
                testTarget = CoffeescriptDependency(f, rootPath, None)
                if testTarget.isExec:
                    self._targets.append(testTarget)

            # Look for lib matches second. Lib matches are tested as a second pass because
            # constructing all exec files first potentially optimizes the import process for
            # the libraries.
            for f in csFiles:
                testTarget = CoffeescriptDependency(f, rootPath, None)
                if testTarget.isLib:
                    self._targets.append(testTarget)

        if len(self._targets) == 0:
            print('\n\n')
            self._log.write('No targets exist for: %s. Compilation aborted.' % targetPackageOrPath)
            print('\n')

#===================================================================================================
#                                                                                   G E T / S E T

#___________________________________________________________________________________________________ GS: report
    @property
    def report(self):
        # Per-package error counts: -1 assembly failure, 0 success, >0 compile errors
        return self._report

#___________________________________________________________________________________________________ GS: warnings
    @property
    def warnings(self):
        # List of warning dicts (id/class/line/package) collected during compilation
        return self._warnings

#___________________________________________________________________________________________________ GS: imports
    @property
    def imports(self):
        return self._imports

#___________________________________________________________________________________________________ GS: requires
    @property
    def requires(self):
        return self._requires

#___________________________________________________________________________________________________ GS: includes
    @property
    def includes(self):
        return self._includes

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ construct
    def construct(self):
        """Builds every target, dispatching libs and execs to their respective
        assembly routines, optionally compressing the compiled output.

        @return: The list of build targets (with self._report populated per package).
        """
        for t in self._targets:
            # -1 marks assembly failure until compilation reports a real error count
            self._report[t.package] = -1
            if t.isLib:
                self._constructLibrary(t)
            else:
                self._constructTarget(t)

            if self._compress:
                print('COMPRESSING:', t.package)
                from pyaid.web.coffeescript.IncludeCompressor import IncludeCompressor
                ic = IncludeCompressor()
                if not ic.compressFile(t.compiledPath):
                    print('COMPRESSION FAILURE:', t.compiledPath)

        return self._targets

#___________________________________________________________________________________________________ compileAllOnPath
    @staticmethod
    def compileAllOnPath(path, rootPath=None, recursive=False, debug=False, trace=False,
                         force=False, compress=False):
        """Compiles every exec/lib script found on the given path, optionally
        walking subdirectories, and prints an aggregate results report."""
        CoffeescriptBuilder._results = ''
        CoffeescriptBuilder._missing = {}
        if recursive:
            print('RECURSIVE COMPILE AT: ' + path)

            # Visitor for FileUtils.walkPath; accumulates results and unique
            # missing-import warnings into the class-level stores.
            def walker(paths, dirName, names):
                out = CoffeescriptBuilder._compileAllInDirectory(
                    os.path.join(paths[0], dirName), paths[1], debug=debug, trace=trace,
                    force=force, compress=compress)
                CoffeescriptBuilder._results += out['res']
                for n, v in DictUtils.iter(out['missing']):
                    if n in CoffeescriptBuilder._missing:
                        continue
                    CoffeescriptBuilder._missing[n] = v

            FileUtils.walkPath(path, walker, [path, rootPath])
            print('\n\nCOMPILATION RESULTS:' + CoffeescriptBuilder._results)

            if CoffeescriptBuilder._missing:
                print('\n\nMISSING IMPORTS:' + '\n\n')
                for n, v in DictUtils.iter(CoffeescriptBuilder._missing):
                    print(v['class'] + ' [LINE: #' + str(v['line']) + ' | ' + v['package'] + ']')
        else:
            print('COMPILING DIRECTORY: ' + path)
            CoffeescriptBuilder._compileAllInDirectory(
                path, rootPath, debug=debug, trace=trace, force=force, compress=compress)

#___________________________________________________________________________________________________ getScriptsInPath
    @staticmethod
    def getScriptsInPath(path):
        """Returns the absolute paths of all Coffeescript files directly in path
        (non-recursive), matched by extension case-insensitively."""
        files = []
        for f in os.listdir(path):
            if f.lower().endswith('.' + CoffeescriptDependency.EXTENSION):
                files.append(os.path.join(path, f))
        return files

#===================================================================================================
#                                                                               P R O T E C T E D

#___________________________________________________________________________________________________ _constructLibrary
    def _constructLibrary(self, target):
        """Assembles and compiles a library target: resolves which imports and
        includes belong to the library versus which are shared with (and already
        provided by) its exec targets."""
        try:
            if self._verbose:
                print("\n\n" + ('-' * 100) + '\n')
                self._log.add('LIBRARY: %s\n\tsource: %s\n\troot: %s'
                              % (target.package, target.path, target.rootPath))

            #---------------------------------------------------------------------------------------
            # Compile all includes using library data
            targets, imports, modules, includes = self._getLibraryData(target)

            # Process requires for all of the targets
            for t in (targets + imports + modules):
                self._processRequires(t)

            #---------------------------------------------------------------------------------------
            # IMPORTS
            # Compile all excludes skipping any exec or lib files that are listed in the import
            # statements.
            importExcludes = []
            for t in targets:
                for imp in self._imports[t.package]:
                    if not (imp.isExec or imp.isLib or imp.isInList(importExcludes)):
                        importExcludes.append(imp)

            # Compile all imports needed for the library. Any excludes are added to the shared
            # library to be made accessible via the VIZME registry.
            libImports = []
            sharedImports = []
            for t in (imports + modules):
                for imp in self.imports[t.package]:
                    if not imp.isInList(libImports):
                        if imp.isInList(importExcludes):
                            if not imp.isInList(sharedImports):
                                sharedImports.append(imp)
                        else:
                            libImports.append(imp)
            libImports.append(target)

            #---------------------------------------------------------------------------------------
            # INCLUDES
            # Compile all includes to exclude from the library because they already exist in a
            # target.
            includeExcludes = []
            for t in targets:
                for inc in self._includes[t.package]:
                    if not inc.isInList(includeExcludes):
                        includeExcludes.append(inc)

            # Compile all includes needed for the library.
            libIncludes = []
            sharedIncludes = []

            # Add the top-level includes directly because they are not handled implicitly like
            # the import case
            for inc in includes:
                if inc.isInList(includeExcludes):
                    sharedIncludes.append(inc)
                else:
                    libIncludes.append(inc)

            for t in (imports + modules):
                for inc in self.includes[t.package]:
                    if not inc.isInList(libIncludes):
                        if inc.isInList(includeExcludes):
                            if not inc.isInList(sharedIncludes):
                                sharedIncludes.append(inc)
                        else:
                            libIncludes.append(inc)

            if self._verbose:
                print('\n')
                s = 'IMPORTING:'
                for imp in libImports:
                    s += '\n\t' + imp.package
                for inc in libIncludes:
                    s += '\n\tEXTERNAL: ' + inc.package
                self._log.add(s)

                print('\n')
                s = 'EXCLUDING:'
                for imp in sharedImports:
                    s += '\n\t' + imp.package
                for inc in sharedIncludes:
                    s += '\n\tEXTERNAL: ' + inc.package
                self._log.add(s)

            #---------------------------------------------------------------------------------------
            # Construct intermediate compilation file.
            assembledFile = self._assembleFile(
                target, libImports, sharedImports, {'modules': modules})
            if assembledFile is None:
                self._log.write('ERROR: File assembly failed.')
                return

            #---------------------------------------------------------------------------------------
            # Compile to Javascript
            if not self.buildOnly:
                self._compileToJavascript(target, assembledFile, libIncludes)

            if self._verbose:
                print("\n" + ('-' * 100) + '\n')
        except Exception as err:
            print("\n\n\n")
            self._log.writeError(
                'ERROR: Compilation failure for: %s\n\tsource: %s\n\troot: %s'
                % (target.package, target.path, target.rootPath), err)

#___________________________________________________________________________________________________ _constructTarget
    def _constructTarget(self, target):
        """Assembles and compiles an exec target: parses its includes/requires,
        builds the intermediate file, and runs the coffee compiler on it."""
        try:
            if self._verbose:
                print("\n\n" + ('-' * 100) + '\n')
                self._log.write('EXECUTABLE: %s\n\tsource: %s\n\troot: %s'
                                % (target.package, target.path, target.rootPath))

            #---------------------------------------------------------------------------------------
            # Handle imports and requires
            self._parseIncludes(target)
            self._processRequires(target)

            if self._verbose:
                s = 'IMPORTING:'
                for imp in self._imports[target.package]:
                    s += '\n\t' + imp.package
                self._log.write(s)

            #---------------------------------------------------------------------------------------
            # Construct intermediate compilation file.
            assembledFile = self._assembleFile(target)
            if assembledFile is None:
                self._log.write('ERROR: File assembly failed.')
                return

            #---------------------------------------------------------------------------------------
            # Compile to Javascript
            if not self.buildOnly:
                self._compileToJavascript(target, assembledFile)

            if self._verbose:
                print("\n" + ('-' * 100) + '\n')
        except Exception as err:
            print("\n\n\n")
            self._log.writeError(
                'ERROR: Compilation failure for: %s\n\tsource: %s\n\troot: %s'
                % (target.package, target.path, target.rootPath), err)

#___________________________________________________________________________________________________ _createOutputFile
    def _createOutputFile(self, target):
        """Creates the output ccs assembly file for writing.

        @return: An open writable file object, or None when the file cannot be opened.
        """
        outFile = target.assembledPath
        try:
            return open(outFile, 'w')
        except Exception as err:
            print("\n\n")
            self._log.write('Unable To Open output file: ' + str(outFile) + '\n'
                'Check to make sure you have write permissions to that directory.')
            return None

#___________________________________________________________________________________________________ _writeRegistryEntry
    def _writeRegistryEntry(self, out, cacheOut, entry):
        """Writes a pending VIZME registry entry to the output (and cache, when
        active) and returns None so the caller can clear its pending entry."""
        # If there is an unconsumed registryEntry write it.
        if not entry:
            return None
        s = '\n' + entry + '\n'
        out.write(s)
        if cacheOut:
            cacheOut.write(s)
        return None

#___________________________________________________________________________________________________ _assembleFile
    def _assembleFile(self, target, importOverride=None, replacements=None, assembleData=None):
        """Concatenates the target's dependencies into the intermediate assembly
        file, using cached compiled sources where allowed.

        @param importOverride: Optional explicit dependency list replacing the
            target's parsed imports (used for library assembly).
        @param replacements: Optional extra dependencies whose class references
            should be retargeted through the registry.
        @param assembleData: Currently unused by this method body.
        @return: The assembled file's path, or None on failure.
        """
        #-------------------------------------------------------------------------------------------
        # CREATE FILE
        # Creates the file to write
        out = self._createOutputFile(target)
        if not out:
            # NOTE(review): this calls the Logger instance directly rather than
            # self._log.write(...) as everywhere else — confirm Logger defines __call__,
            # otherwise this error path raises a TypeError.
            self._log('ERROR: Unable to create output file')
            return

        #-------------------------------------------------------------------------------------------
        # DEFINE IMPORTS
        # Specify the files to import. For exec files the default packages are included, for
        # libraries these are overridden based on library target dependencies.
        targetImports = self._imports[target.package] if importOverride is None else importOverride
        replacements = replacements if isinstance(replacements, list) else []
        classList = []

        #-------------------------------------------------------------------------------------------
        # Note the last dependency so that the glue script can be appended prior
        # NOTE(review): lastDep is never used below — possible leftover.
        lastDep = targetImports[-1]

        #-------------------------------------------------------------------------------------------
        # DEPENDENCY ASSEMBLY LOOP
        print('\n')
        for dep in targetImports:
            dep.open()
            if self._force or not dep.useCache:
                if not self._compileDependency(dep, out, replacements, targetImports, classList):
                    return None
                continue

            self._log.write('\tFROM CACHE: ' + dep.package)
            out.write(dep.cacheSource)
            dep.close()

        out.close()

        if self._verbose:
            print('\n')
            self._log.add('CONSTRUCTED: ' + out.name)

        return out.name

#___________________________________________________________________________________________________ _compileDependency
    def _compileDependency(self, dep, out, replacements, targetImports, classList):
        """Compiles a single dependency into the assembly output: retargets class
        references through the VIZME registry, records class definitions, warns on
        possible missing imports, and (in debug mode) injects per-line debug hooks.

        @return: True on success, False when the dependency file is missing.
        """
        classPattern = re.compile(CoffeescriptBuilder.CLASS_PATTERN)
        missingPattern = re.compile(CoffeescriptBuilder.MISSING_CLASS_PATTERN)

        #-------------------------------------------------------------------------------------------
        # MISSING DEPENDENCIES
        # Handle missing dependencies
        if not os.path.exists(dep.path):
            print("\n\n")
            self._log.write('ERROR: ' + dep.package + ' package does not exist at: ' + dep.path)
            return False

        # Per-line parser state
        lastWhitespace = ''
        openParens = 0
        openBrackets = 0
        openBraces = 0
        skipNextLine = False
        methodName = ''
        className = ''
        registryEntry = None

        raw = dep.source
        dep.close()

        # Banner comment separating this dependency within the assembled output
        s = '\n\n\t#' + ('%' * 100) + '\n\t#' + ('%' * 100) + '\n#\t\t' + dep.package + '\n'
        out.write(s)

        if dep.allowCaching:
            cacheOut = open(dep.cachePath, 'w')
            cacheOut.write(s)
        else:
            # Best-effort removal of a stale cache file
            try:
                if os.path.exists(dep.cachePath):
                    os.remove(dep.cachePath)
            except Exception as err:
                pass
            cacheOut = None

        self._log.write('\tCOMPILING: ' + dep.package)

        analyzer = CoffeescriptAnalyzer(raw, debug=self._debug)
        analyzer.analyze()

        #---------------------------------------------------------------------------------------
        # COMPILE
        # Line by line compile to ccs output file
        for l in analyzer:

            #-----------------------------------------------------------------------------------
            # RETARGET CLASS ACCESSORS TO VIZME registry
            # All classes (except internal class references) are made to
            # VIZME registry ClassName to prevent class conflicts.
            for rep in replacements + targetImports:
                if rep != dep:
                    offset = 0
                    res = rep.searchPattern.finditer(l.redacted)
                    for r in res:
                        start = r.start() + offset
                        end = r.end() + offset
                        if self._trace:
                            self._log.write('RETARGET: ' + l.source[start:end]
                                            + ' | ' + str(r.groupdict()))

                        # Make the replacement and adjust offsets for additional replacements
                        l.insert(start, end, rep.registryName)
                        offset += len(rep.registryName) - end + start

            #-----------------------------------------------------------------------------------
            # IDENTIFY CLASS DEFINITIONS
            # Find class definitions so they can be added to the VIZME registry.
            res = classPattern.search(l.redacted)
            if res:
                registryEntry = self._writeRegistryEntry(out, cacheOut, registryEntry)
                className = res.group('class').strip()
                registryEntry = '\n%s.%s ?= %s' % (
                    CoffeescriptDependency.REGISTRY, className, className)
                classList.append(className)

            #-----------------------------------------------------------------------------------
            # CHECK FOR MISSING CLASSES
            # Search and find any missing class imports. If a possible missing import is found
            # flag it in the response.
            # NOTE(review): finditer always returns an iterator, so this `if res:` is
            # always truthy; it is harmless but does not short-circuit anything.
            res = missingPattern.finditer(l.redacted)
            if res:
                for r in res:
                    cn = r.group('class').strip()
                    start = r.start()

                    if cn == className:
                        continue

                    # Ignore anything in all CAPS!
                    if cn.upper() == cn:
                        continue

                    # Ignore globally defined objects and classes
                    if cn in CoffeescriptBuilder._GLOBAL_CLASSES + analyzer.globalObjects:
                        continue

                    self._warnings.append({
                        'id': CoffeescriptBuilder._WARN_ID_MISSING_IMPORT,
                        'class': cn,
                        'line': l.lineNumber,
                        'package': dep.package})
                    print('\n')
                    self._log.write(
                        'WARNING: Possible missing import\n\tmissing: %s\n\tfrom: %s [line #%s]'
                        % (cn, dep.package, str(l.lineNumber)))

            #-----------------------------------------------------------------------------------
            # LINE DEBUGGER ANALYSIS
            c = l.redacted.strip()
            skip = skipNextLine or not l.isSignificant
            skipNextLine = False

            # Skip lines that cannot take an injected debug call (continuations,
            # control-flow keywords, operators, etc.)
            if not skip:
                skips = [
                    'class', 'try', 'catch', 'else', 'when', '.', '+', '-', '/', '=', '*',
                    ',', 'and', 'or']
                for s in skips:
                    if c.startswith(s):
                        skip = True
                        break

            # Function definition lines are skipped, but the method name is captured
            # for use in subsequent debug calls.
            if not skip:
                skips = ['->', '=>']
                methodPattern = re.compile('^(?P<method>[^:]+)')
                for s in skips:
                    if c.endswith(s):
                        skip = True
                        res = methodPattern.search(c)
                        if res and res.group('method'):
                            methodName = res.group('method')
                        elif c.startswith('$'):
                            methodName = '$'
                        break

            # Check for line continuations
            if l.isSignificant:
                skips = ['.', '+', '-', '/', '=', '*', ',', 'and', 'or']
                for s in skips:
                    if c.endswith(s):
                        skipNextLine = True
                        break

            if self._trace:
                # NOTE(review): 'brackets' logs str(openBraces) instead of
                # str(openBrackets) — looks like a copy-paste typo in the trace output.
                self._log.write(
                    c.replace('\n', '') + ('\n\t@@@@ skip: ' + str(skip)
                    + '\n\t@@@@ parens: ' + str(openParens)
                    + '\n\t@@@@ braces: ' + str(openBraces)
                    + '\n\t@@@@ brackets: ' + str(openBraces)
                    + '\n\t@@@@ skipNext: ' + str(skipNextLine)))

            # Only inject a debug call on top-level statements (no open groupings)
            if self._debug and not skip and openParens == 0 and openBraces == 0 and openBrackets == 0:
                debugLine = 'window.___vmiDebug(\'%s\', \'%s\', \'%s\', %s)\n' % (
                    dep.package, className, methodName, str(l.lineNumber))

                # NOTE(review): indent is computed but never used below.
                indent = len(l.indent) > len(lastWhitespace)
                dedent = len(l.indent) < len(lastWhitespace)

                skips = [')', ']', '}']
                skip = False
                for s in skips:
                    if c.startswith(s):
                        skip = True
                        break

                # Keep the previous indentation on dedented closing lines
                # NOTE(review): the assignment below is a deliberate-looking no-op.
                if dedent and skip:
                    lastWhitespace = lastWhitespace
                else:
                    lastWhitespace = l.indent

                codePattern = re.compile('(?P<code>[^\s\t\n]+)')
                res = codePattern.search(c)
                if not res or len(res.groupdict()['code']) == 0:
                    if self._trace:
                        self._log.write('EMPTY: "' + c + '"')
                    debugLine = ''

                l.insert(0, 0, l.indent + debugLine)

            # Track grouping depth across lines so debug injection only happens at
            # statement level
            if l.isSignificant:
                openParens += l.redacted.count('(') - l.redacted.count(')')
                openBrackets += l.redacted.count('[') - l.redacted.count(']')
                openBraces += l.redacted.count('{') - l.redacted.count('}')

            #---------------------------------------------------------------------------------------
            # WRITE MODIFIED OUTPUT
            out.write(l.source)
            if cacheOut:
                cacheOut.write(l.source)

        self._writeRegistryEntry(out, cacheOut, registryEntry)

        if cacheOut:
            cacheOut.close()

        return True

#___________________________________________________________________________________________________ _compileToJavascript
    def _compileToJavascript(self, target, assembledFile, jsIncludeOverrides=None):
        """Runs the external `coffee` compiler on the assembled file and records the
        error count for the target package in self._report.

        NOTE(review): jsIncludeOverrides is accepted but never used in this body.
        """
        # Use the Coffeescript compiler to create a JS compilation of the assembled CS file
        result = SystemUtils.executeCommand(['coffee', '-c', '--bare', assembledFile])
        status = result['code']
        output = result['out']
        errors = 0
        forceVerbose = False

        #-------------------------------------------------------------------------------------------
        # ERROR HANDLING
        # Check the error status of the compilation process and if a failure occurred parse the
        # error results for display and logging.
        if status:
            outputLines = str(output).replace('\r', '').split('\n')

            for line in outputLines:
                if line.startswith('Error:') or line.startswith('SyntaxError:'):
                    errors += 1
                    result = CoffeescriptBuilder._parseError(line)
                    if result:
                        self._log.add(result)
                    else:
                        forceVerbose = True

        if forceVerbose:
            self._log.add(output)

        self._report[target.package] = errors

        if self._verbose:
            print("\n\n")
            if errors == 0 and status == 0:
                self._log.write('Compilation complete: ' + target.compiledPath)
            else:
                self._log.write('Compilation FAILED: ' + target.package)

        # NOTE(review): the compiled file is read into `res` and then discarded —
        # possible leftover from removed post-processing.
        f = open(target.compiledPath, 'r')
        res = f.read()
        f.close()

#___________________________________________________________________________________________________ _parseIncludes
    def _parseIncludes(self, target, rootTarget=None):
        """Recursively parses the import/require/include declarations of target,
        accumulating them into the rootTarget's dependency lists. The target itself
        is appended to its root's import list last (post-order)."""
        if rootTarget is None:
            rootTarget = target

        if not rootTarget.package in self._imports:
            self._imports[rootTarget.package] = []

        if not rootTarget.package in self._requires:
            self._requires[rootTarget.package] = []

        if not rootTarget.package in self._includes:
            self._includes[rootTarget.package] = []

        if not os.path.exists(target.path):
            print("\n")
            self._log.add('WARNING: Missing import.\n\tPACKAGE: ' + target.package + '\n\tFILE: '
                + target.path)
            print("\n")
            return

        f = open(target.path)
        for line in f:

            # import parse
            dependency = CoffeescriptDependency.createImport(line, self._rootPath)
            if dependency and not dependency.isInList(self._imports[rootTarget.package]):
                self._parseIncludes(dependency, rootTarget)
                self._imports[rootTarget.package].append(dependency)
                continue

            # require parse
            # NOTE(review): the duplicate check here tests the imports list, not the
            # requires list the dependency is appended to — confirm this is intended.
            dependency = CoffeescriptDependency.createRequire(line, self._rootPath)
            if dependency and not dependency.isInList(self._imports[rootTarget.package]):
                self._requires[rootTarget.package].append(dependency)
                continue

            # include parse
            dependency = CoffeescriptDependency.createInclude(line, self._rootPath)
            if dependency and not dependency.isInList(self._includes[rootTarget.package]):
                self._includes[rootTarget.package].append(dependency)
                continue
        f.close()

        self._imports[rootTarget.package].append(target)

#___________________________________________________________________________________________________ _processRequires
    def _processRequires(self, target):
        """Expands the target's queued require declarations into its import list,
        de-duplicating entries while keeping the target itself as the final import."""
        currentTarget = self._imports[target.package].pop()
        while len(self._requires[target.package]) > 0:
            self._parseIncludes(self._requires[target.package].pop(0), target)

        outlist = []
        for item in self._imports[target.package]:
            if not item.isInList(outlist) and not item.compare(currentTarget):
                outlist.append(item)
        self._imports[target.package] = outlist
        self._imports[target.package].append(currentTarget)

#___________________________________________________________________________________________________ _getLibraryData
    def _getLibraryData(self, target):
        """Parses a library definition file, classifying each declaration line by
        dependency type and recursively parsing each one's own includes.

        @return: Tuple of (targets, imports, modules, includes) dependency lists.
        """
        targets = []
        modules = []
        imports = []
        includes = []

        src = open(target.path, 'r')
        for line in src:

            # target parse
            d = CoffeescriptDependency.create(line, self._rootPath)
            if not d:
                continue

            if d.dependencyType == CoffeescriptDependency.TARGET_TYPE:
                targets.append(d)
            elif d.dependencyType == CoffeescriptDependency.IMPORT_TYPE:
                imports.append(d)
            elif d.dependencyType == CoffeescriptDependency.REQUIRE_TYPE:
                imports.append(d)
            elif d.dependencyType == CoffeescriptDependency.INCLUDE_TYPE:
                includes.append(d)
            elif d.dependencyType == CoffeescriptDependency.MODULE_TYPE:
                modules.append(d)
            else:
                continue

            self._parseIncludes(d)
        src.close()

        return targets, imports, modules, includes

#___________________________________________________________________________________________________ _compileAllInDirectory
    @staticmethod
    def _compileAllInDirectory(path, rootPath=None, debug=False, trace=False, force=False,
                               compress=False):
        """Builds every exec/lib script directly in path and returns an aggregate
        report: {'res': formatted results string, 'missing': unique missing-import
        warnings keyed by package-class-line}."""
        results = ''
        missing = {}
        count = 0
        for f in CoffeescriptBuilder.getScriptsInPath(path):
            target = CoffeescriptDependency(f, rootPath)
            if not (target.exists and (target.isExec or target.isLib)):
                continue

            c = CoffeescriptBuilder(target, rootPath, debug=debug, trace=trace, force=force,
                                    compress=compress)
            c.construct()
            count += 1
            for n, v in DictUtils.iter(c.report):
                num = max(0, 60 - len(n))
                results += '\n' + n + ':' + ('.' * num)
                if v == 0:
                    results += 'SUCCESS'
                elif v > 0:
                    results += 'COMPILATION FAILED'
                else:
                    results += 'ASSEMBLY FAILED'

            if len(c.warnings) > 0:
                results += '[' + str(len(c.warnings)) + ' WARNINGS]'
                for v in c.warnings:
                    if not v['id'] == CoffeescriptBuilder._WARN_ID_MISSING_IMPORT:
                        continue

                    key = v['package'] + '-' + v['class'] + '-' + str(v['line'])
                    if key in missing:
                        continue
                    missing[key] = v

        if len(results) > 0:
            print('\nDIRECTORY ' + path + ' COMPILE RESULTS [' + str(count) + ']:' + results)

        return {'res': results, 'missing': missing}

#___________________________________________________________________________________________________ _parseError
    @staticmethod
    def _parseError(error):
        """
        Parses errors of the format:
        "Error: In /vizme2/website/js/vmi/blog/author/exec.ccs, Parse error on line 181:
        Unexpected 'INDENT'"

        @return: A formatted error report string including a source trace when the
            source file and line number could be determined, or None when no line
            number was found in the error text.
        """
        ccsFile = None

        prefixReplacements = ['SyntaxError: In ', 'Error: In ']
        for p in prefixReplacements:
            error = error.replace(p, '')

        out = '\n-----------------------------------------------\nERROR: '

        # NOTE(review): if error.index(',') raises, `sep` stays unbound and the
        # fallback branch below (`if error and sep`) raises a NameError.
        try:
            sep = error.index(',')
            ccsFile = error[:sep]
        except Exception:
            pass

        try:
            sep2 = error.index(':')
            out += error[sep2 + 1:].strip() + '\n'
        except Exception:
            if error and sep:
                out += error[sep + 1:].strip() + '\n'

        pattern = re.compile('line[\s\t]+(?P<linenumber>[0-9]+)')
        res = pattern.search(error)
        if res and len(res.groups()) > 0:
            lineNumber = int(res.groups()[0]) - 1
        else:
            # NOTE(review): `out` is amended but the function returns None here,
            # discarding the message — presumably `return out` was intended; confirm.
            out += ' Unspecified location'
            return

        if ccsFile:
            padSize = len(str(lineNumber + 3))
            jQueryName = 'Exec Function (JQUERY Document ready)'
            functionName = None
            className = None
            trace = ''
            f = open(ccsFile, 'r')
            for i, line in enumerate(f):
                if i > lineNumber + 4:
                    break

                if i <= lineNumber:
                    # Track the most recent enclosing class definition
                    pattern = re.compile('^class[\s\t]+(?P<classname>[a-zA-Z0-9_]+)')
                    res = pattern.search(line)
                    if res and len(res.groups()) > 0:
                        className = res.groups()[0]
                        functionName = None

                    # Top-level jQuery document-ready exec block
                    pattern = re.compile('^\$[\s\t]*[-=]+>')
                    res = pattern.search(line)
                    if res:
                        className = jQueryName
                        functionName = None

                    # Track the most recent method/function definition
                    pattern = re.compile('[\s\t]*(?P<name>[a-zA-Z0-9_]+)[\s\t]*:[^-=>]*[-=]+>')
                    res = pattern.search(line)
                    if res and len(res.groups()) > 0:
                        functionName = res.groups()[0]

                # Include a few lines of context around the error line
                if i > lineNumber - 4:
                    marker = ">>" if i == lineNumber else " "
                    trace += marker + str(i).rjust(padSize) + '| ' + line
            f.close()

            if functionName:
                out += " " + ("METHOD" if className else "FUNCTION") + ": " + functionName + "\n"
            if className:
                out += " " + ("CLASS" if className != jQueryName else "EXEC") + ": " + className + "\n"
            out += " TRACE:\n" + trace

        return out + "\n"
def run(self):
    """Scatters point instances across the selected polygon transforms.

    Computes each selected shape's volume, weights the requested instance count
    by relative volume, and creates that many instances per shape. Any failure
    cleans up and reports an error result back to the caller.
    """
    self.saveSelection()
    count = self.fetch('count', 1000)
    self._size = self.fetch('size', 0.1)
    self._padding = self.fetch('padding', 0.0)
    transforms = cmds.ls(selection=True, type='transform')
    if not transforms:
        self._cleanup()
        self.putErrorResult(u'ERROR: No transforms selected')
        return

    shapes = []
    totalVolume = 0.0
    shapeCount = 0
    for transform in transforms:
        # BUGFIX: previously passed the whole `transforms` list, so every
        # transform's shapes were collected once per selected transform,
        # duplicating entries and corrupting the volume weighting.
        shapeNames = cmds.listRelatives(transform, shapes=True)
        if not shapeNames:
            continue
        for shape in shapeNames:
            try:
                box = TransformUtils.getBoundingBox(shape)
                cmds.select(shape, replace=True)
                shapeVolume = nimble.executeMelCommand('computePolysetVolume')
                totalVolume += shapeVolume
                shapes.append(dict(
                    transform=transform,
                    name=shape,
                    box=box,
                    weight=float(shapeVolume)))
                shapeCount += 1
            except Exception as err:
                self._cleanup()
                NimbleEnvironment.logError(u'ERROR: Shape processing', err)
                self.putErrorResult(
                    u'ERROR: Unable to process selection item %s -> %s' % (transform, shape))
                return

    if shapeCount == 0:
        self._cleanup()
        self.putErrorResult(u'ERROR: No polygon transforms found in selection')
        return

    try:
        for shape in shapes:
            if not self._createMeshPointNode(shape):
                self._cleanup()
                print(u'ERROR: Creation failure')
                self.putErrorResult(u'ERROR: Unable to create point test node')
                # BUGFIX: previously fell through and kept processing after
                # reporting the error; abort instead.
                return

            # Convert absolute volume to a fraction of the total, then allocate a
            # proportional share of the requested instance count to this shape.
            shape['weight'] /= totalVolume
            shapeCount = int(round(float(count)*shape['weight']))
            for i in range(shapeCount):
                self._create(shape)
            self._removeMeshPointNode()
    except Exception as err:
        self._cleanup()
        print(Logger.createErrorMessage(u'ERROR: Creation failure', err))
        self.putErrorResult(u'ERROR: Unable to create random box')
        return

    self._cleanup()
printResult(u'Interpreter', u'PASSED')

# Check for PyAid. Uses `except ... as err` and parenthesized print, matching the
# Python 2/3 compatible syntax used throughout the rest of this codebase (the old
# `except Exception, err` form is a syntax error on Python 3).
try:
    from pyaid.ArgsUtils import ArgsUtils
    printResult(u'PyAid', u'PASSED')
except Exception as err:
    printResult(u'PyAid', u'FAILED')
    print(u'Unable to continue without PyAid')
    raise err

from pyaid.debug.Logger import Logger
from pyaid.file.FileUtils import FileUtils
from pyaid.system.SystemUtils import SystemUtils

logger = Logger('environmentCheck', printOut=True)

# Check for Qt 4.X by looking for an installed Qt4.* directory
foundLocation = None
for p in os.listdir(u'/usr/local/'):
    if p.startswith(u'Qt4.'):
        foundLocation = p
printResult(
    u'Qt (%s)' % (foundLocation if foundLocation else u'4.x'),
    u'PASSED' if foundLocation else u'FAILED')

# Check for PySide system dynamic libraries
paths = []
for p in os.listdir(u'/usr/lib'):
    if p.endswith(u'.dylib') and p.startswith(u'libpyside-'):
        paths.append(p)
class DataFormatConverter(object):
    """A class for converting between various data interchange formats, e.g. XML and JSON."""

#===================================================================================================
#                                                                                       C L A S S

#___________________________________________________________________________________________________ __init__
    def __init__(self):
        """Creates a new instance of DataFormatConverter with no source loaded."""
        self._type = None
        self._src = None
        self._log = Logger('DataFormatConverter')
        self._path = None

#===================================================================================================
#                                                                                   G E T / S E T

#___________________________________________________________________________________________________ GS: source
    @property
    def source(self):
        """The currently loaded source string, or None when nothing is loaded."""
        return self._src

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ load
    def load(self, path, fileType):
        """Loads a source file of the given type from path.

        @return: True on success, False when the path is missing or unreadable.
        """
        if not os.path.exists(path):
            self._log.write('ERROR: Path does not exist [%s]. Unable to load.' % path)
            return False

        try:
            fh = codecs.open(path, 'r', 'utf-8')
            res = fh.read()
            fh.close()
            enc = res.encode('utf-8')
            self.loads(enc, fileType)
        except Exception as err:
            self._log.writeError('Failed to load source file [%s].' % path, err)
            return False

        self._path = path
        return True

#___________________________________________________________________________________________________ loads
    def loads(self, srcString, srcType):
        """Loads source data directly from a string of the given type.

        @return: True on success, False when srcString is None.
        """
        if srcString is None:
            self._log.write('ERROR: Source string is empty or invalid.')
            return False

        srcString = StringUtils.toStr2(srcString)
        self._path = None
        self._src = srcString
        self._type = srcType
        return True

#___________________________________________________________________________________________________ convertDirectory
    def convertDirectory(self, path, srcType, targetType, recursive =False):
        """Converts every file of srcType in the given directory to targetType,
        optionally recursing into subdirectories.

        @return: True when the conversion walk was started, False on invalid input.
        """
        if srcType is None or targetType is None:
            self._log.write('ERROR: Source and/or target types are invalid. Operation aborted.')
            return False

        if not os.path.exists(path):
            self._log.write('ERROR: The specified path [%s] does not exist. Operation aborted.'
                % str(path))
            return False

        if recursive:
            FileUtils.walkPath(path, self._convertInDirectory, [srcType, targetType])
        else:
            self._convertInDirectory([srcType, targetType], path, os.listdir(path))

        return True

#___________________________________________________________________________________________________ writeToFile
    def writeToFile(self, targetType, path =None):
        """Converts the loaded source to targetType and writes the result next to
        the given (or originally loaded) path with the writer's file extension.

        @return: True on success, False on any parse, serialize, or write failure.
        """
        if path is None and self._path is None:
            self._log.write('ERROR: Unable to write to file, no path was specified.')
            return False

        # Assign the reader based on source type
        reader = self._getParserFromType()
        if reader is None:
            self._log.write('ERROR: Unrecognized source type [%s]. Unable to convert.' % self._type)
            return False

        # Assign writer based on target type
        writer = self._getParserFromType(targetType)
        if writer is None:
            self._log.write('ERROR: Unrecognized conversion target type [%s]. Unable to convert.'
                % targetType)
            return False

        path = path if path else self._path
        d = os.path.dirname(path)
        f = os.path.basename(path).split('.')[0]
        f += '.' + writer.TYPE_ID

        if not os.path.exists(d):
            os.makedirs(d)

        try:
            # Removed a leftover debug print of len(self._src) that polluted stdout
            src = reader.parse(self._src, None, True)
        except Exception as err:
            self._log.writeError('ERROR: Failed to parse source. Conversion aborted.', err)
            return False

        try:
            res = writer.serialize(src)
        except Exception as err:
            self._log.writeError('ERROR: Failed to serialized data. Conversion aborted.', err)
            return False

        out = os.path.join(d, f)
        try:
            fh = codecs.open(out, 'wb', 'utf-8')
            fh.write(res)
            fh.close()
        except Exception as err:
            self._log.writeError('ERROR: Failed to write file [%s]. Conversion aborted.'
                % str(out), err)
            return False

        self._log.write('Converted: [%s] => [%s].' % (self._path, out))
        return True

#___________________________________________________________________________________________________ getAsXML
    def getAsXML(self):
        """Returns the loaded source as XML, converting from the source type when needed."""
        if self._type == XMLConfigParser.TYPE_ID:
            return self._src
        else:
            return self._convert(XMLConfigParser.TYPE_ID)

#___________________________________________________________________________________________________ getAsJSON
    def getAsJSON(self):
        """Returns the loaded source as JSON, converting from the source type when needed."""
        if self._type == JSONConfigParser.TYPE_ID:
            return self._src
        else:
            return self._convert(JSONConfigParser.TYPE_ID)

#___________________________________________________________________________________________________ getAsDictionary
    def getAsDictionary(self, asInterchangeFormat =False):
        """Parses the loaded source into a dictionary representation.

        @return: The parsed result (previously the parse result was discarded
            because the return statement was missing).
        """
        reader = self._getParserFromType()
        return reader.parse(self._src, None, asInterchangeFormat)

#___________________________________________________________________________________________________ executeConversion
    @staticmethod
    def executeConversion(source =None, srcType =None, targetType =None, target =None,
                          recursive =False):
        """Interactive/batch entry point: prompts for any unspecified parameters
        and converts either a single file or a directory of files."""
        types = ['xml', 'json']

        if source is None:
            source = queryGeneralValue('Enter the source file (or path) to convert:')

        # Infer the source type from the file extension when possible
        if srcType is None and os.path.isfile(source):
            fileType = source.split('.')[-1].lower()
            if fileType in types:
                srcType = fileType

        if srcType is None:
            srcType = queryFromList('Specify source file(s) type:', types)

        if targetType is None:
            targetType = queryFromList('Specify target file(s) type:', types)

        d = DataFormatConverter()
        if os.path.isfile(source):
            d.load(source, srcType)
            d.writeToFile(targetType, target)
        else:
            d.convertDirectory(source, srcType, targetType, recursive)

#===================================================================================================
#                                                                               P R O T E C T E D

#___________________________________________________________________________________________________ _convert
    def _convert(self, targetType):
        """Parses the loaded source with its reader and serializes it with the
        targetType's writer. Returns None when parsing fails."""
        reader = self._getParserFromType()
        data = reader.parse(self._src, None, True)
        if data is None:
            self._log.write('ERROR: Failed to parse input from. Skipping conversion.')
            return None

        writer = self._getParserFromType(targetType)
        return writer.serialize(data)

#___________________________________________________________________________________________________ _getParserFromType
    def _getParserFromType(self, typeID =None):
        """Maps a type identifier (default: the loaded source's type) to its parser
        class, or None when the type is unrecognized."""
        if typeID is None:
            typeID = self._type

        if typeID == XMLConfigParser.TYPE_ID:
            return XMLConfigParser
        elif typeID == JSONConfigParser.TYPE_ID:
            return JSONConfigParser
        else:
            self._log.write('ERROR: _getParserFromType() failed for type: ' + str(typeID))
            return None

#___________________________________________________________________________________________________ _convertInDirectory
    def _convertInDirectory(self, types, dirname, names):
        """Directory-walk callback: converts every file in dirname whose extension
        matches the reader type (types[0]) to the writer type (types[1])."""
        # Skip subversion metadata directories
        if dirname.find('.svn') != -1:
            return

        reader = self._getParserFromType(types[0])
        writer = self._getParserFromType(types[1])

        for n in names:
            if not n.endswith(reader.TYPE_ID):
                continue
            src = os.path.join(dirname, n)
            self.load(src, reader.TYPE_ID)
            self.writeToFile(writer.TYPE_ID)
def getLogger(cls): """getLogger doc...""" return Logger(cls, printOut=False, logFolder=cls.LOG_PATH)
class MarkupTag(object):
    """Base representation of a single markup tag parsed from (or procedurally
    added to) a document.

    Several members referenced here (``getClassAttr``, ``getAttrFromClass``,
    ``_cloneImpl``, ``_renderImpl``) are defined elsewhere in the project —
    TODO confirm against the full class hierarchy.
    """

#===================================================================================================
#                                                                                       C L A S S

    TAG = ''
    _TAG_LIST = None

    # Pre-compiled patterns for style/attribute manipulation
    _MARGIN_TOP_STYLE_ATTR_PATTERN = re.compile('margin-top:[^\'";]+')
    _STYLE_ATTR_PATTERN = re.compile('style=(("[^"]*")|(\'[^\']*\'))')
    _TAG_INSERT_PATTERN = re.compile('<[^>]+>')

#___________________________________________________________________________________________________ __init__
    def __init__(self, *args, **kwargs):
        """Creates a new instance of MarkupTag.

        @@@param attributeSource:string
            If specified this will be used as the source attribute data for
            the tag. For parsed tags this will override the attribute data
            that was supplied in the actual tag definition text. However, in
            the procedural and/or independent cases where no attribute
            definition existed, this will take its place.
        """
        self._processor   = ArgsUtils.get('processor', None, kwargs, args, 0)
        self._block       = ArgsUtils.get('block', None, kwargs, args, 1)
        self._index       = ArgsUtils.get('index', 0, kwargs, args, 2)
        tagName           = ArgsUtils.get('tagName', None, kwargs, args, 3)
        self._procedural  = ArgsUtils.get('procedural', False, kwargs, args, 4)
        attributes        = ArgsUtils.get('attributes', None, kwargs, args, 5)
        self._independent = ArgsUtils.get('independent', False, kwargs)

        self._attrData   = None
        self._attrsReady = False

        # Per-instance overrides of the class-level tag traits (None = defer
        # to the class attribute via getClassAttr)
        self._voidTag         = ArgsUtils.get('void', None, kwargs)
        self._leafTag         = ArgsUtils.get('leaf', None, kwargs)
        self._isInsertsTag    = ArgsUtils.get('inserts', None, kwargs)
        self._passthruTag     = ArgsUtils.get('passthru', None, kwargs)
        self._renderOverride  = ArgsUtils.get('renderOverride', None, kwargs)
        self._renderTemplate  = ArgsUtils.get('renderTemplate', None, kwargs)
        self._replacementName = ArgsUtils.get('replacementName', None, kwargs)

        self._classMetadata = {}
        self._errors        = []
        self._parent        = ArgsUtils.get('parent', None, kwargs)
        self._replacement   = ''
        self._offset        = 0
        self._name          = self.getClassAttr('TAG', '') if tagName is None else tagName.lower()

        if self._independent:
            # Independent tags own their logger and take attribute data
            # directly from the attributeSource argument
            self._log = Logger(self)
            self._attrs = AttributeData(
                self, ArgsUtils.get('attributeSource', u'', kwargs), attributes=attributes)
        else:
            # Parsed tags share the processor's logger and slice their
            # attribute source out of the processor's document text
            self._log = self._processor.logger
            start = self._block.start + (0 if self._procedural else len(self.tagName) + 3)
            self._attrs = AttributeData(
                self,
                ArgsUtils.get('attributeSource', u'', kwargs) if self._procedural else
                    self._processor.source[start:self._block.end-1],
                attributes=attributes)

#===================================================================================================
#                                                                                   G E T / S E T

    @property
    def apiLevel(self):
        return 2

    @property
    def primaryAttribute(self):
        return self.getClassAttr('PRIMARY_ATTR', None)

    @property
    def isProcedural(self):
        return self._procedural

    @property
    def block(self):
        return self._block
    @block.setter
    def block(self, value):
        self._block = value

    @property
    def index(self):
        return self._index

    @property
    def processor(self):
        return self._processor

    @property
    def isBlockDisplay(self):
        return self.getClassAttr('BLOCK_DISPLAY', False)

    @property
    def isBlockTag(self):
        return False

    @property
    def isVoidTag(self):
        """ Specifies whether or not the tag is a void tag. Void tags render
            as an empty string and are useful for conditional rendering and
            hierarchical data management."""
        if self._voidTag is None:
            return self.getClassAttr('VOID_TAG', False)
        return self._voidTag

    @property
    def isLeafTag(self):
        if self._leafTag is None:
            return self.getClassAttr('LEAF_TAG', False)
        return self._leafTag

    @property
    def isPassthruTag(self):
        if self._passthruTag is None:
            return self.getClassAttr('PASSTHRU_TAG', False)
        return self._passthruTag

    @property
    def isInsertsTag(self):
        if self._isInsertsTag is None:
            return self.getClassAttr('INSERTS_TAG', True)
        return self._isInsertsTag

    @property
    def tagName(self):
        return self._name

    @property
    def replacement(self):
        return self._replacement

    @property
    def attrs(self):
        return self._attrs

    @property
    def renderOffset(self):
        return self._offset

    @property
    def aheadCapPolicy(self):
        return self.getAttrFromClass('AHEAD_CAP_POLICY')

    @property
    def backCapPolicy(self):
        return self.getAttrFromClass('BACK_CAP_POLICY')

    @property
    def parent(self):
        return self._parent
    @parent.setter
    def parent(self, value):
        self._parent = value

    @property
    def renderTemplate(self):
        # Lazily resolved from the class-level TEMPLATE attribute
        if self._renderTemplate is None:
            self._renderTemplate = self.getClassAttr('TEMPLATE', '')
        return self._renderTemplate
    @renderTemplate.setter
    def renderTemplate(self, value):
        self._renderTemplate = value

    @property
    def log(self):
        return self._log

    @property
    def replacementName(self):
        return self._replacementName

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ getAttributeList
    @classmethod
    def getAttributeList(cls):
        """Return the combined set of attributes supported by this tag class,
        adding VALUE only when the class declares a primary attribute."""
        t = TagAttributesEnum
        out = t.THEME + t.ID + t.HTML_CLASS + t.HTML_STYLE + t.HTML_DATA + t.ACCENTED + t.CLEAR + \
            t.GROUP + t.HTML_ATTR
        if cls.getAttrFromClass('PRIMARY_ATTR', None):
            out += t.VALUE
        return out

#___________________________________________________________________________________________________ clone
    def clone(self, tree=True, replacements=None, **kwargs):
        """Clone this tag, or return a matching replacement tag when one is
        supplied whose replacementName equals this tag's."""
        if replacements and self.replacementName:
            if not isinstance(replacements, list):
                replacements = [replacements]
            for r in replacements:
                if r.replacementName == self.replacementName:
                    return r
        return self._cloneImpl(**kwargs)

#___________________________________________________________________________________________________ getNonPassthruRootTag
    def getNonPassthruRootTag(self):
        """Return this tag unless it is a passthru tag, in which case None."""
        if self.isPassthruTag:
            return None
        return self

#___________________________________________________________________________________________________ confirmClosed
    def confirmClosed(self):
        return True

#___________________________________________________________________________________________________ useBackground
    def useBackground(self):
        self.attrs.classes.add('v-S-bck', self.attrs.styleGroup)

#___________________________________________________________________________________________________ addError
    def addError(self, value):
        """Record an error encountered while processing this tag."""
        self._errors.append(value)

#___________________________________________________________________________________________________ makeRenderAttributes
    def makeRenderAttributes(self):
        """Render this tag's attribute data, caching the result so the tag's
        _renderImpl is only ever executed once.

        Returns the rendered attribute data, or None when rendering fails
        (the failure is logged as a MarkupTagError).
        """
        # Don't allow the tag's _renderImpl to be called multiple times
        if self._attrsReady:
            return self._attrData

        try:
            self._attrData = self._renderImpl()
        # BUG FIX: was Python-2-only `except Exception, err` — inconsistent
        # with the `except ... as err` form used everywhere else in the file
        except Exception as err:
            MarkupTagError(
                tag=self,
                errorDef=MarkupTagError.RENDER_FAILURE
            ).log()
            self._log.writeError([
                'Tag Render failure',
                'TAG' + str(self)
            ], err)
            return None

        self._attrsReady = True
        return self._attrData
class SocketHandler(SocketServer.StreamRequestHandler): """A class for...""" #=================================================================================================== # C L A S S SERVICE_UID = 'test' VERBOSE = False WORK_PATH = '/var/lib/' RUN_PATH = '/var/run/' LOG_PATH = '/var/log/' #___________________________________________________________________________________________________ __init__ def __init__(self, request, client_address, server): self._log = Logger(self) self._log.write('Socket handler created') SocketServer.StreamRequestHandler.__init__(self, request, client_address, server) #=================================================================================================== # G E T / S E T #___________________________________________________________________________________________________ GS: returnResponse @property def returnResponse(self): return getattr(self.__class__, 'RETURN_RESPONSE', False) #=================================================================================================== # P U B L I C #___________________________________________________________________________________________________ handle def handle(self): try: data = self.rfile.readline().strip() self._log.write('HANDLE: ' + str(data)) try: result = self._respondImpl(JSON.fromString(unquote(data))) except Exception as err: self._log.writeError('RESPOND FAILURE', err) if self.returnResponse: self.wfile.write(JSON.asString({'error': 1})) return if self.returnResponse: out = {'error': 0} if result: out['payload'] = result self.wfile.write(out) except Exception as err: self._log.write('HANDLE FAILURE', err) return #=================================================================================================== # P R O T E C T E D #___________________________________________________________________________________________________ _respondImpl def _respondImpl(self, data): pass
def runPythonExec(script, kwargs=None): from nimble.NimbleEnvironment import NimbleEnvironment from nimble.data.NimbleResponseData import NimbleResponseData from nimble.data.enum.DataKindEnum import DataKindEnum try: nimble.cmds.undoInfo(openChunk=True) except Exception as err: return False try: # Create a new, temporary module in which to run the script module = imp.new_module('runExecTempModule') # Initialize the script with script inputs setattr(module, NimbleEnvironment.REMOTE_KWARGS_KEY, kwargs if kwargs is not None else dict()) setattr(module, NimbleEnvironment.REMOTE_RESULT_KEY, dict()) # Executes the script in the new module exec_(script, module.__dict__) # Find a NimbleScriptBase derived class definition and if it exists, run it to populate the # results for name, value in Reflection.getReflectionDict(module).iteritems(): if not inspect.isclass(value): continue if NimbleScriptBase in value.__bases__: getattr(module, name)().run() break # Retrieve the results object that contains all results set by the execution of the script result = getattr(module, NimbleEnvironment.REMOTE_RESULT_KEY) except Exception as err: logger = Logger('runPythonExec', printOut=True) logger.writeError('ERROR: Failed Remote Script Execution', err) result = NimbleResponseData( kind=DataKindEnum.PYTHON_SCRIPT, response=NimbleResponseData.FAILED_RESPONSE, error=str(err)) # If a result dictionary contains an error key format the response as a failure try: errorMessage = ArgsUtils.extract( NimbleEnvironment.REMOTE_RESULT_ERROR_KEY, None, result) if errorMessage: return NimbleResponseData( kind=DataKindEnum.PYTHON_SCRIPT, response=NimbleResponseData.FAILED_RESPONSE, error=errorMessage, payload=result) except Exception as err: pass try: nimble.cmds.undoInfo(closeChunk=True) except Exception as err: return False return result
class NimbleEnvironment(object):
    """Central configuration/state holder for the Nimble client/server
    environment: connection settings, Maya detection, and logging helpers
    that marshal log calls onto Maya's main thread when necessary."""

#===================================================================================================
#                                                                                       C L A S S

    # Default flags sent with client requests to nimble server
    CONNECTION_FLAGS = 0x00000000

    SERVER_HOST     = None
    CONNECTION_HOST = None

    # When true NimbleRemoteScripts are run in the remote environment by default instead of
    # within Maya. Makes it possible to debug without reloading Maya's Python interpreter for
    # each change. Disable when running in production.
    TEST_REMOTE_MODE = False

    # Enables gzip compression of the communication to and/or from the nimble server
    ENABLE_COMPRESSION = False

    # Size of a single chunk of data sent over the socket during communication. Larger messages
    # are broken into multiple chunks of sizes less than or equal to this length
    SOCKET_CHUNK_SIZE = 8192

    # Number of times socket calls should be attempted before returning in failure
    REMOTE_RETRY_COUNT = 3

    # Termination string used to identify the end of a nimble message
    TERMINATION_IDENTIFIER = '#@!NIMBLE_MSG_ENDS!@#'

    # Dictionary key in remote script file's globals() that contain the payload to be returned
    # to the remote Nimble environment once script execution is complete
    REMOTE_RESULT_KEY = '__nimbleRemoteResponse__'

    # Dictionary key in remote script file's globals() that contain the arguments send by the
    # remote Nimble environment when the script file action is requested
    REMOTE_KWARGS_KEY = '__nimbleRemoteKwargs__'

    # Error key within the REMOTE_RESULT dictionary in remote scripts that contains an error
    # message for the remote execution. When this key is set the NimbleResultData is set to
    # failure and the error message included in the result
    REMOTE_RESULT_ERROR_KEY   = '__nimbleRemoteError__'
    REMOTE_RESULT_WARNING_KEY = '__nimbleRemoteWarning__'

    logger = Logger('nimble', printOut=True)

    _inMaya             = None
    _mayaPort           = 7800
    _externalPort       = 7801
    _logLevel           = 0
    _mayaUtils          = None
    _connectionLifetime = 10

#===================================================================================================
#                                                                                   G E T / S E T

    @ClassGetter
    def CONNECTION_LIFETIME(cls):
        return cls._connectionLifetime

    @ClassGetter
    def SOCKET_RESPONSE_CHUNK_SIZE(cls):
        # Windows sockets use a much smaller response chunk size
        return 200 if cls.isWindows else cls.SOCKET_CHUNK_SIZE

    @ClassGetter
    def isWindows(cls):
        return sys.platform.startswith('win')

#===================================================================================================
#                                                                                     P U B L I C

    @classmethod
    def inMaya(cls, override=None):
        """Determine (and cache) whether we are running inside Maya's Python.

        The check matches Maya's install layout in sys.prefix and then
        attempts to import maya.utils. An explicit *override* forces the
        cached value. NOTE(review): when the prefix doesn't match, the cached
        value remains None (falsy) rather than False — confirm callers rely
        only on truthiness.
        """
        if override is not None:
            cls._inMaya = override
        if cls._inMaya is not None:
            return cls._inMaya

        if os.name == 'posix':
            pattern = re.compile('/(M|m)aya20[0-9]*/Maya.app')
        else:
            pattern = re.compile('[\\/]+(M|m)aya20[0-9]*[\\/]+Python')

        if pattern.search(sys.prefix):
            try:
                # noinspection PyUnresolvedReferences
                from maya import utils as mu
                cls._mayaUtils = mu
                cls._inMaya = True
            except Exception as err:
                cls._inMaya = False
            return cls._inMaya

        return cls._inMaya

    @classmethod
    def logError(cls, *args, **kwargs):
        """Log an error, marshalling onto Maya's main thread when called from
        a worker thread inside Maya."""
        isInMaya = cls.inMaya() and cls._mayaUtils is not None
        if isInMaya and threading.currentThread().name.lower() != 'mainthread':
            execution.executeWithResult(cls._logError, *args, **kwargs)
        else:
            # CONSISTENCY FIX: route through cls._logError like the threaded
            # branch (and like log() does with _logMessage) instead of
            # calling cls.logger.writeError directly
            cls._logError(*args, **kwargs)

    @classmethod
    def log(cls, message):
        """Log a message, marshalling onto Maya's main thread when called from
        a worker thread inside Maya."""
        isInMaya = cls.inMaya() and cls._mayaUtils is not None
        if isInMaya and threading.currentThread().name.lower() != 'mainthread':
            execution.executeWithResult(cls._logMessage, message)
        else:
            cls._logMessage(message)

    @classmethod
    def getServerLogLevel(cls):
        return cls._logLevel

    @classmethod
    def setServerLogLevel(cls, level=0):
        cls._logLevel = level
        return cls._logLevel

    @classmethod
    def getServerPort(cls, inMaya=None):
        # The server listens on the Maya port when inside Maya
        return cls._mayaPort if cls.inMaya(override=inMaya) else cls._externalPort

    @classmethod
    def getServerHost(cls):
        return cls.SERVER_HOST if cls.SERVER_HOST else 'localhost'

    @classmethod
    def getConnectionPort(cls, inMaya=None):
        # Clients connect to the opposite side's port
        return cls._externalPort if cls.inMaya(override=inMaya) else cls._mayaPort

    @classmethod
    def getConnectionHost(cls):
        return cls.CONNECTION_HOST if cls.CONNECTION_HOST else 'localhost'

#===================================================================================================
#                                                                               P R O T E C T E D

    @classmethod
    def _logMessage(cls, *args, **kwargs):
        cls.logger.write(*args, **kwargs)

    @classmethod
    def _logError(cls, *args, **kwargs):
        cls.logger.writeError(*args, **kwargs)
class IncludeCompressor(object):
    """Minifies .js and .css include files (optionally compiling CoffeeScript
    first) by shelling out to an external ``minify`` command, then stripping
    comments from the compressed JavaScript output."""

#===================================================================================================
#                                                                                       C L A S S

    # Patterns for stripping block and line comments from minified JS
    _REMOVE_COMMENT_RE      = re.compile('/\*.+\*/', re.DOTALL)
    _REMOVE_COMMENT_LINE_RE = re.compile('(^|\n)[\s\t]*//.+(\n|$)')

    JS_TYPE  = 'js'
    CSS_TYPE = 'css'

#___________________________________________________________________________________________________ __init__
    def __init__(self, compileCoffee =False):
        # compileCoffee: when True, CoffeeScript sources are compiled to JS
        # before compression
        self._log = Logger('IncludeCompressor')
        self._compileCoffee = compileCoffee

#===================================================================================================
#                                                                                     P U B L I C

#___________________________________________________________________________________________________ compress
    def compress(self, rootPath):
        """Compress a single file or an entire directory tree at rootPath.
        Returns False when the path does not exist."""
        if not self._fileExists(rootPath):
            return False
        elif os.path.isfile(rootPath):
            return self.compressFile(rootPath)
        else:
            return self.compressPath(rootPath)

#___________________________________________________________________________________________________ compressFile
    def compressFile(self, rootPath, directory =None):
        """Compress one file, optionally compiling CoffeeScript on its folder
        first. Returns True on success."""
        if not self._fileExists(rootPath):
            return False

        if self._compileCoffee:
            try:
                from pyaid.web.coffeescript.CoffeescriptBuilder import CoffeescriptBuilder
                CoffeescriptBuilder.compileAllOnPath(rootPath, os.path.dirname(rootPath), True)
                self._log.write('Coffeescript compiled.')
            except Exception as err:
                self._log.writeError('Failed to compile coffeescript file.', err)
                return False

        return self._compressFile(rootPath, directory)

#___________________________________________________________________________________________________ compressPath
    def compressPath(self, rootPath):
        """Walk rootPath and compress every eligible include file found."""
        # First compile any coffee scripts to js files
        if self._compileCoffee:
            try:
                from pyaid.web.coffeescript.CoffeescriptBuilder import CoffeescriptBuilder
                CoffeescriptBuilder.compileAllOnPath(rootPath, rootPath, True)
                self._log.write('Coffee scripts compiled.')
            except Exception as err:
                self._log.writeError('Failed to compile coffeescript files.', err)
                return False

        FileUtils.walkPath(rootPath, self._compressInFolder, None)
        self._log.write('Compression operation complete.')
        return True

#===================================================================================================
#                                                                               P R O T E C T E D

#___________________________________________________________________________________________________ _fileExists
    def _fileExists(self, rootPath):
        # Guard helper: logs and returns False when the path is missing
        if not os.path.exists(rootPath):
            self._log.write('ERROR: [%s] does not exist. Operation aborted.' % rootPath)
            return False
        return True

#___________________________________________________________________________________________________ _compressFile
    def _compressFile(self, target, directory):
        """Minify *target* (a .js or .css file inside *directory*) into a
        sibling ``*.comp.js`` / ``*.comp.css`` file via the external
        ``minify`` command. Returns True on success, False otherwise."""
        # Skip compiled files.
        if target.endswith('comp.js') or target.endswith('comp.css'):
            return False

        if target.endswith('.js'):
            fileType = IncludeCompressor.JS_TYPE
        elif target.endswith('.css'):
            fileType = IncludeCompressor.CSS_TYPE
        else:
            return False

        if not directory:
            directory = ''
        if not directory.endswith(os.sep) and not target.startswith(os.sep):
            directory += os.sep

        inFile   = directory + target
        tempFile = directory + target + '.temp'
        try:
            fh = open(inFile, 'r')
            fileString = fh.read()
            fh.close()
        except Exception as err:
            self._log.writeError('FAILED: Unable to read ' + str(inFile), err)
            return False

        if fileType == IncludeCompressor.CSS_TYPE:
            # Charset declarations break the minifier; the output name swaps
            # the extension for comp.css / comp.js
            fileString = fileString.replace('@charset "utf-8";', '')
            ofn = (target[0:-3] + 'comp.css')
        else:
            ofn = (target[0:-2] + 'comp.js')

        try:
            fh = open(tempFile, 'w')
            fh.write(fileString)
            fh.close()
        except Exception as err:
            self._log.writeError(
                'FAILED: Unable to write temp file ' + str(tempFile), err)
            return False

        outFile = directory + '/' + ofn
        cmd = ['minify', '"%s"' % tempFile, '"%s"' % outFile]
        result = SystemUtils.executeCommand(cmd)
        if result['code']:
            self._log.write('FAILED: Unable to compress ' + str(inFile))

        # NOTE(review): the original collapsed formatting does not
        # unambiguously show whether this cleanup was nested under the
        # failure branch above; unconditional temp-file removal is the most
        # natural reading — confirm against revision history
        if os.path.exists(tempFile):
            os.remove(tempFile)

        if not os.path.exists(outFile):
            self._log.write('FAILED: ' + target + ' -> ' + ofn)
            return False
        elif fileType == IncludeCompressor.JS_TYPE:
            # Strip comments the minifier left behind in the JS output
            f = open(outFile, 'r')
            compressed = f.read()
            f.close()

            compressed = IncludeCompressor._REMOVE_COMMENT_RE.sub('', compressed)
            compressed = IncludeCompressor._REMOVE_COMMENT_LINE_RE.sub('', compressed)

            f = open(outFile, 'w')
            f.write(compressed.strip())
            f.close()

        # Report the size reduction achieved by the compression
        inSize  = SizeUnits.SizeConversion.bytesToKilobytes(inFile, 2)
        outSize = SizeUnits.SizeConversion.bytesToKilobytes(outFile, 2)
        saved   = SizeUnits.SizeConversion.convertDelta(
            inSize, outSize, SizeUnits.SIZES.KILOBYTES, 2)

        self._log.write(
            'Compressed[%s]: %s -> %s [%sKB -> %sKB | Saved: %sKB]'
            % (fileType, target, ofn, inSize, outSize, saved))

        return True

#___________________________________________________________________________________________________ _compressInFolder
    def _compressInFolder(self, dumb, directory, names):
        # Directory-walk callback; skips .svn administrative directories
        if directory.find('.svn') != -1:
            return

        for fn in names:
            self._compressFile(fn, directory)
def __init__(self, compileCoffee=False): self._log = Logger('IncludeCompressor') self._compileCoffee = compileCoffee