def __filterCallers(self, i):
    """Apply the caller filter selected at combo-box index *i* and refresh the log.

    i: selected index; -1 signals an invalid selection (empty box or reset),
       in which case nothing happens.
    """
    if (i == -1):
        return  # invalid filter, empty box or reset
    # -1 Because of "ALL": index 0 is the "ALL" entry, so real caller
    # indices are shifted down by one
    self.__currentCallerFilter = i - 1
    self.clearLog()
    Log.refreshConsole()
def fetchParserData(self):
    """Request all parser data for this session from the remote host.

    Sends a FETCHPARSERDATA message over the transaction and, on success,
    replays the received parser keys, row updates, handle events and log
    lines into the local parser/logger.  Host-reported errors (or a
    missing/failed reply) are forwarded to self._handleErrors().
    """
    if self._assertConnection() and self.transaction is not None:
        msg = {
            "key": Protocol.SESSION,
            "subkey": SessionProtocol.FETCHPARSERDATA
        }
        self.transaction.send(msg)
        # staging read: block until the reply matching our subkey arrives
        ret = self.transaction.asyncRead(
            staging=True, attr=("subkey", SessionProtocol.FETCHPARSERDATA))
        if ret:
            if ret["status"]:
                parserRows = ret["ParserRows"]
                parserKeys = ret["ParserKeys"]
                parserHandle = ret["ParserHandle"]
                parserLog = ret["ParserLogs"]
                # replay order matters: register keys first, then row
                # updates, then handle events, finally plain log lines
                for phase, key in parserKeys:
                    self.parser.sendParserRegisterKeys(phase, key)
                for phase, row in parserRows:
                    self.parser.sendParserUpdate(phase, row)
                for event, line, groups in parserHandle:
                    self.parser.sendParserHandle(event, line, groups)
                for log, error in parserLog:
                    if error:
                        Log.error(log, self.getCallerId())
                    else:
                        Log.log(log, self.getCallerId())
                return
            else:
                self._handleErrors(ret["error"])
    # reached on connection failure, empty reply, or host-side error
    self._handleErrors(["Failed to fetch parser data from host."])
def __init__(self, listOfLogfiles):
    """Initialize the concatenator with a list of log files.

    Each log file is parsed for finish/interrupt/resume events and a
    SnapshotListener is attached per file; to get the iterators over the
    merged logs, call concate().

    listOfLogfiles: iterable of log files to parse.
    """
    self.__listeners = []
    # regex patterns for the caffe log events we care about
    events = {
        Concatenator.finish: re.compile('Optimization Done.'),
        Concatenator.interrupt: re.compile(
            "((?:Snapshotting solver state to (?:binary proto|HDF5) file ).*)"
        ),
        Concatenator.resume: re.compile("((?<= Resuming from ).*)"),
    }
    for logFile in listOfLogfiles:
        log_iter = self.__createIterFromFile(logFile)
        parser = Parser(log_iter, events)
        listener = Concatenator.SnapshotListener(logFile)
        parser.addListener(listener)
        try:
            parser.parseLog()
            self.__listeners.append(listener)
        except Exception as e:
            # was a bare `except:` that also swallowed SystemExit /
            # KeyboardInterrupt and logged an empty message; report the
            # actual parsing error instead
            callerId = Log.getCallerId("Error Parsing Log File:" + str(logFile))
            Log.error(str(e), callerId)
def _promptForAutoNetImport(self, netPath): """Open a message box and ask the user if he/she wants to import the net definition given in netPath. netPath contains a net definition that has been extracted from a loaded solver definition. """ # check whether the specified file does exist if os.path.isfile(netPath): # get the file content with open(netPath, 'r') as file: netPrototxt = file.read() msgBox = QMessageBox() msgBox.setWindowTitle("Barista") msgBox.setText( self.tr("Solver definition contains a network reference.")) msgBox.setInformativeText( self.tr("Do you want to import the network, too?")) msgBox.setDetailedText("File:\n{}\n\nNet definition:\n{}".format( netPath, netPrototxt)) msgBox.setStandardButtons(QMessageBox.Yes | QMessageBox.No) msgBox.setDefaultButton(QMessageBox.Yes) ret = msgBox.exec_() if ret == QMessageBox.Yes: self.openNet(netPath) else: callerId = Log.getCallerId('file_loader') Log.log( "Found network reference in loaded solver definition, but the file {} does not exist." .format(netPath), callerId)
def loadFiles(self, filenames):
    """Load every log file in *filenames* and add it to the list of logs.

    Missing files are reported via Log.error and skipped.  Each loaded
    log is registered under an "external_<basename>" id and recorded in
    self.__settings["logFiles"] so it can be restored from the project
    file later.
    """
    # was: `for i in range(len(filenames))` with manual indexing
    for f in filenames:
        if not os.path.exists(f):
            Log.error("External log not found: " + f,
                      Log.getCallerId("plotter"))
            continue
        parser = Parser(open(f, 'r'), OrderedDict(), Log.getCallerId(str(f)))
        head, tail = os.path.split(str(f))
        logId = "external_" + tail
        logName = "[ext] " + tail
        self.putLog(logId, parser, logName=logName)
        parser.parseLog()
        # This is for saving the loaded logs in the project file
        # (create the necessary dicts if they don't exist yet)
        if self.__settings is None:
            self.__settings = {}
        self.__settings.setdefault("logFiles", {})[logId] = f
def __init__(self, layers, view, vertical=False):
    """Build the abstract node graph for *layers* and sort it for display.

    layers:   expected to be dict-like (``.values()`` is used below); a
              plain list is rejected with an error.
    view:     target view handed on to self.sort().
    vertical: if True, use the larger node offset for vertical layouts.
    """
    self.vertical = vertical
    if (vertical):
        NodeSort.Node.OFFSET = 70
    else:
        NodeSort.Node.OFFSET = 30
    self.__hasCycle = False
    self.columnWidths = []
    self.rowHeights = []
    # all abstract nodes (type Node) that exist
    self.nodes = []
    # all gui nodes that exist
    self.guiNodes = []
    if (type(layers) is list):
        # NOTE(review): `callerId` is not defined anywhere in this method,
        # so this error path would raise a NameError — verify where
        # callerId is supposed to come from (module level?)
        Log.error("wrong type of layers", callerId)
        return
    # fills nodes and guiNodes
    self.getAbstractNodes(layers.values())
    if (self.__hasCycle):
        # NOTE(review): same undefined `callerId` as above
        Log.error("There is a Cycle in that graph, cannot be handled yet",
                  callerId)
        return
    if (not layers.values()):
        return
    self.sort(self.nodes, view)
def printLog(self, log, error=False):
    """Emit a log line either directly or via the print-log signal.

    When a log id has been assigned, the message goes straight to the
    Log facility (as error or plain message depending on *error*);
    otherwise it is forwarded through printLogSignl for later handling.
    """
    if self.log_id is None:
        self.printLogSignl.emit(log, error)
    elif error:
        Log.error(log, self.log_id)
    else:
        Log.log(log, self.log_id)
def proceed(self, snapshot=None):
    """Continue training from the (last) snapshot.

    snapshot: solverstate file to resume from; defaults to the most
              recent snapshot of this session.

    Return True if the process was continued, False if the continuation
    failed (or the session is not in a resumable state).
    """
    if self.getState() is State.PAUSED:
        self.__ensureDirectory(self.getSnapshotDirectory())
        self.__ensureDirectory(self.logs)
        if snapshot is None:
            snapshot = self.getLastSnapshot()
        self.rid += 1
        caffe_bin = None
        try:
            self.getParser().setLogging(True)
            caffe_bin = caffeVersions.getVersionByName(
                self.project.getCaffeVersion()).getBinarypath()
            self.proc = Popen(
                [caffe_bin, 'train',
                 '-solver', self.getSolver(),
                 '-snapshot', snapshot],
                stdout=PIPE,
                stderr=STDOUT,
                cwd=self.getDirectory())
            try:
                # mirror the training output into the run log file
                self.tee = Popen(
                    ['tee', '-a', self.getRunLogFileName()],
                    stdin=self.proc.stdout,
                    stdout=PIPE)
            except Exception as e:
                # continue without tee
                Log.error('Failed to start tee: ' + str(e),
                          self.getCallerId())
            self.setState(State.RUNNING)
            Log.log(
                'Session ' + self.getRunLogFileName(True) + ' was proceeded',
                self.getCallerId())
            self.__startParsing()
            return True
        except Exception as e:
            Log.error('Failed to continue session: ' + str(e),
                      self.getCallerId())
            # check if the caffe binary exists
            # (was `os.file.exists`, which raises AttributeError, and
            # referenced an undefined name `caffe_bin`)
            if caffe_bin is not None and os.path.exists(caffe_bin) is False:
                Log.error(
                    'CAFFE_BINARY directory does not exists: ' + caffe_bin +
                    '! Please set CAFFE_BINARY to run a session.',
                    self.getCallerId())
    elif self.getState() in (State.FAILED, State.FINISHED):
        Log.error(
            'Could not continue a session in state ' + str(self.getState()),
            self.getCallerId())
    return False
def _addSoftmax(self): """Add a softmax layer to the very end of the net, but only if a SoftmaxWithLoss layer was used before.""" # check whether the net used to contain a SoftmaxWithLoss layer softmaxWithLossWasUsed = False for id, layer in self._originalNetworkDictionary["layers"].iteritems(): if layer["type"].name() == "SoftmaxWithLoss": softmaxWithLossWasUsed = True break if softmaxWithLossWasUsed: # ensure that the remaining deployment net has at least one layer if len(self._deployedNetworkDictionary["layers"]) > 0: softmaxLayerType = info.CaffeMetaInformation( ).availableLayerTypes()["Softmax"] # do not add another softmax, if the current deployment network already contains one softmaxAlreadyIncluded = False for id, layer in self._deployedNetworkDictionary[ "layers"].iteritems(): if layer["type"].name() == softmaxLayerType.name(): softmaxAlreadyIncluded = True break if not softmaxAlreadyIncluded: # get the very last layer lastLayerId = self._deployedNetworkDictionary[ "layerOrder"][-1] lastLayer = self._deployedNetworkDictionary["layers"][ lastLayerId] # ensure that the determined last layer does have a top blob if "top" in lastLayer["parameters"] and len( lastLayer["parameters"]["top"]) > 0: # create new softmax layer with default values and add it to the deployment net name = "softmax" position = len( self._deployedNetworkDictionary["layers"]) softmaxLayer, softmaxLayerId = self._dHelper.addLayer( softmaxLayerType, name, position) # connect the softmax layer with the existing network softmaxLayer["parameters"]["bottom"] = [ lastLayer["parameters"]["top"][0] ] # name the output softmaxLayer["parameters"]["top"] = ["probabilities"] else: Log.log( "Could not add Softmax layer as the very last layer of the deployment net does not have any " "top blobs.", self._logId) else: Log.log( "Could not add Softmax layer as the remaining deployment net does not have any layers.", self._logId)
def getSession(self, SID):
    """Return the session registered under *SID*, or None.

    An unknown SID is reported to the logger together with the list of
    currently valid session IDs.
    """
    if SID not in self.getValidSIDs():
        validIds = ", ".join([str(i) for i in self.getValidSIDs()])
        Log.log(
            "Session " + str(SID) +
            " could not be loaded. Valid IDs are: " + validIds,
            self.getCallerId())
        return None
    return self.__sessions[SID]
def getInternalNetFile(self, log=False):
    """Return the original net prototxt file name.

    log: when True, a message is sent to the logger console if the file
         does not exist on disk.
    """
    if log and not os.path.isfile(self.__netInternalFile):
        Log.log(
            "This sessions net file: " + self.__netInternalFile +
            " does not exist.", self.caller_id)
    return self.__netInternalFile
def getSolver(self, log=False):
    """Return the solver prototxt file name.

    log: when True, a message is sent to the logger console if the file
         does not exist on disk.
    """
    if log and not os.path.isfile(self.__solverFile):
        Log.log(
            "This sessions Solverfile: " + self.__solverFile +
            " does not exist.", self.caller_id)
    return self.__solverFile
def getActiveSID(self):
    """Return the currently active session ID.

    If the stored active SID no longer exists among the sessions, fall
    back to the highest available ID and log the repair.
    """
    activeIsStale = (self.__activeSID
                     and len(self.__sessions) > 0
                     and self.__activeSID not in self.__sessions)
    if activeIsStale:
        Log.log(
            "The Active Session is no longer available. The Project seems to be broken. The active Session is set to the highest ID available.",
            self.callerId)
        self.setActiveSID(self.__sessions.keys()[-1])
        Log.log("Active Session set to " + str(self.__activeSID),
                self.callerId)
    return self.__activeSID
def open(self):
    '''open the database from the set path.'''
    if self._env:
        self.close()
    if not self._path:
        return
    # accept the path both with and without a trailing separator
    hasDataFile = (os.path.exists(self._path + "data.mdb")
                   or os.path.exists(self._path + "/data.mdb"))
    if hasDataFile:
        self._env = lmdb.open(self._path, max_dbs=2)
    else:
        Log.error("Dir is not valid LMDB: " + self._path, self.logid)
def open(self):
    '''open the database from the set path.'''
    if self._db:
        self.close()
    if not self._path:
        return
    # accept the path both with and without a trailing separator
    hasCurrentFile = (os.path.exists(self._path + "CURRENT")
                      or os.path.exists(self._path + "/CURRENT"))
    if hasCurrentFile:
        self._db = leveldb.LevelDB(self._path)
    else:
        Log.error("Dir is not valid LEVELDB: " + self._path, self.logid)
def open(self):
    '''open the database from the set path.

    Opens self._path read-only as an HDF5 file.  Open failures are
    logged and leave self._db as None; a missing file is only reported
    when no hdf5 txt path is configured.
    '''
    if self._db:
        self.close()
    if self._path:
        if os.path.exists(self._path):
            try:
                self._db = h5.File(self._path, 'r')
            except Exception:
                # was a bare `except:`; don't swallow SystemExit /
                # KeyboardInterrupt, only genuine open errors
                Log.error("File not valid HDF5: " + self._path, self._logid)
                self._db = None
        elif not self._pathOfHdf5Txt:
            Log.error("File does not exist: " + self._path, self._logid)
def __ensureDirectory(self, directory):
    """Create *directory* (including parents) if it does not exist.

    An empty path is silently ignored; creation and failure are both
    reported to the logger.
    """
    if directory == '':
        return
    if not os.path.exists(directory):
        try:
            os.makedirs(directory)
            Log.log('Created directory: ' + directory, self.getCallerId())
        except Exception as e:
            # was: "... + directory + ')' + str(e)" — a stray closing
            # parenthesis with no opening one; use a readable separator
            Log.error(
                'Failed to create directory ' + directory + ': ' + str(e),
                self.getCallerId())
def _printLog(self):
    """Read a PRINTLOG message from the transaction and output its lines.

    Error entries are routed through the error handler; normal entries
    are written to the logger one line at a time.
    """
    msg = self.transaction.asyncRead(attr=("subkey",
                                           SessionProtocol.PRINTLOG))
    for log, error in msg["log"]:
        # normalize a single line to a one-element list
        lines = log if isinstance(log, list) else [log]
        if error:
            self._handleErrors(lines)
        else:
            for line in lines:
                Log.log(line, self.getCallerId())
def _deployAndExportUnsafe(self):
    """Validate user input and export the selected session for deployment.

    Triggered when the user clicks the deploy button.  It validates the
    user input and displays messages and errors if additional input is
    required.  If everything is validated successfully, the deployed net
    (.prototxt) and the trained weights (.caffemodel) are written to the
    destination directory.
    """
    # If no snapshot exists, we show an error message and close the dialog.
    if not self._hasSnapshotsOrDisplayError():
        self.close()
        return
    # Get the destination folder from the current user input. If the input
    # is empty, we show an error message and cancel.
    destinationFolder = self._getDestinationOrDisplayError()
    if not destinationFolder:
        return
    # Check if the path already exists. If it doesn't exist yet, let the
    # user decide whether to create all missing folders and abort otherwise.
    folderExists = os.path.isdir(destinationFolder)
    if not folderExists and not self._askDirectoryCreatePermission():
        return
    # Ensure that the full path points to a folder and not a file.
    if not self._ensurePathIsFolderOrDisplayError(destinationFolder):
        return
    # Determine the destination file paths.
    destinationPrototxtFile, caffemodelDestination = \
        self._getDestinationFilePaths(destinationFolder)
    # Check if any of the destination files already exist and ask the user
    # if they should be replaced. Abort if the user decides not to replace
    # one of the files.
    if not self._checkFilesDontExistOrAskReplacePermission(
            [destinationPrototxtFile, caffemodelDestination]):
        return
    # Export files.
    session = self._selectedSession()
    snapshot = self._selectedSnapshot()
    caffemodelContents = session.readCaffemodelFile(
        self._replaceLast(snapshot, 'solverstate', 'caffemodel'))
    # Start deployment.
    deployedNet = session.readDeployedNetAsString()
    # Write prototxt file (plain text).
    with open(destinationPrototxtFile, 'w') as file:
        file.write(deployedNet)
    # Write caffemodel file. .caffemodel files are binary protobuf data,
    # so open in binary mode (was text-mode 'w', which can corrupt the
    # file on platforms that translate line endings).
    with open(caffemodelDestination, 'wb') as caffemodelFile:
        caffemodelFile.write(caffemodelContents)
    Log.log(
        "Deployment files have been saved successfully to {}.".format(
            destinationPrototxtFile), self.getCallerId())
    # Close the current dialog.
    self.close()
def save(self):
    """Persist the current session status to disk via the remote host.

    Returns True on success; otherwise forwards errors to the error
    handler and returns False.
    """
    Log.log("Saving current Session status to disk.", self.getCallerId())
    if self._assertConnection():
        msg = {"key": Protocol.SESSION, "subkey": SessionProtocol.SAVE}
        self.transaction.send(msg)
        reply = self.transaction.asyncRead(
            attr=("subkey", SessionProtocol.SAVE))
        if reply:
            if reply["status"]:
                return True
            self._handleErrors(reply["error"])
    self._handleErrors(["Could not save session."])
    return False
def changeSession(self, newSID, oldSID=None):
    """Changes the active session within one project.

    The current state of the netManager is saved to the old session,
    then the state of the new session is loaded into the netManager.
    If the project refuses the new SID, a message listing the valid
    SIDs is logged instead.
    """
    self.storeSessionState(SID=oldSID, stateDict=None)
    self._project.setActiveSID(newSID)
    if self._project.getActiveSID() != newSID:
        validIds = ", ".join(
            [str(id) for id in self._project.getValidSIDs()])
        Log.log(
            "New Session " + str(newSID) +
            " could not be set. Valid SIDs are: " + validIds,
            self._viewManager.sessionController.getCallerId())
        return
    self.loadSessionState(SID=newSID)
def _getCurrentDatum(self):
    """Return the Datum at the current cursor position, or None.

    Parses the raw value under self._cursor into a caffe Datum protobuf
    message; invalid data is reported via the logger and yields None.
    """
    from backend.caffe.path_loader import PathLoader
    caffe = PathLoader().importCaffe()
    if self._cursor:
        raw_datum = self._cursor.value()
        datum = caffe.proto.caffe_pb2.Datum()
        try:
            datum.ParseFromString(raw_datum)
        except Exception:
            # was a bare `except:`; ParseFromString raises DecodeError on
            # malformed data — don't swallow SystemExit/KeyboardInterrupt
            Log.error("LMDB does not contain valid data: " + self._path,
                      self.logid)
            return None
        return datum
def createRemoteSession(self, remote, state_dictionary=None):
    """Create an entirely new session on the remote host.

    Use this only to create entirely new sessions; to load existing
    ones use the loadRemoteSession command.

    remote: (host, port) of the remote Barista host.
    state_dictionary: optional network state attached to the new session;
        its layer types are sent to the host for validation.

    Returns the new session id, or None on failure.
    """
    msg = {"key": Protocol.GETCAFFEVERSIONS}
    reply = sendMsgToHost(remote[0], remote[1], msg)
    if not reply:
        # was: fell through with `sid`/`uid` unbound and raised a
        # NameError at ClientSession(...) below
        Log.error('Failed to create remote session! No connection to Host',
                  self.getCallerId())
        return None
    remoteVersions = reply["versions"]
    if len(remoteVersions) <= 0:
        msgBox = QMessageBox(
            QMessageBox.Warning, "Error",
            "Cannot create remote session on a host without a caffe-version")
        msgBox.addButton("Ok", QMessageBox.NoRole)
        msgBox.exec_()
        return None
    sid = self.getNextSessionId()
    msg = {
        "key": Protocol.CREATESESSION,
        "pid": self.projectId,
        "sid": sid
    }
    # send the layer types so the host can validate them
    layers = []
    for layer in state_dictionary["network"]["layers"]:
        layers.append(state_dictionary["network"]["layers"][layer]
                      ["parameters"]["type"])
    msg["layers"] = layers
    ret = sendMsgToHost(remote[0], remote[1], msg)
    if ret:
        if ret["status"]:
            uid = ret["uid"]
        else:
            for e in ret["error"]:
                Log.error(e, self.getCallerId())
            return None
    else:
        Log.error('Failed to create remote session! No connection to Host',
                  self.getCallerId())
        return None
    session = ClientSession(self, remote, uid, sid)
    if state_dictionary is not None:
        session.state_dictionary = state_dictionary
    self.__sessions[sid] = session
    self.newSession.emit(sid)
    return sid
def setActiveSID(self, sid):
    """Make *sid* the active session and emit activeSessionChanged.

    An unknown sid is reported to the logger; additionally, when no
    active session is set at all, the highest valid SID (or None) is
    used as a fallback.
    """
    validSIDs = self.getValidSIDs()
    if sid not in validSIDs:
        Log.error(
            "Could not set active session to " + str(sid) +
            " valid Session-IDs are: " +
            ", ".join([str(s) for s in validSIDs]), self.getCallerId())
        if not self.__activeSID:
            self.__activeSID = validSIDs[-1] if len(validSIDs) > 0 else None
            Log.log("Active session set to " + str(self.__activeSID),
                    self.getCallerId())
        return
    self.__activeSID = sid
    self.activeSessionChanged.emit(sid)
def __updateLayerList(self):
    """Update the layer list with available layers found in the net
    description of the current session.

    Collects the names of all layers whose type is in ALLOWED_LAYERTYPES,
    fills the layer combo box with them, and keeps (or re-selects) the
    current layer name.
    """
    def extractLayerNames(currentNetwork):
        # sorted names of all layers whose type is allowed
        layerNames = map(
            lambda layer: layer["parameters"]["name"],
            filter(
                lambda layer: layer["type"].name() in self.ALLOWED_LAYERTYPES,
                currentNetwork["layers"].values()))
        return sorted(layerNames)

    layerNameList = []
    if self.__currentSessionId is not None:
        session = self.__sessionDict[self.__currentSessionId]
        if isinstance(session, ClientSession):
            # remote session: fetch the net definition over the connection
            netInternal = session.loadInternalNetFile()
            layerNameList = extractLayerNames(loader.loadNet(netInternal))
        else:
            try:
                currentNetworkPath = session.getInternalNetFile()
                # was: open() without close (leaked handle, shadowed the
                # builtin `file`) — use a with-statement instead
                with open(currentNetworkPath, 'r') as netFile:
                    layerNameList = extractLayerNames(
                        loader.loadNet(netFile.read()))
            except IOError:
                callerId = Log.getCallerId('weight-plotter')
                Log.error("Could not open the network of this session.",
                          callerId)
                layerNameList = []
    # updates the layer Combobox with the current layers
    self.layerComboBox.replaceItems(layerNameList)
    if self.__currentLayerName is None or \
            self.__currentLayerName not in layerNameList:
        if not layerNameList == []:
            self.__currentLayerName = layerNameList[-1]
        else:
            self.__currentLayerName = None
    self.layerComboBox.setCurrentText(self.__currentLayerName)
def __init__(self, netPrototxtContents): """Create a deployment version of the given network. netPrototxtContents: string The contents of the prototxt file to deploy. """ # create a logger id self._logId = Log.getCallerId('deployment') self._originalNetworkDictionary = loader.loadNet(netPrototxtContents) # init further attributes self._dataLayers = dict( ) # a dictionary containing all data layers. keys are the layer ids. self._labelBlobNames = [ ] # a list containing all names of blobs, that represent any labels self._dataBlobNames = [ ] # a list containing all names of blobs, that represent any input data self._inputBlobShapes = dict( ) # the keys of this dictionary equal self._dataBlobNames. self._deployedNetworkDictionary = copy.deepcopy( self._originalNetworkDictionary) self._dHelper = DictHelper(self._deployedNetworkDictionary) # start deployment self._createDeployedNetwork()
def _getFirstDatum(self):
    """Return the first Datum stored in the database, or None.

    Takes the first key/value pair from the db iterator and parses the
    value into a caffe Datum protobuf message; invalid data is reported
    via the logger and yields None.
    """
    from backend.caffe.path_loader import PathLoader
    caffe = PathLoader().importCaffe()
    iter = self._getIter()
    if iter:
        for key, value in iter:
            raw_datum = value
            datum = caffe.proto.caffe_pb2.Datum()
            try:
                datum.ParseFromString(raw_datum)
            except Exception:
                # was a bare `except:`; ParseFromString raises DecodeError
                # on malformed data
                Log.error(
                    "LEVELDB does not contain valid data: " + self._path,
                    self.logid)
                return None
            return datum
def delete(self):
    """Delete the session directory and disconnect signals."""
    self.pause()
    try:
        shutil.rmtree(self.getDirectory())
    except Exception as e:
        Log.error('Could not remove session directory: ' + str(e),
                  self.getCallerId())
    # best effort: the signals may never have been connected / may
    # already be disconnected, so failures here are deliberately ignored
    # (was `except Exception as e` with an unused binding)
    try:
        self.stateChanged.disconnect()
        self.iterationChanged.disconnect()
        self.snapshotAdded.disconnect()
        self.project.deleteSession.emit(self.sid)
    except Exception:
        pass
    Log.removeCallerId(self.caller_id, False)
def _modifyH5TxtFile(self, dir, state=None):
    """Rewrite the hdf5 txt source files of all hdf5 data layers into *dir*.

    For every layer with a relative ``hdf5_data_param.source`` path, the
    referenced txt file (expected two directories above *dir*) is copied
    into *dir* as "<layerId>.txt", with relative entries adjusted so they
    remain valid from the new location.  The net is taken from *state*
    or, if absent, from the active session's state dictionary.
    """
    net = None
    if state:
        net = state["network"]
    else:
        session = self.getActiveSession()
        if session:
            state_dict = session.state_dictionary
            if state_dict:
                if "network" in state_dict:
                    net = state_dict["network"]
    if net:
        h = helper.DictHelper(net)
        for layerId, layer in net.get("layers", {}).iteritems():
            paramKey = "hdf5_data_param.source"
            if h.layerParameterIsSet(layerId, paramKey):
                paramValue = h.layerParameter(layerId, paramKey)
                if paramValue is not None and not os.path.isabs(paramValue):
                    newFilename = str(layerId) + ".txt"
                    newFilepath = os.path.join(dir, newFilename)
                    # the source path is relative to two directories above
                    oldPath = os.path.join(
                        dir,
                        os.path.join(os.pardir,
                                     os.path.join(os.pardir, paramValue)))
                    if os.path.exists(oldPath):
                        with open(newFilepath, "w") as f:
                            lines = [
                                line.rstrip('\n') for line in open(oldPath)
                            ]
                            for line in lines:
                                # was `line is not ""` — an identity check
                                # against a string literal that only worked
                                # by CPython interning; use equality
                                if line != "":
                                    if line[:1] == '.':
                                        line = os.path.join(
                                            os.pardir,
                                            os.path.join(os.pardir, line))
                                    f.write("\n" + line)
                    else:
                        Log.error(
                            'Failed to copy hdf5txt file. File does not exists: '
                            + oldPath, self.getCallerId())
def __getNetwork(self, sess_id=None, snap_id=None):
    """ Return the caffe network of the current session and snapshot.

    sess_id/snap_id default to the currently selected session/snapshot.
    Loaded nets are cached in self.__alreadyOpenSnapshots, keyed by
    session id and snapshot id, so each snapshot is only parsed once.
    Returns None (implicitly) if the ids are unset, loading fails, or
    the snapshot file is missing.
    """
    if sess_id is None:
        sess_id = self.__currentSessionId
    if snap_id is None:
        snap_id = self.__currentSnapshotId
    if sess_id and snap_id:
        # Creates a Dictionary of Sessions Ids, which contains Snapshot Ids
        # which point to Layer Id which point to
        # already opened Networks.
        net = None
        if sess_id in self.__alreadyOpenSnapshots:
            session_snapshots = self.__alreadyOpenSnapshots[sess_id]
            if snap_id in session_snapshots:
                net = session_snapshots[snap_id]
        if net:
            # cached net found
            return net
        else:
            # snapshot was accessed for the first time
            # create and cache the net
            session = self.__sessionDict[sess_id]
            # the weights live in the .caffemodel next to the .solverstate
            snapName = snap_id.replace('solverstate', 'caffemodel')
            if isinstance(session, ClientSession):
                net = session.loadNetParameter(snapName)
                if net is None:
                    return
            else:
                snapshotPath = session.getSnapshotDirectory()
                snapshotPath = str(os.path.join(snapshotPath, snapName))
                if not os.path.exists(snapshotPath):
                    Log.error(
                        'Snapshot file ' + snapshotPath + ' does not exist!',
                        self.getCallerId())
                    return
                net = loadNetParameter(snapshotPath)
            if net is not None:
                if sess_id not in self.__alreadyOpenSnapshots.keys():
                    self.__alreadyOpenSnapshots[sess_id] = {snap_id: net}
                else:
                    self.__alreadyOpenSnapshots[sess_id][snap_id] = net
                return net
            else:
                # Show a warning message
                Log.error(
                    'The hdf5 snapshot format is not supported for the weight visualization! '
                    'This can be changed by setting the snapshot_format parameter in the solver properties.',
                    self.getCallerId())