def loadPathHistoryFromPrefs(self):

    proj = self.project
    pathSelector = self.treeWidget.childrenWidget.pathSelector
    pathSelector.clear()

    # Each stored pref path is "<libraryFullName>/<relativePath>".
    sPathList = getPref("file_browser|path_history", [])
    for sPath in sPathList:

        sFullLibName, sRelPath = addEndSlash(sPath).split("/", 1)

        rcLib = proj.loadedLibraries.get(sFullLibName)
        if not rcLib:
            continue

        if sRelPath:
            rcDir = rcLib.getEntry(sRelPath, dbNode=False)
            if not rcDir:
                continue
            sRelPath = rcDir.relPath()

        sTreePath = addEndSlash("/" + pathJoin(rcLib.label, sRelPath))
        sEntryPath = pathJoin(rcLib.fullName, sRelPath)

        pathSelector.addItem(sTreePath, sEntryPath)

    self.treeWidget.childrenWidget.pathHistoryLoaded = True
    pathSelector.setCurrentIndex(-1)
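# Minimal sketch of the pref format consumed above (the sample values are
# assumptions, not actual prefs): each entry of "file_browser|path_history"
# is "<libraryFullName>/<relativePath>", and splitting after addEndSlash()
# yields the library name and its (possibly empty) relative path.
#
#     sPath = "myLib/chars/hero"
#     sFullLibName, sRelPath = addEndSlash(sPath).split("/", 1)
#     # sFullLibName == "myLib", sRelPath == "chars/hero/"
#
#     sPath = "myLib"
#     sFullLibName, sRelPath = addEndSlash(sPath).split("/", 1)
#     # sFullLibName == "myLib", sRelPath == ""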
from itertools import groupby  # needed for the version grouping below


def scan(proj, sDbPath):

    sDbPath = pathNorm(sDbPath)
    library = proj.libraryFromDbPath(sDbPath)
    sCurSite = proj.getCurrentSite()

    # Fetch every db node under sDbPath that carries a "source_size" field.
    dbNodeDct = proj._db.findNodes({"file": {"$regex": "^{}".format(addEndSlash(sDbPath))},
                                    "source_size": {"$exists": True}},
                                   asDict=True)

    # Keep only version nodes (those with a "#parent"), grouped by their head
    # node so that only the latest version of each entry is checked.
    sortKey = lambda n: n._data["#parent"] + "_" + n.name
    dbVersIter = (n for n in dbNodeDct.itervalues() if ("#parent" in n._data))
    dbVersList = sorted(dbVersIter, key=sortKey, reverse=True)
    grpIter = groupby(dbVersList, key=lambda n: n._data["#parent"])

    #for k, g in grpIter:
    #    vn = next(g)
    #    hn = dbNodeDct.get(vn.getField("#parent"))
    #    if int(vn.version) != int(hn.version):
    #        print vn.version, hn.version, hn.file

    dbNodeList = tuple(next(g) for _, g in grpIter)
    numNodes = len(dbNodeList)

    foundList = numNodes * [None]
    errorList = []
    for i, n in enumerate(dbNodeList):

        print "checking {}/{}: {}".format(i + 1, numNodes, n.file)

        rcEntry = library.entryFromDbPath(n.file, dbNode=False, assertLibrary=False)
        if not rcEntry:
            continue

        rcEntry._cacheDbNode(n)
        if rcEntry.fileSize != rcEntry.sourceSize:
            foundList[i] = rcEntry
            continue

        # Latest version is in sync: check the head node as well.
        try:
            n = dbNodeDct[n.getField("#parent")]
        except KeyError as e:
            logMsg(e.message, warning=True)
            errorList.append((rcEntry, e))
            continue

        rcEntry = library.entryFromDbPath(n.file, dbNode=False, assertLibrary=False)
        if not rcEntry:
            sOrigin = n.origin
            if sOrigin and (sOrigin in n._data):
                if ("synced_" + sOrigin in n._data):
                    rcEntry = library.entryFromDbPath(n.file, weak=True,
                                                      dbNode=False, assertLibrary=False)
            errorList.append((rcEntry, EnvironmentError("missing")))
            continue

        rcEntry._cacheDbNode(n)
        if rcEntry.fileSize != rcEntry.sourceSize:
            foundList[i] = rcEntry

    return foundList, errorList, sDbPath
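# Hypothetical usage sketch (how `proj` is obtained and the db path value are
# assumptions, not part of this module): scan() returns one slot per checked
# entry, filled only where the on-disk file size differs from the recorded
# source size, plus the entries that could not be resolved.
#
#     foundList, errorList, sDbPath = scan(proj, "/myLib/chars")
#     mismatchedEntries = [e for e in foundList if e]
#     print "{} entries out of sync under {}".format(len(mismatchedEntries), sDbPath)
#     for rcEntry, error in errorList:
#         logMsg("{}: {}".format(rcEntry, error), warning=True)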