def logRcFileData(proj, sDbDirPath, sFilePattern, sortBy="", **kwargs): sAllSites = proj.listAllSites() count = 0 sizeSum = 0 files = iterFileFromDb(proj, sDbDirPath, sFilePattern, **kwargs) if sortBy: files = sorted(files, key=lambda f:getattr(f, sortBy)) for f in files: syncData = f._dbnode.getData(*sAllSites) print "{:<48}| {:<15}| {:<30}| {}".format(f.name, toDisplayText(f.fileSize), toDisplayText(f.fsMtime), syncData, #", ".join(sorted(syncData.iterkeys())), ) sizeSum += f.fileSize count += 1 sMsg = "- {} '{}' files".format(count, sFilePattern) print "{:<48}| {:<15}".format(sMsg, toDisplayText(MemSize(sizeSum)))
def report(foundList, errorList, sDbPath=""): def _u(v): return unicode_(v) if isinstance(v, basestring) else v numNodes = len(foundList) foundList = sorted((e for e in foundList if e), key=lambda e:e.dbMtime) table = [] for rcEntry in foundList: try: dbnode = rcEntry._dbnode sOnlineTime = strftime(long(dbnode.synced_online) / 1000) if dbnode.synced_online else "" texts = map(_u, (rcEntry.dbPath(), rcEntry.author, rcEntry.origin, strftime(toTimestamp(rcEntry.dbMtime)), cmpStr(rcEntry.dbMtime, rcEntry.fsMtime), strftime(toTimestamp(rcEntry.fsMtime)), repr(rcEntry.fileSize), cmpStr(rcEntry.fileSize, rcEntry.sourceSize), repr(rcEntry.sourceSize), sOnlineTime, )) except Exception as e: print toStr(e), ":", rcEntry.absPath() continue table.append(texts) headers = ["file", "author", "site", "published", "", "modified", "current size", "", "source size", "synced online"] print tabulate(table, headers, tablefmt="simple") print len(foundList), "bad on", numNodes, "files - scan errors:", len(errorList) sFileName = sDbPath.strip("/").replace("/", "_") + "_report.html" sHtmlPath = pathResolve("%USERPROFILE%/Documents/{}".format(sFileName)) sCharset = '<head>\n<meta charset="UTF-8">\n</head>\n' with codecs.open(sHtmlPath, "w", "utf_8") as fo: fo.writelines(sCharset + tabulate(table, headers, tablefmt="html")) for rcEntry, e in errorList: print rcEntry.dbPath(), type(e), e.message, toDisplayText(rcEntry.dbMtime)
def setSyncRulesToDefault(proj, entityType="", dryRun=True, includeOmitted=False, filters=None, fields=None):
    """Collect rc entries whose sync rules differ from their default rule list.

    Scans the project's assets and/or shots, preloads DbNodes per parent
    entity, then compares each synced resource's current syncRules against
    the expected default list.

    Args:
        proj: project object providing listAllSgAssets()/listAllSgShots().
        entityType: "asset", "shot", or empty for both (via argToTuple).
        dryRun: NOTE(review): currently inert — the code that would apply
            the rules (rcEntry.setSyncRules) is commented out below.
        includeOmitted: forwarded to the Shotgun listing calls.
        filters: extra Shotgun filters forwarded as moreFilters.
        fields: extra Shotgun fields forwarded as moreFields.

    Returns:
        list of (rcEntry, sSyncRuleList) pairs needing a rule update.
    """
    syncedPathItems = []
    if not entityType:
        sEntityTypes = ("asset", "shot")
    else:
        sEntityTypes = argToTuple(entityType)
    bAssets = "asset" in sEntityTypes
    bShots = "shot" in sEntityTypes
    if bAssets:
        allSgAstList = proj.listAllSgAssets(includeOmitted=includeOmitted, moreFilters=filters, moreFields=fields)
        # allSgAstList.append({"code":"chfjhgrjlkhdjtr"})
        print "Assets:", len(allSgAstList)
    if bShots:
        allSgShotList = proj.listAllSgShots(includeOmitted=includeOmitted, moreFilters=filters, moreFields=fields)
        print "Shots:", len(allSgShotList)
    if bAssets:
        syncedPathItems = list(iterSyncedResource(proj, "asset", allSgAstList))
    if bShots:
        syncedPathItems.extend(iterSyncedResource(proj, "shot", allSgShotList))
    # Preload DbNodes once per parent entity so the per-entry lookups below
    # don't hit the db repeatedly.
    # NOTE(review): itertools.groupby only merges CONSECUTIVE items with the
    # same key — this assumes iterSyncedResource yields items already grouped
    # by parent entity; confirm, otherwise some parents load twice.
    grpIter = groupby(syncedPathItems, key=lambda x: x[0].parentEntity())
    for k, grp in grpIter:
        # first item of the group is enough to locate the shared parent dir
        damEntity = next(grp)[0]
        parentDir = damEntity.getResource("public", dbNode=False, weak=True).parentDir()
        print "loading DbNodes for '{}'".format(k)
        parentDir.loadChildDbNodes(recursive=True, noVersions=True)
    rcRuleItems = []
    for damEntity, sAbsPath, sSyncRuleList in syncedPathItems:
        rcEntry = damEntity.getLibrary().getEntry(sAbsPath, dbNode=False)
        # print rcEntry, sAbsPath, damEntity, rcEntry.syncRules
        if not rcEntry:
            continue
        # normalize to a sorted list of plain strings before comparing
        rcEntry.syncRules = sorted(str(u) for u in rcEntry.syncRules)
        if rcEntry.syncRules == sSyncRuleList:
            continue
        sTime = toDisplayText(rcEntry.dbMtime)
        # sTime = ""
        # dbNode = rcEntry._dbnode
        # if dbNode:
        #     sTime = toDisplayText(datetime.fromtimestamp(dbNode.time / 1000))
        print rcEntry.relPath(), rcEntry.syncRules, sSyncRuleList, sTime
        rcRuleItems.append((rcEntry, sSyncRuleList))
        # if not dryRun:
        #     rcEntry.setSyncRules(sSyncRuleList)
    return rcRuleItems