def handleStartSolveMissingLink(wfile, request, session):
    """Build the task list that resolves a missing link.

    The broken element (``element_id``) determines the original file that
    holds the dangling reference.  For an 'ID' element the fix is to point
    the reference at a new element; otherwise the whole library reference
    is redirected to a new file (``file_id``).  Tasks are stored on the
    session; the response body is an empty JSON array.
    """
    productionId = int(session["production_id"])
    production = indexer.getProduction(productionId)
    fileId = int(request["file_id"])
    elementId = int(request["element_id"])

    # Determine the original file from the broken element.
    elementDetails = indexer.getElement(elementId)
    elementType = elementDetails[indexer.INDEX_ELEMENT_TYPE]
    originalFileId = elementDetails[indexer.INDEX_ELEMENT_FILE_ID]
    originalFileDetails = indexer.getFile(originalFileId)

    tasks = []
    if elementType == 'ID':
        # For ID elements, "file_id" actually carries the replacement element id.
        replacementDetails = indexer.getElement(fileId)
        task = ChangeIDElement()
        task.fileDetails = originalFileDetails
        task.fileId = originalFileId
        task.productionDetails = production
        task.libraryDetails = indexer.getElement(
            elementDetails[indexer.INDEX_LIBRARY_ID])
        task.elementDetails = elementDetails
        task.newElementName = replacementDetails[indexer.INDEX_ELEMENT_NAME]
        tasks.append(task)
    else:
        task = ChangeLibrary()
        task.fileDetails = originalFileDetails
        task.fileId = originalFileId
        task.newFileDetails = indexer.getFile(fileId)
        task.productionDetails = production
        task.libraryDetails = elementDetails
        tasks.append(task)

    session["tasks"] = tasks
    wfile.write("""[]""".encode())
def handleSvnUpdate(wfile, request, session):
    """Service to run an SVN update on the production's working copy.

    Looks up the production by ``production_id`` and delegates to
    ``svn.svnUpdate`` with three production columns (index 2 is the working
    copy path, as used by the other handlers; the meaning of columns 5 and 6
    is not visible here — presumably credentials or URL, TODO confirm).
    Responds with an empty JSON array.
    """
    # NOTE(review): the original docstring claimed this "retrieves a list of
    # all available productions" — a copy-paste error; the body clearly
    # performs an SVN update.
    productionId = request["production_id"]
    production = indexer.getProduction(productionId)
    svn.svnUpdate(production[2], production[5], production[6])
    wfile.write(json.dumps([]).encode())
def handleGetFileView(wfile, request, session):
    """Service to retrieve all file level information being:
    production
    file
    elements of the file
    references of the file
    used by of the file
    """
    productionId = int(request["production_id"])
    fileId = int(request["file_id"])
    indexer.updateIndex(productionId)
    production = indexer.getProduction(productionId)
    # Renamed from `file` to avoid shadowing the builtin.
    fileDetail = indexer.getFileDetails(fileId)
    elements = indexer.getFileElements(fileId)
    references = indexer.getFileReferences(fileId)
    usedby = indexer.getFileUsedBy(fileId)

    result = [productionToObject(production)]
    if fileDetail is not None:  # PEP 8: compare to None with `is`
        result.append(fileDetailToObject(fileDetail))
    else:
        # No detail record yet; fall back to the basic file record.
        result.append(fileToObject(indexer.getFile(fileId)))
    result.append(elementsToObject(elements))
    result.append(referencesToObject(references))
    result.append(usedbysToObject(usedby))
    wfile.write(json.dumps(result).encode())
def handleGetFileView(wfile, request, session):
    """Service to retrieve all file level information being:
    production
    file
    elements of the file
    references of the file
    used by of the file
    """
    productionId = int(request["production_id"])
    fileId = int(request["file_id"])
    indexer.updateIndex(productionId)
    production = indexer.getProduction(productionId)
    # `fileRecord` instead of `file` — don't shadow the builtin.
    fileRecord = indexer.getFileDetails(fileId)
    elements = indexer.getFileElements(fileId)
    references = indexer.getFileReferences(fileId)
    usedby = indexer.getFileUsedBy(fileId)

    result = []
    result.append(productionToObject(production))
    if fileRecord is not None:  # PEP 8: `is not None`, not `!= None`
        result.append(fileDetailToObject(fileRecord))
    else:
        # Detail view missing — serve the plain file row instead.
        fileRecord = indexer.getFile(fileId)
        result.append(fileToObject(fileRecord))
    result.append(elementsToObject(elements))
    result.append(referencesToObject(references))
    result.append(usedbysToObject(usedby))
    wfile.write(json.dumps(result).encode())
def handleSvnCommit(wfile, request, session):
    """Commit the production's working copy with the supplied message.

    Responds with an empty JSON array.
    """
    message = request["message"]
    productionId = request["production_id"]
    production = indexer.getProduction(productionId)
    # Column 2 is the production's working-copy path.
    workingCopy = production[2]
    svn.svnCommit(workingCopy, message)
    wfile.write("[]\r\n".encode())
def handleSvnCommit(wfile, request, session):
    """Run ``svn commit`` on the production's working copy.

    Reads ``message`` and ``production_id`` from the request and writes an
    empty JSON array as the response.
    """
    commitMessage = request["message"]
    productionRow = indexer.getProduction(request["production_id"])
    svn.svnCommit(productionRow[2], commitMessage)
    wfile.write("[]\r\n".encode())
def handleGetProductionView(wfile, request, session):
    """Service to retrieve all production level information being:
    production
    files of the production
    scenes of the production
    missing links of the production
    """
    if "production_id" in request:
        production = indexer.getProduction(request["production_id"])
    else:
        production = indexer.getActiveProduction()

    result = []
    if production is not None:
        productionId = production[0]
        session["production_id"] = productionId  # fix for dependancy services..
        indexer.updateIndex(productionId)
        files = indexer.getProductionFiles(productionId)
        scenes = indexer.getAllScenes(productionId)
        errors = indexer.getConsistencyErrors(productionId)

        # Collect SVN status per path; a ClientError (e.g. unversioned
        # working copy) simply yields no status information.
        try:
            states = svn.svnStat(production[2])
        except pysvn.ClientError:
            states = []

        statusByPath = {}
        for stat in states:
            if stat.entry is None:
                if path.isfile(stat.path):
                    statusByPath[path.normcase(stat.path)] = [
                        None, None, str(stat.text_status)
                    ]
            elif stat.entry.kind != svn.pysvn.node_kind.dir:
                statusByPath[path.normcase(stat.path)] = [
                    stat.entry.commit_revision.number,
                    stat.entry.commit_author,
                    str(stat.text_status),
                ]

        # Pair every indexed file with its SVN status (or "unversioned").
        assignedFiles = []
        for fileRow in files:
            absolutePath = path.normcase(path.join(production[2], fileRow[3]))
            if absolutePath in statusByPath:
                assignedFiles.append([fileRow, statusByPath.pop(absolutePath)])
            else:
                assignedFiles.append([fileRow, ["", "", "unversioned"]])

        # Remaining statuses belong to files SVN knows but the index does
        # not; synthesize placeholder rows (id -1) for them.
        for key, status in statusByPath.items():
            assignedFiles.append([
                [-1, productionId, path.basename(key),
                 path.relpath(key, production[2]), 0, 0],
                status,
            ])

        result.append(productionToObject(production))
        result.append(files2ToObject(assignedFiles))
        result.append(scenesToObject(scenes))
        result.append(errorsToObject(errors))
    # NOTE(review): source formatting is collapsed; the response write is
    # assumed to be at function level (empty list when no production).
    wfile.write(json.dumps(result).encode())
def handleSvnAdd(wfile, request, session):
    """Schedule an ``svn add`` for a file, addressed either by indexed
    file id or by an explicit relative location.

    Responds with an empty JSON array.
    """
    file_id = request["file_id"]
    production_id = request["production_id"]
    file_location = request["file_location"]
    addAll = request["add_all"]  # read but unused here — TODO confirm intent

    production_result = indexer.getProduction(production_id)
    production_path = production_result[2]

    # NOTE(review): assumes file_id is an int here; other handlers call
    # int() on request values — confirm -1 is not arriving as "-1".
    if file_id != -1:
        result = indexer.getFile(file_id)
        file_name = result[2]
        rel_file_path = result[3]
        location = path.join(production_path, rel_file_path)
    else:
        location = path.join(production_path, file_location)
    svn.svnAdd(location)

    wfile.write("[]\r\n".encode())
def getAbsoluteFilename(productionId, fileId):
    """Determine the absolute path of the given file.

    input:
        productionId - id of the production the file is part of
        fileId - id of the file
    output:
        string - the absolute path
    """
    productionRow = indexer.getProduction(productionId)
    fileRow = indexer.getFile(fileId)
    # production column 2 is the root path, file column 3 the relative path.
    return os.path.join(productionRow[2], fileRow[3])
def handleSvnAdd(wfile, request, session):
    """Run ``svn add`` on a single path inside the production.

    The target is resolved from the indexed file when ``file_id`` is not
    -1, otherwise from the raw ``file_location``.  Writes an empty JSON
    array as response.
    """
    fileId = request["file_id"]
    productionId = request["production_id"]
    fileLocation = request["file_location"]
    addAll = request["add_all"]  # currently unused in this handler

    productionRow = indexer.getProduction(productionId)
    rootPath = productionRow[2]

    if fileId != -1:
        fileRow = indexer.getFile(fileId)
        fileName = fileRow[2]
        relativePath = fileRow[3]
        svn.svnAdd(path.join(rootPath, relativePath))
    else:
        svn.svnAdd(path.join(rootPath, fileLocation))

    wfile.write("[]\r\n".encode())
def handleStartRenameDirectory(wfile, request, session):
    """Build the task list that renames a directory inside a production.

    The new directory keeps the parent of ``source_directory`` and replaces
    only the last path component with ``target_directory_name``.  For every
    file outside the directory that references a file inside it, a
    ``ChangeReference`` task is queued to rewrite the reference; finally a
    ``RenameDirectory`` task performs the actual rename.  Tasks are stored
    on the session.  Writes an empty JSON array (or an error message) when
    ``wfile`` is provided.
    """
    productionId = int(request["production_id"])
    production = indexer.getProduction(productionId)
    sourceDirectory = str(request["source_directory"])
    targetLastDirectoryName = str(request["target_directory_name"])
    targetDirectory = os.path.join(
        os.path.dirname(sourceDirectory), targetLastDirectoryName)
    targetAbsoluteDirectory = os.path.join(
        production[indexer.INDEX_PRODUCTION_LOCATION], targetDirectory)
    files = indexer.getProductionFiles(productionId)

    # Perform checks before queuing any work.
    if sourceDirectory == targetDirectory:
        wfile.write("""[{"message":"Target directory is same as source."}]""".encode())
        return
    if os.path.exists(targetAbsoluteDirectory):
        wfile.write("""[{"message":"Target directory already exists."}]""".encode())
        return

    tasks = []
    # Files living under the directory being renamed.
    filesInside = [
        file for file in files
        if file[indexer.INDEX_FILE_LOCATION].startswith(sourceDirectory)
    ]

    # Map each referencing file OUTSIDE the directory to the inside files
    # it references, so each reference can be rewritten.
    referencesOutside = {}
    for file in filesInside:
        for reference in indexer.getFileUsedBy(file[indexer.INDEX_FILE_ID]):
            referenceFile = indexer.getFile(
                reference[indexer.INDEX_REFERENCE_FILE_ID])
            if not referenceFile[indexer.INDEX_FILE_LOCATION].startswith(
                    sourceDirectory):
                if referenceFile not in referencesOutside:  # membership, not .keys()
                    referencesOutside[referenceFile] = []
                if file not in referencesOutside[referenceFile]:
                    referencesOutside[referenceFile].append(file)

    for referenceFile in referencesOutside:
        for file in referencesOutside[referenceFile]:
            ac = ChangeReference()
            ac.fileId = referenceFile[indexer.INDEX_FILE_ID]
            ac.fileDetails = referenceFile
            ac.referenceFileId = file[indexer.INDEX_FILE_ID]
            # New location: the file's path with the renamed prefix swapped in.
            ac.newLocation = os.path.dirname(
                file[indexer.INDEX_FILE_LOCATION].replace(
                    sourceDirectory, targetDirectory, 1))
            ac.currentFilename = file[indexer.INDEX_FILE_NAME]
            ac.currentFileLocation = file[indexer.INDEX_FILE_LOCATION]
            ac.productionDetails = production
            tasks.append(ac)

    renameDir = RenameDirectory()
    renameDir.productionDetails = production
    renameDir.sourceDirectory = sourceDirectory
    renameDir.targetDirectory = targetDirectory
    tasks.append(renameDir)

    session["tasks"] = tasks
    if wfile is not None:  # PEP 8: singleton comparison with `is`
        wfile.write("""[]""".encode())
def handleStartRenameElement(wfile, request, session):
    """Queue the tasks that rename an element and update every file that
    references it by name.

    Rejects the rename when the target name already exists in the file or
    when the two-character type prefix of the name would change.  Tasks are
    stored on the session; the response is an empty JSON array or an error.
    """
    productionId = int(session["production_id"])
    production = indexer.getProduction(productionId)
    fileId = int(request["file_id"])
    elementId = int(request["element_id"])
    fileDetails = indexer.getFile(fileId)
    elementDetails = indexer.getElementDetails(elementId)
    newElementName = request["new_name"]

    # Abort if any element in the file already carries the new name.
    for row in indexer.getFileElementByName(fileId, newElementName):
        wfile.write("""[{"message":"Element already exists."}]""".encode())
        return

    # The first two characters encode the element type and must not change.
    if newElementName[0:2] != elementDetails[2][0:2]:
        wfile.write(("""[{"message":"Type of element cannot be changed.["""
                     + elementDetails[2][0:2] + """]"}]""").encode())
        return

    tasks = []
    filesDone = []
    for used in indexer.getReferenceToElement(productionId, fileId,
                                              elementDetails[2]):
        referencingFileId = used[0]
        if referencingFileId in filesDone:
            continue
        renameRef = RenameIDElement()
        renameRef.fileId = referencingFileId
        renameRef.fileDetails = indexer.getFile(referencingFileId)
        renameRef.elementDetails = elementDetails
        renameRef.referenceFileId = fileId
        renameRef.newElementName = newElementName
        renameRef.currentElementName = elementDetails[2]
        renameRef.currentFilename = fileDetails[2]
        renameRef.currentFileLocation = fileDetails[3]
        renameRef.productionDetails = production
        filesDone.append(referencingFileId)
        tasks.append(renameRef)

    renameLocal = RenameElement()
    renameLocal.fileId = fileId
    renameLocal.fileDetails = fileDetails
    renameLocal.elementDetails = elementDetails
    renameLocal.currentFilename = fileDetails[2]
    renameLocal.newElementName = newElementName
    renameLocal.productionDetails = production
    tasks.append(renameLocal)

    session["tasks"] = tasks
    wfile.write("""[]""".encode())
def handleStartRenameElement(wfile, request, session):
    """Start an element rename: validate the new name, then queue
    ``RenameIDElement`` tasks for every referencing file plus one
    ``RenameElement`` task for the owning file itself.

    Tasks land in ``session["tasks"]``; the response is an empty JSON
    array, or an error object when validation fails.
    """
    productionId = int(session["production_id"])
    production = indexer.getProduction(productionId)
    fileId = int(request["file_id"])
    elementId = int(request["element_id"])
    fileDetails = indexer.getFile(fileId)
    elementDetails = indexer.getElementDetails(elementId)
    newElementName = request["new_name"]

    # Duplicate-name check: any matching row means the name is taken.
    for row in indexer.getFileElementByName(fileId, newElementName):
        wfile.write("""[{"message":"Element already exists."}]""".encode())
        return

    # Guard the 2-char type prefix (e.g. "OB", "MA") against change.
    if newElementName[0:2] != elementDetails[2][0:2]:
        wfile.write(("""[{"message":"Type of element cannot be changed.["""
                     + elementDetails[2][0:2] + """]"}]""").encode())
        return

    tasks = []
    seenFileIds = []
    usedby = indexer.getReferenceToElement(productionId, fileId,
                                           elementDetails[2])
    for used in usedby:
        otherFileId = used[0]
        if otherFileId not in seenFileIds:
            task = RenameIDElement()
            task.fileId = otherFileId
            task.fileDetails = indexer.getFile(otherFileId)
            task.elementDetails = elementDetails
            task.referenceFileId = fileId
            task.newElementName = newElementName
            task.currentElementName = elementDetails[2]
            task.currentFilename = fileDetails[2]
            task.currentFileLocation = fileDetails[3]
            task.productionDetails = production
            seenFileIds.append(otherFileId)
            tasks.append(task)

    ownerTask = RenameElement()
    ownerTask.fileId = fileId
    ownerTask.fileDetails = fileDetails
    ownerTask.elementDetails = elementDetails
    ownerTask.currentFilename = fileDetails[2]
    ownerTask.newElementName = newElementName
    ownerTask.productionDetails = production
    tasks.append(ownerTask)

    session["tasks"] = tasks
    wfile.write("""[]""".encode())
def handleGetProductionView(wfile, request, session):
    """Service to retrieve all production level information being:
    production
    files of the production
    scenes of the production
    missing links of the production
    """
    production = (indexer.getProduction(request["production_id"])
                  if "production_id" in request
                  else indexer.getActiveProduction())

    result = []
    if production is not None:
        productionId = production[0]
        session["production_id"] = productionId  # fix for dependancy services..
        indexer.updateIndex(productionId)
        files = indexer.getProductionFiles(productionId)
        scenes = indexer.getAllScenes(productionId)
        errors = indexer.getConsistencyErrors(productionId)

        # SVN status for the working copy; tolerate client errors.
        try:
            states = svn.svnStat(production[2])
        except pysvn.ClientError:
            states = []

        pending = {}
        for stat in states:
            if stat.entry is None:
                # Unversioned entry: record only regular files.
                if path.isfile(stat.path):
                    pending[path.normcase(stat.path)] = [
                        None, None, str(stat.text_status)]
            elif stat.entry.kind != svn.pysvn.node_kind.dir:
                pending[path.normcase(stat.path)] = [
                    stat.entry.commit_revision.number,
                    stat.entry.commit_author,
                    str(stat.text_status)]

        assignedFiles = []
        for fileRow in files:
            key = path.normcase(path.join(production[2], fileRow[3]))
            if key in pending:
                pair = [fileRow, pending[key]]
                del pending[key]
            else:
                pair = [fileRow, ["", "", "unversioned"]]
            assignedFiles.append(pair)

        # Anything left over exists in SVN but not in the index.
        for key, arr in pending.items():
            placeholder = [-1, productionId, path.basename(key),
                           path.relpath(key, production[2]), 0, 0]
            assignedFiles.append([placeholder, arr])

        result.append(productionToObject(production))
        result.append(files2ToObject(assignedFiles))
        result.append(scenesToObject(scenes))
        result.append(errorsToObject(errors))
    # NOTE(review): collapsed source is ambiguous about indentation — the
    # response write is taken to be at function level.
    wfile.write(json.dumps(result).encode())
def handleGet(wfile, request, session):
    """Query the dependency graph and return it as a JSON list of edges.

    ``view`` selects the query scope (production / uses / used /
    neighbour); ``filter`` narrows the result.  Each edge record carries
    source/target file ids and locations plus the element name and type.
    """
    # NOTE(review): the factory is configured exactly as in handleGetSVG
    # but never used to render here — presumably leftover from a copy;
    # kept to preserve behavior, TODO confirm it can be dropped.
    factory = DependanciesSVGFactory()
    productionId = session["production_id"]
    indexer.updateIndex(productionId)
    production = indexer.getProduction(productionId)
    fileId = request.get("file_id", session.get("file_id", None))
    detail = request.get("display", "detail") == "detail"
    view = request.get("view", "uses")
    factory.RenderNavigation = request.get("navigation", "0") == "1"
    factory.Detail = detail
    filter = request.get("filter", "all")
    factory.Production = production

    result = []
    if fileId is None:
        view = "production"
    if view == "production":
        result = indexer.queryDependancy(productionId, filter)
    if view == "uses":
        result = indexer.queryDependancyUses(productionId, fileId, filter)
    if view == "used":
        result = indexer.queryDependancyUsed(productionId, fileId, filter)
    if view == "neighbour":
        result = indexer.queryDependancyNeighbour(productionId, fileId, filter)

    items = [{
        "source_file_id": row[4],
        "source_file_location": row[0],
        "target_file_id": row[5],
        "target_file_location": row[1],
        "element_name": row[3],
        "element_type": row[2],
    } for row in result]
    wfile.write(json.dumps(items).encode())
def handleStartRenameFile(wfile, request, session):
    """Queue the tasks that rename a file: one ``RenameLibrary`` task per
    referencing file plus a ``RenameFile`` task for the file itself.

    Rejects the rename when the target filename already exists next to the
    current file.  Tasks are stored on the session.
    """
    productionId = int(session["production_id"])
    production = indexer.getProduction(productionId)
    fileId = int(request["file_id"])
    fileDetails = indexer.getFile(fileId)
    newFilename = request["new_filename"]

    # Refuse to clobber an existing file in the same directory.
    targetPath = os.path.join(
        os.path.dirname(os.path.join(production[2], fileDetails[3])),
        newFilename)
    if os.path.exists(targetPath):
        wfile.write("""[{"message":"File already exists."}]""".encode())
        return

    tasks = []
    handledFileIds = []
    for used in indexer.getFileUsedBy(fileId):
        referencingId = used[3]
        if referencingId in handledFileIds:
            continue
        refTask = RenameLibrary()
        refTask.fileId = referencingId
        refTask.fileDetails = indexer.getFile(referencingId)
        refTask.referenceFileId = fileId
        refTask.newFilename = newFilename
        refTask.currentFilename = fileDetails[2]
        refTask.currentFileLocation = fileDetails[3]
        refTask.productionDetails = production
        handledFileIds.append(referencingId)
        tasks.append(refTask)

    renameTask = RenameFile()
    renameTask.fileId = fileId
    renameTask.fileDetails = fileDetails
    renameTask.currentFilename = fileDetails[2]
    renameTask.newFilename = newFilename
    renameTask.productionDetails = production
    tasks.append(renameTask)

    session["tasks"] = tasks
    wfile.write("""[]""".encode())
def handleSvnRevert(wfile, request, session):
    """Revert SVN changes for the whole production, a single indexed file,
    or an explicit path.

    Exactly one of three modes applies:
    - ``revert_all`` with no file id/path: recursive revert of the root.
    - ``file_id``: revert the indexed file's path.
    - ``file_location``: revert the given relative path.
    Responds with an empty JSON array.
    """
    file_id = request["file_id"]
    file_path = request["file_location"]
    revertAll = request["revert_all"]
    production_id = request["production_id"]
    production_result = indexer.getProduction(production_id)
    production_path = production_result[2]

    # PEP 8: compare to None with `is`/`is not`, not `==`/`!=`.
    if file_id is None and file_path is None and revertAll:
        svn.svnRevert(production_path, revertAll)
    elif file_id is not None and not revertAll:
        result = indexer.getFile(file_id)
        rel_file_path = result[3]
        location = path.join(production_path, rel_file_path)
        svn.svnRevert(location)
    elif file_path is not None and not revertAll:
        rel_file_path = file_path
        location = path.join(production_path, rel_file_path)
        svn.svnRevert(location)
    wfile.write("[]\r\n".encode())
def handleSvnRevert(wfile, request, session):
    """Revert SVN modifications.

    Three mutually exclusive modes, selected by the request fields:
    whole-production revert (``revert_all``), by indexed ``file_id``, or
    by raw ``file_location``.  Writes an empty JSON array.
    """
    file_id = request["file_id"]
    file_path = request["file_location"]
    revertAll = request["revert_all"]
    production_id = request["production_id"]
    production_result = indexer.getProduction(production_id)
    production_path = production_result[2]

    # Singleton comparisons use `is`/`is not` (PEP 8) — was `==`/`!=`.
    if file_id is None and file_path is None and revertAll:
        svn.svnRevert(production_path, revertAll)
    elif file_id is not None and not revertAll:
        result = indexer.getFile(file_id)
        rel_file_path = result[3]
        location = path.join(production_path, rel_file_path)
        svn.svnRevert(location)
    elif file_path is not None and not revertAll:
        location = path.join(production_path, file_path)
        svn.svnRevert(location)
    wfile.write("[]\r\n".encode())
def handleStartMoveFile(wfile, request, session):
    """Queue the tasks that move a file to a new location: one
    ``ChangeReference`` task per referencing file, then a ``MoveFile``
    task for the file itself.

    Refuses the move when a file with the same name already exists at the
    destination.  Tasks are stored on the session.
    """
    productionId = int(session["production_id"])
    production = indexer.getProduction(productionId)
    fileId = int(request["file_id"])
    fileDetails = indexer.getFile(fileId)
    newLocation = request["new_location"]

    # Destination collision check.
    if os.path.exists(os.path.join(production[2], newLocation,
                                   fileDetails[2])):
        wfile.write("""[{"message":"File already exists."}]""".encode())
        return

    tasks = []
    handledIds = []
    for used in indexer.getFileUsedBy(fileId):
        referencingId = used[3]
        if referencingId in handledIds:
            continue
        refTask = ChangeReference()
        refTask.fileId = referencingId
        refTask.fileDetails = indexer.getFile(referencingId)
        refTask.referenceFileId = fileId
        refTask.newLocation = newLocation
        refTask.currentFilename = fileDetails[2]
        refTask.currentFileLocation = fileDetails[3]
        refTask.productionDetails = production
        handledIds.append(referencingId)
        tasks.append(refTask)

    moveTask = MoveFile()
    moveTask.fileId = fileId
    moveTask.fileDetails = fileDetails
    moveTask.currentFilename = fileDetails[2]
    moveTask.currentFileLocation = fileDetails[3]
    moveTask.newLocation = newLocation
    moveTask.productionDetails = production
    tasks.append(moveTask)

    session["tasks"] = tasks
    wfile.write("""[]""".encode())
def handleGetSVG(wfile, request, session):
    """Render the dependency graph as SVG via ``DependanciesSVGFactory``.

    ``view`` selects the query scope (production / uses / used /
    neighbour); ``filter`` and ``display`` tune the query and the URL
    template embedded in the rendered SVG.
    """
    factory = DependanciesSVGFactory()
    productionId = session["production_id"]
    indexer.updateIndex(productionId)
    production = indexer.getProduction(productionId)
    fileId = request.get("file_id", session.get("file_id", None))
    detail = request.get("display", "detail") == "detail"
    view = request.get("view", "uses")
    factory.RenderNavigation = request.get("navigation", "0") == "1"
    factory.Detail = detail
    filter = request.get("filter", "all")
    factory.Production = production

    result = []
    # Without a file to anchor on, fall back to the production-wide view.
    if fileId is None:
        view = "production"
    if view == "production":
        result = indexer.queryDependancy(productionId, filter)
    if view == "uses":
        result = indexer.queryDependancyUses(productionId, fileId, filter)
    if view == "used":
        result = indexer.queryDependancyUsed(productionId, fileId, filter)
    if view == "neighbour":
        result = indexer.queryDependancyNeighbour(productionId, fileId, filter)

    display = "detail" if detail else "global"
    factory.URLTemplate = "/".join(["svg", "1", view, filter, display])
    factory.Render(wfile, result)
def handleStartSolveMissingLink(wfile, request, session):
    """Queue the task that repairs a missing link.

    The element identified by ``element_id`` points back at the file that
    owns the broken reference.  ID elements get a ``ChangeIDElement`` task
    (where ``file_id`` carries the replacement element id); anything else
    gets a ``ChangeLibrary`` task redirecting to the file ``file_id``.
    """
    productionId = int(session["production_id"])
    production = indexer.getProduction(productionId)
    fileId = int(request["file_id"])
    elementId = int(request["element_id"])

    # Determine the original file from the element id.
    elementDetails = indexer.getElement(elementId)
    elementType = elementDetails[indexer.INDEX_ELEMENT_TYPE]
    ownerFileId = elementDetails[indexer.INDEX_ELEMENT_FILE_ID]
    ownerFileDetails = indexer.getFile(ownerFileId)

    tasks = []
    if elementType == 'ID':
        replacement = indexer.getElement(fileId)
        fix = ChangeIDElement()
        fix.fileDetails = ownerFileDetails
        fix.fileId = ownerFileId
        fix.productionDetails = production
        fix.libraryDetails = indexer.getElement(
            elementDetails[indexer.INDEX_LIBRARY_ID])
        fix.elementDetails = elementDetails
        fix.newElementName = replacement[indexer.INDEX_ELEMENT_NAME]
        tasks.append(fix)
    else:
        fix = ChangeLibrary()
        fix.fileDetails = ownerFileDetails
        fix.fileId = ownerFileId
        fix.newFileDetails = indexer.getFile(fileId)
        fix.productionDetails = production
        fix.libraryDetails = elementDetails
        tasks.append(fix)

    session["tasks"] = tasks
    wfile.write("""[]""".encode())
def handleStartMoveDirectory(wfile, request, session):
    """Queue the tasks that move a directory to a new location.

    Two categories of reference fixes are queued before the actual move:
    ``ChangeReference`` for files OUTSIDE the directory that reference
    files inside it, and ``ChangeReferenceForMove`` for files INSIDE the
    directory that reference files outside it.  A final ``MoveDirectory``
    task performs the move.  Tasks go into ``session["tasks"]``.
    """
    productionId = int(request["production_id"])
    production = indexer.getProduction(productionId)
    sourceDirectory = str(request["source_directory"])
    targetDirectory = str(request["target_directory"])
    targetAbsoluteDirectory = os.path.join(
        production[indexer.INDEX_PRODUCTION_LOCATION], targetDirectory)

    # Perform checks before queuing anything.
    if sourceDirectory == targetDirectory:
        wfile.write(
            """[{"message":"Target directory is same as source."}]""".encode())
        return
    if os.path.exists(targetAbsoluteDirectory):
        wfile.write(
            """[{"message":"Target directory already exists."}]""".encode())
        return

    files = indexer.getProductionFiles(productionId)
    tasks = []
    filesInside = [
        file for file in files
        if file[indexer.INDEX_FILE_LOCATION].startswith(sourceDirectory)
    ]

    # Outside files that reference inside files: rewrite their references
    # to point at the moved location.
    referencesOutside = {}
    for file in filesInside:
        for reference in indexer.getFileUsedBy(file[indexer.INDEX_FILE_ID]):
            referenceFile = indexer.getFile(
                reference[indexer.INDEX_REFERENCE_FILE_ID])
            if not referenceFile[indexer.INDEX_FILE_LOCATION].startswith(
                    sourceDirectory):
                if referenceFile not in referencesOutside.keys():
                    referencesOutside[referenceFile] = []
                if file not in referencesOutside[referenceFile]:
                    referencesOutside[referenceFile].append(file)
    for referenceFile in referencesOutside.keys():
        for file in referencesOutside[referenceFile]:
            fix = ChangeReference()
            fix.fileId = referenceFile[indexer.INDEX_FILE_ID]
            fix.fileDetails = referenceFile
            fix.referenceFileId = file[indexer.INDEX_FILE_ID]
            fix.newLocation = os.path.dirname(
                file[indexer.INDEX_FILE_LOCATION].replace(
                    sourceDirectory, targetDirectory, 1))
            fix.currentFilename = file[indexer.INDEX_FILE_NAME]
            fix.currentFileLocation = file[indexer.INDEX_FILE_LOCATION]
            fix.productionDetails = production
            tasks.append(fix)

    # Inside files that reference outside files: their own relative
    # references change because they move with the directory.
    referencesInside = {}
    for file in filesInside:
        for reference in indexer.getFileReferences(
                file[indexer.INDEX_FILE_ID]):
            referenceFile = indexer.getFile(
                reference[indexer.INDEX_REFERENCE_FILE_ID])
            if referenceFile != None and not referenceFile[
                    indexer.INDEX_FILE_LOCATION].startswith(sourceDirectory):
                if referenceFile not in referencesInside.keys():
                    referencesInside[referenceFile] = []
                if file not in referencesInside[referenceFile]:
                    referencesInside[referenceFile].append(file)
    for referenceFile in referencesInside.keys():
        for file in referencesInside[referenceFile]:
            fix = ChangeReferenceForMove()
            fix.fileId = file[indexer.INDEX_FILE_ID]
            fix.fileDetails = file
            fix.referenceFileDetails = referenceFile
            fix.referenceFileId = referenceFile[indexer.INDEX_FILE_ID]
            fix.targetDirectory = targetDirectory
            fix.sourceDirectory = sourceDirectory
            fix.currentFilename = file[indexer.INDEX_FILE_NAME]
            fix.currentFileLocation = file[indexer.INDEX_FILE_LOCATION]
            fix.productionDetails = production
            tasks.append(fix)

    moveDir = MoveDirectory()
    moveDir.productionDetails = production
    moveDir.sourceDirectory = sourceDirectory
    moveDir.targetDirectory = targetDirectory
    tasks.append(moveDir)

    session["tasks"] = tasks
    if wfile != None:
        wfile.write("""[]""".encode())