def handleGetProductionView(wfile, request, session):
    """Service to retrieve all production level information
    being:
        production
        files of the production
        scenes of the production
        missing links of the production
    """
    if ("production_id" in request):
        production = indexer.getProduction(request["production_id"])
    else:
        production = indexer.getActiveProduction()
    result = []
    if production is not None:
        productionId = production[0]
        session["production_id"] = productionId  #fix for dependancy services..
        indexer.updateIndex(productionId)
        files = indexer.getProductionFiles(productionId)
        scenes = indexer.getAllScenes(productionId)
        errors = indexer.getConsistencyErrors(productionId)
        try:
            states = svn.svnStat(production[2])  # production[2]: production location on disk
        except pysvn.ClientError:
            # status lookup can fail (e.g. not a working copy); fall back to empty status
            states = []
        temp = {}
        assignedFiles = []
        for stat in states:
            if stat.entry is None:
                if path.isfile(stat.path):
                    temp[path.normcase(
                        stat.path)] = [None, None,
                                       str(stat.text_status)]
            elif stat.entry.kind != svn.pysvn.node_kind.dir:
                temp[path.normcase(stat.path)] = [
                    stat.entry.commit_revision.number,
                    stat.entry.commit_author,
                    str(stat.text_status)
                ]
        for file in files:
            abspath = path.normcase(path.join(production[2], file[3]))
            if abspath in temp:
                ass = [file, temp[abspath]]
                del temp[abspath]
            else:
                ass = [file, ["", "", "unversioned"]]
            assignedFiles.append(ass)
        # leftover status entries are files on disk / in SVN that are not indexed
        for key, arr in temp.items():
            ass = [[
                -1, productionId,
                path.basename(key),
                path.relpath(key, production[2]), 0, 0
            ], arr]
            assignedFiles.append(ass)
        result.append(productionToObject(production))
        result.append(files2ToObject(assignedFiles))
        result.append(scenesToObject(scenes))
        result.append(errorsToObject(errors))
    wfile.write(json.dumps(result).encode())
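# The handler above pairs each indexed file with its SVN status by keying both sides on a
# normcased absolute path, then reports leftover status entries as files that exist on disk
# or in SVN but are not indexed. Below is a minimal standalone sketch of that pairing; the
# helper name _pairFilesWithStatus and the row layout are illustrative, not part of this module.
from os import path

def _pairFilesWithStatus(productionRoot, indexedFiles, statusByPath):
    # indexedFiles: list of (fileRow, relativePath)
    # statusByPath: normcased absolute path -> [revision, author, status]
    remaining = dict(statusByPath)
    paired = []
    for fileRow, relativePath in indexedFiles:
        absPath = path.normcase(path.join(productionRoot, relativePath))
        paired.append([fileRow, remaining.pop(absPath, ["", "", "unversioned"])])
    for absPath, status in remaining.items():
        paired.append([[-1, path.basename(absPath)], status])  # on disk / in SVN, not indexed
    return paired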
def handleStartRenameDirectory(wfile, request, session):
    """Stage the tasks needed to rename a production directory: reference updates
    for files outside the directory plus the rename itself. Tasks are stored in
    the session for later execution; an empty JSON array means all checks passed."""
    productionId = int(request["production_id"])
    production = indexer.getProduction(productionId)
    sourceDirectory = str(request["source_directory"])
    targetLastDirectoryName = str(request["target_directory_name"])
    targetDirectory = os.path.join(
        os.path.dirname(sourceDirectory), targetLastDirectoryName)
    targetAbsoluteDirectory = os.path.join(
        production[indexer.INDEX_PRODUCTION_LOCATION], targetDirectory)
    files = indexer.getProductionFiles(productionId)
    # perform checks
    if sourceDirectory == targetDirectory:
        wfile.write(
            """[{"message":"Target directory is same as source."}]""".encode())
        return
    if os.path.exists(targetAbsoluteDirectory):
        wfile.write(
            """[{"message":"Target directory already exists."}]""".encode())
        return
    filesInside = []
    tasks = []
    for file in files:
        if file[indexer.INDEX_FILE_LOCATION].startswith(sourceDirectory):
            filesInside.append(file)

    # files outside the renamed directory that reference files inside it
    referencesOutside = {}
    for file in filesInside:
        referencesFromOutside = indexer.getFileUsedBy(
            file[indexer.INDEX_FILE_ID])
        for reference in referencesFromOutside:
            referenceFile = indexer.getFile(
                reference[indexer.INDEX_REFERENCE_FILE_ID])
            if not referenceFile[indexer.INDEX_FILE_LOCATION].startswith(
                    sourceDirectory):
                if referenceFile not in referencesOutside.keys():
                    referencesOutside[referenceFile] = []
                if file not in referencesOutside[referenceFile]:
                    referencesOutside[referenceFile].append(file)

    # one ChangeReference task per (outside file, renamed file) pair
    for referenceFile in referencesOutside.keys():
        for file in referencesOutside[referenceFile]:
            ac = ChangeReference()
            ac.fileId = referenceFile[indexer.INDEX_FILE_ID]
            ac.fileDetails = referenceFile
            ac.referenceFileId = file[indexer.INDEX_FILE_ID]
            ac.newLocation = os.path.dirname(
                file[indexer.INDEX_FILE_LOCATION].replace(
                    sourceDirectory, targetDirectory, 1))
            ac.currentFilename = file[indexer.INDEX_FILE_NAME]
            ac.currentFileLocation = file[indexer.INDEX_FILE_LOCATION]
            ac.productionDetails = production
            tasks.append(ac)

    renameDir = RenameDirectory()
    renameDir.productionDetails = production
    renameDir.sourceDirectory = sourceDirectory
    renameDir.targetDirectory = targetDirectory
    tasks.append(renameDir)

    session["tasks"] = tasks
    if wfile is not None:
        wfile.write("""[]""".encode())
def handleGetMissingLinkSolutions(wfile, request, session):
    """Return candidate solutions for a missing link element ('LI', 'ID' or 'IM') as a JSON array."""
    if "production_id" in request:
        productionId = int(request["production_id"])
    else:
        productionId = int(session["production_id"])
    elementId = int(request["element_id"])
    elementDetails = indexer.getElement(elementId)
    result = []
    if elementDetails[indexer.INDEX_ELEMENT_TYPE] == 'LI':
        solutions = indexer.queryMissingLinkSolutions(productionId, elementId)
        for solution in solutions:
            result.append(solutionToObject(solution))
    elif elementDetails[indexer.INDEX_ELEMENT_TYPE] == 'ID':
        solutions = indexer.queryMissingLinkSolutionsID(
            productionId, elementId)
        for solution in solutions:
            result.append(solutionIDToObject(solution))
    elif elementDetails[indexer.INDEX_ELEMENT_TYPE] == 'IM':
        files = indexer.getProductionFiles(productionId)
        sfn = os.path.basename(
            elementDetails[indexer.INDEX_ELEMENT_LI_NAME].replace("\\", "/"))
        extension = os.path.splitext(sfn)  # (base name, extension) of the missing image
        still = []
        for f in files:
            fname = f[indexer.INDEX_FILE_NAME]
            if fname == sfn:
                result.append(solutionToObject(f, 1.0))
            else:
                if not fname.endswith(".blend"):
                    still.append(f)
        files = still
        still = []
        for f in files:
            fname = f[indexer.INDEX_FILE_NAME]
            if fname.startswith(extension[0]):
                result.append(solutionToObject(f, 0.75))
            else:
                still.append(f)
        files = still
        still = []
        for f in files:
            fname = f[indexer.INDEX_FILE_NAME]
            if fname.endswith(extension[1]):
                result.append(solutionToObject(f, 0.5))
            else:
                still.append(f)
        for f in still:
            result.append(solutionToObject(f, 0.25))

    wfile.write(json.dumps(result).encode())
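# For missing image links ('IM') the handler above weights candidate files by how closely
# their name matches the missing image: exact name 1.0, shared base name 0.75, shared
# extension 0.5, anything else 0.25, while non-matching ".blend" files are skipped entirely.
# A standalone sketch of that scoring; the helper name is illustrative, not part of this module.
import os.path

def _imageMatchConfidence(missingName, candidateName):
    base, ext = os.path.splitext(missingName)
    if candidateName == missingName:
        return 1.0
    if candidateName.endswith(".blend"):
        return None  # the handler drops these candidates
    if candidateName.startswith(base):
        return 0.75
    if candidateName.endswith(ext):
        return 0.5
    return 0.25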
def handleStartMoveDirectory(wfile, request, session):
    """Stage the tasks needed to move a production directory: reference updates in
    both directions plus the move itself. Tasks are stored in the session for later
    execution; an empty JSON array means all checks passed."""
    productionId = int(request["production_id"])
    production = indexer.getProduction(productionId)
    sourceDirectory = str(request["source_directory"])
    targetDirectory = str(request["target_directory"])
    targetAbsoluteDirectory = os.path.join(
        production[indexer.INDEX_PRODUCTION_LOCATION], targetDirectory)
    #perform checks
    if sourceDirectory == targetDirectory:
        wfile.write(
            """[{"message":"Target directory is same as source."}]""".encode())
        return
    if os.path.exists(targetAbsoluteDirectory):
        wfile.write(
            """[{"message":"Target directory already exists."}]""".encode())
        return
    files = indexer.getProductionFiles(productionId)
    filesInside = []
    tasks = []
    for file in files:
        if file[indexer.INDEX_FILE_LOCATION].startswith(sourceDirectory):
            filesInside.append(file)

    # files outside the moved directory that reference files inside it
    referencesOutside = {}
    for file in filesInside:
        referencesFromOutside = indexer.getFileUsedBy(
            file[indexer.INDEX_FILE_ID])
        for reference in referencesFromOutside:
            referenceFile = indexer.getFile(
                reference[indexer.INDEX_REFERENCE_FILE_ID])
            if not referenceFile[indexer.INDEX_FILE_LOCATION].startswith(
                    sourceDirectory):
                if referenceFile not in referencesOutside.keys():
                    referencesOutside[referenceFile] = []
                if file not in referencesOutside[referenceFile]:
                    referencesOutside[referenceFile].append(file)

    for referenceFile in referencesOutside.keys():
        for file in referencesOutside[referenceFile]:
            ac = ChangeReference()
            ac.fileId = referenceFile[indexer.INDEX_FILE_ID]
            ac.fileDetails = referenceFile
            ac.referenceFileId = file[indexer.INDEX_FILE_ID]
            ac.newLocation = os.path.dirname(
                file[indexer.INDEX_FILE_LOCATION].replace(
                    sourceDirectory, targetDirectory, 1))
            ac.currentFilename = file[indexer.INDEX_FILE_NAME]
            ac.currentFileLocation = file[indexer.INDEX_FILE_LOCATION]
            ac.productionDetails = production
            tasks.append(ac)

    # files outside the moved directory that are referenced by files inside it
    referencesInside = {}
    for file in filesInside:
        referencesFromInside = indexer.getFileReferences(
            file[indexer.INDEX_FILE_ID])
        for reference in referencesFromInside:
            referenceFile = indexer.getFile(
                reference[indexer.INDEX_REFERENCE_FILE_ID])
            if referenceFile is not None and not referenceFile[
                    indexer.INDEX_FILE_LOCATION].startswith(sourceDirectory):
                if referenceFile not in referencesInside.keys():
                    referencesInside[referenceFile] = []
                if file not in referencesInside[referenceFile]:
                    referencesInside[referenceFile].append(file)

    for referenceFile in referencesInside.keys():
        for file in referencesInside[referenceFile]:
            ac = ChangeReferenceForMove()
            ac.fileId = file[indexer.INDEX_FILE_ID]
            ac.fileDetails = file
            ac.referenceFileDetails = referenceFile
            ac.referenceFileId = referenceFile[indexer.INDEX_FILE_ID]
            ac.targetDirectory = targetDirectory
            ac.sourceDirectory = sourceDirectory
            ac.currentFilename = file[indexer.INDEX_FILE_NAME]
            ac.currentFileLocation = file[indexer.INDEX_FILE_LOCATION]
            ac.productionDetails = production
            tasks.append(ac)

    moveDir = MoveDirectory()
    moveDir.productionDetails = production
    moveDir.sourceDirectory = sourceDirectory
    moveDir.targetDirectory = targetDirectory
    tasks.append(moveDir)

    session["tasks"] = tasks
    if wfile is not None:
        wfile.write("""[]""".encode())