    def FileSearch(self, request, context):
        username = request.username
        filename = request.filename
        print(username + " " + filename)
        metadata = self.pickledbMetadataobj.getFileData(username)
        if filename in metadata:
            return fileService_pb2.ack(success=True, message="File Found!")
        return fileService_pb2.ack(success=False, message="File not found")
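For context, a minimal client-side call to this FileSearch RPC might look like the sketch below. It is an assumption rather than part of the original listing: it presumes the generated fileService_pb2 / fileService_pb2_grpc modules and a server reachable at localhost:50051.

import grpc
import fileService_pb2
import fileService_pb2_grpc

def search_file(username, filename, address="localhost:50051"):
    # Open a plaintext channel and call the unary FileSearch RPC.
    channel = grpc.insecure_channel(address)
    stub = fileService_pb2_grpc.FileserviceStub(channel)
    response = stub.FileSearch(
        fileService_pb2.FileInfo(username=username, filename=filename))
    print(response.success, response.message)
    return response.success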
Example #2
    def FileSearch(self, request, context):
        username, filename = request.username, request.filename

        if (self.fileExists(username, filename) == 1):
            return fileService_pb2.ack(success=True,
                                       message="File exists in the cluster.")
        else:
            return fileService_pb2.ack(
                success=False, message="File does not exist in the cluster.")
    def FileDelete(self, request, context):
        print("In FileDelete")

        if (self.fileExists(request.username, request.filename) == False):
            return fileService_pb2.ack(
                success=False,
                message="File {} does not exist.".format(request.filename))

        fileMeta = db.parseMetaData(request.username, request.filename)
        print("FileMeta = ", fileMeta)

        primaryIP, replicaIP = -1, -1
        channel1, channel2 = -1, -1
        if (fileMeta[0] in self.clusterLeaders):
            primaryIP = self.clusterLeaders[fileMeta[0]]
            channel1 = self.clusterStatus.isChannelAlive(primaryIP)

        if (fileMeta[1] in self.clusterLeaders):
            replicaIP = self.clusterLeaders[fileMeta[1]]
            channel2 = self.clusterStatus.isChannelAlive(replicaIP)

        print("PrimarIP={}, replicaIP={}".format(primaryIP, replicaIP))

        if (channel1 != -1):
            print("Making fileDelete call to primary")
            stub = fileService_pb2_grpc.FileserviceStub(channel1)
            response = stub.FileDelete(
                fileService_pb2.FileInfo(username=request.username,
                                         filename=request.filename))
            print("Received response = ", response.success)

        if (channel2 != -1):
            print("Making fileDelete call to replica")
            stub = fileService_pb2_grpc.FileserviceStub(channel2)
            response = stub.FileDelete(
                fileService_pb2.FileInfo(username=request.username,
                                         filename=request.filename))
            print("Received response = ", response.success)

        if (response.success == True):
            db.deleteEntry(request.username + "_" + request.filename)
            print("Successfully deleted.")
            return fileService_pb2.ack(
                success=True,
                message="File successfully deleted from cluster : " +
                fileMeta[0])
        else:
            print("Could not delete from replica")
            return fileService_pb2.ack(success=False, message="Internal error")
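The clusterStatus.isChannelAlive helper used above is also not part of the listing. A hedged sketch of what it might do, probing the leader's address with a short readiness timeout and returning the same -1 sentinel the callers check for:

import grpc

def isChannelAlive(self, address, timeout=1):
    channel = grpc.insecure_channel(address)
    try:
        grpc.channel_ready_future(channel).result(timeout=timeout)
        return channel  # usable channel, passed straight to FileserviceStub
    except grpc.FutureTimeoutError:
        return -1       # matches the -1 sentinel used by the callers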
Example #4
    def MetaDataInfo(self, request, context):
        print("Inside Metadatainfo")
        fileName = request.filename
        seqValues = request.seqValues
        db.saveMetaDataOnOtherNodes(fileName, seqValues)
        ack_message = "Successfully saved the metadata on " + self.serverAddress
        return fileService_pb2.ack(success=True, message=ack_message)
Example #5
    def ReplicateFile(self, request, context):
        # Last hop on the pre-computed path: persist locally instead of
        # forwarding any further.
        if request.currentpos == len(request.shortest_path) - 1:
            cache.saveVClock(str(request), str(request))
            return fileService_pb2.ack(success=True, message="Data Replicated.")

        # Otherwise forward the request to the next node on the shortest path.
        forward_coordinates = request.shortest_path[request.currentpos]
        print("forward coord =", forward_coordinates)
        forward_server_addr = self.getneighbordata(forward_coordinates)
        print("forward IP =", forward_server_addr)
        forward_port = 50051
        forward_channel = grpc.insecure_channel(forward_server_addr + ":" +
                                                str(forward_port))
        forward_stub = fileService_pb2_grpc.FileserviceStub(forward_channel)
        updated_request = fileService_pb2.FileData(
            initialReplicaServer=request.initialReplicaServer,
            bytearray=request.bytearray,
            vClock=request.vClock,
            shortest_path=request.shortest_path,
            currentpos=request.currentpos + 1)
        forward_resp = forward_stub.ReplicateFile(updated_request)
        print("forward_resp", forward_resp)
        return fileService_pb2.ack(success=True, message="Data Forwarded.")
    def getLeaderInfo(self, request, context):
        print("getLeaderInfo Called")
        address = request.ip + ":" + request.port
        self.clusterLeaders[request.clusterName] = address
        print("ClusterLeaders: ", self.clusterLeaders)
        channel = grpc.insecure_channel('{}'.format(address))
        self.ip_channel_dict[address] = channel
        return fileService_pb2.ack(success=True, message="Leader Updated.")
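A cluster leader would register itself with this super node by calling getLeaderInfo with its own address. The request message name is not shown anywhere in the listing, so ClusterInfo below is a hypothetical placeholder carrying the ip, port and clusterName fields the servicer reads:

def announce_leader(super_node_addr, my_ip, my_port, cluster_name):
    channel = grpc.insecure_channel(super_node_addr)
    stub = fileService_pb2_grpc.FileserviceStub(channel)
    # ClusterInfo is an assumed message type; only ip, port and clusterName matter here.
    ack = stub.getLeaderInfo(
        fileService_pb2.ClusterInfo(ip=my_ip, port=my_port,
                                    clusterName=cluster_name))
    return ack.success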
Example #7
    def FileDelete(self, request, context):
        username = request.username
        filename = request.filename

        if (int(db.get("primaryStatus")) == 1):

            if (self.fileExists(username, filename) == 0):
                print("File does not exist")
                return fileService_pb2.ack(success=False,
                                           message="File does not exist")

            print("Fetching metadata from leader")
            metadata = db.parseMetaData(request.username, request.filename)
            print("Successfully retrieved metadata from leader")

            deleteHelper = DeleteHelper(self.hostname, self.serverPort,
                                        self.activeNodesChecker)
            deleteHelper.deleteFileChunksAndMetaFromNodes(
                username, filename, metadata)

            return fileService_pb2.ack(
                success=True,
                message="Successfully deleted file from the cluster")

        else:
            seqNo = -1

            try:
                seqNo = request.seqNo
            except Exception:
                return fileService_pb2.ack(success=False,
                                           message="Internal Error")

            metaDataKey = username + "_" + filename
            dataChunkKey = username + "_" + filename + "_" + str(seqNo)

            if (db.keyExists(metaDataKey) == 1):
                print("FileDelete: Deleting the metadataEntry from local db :")
                db.deleteEntry(metaDataKey)
            if (db.keyExists(dataChunkKey)):
                print("FileDelete: Deleting the data chunk from local db: ")
                db.deleteEntry(dataChunkKey)

            return fileService_pb2.ack(
                success=True,
                message="Successfully deleted file from the cluster")
Example #8
    def UpdateFile(self, request_iterator, context):

        username, filename = "", ""
        fileData = bytes("", 'utf-8')

        # Gather the whole file from the incoming gRPC stream.
        for request in request_iterator:
            fileData += request.data
            username, filename = request.username, request.filename

        def getFileChunks(fileData):
            # Maximum chunk size that can be sent per streamed message.
            CHUNK_SIZE = 4000000

            sTime = time.time()

            # fileData is a bytes object, so slice it rather than read() it.
            for i in range(0, len(fileData), CHUNK_SIZE):
                yield fileService_pb2.FileData(username=username,
                                               filename=filename,
                                               data=fileData[i:i + CHUNK_SIZE],
                                               seqNo=1)
            print("Time for upload= ", time.time() - sTime)

        if (int(db.get("primaryStatus")) == 1):
            channel = grpc.insecure_channel('{}'.format(self.serverAddress))
            stub = fileService_pb2_grpc.FileserviceStub(channel)

            # An update is implemented as delete-then-reupload.
            response1 = stub.FileDelete(
                fileService_pb2.FileInfo(username=username, filename=filename))

            if (response1.success):
                response2 = stub.UploadFile(getFileChunks(fileData))
                if (response2.success):
                    return fileService_pb2.ack(
                        success=True, message="File successfully updated.")
                else:
                    return fileService_pb2.ack(success=False,
                                               message="Internal error.")
            else:
                return fileService_pb2.ack(success=False,
                                           message="Internal error.")

        # Non-primary nodes do not handle UpdateFile in this snippet.
        return fileService_pb2.ack(success=False,
                                   message="Not the primary node.")
Example #9
    def UploadFile(self, request_iterator, context):
        global chunk_id
        print('here', self.leader)

        if self.leader:
            print("I am the leader")
            chunk_id = 0
            for chunk in request_iterator:
                username = chunk.username
                filename = chunk.filename
                destination = self.nodeSelect.leastUtilizedNode(
                    self.serverAddress)
                print(destination, self.serverAddress)
                if destination == 9999:
                    return fileService_pb2.ack(success=False,
                                               message="No active nodes!")
                if destination == self.serverAddress:
                    print("data stored on primary")
                    # Metadata broadcast
                    # MongoDB store
                else:
                    child_response = self.sendDataToDestination(
                        chunk, destination)
                    if not child_response.success:
                        return fileService_pb2.ack(
                            success=False,
                            message="Error saving chunk at: " + destination)
                    # chunk_id += 1
                    # Metadata broadcast
            return fileService_pb2.ack(success=True,
                                       message="Data has been saved!")
        else:
            print("I am NOT the leader")
            for request in request_iterator:
                print("data stored on" + request.username)
                ##mongodb save data
            return fileService_pb2.ack(success=True,
                                       message="Data has been saved at " +
                                       self.serverAddress)
    def UploadFile(self, request_iterator, context):
        activeIpList = self.activeNodeObj.getActiveIpsDict()
        if self.leader:
            chunk_id = 0
            for chunk in request_iterator:
                username = chunk.username
                filename = chunk.filename
                metadata = self.pickledbMetadataobj.getFileData(username)
                if filename in metadata:
                    return fileService_pb2.ack(success=True,
                                               message="File Already Present!")
                destination = self.nodeSelect.leastUtilizedNode()
                if destination == 9999:
                    return fileService_pb2.ack(success=False,
                                               message="No active nodes!")
                if str(destination) == str(self.serverAddress):
                    chunk_id += 1
                    self.databaseHandlerObj.insertData(
                        chunk.username, chunk.filename + str(chunk_id),
                        chunk.data)
                    self.broadcastMetadata(chunk.username, chunk.filename,
                                           str(chunk_id), str(destination))

                else:
                    chunk_id += 1
                    self.sendDataToDestination(chunk, destination, chunk_id)

            return fileService_pb2.ack(success=True, message="Saved data!")

        else:
            for request in request_iterator:
                self.databaseHandlerObj.insertData(
                    request.username, request.filename + str(request.chunk_id),
                    request.data)
                self.broadcastMetadata(request.username, request.filename,
                                       str(request.chunk_id),
                                       str(self.serverAddress))
            # Acknowledge only after the whole incoming stream has been stored.
            return fileService_pb2.ack(success=True,
                                       message="Data has been saved!")
    def FileSearch(self, request, context):

        if (self.fileExists(request.username, request.filename) == False):
            return fileService_pb2.ack(
                success=False,
                message="File {} does not exist.".format(request.filename))

        fileMeta = db.parseMetaData(request.username, request.filename)

        primaryIP, replicaIP = -1, -1
        channel1, channel2 = -1, -1

        if (fileMeta[0] in self.clusterLeaders):
            primaryIP = self.clusterLeaders[fileMeta[0]]
            channel1 = self.clusterStatus.isChannelAlive(primaryIP)

        if (fileMeta[1] in self.clusterLeaders):
            replicaIP = self.clusterLeaders[fileMeta[1]]
            channel2 = self.clusterStatus.isChannelAlive(replicaIP)

        # Bail out if neither cluster leader is reachable; otherwise
        # 'response' below would never be assigned.
        if (channel1 == -1 and channel2 == -1):
            return fileService_pb2.ack(
                success=False, message="File does not exist in any cluster.")

        if (channel1 != -1):
            stub = fileService_pb2_grpc.FileserviceStub(channel1)
            response = stub.FileSearch(
                fileService_pb2.FileInfo(username=request.username,
                                         filename=request.filename))

        if (channel2 != -1):
            stub = fileService_pb2_grpc.FileserviceStub(channel2)
            response = stub.FileSearch(
                fileService_pb2.FileInfo(username=request.username,
                                         filename=request.filename))

        if (response.success == True):
            return fileService_pb2.ack(success=True, message="File exists! ")
        else:
            return fileService_pb2.ack(
                success=False, message="File does not exist in any cluster.")
    def UploadFile(self, request_iterator, context):
        print("Inside Server method ---------- UploadFile")

        node, node_replica, clusterName, clusterReplica = self.clusterStatus.leastUtilizedNode(
            self.clusterLeaders)

        if (node == -1):
            return fileService_pb2.ack(success=False,
                                       message="No Active Clusters.")

        print("Node found is:{}, replica is:{}".format(node, node_replica))

        channel1 = self.ip_channel_dict[node]
        stub1 = fileService_pb2_grpc.FileserviceStub(channel1)
        if (node_replica != "" and node_replica in self.ip_channel_dict):
            channel2 = self.ip_channel_dict[node_replica]
            stub2 = fileService_pb2_grpc.FileserviceStub(channel2)
        else:
            stub2 = None

        filename, username = "", ""
        data = bytes("", 'utf-8')

        for request in request_iterator:
            filename, username = request.filename, request.username
            data = request.data
            break

        if (self.fileExists(username, filename)):
            return fileService_pb2.ack(
                success=False,
                message=
                "File already exists for this user. Please rename or delete file first."
            )

        # Accumulate the full payload so the replication thread below receives
        # the whole file, not just the first chunk.
        fullData = bytearray(data)

        def sendDataStreaming(username, filename, data):
            yield fileService_pb2.FileData(username=username,
                                           filename=filename,
                                           data=data)
            for request in request_iterator:
                fullData.extend(request.data)
                yield fileService_pb2.FileData(username=request.username,
                                               filename=request.filename,
                                               data=request.data)

        resp1 = stub1.UploadFile(sendDataStreaming(username, filename, data))

        # Replicate to alternate cluster
        if (stub2 is not None):
            t1 = Thread(target=self.replicateData,
                        args=(
                            stub2,
                            username,
                            filename,
                            bytes(fullData),
                        ))
            t1.start()

        if (resp1.success):
            db.saveMetaData(username, filename, clusterName, clusterReplica)
            db.saveUserFile(username, filename)

        return resp1
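The replicateData helper run on the background thread above is not included either. A minimal sketch of what it could do, re-using the single-message streaming shape from sendDataStreaming; this is an assumption, not the repository's implementation:

    def replicateData(self, stub, username, filename, data):
        def stream():
            yield fileService_pb2.FileData(username=username,
                                           filename=filename,
                                           data=data)
        # Best-effort replication; the caller has already answered the client.
        stub.UploadFile(stream())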
Example #13
    def UploadFile(self, request_iterator, context):
        print("Inside Server method ---------- UploadFile")
        data = bytes("", 'utf-8')
        username, filename = "", ""
        totalDataSize = 0
        active_ip_channel_dict = self.activeNodesChecker.getActiveChannels()

        # list to store the info related to file location.
        metaData = []

        # If the node is the leader of the cluster.
        if (int(db.get("primaryStatus")) == 1):
            print("Inside primary upload")
            currDataSize = 0
            currDataBytes = bytes("", 'utf-8')
            seqNo = 1

            # Step 1:
            # Get 2 least loaded nodes based on the CPU stats.
            # 'Node' is where the actual data goes and 'node_replica' is where replica will go.
            node, node_replica = self.getLeastLoadedNode()

            if (node == -1):
                return fileService_pb2.ack(
                    success=False,
                    message="Error Saving File. No active nodes.")

            # Step 2:
            # Check whether file already exists, if yes then return with message 'File already exists'.
            for request in request_iterator:
                username, filename = request.username, request.filename
                print("Key is-----------------", username + "_" + filename)
                if (self.fileExists(username, filename) == 1):
                    print("sending neg ack")
                    return fileService_pb2.ack(
                        success=False,
                        message=
                        "File already exists for this user. Please rename or delete file first."
                    )
                break

            # Step 3:
            # Make chunks of size 'UPLOAD_SHARD_SIZE' and start sending the data to the least utilized node through gRPC streaming.
            # 'request' still holds the first message consumed by the loop above.
            currDataSize += sys.getsizeof(request.data)
            currDataBytes += request.data

            for request in request_iterator:

                if ((currDataSize + sys.getsizeof(request.data)) >
                        UPLOAD_SHARD_SIZE):
                    response = self.sendDataToDestination(
                        currDataBytes, node, node_replica, username, filename,
                        seqNo, active_ip_channel_dict[node])
                    metaData.append([node, seqNo, node_replica])
                    currDataBytes = request.data
                    currDataSize = sys.getsizeof(request.data)
                    seqNo += 1
                    node, node_replica = self.getLeastLoadedNode()
                else:
                    currDataSize += sys.getsizeof(request.data)
                    currDataBytes += request.data

            if (currDataSize > 0):
                response = self.sendDataToDestination(
                    currDataBytes, node, node_replica, username, filename,
                    seqNo, active_ip_channel_dict[node])
                metaData.append([node, seqNo, node_replica])

            # Step 4:
            # Save the metadata on the primary node after the completion of sharding.
            if (response.success):
                db.saveMetaData(username, filename, metaData)
                db.saveUserFile(username, filename)

            # Step 5:
            # Make a gRPC call to replicate the metadata on all the other nodes.
            self.saveMetadataOnAllNodes(username, filename, metaData)

            return fileService_pb2.ack(success=True, message="Saved")

        # If the node is not the leader.
        else:
            print("Saving the data on my local db")
            sequenceNumberOfChunk = 0
            dataToBeSaved = bytes("", 'utf-8')

            # Gather all the data from gRPC stream
            for request in request_iterator:
                username, filename, sequenceNumberOfChunk = request.username, request.filename, request.seqNo
                dataToBeSaved += request.data
            key = username + "_" + filename + "_" + str(sequenceNumberOfChunk)

            # Save the data in local DB.
            db.setData(key, dataToBeSaved)

            # After saving the chunk in the local DB, make a gRPC call to save the replica of the chunk on different
            # node only if the replicaNode is present.
            if (request.replicaNode != ""):
                print("Sending replication to ", request.replicaNode)
                replica_channel = active_ip_channel_dict[request.replicaNode]
                t1 = Thread(target=self.replicateChunkData,
                            args=(
                                replica_channel,
                                dataToBeSaved,
                                username,
                                filename,
                                sequenceNumberOfChunk,
                            ))
                t1.start()
                # stub = fileService_pb2_grpc.FileserviceStub(replica_channel)
                # response = stub.UploadFile(self.sendDataInStream(dataToBeSaved, username, filename, sequenceNumberOfChunk, ""))

            return fileService_pb2.ack(success=True, message="Saved")
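The sendDataInStream generator referenced in the commented-out lines above is not part of the listing. A hedged sketch consistent with how it is invoked there, using only FileData fields that appear elsewhere in these examples:

    def sendDataInStream(self, dataBytes, username, filename, seqNo, replicaNode):
        # Single-message stream carrying one chunk plus its replica target.
        yield fileService_pb2.FileData(username=username,
                                       filename=filename,
                                       data=dataBytes,
                                       seqNo=seqNo,
                                       replicaNode=replicaNode)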
    def metadataUpdate(self, request, context):
        self.pickledbMetadataobj.insertData(request.username, request.filename,
                                            request.chunk_id,
                                            request.destination)
        return fileService_pb2.ack(success=True,
                                   message="MetaData has been saved!")
Example #15
    def ReplicateFile(self, request, context):
        self.write_to_mem(request)
        print("request", request.initialReplicaServer)
        return fileService_pb2.ack(success=True, message="Data Replicated.")
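None of the snippets show how a servicer gets wired into a running server. A minimal, assumed bootstrap that would work for any of the implementations above (the generated add_FileserviceServicer_to_server name follows the usual gRPC codegen convention for a service called Fileservice):

from concurrent import futures
import grpc
import fileService_pb2_grpc

def serve(servicer, port=50051):
    # Standard blocking gRPC server setup.
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    fileService_pb2_grpc.add_FileserviceServicer_to_server(servicer, server)
    server.add_insecure_port("[::]:{}".format(port))
    server.start()
    server.wait_for_termination()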