def DownloadFile(self, request_iterator, context):
        """Stream a file back to the caller as FileData chunk messages.

        Leader path: look up the chunk metadata for (username, filename)
        and yield every chunk, reading it from the local database when this
        server holds it, otherwise proxying it from the peer that does.

        Non-leader path: serve the single locally stored chunk identified
        by request_iterator.sequence_no.

        Args:
            request_iterator: request message carrying username, filename
                and (on non-leader nodes) sequence_no.
            context: gRPC servicer context (unused here).

        Yields:
            fileService_pb2.FileData messages, one per chunk.
        """
        # NOTE(review): fetched but never used in this method.
        activeIpList = self.activeNodeObj.getActiveIpsDict()
        username = request_iterator.username
        filename = request_iterator.filename
        if (self.leader):
            # metadata iterates (chunk_id, server_address) pairs describing
            # where each chunk of the file lives.
            metadata = self.pickledbMetadataobj.getData(username, filename)
            for item in metadata:
                if item[1] == self.serverAddress:
                    # Chunk is stored locally under "<filename><chunk_id>".
                    fname = filename + str(item[0])
                    result = self.databaseHandlerObj.getData(username, fname)
                    yield fileService_pb2.FileData(username=result[0],
                                                   filename=filename,
                                                   data=result[2],
                                                   chunk_id=int(item[0]))

                else:
                    # Chunk lives on another node; relay its stream through.
                    data = self.getDataFromNode(username, filename, item[0],
                                                item[1])
                    for d in data:
                        yield d

        else:
            # Non-leader: the stored key is "<filename><sequence_no>", so the
            # suffix is stripped to recover the original filename.
            # NOTE(review): `len(chunk_id)` assumes sequence_no is a string
            # (it is also fed to int() below) — confirm against the .proto.
            result = self.databaseHandlerObj.getData(username, filename)
            chunk_id = request_iterator.sequence_no
            yield fileService_pb2.FileData(username=result[0],
                                           filename=result[1][:-len(chunk_id)],
                                           data=result[2],
                                           chunk_id=int(chunk_id))
 def sendDataStreaming(username, filename, data, request_iterator=()):
     """Yield an initial FileData message, then relay any follow-up requests.

     BUG FIX: the original referenced `request_iterator` as a free name
     that was never defined, raising NameError as soon as the generator
     reached the for-loop. It is now an explicit parameter defaulting to
     an empty iterable, which keeps the three-argument call signature
     working. The dead `data += request.data` accumulation (its result
     was never read) was removed.

     Args:
         username: owner name for the first message.
         filename: file name for the first message.
         data: payload bytes for the first message.
         request_iterator: optional iterable of follow-up request messages
             to re-emit as FileData.

     Yields:
         fileService_pb2.FileData messages.
     """
     yield fileService_pb2.FileData(username=username,
                                    filename=filename,
                                    data=data)
     for request in request_iterator:
         yield fileService_pb2.FileData(username=request.username,
                                        filename=request.filename,
                                        data=request.data)
 def ReplicateFile(self, request, context):
     """Forward a replication request one hop along its shortest path.

     Looks up the next node's coordinates at request.currentpos, resolves
     that node's IP, and re-sends the request with the cursor advanced by
     one so the next hop continues the chain.

     Args:
         request: fileService_pb2.FileData carrying the replica payload,
             vector clock, shortest_path and the currentpos cursor.
         context: gRPC servicer context (unused).

     Returns:
         fileService_pb2.ack confirming the data was forwarded.
     """
     forward_coordinates = request.shortest_path[request.currentpos]
     print("forward coord =", forward_coordinates)
     forward_server_addr = self.getneighbordata(forward_coordinates)
     print("forward IP =", forward_server_addr)
     forward_port = 50051
     forward_channel = grpc.insecure_channel(forward_server_addr + ":" +
                                             str(forward_port))
     forward_stub = fileService_pb2_grpc.FileserviceStub(forward_channel)
     # BUG FIX: the original both mutated `request.currentpos += 1` AND sent
     # `currentpos=request.currentpos + 1`, advancing the cursor by two and
     # skipping every other hop. Advance by exactly one. (Also removed the
     # unused rList/bytearray locals and stale commented-out code.)
     updated_request = fileService_pb2.FileData(
         initialReplicaServer=request.initialReplicaServer,
         bytearray=request.bytearray,
         vClock=request.vClock,
         shortest_path=request.shortest_path,
         currentpos=request.currentpos + 1)
     forward_resp = forward_stub.ReplicateFile(updated_request)
     print("forward_resp", forward_resp)
     return fileService_pb2.ack(success=True, message="Data Forwarded.")
# ---- Example 4 (示例#4) ----
def get_file_chunks(filename, chunk_size=None):
    """Yield the contents of *filename* as a stream of FileData messages.

    Args:
        filename: path of the file to read.
        chunk_size: maximum bytes per chunk. Defaults to the module-level
            CHUNK_SIZE constant, resolved at call time, so omitting the
            argument preserves the original behaviour.

    Yields:
        fileService_pb2.FileData messages with at most chunk_size bytes each.
    """
    if chunk_size is None:
        # CHUNK_SIZE is a module-level constant defined elsewhere in the file.
        chunk_size = CHUNK_SIZE
    with open(filename, 'rb') as f:
        while True:
            piece = f.read(chunk_size)
            if len(piece) == 0:
                return
            yield fileService_pb2.FileData(username='******', filename=filename, data=piece)
 def streamData(username, filename, data, chunk_size=4000000):
     """Yield *data* as FileData messages of at most *chunk_size* bytes.

     Args:
         username: owner of the file, echoed into every message.
         filename: name of the file, echoed into every message.
         data: complete payload (bytes) to be sliced into chunks.
         chunk_size: maximum bytes per chunk. Defaults to the original
             hard-coded 4000000, which stays under gRPC's 4 MiB default
             message limit; now a parameter for wider reuse.

     Yields:
         fileService_pb2.FileData messages covering *data* in order.
     """
     start, end = 0, chunk_size
     while True:
         chunk = data[start:end]
         if len(chunk) == 0:
             break
         start = end
         end += chunk_size
         yield fileService_pb2.FileData(username=username,
                                        filename=filename,
                                        data=chunk)
# ---- Example 6 (示例#6) ----
def sendFileInChunks(username, filename, i):
    """Stream a locally stored file in fixed-size chunks.

    Reads 'files/<filename>' and yields it as FileData messages whose
    username carries the index *i* as a suffix.

    Args:
        username: base username; "_<i>" is appended per the storage scheme.
        filename: name of the file under the local 'files' directory.
        i: replica/sequence index appended to the username.

    Yields:
        fileService_pb2.FileData messages of at most CHUNK_SIZE bytes.
    """
    # Maximum chunk size that can be sent per gRPC message.
    CHUNK_SIZE = 4000000

    # BUG FIX: the original referenced an undefined name `fileName`
    # (NameError); the parameter is spelled `filename`.
    outfile = os.path.join('files', filename)

    with open(outfile, 'rb') as infile:
        while True:
            chunk = infile.read(CHUNK_SIZE)
            if not chunk:
                break
            yield fileService_pb2.FileData(username=username + "_" + str(i), filename=filename, data=chunk, seqNo=1)
 def getChunksinStream(self, chunk, chunk_id):
     """Re-emit *chunk* as a stream of FileData messages.

     NOTE(review): the slicing is driven by sys.getsizeof(chunk.data),
     which is the Python object's memory footprint (always larger than
     len(chunk.data)), not the payload length. The first slice therefore
     covers the entire payload and the loop breaks on the second pass,
     so exactly one message is yielded. Presumably len() was intended —
     confirm before changing, as callers may rely on the single-yield
     behaviour.
     """
     end = sys.getsizeof(chunk.data)
     start = 0
     while True:
         data = chunk.data[start:end]
         # Stop once the window has moved past the (getsizeof-based) size.
         if end > sys.getsizeof(chunk.data):
             break
         start = end
         end = end + sys.getsizeof(chunk.data)
         yield fileService_pb2.FileData(username=chunk.username,
                                        filename=chunk.filename,
                                        data=data,
                                        chunk_id=chunk_id)
# ---- Example 8 (示例#8) ----
    def DownloadFile(self, request, context):
        """Stream a file to the client, proxying from the cluster that holds it.

        Resolves the file's primary and replica clusters from its metadata,
        picks whichever leader has a live channel (primary preferred), and
        relays that leader's DownloadFile stream. Yields a single empty
        FileData message when the file does not exist or no replica is
        reachable.

        Args:
            request: message with username and filename fields.
            context: gRPC servicer context (unused).

        Yields:
            fileService_pb2.FileData messages.
        """
        # BUG FIX: this method is a generator (it contains yield), so the
        # original `return fileService_pb2.FileData(...)` discarded the
        # message and callers saw an empty stream. Sentinel messages are
        # now yielded before returning.
        if not self.fileExists(request.username, request.filename):
            yield fileService_pb2.FileData(username=request.username,
                                           filename=request.filename,
                                           data=bytes("", 'utf-8'))
            return

        fileMeta = db.parseMetaData(request.username, request.filename)

        # BUG FIX: the original sentinels were -1, which is truthy, so a
        # missing cluster leader would have reached FileserviceStub(-1).
        # None makes the liveness checks below behave as intended.
        channel1, channel2 = None, None
        if fileMeta[0] in self.clusterLeaders:
            primaryIP = self.clusterLeaders[fileMeta[0]]
            channel1 = self.clusterStatus.isChannelAlive(primaryIP)

        if fileMeta[1] in self.clusterLeaders:
            replicaIP = self.clusterLeaders[fileMeta[1]]
            channel2 = self.clusterStatus.isChannelAlive(replicaIP)

        # Prefer the primary cluster's channel, fall back to the replica.
        channel = channel1 if channel1 else channel2
        if channel:
            stub = fileService_pb2_grpc.FileserviceStub(channel)
            responses = stub.DownloadFile(
                fileService_pb2.FileInfo(username=request.username,
                                         filename=request.filename))
            for response in responses:
                yield response
        else:
            # Neither cluster leader is reachable: empty-data sentinel.
            yield fileService_pb2.FileData(username=request.username,
                                           filename=request.filename,
                                           data=bytes("", 'utf-8'))
# ---- Example 9 (示例#9) ----
 def sendDataInStream(self, dataBytes, username, filename, seqNo,
                      replicaNode):
     """Slice *dataBytes* into 4 MB pieces and yield each one as a
     FileData message tagged with the given seqNo and replicaNode.
     """
     CHUNK = 4000000
     offset = 0
     while True:
         piece = dataBytes[offset:offset + CHUNK]
         if not len(piece):
             break
         offset += CHUNK
         yield fileService_pb2.FileData(username=username,
                                        filename=filename,
                                        data=piece,
                                        seqNo=seqNo,
                                        replicaNode=replicaNode)
# ---- Example 10 (示例#10) ----
        def getFileChunks(fileData):
            """Stream *fileData* (an open binary file object) as FileData
            messages of at most CHUNK_SIZE bytes, printing the elapsed
            upload time when the stream is exhausted.

            Closes over `username` and `fileName` from the enclosing scope.
            """
            # Maximum chunk size that can be sent per gRPC message.
            CHUNK_SIZE = 4000000

            sTime = time.time()

            # CLEANUP: removed an unused `outfile = os.path.join(...)`
            # computation — its result was never read.
            while True:
                chunk = fileData.read(CHUNK_SIZE)
                if not chunk:
                    break

                yield fileService_pb2.FileData(username=username,
                                               filename=fileName,
                                               data=chunk,
                                               seqNo=1)
            print("Time for upload= ", time.time() - sTime)
# ---- Example 11 (示例#11) ----
def getFileChunks():
    """Interactively prompt for a username and filename, then stream the
    file from the local 'files' directory as FileData messages.

    Yields:
        fileService_pb2.FileData messages of at most CHUNK_SIZE bytes.
    """
    # Maximum chunk size that can be sent per gRPC message.
    CHUNK_SIZE = 4000000

    # BUG FIX: the original line was mangled (a "******" redaction
    # artifact fused two statements) and never assigned `fileName`,
    # which is used below. Restored the two separate prompts.
    username = input("Enter Username: ")
    fileName = input("Enter filename: ")

    outfile = os.path.join('files', fileName)

    sTime = time.time()
    with open(outfile, 'rb') as infile:
        while True:
            chunk = infile.read(CHUNK_SIZE)
            if not chunk:
                break

            # Do what you want with each chunk (in dev, write line to file)
            yield fileService_pb2.FileData(username=username, filename=fileName, data=chunk, seqNo=1)
    print("Time for upload= ", time.time() - sTime)
# ---- Example 12 (示例#12) ----
    def replicateContent(self, message, initialReplicaServer, address):
        """Send one replica of a file's bytes to the next replica node.

        Fills the next free slot (ip2 or ip3, chosen by
        message.countOfReplica) of the file's vector clock with the target
        address; when this is the final replica, also notifies the other
        replicas via transmit_message before issuing the ReplicateFile RPC.

        Args:
            message: incoming message whose countOfReplica selects the
                replica slot to fill.
            initialReplicaServer: unused; self.localIP is sent instead.
            address: hostname of the node receiving this replica.

        NOTE(review): the file name and payload are hard-coded ("file1",
        b"Srinivas") and the stub always targets localhost:50051 — this
        looks like demo scaffolding; confirm before production use.
        """
        # logic to pick up bytes from memory and transmit
        bytes_read_from_memory = str.encode("Srinivas")
        intial_Replicate_Server = self.localIP
        hostname = address
        serverAddress = "localhost"
        serverPort = 50051
        channel = grpc.insecure_channel(serverAddress + ":" + str(serverPort))
        replicate_stub = fileService_pb2_grpc.FileserviceStub(channel)
        if message.countOfReplica == 1:
            # First replica: record the target in slot ip2.
            vClock = cache.getFileVclock("file1")
            vClock.ip2.address = hostname
        else:
            # Final replica: record in ip3 and sync the other replicas.
            vClock = cache.getFileVclock("file1")
            vClock.ip3.address = hostname
            file = "file1"
            # BUG FIX: json.dump(...) requires a file object and would raise
            # a TypeError here; json.dumps(...) returns the JSON string that
            # transmit_message expects.
            message = json.dumps({"vClock": vClock, "filename": file})
            self.transmit_message(message, intial_Replicate_Server,
                                  vClock.ip2.address.decode("utf-8"), False, 0,
                                  "sync", "file1")
            self.transmit_message(message, intial_Replicate_Server,
                                  vClock.ip3.address.decode("utf-8"), False, 0,
                                  "sync", "file1")

        request = fileService_pb2.FileData(
            initialReplicaServer=intial_Replicate_Server,
            bytearray=bytes_read_from_memory,
            vClock=json.dumps(vClock))
        resp = replicate_stub.ReplicateFile(request)
        print(resp)
# ---- Example 13 (示例#13) ----
 def dummy_generator(self, chunk):
     """Wrap a single chunk back into a one-message FileData stream."""
     fields = {
         "username": chunk.username,
         "filename": chunk.filename,
         "data": chunk.data,
     }
     yield fileService_pb2.FileData(**fields)
# ---- Example 14 (示例#14) ----
    def _streamInChunks(self, data, username, filename, seqNo):
        # Helper: yield *data* (bytes) as FileData messages of at most
        # 4 MB each, all tagged with the caller's seqNo. Factored out of
        # DownloadFile, which previously contained this loop twice.
        chunk_size = 4000000
        start, end = 0, chunk_size
        while True:
            chunk = data[start:end]
            if len(chunk) == 0:
                break
            start = end
            end += chunk_size
            yield fileService_pb2.FileData(username=username,
                                           filename=filename,
                                           data=chunk,
                                           seqNo=seqNo)

    def DownloadFile(self, request, context):
        """Stream a file's bytes back to the caller.

        Leader path: verify the file exists, serve it from the LRU cache
        when present, otherwise gather the chunks from the individual
        nodes via DownloadHelper, stream the assembled bytes back, and
        refresh the cache.

        Non-leader path: look up the single chunk keyed
        "<username>_<filename>_<seqNo>" in the local db and stream it.

        Yields:
            fileService_pb2.FileData messages; a single empty message
            (seqNo=0) when the file does not exist.
        """
        print("Inside Download")

        # If the node is the leader of the cluster.
        if int(db.get("primaryStatus")) == 1:

            print("Inside primary download")

            # Check if file exists
            if self.fileExists(request.username, request.filename) == 0:
                print("File does not exist")
                yield fileService_pb2.FileData(username=request.username,
                                               filename=request.filename,
                                               data=bytes("", 'utf-8'),
                                               seqNo=0)
                return

            # If the file is present in cache then just fetch it and return.
            # No need to go to the individual node.
            if self.lru.has_key(request.username + "_" + request.filename):
                print("Fetching data from Cache")
                CHUNK_SIZE = 4000000
                fileName = request.username + "_" + request.filename
                filePath = self.lru[fileName]
                outfile = os.path.join(filePath, fileName)

                with open(outfile, 'rb') as infile:
                    while True:
                        chunk = infile.read(CHUNK_SIZE)
                        if not chunk:
                            break
                        yield fileService_pb2.FileData(
                            username=request.username,
                            filename=request.filename,
                            data=chunk,
                            seqNo=1)

            # If the file is not present in the cache, fetch it from the
            # individual node.
            else:
                print("Fetching the metadata")

                # Step 1: get metadata i.e. the location of chunks.
                metaData = db.parseMetaData(request.username, request.filename)

                print(metaData)

                # Step 2: make gRPC calls and get the fileData from all the nodes.
                downloadHelper = DownloadHelper(self.hostname, self.serverPort,
                                                self.activeNodesChecker)
                data = downloadHelper.getDataFromNodes(request.username,
                                                       request.filename,
                                                       metaData)
                print("Sending the data to client")

                # Step 3: send the file to supernode using gRPC streaming.
                yield from self._streamInChunks(data, request.username,
                                                request.filename,
                                                request.seqNo)

                # Step 4: update the cache based on LRU (least recently used).
                self.saveInCache(request.username, request.filename, data)

        # If the node is not the leader, just fetch the fileChunk from the
        # local db and stream it back to the leader.
        else:
            key = request.username + "_" + request.filename + "_" + str(
                request.seqNo)
            print(key)
            data = db.getFileData(key)
            yield from self._streamInChunks(data, request.username,
                                            request.filename, request.seqNo)
# ---- Example 15 (示例#15) ----
import sys
sys.path.append('./proto')  # make the generated proto stubs importable
import grpc
import fileService_pb2
import fileService_pb2_grpc


# Standalone demo client: performs a single ReplicateFile RPC against a
# server on localhost as soon as this module runs.

# open a gRPC channel
channel = grpc.insecure_channel('localhost:50051')

# create a stub (client)
stub = fileService_pb2_grpc.FileserviceStub(channel)

# create a valid request message
# NOTE(review): assumes the FileData proto declares `message` and `vClock`
# string fields — confirm against fileService.proto; other examples in
# this file populate `bytearray`/`shortest_path` instead.
request = fileService_pb2.FileData(initialReplicaServer="initialReplicaServer", message="message",
                                                 vClock="vClock")

# make the call (blocks until the server responds)
resp = stub.ReplicateFile(request)

# et voilà — print the server's ack
print(resp)
# ---- Example 16 (示例#16) ----
def getFileData():
    """Prompt for a filename and return its contents as a FileData message.

    Reads 'files/<fileName>' in binary mode.

    Returns:
        fileService_pb2.FileData carrying the file name and raw bytes.
    """
    fileName = input("Enter filename:")
    outfile = os.path.join('files', fileName)
    # BUG FIX: the handle from open(...) was never closed (resource leak);
    # a context manager releases it deterministically.
    with open(outfile, 'rb') as f:
        file_data = f.read()
    fileData = fileService_pb2.FileData(fileName=fileName, data=file_data)
    return fileData