Example #1
    def start(self):
        db = chunk_database()
        sz = db.get_total_size()

        # Below the trigger level there is nothing to rebalance
        # (the rebalancing branch is elided from this snippet)
        if sz < TRIGGER_LEVEL * self.max_cap:
            pass

        # Wait until the next balancing pass
        time.sleep(DISK_BALANCER_PERIOD)
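This fragment reads like a single pass of a periodic disk balancer. Below is a minimal self-contained sketch of that pattern; TRIGGER_LEVEL, DISK_BALANCER_PERIOD, and the rebalance() hook are assumptions for illustration, not part of the original code:

import time

TRIGGER_LEVEL = 0.8          # hypothetical: rebalance at 80% of capacity
DISK_BALANCER_PERIOD = 60    # hypothetical: seconds between passes

class DiskBalancer:
    def __init__(self, max_cap, get_total_size):
        self.max_cap = max_cap
        self.get_total_size = get_total_size

    def start(self):
        while True:
            sz = self.get_total_size()
            if sz >= TRIGGER_LEVEL * self.max_cap:
                self.rebalance()   # assumed hook; elided in the snippet above
            time.sleep(DISK_BALANCER_PERIOD)

    def rebalance(self):
        print("Rebalancing chunks...")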
Example #2
def get_chunk_data(hash, offset, len, ssips, ssport, filenm):
    db = chunk_database()

    # If the chunk is already in the local cache, serve it straight from disk
    if db.is_incache(hash):
        with open(CHUNKS_DIR + hash, 'rb') as f:
            f.seek(offset)
            data = f.read(len)
        return data

    # Not in the local cache: fetch it from one of the storage servers
    ssip_arr = ssips.split(",")[:-1]

    for ssip in ssip_arr:
        try:
            print("Attempting to read %s from %s" % (hash, ssip))
            channel = grpc.insecure_channel(ssip + ":" + str(ssport))
            ss_stub = storageserver_pb2_grpc.StorageServerStub(channel)

            # Create the ChunkInfo object to be passed to the storage server
            chunk_info = ChunkInfo()
            chunk_info.filename = filenm
            chunk_info.hash = hash
            chunk_info.offset = offset
            chunk_info.len = len

            # Send transfer stats to the visualizer
            n = 10
            sender_ip = ssip
            receivers_ip = [CLIENT_IP]
            rabbitmq_utils.add_to_transfer_queue(sender_ip, receivers_ip, n)

            # Get ChunkData from the storage server for the requested chunk
            chunk_data = ss_stub.GetChunkData(chunk_info).data
            print("Downloaded %d from %s" % (chunk_info.len, ssip))

            # Update the in-cache attribute in the table
            db.set_incache(hash, True)

            # Write the chunk to the local cache
            with open(CHUNKS_DIR + hash, 'wb') as f:
                f.write(chunk_data)

            return chunk_data[offset:offset + len]

        except Exception:
            print("Failed to read %s from %s" % (hash, ssip))
            continue

    # Every listed storage server failed
    return None
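A hypothetical call to get_chunk_data: the function expects ssips to be a comma-terminated list (split(",")[:-1] drops the trailing empty element), so the values below are illustrative samples only:

# All IPs, the port, and the file name are made-up sample values
data = get_chunk_data(
    hash="9f86d081884c7d65",       # hypothetical chunk hash
    offset=0,
    len=4096,
    ssips="10.0.0.11,10.0.0.12,",  # note the trailing comma
    ssport=50051,
    filenm="movie.mp4",
)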
Example #3
    def GetChunkData(self, request, context):
        db = chunk_database()
        chunk_data = ChunkData()

        # Protobuf scalar fields cannot be assigned None; leaving the data
        # field at its empty default signals that the chunk is not present
        if not db.is_chunk_present(request.hash):
            return chunk_data

        # Read the whole chunk file and return it to the caller
        with open(CHUNKS_DIR + request.hash, "rb") as f:
            chunk_data.data = f.read()

        return chunk_data
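GetChunkData is a servicer method, so it has to be hosted inside a gRPC server. A minimal wiring sketch, assuming the generated module follows gRPC's standard add_<Service>Servicer_to_server naming; the port and worker count are illustrative:

from concurrent import futures

import grpc
import storageserver_pb2_grpc

def serve(servicer):
    # "servicer" implements GetChunkData / PushChunkData / GetCapacity
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    storageserver_pb2_grpc.add_StorageServerServicer_to_server(servicer, server)
    server.add_insecure_port("[::]:50051")  # hypothetical port
    server.start()
    server.wait_for_termination()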
Example #4
    def PushChunkData(self, request_iterator, context):
        db = chunk_database()

        c_list = []

        should_duplicate = False
        request_iterator_duplicate = []

        first = True
        closest_ss_ip = ""
        for chunkinfodata in request_iterator:

            if first:
                first = False
                # Decrement the copies factor and check whether further
                # replication is required
                chunkinfodata.copies -= 1
                should_duplicate = chunkinfodata.copies > 0
                print("Copies = %d, duplicate? = %s" %
                      (chunkinfodata.copies, should_duplicate))
                # Pick the next storage server to replicate to
                if should_duplicate:
                    closest_ss_ip = chunkinfodata.chunkinfo.seeders[
                        -chunkinfodata.copies].ip

            # Copy the chunk data only if it is not already present
            if not db.is_chunk_present(chunkinfodata.chunkinfo.hash):
                # Add to DB
                db.add_chunk(chunkinfodata.chunkinfo.hash,
                             chunkinfodata.chunkinfo.len)

                # Write to chunk file
                with open(CHUNKS_DIR + chunkinfodata.chunkinfo.hash,
                          "wb") as f:
                    f.write(chunkinfodata.chunkdata.data)

            c_list.append(chunkinfodata.chunkinfo)

            # If replication is enabled, keep the request for forwarding
            if should_duplicate:
                request_iterator_duplicate.append(chunkinfodata)

        # Send an update to the chunk server about the chunk info.
        # Node info should be added before sending the update.
        channel = grpc.insecure_channel(self.chunserver_ip + ":" +
                                        CHUNK_SERVER_PORT)
        stub = chunkserver_pb2_grpc.ChunkServerStub(channel)

        if not should_duplicate:
            stub.RouteUpdate(iter(c_list))

        # If the chunk has to be replicated to another storage server
        if should_duplicate:

            print("Duplicating to %s" % closest_ss_ip)

            # Establish a gRPC channel to the next storage server
            channel_ss = grpc.insecure_channel(closest_ss_ip + ":" +
                                               STORAGE_SERVER_PORT)
            stub_ss = storageserver_pb2_grpc.StorageServerStub(channel_ss)

            # Forward the data and block until the replica acknowledges
            resp_future_ss = stub_ss.PushChunkData.future(
                iter(request_iterator_duplicate))
            resp_future_ss.result()
            print("Issued duplication to %s" % closest_ss_ip)

            # Send transfer stats to the visualizer
            n = len(request_iterator_duplicate)
            sender_ip = STORAGE_SERVER_IP
            receivers_ip = [closest_ss_ip]
            rabbitmq_utils.add_to_transfer_queue(sender_ip, receivers_ip, n)

        ec = Error()
        ec.ec = 0
        return ec
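rabbitmq_utils.add_to_transfer_queue is called in several of these examples but never shown. A plausible pika-based sketch, assuming the visualizer consumes JSON messages from a queue; the host, queue name, and message shape are all assumptions:

import json
import pika

def add_to_transfer_queue(sender_ip, receivers_ip, n,
                          host="localhost", queue="transfer_queue"):
    # Publish one JSON message describing the transfer so a visualizer
    # consumer can animate it
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=host))
    channel = connection.channel()
    channel.queue_declare(queue=queue)
    message = json.dumps({
        "sender": sender_ip,
        "receivers": receivers_ip,
        "num_chunks": n,
    })
    channel.basic_publish(exchange="", routing_key=queue, body=message)
    connection.close()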
Example #5
    def GetCapacity(self, request, context):
        # Report the maximum and currently used capacity of this server
        c = Capacity()
        db = chunk_database()
        c.maxcap = self.max_cap
        c.cursz = db.get_total_size()
        return c
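chunk_database appears in every example above, but its implementation is not shown. A minimal sqlite3-backed sketch of just the interface these examples rely on; the schema and database path are assumptions:

import sqlite3

class chunk_database:
    """Hypothetical sqlite3-backed store for chunk metadata."""

    def __init__(self, path="chunks.db"):  # assumed database location
        self.conn = sqlite3.connect(path)
        self.conn.execute(
            "CREATE TABLE IF NOT EXISTS chunks "
            "(hash TEXT PRIMARY KEY, len INTEGER, incache INTEGER DEFAULT 0)")

    def add_chunk(self, hash, length):
        self.conn.execute(
            "INSERT OR IGNORE INTO chunks (hash, len) VALUES (?, ?)",
            (hash, length))
        self.conn.commit()

    def is_chunk_present(self, hash):
        row = self.conn.execute(
            "SELECT 1 FROM chunks WHERE hash = ?", (hash,)).fetchone()
        return row is not None

    def is_incache(self, hash):
        row = self.conn.execute(
            "SELECT incache FROM chunks WHERE hash = ?", (hash,)).fetchone()
        return bool(row and row[0])

    def set_incache(self, hash, flag):
        self.conn.execute(
            "UPDATE chunks SET incache = ? WHERE hash = ?",
            (1 if flag else 0, hash))
        self.conn.commit()

    def get_total_size(self):
        row = self.conn.execute(
            "SELECT COALESCE(SUM(len), 0) FROM chunks").fetchone()
        return row[0]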