Code example #1
0
def run(raft_ip, raft_port, file_name, downloads_folder="Downloads"):
    """Download *file_name* through the raft node at raft_ip:raft_port.

    Asks the raft node where the file lives, then streams every chunk from
    the first proxy in the returned list into ``downloads_folder/file_name``.

    Args:
        raft_ip: Host of the raft node.
        raft_port: Port of the raft node, as a string (concatenated below).
        file_name: Name of the file to fetch.
        downloads_folder: Directory the file is written into.
    """
    with grpc.insecure_channel(raft_ip + ':' + raft_port) as channel:
        stub = rpc.DataTransferServiceStub(channel)
        request = file_transfer.FileInfo()
        request.fileName = file_name

        file_location_info = stub.RequestFileInfo(request)
        log_info("Response received: ")
        pprint.pprint(file_location_info)

    # Materialize the repeated proto field once; list() replaces the
    # manual append loop.
    proxies = list(file_location_info.lstProxy)

    # BUG FIX: guard against an empty proxy list — previously proxies[0]
    # raised IndexError when the file was found but no proxy was returned.
    if file_location_info.isFileFound and proxies:
        with open(downloads_folder + "/" + file_name, 'wb') as f:
            with grpc.insecure_channel(proxies[0].ip + ':' +
                                       proxies[0].port) as channel:
                stub = rpc.DataTransferServiceStub(channel)
                request = file_transfer.ChunkInfo()
                request.fileName = file_name
                # startSeqNum stays 0 for every chunk: each chunk is
                # streamed from its first sequence.
                request.startSeqNum = 0
                for chunk_num in range(file_location_info.maxChunks):
                    request.chunkId = chunk_num
                    for response in stub.DownloadChunk(request):
                        f.write(response.data)
Code example #2
0
def download_chunk(file_name, chunk_num, start_seq_num, proxy_address, proxy_port, downloads_folder="Downloads"):
    """Download one chunk of *file_name* from a proxy and write it to disk.

    Streams sequences of chunk *chunk_num* starting at *start_seq_num*,
    recording progress in the module-level bookkeeping lists so the caller
    can retry from the last sequence received.

    Args:
        file_name: Name of the file being downloaded.
        chunk_num: Index of the chunk to fetch.
        start_seq_num: First sequence number to request (resume point).
        proxy_address: Proxy host.
        proxy_port: Proxy port, as a string.
        downloads_folder: Folder (relative to this file) to write into.
    """
    log_info("requesting for :", file_name, "chunk no :", chunk_num, "from", proxy_address, ":", proxy_port)

    global next_sequence_to_download
    global maximum_number_of_sequences

    # Collect pieces and join once at the end: repeated ``bytes += ...``
    # is quadratic in the number of sequences.
    pieces = []
    # Last streamed message; stays None if the RPC fails before yielding.
    response = None

    with grpc.insecure_channel(proxy_address + ':' + proxy_port) as channel:
        stub = rpc.DataTransferServiceStub(channel)
        request = file_transfer.ChunkInfo()
        request.fileName = file_name
        request.chunkId = chunk_num
        request.startSeqNum = start_seq_num
        try:
            for response in stub.DownloadChunk(request):
                log_info("Response received: Chunk", response.chunkId, "Sequence:", response.seqNum, "/",
                         response.seqMax)
                # Record progress so a retry resumes after the last sequence.
                next_sequence_to_download[chunk_num] = response.seqNum + 1
                maximum_number_of_sequences[chunk_num] = response.seqMax
                pieces.append(response.data)

        except grpc.RpcError:
            log_info("Failed to connect to data center..Retrying !!")

        # BUG FIX: ``response`` was referenced unconditionally here; if the
        # RPC failed before yielding anything it was unbound (NameError).
        if response is not None:
            write_file_chunks(response,
                              os.path.join(os.path.dirname(os.path.realpath(__file__)), downloads_folder),
                              b''.join(pieces))

        log_info("request completed for :", file_name, "chunk no :", chunk_num, "from", proxy_address, ":", proxy_port,
                 "last seq :", next_sequence_to_download[chunk_num], "max seq :",
                 maximum_number_of_sequences[chunk_num])
Code example #3
0
def upload_chunk(file_path, file_name, chunk_num, proxy_address, proxy_port):
    """Upload one chunk of *file_path* to the proxy at proxy_address:proxy_port.

    Streams the chunk through ``file_upload_iterator`` via the
    DataTransferService ``UploadFile`` RPC.
    """
    with grpc.insecure_channel(proxy_address + ':' + proxy_port) as channel:
        stub = file_transfer_rpc.DataTransferServiceStub(channel)
        stub.UploadFile(file_upload_iterator(file_path, file_name, chunk_num))
        # Use log_info for consistency with the other transfer helpers
        # (the original used a bare print here).
        log_info("Chunk Uploaded:", chunk_num)
Code example #4
0
def run(raft_ip, raft_port, file_name):
    """Upload the local file *file_name* via the raft node, chunk by chunk.

    Asks the raft node for a list of proxies, names the stored copy with a
    timestamp suffix so repeated uploads don't collide, then fans the chunks
    out to hash-selected proxies on a thread pool.

    Args:
        raft_ip: Host of the raft node.
        raft_port: Port of the raft node, as a string.
        file_name: Path of the local file to upload.
    """
    with grpc.insecure_channel(raft_ip + ':' + raft_port) as channel:
        stub = file_transfer_rpc.DataTransferServiceStub(channel)
        file_path = file_name

        # BUG FIX: the old ``basename.split(".")`` handling dropped every
        # component after the second dot ("a.tar.gz" -> name "a", ext
        # ".tar"). os.path.splitext keeps the full base name plus the last
        # extension and is identical for simple one-dot names.
        base_name, extension = os.path.splitext(os.path.basename(file_path))

        # Timestamp suffix makes the stored name unique per upload.
        file_name = base_name + "_" + str(math.ceil(time.time())) + extension
        file_size = file_utils.get_file_size(file_path)

        request = file_transfer.FileUploadInfo()
        request.fileName = file_name
        request.fileSize = file_size

        response = stub.RequestFileUpload(request)
        log_info("Got list of proxies: ", response.lstProxy)

        if len(response.lstProxy) == 0:
            print("Could not upload file. Please try again later.")
            return

    num_of_chunks = file_utils.get_max_file_chunks(file_path)

    lst_chunk_upload_info = []

    # Parallel argument lists for ThreadPool.starmap below.
    file_paths = []
    file_names = []
    chunk_nums = []
    proxy_addresses = []
    proxy_ports = []

    for chunk_num in range(num_of_chunks):
        # Pick the proxy deterministically from (file, chunk) via hashing.
        selected_proxy = get_rand_hashing_node_from_node_info_object(
            response.lstProxy, file_name, chunk_num)

        proxy_address = selected_proxy.ip
        proxy_port = selected_proxy.port

        chunk_upload_info = raft_proto.ChunkUploadInfo()
        chunk_upload_info.chunkId = chunk_num
        chunk_upload_info.uploadedDatacenter.ip = proxy_address
        chunk_upload_info.uploadedDatacenter.port = proxy_port

        lst_chunk_upload_info.append(chunk_upload_info)

        file_paths.append(file_path)
        file_names.append(file_name)
        chunk_nums.append(chunk_num)
        proxy_addresses.append(proxy_address)
        proxy_ports.append(proxy_port)

    pool = ThreadPool(THREAD_POOL_SIZE)
    pool.starmap(
        upload_chunk,
        zip(file_paths, file_names, chunk_nums, proxy_addresses, proxy_ports))
    pool.close()
    pool.join()

    # NOTE(review): read_diff / send_diff look like module-level timing
    # accumulators populated by the upload workers — not visible here,
    # confirm their shape before relying on the summary below.
    print("READ DIFF")
    pprint.pprint(read_diff)
    print("SEND DIFF")
    pprint.pprint(send_diff)

    print("TOTAL READ TIME:", sum(read_diff))
    s = 0
    for x in send_diff[1:]:
        s += x[1]
    print("TOTAL SEND TIME:", s)

    log_info(
        "################################################################################"
    )
    log_info("File Upload Completed. To download file use this name: ",
             file_name)
Code example #5
0
def run(raft_ip, raft_port, file_name, chunks=-1, downloads_folder="Downloads", dc_ip="", dc_port=""):
    """Download *file_name*, retrying incomplete chunks until done.

    With ``chunks == -1`` the raft node is asked for the file's location and
    each chunk is pulled from a hash-selected proxy, then the pieces are
    merged. Otherwise chunks ``0..chunks`` are pulled directly from the data
    center at ``dc_ip:dc_port``.

    Args:
        raft_ip: Host of the raft node.
        raft_port: Port of the raft node, as a string.
        file_name: Name of the file to fetch.
        chunks: -1 for proxy mode, otherwise the highest chunk index for
            direct data-center mode.
        downloads_folder: Folder chunks are written into.
        dc_ip: Data-center host (direct mode only).
        dc_port: Data-center port (direct mode only).
    """
    global next_sequence_to_download
    global maximum_number_of_sequences

    failed_chunks = {}

    def whole_file_downloaded(failed_chunks_dict):
        # BUG FIX: stale entries from the previous pass were never removed,
        # so chunks that had already finished were re-downloaded on every
        # retry iteration. Rebuild the dict from scratch each check.
        failed_chunks_dict.clear()
        is_whole_file_downloaded = True

        for i in range(len(next_sequence_to_download)):
            if next_sequence_to_download[i] < maximum_number_of_sequences[i]:
                failed_chunks_dict[i] = next_sequence_to_download[i]
                is_whole_file_downloaded = False

        return is_whole_file_downloaded

    file_location_info = file_transfer.FileLocationInfo()

    if chunks == -1:
        with grpc.insecure_channel(raft_ip + ':' + raft_port) as channel:
            stub = rpc.DataTransferServiceStub(channel)
            request = file_transfer.FileInfo()
            request.fileName = file_name

            file_location_info = get_file_location(stub, request)
            log_info("file_location_info")

            # Sequence maxima are unknown until the first response, so
            # start them at infinity to force at least one download pass.
            next_sequence_to_download = [0] * file_location_info.maxChunks
            maximum_number_of_sequences = [float('inf')] * file_location_info.maxChunks
    else:
        # Direct mode: only the last chunk's sequence count is unknown.
        next_sequence_to_download = [0] * (chunks + 1)
        maximum_number_of_sequences = [0] * (chunks + 1)
        maximum_number_of_sequences[chunks] = float('inf')

    while not whole_file_downloaded(failed_chunks):
        file_names = []
        chunk_nums = []
        next_sequence_to_download_arr = []
        proxy_addresses = []
        proxy_ports = []
        downloads_folders = []

        for chunk_num in failed_chunks.keys():
            if chunks == -1:
                selected_proxy = get_rand_hashing_node_from_node_info_object(file_location_info.lstProxy, file_name,
                                                                             chunk_num)
                proxy_address = selected_proxy.ip
                proxy_port = selected_proxy.port
                log_info("proxy selected", proxy_address, proxy_port)
            else:
                # data_center direct
                proxy_address = dc_ip
                proxy_port = dc_port
                log_info("data center selected", proxy_address, proxy_port)

            file_names.append(file_name)
            chunk_nums.append(chunk_num)
            next_sequence_to_download_arr.append(next_sequence_to_download[chunk_num])
            proxy_addresses.append(proxy_address)
            proxy_ports.append(proxy_port)
            downloads_folders.append(downloads_folder)

        pool = ThreadPool(THREAD_POOL_SIZE)
        pool.starmap(download_chunk,
                     zip(file_names, chunk_nums, next_sequence_to_download_arr, proxy_addresses, proxy_ports,
                         downloads_folders))
        pool.close()
        pool.join()

        log_info("number_of_sequences_downloaded ", next_sequence_to_download)
        log_info("maximum_number_of_sequences ", maximum_number_of_sequences)

    if chunks == -1:
        log_info("calling merge ")
        merge_chunks(file_location_info.fileName,
                     os.path.join(os.path.dirname(os.path.realpath(__file__)), "Downloads"),
                     file_location_info.maxChunks)

        log_info("Merge Completed")