Example No. 1
def handle_register_job(reader, writer):
    print(
        "-------------------------- REGISTER JOB --------------------------------",
        time.time())
    jobname_len = yield from reader.read(INT)
    jobname_len, = struct.Struct("!i").unpack(jobname_len)
    jobname = yield from reader.read(jobname_len + 3 * INT + SHORT)
    jobname, num_lambdas, jobGB, peakMbps, latency_sensitive = struct.Struct(
        "!" + str(jobname_len) + "siiih").unpack(jobname)
    jobname = jobname.decode('utf-8')

    # generate jobid
    if 'gg' in jobname:
        jobid = jobname + '-1234'
        jobid_int = 1234
    else:
        jobid_int = randint(0, 1000000)
        jobid = jobname + "-" + str(jobid_int)

    print("received hints ", jobid, num_lambdas, jobGB, peakMbps,
          latency_sensitive)
    # create dir named jobid
    createdirsock = pocket.connect(NAMENODE_IP, NAMENODE_PORT)
    if createdirsock is None:
        return
    pocket.create_dir(createdirsock, None, jobid)
    #pocket.close(createdirsock)

    if jobGB == 0 or peakMbps == 0:
        jobGB, peakMbps = compute_GB_Mbps_with_hints(num_lambdas, jobGB,
                                                     peakMbps,
                                                     latency_sensitive)

    # generate weightmask
    wmask, wmask_str = yield from generate_weightmask(jobid, jobGB, peakMbps,
                                                      latency_sensitive)
    # wmask = [(ioctlcmd.calculate_datanode_hash("10.1.88.82", 50030), 1)]

    # register job in table
    err = add_job(jobid, jobGB, peakMbps, wmask, wmask_str)

    # send wmask to metadata server
    ioctlsock = yield from ioctlcmd.connect(NAMENODE_IP, NAMENODE_PORT)
    if ioctlsock is None:
        return
    yield from ioctlcmd.send_weightmask(ioctlsock, jobid, wmask)

    # reply to client with jobid int
    resp_packer = struct.Struct(RESP_STRUCT_FORMAT + "i")
    resp = (RESP_LEN_BYTES + INT, TICKET, JOB_CMD, err, REGISTER_OPCODE,
            jobid_int)
    pkt = resp_packer.pack(*resp)
    writer.write(pkt)
    print(
        "-------------------------- REGISTERED JOB --------------------------------"
    )

    return
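For reference, a minimal sketch (not from the original repository) of how a client could pack the request this handler parses: a 4-byte big-endian job name length, then the job name followed by three ints and a short carrying the resource hints. The job name and hint values below are placeholders, and the socket send itself is omitted.

import struct

# Hypothetical client-side packing of the register-job request.
jobname = b"gg-sort"
num_lambdas, jobGB, peakMbps, latency_sensitive = 100, 50, 4000, 1

header = struct.Struct("!i").pack(len(jobname))
body = struct.Struct("!" + str(len(jobname)) + "siiih").pack(
    jobname, num_lambdas, jobGB, peakMbps, latency_sensitive)
request = header + body  # bytes to send to the job controller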
Example No. 2
def lambda_handler(event, context):
    # create a file of size (datasize) in bytes
    iter = 100
    datasize = 1024  #bytes
    jobid = "latency-test".join(
        random.sample(string.ascii_letters + string.digits, 6))
    namenode_ip = "10.1.47.178"

    file_tmp = '/tmp/file_tmp2'
    with open(file_tmp, 'w') as f:
        text = 'a' * datasize
        f.write(text)

    # connect to pocket
    p = pocket.connect(namenode_ip, 9070)

    # test read/write through buffer
    dir = jobid + "microbenchmark"
    pocket.create_dir(p, dir, "")
    jobid = dir

    t0 = time.time()
    pocket_write_buffer(p, jobid, iter, text, datasize)
    t1 = time.time()
    print("==========================================")
    print("Stats for " + str(iter) + " iter of " + str(datasize) +
          " bytes write_buffer:")
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print("throughput (Gb/s) = " + str(throughput))
    print("latency (us) = " + str((t1 - t0) / iter * 1e6))
    print("==========================================")

    text_back = " " * datasize
    t0 = time.time()
    pocket_read_buffer(p, jobid, iter, text_back, datasize)
    t1 = time.time()
    print("==========================================")
    print("Stats for " + str(iter) + " iter of " + str(datasize) +
          " bytes read_buffer:")
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print("throughput (Gb/s) = " + str(throughput))
    print("latency (us) = " + str((t1 - t0) / iter * 1e6))
    print("==========================================")

    t0 = time.time()
    pocket_lookup(p, jobid, iter)
    t1 = time.time()
    print("==========================================")
    print("Stats for " + str(iter) + " iter of " + str(datasize) +
          " bytes lookup (metadata RPC):")
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print("throughput (Gb/s) = " + str(throughput))
    print("latency (us) = " + str((t1 - t0) / iter * 1e6))
    print("==========================================")

    os.remove(file_tmp)
    return
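The three stat blocks above repeat the same arithmetic: throughput in Gb/s is iterations * datasize * 8 / elapsed / 1e9, and per-operation latency in microseconds is elapsed / iterations * 1e6. A small helper along these lines (not part of the original code; the label and callable arguments are illustrative) captures the pattern:

import time

def report_stats(label, iterations, datasize, fn):
    # Time fn(), then print throughput (Gb/s) and per-operation latency (us).
    t0 = time.time()
    fn()
    elapsed = time.time() - t0
    throughput = iterations * datasize * 8 / elapsed / 1e9
    latency_us = elapsed / iterations * 1e6
    print("==========================================")
    print("Stats for " + str(iterations) + " iter of " + str(datasize) +
          " bytes " + label + ":")
    print("throughput (Gb/s) = " + str(throughput))
    print("latency (us) = " + str(latency_us))
    print("==========================================")

For example, report_stats("write_buffer", iter, datasize, lambda: pocket_write_buffer(p, jobid, iter, text, datasize)) would replace the first block.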
Example No. 3
def lambda_handler(event, context):
    # create a file of size (datasize) in bytes
    iter = 50000
    datasize = 1024  #bytes
    jobid = "latency-test"
    namenode_ip = "10.1.0.10"

    file_tmp = '/tmp/file_tmp'
    with open(file_tmp, 'w') as f:
        text = 'a' * datasize
        f.write(text)

    # connect to pocket
    p = pocket.connect(namenode_ip, 9070)

    # test read/write through buffer
    dir = jobid + "1"
    pocket.create_dir(p, dir, "")
    jobid = dir

    t0 = time.time()
    pocket_write_buffer(p, jobid, iter, text, datasize)
    t1 = time.time()
    print "=========================================="
    print "Stats for " + str(iter) + " iter of " + str(
        datasize) + " bytes write_buffer:"
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print "throughput (Gb/s) = " + str(throughput)
    print "latency (us) = " + str((t1 - t0) / iter * 1e6)
    print "=========================================="

    text_back = " " * datasize
    t0 = time.time()
    pocket_read_buffer(p, jobid, iter, text_back, datasize)
    t1 = time.time()
    print "=========================================="
    print "Stats for " + str(iter) + " iter of " + str(
        datasize) + " bytes read_buffer:"
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print "throughput (Gb/s) = " + str(throughput)
    print "latency (us) = " + str((t1 - t0) / iter * 1e6)
    print "=========================================="

    t0 = time.time()
    pocket_lookup(p, jobid, iter)
    t1 = time.time()
    print "=========================================="
    print "Stats for " + str(iter) + " iter of " + str(
        datasize) + " bytes lookup (metadata RPC):"
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print "throughput (Gb/s) = " + str(throughput)
    print "latency (us) = " + str((t1 - t0) / iter * 1e6)
    print "=========================================="

    os.remove(file_tmp)
    return
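Since the handler above ignores its event and context arguments, it can be smoke-tested outside AWS with a plain call. A minimal sketch, assuming the file is importable as a module named latency_test (a hypothetical name):

# Quick local smoke test; the handler ignores event and context.
from latency_test import lambda_handler

if __name__ == "__main__":
    lambda_handler({}, None)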
Example No. 4
def handle_register_job(reader, writer):
    print(
        "-------------------------- REGISTER JOB --------------------------------",
        time.time())
    jobname_len = yield from reader.read(INT)
    jobname_len, = struct.Struct("!i").unpack(jobname_len)
    jobname = yield from reader.read(jobname_len + 3 * INT + SHORT)
    jobname, num_lambdas, jobGB, peakMbps, latency_sensitive = struct.Struct(
        "!" + str(jobname_len) + "siiih").unpack(jobname)
    jobname = jobname.decode('utf-8')

    # generate jobid
    if 'gg' in jobname:
        jobid = jobname + '-1234'
        jobid_int = 1234
    else:
        jobid_int = randint(0, 1000000)
        jobid = jobname + "-" + str(jobid_int)

    print("received hints ", jobid, num_lambdas, jobGB, peakMbps,
          latency_sensitive)
    # create dir named jobid
    # NOTE: this is blocking but we are not yielding
    createdirsock = pocket.connect(NAMENODE_IP, NAMENODE_PORT)
    if createdirsock is None:
        return
    ret = pocket.create_dir(createdirsock, None, jobid)

    if jobGB == 0 or peakMbps == 0:
        jobGB, peakMbps = compute_GB_Mbps_with_hints(num_lambdas, jobGB,
                                                     peakMbps,
                                                     latency_sensitive)
    # nvm_ip is empty here, so this loop is a no-op unless NVMe datanode IPs
    # are added to the list
    nvm_ip = []
    for each_ip in nvm_ip:
        print("Adding this nvme server " + each_ip)
        add_nvme_datanodes(each_ip)
    # generate weightmask
    print("Generating weightmask")
    wmask, wmask_str = yield from generate_weightmask(jobid, jobGB, peakMbps,
                                                      latency_sensitive)
    # wmask = [(ioctlcmd.calculate_datanode_hash("10.1.88.82", 50030), 1)]
    # register job in table
    #  nvm_ip = ['10.1.80.147', '10.1.71.111']
    #  for each_ip in nvm_ip:
    #    print("Adding this nvme server" + each_ip)
    #    add_nvme_datanodes(each_ip)
    #wmask[1] = (ioctlcmd.calculate_datanode_hash(nvm_ip, 1234), 0.25)
    #wmask_str[1] = (nvm_ip + ':1234', 0.24999999999999994)

    err = add_job(jobid, jobGB, peakMbps, wmask, wmask_str)

    # send wmask to metadata server
    ioctlsock = yield from ioctlcmd.connect(NAMENODE_IP, NAMENODE_PORT)
    if ioctlsock is None:
        return
    yield from ioctlcmd.send_weightmask(ioctlsock, jobid, wmask)

    # reply to client with jobid int
    resp_packer = struct.Struct(RESP_STRUCT_FORMAT + "i")
    resp = (RESP_LEN_BYTES + INT, TICKET, JOB_CMD, err, REGISTER_OPCODE,
            jobid_int)
    pkt = resp_packer.pack(*resp)
    writer.write(pkt)
    print(
        "-------------------------- REGISTERED JOB --------------------------------"
    )

    return
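handle_register_job is an old-style (yield from) asyncio stream callback with the (reader, writer) signature. A minimal sketch of how such a handler could be served with asyncio.start_server; the bind address and port below are placeholders, not values taken from the original controller:

import asyncio

loop = asyncio.get_event_loop()
# asyncio invokes the coroutine with (reader, writer) for each new connection.
coro = asyncio.start_server(handle_register_job, "0.0.0.0", 2345)
server = loop.run_until_complete(coro)
try:
    loop.run_forever()
finally:
    server.close()
    loop.run_until_complete(server.wait_closed())
    loop.close()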
Example No. 5
def lambda_handler(event, context):
    # create a file of size (datasize) in bytes
    iter = 10000
    datasize = 32  #bytes

    file_tmp = '/tmp/file_tmp'
    with open(file_tmp, 'w') as f:
        text = 'a' * datasize
        f.write(text)

    # write to crail
    p = pocket.connect("10.1.129.91", 9070)
    jobid = 'lambda3'
    print(pocket.create_dir(p, 'new_dir', jobid))
    print(pocket.put(p, file_tmp, "test", jobid))
    # NOTE: this early return makes the benchmark code below unreachable
    return

    r = pocket.register_job(p, jobid)  # works if return 0
    if r != 0:
        print "registration failed"
        return

    time_list = []
    t0 = time.time()
    pocket_write(p, jobid, iter, file_tmp)
    t1 = time.time()

    print "=========================================="
    #print np.percentile(time_list, 90)
    print "Stats for " + str(iter) + " iter of " + str(
        datasize) + " bytes write:"
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print "throughput (Gb/s) = " + str(throughput)
    print "time (s) = " + str(t1 - t0)
    print "latency (us) = " + str((t1 - t0) / iter * 1e6)
    print "=========================================="

    t0 = time.time()
    pocket_read(p, jobid, iter, file_tmp)
    t1 = time.time()

    print "=========================================="
    print "Stats for " + str(iter) + " iter of " + str(
        datasize) + " bytes read:"
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print "throughput (Gb/s) = " + str(throughput)
    print "time (s) = " + str(t1 - t0)
    print "latency (us) = " + str((t1 - t0) / iter * 1e6)
    print "=========================================="

    t0 = time.time()
    pocket_lookup(p, jobid, iter)
    t1 = time.time()

    print "=========================================="
    print "Stats for " + str(iter) + " iter of " + str(
        datasize) + " bytes lookup:"
    throughput = iter * datasize * 8 / (t1 - t0) / 1e9
    print "throughput (Gb/s) = " + str(throughput)
    print "time (s) = " + str(t1 - t0)
    print "latency (us) = " + str((t1 - t0) / iter * 1e6)
    print "=========================================="

    os.remove(file_tmp)
    return
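The pocket_write helper called above is not defined in this example. A hypothetical sketch of what such a loop could look like, built only from the pocket.put call used earlier; the per-iteration object naming is an assumption:

def pocket_write(p, jobid, iterations, src_filename):
    # Issue `iterations` blocking puts of the same local file under
    # distinct (hypothetical) object names; stop on the first error.
    for i in range(iterations):
        r = pocket.put(p, src_filename, "tmp-" + str(i), jobid)
        if r != 0:
            print("put failed on iteration " + str(i))
            return r
    return 0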