示例#1
0
def runTest(data):
    """Kick off a JMeter test run on the caller's cluster in a child process.

    ``data`` carries the cluster id, jmx file name and output path; the
    cluster manager is looked up from the session.  Progress/timeout is
    reported back to the browser over socketio.
    """
    from multiprocessing import Process
    cluster_id = data["clusID"]
    jmx_name = data["jmx_name"]
    out_path = data["output"]
    mngr = clusterMngrs[session["_id"]]

    def run_in_child():
        # Cluster never became ready -> tell the client we timed out.
        if not mngr.checkStatus(socketio.sleep):
            socketio.emit('time_out',
                          namespace='/redirect',
                          room=mngr.sid)
            return
        mngr.refreshConnections()
        mngr.updateRemotehost()
        mngr.startSlavesServer()
        socketio.sleep(3)
        mngr.runTest(jmx_name, out_path)
        mngr.stopSlavesServer()
        emit('cluster_finished', {'msg': "finished"},
             namespace='/redirect',
             room=mngr.sid)
        print("Finished\n")

    child = Process(target=run_in_child)
    with jredirectors[mngr.sid]:
        child.start()
    processes[cluster_id] = child
示例#2
0
def uploadFiles():
    """Save the browser-uploaded files for a cluster and sync them to slaves.

    Reads the cluster id and file list from the current Flask request and
    performs the slow work (saving, uploading, listing) in a child process so
    the HTTP handler returns immediately.  The child notifies the client over
    socketio ('upload_done' with the file lists, or 'time_out').

    Returns:
        str: empty string as the immediate HTTP response body.
    """
    from multiprocessing import Process as P
    clusterID = request.form["clusID"]
    files = request.files.getlist("file")
    clusterMngr = clusterMngrs[session["_id"]]

    def wrapper():
        # Per-cluster upload directory: <cwd>/<UPLOAD_FOLDER>/<clusterID>.
        path_to_upload = os.path.join(os.getcwd(), app.config['UPLOAD_FOLDER'],
                                      clusterID)
        if clusterMngr.checkStatus(socketio.sleep):
            for file in files:
                filename = secure_filename(file.filename)
                # Save into the same directory that is listed below.  The
                # original concatenation (UPLOAD_FOLDER + clusterID + "/")
                # only matched path_to_upload when UPLOAD_FOLDER happened to
                # end with a path separator.
                file.save(os.path.join(path_to_upload, filename))
            clusterMngr.refreshConnections()
            clusterMngr.uploadFiles()
            try:
                clusterMngr.setUploadDir(path_to_upload)
                tmp = os.listdir(path_to_upload)
            except Exception:
                # Best effort: treat an unreadable/missing dir as "no files".
                # (Was a bare except, which also swallowed SystemExit/^C.)
                tmp = []
            tmp = [ff for ff in tmp if not ff.startswith(".")]
            jmxList = [f for f in tmp if f.endswith(".jmx")]
            socketio.emit('upload_done',
                          json.dumps({
                              "jmxList": jmxList,
                              "files": tmp
                          }),
                          namespace='/redirect',
                          room=clusterMngr.sid)
            # don't know why in mp, must print something after emit, otherwise it won't emit.
            # probably because print calls jredirector, which contains socketio.sleep
            print("")
        else:
            socketio.emit('time_out',
                          "{}",
                          namespace='/redirect',
                          room=clusterMngr.sid)
            print("")

    p = P(target=wrapper)
    with jredirectors[clusterMngr.sid]:
        p.start()
    return ""
示例#3
0

def f(x):
    """Return *x* squared."""
    return pow(x, 2)


def ellmatr(i, j, k):
    """Compute element (i, j) of the product of the 2-column matrices A and B
    and print it.

    Serialised on the module-level ``lock`` so output lines stay intact;
    ``k == 2`` marks the last column of a row, which ends the printed line.
    A, B, lock and sleep come from the enclosing module.
    """
    with lock:
        sleep(1)
        result = sum(A[i][t] * B[t][j] for t in range(2))
        if k == 2:
            # End of a row: print the value followed by a blank line.
            print(result, "\n")
        else:
            print(result, end=' ')


if __name__ == '__main__':
    # One process per matrix element; the column index doubles as the
    # "last column" flag (the original kept a separate counter k == j).
    for i in range(3):
        for j in range(3):
            pr = P(target=ellmatr, args=(i, j, j))
            proc.append(pr)
            pr.start()
    # Wait for every element worker before exiting.
    for worker in proc:
        worker.join()
示例#4
0
import os
import argparse
from multiprocessing import Process as P

def worker(name):
    """Run the external ``mdl`` command on *name* through the shell.

    NOTE(review): *name* is interpolated into the command line unescaped --
    acceptable for trusted CLI use, unsafe for untrusted input.
    """
    os.system("mdl %s" % name)


# CLI: <t> = number of worker processes, <m> = argument passed to each.
parser = argparse.ArgumentParser()
parser.add_argument("t", type=int)
parser.add_argument("m")
args = parser.parse_args()

# Fan out: launch args.t identical workers, then wait for them all.
children = []
for _ in range(args.t):
    child = P(target=worker, args=(args.m, ))
    child.start()
    children.append(child)

for child in children:
    child.join()
示例#5
0
    # NOTE(review): this fragment is the tail of a larger function --
    # load_data, worker, args, mean, std and train_dataset are defined
    # outside this view.
    test_dic = load_data(
        "/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
    # The unpickled batch uses bytes keys.
    test_data = test_dic[b'data']
    test_label = test_dic[b'labels']
    #test_data = (test_data - mean) / std
    valid_dataset = [test_data, test_label]
    #valid_dataset = [data[49500:], labels[49500:]]

    # Shared coordination objects: a one-slot queue seeded with 0 and a lock,
    # handed to every worker (the exact protocol lives in worker, not shown).
    lis = []
    que = Queue(1)
    que.put(0)
    lock = Lock()

    # Spawn args.t training workers, all sharing the same dataset and queue.
    p = "lyy.CIFAR10.train"
    for i in range(args.t):
        proc = P(target=worker,
                 args=(train_dataset, p, que, lock, True, mean, std))
        proc.start()
        lis.append(proc)

    # Roughly one validation worker per ten training workers (at least one),
    # with their own queue but the shared lock.
    que_val = Queue(1)
    que_val.put(0)
    p_val = "lyy.CIFAR10.valid"
    for i in range(args.t // 10 + 1):
        proc = P(target=worker,
                 args=(valid_dataset, p_val, que_val, lock, False, mean, std))
        proc.start()
        lis.append(proc)

    # Block until every spawned process has finished.
    for i in lis:
        i.join()
示例#6
0
#!/usr/bin/python
from multiprocessing import Process as P
import time


def mp(worker="Joe", sleeptime=5):
    """Announce *worker*, pause for *sleeptime* seconds, then report done."""
    banner = "sleeping: %d sec" % sleeptime
    print(worker, banner)
    time.sleep(sleeptime)
    print(worker, "Done")


# Launch three sleepers of different durations and demonstrate join()
# timeouts: join(timeout=...) can return while the child is still alive.
w1 = P(target=mp, args=('A', 2))
w2 = P(target=mp, args=('B', 40))
w3 = P(target=mp, args=())
start_time = time.time()
print(
    "Starting w1 = P(target=mp, args=('A', 2)) and w2 = P(target=mp, args=('B', 40)) and w3=P(target=mp)"
)
w1.start()
w2.start()
w3.start()
print("Joining w1.join(timeout=3); w2.join(timeout=4); w3.join(timeout=6)")
w1.join(timeout=3)
w2.join(timeout=4)
w3.join(6)
if w1.is_alive():
    # w1 outlived its join timeout.  The original had Python-2
    # `print "w1 Timeout"` and a bare `terminate()` (a NameError) --
    # clearly meant to kill w1 before the final blocking join.
    print("w1 Timeout")
    w1.terminate()
    w1.join()
if w2.is_alive():
    print("w2 Timeout")
示例#7
0
        "/unsullied/sharefs/liuyanyi02/lyy/CIFAR/cifar-10-batches-py/test_batch"
    )
    # NOTE(review): this fragment is the tail of a larger function -- the
    # call closed above (presumably the loader that fills test_dic) and
    # worker/args/mean/std/train_dataset are all defined outside this view.
    # The unpickled batch uses bytes keys.
    test_data = test_dic[b'data']
    test_label = test_dic[b'labels']
    #test_data = (test_data - mean) / std
    valid_dataset = [test_data, test_label]
    #valid_dataset = [data[49500:], labels[49500:]]

    # Shared coordination objects: a one-slot queue seeded with 0 and a lock,
    # handed to every worker (the exact protocol lives in worker, not shown).
    lis = []
    que = Queue(1)
    que.put(0)
    lock = Lock()

    # Spawn args.t training workers, all sharing the same dataset and queue.
    p = "lyy.CIFAR10.train"
    for i in range(args.t):
        proc = P(target=worker,
                 args=(train_dataset, p, que, lock, True, mean, std))
        proc.start()
        lis.append(proc)
    # The validation workers were disabled by turning the code into a bare
    # string literal (a no-op expression); kept verbatim below.
    """
	que_val = Queue(1)
	que_val.put(0)
	p_val = "lyy.CIFAR10.valid"
	for i in range(args.t // 10 + 1):
		proc = P(target = worker, args = (valid_dataset, p_val, que_val, lock, False, mean, std))
		proc.start()
		lis.append(proc)
	"""

    # Wait for all spawned workers to finish.
    for i in lis:
        i.join()
示例#8
0
File: line.py  Project: dannylty/cakeshop

# NOTE(review): this block is truncated -- the body of the final ``for``
# loop continues past the end of this view.
if __name__ == '__main__':
    # Number of worker processes comes from the command line.
    nprocess = int(argv[1])
    # Work queue seeded with one Line() candidate per process.
    q = Q()
    for _ in range(nprocess):
        q.put(Line())
    # Random cubic target polynomial, sampled at integer x in [-5, 5).
    target = np.random.normal(scale=1, size=4)
    # target = np.array([1, -0.5, -3, 10])
    points = np.array([
        target[0] * x**3 + target[1] * x**2 + target[2] * x + target[3]
        for x in range(-5, 5)
    ])  # + np.random.normal(size = 10)
    # One Pipe per process; each child receives the second connection object.
    pipes = [Pipe() for _ in range(nprocess)]
    processes = [
        P(target=lineprocess, args=(q, pipes[i][1], points, 10000, 10))
        for i in range(nprocess)
    ]

    for p in processes:
        p.start()

    # Wait for every fitter to finish before reading results.
    for p in processes:
        p.join()

    print("________________ Results ________________\n")
    print("Target:", target)
    print("Points:", points)
    plt.plot(points, color="red")

    # Collect each child's result from its parent-side connection.
    for a, _ in pipes:
示例#9
0
# Shared log file for every process below.  CRFHandler is project-local
# (not shown here) -- presumably a concurrency-safe rotating file handler;
# arguments mirror RotatingFileHandler: append mode, 4 KiB per file,
# 3 backup files.  TODO confirm against its definition.
logfile = os.path.abspath("/home/oraant/study/data2/test2.log")
loghandler = CRFHandler(logfile, 'a', 1024 * 4, 3)

# Root logger (getLogger with no name) at INFO so records from any module
# reach the shared handler.
logger = getLogger()
logger.addHandler(loghandler)
logger.setLevel(INFO)


def log(name):
    """Emit 40 INFO records tagged "<pid>, <name>".

    Sleeps a random 0-0.1 s before each record so the interleaving of
    concurrent processes shows up in the shared log file.
    """
    msg = "%s, %s" % (os.getpid(), name)

    for _ in range(40):
        sleep(random() / 10)
        logger.info(msg)


# Three children plus the main process all log through the same handler
# at the same time.
p1 = P(target=log, args=('p1', ))
p2 = P(target=log, args=('p2', ))
p3 = P(target=log, args=('p3', ))

for child in (p1, p2, p3):
    child.start()
log('main')

# Conclusion: multiple processes do not conflict, and rotation keeps working.
def log(msg):
    """Log *msg* (a child's integer id) as an ERROR record.

    NOTE(review): this redefines the earlier ``log(name)`` helper; only code
    executed after this point sees this version.
    """
    # Lazy %-args: logging interpolates only if the record is actually
    # emitted (the original formatted eagerly with '%' at the call site).
    logger.error('log in child: %d', msg)


# Spawn ``number`` children (argv[1], default 10) that each log one ERROR
# record, and time the whole fan-out/fan-in.
try:
    number = sys.argv[1]
except IndexError:
    # No CLI argument given -- fall back to 10 children.
    # (Was a bare except, which also swallowed SystemExit/^C.)
    number = 10
number = int(number)

print('number is %d' % number)
oldtime = time.time()
print('start time %s' % oldtime)

l = []
for i in range(number):
    p = P(target=log, args=(i, ))
    l.append(p)

# Truncate the scratch log file before the children start writing.
with open('tmp.log', 'w') as fh:
    fh.write('')

# Explicit loops, not map(): under Python 3 map() is lazy, so the original
# map(lambda x: x.start(), l) would never actually start/join anything.
for p in l:
    p.start()
logger.error('log in father')

for p in l:
    p.join()
newtime = time.time()
print('end time %s' % newtime)
print('gap time %s' % (newtime - oldtime))