-
Notifications
You must be signed in to change notification settings - Fork 0
/
client.py
146 lines (115 loc) · 3.87 KB
/
client.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
# Standard-library imports (one per line, per PEP 8).
import os
import sys
import argparse
import json
import time
import signal
import struct
import socket
import subprocess
import traceback
import threading
import multiprocessing
import logging

# pycommons is optional tooling (command runner + nicer log setup); fall
# back to logging.basicConfig when it is not installed.
try:
    import pycommons
    from pycommons import generic_logging
    if __name__ == '__main__':
        generic_logging.init(level=logging.DEBUG)
except ImportError:
    # Catch only ImportError: the previous bare `except:` also swallowed
    # KeyboardInterrupt/SystemExit and any real bug in generic_logging.init.
    print('No pycommons..continuing anyway')
    logging.basicConfig(level=logging.DEBUG)

# NOTE(review): getLogger(__name__) is the usual convention; kept as
# __file__ to preserve the existing logger name in log output.
logger = logging.getLogger(__file__)

import common
import protocol_pb2
def setup_parser():
    """Build the argument parser for the job-distribution client.

    Options: --host/-H (required server address), --port/-p (default
    15225), --ncpus/-j (parallel jobs, defaults to the local CPU count).
    """
    p = argparse.ArgumentParser()
    p.add_argument(
        '--host', '-H', type=str, required=True,
        help='Host to connect to')
    p.add_argument(
        '--port', '-p', type=int, default=15225,
        help='Port on which host is running server')
    p.add_argument(
        '--ncpus', '-j', type=int, default=multiprocessing.cpu_count(),
        help='Number of jobs to run in parallel')
    return p
def handle_response(client_socket, queue, **kwargs):
    """Read and dispatch one length-prefixed Response from the server.

    Returns True when the response was handled (GENERIC or JOB_RESPONSE),
    False when the server reported a non-OK status.

    Bug fix: the error branch previously used a bare `return`, yielding
    None — but the caller in client() compares the result with `is False`,
    so registration failures were silently ignored. It now returns False.
    """
    # Wire format: 8-byte big-endian length header, then the serialized
    # protobuf message.
    length = struct.unpack('>Q', common.sock_read(client_socket, 8))[0]
    logger.debug("Response length: %d" % (length))
    msg = common.sock_read(client_socket, length)
    logger.info("Received response")

    response = protocol_pb2.Response()
    response.ParseFromString(msg)

    if response.status != protocol_pb2.OK:
        logger.error(response.error)
        return False  # was a bare `return` (None); callers test `is False`
    else:
        logger.debug("OK")

    result = False
    if response.type == protocol_pb2.Response.GENERIC:
        result = True
    elif response.type == protocol_pb2.Response.JOB_RESPONSE:
        # Hand the command line to the worker pool. signal_fix=True makes
        # the worker ignore SIGINT so only the parent reacts to Ctrl-C.
        cmdline = response.jobResponse.cmdline
        POOL.apply_async(func=run_cmdline, args=(cmdline, queue, True))
        result = True
    return result
def issue_job_request(socket):
    """Ask the server for one new job to run on this client."""
    logger.debug("Issuing job request")
    req = protocol_pb2.Request()
    req.type = protocol_pb2.Request.JOB_REQUEST
    send_request(socket, req)
def run_cmdline(cmdline, queue, signal_fix):
    """Run *cmdline* via pycommons.run and report the outcome on *queue*.

    The queue receives a (cmdline, returncode, stdout, stderr) tuple.
    """
    if signal_fix:
        # Pool workers must ignore SIGINT so that Ctrl-C is handled only
        # by the parent process.
        signal.signal(signal.SIGINT, signal.SIG_IGN)
    result = pycommons.run(cmdline)
    queue.put((cmdline,) + tuple(result))
def response_handler(socket, queue):
    """Consume server responses forever; runs in a dedicated thread."""
    while True:
        handle_response(socket, queue)
def send_request(socket, request):
    """Serialize *request* and send it length-prefixed over *socket*."""
    payload = request.SerializeToString()
    logger.debug("Request length: %d" % (len(payload)))
    # 8-byte big-endian length header followed by the protobuf payload,
    # sent in a single sendall() call.
    header = struct.pack('>Q', len(payload))
    socket.sendall(header + payload)
    logger.info("Request sent")
def client(host, port, ncpus, **kwargs):
    """Connect to the job server, register, and run jobs until killed.

    Registers this machine with *ncpus* workers, then keeps exactly
    *ncpus* job requests outstanding: each completed job is reported back
    and immediately followed by a request for a replacement.
    """
    try:
        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client_socket.connect((host, port))
    except Exception:
        logger.critical('Could not connect to server!')
        logger.critical(traceback.format_exc())
        return

    # Register this client and its degree of parallelism with the server.
    request = protocol_pb2.Request()
    request.type = protocol_pb2.Request.REGISTER_CLIENT_REQUEST
    request.registerClientRequest.ncpus = ncpus
    send_request(client_socket, request)
    logger.info("Register client request sent")

    # Bug fix: treat any falsy result as failure. handle_response returned
    # None (not False) on an error status, so the old `is False` comparison
    # let registration failures slip through silently.
    if not handle_response(client_socket, None, **kwargs):
        logger.error("Failed to register client")
        sys.exit(-1)

    # Completed jobs are reported by pool worker processes via this queue.
    manager = multiprocessing.Manager()
    queue = manager.Queue()

    # All further responses (including new job assignments) are consumed
    # by this background thread.
    t = threading.Thread(target=response_handler, args=(client_socket, queue))
    t.start()

    # Prime the pipeline with one outstanding request per CPU.
    for _ in range(ncpus):
        issue_job_request(client_socket)

    # Beyond this point, the handle_response function will take care
    # of running the actual jobs. So we just handle the responses.
    while True:
        cmdline, ret, stdout, stderr = queue.get()
        request = protocol_pb2.Request()
        request.type = protocol_pb2.Request.JOB_COMPLETE_REQUEST
        request.jobCompleteRequest.cmdline = cmdline
        request.jobCompleteRequest.ret = ret
        request.jobCompleteRequest.stdout = stdout
        request.jobCompleteRequest.stderr = stderr
        send_request(client_socket, request)
        # Keep the pipeline full: one new request per completed job.
        issue_job_request(client_socket)
def main(argv):
    """Entry point: parse arguments, create the worker pool, run the client."""
    args = setup_parser().parse_args(argv[1:])
    # POOL is global so handle_response can dispatch jobs onto it.
    global POOL
    POOL = multiprocessing.Pool(args.ncpus)
    client(**vars(args))


if __name__ == '__main__':
    main(sys.argv)