def main():
    """Run on program start.

    Listens briefly for an existing master broadcast. If nobody answers
    before the discovery socket times out, this process promotes itself
    to master; otherwise it joins the responding master as a slave.
    """
    m_sock = network.socket_setup(39500)
    try:
        master_addr = m_sock.recvfrom(4096)[1]
        # A master answered, so join it as a slave. (The original code
        # never set `state` here, raising NameError on this path.)
        state = 'slave'
    except socket.timeout:
        # Nobody answered: become the master.
        state = 'master'
    finally:
        # The socket is only needed for discovery; close it on both
        # paths (the original leaked it on the slave path).
        m_sock.close()

    if state == 'master':
        mstr = master.Master()
        mstr.run()
        # Print the system state from a daemon thread so it never
        # blocks shutdown.
        print_thread = Thread(target=mstr.print_system)
        print_thread.setDaemon(True)
        print_thread.start()
        elev = elevator.Elev()
        elev.state = 'master'
        elev.master_addr = mstr.ip
        elev.run(constants.ELEV_MODE)
        states.master()
    else:
        elev = elevator.Elev()
        elev.master_addr = master_addr[0]
        elev.run(constants.ELEV_MODE)
        states.slave(elev)
        # Keep a local backup master so we can be promoted if the
        # current master dies.
        backup = master.Master()
        elev.backup = backup
        states.backup(elev, backup)
        states.master()
def send_log(httpRequest, source):
    """Send the specified log source's content over HTTP.

    Honors If-Modified-Since by replying 304 when the client's copy
    is current.
    """
    name = source[:-4]  # drop the '.txt' suffix from the request name
    log_server = master.Master().getLogServer()
    if not log_server.hasSource(name):
        httpRequest.send_error(404, 'Not found')
        return
    entry = log_server.getSource(name)
    mtime = httpRequest.date_time_string(entry['time'])
    if httpRequest.headers.get('if-modified-since') == mtime:
        # Client copy is up to date; no body needed.
        httpRequest.send_response(304, 'Not modified')
        httpRequest.end_headers()
        return
    httpRequest.send_response(200)
    httpRequest.send_header('Content-type', 'text/plain')
    httpRequest.send_header('Content-Length', str(entry['size']))
    httpRequest.send_header('Last-Modified', mtime)
    httpRequest.end_headers()
    httpRequest.wfile.write(entry['buffer'].encode())
def function(masterfile):
    """Build a Master instance (master.py) for *masterfile*.

    NOTE(review): relies on `masterdir` and `args` coming from an
    enclosing scope -- confirm against the caller.
    """
    # Full path of the master file inside the master directory.
    path = "{}/{}".format(masterdir, masterfile)
    # Number of data files linked to this master file.
    frame_total = master.get_number_of_files(path)
    # Wrap everything in a Master instance; the result is not returned.
    master.Master(args, path, frame_total)
def nodeEnabled(self, httpRequest):
    """Set a render node's enabled flag.

    Expects GET parameters `nodeUUID` and `enabled` (0 disables,
    anything else enables). Replies with a JSON result status.
    """
    try:
        nodeUUID = httpRequest.GET['nodeUUID']
        enabled = int(httpRequest.GET['enabled'])
    except (KeyError, ValueError, TypeError):
        # Missing or non-numeric parameters (the original bare
        # `except:` hid every other error too).
        _send_json(httpRequest, {'result': 'fail'})
        return
    render_server = master.Master().getRenderServer()
    node = render_server.getNode(nodeUUID)
    answer = {'result': 'ok'}
    if node is None:
        answer['result'] = 'fail'
    else:
        node.setEnabled(enabled != 0)
    _send_json(httpRequest, answer)
def setup(s3_application_url, bucket, max_nodes, params):
    """Create, initialise and return the master node.

    The master monitors data-node health and maintains the queue of
    pending tasks. To emulate EMR pricing, the master node itself is
    excluded from the costs.
    """
    node = master.Master(bucket, max_nodes, s3_application_url, params)
    node.setup()
    return node
def main():
    """Parse CLI arguments, prepare result folders and run the simulation."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--s3_application_url", type=str, required=True,
                        help="S3 URL for application code")
    parser.add_argument("--parameters", type=str, required=True,
                        help="File containing simulation distribution parameters")
    parser.add_argument("--result_folder", type=str, required=True,
                        help="Folder to put results in")
    args = parser.parse_args()

    # Original leaked the file handle (open().read() with no close).
    with open(args.parameters) as handle:
        params = json.loads(handle.read())

    # Clear any stale objects from a previous run.
    s3 = boto3.resource("s3")
    for bucket in ["maccoss-ec2", "maccoss-emr"]:
        s3.Bucket(bucket).objects.filter(Prefix="0/").delete()

    # Make sure the result folder hierarchy exists.
    for folder in [args.result_folder,
                   args.result_folder + "/tasks",
                   args.result_folder + "/nodes"]:
        if not os.path.isdir(folder):
            os.mkdir(folder)

    # Keep a copy of the parameter file next to the results.
    shutil.copyfile(args.parameters,
                    args.result_folder + "/" + args.parameters.split("/")[-1])

    m = master.Master(args.s3_application_url, args.result_folder, params)
    m.setup()
    m.start(asynch=True)
    simulation.run(params, m, False)
    m.shutdown()
def completedJobs(self, httpRequest):
    """Send the list of all completed jobs to the client."""
    server = master.Master().getRenderServer()
    self._send_jobs(httpRequest, server.getCompletedJobs())
def runningJobs(self, httpRequest):
    """Send the list of all currently running jobs to the client."""
    server = master.Master().getRenderServer()
    self._send_jobs(httpRequest, server.getJobs())
def main():
    """Start the master RPC service unless the address is already taken."""
    addr, port = conf.MASTER_IP, conf.MASTER_PORT
    if doesServiceExist(addr, port):
        print("%s:%s already been used! change another port" % (addr, port))
        exit(1)
    master_node = master.Master(TaskLoader(), conf)
    # The RPC server runs in its own thread; the master blocks here.
    RPCServerThread(master_node).start()
    master_node.serve_forever()
def main():
    """Train the model end to end: vocab, embeddings, data, graph, train."""
    trainer = master.Master('conf.json')
    vocab = trainer.load_vocab()
    trainer.build_emb(vocab)
    trainer.load_data()
    trainer.creat_graph()  # (sic) method name as defined by master.Master
    trainer.train()
    logging.info("Done Train !")
def __init__(self, app):
    """Load the UI, create the Master data model and hook up the views."""
    self.setup('ui/test.ui')
    self._app = app
    self._master = ms.Master('data.test', self)
    self.moving = []
    # Attach each Master tree to its matching tree view in the UI.
    for kind in ('morp', 'dict', 'temp', 'symb'):
        view = getattr(self.ui, f'{kind}TreeView')
        view.setModel(getattr(self._master, f'_{kind}'))
    self.ui.morpTreeView.doubleClicked.connect(self.move)
def jobs(self, httpRequest):
    """Send the list of all (running and completed) jobs, sorted by UUID."""
    server = master.Master().getRenderServer()
    all_jobs = server.getJobs() + server.getCompletedJobs()
    all_jobs.sort(key=lambda job: job.getUUID())
    self._send_jobs(httpRequest, all_jobs)
def main():
    """Start the master object and serve the web UI until it dies."""
    import master, os
    try:
        # Drop any chart left over from a previous run.
        if os.path.exists("./chart.svg"):
            os.remove("./chart.svg")
        glob.master_obj = master.Master()
        glob.master_obj.start()
        app.run(host=cfg.listen_addr, port=cfg.listen_port)
    except Exception:
        import traceback
        traceback.print_exc()
        # Exit non-zero so supervisors notice the failure (the original
        # bare except exited with status 0 even on a crash).
        os._exit(1)
def main(num_workers=DEFAULT_NUM_WORKERS, port_number=DEFAULT_PORT_NUM):
    """Reset the scratch directory and hand control to a new master."""
    tmp_dir = ".tmp"
    # Start from a clean temp directory every run.
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    # Create a new master and let it take over.
    master_ = master.Master(num_workers, port_number)
def main():
    """Fork workers: the master process schedules, the others crawl."""
    ft = frontier.Frontier([
        ('http://m.sohu.com/', 1),
    ])
    bot = robot.Robot(fetcher.HTTPFetcher(), [
        ('/.*', AHandler),
    ])
    proc_id = fork_processes(0)  # renamed from `id` (shadowed builtin)
    if proc_id == master_id():
        master.Master(ft).start()
    else:
        worker.Worker(bot).start()
    IOLoop.instance().start()
def translate_path(path):
    """Map a request path to a real filesystem path under the site root.

    Strips the query string, percent-decodes the path and resolves it
    relative to the HTTP server's site root.
    """
    http_server = master.Master().getHTTPServer()
    site_root = http_server.getSiteRoot()
    # urllib.parse.splitquery() was deprecated in Python 3.8 and later
    # removed; splitting at the first '?' is its exact behavior for the
    # path component.
    path = path.split('?', 1)[0]
    path = urllib.parse.unquote(path)
    # path[1:] drops the leading '/' so join stays under site_root.
    path = os.path.join(site_root, path[1:])
    return os.path.realpath(path)
def send_frame(httpRequest, jobName, frameName):
    """Send the specified rendered frame (or its thumbnail) to the client.

    Honors If-Modified-Since and maps filesystem errors to HTTP errors.
    """
    jobName = jobName[4:]  # strip the 'job-' prefix
    render_server = master.Master().getRenderServer()
    job = render_server.getJob(jobName)
    if job is None or frameName not in job.getRenderFiles():
        httpRequest.send_error(404, 'Not found')
        return
    if 'thumbnail' in httpRequest.GET:
        fname = job.getThumbnail(frameName)
        if fname is None:
            fname = '/pics/not_avaliable.png'
    else:
        fname = os.path.join(job.getStoragePath(), 'out', frameName)
    try:
        with open(fname, 'rb') as handle:
            # guess_type() returns (type, encoding); only the type is a
            # valid Content-type value (the original sent the tuple).
            ctype = guess_type(fname)[0]
            fs = os.fstat(handle.fileno())
            mtime = httpRequest.date_time_string(fs[stat.ST_MTIME])
            if httpRequest.headers.get('if-modified-since') == mtime:
                httpRequest.send_response(304, 'Not modified')
                httpRequest.end_headers()
            else:
                content_len = str(fs[stat.ST_SIZE])
                httpRequest.send_response(200)
                httpRequest.send_header('Content-type', ctype)
                httpRequest.send_header('Content-Length', content_len)
                httpRequest.send_header('Last-Modified', mtime)
                httpRequest.end_headers()
                shutil.copyfileobj(handle, httpRequest.wfile)
    except IOError as e:
        if e.errno == errno.ENOENT:
            httpRequest.send_error(404, 'Not found')
        elif e.errno == errno.EACCES:
            httpRequest.send_error(403, 'Forbidden')
        elif e.errno == errno.EPIPE:
            # pipe was closed by client, what could we do?
            pass
        else:
            # 500, not 505: 505 means 'HTTP Version Not Supported'.
            httpRequest.send_error(500, 'Internal server error')
def __init__(self):
    """Initialise the server: debug, config, unitsync, master and hosts."""
    super(Server, self).__init__('server.pid')
    self.ClassDebug = debug.Debug()
    self.Debug = self.ClassDebug.Debug
    self.Debug("Initiate")
    self.LoadCFG = loadCFG.LoadCFG(self)
    try:
        # Prefer the compiled pyunitsync module when available.
        self.Unitsync = __import__('pyunitsync')
    except ImportError:
        # Fall back to the pure-python wrapper. (The original bare
        # `except:` would also have masked unrelated errors.)
        import unitsync
        self.Unitsync = unitsync.Unitsync(self.Config['UnitsyncPath'])
    self.Unitsync.Init(True, 1)
    self.LoadMaps()
    self.LoadMods()
    self.Master = master.Master(self)
    self.Hosts = {}
def main():
    """Process HDF5 master files: build a Master per file and run its wells."""
    time1 = time.time()
    parser = argparse.ArgumentParser(
        description='Arguments required to process the data: input, beamcenter, distance.')
    parser.add_argument('-i', '--input', type=str, nargs='+', required=True,
                        help='Path of Directory containing HDF5 master file(s)')
    parser.add_argument('-b', '--beamcenter', type=int, nargs=2, required=True,
                        help='Beam center in X and Y')
    parser.add_argument('-r', '--oscillations', type=float, default=1,
                        help='Oscillation angle per well')
    parser.add_argument('-d', '--distance', type=float, required=True,
                        help='Detector distance in mm')
    parser.add_argument('-w', '--wavelength', type=float, default=1.216,
                        help='Wavelength in Angstrom')
    parser.add_argument('-f', '--framesperdegree', type=int, default=5,
                        help='Number of frames per degree')
    parser.add_argument('--output', default=os.getcwd(),
                        help='Use this option to change output directory')
    parser.add_argument('-sg', '--spacegroup', help='Space group')
    parser.add_argument('-u', '--unitcell', type=str,
                        default="100 100 100 90 90 90", help='Unit cell')
    # Parse once (the original called parse_args() twice and discarded
    # the first result).
    args = parser.parse_args()

    # Collect every '*master.h5' in each input directory and wrap it in
    # a Master instance (defined in Master.py).
    for masterdir in args.input:
        master_list = fnmatch.filter(os.listdir(masterdir), "*master.h5")
        print(master_list)
        for masterfile in master_list:
            # Number of data files linked to this master file.
            masterpath = "{}/{}".format(masterdir, masterfile)
            totalframes = master.getNumberOfFiles_fast(masterpath)
            master_class = master.Master(args, masterpath, totalframes)
            # Generates empty directories named after the master file(s)
            # and writes the master dictionary to DICTIONARY.json.
            master_class.create_and_run_Data_Wells()

    time2 = time.time()
    print("Total time: {:.1f} s".format(time2 - time1))
def factory(self, role, config):
    """Instantiate the child object (or master) matching *role*."""
    print(self)
    print(role)
    print(config)
    # role = self.checkForMaster(role, config)
    if role == "content":
        import content
        self.child = content.Content(config)
        print('Content')
    elif role == "master":
        # The master role has no child; it owns the master object.
        import master
        self.child = None
        self.master = master.Master(config)
        print('master')
def registerNode(self, host_info, client_info):
    """Create and register a new render node; returns its UUID."""
    node = RenderNode(host_info, client_info)
    with self.nodes_lock:
        self.nodes.append(node)
        self.nodes_hash[node.getUUID()] = node
        # Every node gets its own source on the master's log server.
        log_server = master.Master().getLogServer()
        log_server.addSource('node-' + node.getUUID())
        Logger.log('Registered new render node {0} at {1}'.format(
            node.getUUID(), node.getIP()))
        return node.getUUID()
def execute(httpRequest):
    """Execute file/directory handler.

    Sends file content or a directory listing to the client. Requests
    that escape the site root get 403; a directory URL without a
    trailing slash is redirected to its slash form (as apache does).
    """
    http_server = master.Master().getHTTPServer()
    site_root = http_server.getSiteRoot()
    path = translate_path(httpRequest.path)
    if not PathUtil.isPathInside(path, site_root):
        httpRequest.send_error(403, 'Forbidden')
        return
    if os.path.isdir(path):
        prefix = httpRequest.path.split('?', 1)[0]
        prefix = prefix.split('#', 1)[0]
        if not prefix.endswith('/'):
            # urllib.parse.splitquery() was deprecated in 3.8; extract
            # the query string manually (exact same semantics: '' when
            # there is no '?', '?' + query otherwise, even if empty).
            parts = httpRequest.path.split('?', 1)
            suffix = '?' + parts[1] if len(parts) > 1 else ''
            # redirect browser - doing basically what apache does
            httpRequest.send_response(301)
            httpRequest.send_header('Location', prefix + '/' + suffix)
            httpRequest.end_headers()
            return
        else:
            found = False
            for index in ['index.html', 'index.htm']:
                index = os.path.join(path, index)
                if os.path.exists(index):
                    path = index
                    found = True
                    break
            if not found:
                list_directory(httpRequest, path)
                return
    send_file(httpRequest, path)
def nodes(self, httpRequest):
    """Send a JSON summary of every registered render node."""
    render_server = master.Master().getRenderServer()
    summary = [{'uuid': node.getUUID(),
                'enabled': node.isEnabled(),
                'ip': node.getIP(),
                'host_info': node.getHostInfo(),
                'hostname': node.getHostname()}
               for node in render_server.getNodes()]
    _send_json(httpRequest, summary)
def __init__(self) : #socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) #create id of server using the hash of IP address and MAC self.HOST = '' # Symbolic name meaning all available interfaces self.PORT = 8966 # All servers will listen on this port -- to listen to CLIENTS self.PEER_PORT = 9566 # to listen to PEERS self.ip = ni.ifaddresses('wlp3s0')[2][0]['addr'] self.socket_obj = {} #bound = self.socket_bind() #here a mutex has to be implemented #this file should be implemented using a database ... #wherein tthe server gets to know that it is the first server #hence must assume responsibility of 'Master' fHandle = open('master_stub.txt') data = fHandle.read() fHandle.close() data = data.strip() if data == '0' : print "Initiating master" master_node = master.Master() #dine else : # as the master writes to this file ... i.e. the database that it is the server # but for this file the master will put its ip address .... and read can be done simulaneously.. # when we generalise the server has to register to all of the servers live at that time # when the server demotes it to a normal server ... it disconnects and this has to be detected my the # reglar tier 2 server self.register_to_master(data) try: Thread(target=self.bind_and_serve, args=()).start() except Exception, errtxt: print errtxt
def logs_listing(httpRequest):
    """Send the listing of available log sources (not jobs)."""
    log_server = master.Master().getLogServer()
    sources = log_server.getSources()
    sources.sort(key=lambda src: src['name'])
    listing = [{'name': src['name'] + '.txt',
                'is_dir': False,
                'unix_time': src['time'],
                'size': src['size']}
               for src in sources]
    send_listing(httpRequest, '/logs', listing, '/')
def run(process_id, process_num, unique_id):
    # Train one randomly-configured network in a worker process and
    # return [config, results]. NOTE: the sequence of random() calls
    # below determines the configuration, so statement order matters.
    print 'process %d start ...' % process_id
    # Build a random symmetric smoothing kernel around 1.5 (2-4 pairs
    # of small random taps) and normalise it to sum to 1.
    kernal = array([1.5])
    step = int(random.random() * 3) + 2
    for j in range(step):
        kernal = hstack((array([random.random() * 0.41 - 0.2]), kernal))
        kernal = hstack((kernal, array([random.random() * 0.41 - 0.2])))
    kernal = kernal / kernal.sum()
    config_network = {
        'layer_size': [500, 400 + int(random.random() * 201), 1],  # layer structure of the network
        'iter_times': 60 + int(random.random() * 100),  # number of training iterations
        'lamda': random.random() * 2,  # regularisation lambda parameter
        'kernal': kernal,  # matrix used to convolve the data during preprocessing
    }
    master_config = {
        'data': configLoad.config(),
        'network': config_network,
        'id': unique_id,
        'record': False,
    }
    o_master = master.Master(master_config)
    o_master.run()
    _bpNN = o_master.getNN()
    # Collect the trained network's records for the caller.
    run_result = {
        'training_record': _bpNN.trainingRecord,
        'test_record': _bpNN.testRecord,
        'original_data': _bpNN.originalData,
        'pred': _bpNN.pred
    }
    # Append a progress marker for the Spark driver.
    funcUtil.recordSparkStatus(unique_id, str(process_num + 2) + '\n', 'a')
    print 'process %d finish' % process_id
    return [master_config, run_result]
def jobPriority(self, httpRequest):
    """Set a job's priority and reorder the job queue.

    Expects GET parameters `jobUUID` and an integer `priority`.
    Replies with a JSON result status.
    """
    try:
        jobUUID = httpRequest.GET['jobUUID']
        priority = int(httpRequest.GET['priority'])
    except (KeyError, ValueError, TypeError):
        # Missing or non-numeric parameters (the original bare
        # `except:` hid every other error too).
        _send_json(httpRequest, {'result': 'fail'})
        return
    render_server = master.Master().getRenderServer()
    job = render_server.getJob(jobUUID)
    answer = {'result': 'ok'}
    if job is None:
        answer['result'] = 'fail'
    else:
        job.setPriority(priority)
        # Queue order depends on priority, so re-sort it now.
        render_server.reorderJobs()
    _send_json(httpRequest, answer)
def unregisterNode(self, node):
    """ Unregister render node

    Requeues every task the node was running, removes the node's log
    source and drops the node from the tables. Returns True.
    """
    with self.jobs_lock:
        # Put the node's in-flight tasks back on their jobs so other
        # nodes can pick them up.
        tasks = node.getTasks()
        for task in tasks:
            job = self.jobs_hash.get(task['jobUUID'])
            task_nr = task['task_nr']
            Logger.log('Restart task {0} of job {1}' . format(task_nr, job.getUUID()))
            job.restartTask(task_nr)
    with self.nodes_lock:
        # Drop the node's log source and both lookup structures.
        log_server = master.Master().getLogServer()
        log_server.removeSource('node-' + node.getUUID())
        self.nodes.remove(node)
        del self.nodes_hash[node.getUUID()]
        Logger.log('Node {0} unregistered'.format(node.getUUID()))
        return True
def frames_listing(httpRequest, jobName):
    """Send the listing of rendered frames for one job."""
    jobName = jobName[4:]  # strip the 'job-' prefix
    render_server = master.Master().getRenderServer()
    job = render_server.getJob(jobName)
    if job is None:
        httpRequest.send_error(404, 'Not found')
        return
    out_dir = os.path.join(job.getStoragePath(), 'out')
    names = job.getRenderFiles()
    names.sort(key=lambda fname: fname.lower())
    listing = []
    for name in names:
        fullname = os.path.join(out_dir, name)
        # Entries that are not regular files get zero size/time.
        size, unix_time = 0, 0
        if os.path.isfile(fullname):
            st = os.stat(fullname)
            size, unix_time = st[stat.ST_SIZE], st[stat.ST_MTIME]
        listing.append({'name': name, 'unix_time': unix_time, 'size': size})
    send_listing(httpRequest, '/renders/job-' + jobName, listing, '/renders')
def jobs_listing(httpRequest):
    """Send the directory listing of all jobs (running and completed)."""
    render_server = master.Master().getRenderServer()
    all_jobs = render_server.getJobs() + render_server.getCompletedJobs()
    all_jobs.sort(key=lambda job: job.getUUID())
    listing = [{'name': 'job-' + job.getUUID(),
                'is_dir': True,
                'unix_time': job.getStartTime(),
                'size': 0}
               for job in all_jobs]
    send_listing(httpRequest, '/renders', listing, '/')