def simulateCombat(self, fleet1, fleet2, bonus1=None, bonus2=None, ressources=None):
    """Run a battle simulation between two fleets and return the parsed report.

    fleet1/fleet2 map ship ids to counts. bonus1/bonus2 are three-element
    bonus lists for each side; ressources are the defender's three resource
    amounts (presumably metal/crystal/deuterium -- TODO confirm).
    Returns the result of CombatReport.analyzeCombatReport on the report page.
    """
    # Mutable default arguments are shared between calls; create fresh
    # per-call defaults instead (the original used `=[0, 0, 0]` directly).
    if bonus1 is None:
        bonus1 = [0, 0, 0]
    if bonus2 is None:
        bonus2 = [0, 0, 0]
    if ressources is None:
        ressources = [0, 0, 0]
    payload = {}
    # Defender-side resource fields (ids 901..903).
    payload["battleinput[0][1][901]"] = ressources[0]
    payload["battleinput[0][1][902]"] = ressources[1]
    payload["battleinput[0][1][903]"] = ressources[2]
    # Bonus fields (ids 109..111) for side 0 and side 1.
    payload["battleinput[0][0][109]"] = bonus1[0]
    payload["battleinput[0][0][110]"] = bonus1[1]
    payload["battleinput[0][0][111]"] = bonus1[2]
    payload["battleinput[0][1][109]"] = bonus2[0]
    payload["battleinput[0][1][110]"] = bonus2[1]
    payload["battleinput[0][1][111]"] = bonus2[2]
    # Ship counts for both fleets.
    for shipId, count in fleet1.items():
        payload["battleinput[0][0][" + str(shipId) + "]"] = count
    for shipId, count in fleet2.items():
        payload["battleinput[0][1][" + str(shipId) + "]"] = count
    simulatorReq = Request(self.battleSimulatorPage, payload)
    self.execRequest(simulatorReq)
    # The simulator response body is the report id; strip quotes before use.
    rapportReq = Request(
        self.battleRapportPage + simulatorReq.content.replace('"', ''), payload)
    self.execRequest(rapportReq)
    return CombatReport.analyzeCombatReport(rapportReq.content)
def __init__(self):
    """Initialize per-system scheduling/batching state.

    Buffers are sized for the larger of the VGG and ResNet configurations so
    one instance can serve either model.
    """
    # Hoist the repeated max(...) computations (the original recomputed each
    # one up to eight times).
    max_layers = max(LAYER_SIZE_VGG, LAYER_SIZE_RES)
    max_groups = max(GROUP_NUM_VGG, GROUP_NUM_RES)
    max_batch = max(BATCH_SIZE_VGG, BATCH_SIZE_RES)
    self.system_label = 0
    self.num_ini_request = 0
    self.num_layer = 0
    self.cur_layer = 0
    self.num_shared_layers = 0
    self.num_shared_layer_groups = 0
    self.cur_batch = [0] * max_layers
    self.group_batch = [0] * max_groups
    self.shared_layer_group_idx = [0] * 10
    self.num_old_request = 0
    self.num_new_request = 0
    self.ini_end_idx = 0
    self.new_end_idx = 0
    self.num_out = 0
    self.re_evaluate = False
    self.total_timestamp = 0.0
    # Per-batch timing matrices: measured vs. predicted, per layer and per group.
    self.batch_matrix = [[0.0] * max_layers for _ in range(max_batch)]
    self.batch_matrix_pred = [[0.0] * max_layers for _ in range(max_batch)]
    self.group_batch_matrix = [[0.0] * max_groups for _ in range(max_batch)]
    self.group_batch_matrix_pred = [[0.0] * max_groups for _ in range(max_batch)]
    # Pre-allocated request pools.
    self.ini_req_list = [Request() for _ in range(QUEUE_SIZE)]
    self.new_req_list = [Request() for _ in range(QUEUE_SIZE)]
def add_up(self, request, floor, user):
    """Create a prioritized Request and enqueue it on the up queue.

    Passengers get priority 2; operators and firefighters get priority 0.
    Raises ValueError for an unknown user type.
    """
    if user == "passenger":
        priority = 2
    elif user in ("operator", "firefighter"):
        priority = 0
    else:
        # The original left `new_request` unbound here, raising a confusing
        # NameError; fail explicitly instead.
        raise ValueError("unknown user type: " + repr(user))
    self.up_queue.put(Request(priority, request, floor, user))
def get_start_requests(self):
    """Build the initial Request list from self.start_urls.

    Metas are attached only when there is exactly one meta per start URL.
    If START_FROM_LIKE_URL is set, skip every request before the first URL
    containing that substring.
    """
    if len(self.metas) == len(self.start_urls):
        requests = [
            Request(url, meta=meta.copy())
            for url, meta in zip(self.start_urls, self.metas)
        ]
    else:
        requests = [Request(url) for url in self.start_urls]
    if self.START_FROM_LIKE_URL:
        for position, candidate in enumerate(requests):
            if self.START_FROM_LIKE_URL in candidate.url:
                return requests[position:]
    return requests
def add_request(self, request, floor, user):
    """Create a prioritized Request and route it to the up or down queue.

    Passengers get priority 2; operators and firefighters get priority 0.
    Raises ValueError for an unknown user type.
    """
    if user == "passenger":
        priority = 2
    elif user in ("operator", "firefighter"):
        priority = 0
    else:
        # The original left `new_request` unbound here, raising a confusing
        # NameError; fail explicitly instead.
        raise ValueError("unknown user type: " + repr(user))
    new_request = Request(priority, request, floor, user)
    # Requests below the elevator's current floor go down; the rest go up.
    if floor < self.get_floor():
        # Added the missing space before "added" in the log message.
        print("Floor " + str(floor) + " added to down queue")
        self.down_queue.put(new_request)
    else:
        print("Floor " + str(floor) + " added to up queue")
        self.up_queue.put(new_request)
def perform_put(self, *data):
    """Handle a PUT: forward to the coordinator, or coordinate the write here.

    data[0] is the request object. If this node is not in the key's
    preference list the request is forwarded (FORWARD-PUT) and retried
    against the next candidate on timeout; otherwise the local vector clock
    is merged into the value and the write is broadcast to replicas.
    """
    # Renamed the original local `dict`, which shadowed the builtin.
    req = data[0]
    key = req.key
    preference_list = self.hash_ring.get_node(key, self.failed_nodes)
    if self not in preference_list:
        coordinator = preference_list[0].id
        req = Request("FORWARD-PUT", req.key, req.value,
                      generate_random_number(), req.client)
        Messaging.send_message(self, coordinator, req)
        # Give the coordinator ~3s to acknowledge before declaring it failed.
        time.sleep(3)
        if REQUESTS.get(req.request, False) != True:
            #print("Timedout PUT")
            req.action = "PUT"
            req.request = generate_random_number()
            self.socket.settimeout(None)
            self.failed_nodes.append(coordinator)
            # Retry; get_node will now skip the failed coordinator.
            self.perform_put(req)
    else:
        # This node coordinates: bump our clock, merge with the incoming
        # value's clock, and fan the write out to the preference list.
        self.vector_clock.update(self.id, self.get_sequence_no())
        metadata = deepcopy(self.vector_clock)
        if req.value[1] > metadata:
            metadata = req.value[1]
        req.value = (req.value[0], metadata)
        req.request = generate_random_number()
        Messaging.broadcast_put(self, preference_list, req)
def application(environ, start_response):
    """WSGI entry point: match PATH_INFO against `patterns` and dispatch.

    A matching handler is called with the Request object plus any regex
    groups. Returns the handler's response, a 500 page on handler-arity
    mismatch, or a 404 page when no pattern matches.
    """
    path = environ.get('PATH_INFO', '/')
    print(path)
    # Site root directory, made available to handlers via environ.
    rootPath = os.getcwd()
    environ['root_path'] = rootPath
    # Build the request object.
    req = Request(environ, start_response)
    # Routing.
    for pattern, func in patterns:
        result = re.match(pattern, path)
        if result:
            if func.__code__.co_argcount == 1:
                return func(req)
            elif func.__code__.co_argcount == len(result.groups()) + 1:
                return func(req, *result.groups())
            else:
                # Handler arity mismatch is a server error: the original
                # reported '200 ok' with an invalid 'ContentType' header.
                start_response('500 Internal Server Error',
                               [('Content-Type', 'text/html')])
                return ["服务器内部错误,请检查代码".encode('utf8')]
    # No route matched: report a real 404 (was '200 ok').
    start_response('404 Not Found', [('Content-Type', 'text/html')])
    # Response body must be an iterable of byte strings.
    return ["<h1>404 Not found</h1>".encode('utf8')]
def retreive_key(self, *data):
    """Serve a key lookup: read from the local store and ACK the requester.

    data[0] is the request, data[1] the id of the node to answer.
    (Name typo "retreive" kept -- it is part of the public interface.)
    """
    # Random jitter of up to one second before answering.
    time.sleep(random.randint(0, 1000) / 1000)
    req, requester = data[0], data[1]
    stored_value = self.kv.get(req.key, None)
    ack = Request("ACK-GET", req.key, stored_value, req.request)
    Messaging.send_message(self, requester, ack)
def index():
    """Flask endpoint: register a tutoring request and return the first match.

    Reads {"kerb", "course"} from the JSON body, appends a new Request with a
    hard-coded availability (Monday 18:00), runs the matcher, and returns the
    kerb/course of the first fulfilled request as JSON.
    """
    # `pending_requests` is reassigned below, which makes it local to this
    # function; without this declaration the earlier `.append` raises
    # UnboundLocalError instead of using the module-level list.
    global pending_requests
    info = json.loads(request.data)
    fulfilled_requests = []
    # Hard-coded availability window -- TODO: take this from the request body.
    timesC = {
        "Monday": ["18"],
        "Tuesday": [],
        "Wednesday": [],
        "Thursday": [],
        "Friday": [],
        "Saturday": [],
        "Sunday": [],
    }
    kerb = info["kerb"]
    course = info["course"]
    pending_requests.append(Request(kerb, course, 5, timesC))
    pending_requests, fulfilled_requests = fulfill_request(
        pending_requests[0], pending_requests[1:], fulfilled_requests)
    # fulfilled_requests entries are pairs; element [1] is the matched request.
    req = fulfilled_requests[0][1]
    return_info = {
        "kerb": req.get_name(),
        "course": req.get_course(),
    }
    return json.dumps(return_info)
def simulateManyServer(num_secs, file_per_min, in_file, num_servers):
    """Simulate a round-robin print queue served by num_servers servers.

    Loads tasks from in_file (CSV: timestamp, name, size), assigns queued
    tasks to servers round-robin for num_secs simulated seconds, and prints
    the average wait time plus remaining queue size.
    """
    servers = [Server(file_per_min) for _ in range(num_servers)]
    print_queue = Queue()
    waiting_times = []
    with open(in_file) as lines:
        for line in lines:
            data = line.split(',')
            request = Request(int(data[0].strip()), data[1],
                              int(data[2].strip()))
            print_queue.enqueue(request)
    current_server = 0
    for current_second in range(num_secs):
        if (not servers[current_server].busy()) and (
                not print_queue.is_empty()):
            next_task = print_queue.dequeue()
            waiting_times.append(next_task.wait_time())
            servers[current_server].start_next(next_task)
        current_server = (current_server + 1) % len(servers)
        for server in servers:
            # BUG FIX: the original tested `server.busy` -- the bound method
            # object, which is always truthy -- so idle servers also ticked.
            if server.busy():
                server.tick()
    # Guard against an empty input file (ZeroDivisionError in the original).
    average_wait = (sum(waiting_times) / len(waiting_times)
                    if waiting_times else 0.0)
    print("Average Wait %6.2f secs %3d tasks remaining." %
          (average_wait, print_queue.size()))
def newArrival(arrivalTS, totalServiceTime, timeout):
    """Create the paired Arrival and Timeout events for one new request.

    The Timeout event fires `timeout` time units after the arrival.
    Returns (arrivalEvent, timeoutEvent) sharing the same Request.
    """
    req = Request(arrivalTS, totalServiceTime)
    return (Arrival(arrivalTS, req), Timeout(arrivalTS + timeout, req))
def publish_module_standard(module_name,
                            stdin=sys.stdin, stdout=sys.stdout,
                            stderr=sys.stderr, environ=os.environ, debug=0,
                            request=None, response=None):
    # Zope-style publisher entry point (Python 2 syntax: `except Exc, v`).
    # Builds default Request/Response objects when none are supplied, then
    # publishes `module_name`, mapping SystemExit/ImportError onto the
    # response's exception machinery.
    # NOTE(review): this block appears truncated -- `must_die`, `status`, and
    # `after_list` are set up, but the code that consumes them (the usual
    # finally/return tail of this function) is not visible here.
    must_die = 0
    status = 200
    after_list = [None]
    try:
        try:
            if response is None:
                response = Response(stdout=stdout, stderr=stderr)
            else:
                stdout = response.stdout
            if request is None:
                request = Request(stdin, environ, response)
            # make sure that the request we hand over has the
            # default layer/skin set on it; subsequent code that
            # wants to look up views will likely depend on it
            setDefaultSkin(request)
            response = publish(request, module_name, after_list,
                               debug=debug)
        except SystemExit, v:
            must_die = sys.exc_info()
            request.response.exception(must_die)
        except ImportError, v:
            # ImportError may arrive as a pre-built exc_info triple.
            if isinstance(v, tuple) and len(v) == 3:
                must_die = v
            elif hasattr(sys, 'exc_info'):
                must_die = sys.exc_info()
            else:
                must_die = SystemExit, v, sys.exc_info()[2]
            request.response.exception(1, v)
def list(req):
    """Django view: run image analysis on every stored image, then render list.html.

    For each ImageInfo row, decodes the base64 image payload, sends it
    through the Request analyzer, stores the logo/label/text/color results
    back on the row, and renders the refreshed queryset.
    (The view name `list` shadows the builtin but is kept -- renaming would
    break the URL configuration.)
    """
    images = models.ImageInfo.objects.all()
    for image in images:
        # Skip the first 23 chars -- presumably a "data:image/...;base64,"
        # data-URI prefix; TODO confirm against the stored format.
        image_bytes = base64.b64decode(image.imageUri[23:])
        analysis = Request(image_bytes)
        image.logo = analysis.response['logo']
        image.label = analysis.response['label']
        image.text = analysis.response['text']
        image.color = analysis.response['color']
        image.save()
    # For the image format refer to models.py in the myapp folder.
    # Renamed from `list` -- the original shadowed the builtin locally.
    results = models.ImageInfo.objects.all()
    context = {'result': results, 'STATIC_URL': settings.STATIC_URL}
    return render(req, 'list.html', context)
def handoff(self):
    """Hinted handoff: replay buffered writes to each node awaiting sync.

    For every node in check_for_sync, sends a single SYNC message carrying
    all buffered keys and values held on its behalf.
    """
    for target in self.check_for_sync:
        buffered = self.sync_kv[target]
        sync_msg = Request("SYNC",
                           list(buffered.keys()),
                           list(buffered.values()),
                           generate_random_number())
        Messaging.send_message(self, target, sync_msg)
def rename(self, name):
    """Try to rename this planet via the renaming page.

    Returns True (and updates self.name) when the server answers HTTP 200,
    False otherwise.
    """
    url = self.player.ia.renamingPage + name + "&cp=" + str(self.id)
    renameReq = Request(url, {})
    self.player.ia.execRequest(renameReq)
    if renameReq.response.status_code != 200:
        return False
    self.name = name
    return True
def broadcast_get(from_node, node_list, msg):
    # Coordinator-side fan-out for a quorum GET (Dynamo-style).
    # NOTE(review): structure reconstructed from a flattened source line --
    # confirm block boundaries against the original file.
    if (len(node_list) == 0):
        # Nobody left to ask: fail the client unless a write quorum of
        # answers already arrived.
        if len(HISTORY.get(msg.request, set())) < from_node.W:
            from_node.socket.sendto(pickle.dumps("FAILURE"),
                                    ('localhost', msg.client))
        return
    # Re-tag as an internal FETCH with a fresh request id.
    msg = Request("FETCH", msg.key, msg.value, generate_random_number(),
                  msg.client)
    nodes = []
    for node in node_list:
        nodes.append(node.id)
        #print("Preference list="+node.id+msg.action)
        Messaging.send_message(from_node, node.id, msg)
    cur_time = time.time()
    # Poll for up to ~3s while replies accumulate in HISTORY; once R replies
    # exist, reconcile and answer the client exactly once.
    while int(time.time() - cur_time) < 3:
        if not REQUESTS.get(msg.request, False) and len(
                HISTORY.get(msg.request, set())) >= from_node.R:
            result = list()
            for id, val in HISTORY[msg.request]:
                if val != None:
                    result += val
            #print([(number,vector.clock) for number,vector in result])
            result = from_node.perform_syntactic_reconcilation(result)
            from_node.socket.sendto(pickle.dumps(result),
                                    ('localhost', msg.client))
            #for num,clocks in result:
            #    print(str(msg.key)+" "+str(num)+" "+str(clocks.clock))
            # Mark the request answered so we reply only once.
            REQUESTS[msg.request] = True
    # Nodes that never replied are marked failed; retry the read elsewhere.
    readers = set([id for id, val in HISTORY[msg.request]])
    failed_nodes = set(nodes) - readers
    from_node.failed_nodes += list(failed_nodes)
    Messaging.retry_get_request(from_node, failed_nodes, msg, readers)
def scanSystem(self, galaxy, system):
    #TODO add check for not enough deut
    # Fetch the galaxy view for (galaxy, system), parse which slots hold a
    # planet, and cache the scan on every owned planet in that system.
    # Returns the list of Planet objects found.
    payload = {}
    payload["galaxy"] = galaxy
    payload["system"] = system
    scanSystemRequest = Request(
        self.player.ia.galaxyPage + "&cp=" + str(self.id), payload)
    self.player.ia.execRequest(scanSystemRequest)
    soup = BeautifulSoup(scanSystemRequest.content, "html.parser")
    #parse all available locations
    divContent = soup.find("div", id="content")
    systemTable = divContent.find("table", recursive=False)
    locationList = systemTable.find_all("tr")[2:-5]
    #the first 2 and last 5 are headers
    planets = []
    locationNumber = 0
    for location in locationList:
        locationNumber += 1
        tdList = location.find_all("td")
        # An occupied slot is detected by the first cell lacking a link --
        # presumably empty slots carry a colonization link; verify.
        if tdList[0].a is None:
            # if there is a planet in this location
            nameWithActivity = tdList[2].text
            #TODO remove activity from the name
            planet = Planet(None, nameWithActivity,
                            [galaxy, system, locationNumber, 1], None)
            planets.append(planet)
            #TODO add moon
    # Remember this scan on every owned planet located in the same system.
    for planet in self.player.planets:
        if planet.pos[0] == galaxy and planet.pos[1] == system:
            planet.lastKnownSystem = planets
    return planets
def getSize(self):
    """Scrape this planet's used and maximum field counts from the overview page.

    Sets self.sizeUsed and self.sizeMax; the counters are located by their
    (French) tooltip titles in the page markup.
    """
    overviewReq = Request(self.player.ia.overviewPage + "&cp=" + str(self.id), {})
    self.player.ia.execRequest(overviewReq)
    soup = BeautifulSoup(overviewReq.content, "html.parser")
    used_node = soup.find(attrs={"title": 'Cases occupées'})
    max_node = soup.find(attrs={"title": 'Cases max. disponibles'})
    self.sizeUsed = int(used_node.text)
    self.sizeMax = int(max_node.text)
def read(self): """ split the log file into "blobs" which are defined as chunks of text separated by a blank line. if the blob contains output from the RequestProcessor, create a Request object append Requests only if they pass filters (if defined) """ s = open(self.path, 'r').read() print "reading from %s" % self.path blobs = s.split("\n\n") print "processing %d blobs" % len(blobs) ## requestHeaderPattern = "DCS RequestProcessor: PROCESS" requestHeaderPattern = "org.apache.struts.action.RequestProcessor process" requests = [] for blob in blobs: line1 = blob.split("\n")[0] if string.find(line1, requestHeaderPattern) != -1: try: request = Request(blob) requests.append(request) except: print "failed to contstruct Request:", sys.exc_type, sys.exc_value continue else: ## print "%s not found" % requestHeaderPattern ## print "\tfirst line: \n%s" % line1 pass self.requests = requests
def upgrade(self, planetId):
    """Queue this technology for research on the given planet.

    Returns the executed Request, or None when this tech has no id.
    """
    if self.id is None:
        return None
    payload = {'cmd': 'insert', 'tech': self.id}
    researchReq = Request(
        self.player.ia.researchPage + "&cp=" + str(planetId), payload)
    self.player.ia.execRequest(researchReq)
    return researchReq
def case_2():
    """One step of the interactive prompt: read a command and act on it.

    Supported commands: "exit", "album=<name>", "artist=<name>", and "run"
    (which builds the query, checks for duplicates, fetches album info, and
    posts it).
    """
    global strAlbum
    global strArtist
    completer = WordCompleter(["run", "exit", "album=", "artist="])
    tokens = session.prompt(">>>", completer=completer).split()
    command = tokens[0]
    if command == "exit":
        print("Goodbye")
        sys.exit(1)
    elif command == "album=":
        strAlbum = " ".join(tokens[1:])
    elif command == "artist=":
        strArtist = " ".join(tokens[1:])
    elif command == "run":
        print("building Query...")
        urlObj = UrlBuilder()
        reqObj = Request()
        # Abort when the album was already added previously.
        if reqObj.checkAlbumAdded(strAlbum, strArtist) == True:
            print("Error: Album already added.")
            sys.exit(1)
        strQuery = urlObj.getUrlAlbum(strAlbum, strArtist)
        print("Query: " + strQuery)
        albumObj = reqObj.getAblumInfo(strQuery)
        postUrl = urlObj.postUrlCreate(albumObj)
        reqObj.postAlbumInfo(postUrl)
def broadcast_put(from_node, node_list, msg):
    # Coordinator-side fan-out for a quorum PUT (Dynamo-style).
    # NOTE(review): structure reconstructed from a flattened source line --
    # confirm block boundaries against the original file.
    if (len(node_list) == 0):
        # Nobody left to write to: fail unless a W-quorum already acked,
        # or confirm success when the request was already answered.
        if len(HISTORY.get(msg.request, set())) < from_node.W:
            from_node.socket.sendto(pickle.dumps("FAILURE"),
                                    ('localhost', msg.client))
        elif REQUESTS.get(msg.request, False) == True:
            from_node.socket.sendto(pickle.dumps("SUCCESS"),
                                    ('localhost', msg.client))
        return
    # Re-tag as an internal STORE with a fresh request id.
    msg = Request("STORE", msg.key, msg.value, generate_random_number(),
                  msg.client)
    nodes = []
    for node in node_list:
        nodes.append(node.id)
        #print("Preference list="+node.id+msg.action + "from "+from_node.id +" "+str(msg.key)+":"+str(msg.value[0]))
        Messaging.send_message(from_node, node.id, msg)
    cur_time = time.time()
    # Poll for up to ~3s; once W acks accumulate in HISTORY, tell the client
    # exactly once.
    while int(time.time() - cur_time) < 3:
        if not REQUESTS.get(msg.request, False) and len(
                HISTORY.get(msg.request, set())) >= from_node.W:
            #send client success message
            from_node.socket.sendto(pickle.dumps("SUCCESS"),
                                    ('localhost', msg.client))
            REQUESTS[msg.request] = True
    # Count the coordinator itself as a writer, then retry whoever missed.
    HISTORY.get(msg.request, set()).add(from_node.id)
    failed_nodes = set(nodes) - HISTORY[msg.request]
    from_node.failed_nodes = from_node.failed_nodes + list(failed_nodes)
    #print("FAILED NODES "+str(from_node.failed_nodes))
    Messaging.retry_put_request(from_node, failed_nodes, msg,
                                HISTORY[msg.request])
def produce_on_request_type(self, request_type): request_list = [] # produce arrival time list arrival_time_list = [] prev_sum = 0 while prev_sum < self.total_time: arrival_slot = np.random.exponential(request_type.arrival_rate) prev_sum += arrival_slot arrival_time_list.append(prev_sum) # produce service time list service_time_list = [np.random.exponential(request_type.service_rate) \ for _ in arrival_time_list] for arrival_time, service_time in \ zip(arrival_time_list, service_time_list): id = str(arrival_time) + '-' + str(request_type.id) request_list.append(Request(id,\ request_type.source, request_type.sink, \ arrival_time, request_type.bandwidth_list[0], \ service_time, request_type)) if not request_type.isStatic: # produce scale request # initial bandwidth current_bandwidth = random.choice(request_type.bandwidth_list) prev_sum = 0 while prev_sum < service_time: if current_bandwidth > request_type.bandwidth_list[0]: scale_bw = current_bandwidth- \ request_type.bandwidth_list[0] # service time for scale request scale_service_time = \ np.random.exponential(request_type.switch_rate_list[1]) request_list.append(Request(id,request_type.source, \ request_type.sink, \ arrival_time+prev_sum, scale_bw, \ scale_service_time, request_type, True)) prev_sum += scale_service_time current_bandwidth = request_type.bandwidth_list[0] else: lower_bw_service_time = \ np.random.exponential(request_type.switch_rate_list[0]) prev_sum += lower_bw_service_time current_bandwidth = request_type.bandwidth_list[1] return request_list
def upd_time(self, dt):
    """Advance this worker's clock by dt.

    Returns a completed Request when the current job finishes within dt
    (and draws the next job duration), otherwise None.
    """
    self.time_to_finish -= dt
    if self.time_to_finish > 1e-5:
        return None
    # Current job done: schedule the next one and report completion.
    self.time_to_finish = self.work_time_distribution.generate()
    return Request()
def generate_request(t, always_create=False):
    """Maybe spawn a delivery request at time t.

    With probability a_r (or unconditionally when always_create is True),
    returns a Request from a random position to a randomly chosen store;
    otherwise returns None.
    """
    if np.random.rand() < a_r or always_create:
        # np.random.randint's upper bound is EXCLUSIVE, so the original
        # `len(store_positions) - 1` could never select the last store.
        store = store_positions[np.random.randint(0, len(store_positions))]
        return Request(random_position(), t, store)
    return None
def get_request(requestId):
    """Rebuild a Request from its on-disk file.

    Reads the file named after requestId and reconstructs a Request from the
    first serialized line; returns None implicitly for an empty file.
    """
    path = requests_file_path + str(requestId) + suffix
    with open(path, 'r') as f:
        for line in f:
            # Each line is a Python literal:
            # [id, Tp, Ls, Pr, Ld, is_match_successful]
            fields = ast.literal_eval(line.rstrip("\n"))
            r = Request(fields[1], fields[2], fields[3], fields[4], fields[5])
            r.id = fields[0]
            # Only the first line is ever used.
            return r
def _handle_socket(self, client_socket, client_addr):
    # Handle one inbound connection: read up to BUFFER_SIZE bytes, verify
    # the request, and whitelist the client until `deadline` when valid.
    request = client_socket.recv(self.BUFFER_SIZE)
    print("Accept %s:%d" % client_addr)
    # `type` shadows the builtin; it indexes the canned response table.
    (type, verified, deadline) = self._verify_request(request)
    if verified:
        self._allow_client(Request(client_addr, deadline))
    # NOTE(review): flattened source -- assuming the response is sent and the
    # socket closed for every request, not only verified ones; confirm
    # against the original indentation.
    client_socket.send(self.response[type])
    client_socket.close()
def upgrade(self):
    """Queue a construction order for this building on its planet.

    Returns the executed Request, or None when this building has no id.
    """
    if self.id is None:
        return None
    payload = {'cmd': 'insert', 'building': self.id}
    buildReq = Request(
        self.planet.player.ia.buildingPage + "&cp=" + str(self.planet.id),
        payload)
    self.planet.player.ia.execRequest(buildReq)
    return buildReq
def main():
    # Demo driver for the memory manager: queue several allocation requests
    # (Request(id, size)), then step the allocator until the queue drains.
    memory=MemoryManagement()
    memory.makeMemory()
    r1=Request(1,2)
    r2=Request(2,10)
    r3=Request(3,30)
    r4=Request(4,32)
    r5=Request(5,31)
    r6=Request(6,29)
    # NOTE(review): r6 is immediately reassigned, so Request(6,29) is never
    # queued -- this looks like a typo for a seventh request (r7).
    r6=Request(6,2)
    #Add Requests
    memory.addMemRequest(r1)
    memory.addMemRequest(r2)
    memory.addMemRequest(r3)
    memory.addMemRequest(r4)
    memory.addMemRequest(r5)
    memory.addMemRequest(r6)
    print(memory.printRequests())
    print(memory.printMemory())
    #Main loop allocates and deallocates memory
    while memory.requests.length()!=0:
        memory.memsize()
        print(memory.printMemory())
    # NOTE(review): duplicated print -- presumably a final state dump after
    # the loop; confirm the original indentation.
    print(memory.printMemory())
def resurrect(self, node):
    """Ask a previously-failed node to come back by sending it a REVIVE message."""
    revive_msg = Request("REVIVE", None)
    Messaging.send_message(self, node, revive_msg)

# client = Client(1)
# client.put_req('x',1)
# time.sleep(1)
# client.get_req('x')