def put(self, path, content_length, body):
    """Store the request body as a file at *path* under the served root.

    Returns CREATED when the file did not exist before, NO_CONTENT on
    overwrite, and FORBIDDEN / NOT_ALLOWED / CONFLICT for invalid targets.
    """
    filename = os.path.abspath(os.path.join(self.root, path.strip("/")))
    if not filename.startswith(self.root):
        # Path escaped the served root (e.g. via "..") -- refuse.
        return response.Response(status.FORBIDDEN)
    elif os.path.isdir(filename):
        # Cannot PUT on top of an existing collection.
        return response.Response(status.NOT_ALLOWED)
    elif not os.path.isdir(os.path.dirname(filename)):
        # Parent collection does not exist.
        return response.Response(status.CONFLICT)
    created = not os.path.exists(filename)
    # BUG FIX: the original used open()/close() without try/finally and
    # leaked the file handle if body.read() or f.write() raised; "with"
    # guarantees the file is closed on every path.
    with open(filename, "wb") as f:
        if content_length:
            remaining = content_length
            while remaining > 0:
                buf = body.read(min(remaining, BLOCK_SIZE))
                if not buf:
                    # Client sent less data than Content-Length promised.
                    break
                f.write(buf)
                remaining -= len(buf)
    if created:
        return response.Response(status.CREATED)
    return response.Response(status.NO_CONTENT)
def get(self, path, with_body=True):
    """Serve the resource at *path*.

    Directories render as an HTML collection listing; regular files are
    streamed with a FileIterator.  When *with_body* is false (HEAD-style
    request) only headers are produced.
    """
    target = os.path.abspath(os.path.join(self.root, path.strip("/")))
    if not target.startswith(self.root):
        # The resolved path escaped the served root.
        return response.Response(status.FORBIDDEN)
    if not os.path.exists(target):
        return response.Response(status.NOT_FOUND)
    if os.path.isdir(target):
        listing = None
        size = "0"
        if with_body:
            listing = self._get_collection(target)
            size = str(len(listing))
        headers = {"Content-Type": "text/html", "Content-Length": size}
        return response.Response(status.OK, headers,
                                 [listing] if with_body else None)
    info = os.stat(target)
    mime = mimetypes.guess_type(target)[0] or "application/octet-stream"
    headers = {
        "Content-Type": mime,
        "Content-Length": str(info.st_size)
    }
    return response.Response(status.OK, headers,
                             FileIterator(target) if with_body else None)
def put_request(self, data):
    """PUT *data* (JSON-encoded) to the configured url.

    Stores the outcome dict in self.response and returns it.  Network
    failures are caught and reported through the error response shape.
    """
    _response = response.Response()
    try:
        if not self.__url:
            self.response = _response.error_response(
                'The url is empty, please provide an url', [])
        else:
            req = requests.put(self.__url, data=json.dumps(data),
                               headers=self.__headers)
            if req.status_code == 200:
                self.response = _response.success_response(
                    'success', req.json())
            else:
                # BUG FIX: the original did
                #   self.response['error'] = _response.error_response(...)
                # which left stale data in the dict and returned a shape
                # different from every other branch; assign the error
                # response directly, consistent with the rest of the class.
                self.response = _response.error_response(
                    'Fail with a status different than 200, check the data for more info',
                    req.json())
        return self.response
    except requests.exceptions.RequestException as e:
        self.response = _response.error_response(repr(e), [])
        return self.response
def read(self, address, current_step): r = None #Check if this is main memory #Main memory is always a hit if not self.next_level: r = response.Response({self.name: True}, self.hit_time) else: #Parse our address to look through this cache block_offset, index, tag = self.parse_address(address) #Get the tags in this set in_cache = list(self.data[index].keys()) #If this tag exists in the set, this is a hit if tag in in_cache: r = response.Response({self.name: True}, self.hit_time) else: #Read from the next level of memory r = self.next_level.read(address, current_step) r.deepen(self.write_time, self.name) if self.policy == 'Inclusive': print('Inclusive cache') #TODO: your code here elif self.policy == 'Exclusive': print('Exclusive cache') #TODO: your code here else: #If there's space in this set, add this block to it if len(in_cache) < self.associativity: self.data[index][tag] = block.Block( self.block_size, current_step, False, address) else: #Find the oldest block and replace it oldest_tag = in_cache[0] for b in in_cache: if self.data[index][b].last_accessed < self.data[ index][oldest_tag].last_accessed: oldest_tag = b #Write the block back down if it's dirty and we're using write back if self.write_back: if self.data[index][oldest_tag].is_dirty(): self.logger.info('\tWriting back block ' + address + ' to ' + self.next_level.name) temp = self.next_level.write( self.data[index][oldest_tag].address, True, current_step) r.time += temp.time #Delete the old block and write the new one del self.data[index][oldest_tag] self.data[index][tag] = block.Block( self.block_size, current_step, False, address) return r
def handle(self, method, host, path, headers, body):
    """Dispatch *method* to the matching ``do_<method>`` handler.

    Returns NOT_IMPLEMENTED when no handler exists; a handler crash is
    logged to stderr and mapped to a 500 response.
    """
    func = getattr(self, "do_" + method, None)
    if func is None:
        return response.Response(status.NOT_IMPLEMENTED)
    try:
        return func(host, path, headers, body)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit
        # and KeyboardInterrupt; catch only real errors.
        import traceback
        print(traceback.format_exc(), file=sys.stderr)
        # NOTE(review): the constant is spelled INTERVAL_SERVER_ERROR in
        # the project's status module; spelling kept to match it.
        return response.Response(status.INTERVAL_SERVER_ERROR)
def __init__(self, url, headers=None):
    """Configure the client for *url* with optional header dict.

    Raises ValueError when *headers* is given but is not a dict.
    """
    self.__url = url
    self.__payload = {}
    self.response = {}
    # BUG FIX: the original never set self.__headers when headers was
    # None, so any later request (which reads self.__headers) raised
    # AttributeError; default to an empty dict.
    self.__headers = {}
    if headers is not None:
        if isinstance(headers, dict):
            self.__headers = headers
            self.response = response.Response().success_response('ok', [])
        else:
            self.response = response.Response().error_response(
                'The headers attribute must be a dictionary', [])
            raise ValueError('The headers attribute must be a dictionary')
def mkcol(self, path):
    """Create a new collection (directory) at *path* under the root."""
    target = os.path.abspath(os.path.join(self.root, path.strip("/")))
    if not target.startswith(self.root):
        # Resolved path escaped the served root.
        return response.Response(status.FORBIDDEN)
    if os.path.exists(target):
        # MKCOL on an existing resource is not allowed.
        return response.Response(status.NOT_ALLOWED)
    if not os.path.isdir(os.path.dirname(target)):
        # The parent collection must already exist.
        return response.Response(status.CONFLICT)
    os.mkdir(target)
    return response.Response(status.CREATED, {}, None)
def delete(self, path):
    """Remove the resource at *path*: files are unlinked, directories are
    deleted recursively.  Missing resources yield NOT_FOUND."""
    target = os.path.abspath(os.path.join(self.root, path.strip("/")))
    if not target.startswith(self.root):
        # Refuse anything that escapes the served root.
        return response.Response(status.FORBIDDEN)
    # Check isfile/isdir before the existence test so that special files
    # (sockets, fifos) fall through untouched, as in the original.
    if os.path.isfile(target):
        os.remove(target)
    elif os.path.isdir(target):
        shutil.rmtree(target)
    elif not os.path.exists(target):
        return response.Response(status.NOT_FOUND)
    return response.Response(status.NO_CONTENT)
def executeRequest(self, action, post_data):
    """POST *post_data* to url+action via pycurl and return a Response.

    Python 2 code (StringIO, urllib.urlencode).  On an HTTP error,
    FAILONERROR leaves the body empty, so curl's error string is returned
    as the content with a FAILURE status.
    """
    curl = pycurl.Curl()  # main handler for Curl actions
    # Setting request params
    self.action_url = self.url + action
    curl.setopt(pycurl.URL, self.action_url)
    curl.setopt(pycurl.FAILONERROR, True)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.TIMEOUT, 30)
    curl.setopt(pycurl.POST, True)
    if self.verifySSL:
        curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        curl.setopt(pycurl.SSL_VERIFYPEER, 1)
    curl.setopt(pycurl.POSTFIELDS, urllib.urlencode(post_data))
    # BUG FIX: renamed from "buffer", which shadows the Python 2 builtin
    # of the same name.
    out = StringIO.StringIO()
    curl.setopt(pycurl.WRITEFUNCTION, out.write)
    curl.perform()  # Execute request
    content = out.getvalue()
    if content == "":
        status = response.FAILURE
        content = curl.errstr()
    else:
        status = response.SUCCESS
    out.close()
    curl.close()
    return response.Response(status, content)
def reevaluate(ndvs, nobj, eps):
    """Re-run the model on the archive for (ndvs, nobj, eps) and return a
    DataFrame of decision variables, raw outputs, and 10/3-objective sets.
    """
    model = response.Response()
    agg = Aggregates()
    pfpf = PFPF()
    table = get_data(ndvs, nobj, eps)
    names = dvnames(ndvs)
    dvs = list(table[names].values)
    if ndvs == 18:
        # Expand the reduced 18-DV encoding to the full 27-DV form.
        dvs = [aviation.twentyseven_from_eighteen_dvs(row) for row in dvs]
    dvs = [list(row) for row in dvs]
    outputs = [model.evaluate_wide(row) for row in dvs]
    ten = [list(agg.minmax(row)) for row in outputs]
    tenth = [pfpf.pfpf(row) for row in dvs]
    # BUG FIX: the loop variable was named "pfpf", shadowing the PFPF()
    # instance above; renamed so the instance stays usable.
    for row, pf_value in zip(ten, tenth):
        row.append(pf_value)
    three = [aviation.three_from_ten_objs(row) for row in ten]
    newtable = dvs
    for row, a, b, c in zip(newtable, outputs, ten, three):
        row.extend(a)
        row.extend(b)
        row.extend(c)
    names = dvnames(27) + outputnames() + objnames(10) + objnames(3)
    df = pandas.DataFrame(data=newtable, columns=names)
    minify(df)
    return df
def do_copy(self, host, path, headers, body):
    """COPY *path* to the location named by the Destination header.

    Only same-host destinations are supported; the Overwrite header must
    be "T" or "F" (default "T").
    """
    overwrite = headers.get("Overwrite", "T")
    if overwrite not in ("T", "F"):
        return response.Response(status.BAD_REQUEST)
    destination = headers.get("Destination")
    if not destination:
        return response.Response(status.BAD_REQUEST)
    target = urlparse.urlparse(destination)
    if target.netloc.lower() != host.lower():
        # Cross-host copies are not supported.
        return response.Response(status.BAD_GATEWAY)
    dest_path = urllib.unquote(target.path).decode("utf-8")
    return self.backend.copy(path, dest_path, overwrite == "T")
def do_options(self, host, path, headers, body):
    """Answer OPTIONS with every method this handler implements, derived
    from its do_* method names."""
    allowed = [name[3:].upper()
               for name in dir(self) if name.startswith("do_")]
    return response.Response(status.OK, {"Allow": ",".join(allowed)})
def check(self):
    """Poll self.url once and record the outcome.

    Appends a Response to self.responses and returns True when the HTTP
    status code is in self.ok_responses.  Unreachable or malformed URLs
    are recorded with the sentinel Response(-1, -1, now) and return False.
    """
    try:
        r = requests.get(self.url)
    except requests.exceptions.MissingSchema:
        # The URL itself is invalid -- remember that so we can stop retrying.
        self.responses.append(response.Response(-1, -1, time.time()))
        self.wrong_url = True
        return False
    except requests.exceptions.ConnectionError:
        # Could be our connectivity or theirs; count the website as down.
        self.responses.append(response.Response(-1, -1, time.time()))
        return False
    code = int(r.status_code)
    self.responses.append(
        response.Response(code, r.elapsed.total_seconds(), time.time()))
    return code in self.ok_responses
def _get_response(self, http_method, endpoint, data=None, files=None,
                  timeout=15, **request_kwargs):
    """Send the request and wrap the HTTP (or HTTP-error) response.

    Python 2 code (basestring, urllib2).  Structured payloads are
    JSON-serialized when the Content-Type header says application/json.
    """
    headers = request_kwargs.get("headers", {})
    wants_json = headers.get("Content-Type") == "application/json"
    if wants_json and not isinstance(data, basestring):
        data = json.dumps(data, default=json_encoder)
    request = self._get_request(http_method, endpoint, data, files,
                                **request_kwargs)
    try:
        http_response = urllib2.urlopen(request, timeout=timeout)
    except urllib2.HTTPError as e:
        # HTTPError is itself a file-like response object; pass it along.
        http_response = e
    return response.Response(http_response)
async def train_controller(self, request):
    """Start (or reuse) a training operation for the posted symbol and
    return its current status as JSON."""
    print("Train")
    symbol = (await request.post())["symbol"]
    print(symbol)
    # NOTE(review): keyed by the literal "Train" while the sibling
    # controllers use config.* constants -- confirm this matches the key
    # used when self.operations is initialized.
    resp = self.operations["Train"].get(symbol, None)
    if not resp:
        # First request for this symbol: register a shared Response and
        # spawn the behaviour that will fill it in.
        resp = response.Response()
        self.operations["Train"][symbol] = resp
        train_behaviour = self.RequestTrainBehaviour(symbol)
        self.add_behaviour(train_behaviour)
    # NOTE(review): unlike the decision/list controllers, a finished
    # training is never re-triggered here -- confirm that is intentional.
    return resp.get_json()
def __init__(self, action, params={}, *args, **kwargs):
    """Exception that carries a Response built from *action*.

    Python 2 code (uses im_self/im_class).
    NOTE(review): the mutable default for ``params`` is shared across
    calls; harmless only if Response never mutates it -- confirm.
    """
    # Imported inside the constructor, presumably to avoid a circular
    # import at module load time -- confirm against the package layout.
    import _app
    import response
    if not action.im_self:
        # Unbound method: instantiate its class and rebind the method to
        # that fresh instance before building the Response.
        obj = action.im_class()
        action = getattr(obj, action.__name__)
    self.response = response.Response(_app.app, action, params)
    Exception.__init__(self, *args, **kwargs)
def do_put(self, host, path, headers, body):
    """PUT handler: forward the body to the backend, requiring a
    Content-Length header (411 otherwise)."""
    content_length = headers.get("Content-Length")
    if content_length is None:
        return response.Response(status.LENGTH_REQUIRED)
    # gvfs/1.12.1 sends a Content-Length header with an empty value;
    # treat that as zero rather than failing int().
    length = int(content_length) if content_length else 0
    return self.backend.put(path, length, body)
def _handle_recv(self, rev_data): self._wait_pong_count = 0 #收到服务器数据,说明与服务器的连接正常,即清0 try: resp = response.Response(rev_data) handle_result, result_data = resp.handle(self._callback) if handle_result in self._handler.keys(): self._handler[handle_result](result_data) except Exception as e: if rev_data == 'No Heartbeat!': logger.warn('客户端与服务器断开连接,尝试重连...') self._reconnect() else: logger.error('客户端解析数据异常: %s' % (e))
def do_propfind(self, host, path, headers, body):
    """PROPFIND handler: query properties of *path* down to the requested
    depth, returning NOT_FOUND for unreadable resources."""
    # RFC 4918 defaults Depth to "infinity", but supporting infinity is
    # optional (performance/security), so this server defaults to 1.
    depth = int(headers.get("Depth", 1))
    # TODO if content-length 0 and no body => assume allprop
    length = int(headers.get("Content-Length", 0))
    request_xml = RequestXml(body.read(length))
    try:
        return self.backend.propfind(path, depth, request_xml)
    except IOError:
        return response.Response(status.NOT_FOUND)
async def list_controller(self, request):
    """Serve the list operation: start it on the first call and, on later
    calls, report the previous result (re-triggering if it finished)."""
    print("List")
    # NOTE(review): direct indexing -- assumes self.operations is
    # pre-seeded with config.LIST_OPERATION (e.g. mapped to None);
    # confirm in __init__, otherwise this raises KeyError.
    resp = self.operations[config.LIST_OPERATION]
    if not resp:
        # First request: create the shared Response and kick off the
        # behaviour that will fill it in.
        resp = response.Response()
        self.operations[config.LIST_OPERATION] = resp
        request_list_behaviour = self.RequestListBehaviour()
        self.add_behaviour(request_list_behaviour)
        return resp.get_json()
    else:
        # Snapshot the current state before possibly restarting the job,
        # so the caller sees the completed result exactly once.
        prev_resp: response.Response = copy(resp)
        if resp.status == response.Status.DONE:
            # Finished run: mark active again and schedule a refresh.
            resp.status = response.Status.ACTIVE
            request_list_behaviour = self.RequestListBehaviour()
            self.add_behaviour(request_list_behaviour)
        return prev_resp.get_json()
def handle_timeout(self):
    """CoAP retransmission handler: resend with exponential backoff until
    MAX_RETRANSMIT is reached, then give up and return to idle."""
    if(self.retransmitions < coap.MAX_RETRANSMIT-1):
        # Resend the same request and double the timeout (binary backoff).
        self.sock.sendto(self.coapRequest, self.servidor)
        self._changeTimeoutValue(self.base_timeout*2)
        self.retransmitions = self.retransmitions + 1
        self.reload_timeout()
        self.enable_timeout()
        print(self.retransmitions)
        print(self.timeout)
    else:
        # Retries exhausted: reseed the timeout with a randomized initial
        # value for the next exchange (per the CoAP ACK_RANDOM_FACTOR
        # rule), record a failure response (code -1), and go idle.
        self._changeTimeoutValue(random.uniform(coap.ACK_TIMEOUT,
                                                coap.ACK_TIMEOUT * coap.ACK_RANDOM_FACTOR))
        self.reload_timeout()
        self.disable_timeout()
        self.response = response.Response(0, 0, 0, 0, 0, -1)
        self.state = coap.idle
        self.retransmitions = 0
        self.disable()
async def decision_controller(self, request):
    """Serve a decision request for the posted symbol: start the operation
    on first call and, on later calls, report the previous result
    (re-triggering it when the previous run finished)."""
    print("Decision")
    symbol = (await request.post())["symbol"]
    print(symbol)
    resp = self.operations[config.DECISION_OPERATION].get(symbol, None)
    if not resp:
        # First request for this symbol: register a shared Response and
        # spawn the behaviour that will fill it in.
        resp = response.Response()
        self.operations[config.DECISION_OPERATION][symbol] = resp
        decision_behaviour = self.RequestDecisionBehaviour(symbol)
        self.add_behaviour(decision_behaviour)
        return resp.get_json()
    else:
        # Snapshot the state before possibly restarting the job, so the
        # caller sees the completed result exactly once.
        prev_resp: response.Response = copy(resp)
        if prev_resp.status == response.Status.DONE:
            # Finished run: mark active again and schedule a refresh.
            resp.status = response.Status.ACTIVE
            decision_behaviour = self.RequestDecisionBehaviour(symbol)
            self.add_behaviour(decision_behaviour)
        return prev_resp.get_json()
def evaluate(transform_dvs, transform_objs):
    """Read decision-variable rows from stdin, evaluate each through the
    model, and write the transformed objectives (plus the constraint
    violation) to stdout, one line per row.  Python 2 code (``unicode``).
    """
    model = response.Response()
    agg = Aggregates()
    pfpf = PFPF()
    # iter(readline, "") stops at EOF, matching the original while-loop.
    for line in iter(sys.stdin.readline, ""):
        dvs = transform_dvs(
            [float(tok) for tok in re.split("[ ,\t]", line.strip())])
        outputs = agg.convert_row(model.evaluate_wide(dvs))
        objectives = agg.minmax(outputs)
        objectives.append(pfpf.pfpf(dvs))
        objectives = transform_objs(objectives)
        objectives.append(agg.constr_violation(outputs))
        sys.stdout.write(" ".join([unicode(xx) for xx in objectives]) + "\n")
        sys.stdout.flush()
def http(sock):
    """Generator: accept connections on *sock* and yield a Response for
    each complete HTTP request received."""
    while True:
        client, addr = sock.accept()
        parser = HttpParser()
        chunks = []
        while True:
            data = client.recv(1024)
            if not data:
                break
            consumed = parser.execute(data, len(data))
            # The parser must accept everything we fed it.
            assert consumed == len(data)
            if parser.is_partial_body():
                chunks.append(parser.recv_body())
            if parser.is_message_complete():
                break
        yield response.Response(client, parser, ''.join(chunks), addr[0])
def move(self, src, dst, overwrite):
    """Move the resource at *src* to *dst* (a path under base_path).

    Honors *overwrite*: an existing destination yields PRECONDITION_FAILED
    unless overwrite is true.  Returns CREATED when the destination did
    not exist, NO_CONTENT when it was replaced.
    """
    if not dst.startswith(self.base_path):
        return response.Response(status.FORBIDDEN)
    source = os.path.join(self.root, src.strip("/"))
    source = os.path.abspath(source)
    destination = dst[len(self.base_path):]
    destination = os.path.join(self.root, destination.strip("/"))
    destination = os.path.abspath(destination)
    if not source.startswith(self.root) or not destination.startswith(
            self.root):
        # Either path escaped the served root.
        return response.Response(status.FORBIDDEN)
    elif source == destination:
        return response.Response(status.FORBIDDEN)
    elif not os.path.isdir(os.path.dirname(destination)):
        # Destination's parent collection must exist.
        return response.Response(status.CONFLICT)
    elif not overwrite and os.path.exists(destination):
        return response.Response(status.PRECONDITION_FAILED)
    created = not os.path.exists(destination)
    if os.path.isdir(destination):
        shutil.rmtree(destination)
    elif os.path.isfile(destination):
        os.remove(destination)
    # BUG FIX: files were moved with os.rename, which fails across
    # filesystems (the original's own TODO); shutil.move handles both
    # files and directories and falls back to copy+delete across devices.
    if os.path.exists(source):
        shutil.move(source, destination)
    if created:
        return response.Response(status.CREATED)
    return response.Response(status.NO_CONTENT)
def request(self):
    """Fetch *path* over HTTP and wrap the result in a Response.

    BUG FIX: the original assigned the result to a local named
    ``response``, shadowing the ``response`` module; since Python treats
    any name assigned in the function as local, ``response.Response``
    raised UnboundLocalError before the call could resolve.
    """
    req = requests.get(path)
    # NOTE(review): ``path`` is not defined in this method -- presumably a
    # module-level name, or it should be self.path; confirm with callers.
    result = response.Response(req)
    return result
def write(self, address, from_cpu, current_step):
    """Write *address* at this memory level.

    Respects the write policy: write-back marks the block dirty locally,
    write-through forwards to the next level.  Returns a Response with
    per-level hit flags and the accumulated time.
    """
    #wat is cache pls
    r = None
    if not self.next_level:
        # Main memory: always succeeds at this level's write cost.
        r = response.Response({self.name: True}, self.write_time)
    else:
        block_offset, index, tag = self.parse_address(address)
        in_cache = list(self.data[index].keys())
        if tag in in_cache:
            #Set dirty bit to true if this block was in cache
            self.data[index][tag].write(current_step)
            if self.write_back:
                r = response.Response({self.name: True}, self.write_time)
            else:
                #Send to next level cache and deepen results if we have write through
                self.logger.info('\tWriting through block ' + address +
                                 ' to ' + self.next_level.name)
                r = self.next_level.write(address, from_cpu, current_step)
                r.deepen(self.write_time, self.name)
        else:
            if self.policy == 'Inclusive':
                print('Inclusive cache')
                #TODO: your code here
            elif self.policy == 'Exclusive':
                print('Exclusive cache')
                #TODO: your code here
            else:
                if len(in_cache) < self.associativity:
                    #If there is space in this set, create a new block and set its dirty bit to true if this write is coming from the CPU
                    self.data[index][tag] = block.Block(
                        self.block_size, current_step, from_cpu, address)
                    if self.write_back:
                        r = response.Response({self.name: False},
                                              self.write_time)
                    else:
                        self.logger.info('\tWriting through block ' +
                                         address + ' to ' +
                                         self.next_level.name)
                        r = self.next_level.write(address, from_cpu,
                                                  current_step)
                        r.deepen(self.write_time, self.name)
                elif len(in_cache) == self.associativity:
                    #If this set is full, find the oldest block, write it back if it's dirty, and replace it
                    oldest_tag = in_cache[0]
                    for b in in_cache:
                        if self.data[index][b].last_accessed < self.data[
                                index][oldest_tag].last_accessed:
                            oldest_tag = b
                    if self.write_back:
                        if self.data[index][oldest_tag].is_dirty():
                            self.logger.info('\tWriting back block ' +
                                             address + ' to ' +
                                             self.next_level.name)
                            r = self.next_level.write(
                                self.data[index][oldest_tag].address,
                                from_cpu, current_step)
                            r.deepen(self.write_time, self.name)
                    else:
                        self.logger.info('\tWriting through block ' + address
                                         + ' to ' + self.next_level.name)
                        r = self.next_level.write(address, from_cpu,
                                                  current_step)
                        r.deepen(self.write_time, self.name)
                    #Evict the old block and install the new one
                    del self.data[index][oldest_tag]
                    self.data[index][tag] = block.Block(
                        self.block_size, current_step, from_cpu, address)
    # Fallback: paths that produced no response (e.g. clean write-back
    # eviction) are reported as a miss at this level's write cost.
    if not r:
        r = response.Response({self.name: False}, self.write_time)
    return r
if mode == 1: self.pp(0) print resp for i in a: if i not in self.history_count: self.history_count[i] = 1 else: self.history_count[i] += 1 if len(self.history_dialogue) == 3: last_dialogue = self.history_dialogue.pop() for i in last_dialogue: self.history_count[i] -= 1 if mode == 2: if flago is not True: return resp else: return '---$---' #if __name__ =='__main__': # search_object = response.Response() # IRIS = ChatBot(search_object) # IRIS.initialise() # IRIS.main("did you change your hair",1) # del search_object # del IRIS #print "yahoo" search_object = response.Response() IRIS = ChatBot(search_object)
def do_proppatch(self, host, path, headers, body):
    """PROPPATCH is not supported by this server.

    BUG FIX: handle() dispatches handlers as func(host, path, headers,
    body); the original signature lacked ``host``, so every PROPPATCH
    raised TypeError (surfacing as a 500) instead of this 501.
    """
    return response.Response(status.NOT_IMPLEMENTED)
def __init__(self, request, content=None, template=None):
    """Bind the request context, optional content and template, and
    prepare an empty Response for later rendering."""
    self.__request = request
    self.__content = content
    self.__template = template
    self.__response = response.Response()