def get_schedule(self):
    """Build the trip URLs, fetch the schedule, and return the responses.

    Returns None when the instance data fails validation or no URLs are
    produced; otherwise the list of (ts_epoch, duration) pairs from
    RequestProcessor.
    """
    # Guard clause: skip everything on invalid input data.
    # NOTE(review): validate_data is referenced without calling it — if it
    # is a method this check is always truthy; confirm it is a flag/property.
    if not self.validate_data:
        return

    builder = UrlProcessor(self.orig_lat, self.orig_lng,
                           self.dest_lat, self.dest_lng)
    # Algorithm to update count and range.
    urls = builder.create_url(24, 24)
    logger.info("Processing URL List {}".format(urls))

    # Nothing to fetch -> implicit None, matching the original fall-through.
    if not urls:
        return

    fetcher = RequestProcessor(urls)
    resp = fetcher.process_requests(urls)
    for ts_epoch, duration in resp:
        logger.info("{} {}".format(ts_epoch, duration))
    return resp
def lambda_handler(event, context):
    """AWS Lambda entry point producing signed upload/download URL data.

    All configuration is taken from environment variables. Any failure is
    converted into an error payload so the caller always receives a valid
    CORS-tagged response.
    """
    try:
        upload_bucket = os.environ["UPLOAD_S3_BUCKET"]
        thumbs_bucket = os.environ["THUMBNAILS_S3_BUCKET"]
        processor = RequestProcessor(
            S3PolicySigner(upload_bucket),
            S3PolicySigner(thumbs_bucket),
            int(os.environ["UPLOAD_LIMIT_IN_MB"]),
            os.environ["ALLOWED_IMAGE_EXTENSIONS"].split(","),
        )
        result = processor.process_request(
            context.aws_request_id,
            event["pathParameters"]["extension"],
        )
        print(f"result: {result}")
        return json_response(result, os.environ["CORS_ORIGIN"])
    except Exception as e:
        # Top-level service boundary: surface the failure as a structured
        # error response rather than letting the Lambda invocation blow up.
        return error_response(str(e), os.environ["CORS_ORIGIN"])
def get_schedule(self):
    """Build trip URLs, fetch durations, and append min/max/current stats.

    Returns None when validation fails or no URLs are produced; otherwise
    the response list of (key, value) pairs, extended with the
    min/max/current duration entries computed by PathProcessor.
    """
    # NOTE(review): validate_data is referenced without calling it — if it
    # is a method this check is always truthy; confirm it is a flag/property.
    if not self.validate_data:
        return

    url_proc = UrlProcessor(self.orig_lat, self.orig_lng,
                            self.dest_lat, self.dest_lng, self.local_time)
    # Algorithm to update count and range.
    url_list = url_proc.create_url(24, 24)
    logger.info("Processing URL List {}".format(url_list))

    if len(url_list) > 0:
        request_processor = RequestProcessor(url_list)
        resp = request_processor.process_requests(url_list)

        path_proc = PathProcessor(resp)
        # Renamed from min/max/curr so the builtins are not shadowed.
        min_dur, max_dur, cur_dur = path_proc.getDetails()
        try:
            resp.append(('min_duration', min_dur))
            resp.append(('max_duration', max_dur))
            resp.append(('cur_duration', cur_dur))
        except Exception:
            # BUG FIX: original used Python 2 `print e`, which is a
            # SyntaxError on Python 3. Log the traceback instead.
            logger.exception("Failed to append duration stats")

        for ts_epoch, duration in resp:
            logger.info("{} {}".format(ts_epoch, duration))
        return resp
def handle(self): """ Handle a request """ # deserialize the request request_msg = pickle.loads(self.request.recv(settings.PACKET_MAX_SIZE)) # process the request response_msg = RequestProcessor.process( self.server.document_repository, request_msg) # serialize and send the response self.request.sendall(pickle.dumps(response_msg))
class TestRequestProcessor(TestCase):
    """Unit tests for RequestProcessor using a stubbed file reader."""

    def setUp(self):
        self.rp = RequestProcessor(file_reader=self.create_fake_file_reader())

    def test_process(self):
        # A request for /fake_file_name.html should yield the stub content.
        req = Mock(["get_url"])
        req.get_url.return_value = "/fake_file_name.html"
        res = self.rp.process(req)
        self.assertEqual(res.get_content(), "fake file content!")

    def create_fake_file_reader(self):
        """Return a mock reader that knows exactly one file."""
        file_reader = Mock(["read_file"])
        known_files = {"fake_file_name.html": "fake file content!"}

        def fake_read_file(fname):
            if fname in known_files:
                return known_files[fname]
            raise Exception("No such file")

        file_reader.read_file.side_effect = fake_read_file
        return file_reader
def setUp(self):
    """Create a RequestProcessor wired to the stubbed file reader."""
    fake_reader = self.create_fake_file_reader()
    self.rp = RequestProcessor(file_reader=fake_reader)
from socket import socket, AF_INET, SOCK_STREAM
from request_processor import RequestProcessor
from http_wrapper import HTTPRequest
from simple_file_reader import SimpleFileReader

host = "localhost"
port = 12000

request_processor = RequestProcessor(file_reader=SimpleFileReader("./"))


def main():
    """Accept TCP connections forever, answering one request per connection."""
    s = socket(AF_INET, SOCK_STREAM)
    s.bind(("", port))
    s.listen(1)
    print("The server is ready to receive")
    while True:
        conn, addr = s.accept()
        try:
            # Decode moved inside the guard so a malformed (non-UTF-8)
            # payload no longer crashes the accept loop.
            msg = conn.recv(2048).decode()
            # NOTE(review): process_message is not defined in this chunk —
            # presumably defined elsewhere in the file; verify it exists.
            conn.send(process_message(msg).encode())
            print("Request processed")
        except Exception:
            # BUG FIX: the original bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            print("Can't process the request")
        finally:
            # Always release the client socket, even on failure paths.
            conn.close()