def unsatisfiable_range(address, req, cur_day, cur_time, cur_month):
    data = 'HTTP/1.1 416 Requested Range Not Satisfiable\r\n'
    status_code = 416
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)
    data += "Date: " + timestamp + "\r\n"
    data += "Server: Aayush/0.1\r\n"
    data += "Connection: Closed\r\n"
    data += "\r\n"
    log.make_entry(address, req, cur_time, cur_month, status_code, 0, '', 'error', 'Range header field value not satisfiable')
    return data
def not_acceptable(address, req, cur_day, cur_time, cur_month):
    data = 'HTTP/1.1 406 Not Acceptable\r\n'
    status_code = 406
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)
    data += "Date: " + timestamp + "\r\n"
    data += "Server: Aayush/0.1\r\n"
    data += "Connection: Closed\r\n"
    data += "\r\n"
    log.make_entry(address, req, cur_time, cur_month, status_code, 0, '', 'error', 'Header value not acceptable')
    return data
def proxy_auth_req(address, req, cur_day, cur_time, cur_month):
    data = "HTTP/1.1 407 Proxy Authentication Required\r\n"
    status_code = 407
    data += "Proxy-Authenticate: Basic\r\n"
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)
    data += "Date: " + timestamp + "\r\n"
    data += "Server: Aayush/0.1\r\n"
    data += "Connection: Closed\r\n"
    data += "\r\n"
    log.make_entry(address, req, cur_time, cur_month, status_code, 0, '', 'warn', 'Proxy authentication required')
    return data
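#The three helpers above all emit the same minimal header block and no entity body.
#For illustration (a sketch, not output captured from the server), a 407 response
#built by proxy_auth_req() has the shape:
#
#   HTTP/1.1 407 Proxy Authentication Required
#   Proxy-Authenticate: Basic
#   Date: <weekday>, <day> <month> <year> <hour>:<minute>:<second> <LOCAL_TIMEZONE>
#   Server: Aayush/0.1
#   Connection: Closed
#
#not_acceptable() and unsatisfiable_range() differ only in the status line and in
#the severity/message they pass to log.make_entry().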
        remove(clientSocket)
        break

#removes the client from the list of clients
def remove(connection):
    if (connection in list_of_clients):
        list_of_clients.remove(connection)

while True:
    if len(list_of_clients) <= connections:
        #this accepts a connection from the client
        #it returns a socket object and address bound to the socket on the client side
        connection, address = server.accept()
        #add the user to the list of users
        list_of_clients.append(connection)
        #creating a new thread for each user
        start_new_thread(clientThread, (connection, address))
    else:
        log.make_entry('127.0.0.1', '', cur_time, cur_month, 0, 0, '', 'notice', 'Max simultaneous connections exceeded')
        break

server.close()
exit()
#handles an HTTP PUT request: overwrites an existing resource or creates a new one
def put(req, address, msg):
    is_valid_uri = req[0].split(' ')
    cur_time = datetime.datetime.now()
    cur_day = get_date.getday(str(cur_time.day) + " " + str(cur_time.month) + " " + str(cur_time.year))
    cur_month = get_date.getMonth(str(cur_time.month))
    cur_day = str(cur_day)
    #timestamp reused in the Date headers below
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)

    #validate the header
    host, type, proxy_auth, range, encoding, if_mod_since, if_unmod_since = headers.check_headers(req)
    if host == 0:
        data = "HTTP/1.1 400 Bad Request\r\n"
        status_code = 400
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0, '', 'error', 'Bad request')
        return data

    if is_valid_uri[1][0] != '/':
        content = "Bad Request\r\n"
        data = "HTTP/1.1 400 Bad Request\r\n"
        status_code = 400
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "\r\n"
        data += content + "\r\n"
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0)
        return data

    #find the type of document in the request
    for line in req:
        header = line.split(':')
        if header[0] == 'Content-Type':
            doc_type = header[1]
            break

    #check if the request contains cookie header
    cookie_id = cookie.generate_cookie(req)
    if cookie_id == False:
        set_cookie_header = 0
    else:
        set_cookie_header = 1

    #get all the files and folders in the root
    files = os.listdir(document_root)
    uri = is_valid_uri[1]

    #protected server files cannot be modified through PUT
    for f in website_files:
        if f == uri:
            content = "You are not authorized to access the file\r\n"
            data = "HTTP/1.1 405 Method Not Allowed\r\n"
            status_code = 405
            data += "Date: " + timestamp + "\r\n"
            data += "Content-Type: text/plain\r\n"
            data += "Content-Length: " + str(len(content)) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            data += content + "\r\n"
            log.make_entry(address, req[0], cur_time, cur_month, status_code, len(content))
            return data

    #record the modification time of the resource
    headers.Dict[uri] = timestamp

    uri = uri.split('/')
    uri_page = uri[len(uri) - 1]
    uri_page += ".html"

    #check if the requested URL exists or not(files)
    for file in files:
        if uri_page != str(file):
            found_file = 0
            found = 0
        else:
            found_file = 1
            found = 1
            break

    if found_file == 0:
        uri_page = uri_page.replace('.html', '')
        #check for text files
        for file in files:
            if uri_page != str(file):
                found_file = 0
                found = 0
            else:
                found_file = 1
                found = 1
                break

    image_files = os.listdir(images_folder)
    #check for the images
    if found_file == 0:
        for image in image_files:
            if uri_page == str(image):
                found = 1
                uri_page = "images/" + uri_page
                break
            else:
                found = 0

    #overwrite an existing text/html resource
    if found_file == 1:
        if doc_type.find('html') >= 0 or doc_type.find('plain') >= 0:
            res_file = open(str(document_root) + '/' + uri_page, 'r+')
            file_stats = os.stat(str(document_root) + '/' + uri_page)
            res_file.seek(0)
            res_file.truncate()
            res_file.writelines(msg)
            data = "HTTP/1.1 200 OK\r\n"
            status_code = 200
            data += "Date: " + timestamp + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            #rewind so the freshly written content is echoed back in the response
            res_file.seek(0)
            data += res_file.read() + "\r\n"
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
            return data

    #overwrite an existing image resource
    if found == 1:
        if doc_type.find('png') >= 0 or doc_type.find('jpeg') >= 0:
            res_file = open(str(document_root) + '/' + uri_page, 'rb+')
            file_stats = os.stat(str(document_root) + '/' + uri_page)
            res_file.seek(0)
            res_file.truncate()
            res_file.write(msg)
            res_file.close()
            data = "HTTP/1.1 200 OK\r\n"
            status_code = 200
            data += "Date: " + timestamp + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            res_file = open(str(document_root) + '/' + uri_page, 'rb')
            bin_data = res_file.read()
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
            return (data, bin_data)

    #creating a new resource
    data = "HTTP/1.1 201 Created\r\n"
    status_code = 201
    data += "Date: " + timestamp + "\r\n"
    if set_cookie_header == 1:
        data += "Set-Cookie: " + str(cookie_id) + '\r\n'
    data += "Server: Aayush/0.1\r\n"
    data += "Connection: Closed\r\n"
    data += "\r\n"
    if doc_type.find('html') >= 0 or doc_type.find('plain') >= 0:
        res_file = open(str(document_root) + '/' + is_valid_uri[1], 'a')
        file_stats = os.stat(str(document_root) + '/' + is_valid_uri[1])
        res_file.writelines(msg)
        res_file.close()
        res_file = open(str(document_root) + '/' + is_valid_uri[1], 'r')
        data += res_file.read()
        data += '\r\n'
        res_file.close()
        return data
    elif doc_type.find('png') >= 0 or doc_type.find('jpeg') >= 0:
        res_file = open(str(document_root) + '/' + is_valid_uri[1], 'w+b')
        file_stats = os.stat(str(document_root) + '/' + is_valid_uri[1])
        res_file.write(msg)
        res_file.close()
        res_file = open(str(document_root) + '/' + is_valid_uri[1], 'rb')
        bin_data = res_file.read()
        log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
        res_file.close()
        return (data, bin_data)
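#Illustrative request for put() above (a sketch: the path, Host value and body are
#made up, and the port is whatever this server was bound to):
#
#   PUT /about HTTP/1.1
#   Host: localhost
#   Content-Type: text/html
#
#   <html><body><h1>Updated about page</h1></body></html>
#
#If the URI resolves to an existing document it is truncated and rewritten (200 OK);
#otherwise the file is created under document_root and the reply is 201 Created.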
#handles an HTTP DELETE request: returns the resource one last time and removes it from disk
def delete(req, address):
    #get all the files and folders in the root
    files = os.listdir(document_root)
    request = req[0].split(' ')
    uri = request[1]
    cur_time = datetime.datetime.now()
    cur_day = get_date.getday(str(cur_time.day) + " " + str(cur_time.month) + " " + str(cur_time.year))
    cur_month = get_date.getMonth(str(cur_time.month))
    cur_day = str(cur_day)
    #timestamp reused in the Date headers below
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)

    #validate the header
    host, type, proxy_auth, range, encoding, if_mod_since, if_unmod_since = headers.check_headers(req)
    if host == 0:
        data = "HTTP/1.1 400 Bad Request\r\n"
        status_code = 400
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0, '', 'error', 'Bad request')
        return data

    #check if the request contains cookie header
    cookie_id = cookie.generate_cookie(req)
    if cookie_id == False:
        set_cookie_header = 0
    else:
        set_cookie_header = 1

    #protected server files cannot be deleted
    for f in website_files:
        if f == uri:
            content = "You are not authorized to access the file\r\n"
            data = "HTTP/1.1 405 Method Not Allowed\r\n"
            status_code = 405
            data += "Date: " + timestamp + "\r\n"
            data += "Content-Type: text/plain\r\n"
            data += "Content-Length: " + str(len(content)) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            data += content + "\r\n"
            log.make_entry(address, req[0], cur_time, cur_month, status_code, len(content))
            return data

    uri = uri.split('/')
    uri_page = uri[len(uri) - 1]
    uri_page += ".html"

    #check if the requested URL exists or not(files)
    for file in files:
        if uri_page != str(file):
            found_file = 0
            found = 0
        else:
            found_file = 1
            found = 1
            break

    if found_file == 0:
        uri_page = uri_page.replace('.html', '')
        #check for text files
        for file in files:
            if uri_page != str(file):
                found_file = 0
                found = 0
            else:
                found_file = 1
                found = 1
                break

    image_files = os.listdir(images_folder)
    #check for the images
    if found_file == 0:
        for image in image_files:
            if uri_page == str(image):
                found = 1
                uri_page = "images/" + uri_page
                break
            else:
                found = 0

    #if the requested document is not found
    if found == 0:
        res_file = open(str(document_root) + '/404.html', 'r')
        file_stats = os.stat(str(document_root) + '/404.html')
        data = "HTTP/1.1 404 Not Found\r\n"
        status_code = 404
        data += "Date: " + timestamp + "\r\n"
        data += "Content-Type: text/html\r\n"
        data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
        if set_cookie_header == 1:
            data += "Set-Cookie: " + str(cookie_id) + '\r\n'
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "\r\n"
        data += res_file.read()
        data += '\r\n\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
        res_file.close()
        return data
    else:
        data = "HTTP/1.1 200 OK\r\n"
        status_code = 200
        data += "Date: " + timestamp + "\r\n"
        if found_file == 1:
            #text or html document: send it back once, then delete it
            res_file = open(str(document_root) + '/' + uri_page, 'r')
            file_stats = os.stat(str(document_root) + '/' + uri_page)
            if uri_page.find('.txt') >= 0:
                data += "Content-Type: text/plain\r\n"
            else:
                data += "Content-Type: text/html\r\n"
            data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            data += res_file.read() + "\r\n"
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
            os.remove(str(document_root) + '/' + uri_page)
            return data
        else:
            #image: send the binary body once, then delete it
            res_file = open(str(document_root) + '/' + uri_page, 'rb')
            file_stats = os.stat(str(document_root) + '/' + uri_page)
            bin_data = res_file.read()
            if uri_page.find('.jpg') >= 0:
                data += "Content-Type: image/jpeg\r\n"
            elif uri_page.find('.png') >= 0:
                data += "Content-Type: image/png\r\n"
            data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            image_body = bin_data
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
            os.remove(str(document_root) + '/' + uri_page)
            return (data, image_body)
#handles an HTTP POST request: the submitted body is logged and an empty 200/404 response is returned
def post(req, address, msg):
    #get all the files and folders in the root
    files = os.listdir(document_root)
    request = req[0].split(' ')
    uri = request[1]
    cur_time = datetime.datetime.now()
    cur_day = get_date.getday(str(cur_time.day) + " " + str(cur_time.month) + " " + str(cur_time.year))
    cur_month = get_date.getMonth(str(cur_time.month))
    cur_day = str(cur_day)
    #timestamp reused in the Date headers below
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)

    #validate the header
    host, type, proxy_auth, range, encoding, if_mod_since, if_unmod_since = headers.check_headers(req)
    if host == 0:
        data = "HTTP/1.1 400 Bad Request\r\n"
        status_code = 400
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0, '', 'error', 'Bad request')
        return data

    #check if the request contains cookie header
    cookie_id = cookie.generate_cookie(req)
    if cookie_id == False:
        set_cookie_header = 0
    else:
        set_cookie_header = 1

    #POST to the root document
    if uri == '/':
        data = "HTTP/1.1 200 OK\r\n"
        status_code = 200
        data += "Date: " + timestamp + "\r\n"
        if set_cookie_header == 1:
            data += "Set-Cookie: " + str(cookie_id) + '\r\n'
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "\r\n"
        log.make_entry(address, req[0], cur_time, cur_month, status_code, len(msg), msg)
        return data

    uri = uri.split('/')
    uri_page = uri[len(uri) - 1]
    uri_page += ".html"

    #check if the requested URL exists or not(files)
    for file in files:
        if uri_page != str(file):
            found_file = 0
            found = 0
        else:
            found_file = 1
            found = 1
            break

    if found_file == 0:
        uri_page = uri_page.replace('.html', '')
        #check for text files
        for file in files:
            if uri_page != str(file):
                found_file = 0
                found = 0
            else:
                found_file = 1
                found = 1
                break

    image_files = os.listdir(images_folder)
    #check for the images
    if found_file == 0:
        for image in image_files:
            if uri_page == str(image):
                found = 1
                uri_page = "images/" + uri_page
                break
            else:
                found = 0

    if found == 0:
        data = "HTTP/1.1 404 Not Found\r\n"
        status_code = 404
        data += "Date: " + timestamp + "\r\n"
        if set_cookie_header == 1:
            data += "Set-Cookie: " + str(cookie_id) + '\r\n'
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "\r\n"
        log.make_entry(address, req[0], cur_time, cur_month, status_code, len(msg))
        return data
    else:
        data = "HTTP/1.1 200 OK\r\n"
        status_code = 200
        data += "Date: " + timestamp + "\r\n"
        if set_cookie_header == 1:
            data += "Set-Cookie: " + str(cookie_id) + '\r\n'
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "\r\n"
        log.make_entry(address, req[0], cur_time, cur_month, status_code, len(msg), msg)
        return data
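#Illustrative behaviour of post() above (a sketch, not an authoritative spec): a POST
#to '/' or to any existing page is acknowledged with an empty 200 OK and the submitted
#body is handed to log.make_entry(); a POST to an unknown URI gets an empty 404 Not
#Found. The handler never writes the body to disk.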
#handles an HTTP GET request: serves files from the document root with support for
#conditional requests, byte ranges, content negotiation and gzip/zlib encoding
def get(req, address):
    #get all the files and folders in the root
    files = os.listdir(document_root)
    request = req[0].split(' ')
    uri = request[1]
    cur_time = datetime.datetime.now()
    cur_day = get_date.getday(str(cur_time.day) + " " + str(cur_time.month) + " " + str(cur_time.year))
    cur_month = get_date.getMonth(str(cur_time.month))
    cur_day = str(cur_day)
    #timestamp reused in the Date headers below
    timestamp = cur_day[0:3] + ", " + str(cur_time.day) + " " + cur_month + " " + str(cur_time.year) + " " + str(cur_time.hour) + ":" + str(cur_time.minute) + ":" + str(cur_time.second) + " " + str(LOCAL_TIMEZONE)

    #validate the header
    host, type, proxy_auth, range, encoding, if_mod_since, if_unmod_since = headers.check_headers(req)
    if host == 0:
        data = "HTTP/1.1 400 Bad Request\r\n"
        status_code = 400
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0, '', 'error', 'Bad request')
        return data

    #If-Modified-Since precondition not met
    if if_mod_since == 0:
        data = "HTTP/1.1 304 Not Modified\r\n"
        status_code = 304
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0)
        return data

    #If-Unmodified-Since precondition not met
    if if_unmod_since == 0:
        data = "HTTP/1.1 412 Precondition Failed\r\n"
        status_code = 412
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, 0)
        return data

    #check if the request contains cookie header
    cookie_id = cookie.generate_cookie(req)
    if cookie_id == False:
        set_cookie_header = 0
    else:
        set_cookie_header = 1

    #protected server files cannot be fetched
    for f in website_files:
        if f == uri:
            content = "You are not authorized to access the file\r\n"
            data = "HTTP/1.1 405 Method Not Allowed\r\n"
            status_code = 405
            data += "Date: " + timestamp + "\r\n"
            data += "Content-Type: text/plain\r\n"
            data += "Content-Length: " + str(len(content)) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            data += "\r\n"
            data += content + "\r\n"
            log.make_entry(address, req[0], cur_time, cur_month, status_code, len(content), '', 'warn', 'Client not allowed to access the file')
            return data

    #if requested URL is root document
    if uri == '/':
        accept = 0
        for element in html_list:
            if element == type:
                accept = 1
                break
        if accept == 0:
            data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
            return data
        res_file = open(str(document_root) + '/index.html', 'r')
        file_stats = os.stat(str(document_root) + '/index.html')
        data = "HTTP/1.1 200 OK\r\n"
        status_code = 200
        data += "Date: " + timestamp + "\r\n"
        data += "Content-Type: text/html\r\n"
        data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
        if set_cookie_header == 1:
            data += "Set-Cookie: " + str(cookie_id) + '\r\n'
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        if range == 'none':
            data += '\r\n' + res_file.read()
        else:
            #serve the requested byte range as a 206 Partial Content response
            data += 'Content-Range: ' + range + '/' + str(file_stats.st_size) + '\r\n\r\n'
            partial_data = res_file.read()
            byte_pos = range.split('-')
            if byte_pos[0] == '':
                data += partial_data[file_stats.st_size - int(byte_pos[1]):]
            elif byte_pos[1] == '':
                data += partial_data[int(byte_pos[0]):]
            elif int(byte_pos[0]) > int(byte_pos[1]):
                data = unsatisfiable_range(address, req[0], cur_day, cur_time, cur_month)
                return data
            else:
                data += partial_data[int(byte_pos[0]):int(byte_pos[1])]
            data = data.replace('200', '206')
            data = data.replace('OK', 'Partial Content')
        if (encoding == 'gzip, deflate'):
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
        elif (encoding != 'identity'):
            #compress the entity body if the client asked for gzip or zlib
            if (encoding == 'gzip'):
                entity = data.split('\r\n\r\n')
                encoded_entity = gzip.compress(entity[1].encode())
                entity[0] = entity[0].replace(str(file_stats.st_size), str(len(encoded_entity)))
                entity[0] += '\r\nContent-Coding: gzip'
                data = entity[0].encode() + '\r\n\r\n'.encode() + encoded_entity
            elif (encoding == 'zlib'):
                entity = data.split('\r\n\r\n')
                encoded_entity = zlib.compress(entity[1].encode())
                entity[0] = entity[0].replace(str(file_stats.st_size), str(len(encoded_entity)))
                entity[0] += '\r\nContent-Coding: zlib'
                data = entity[0].encode() + '\r\n\r\n'.encode() + encoded_entity
            else:
                data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
        return data

    #temporary redirect example
    if (uri == '/example'):
        data = "HTTP/1.1 302 Found\r\n"
        status_code = 302
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "Location: " + temp_redirect + '\r\n'
        data += '\r\n'
        data += temp_redirect + '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, len('http://localhost:2000/302\r\n'))
        return data

    #permanent redirect example
    if (uri == '/example_2'):
        data = "HTTP/1.1 301 Moved Permanently\r\n"
        status_code = 301
        data += "Date: " + timestamp + "\r\n"
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "Location: " + permanent_redirect + '\r\n'
        data += '\r\n'
        data += permanent_redirect + '\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, len('http://localhost:2000/301\r\n'))
        return data

    uri = uri.split('/')
    uri_page = uri[len(uri) - 1]
    uri_page += ".html"

    #check if the requested URL exists or not(files)
    for file in files:
        if uri_page != str(file):
            found_file = 0
            found = 0
        else:
            found_file = 1
            found = 1
            break

    if found_file == 0:
        uri_page = uri_page.replace('.html', '')
        #check for text files
        for file in files:
            if uri_page != str(file):
                found_file = 0
                found = 0
            else:
                found_file = 1
                found = 1
                break

    image_files = os.listdir(images_folder)
    #check for the images
    if found_file == 0:
        for image in image_files:
            if uri_page == str(image):
                found = 1
                uri_page = "images/" + uri_page
                break
            else:
                found = 0

    #if the requested document is not found
    if found == 0:
        res_file = open(str(document_root) + '/404.html', 'r')
        file_stats = os.stat(str(document_root) + '/404.html')
        data = "HTTP/1.1 404 Not Found\r\n"
        status_code = 404
        data += "Date: " + timestamp + "\r\n"
        data += "Content-Type: text/html\r\n"
        data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
        if set_cookie_header == 1:
            data += "Set-Cookie: " + str(cookie_id) + '\r\n'
        data += "Server: Aayush/0.1\r\n"
        data += "Connection: Closed\r\n"
        data += "\r\n"
        data += res_file.read()
        data += '\r\n\r\n'
        log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size, '', 'error', 'File was not found')
        res_file.close()
        return data
    else:
        data = "HTTP/1.1 200 OK\r\n"
        status_code = 200
        data += "Date: " + timestamp + "\r\n"
        if found_file == 1:
            #text or html document
            if uri_page.find('.txt') >= 0:
                accept = 0
                for element in plain_list:
                    if element == type.strip():
                        accept = 1
                        break
                if accept == 0:
                    data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
                    return data
                data += "Content-Type: text/plain\r\n"
            else:
                accept = 0
                for element in html_list:
                    if element == type.strip():
                        accept = 1
                        break
                if accept == 0:
                    data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
                    return data
                #proxy.html requires proxy authentication
                if uri_page.find('proxy.html') >= 0:
                    if (proxy_auth == 0):
                        data = proxy_auth_req(address, req[0], cur_day, cur_time, cur_month)
                        return data
                data += "Content-Type: text/html\r\n"
            res_file = open(str(document_root) + '/' + uri_page, 'r')
            file_stats = os.stat(str(document_root) + '/' + uri_page)
            data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            if range == 'none':
                data += '\r\n' + res_file.read()
            else:
                #serve the requested byte range as a 206 Partial Content response
                data += 'Content-Range: ' + range + '/' + str(file_stats.st_size) + '\r\n'
                data += "\r\n"
                partial_data = res_file.read()
                byte_pos = range.split('-')
                if byte_pos[0] == '':
                    data += partial_data[file_stats.st_size - int(byte_pos[1]):]
                elif byte_pos[1] == '':
                    data += partial_data[int(byte_pos[0]):]
                elif int(byte_pos[0]) > int(byte_pos[1]):
                    data = unsatisfiable_range(address, req[0], cur_day, cur_time, cur_month)
                    return data
                else:
                    data += partial_data[int(byte_pos[0]):int(byte_pos[1])]
                data = data.replace('200', '206')
                data = data.replace('OK', 'Partial Content')
            data += "\r\n"
            if (encoding == 'gzip, deflate'):
                log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
                res_file.close()
            elif (encoding != 'identity'):
                #compress the entity body if the client asked for gzip or zlib
                if (encoding == 'gzip'):
                    entity = data.split('\r\n\r\n')
                    encoded_entity = gzip.compress(entity[1].encode())
                    entity[0] = entity[0].replace(str(file_stats.st_size), str(len(encoded_entity)))
                    entity[0] += '\r\nContent-Coding: gzip'
                    data = entity[0].encode() + '\r\n\r\n'.encode() + encoded_entity
                elif (encoding == 'zlib'):
                    entity = data.split('\r\n\r\n')
                    encoded_entity = zlib.compress(entity[1].encode())
                    entity[0] = entity[0].replace(str(file_stats.st_size), str(len(encoded_entity)))
                    entity[0] += '\r\nContent-Coding: zlib'
                    data = entity[0].encode() + '\r\n\r\n'.encode() + encoded_entity
                else:
                    data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
                    return data
                log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
                res_file.close()
            else:
                log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
                res_file.close()
            return data
        else:
            #image document
            if uri_page.find('.jpg') >= 0:
                accept = 0
                for element in jpg_list:
                    if element == type.strip():
                        accept = 1
                        break
                if accept == 0:
                    data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
                    return data
                data += "Content-Type: image/jpeg\r\n"
            elif uri_page.find('.png') >= 0:
                accept = 0
                for element in png_list:
                    if element == type.strip():
                        accept = 1
                        break
                if accept == 0:
                    data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
                    return data
                data += "Content-Type: image/png\r\n"
            res_file = open(str(document_root) + '/' + uri_page, 'rb')
            file_stats = os.stat(str(document_root) + '/' + uri_page)
            data += "Content-Length: " + str(file_stats.st_size) + "\r\n"
            if set_cookie_header == 1:
                data += "Set-Cookie: " + str(cookie_id) + '\r\n'
            data += "Server: Aayush/0.1\r\n"
            data += "Connection: Closed\r\n"
            if range == 'none':
                bin_data = res_file.read()
            else:
                #serve the requested byte range as a 206 Partial Content response
                data += 'Content-Range: ' + range + '/' + str(file_stats.st_size) + '\r\n'
                partial_data = res_file.read()
                byte_pos = range.split('-')
                if byte_pos[0] == '':
                    bin_data = partial_data[file_stats.st_size - int(byte_pos[1]):]
                elif byte_pos[1] == '':
                    bin_data = partial_data[int(byte_pos[0]):]
                elif int(byte_pos[0]) > int(byte_pos[1]):
                    data = unsatisfiable_range(address, req[0], cur_day, cur_time, cur_month)
                    return data
                else:
                    bin_data = partial_data[int(byte_pos[0]):int(byte_pos[1])]
                data = data.replace('200', '206')
                data = data.replace('OK', 'Partial Content')
            image_body = bin_data
            if (encoding == 'gzip, deflate'):
                log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
                res_file.close()
            elif (encoding != 'identity'):
                #compress the image body if the client asked for gzip or zlib
                if (encoding == 'gzip'):
                    data += 'Content-Coding: gzip\r\n'
                    image_body = gzip.compress(bin_data)
                    data = data.replace(str(file_stats.st_size), str(len(image_body)))
                elif (encoding == 'zlib'):
                    data += 'Content-Coding: zlib\r\n'
                    image_body = zlib.compress(bin_data)
                    data = data.replace(str(file_stats.st_size), str(len(image_body)))
                else:
                    data = not_acceptable(address, req[0], cur_day, cur_time, cur_month)
                    return data
            data += '\r\n'
            log.make_entry(address, req[0], cur_time, cur_month, status_code, file_stats.st_size)
            res_file.close()
            return (data, image_body)
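#Minimal manual test helper (a sketch, not called anywhere in the server): sends a raw
#GET to a running instance and returns the raw response bytes. The default host/port
#are assumptions -- use whatever address and port this server was actually bound to.
def _example_raw_get(path='/', host='localhost', port=2000):
    import socket
    request = ("GET " + path + " HTTP/1.1\r\n"
               "Host: " + host + "\r\n"
               "Accept: text/html\r\n"
               "Accept-Encoding: identity\r\n"
               "Connection: close\r\n"
               "\r\n")
    #open a TCP connection, send the request and read until the server closes the socket
    with socket.create_connection((host, port)) as sock:
        sock.sendall(request.encode())
        response = b''
        while True:
            chunk = sock.recv(4096)
            if not chunk:
                break
            response += chunk
    return response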