def process_zip(self, uploaded_zip_file, spj, dir=""):
    """Unpack an uploaded test-case zip into a freshly created test-case dir.

    Reads every ``.in``/``.out`` member (selected by ``self.filter_name_list``),
    normalizes CRLF to LF, writes the files under a random test-case directory,
    and writes an ``info`` JSON describing the cases.

    :param uploaded_zip_file: file object (or path) accepted by ``zipfile.ZipFile``
    :param spj: truthy when this is a special-judge problem (no ``.out`` pairing)
    :param dir: optional prefix of the members inside the archive
    :return: ``(info, test_case_id)`` — the per-case metadata list and the
             random directory name the files were stored under
    :raises APIError: on a corrupt zip or when no usable members are found
    """
    try:
        zip_file = zipfile.ZipFile(uploaded_zip_file, "r")
    except zipfile.BadZipFile:
        raise APIError("Bad zip file")
    # Fix: the ZipFile was previously never closed (leaked on every call,
    # including the early "Empty file" raise). The context manager closes it.
    with zip_file:
        name_list = zip_file.namelist()
        test_case_list = self.filter_name_list(name_list, spj=spj, dir=dir)
        if not test_case_list:
            raise APIError("Empty file")
        # A random id doubles as the storage directory name.
        test_case_id = rand_str()
        test_case_dir = os.path.join(settings.TEST_CASE_DIR, test_case_id)
        os.mkdir(test_case_dir)
        os.chmod(test_case_dir, 0o710)
        size_cache = {}
        md5_cache = {}
        # Rewrite each member with "\r\n" replaced by "\n", recording sizes
        # (and, for .out files, the md5 of the right-stripped content).
        for item in test_case_list:
            with open(os.path.join(test_case_dir, item), "wb") as f:
                content = zip_file.read(f"{dir}{item}").replace(b"\r\n", b"\n")
                size_cache[item] = len(content)
                if item.endswith(".out"):
                    md5_cache[item] = hashlib.md5(content.rstrip()).hexdigest()
                f.write(content)
    test_case_info = {"spj": spj, "test_cases": {}}
    # Build the per-case metadata and the "info" file contents.
    info = []
    if spj:
        for index, item in enumerate(test_case_list):
            data = {"input_name": item, "input_size": size_cache[item]}
            info.append(data)
            test_case_info["test_cases"][str(index + 1)] = data
    else:
        # ["1.in", "1.out", "2.in", "2.out"] => [("1.in", "1.out"), ("2.in", "2.out")]
        test_case_list = zip(*[test_case_list[i::2] for i in range(2)])
        for index, item in enumerate(test_case_list):
            data = {
                "stripped_output_md5": md5_cache[item[1]],
                "input_size": size_cache[item[0]],
                "output_size": size_cache[item[1]],
                "input_name": item[0],
                "output_name": item[1]
            }
            info.append(data)
            test_case_info["test_cases"][str(index + 1)] = data
    with open(os.path.join(test_case_dir, "info"), "w", encoding="utf-8") as f:
        f.write(json.dumps(test_case_info, indent=4))
    for item in os.listdir(test_case_dir):
        os.chmod(os.path.join(test_case_dir, item), 0o640)
    return info, test_case_id
def _get_user_item(self, request):
    """Look up the TodoItem named by the ``item_id`` query parameter.

    Returns the item when it exists and the requesting user is allowed to
    see it; otherwise raises APIError (400 for a missing item, 403 when the
    permission check fails).
    """
    user = request.user
    item_id = request.GET.get("item_id")
    # Keep the try minimal: only the ORM lookup can raise DoesNotExist.
    try:
        todo_item = TodoItem.objects.select_related('user').get(id=item_id)
    except TodoItem.DoesNotExist:
        raise APIError(HTTPStatus.BAD_REQUEST, err="Todo item does not exist")
    if not check_object_permission(todo_item, user):
        raise APIError(HTTPStatus.FORBIDDEN, err="item is not allowed query by this user")
    return todo_item
def process_zip(self, uploaded_zip_file, spj, test_case_id, dir=""):
    """Unpack a zip whose outputs are *directories* of partition files
    (e.g. ``1.out/part-r-00000``) into ``settings.TEST_CASE_DIR/test_case_id``,
    normalizing CRLF to LF, then build the info via ``OutInfo``.

    NOTE(review): unlike the sibling ``process_zip``, the caller supplies
    ``test_case_id`` and any existing directory is wiped first.
    """
    try:
        zip_file = zipfile.ZipFile(uploaded_zip_file, "r")
    except zipfile.BadZipFile:
        raise APIError("Bad zip file")
    # NOTE(review): zip_file is never closed — consider a `with` block.
    name_list = zip_file.namelist()  # ['1.in', '1.out/', '1.out/part-r-00000', '1.out/part-r-00002', '1.out/part-r-00001', '2.in', '2.out/', '2.out/part-r-00000', '2.out/part-r-00002', '2.out/part-r-00001']
    # Natural sort so "2.in" sorts before "10.in" and each N.out/ group stays together.
    test_case_list = sorted(name_list, key=natural_sort_key)
    if not test_case_list:
        raise APIError("Empty file")
    test_case_dir = os.path.join(settings.TEST_CASE_DIR, test_case_id)
    # Re-uploading replaces any previous content for this test_case_id.
    if os.path.exists(test_case_dir):
        shutil.rmtree(test_case_dir)
    os.mkdir(test_case_dir)
    os.chmod(test_case_dir, 0o710)
    partitions = 1
    # `index` tracks the test-case number; it advances once per "N.out/" dir.
    index = 1
    for item in test_case_list:
        # Every "N.out/" directory must have a matching "N.in", and vice versa.
        if item == str(index) + '.out/':
            if str(index) + '.in' not in test_case_list:
                raise APIError("Input and output must be paired")
        if item == str(index) + '.in':
            if str(index) + '.out/' not in test_case_list:
                raise APIError("Input and output must be paired")
        # A "N.out/" member is a directory entry: create it and move on.
        if item == str(index) + '.out/':
            os.mkdir(os.path.join(test_case_dir, item))
            os.chmod(os.path.join(test_case_dir, item), 0o710)
            index += 1
            continue
        # Regular file member: copy it out with CRLF normalized to LF.
        with open(os.path.join(test_case_dir, item), "wb") as f:
            content = zip_file.read(f"{dir}{item}").replace(b"\r\n", b"\n")
            f.write(content)
        pathsplits = os.path.split(item)
        # NOTE(review): os.path.split always returns a 2-tuple, so
        # `len(pathsplits) >= 2` is always true. More importantly,
        # int(os.path.split(item)[-1]) parses the *filename* — for names like
        # "part-r-00000" (see the namelist comment above) int() would raise
        # ValueError. Presumably the intent was to extract the numeric
        # partition suffix; confirm against real uploads before relying on it.
        if len(pathsplits) >= 2 and pathsplits[-2].endswith(".out"):
            partitions = max(partitions, int(os.path.split(item)[-1]))
    # print("partition = ",partitions)
    outinfo = OutInfo(test_case_dir, partitions)
    info = outinfo.generate_info()
    return info, test_case_id
def process_json(self, data, spj):
    """Create a test-case directory from JSON-submitted test cases.

    Writes each case's ``input`` (and optional ``output``) to numbered
    ``N.in``/``N.out`` files under a random directory, plus an ``info``
    JSON with sizes and stripped-output md5s.

    :param data: dict that must contain a ``testcases`` list of
                 ``{"input": str, "output": str}`` dicts
    :param spj: truthy for special-judge problems (outputs not recorded)
    :return: ``(info, test_case_id)``
    :raises APIError: when ``data`` has no ``testcases`` key
    """
    if 'testcases' not in data:
        # Bug fix: this error was previously *returned* instead of raised,
        # so callers received an APIError instance as a normal value.
        raise APIError("No testcases in data")
    testcases = data['testcases']
    test_case_id = rand_str()
    test_case_dir = os.path.join(settings.TEST_CASE_DIR, test_case_id)
    os.mkdir(test_case_dir)
    os.chmod(test_case_dir, 0o710)
    for i, testcase in enumerate(testcases):
        in_path = os.path.join(test_case_dir, "{}.in".format(i + 1))
        # utf-8 made explicit so results don't depend on the locale encoding
        # (the "info" file below already wrote utf-8).
        with open(in_path, "w", encoding="utf-8") as f:
            f.write(testcase['input'])
        if 'output' in testcase:
            out_path = os.path.join(test_case_dir, "{}.out".format(i + 1))
            with open(out_path, "w", encoding="utf-8") as f:
                f.write(testcase['output'])
    test_case_info = {"spj": spj, "test_cases": {}}
    info = []
    # `case_info` (not `data`) — the old code shadowed the parameter here.
    if spj:
        for i, testcase in enumerate(testcases):
            case_info = {
                "input_name": str(i + 1) + ".in",
                "input_size": len(testcase["input"])
            }
            info.append(case_info)
            test_case_info["test_cases"][str(i + 1)] = case_info
    else:
        for i, testcase in enumerate(testcases):
            case_info = {
                'stripped_output_md5': hashlib.md5(
                    testcase['output'].rstrip().encode('utf-8')).hexdigest(),
                'input_size': len(testcase['input']),
                'output_size': len(testcase['output']),
                'input_name': str(i + 1) + ".in",
                'output_name': str(i + 1) + ".out"
            }
            info.append(case_info)
            test_case_info["test_cases"][str(i + 1)] = case_info
    with open(os.path.join(test_case_dir, "info"), "w", encoding="utf-8") as f:
        f.write(json.dumps(test_case_info, indent=4))
    for item in os.listdir(test_case_dir):
        os.chmod(os.path.join(test_case_dir, item), 0o640)
    return info, test_case_id
def ensure_created_by(obj, user):
    """Raise APIError unless ``user`` is allowed to manage ``obj``.

    Non-admins are always rejected. Admins holding the manage-all-problems
    permission may touch any Problem; otherwise the user must be the
    object's creator. The error deliberately reads "does not exist" so the
    object's presence is not revealed to unauthorized callers.
    """
    not_found = APIError(msg=f"{obj.__class__.__name__} does not exist")
    # Guard: only admin roles may manage anything at all.
    if not user.is_admin_role():
        raise not_found
    # Problems are exempt from the ownership check for manage-all admins.
    if isinstance(obj, Problem) and user.can_mgmt_all_problem():
        return
    if obj.created_by != user:
        raise not_found
def put(self, request):
    """Rename an existing Book.

    Expects a JSON body with ``id`` and ``name``; returns the updated book
    serialized, or raises a 404 APIError when no such book exists.
    """
    payload = request.json_data
    book_id = payload["id"]
    try:
        book = Book.objects.get(id=book_id)
        book.name = payload["name"]
        book.save()
        return self.response(BookSerializer(book))
    except Book.DoesNotExist:
        raise APIError(code=404, err="book not exist")
def process_zip(self, uploaded_zip_file, spj, dir=""):
    """Copy test-case files from an already-extracted directory tree.

    Despite its name, ``uploaded_zip_file`` is used as a filesystem path
    prefix; ``dir`` is appended to it verbatim (no separator inserted).
    Files are rewritten into a random test-case directory with CRLF
    normalized to LF, and an ``info`` JSON is produced alongside them.

    Returns ``(info, test_case_id)``; raises APIError when no usable
    files are found.
    """
    src_testcase_dir = uploaded_zip_file + dir
    name_list = os.listdir(src_testcase_dir)
    test_case_list = self.filter_name_list(name_list, spj=spj, dir='')
    if not test_case_list:
        raise APIError("Empty file")
    test_case_id = rand_str()
    test_case_dir = os.path.join(settings.TEST_CASE_DIR, test_case_id)
    os.mkdir(test_case_dir)
    os.chmod(test_case_dir, 0o710)
    size_cache = {}
    md5_cache = {}
    for name in test_case_list:
        # Destination is opened first (matching the original ordering),
        # source path is the bare prefix concatenation used above.
        with open(os.path.join(test_case_dir, name), "w") as dst, \
                open(f"{src_testcase_dir}{name}") as src:
            body = src.read().replace("\r\n", "\n")
            size_cache[name] = len(body)
            if name.endswith(".out"):
                md5_cache[name] = hashlib.md5(
                    body.rstrip().encode('utf-8')).hexdigest()
            dst.write(body)
    test_case_info = {"spj": spj, "test_cases": {}}
    info = []
    if spj:
        for index, name in enumerate(test_case_list):
            entry = {"input_name": name, "input_size": size_cache[name]}
            info.append(entry)
            test_case_info["test_cases"][str(index + 1)] = entry
    else:
        # Pair consecutive names: ["1.in", "1.out", ...] -> [("1.in", "1.out"), ...]
        pairs = zip(test_case_list[0::2], test_case_list[1::2])
        for index, (in_name, out_name) in enumerate(pairs):
            entry = {
                "stripped_output_md5": md5_cache[out_name],
                "input_size": size_cache[in_name],
                "output_size": size_cache[out_name],
                "input_name": in_name,
                "output_name": out_name
            }
            info.append(entry)
            test_case_info["test_cases"][str(index + 1)] = entry
    with open(os.path.join(test_case_dir, "info"), "w", encoding="utf-8") as f:
        f.write(json.dumps(test_case_info, indent=4))
    for name in os.listdir(test_case_dir):
        os.chmod(os.path.join(test_case_dir, name), 0o640)
    return info, test_case_id
def ensure_created_by(obj, user):
    """Raise APIError unless ``user`` may manage ``obj``.

    Non-admin roles are always rejected; regular admins (``is_admin()``)
    may only manage objects they created. The message reads "does not
    exist" so unauthorized callers cannot probe for the object's presence.
    """
    # Guard 1: must hold an admin role at all.
    if not user.is_admin_role():
        raise APIError(msg=f"{obj.__class__.__name__} does not exist")
    # Guard 2: regular admins are restricted to their own objects.
    if user.is_admin() and obj.created_by != user:
        raise APIError(msg=f"{obj.__class__.__name__} does not exist")