def register_request(request):
    if request.method == "POST":
        form = NewUserForm(request.POST)
        if form.is_valid():
            user = form.save()
            if user is not None:
                login(request, user)
                # Create the user's default 'home' directory.
                home_dir = Directory(user_id=request.user,
                                     directory_name='home')
                try:
                    home_dir.save()
                except Exception:
                    messages.error(
                        request, "There Has Been A Problem, Please Try Again.")
                    return redirect('/')
                messages.success(request, "Registration successful.")
                return redirect("/")
        messages.error(request,
                       "Unsuccessful registration. Invalid information.")
    form = NewUserForm()
    return render(request=request, template_name="register.html", context={
        "register_form": form,
        "title": "Register"
    })
def put(self, request, dir, format=None):
    if request.data['name'] == "":
        payload = {
            "info": "DIR Name Cannot Be Empty",
        }
        return JsonResponse(payload, safe=False)
    if request.data['name'] == "home":
        payload = {
            "info": "DIR Cannot Be Named As Home",
        }
        return JsonResponse(payload, safe=False)
    try:
        parent_dir = Directory.objects.get(pk=dir)
        if parent_dir.user_id_id != request.user.id:
            payload = {
                "info": "You Don't Have Permissions to that DIR",
            }
            return JsonResponse(payload, safe=False)
    except Directory.DoesNotExist:
        parent_dir = None
    if parent_dir is None:
        payload = {
            "info": "Parent DIR Does Not Exist",
        }
        return JsonResponse(payload, safe=False)
    if parent_dir.is_directory == 0:
        payload = {
            "info": "Parent DIR Cannot Be A File",
        }
        return JsonResponse(payload, safe=False)
    # Reject siblings whose names differ only in case.
    existing_childs = Directory.objects.filter(
        parent_directory=parent_dir.pk)
    for child in existing_childs:
        if str(child.directory_name).lower() == str(
                request.data['name']).lower():
            payload = {
                "info": "Cannot Have Duplicate Folder Names",
            }
            return JsonResponse(payload, safe=False)
    new_dir = Directory(user_id=request.user,
                        directory_name=request.data['name'])
    try:
        new_dir.parent_directory = parent_dir
        new_dir.save()
    except Exception:
        payload = {
            "info": "There Was A Problem!",
        }
        return JsonResponse(payload, safe=False)
    payload = {
        "info": "Folder Created Successfully",
        "dir": new_dir.pk,
        "directory_name": new_dir.directory_name,
    }
    return JsonResponse(payload, safe=False)
def create_directory(request):
    if request.method == "POST":
        username = request.POST["username"]
        user = User.objects.get(username=username)
        repo = request.POST["repo"]
        repo = Repository.objects.get(name=repo)
        directory = request.POST["directory"]  # e.g. /hellollo/welcomeworld/
        dir = "/"
        subdir = "/"
        name = ""
        if directory.count("/") > 2:
            # Rebuild the parent path from all but the last segment.
            s = directory.split("/")
            for i in range(1, len(s) - 2):
                print(i)
                dir += s[i]
                dir += "/"
                print(colored(dir, "green"))
            subdir += s[len(s) - 2] + "/"
            name = s[len(s) - 2]
        else:
            name = directory.replace("/", "")
        print(colored((subdir, name), "red"))
        subdir = Directory.objects.get(repo_id=repo.id, path=dir)
        d = Directory(repo_id=repo.id, dir_id=subdir.id,
                      path=directory, name=name)
        d.save()
        return JsonResponse({"message": "success"})
def create_extracted_dir_uuids(job, transfer_mdl, extraction_target,
                               sip_directory, file_):
    """Assign UUIDs to directories via ``Directory`` objects in the database."""
    Directory.create_many(
        dir_paths_uuids=_get_subdir_paths(
            job=job,
            root_path=extraction_target,
            path_prefix_to_repl=sip_directory,
            original_location=file_.originallocation),
        unit_mdl=transfer_mdl)
def create_extracted_dir_uuids(
        job, transfer_mdl, output_file_path, sip_directory, file_):
    """Assign UUIDs to directories via ``Directory`` objects in the database."""
    Directory.create_many(
        _get_subdir_paths(
            job, output_file_path, sip_directory, file_.originallocation),
        transfer_mdl)
def main(transfer_path, transfer_uuid, include_dirs):
    """Assign UUIDs to all of the directories (and subdirectories, i.e., all
    unique directory paths) in the absolute system path ``transfer_path``,
    such being the root directory of the transfer with UUID
    ``transfer_uuid``. Do this only if ``include_dirs`` is ``True``.
    """
    _exit_if_not_include_dirs(include_dirs)
    Directory.create_many(
        get_dir_uuids(_get_subdir_paths(transfer_path), logger),
        _get_transfer_mdl(transfer_uuid))
    return 0
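# Illustrative sketch only: the snippets above rely on a ``_get_subdir_paths``
# helper defined elsewhere. Assuming all it needs to do here is yield every
# unique directory path under the transfer root (the keyword variant in the
# first snippet suggests the real helper also rewrites path prefixes), a
# minimal stand-in could look like this; the name is hypothetical.
import os


def _get_subdir_paths_sketch(root_path):
    """Yield the path of every directory below ``root_path`` (hypothetical
    stand-in for ``_get_subdir_paths``)."""
    for dir_path, dir_names, _ in os.walk(root_path):
        for dir_name in dir_names:
            yield os.path.join(dir_path, dir_name)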
def create_folder(request, username, repo, url="/"):
    try:
        b = request.GET["b"]
    except KeyError:
        b = 'master'
    if request.method == "POST":
        r = Repository.objects.get(name=repo, user_id=request.user.id)
        if url == "/":
            root = Directory.objects.get(name="/", repo_id=r.id, branch=b)
            d = Directory(dir_id=root.id, repo_id=r.id,
                          name=request.POST["name"],
                          path="/" + request.POST["name"] + "/",
                          branch=b)
            d.save()
            return HttpResponseRedirect(
                f"/repo/{username}/{repo}/{request.POST['name']}/")
        else:
            d = Directory.objects.get(path="/" + url + "/", branch=b)
            dir = Directory(dir_id=d.id,
                            path="/" + url + "/" + request.POST["name"] + "/",
                            repo_id=r.id, name=request.POST["name"],
                            branch=b)
            dir.save()
            # Return to the new dir.
            return HttpResponseRedirect(
                f"/repo/{username}/{repo}/{url}/{request.POST['name']}/")
    else:
        return render(request, "repo/folder.html", {
            "repo": repo,
            "username": username
        })
def fork(request, username, repo):
    user = User.objects.get(username=username)
    r = Repository.objects.get(user_id=user.id, name=repo)
    Repository(user_id=request.user.id, name=repo, description=r.description,
               status=r.status, fork=r.id).save()
    user_r = Repository.objects.get(user_id=request.user.id, name=repo,
                                    description=r.description,
                                    status=r.status)
    Branch(repo_id=user_r.id, name="master").save()
    dirs = Directory.objects.filter(repo_id=r.id)
    for d in dirs:
        if d.path == "/":
            Directory(repo_id=user_r.id, subdir=0, name=d.name, path=d.path,
                      branch=d.branch).save()
        else:
            # Derive the parent directory's path from the child's path.
            path = d.path.split("/")
            path.pop(len(path) - 1)
            path.pop(len(path) - 1)
            subdir_path = dirize(path)
            print(colored((d.path, d.path.split("/"), path, subdir_path),
                          "red"))
            subdir = Directory.objects.get(repo_id=user_r.id,
                                           path=subdir_path)
            Directory(repo_id=user_r.id, subdir=subdir.id, name=d.name,
                      path=d.path, branch=d.branch).save()
        # Copy the files of this directory into the forked repository.
        dir = Directory.objects.get(repo_id=user_r.id, path=d.path)
        files = File.objects.filter(repo_id=r.id, subdir=d.id)
        for f in files:
            File(repo_id=user_r.id, filename=f.filename, subdir=dir.id,
                 branch=f.branch, path=f.path, url=f.url).save()
    return HttpResponseRedirect(f"/repo/{request.user.username}/{repo}")
def files_in_folder(gh_access_token, path, username, repo, repo_id,
                    request=None):
    r = requests.get(
        f"https://api.github.com/repos/{username}/{repo}/contents/" + path,
        headers={"Authorization": f"token {gh_access_token}"})
    for i in r.json():
        print(colored(path + "/", "red"))
        subdir = Directory.objects.get(repo_id=repo_id, path=path + "/")
        if i["type"] == "dir":
            Directory(repo_id=repo_id, subdir=subdir.id, name=i["name"],
                      path=path + "/" + i["name"] + "/",
                      branch="master").save()
            # Recurse into the subdirectory we just recorded.
            files_in_folder(gh_access_token, path + "/" + i["name"],
                            username, repo, repo_id)
        else:
            content = requests.get(i["download_url"]).text
            File(repo_id=repo_id, filename=i["name"], subdir=subdir.id,
                 url=upload_s3(request, data=content, filename=i["name"]),
                 branch="master",
                 path=path + "/" + i["name"] + "/").save()
            print(path + "/" + i["name"])
def sync_repo(request):
    if request.method == "POST":
        Repository(user_id=request.user.id, name=request.POST["repo"],
                   description="", status=0).save()
        repo_id = Repository.objects.get(user_id=request.user.id,
                                         name=request.POST["repo"]).id
        Directory(repo_id=repo_id, subdir=0, name="", path="/",
                  branch="master").save()
        print(repo_id)
        files_in_folder(request.session["github_access_token"], "",
                        request.POST["username"], request.POST["repo"],
                        repo_id)
        return HttpResponseRedirect(
            f"/repo/{request.user.username}/{request.POST['repo']}")
    else:
        # r = requests.get("https://api.github.com/user/repos", headers={
        #     "Authorization": f"token {request.session['github_access_token']}",
        #     # "Accept": "application/vnd.github.v3+json",
        # })
        # return JsonResponse(r.json(), safe=False)
        return render(request, "github/sync-repo.html")
def upload(r, b, url):
    """
    r: Repository object
    b: Branch name
    url: pure folder path for the file

    return subdir id
    """
    path = "/"
    subdir = Directory.objects.get(repo_id=r.id, path="/", branch=b).id
    url = url.split("/")
    for i in url:
        if i == "":
            break
        else:
            path += f"{i}/"
            try:
                d = Directory.objects.get(repo_id=r.id, path=path, branch=b)
                print(colored(path + " exist", "red"))
            except Directory.DoesNotExist:
                Directory(repo_id=r.id, subdir=subdir, name=i, path=path,
                          branch=b).save()
                print(colored(path + " doesn't exist.", "blue"))
            try:
                subdir = Directory.objects.get(repo_id=r.id, path=path,
                                               branch=b).id
                print(colored(path + " is the new subdir", "magenta"))
            except Exception as e:
                # Multiple rows matched: drop the last duplicate and use the
                # remaining row. (QuerySets don't support negative-step
                # slicing, so use last() instead of d[::-1][0].)
                d = Directory.objects.filter(repo_id=r.id, path=path,
                                             branch=b)
                d.last().delete()
                d = Directory.objects.get(repo_id=r.id, path=path, branch=b)
                subdir = d.id
    return subdir
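# Hedged usage sketch for the helper above: it assumes the same Repository,
# Directory and File models used in these snippets and a URL already returned
# by upload_s3(). ``save_uploaded_file`` and its arguments are hypothetical.
def save_uploaded_file(r, b, folder_path, filename, f_url):
    """Resolve/create the folder chain, then record the uploaded file."""
    subdir_id = upload(r, b, folder_path)
    # Paths in these snippets are stored with leading and trailing slashes.
    folder = folder_path.strip("/")
    path = "/" + folder + "/" if folder else "/"
    File(repo_id=r.id, filename=filename, subdir=subdir_id, url=f_url,
         branch=b, path=path + filename + "/").save()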
def upload(request, username, repo):
    try:
        b = request.GET["b"]
    except KeyError:
        b = "master"
    user = User.objects.get(username=username)
    r = Repository.objects.get(name=repo, user_id=user.id)
    if request.method == "POST":
        url = request.POST["path"][:-1].split("/")
        path = "/"
        subdir = Directory.objects.get(repo_id=r.id, path="/", branch=b).id
        f_url = upload_s3(request)
        for i in url:
            print(i)
            if i == "":
                break
            else:
                path += f"{i}/"
                try:
                    d = Directory.objects.get(repo_id=r.id, path=path,
                                              branch=b)
                    print(colored(path + " exist", "red"))
                except Directory.DoesNotExist:
                    Directory(repo_id=r.id, subdir=subdir, name=i, path=path,
                              branch=b).save()
                    print(colored(path + " doesn't exist.", "blue"))
                try:
                    subdir = Directory.objects.get(repo_id=r.id, path=path,
                                                   branch=b).id
                    print(colored(path + " is the new subdir", "magenta"))
                except Exception as e:
                    # Multiple rows matched: drop the last duplicate and use
                    # the remaining row.
                    d = Directory.objects.filter(repo_id=r.id, path=path,
                                                 branch=b)
                    d.last().delete()
                    d = Directory.objects.get(repo_id=r.id, path=path,
                                              branch=b)
                    subdir = d.id
        File(repo_id=r.id, filename=request.FILES["file"].name,
             subdir=subdir, url=f_url, branch=b,
             path=path + request.FILES["file"].name + "/").save()
        try:
            c = Commit.objects.get(commit_id=request.POST["commit_id"],
                                   branch=b)
        except Commit.DoesNotExist:
            Commit(commit_id=request.POST["commit_id"], repo_id=r.id,
                   user_id=request.user.id,
                   message=request.POST["message"], branch=b).save()
            c = Commit.objects.get(commit_id=request.POST["commit_id"],
                                   branch=b)
        f = File.objects.get(repo_id=r.id,
                             filename=request.FILES["file"].name,
                             subdir=subdir, url=f_url, branch=b,
                             path=path + request.FILES["file"].name + "/")
        Commit_File(commit_id=c.commit_id, url=f.url, path=f.path).save()
        return JsonResponse({"data": "success"})
    else:
        return render(request, "repo/upload.html", {
            "repo": repo,
            "username": username,
            "commit_id": str(uuid.uuid4()),
        })
def main(transfer_uuid, sip_directory, date, task_uuid, delete=False):
    files = File.objects.filter(transfer=transfer_uuid,
                                removedtime__isnull=True)
    if not files:
        print('No files found for transfer: ', transfer_uuid)
    transfer_mdl = Transfer.objects.get(uuid=transfer_uuid)

    # We track whether or not anything was extracted because that controls
    # what the next microservice chain link will be.
    # If something was extracted, then a new identification step has to be
    # kicked off on those files; otherwise, we can go ahead with the transfer.
    extracted = False

    for file_ in files:
        try:
            format_id = FileFormatVersion.objects.get(file_uuid=file_.uuid)
        # Can't do anything if the file wasn't identified in the previous step
        except FileFormatVersion.DoesNotExist:
            print('Not extracting contents from',
                  os.path.basename(file_.currentlocation),
                  ' - file format not identified', file=sys.stderr)
            continue
        if format_id.format_version is None:
            print('Not extracting contents from',
                  os.path.basename(file_.currentlocation),
                  ' - file format not identified', file=sys.stderr)
            continue

        # Extraction commands are defined in the FPR just like normalization
        # commands
        try:
            command = FPCommand.active.get(
                fprule__format=format_id.format_version,
                fprule__purpose='extract',
                fprule__enabled=True,
            )
        except FPCommand.DoesNotExist:
            print('Not extracting contents from',
                  os.path.basename(file_.currentlocation),
                  ' - No rule found to extract', file=sys.stderr)
            continue

        # Check if file has already been extracted
        if already_extracted(file_):
            print('Not extracting contents from',
                  os.path.basename(file_.currentlocation),
                  ' - extraction already happened.', file=sys.stderr)
            continue

        file_path = file_.currentlocation.replace('%transferDirectory%',
                                                  sip_directory)

        if command.script_type in ('command', 'bashScript'):
            args = []
            command_to_execute = command.command.replace('%inputFile%',
                                                         file_path)
            command_to_execute = command_to_execute.replace(
                '%outputDirectory%', output_directory(file_path, date))
        else:
            command_to_execute = command.command
            args = [file_path, output_directory(file_path, date)]

        exitstatus, stdout, stderr = executeOrRun(command.script_type,
                                                  command_to_execute,
                                                  arguments=args,
                                                  printing=True)

        if exitstatus != 0:
            # Dang, looks like the extraction failed
            print('Command', command.description, 'failed!', file=sys.stderr)
        else:
            extracted = True
            print('Extracted contents from', os.path.basename(file_path))

            # Assign UUIDs and insert them into the database, so the newly-
            # extracted files are properly tracked by Archivematica
            extracted_path = output_directory(file_path, date)
            for extracted_file in tree(extracted_path):
                assign_uuid(extracted_file, file_.uuid, transfer_uuid, date,
                            task_uuid, sip_directory, file_.currentlocation)

            # Assign UUIDs to directories via ``Directory`` objects in the
            # database.
            if transfer_mdl.diruuids:
                Directory.create_many(
                    get_dir_uuids(
                        _get_subdir_paths(extracted_path, sip_directory),
                        logger),
                    transfer_mdl)

        # We may want to remove the original package file after extracting
        # its contents
        if delete:
            delete_and_record_package_file(file_path, file_.uuid,
                                           file_.currentlocation)

    if extracted:
        return 0
    else:
        return -1
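# The extraction loop above iterates ``tree(extracted_path)``, a helper
# defined elsewhere that is assumed to yield every file beneath the
# extraction directory. A minimal, purely illustrative walker with that
# behaviour (the name ``tree_sketch`` is hypothetical):
import os


def tree_sketch(root):
    """Yield the full path of every file below ``root`` via ``os.walk``."""
    for dir_path, _, file_names in os.walk(root):
        for file_name in file_names:
            yield os.path.join(dir_path, file_name)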