def retrieve(self, request, pk, *args, **kwargs):
    """Export a project dump, either asynchronously (Celery) or inline.

    Returns 202 with the task id when Celery is enabled; otherwise the
    dump is rendered into default_storage and 200 is returned with its URL.
    """
    # Rate-limit dump generation before doing any work.
    rate_limiter = throttling.ImportDumpModeRateThrottle()
    if not rate_limiter.allow_request(request, self):
        self.throttled(request, rate_limiter.wait())

    project = get_object_or_404(self.get_queryset(), pk=pk)
    self.check_permissions(request, 'export_project', project)

    if settings.CELERY_ENABLED:
        # Queue the dump and schedule removal of the file after the TTL.
        task = tasks.dump_project.delay(request.user, project)
        tasks.delete_project_dump.apply_async((project.pk, project.slug),
                                              countdown=settings.EXPORTS_TTL)
        return response.Accepted({"export_id": task.id})

    # Synchronous path: render the dump to storage and hand back its URL.
    export_path = "exports/{}/{}-{}.json".format(project.pk, project.slug,
                                                 uuid.uuid4().hex)
    rendered = ExportRenderer().render(service.project_to_dict(project),
                                       renderer_context={"indent": 4})
    default_storage.save(export_path, ContentFile(rendered.decode('utf-8')))
    return response.Ok({"url": default_storage.url(export_path)})
def import_project(self, request, *args, **kwargs):
    """Kick off a Pivotal project import (async via Celery when enabled)."""
    self.check_permissions(request, "import_project", None)

    token = request.DATA.get('token', None)
    external_id = request.DATA.get('project', None)
    if not external_id:
        raise exc.WrongArguments(_("The project param is needed"))

    import_options = {
        "template": request.DATA.get('template', "kanban"),
        "users_bindings": request.DATA.get("users_bindings", {}),
        "keep_external_reference": request.DATA.get("keep_external_reference", False),
        "is_private": request.DATA.get("is_private", False),
    }

    if settings.CELERY_ENABLED:
        task = tasks.import_project.delay(request.user.id, token, external_id,
                                          import_options)
        return response.Accepted({"pivotal_import_id": task.id})

    # Synchronous import: run the importer inline and summarise the result.
    importer = PivotalImporter(request.user, token)
    project = importer.import_project(external_id, import_options)
    return response.Ok({
        "slug": project.slug,
        "my_permissions": ["view_us"],
        "is_backlog_activated": project.is_backlog_activated,
        "is_kanban_activated": project.is_kanban_activated,
    })
def retrieve(self, request, pk, *args, **kwargs):
    """Export a project dump in "plain" (JSON) or "gzip" format.

    Returns 202 with the task id when Celery is enabled; otherwise the
    dump is rendered into default_storage and 200 is returned with its URL.
    """
    throttle = throttling.ImportDumpModeRateThrottle()
    if not throttle.allow_request(request, self):
        self.throttled(request, throttle.wait())

    project = get_object_or_404(self.get_queryset(), pk=pk)
    self.check_permissions(request, 'export_project', project)

    dump_format = request.QUERY_PARAMS.get("dump_format", "plain")

    if settings.CELERY_ENABLED:
        task = tasks.dump_project.delay(request.user, project, dump_format)
        # Schedule removal of the generated file once the TTL expires.
        tasks.delete_project_dump.apply_async(
            (project.pk, project.slug, task.id, dump_format),
            countdown=settings.EXPORTS_TTL)
        return response.Accepted({"export_id": task.id})

    if dump_format == "gzip":
        path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug,
                                                 uuid.uuid4().hex)
        with default_storage.open(path, mode="wb") as outfile:
            # BUGFIX: close the GzipFile so the gzip trailer (CRC/size) is
            # flushed; the previous code never closed it, leaving the stored
            # archive truncated/corrupt.
            with gzip.GzipFile(fileobj=outfile, mode="wb") as gzfile:
                services.render_project(project, gzfile)
    else:
        path = "exports/{}/{}-{}.json".format(project.pk, project.slug,
                                              uuid.uuid4().hex)
        with default_storage.open(path, mode="wb") as outfile:
            services.render_project(project, outfile)

    return response.Ok({"url": default_storage.url(path)})
def load_dump(self, request):
    """Import a project from an uploaded JSON dump file.

    Raises WrongArguments when the file is missing or not valid JSON.
    Returns 202 (Celery) or 201 with the created project data.
    """
    throttle = throttling.ImportDumpModeRateThrottle()
    if not throttle.allow_request(request, self):
        self.throttled(request, throttle.wait())

    self.check_permissions(request, "load_dump", None)

    dump = request.FILES.get('dump', None)
    if not dump:
        raise exc.WrongArguments(_("Needed dump file"))

    reader = codecs.getreader("utf-8")
    try:
        dump = json.load(reader(dump))
    except Exception:
        raise exc.WrongArguments(_("Invalid dump format"))

    # BUGFIX: a dump without a "slug" key used to raise an unhandled
    # KeyError here; tolerate its absence and only drop the slug when it
    # collides with an existing project (a fresh slug is generated later).
    slug = dump.get('slug', None)
    if slug is not None and Project.objects.filter(slug=slug).exists():
        del dump['slug']

    if settings.CELERY_ENABLED:
        task = tasks.load_project_dump.delay(request.user, dump)
        return response.Accepted({"import_id": task.id})

    project = dump_service.dict_to_project(dump, request.user.email)
    response_data = ProjectSerializer(project).data
    return response.Created(response_data)
def load_dump(self, request):
    """Import a project from an uploaded JSON dump, enforcing the user's
    project/member slot limits before doing any work.
    """
    throttle = throttling.ImportDumpModeRateThrottle()
    if not throttle.allow_request(request, self):
        self.throttled(request, throttle.wait())

    self.check_permissions(request, "load_dump", None)

    dump = request.FILES.get('dump', None)
    if not dump:
        raise exc.WrongArguments(_("Needed dump file"))

    reader = codecs.getreader("utf-8")
    try:
        dump = json.load(reader(dump))
        is_private = dump.get("is_private", False)
    except Exception:
        raise exc.WrongArguments(_("Invalid dump format"))

    user = request.user

    # Cheap early rejection: does the user have room for one more project?
    (enough_slots, not_enough_slots_error) = users_service.has_available_slot_for_project(
        user,
        project=Project(is_private=is_private, id=None)
    )
    if not enough_slots:
        raise exc.BadRequest(not_enough_slots_error)

    # BUGFIX: a dump without a "slug" key used to raise an unhandled
    # KeyError here; tolerate its absence and only drop the slug when it
    # collides with an existing project (a fresh slug is generated later).
    slug = dump.get('slug', None)
    if slug is not None and Project.objects.filter(slug=slug).exists():
        del dump['slug']

    # Re-check against the membership count actually declared by the dump.
    members = len(dump.get("memberships", []))
    (enough_slots, not_enough_slots_error) = users_service.has_available_slot_for_project(
        user,
        project=Project(is_private=is_private, id=None),
        members=max(members, 1)
    )
    if not enough_slots:
        raise exc.BadRequest(not_enough_slots_error)

    if settings.CELERY_ENABLED:
        task = tasks.load_project_dump.delay(user, dump)
        return response.Accepted({"import_id": task.id})

    project = dump_service.dict_to_project(dump, request.user)
    response_data = ProjectSerializer(project).data
    return response.Created(response_data)
def load_dump(self, request):
    """Import a project from an uploaded JSON dump file.

    Validates the dump, releases a colliding slug, and rejects the import
    when the requesting user has no free slot for a project of this size.
    """
    rate_limiter = throttling.ImportDumpModeRateThrottle()
    if not rate_limiter.allow_request(request, self):
        self.throttled(request, rate_limiter.wait())

    self.check_permissions(request, "load_dump", None)

    uploaded = request.FILES.get('dump', None)
    if not uploaded:
        raise exc.WrongArguments(_("Needed dump file"))

    utf8_reader = codecs.getreader("utf-8")
    try:
        dump = json.load(utf8_reader(uploaded))
    except Exception:
        raise exc.WrongArguments(_("Invalid dump format"))

    # Drop the slug when it is already taken so a fresh one gets generated.
    slug = dump.get('slug', None)
    if slug is not None and Project.objects.filter(slug=slug).exists():
        del dump['slug']

    user = request.user
    dump['owner'] = user.email

    # Validate if the project can be imported
    is_private = dump.get("is_private", False)
    non_owner_members = [m for m in dump.get("memberships", [])
                         if m.get("email", None) != dump["owner"]]
    total_memberships = len(non_owner_members) + 1  # 1 is the owner
    (enough_slots, error_message) = users_service.has_available_slot_for_import_new_project(
        user, is_private, total_memberships)
    if not enough_slots:
        raise exc.NotEnoughSlotsForProject(is_private, total_memberships,
                                           error_message)

    if settings.CELERY_ENABLED:
        task = tasks.load_project_dump.delay(user, dump)
        return response.Accepted({"import_id": task.id})

    project = dump_service.dict_to_project(dump, request.user)
    return response.Created(ProjectSerializer(project).data)
def import_project(self, request, *args, **kwargs):
    """Kick off a GitHub project import (async via Celery when enabled)."""
    self.check_permissions(request, "import_project", None)

    token = request.DATA.get('token', None)
    external_id = request.DATA.get('project', None)
    if not external_id:
        raise exc.WrongArguments(_("The project param is needed"))

    # The "issues" template is imported as a scrum project whose items
    # are issues instead of user stories.
    template = request.DATA.get('template', "scrum")
    if template == "issues":
        items_type = "issues"
        template = "scrum"
    else:
        items_type = "user_stories"

    options = {
        "name": request.DATA.get('name', None),
        "description": request.DATA.get('description', None),
        "template": template,
        "type": items_type,
        "users_bindings": resolve_users_bindings(request.DATA.get("users_bindings", {})),
        "keep_external_reference": request.DATA.get("keep_external_reference", False),
        "is_private": request.DATA.get("is_private", False),
    }

    if settings.CELERY_ENABLED:
        task = tasks.import_project.delay(request.user.id, token, external_id,
                                          options)
        return response.Accepted({"task_id": task.id})

    # Synchronous import: run the importer inline and summarise the result.
    importer = GithubImporter(request.user, token)
    project = importer.import_project(external_id, options)
    return response.Ok({
        "slug": project.slug,
        "my_permissions": ["view_us"],
        "is_backlog_activated": project.is_backlog_activated,
        "is_kanban_activated": project.is_kanban_activated,
    })
def load_dump(self, request):
    """Import a project from an uploaded dump (plain or gzipped JSON)."""
    rate_limiter = throttling.ImportDumpModeRateThrottle()
    if not rate_limiter.allow_request(request, self):
        self.throttled(request, rate_limiter.wait())

    self.check_permissions(request, "load_dump", None)

    uploaded = request.FILES.get('dump', None)
    if not uploaded:
        raise exc.WrongArguments(_("Needed dump file"))

    # Transparently unpack gzip-compressed dumps.
    if uploaded.content_type == "application/gzip":
        uploaded = gzip.GzipFile(fileobj=uploaded)

    utf8_reader = codecs.getreader("utf-8")
    try:
        dump = json.load(utf8_reader(uploaded))
    except Exception:
        raise exc.WrongArguments(_("Invalid dump format"))

    # Drop the slug when it is already taken so a fresh one gets generated.
    slug = dump.get('slug', None)
    if slug is not None and Project.objects.filter(slug=slug).exists():
        del dump['slug']

    user = request.user
    dump['owner'] = user.email

    # Validate if the project can be imported
    is_private = dump.get("is_private", False)
    total_memberships = 1 + sum(  # 1 is the owner
        1 for m in dump.get("memberships", [])
        if m.get("email", None) != dump["owner"]
    )
    (enough_slots, error_message) = users_services.has_available_slot_for_new_project(
        user, is_private, total_memberships)
    if not enough_slots:
        raise exc.NotEnoughSlotsForProject(is_private, total_memberships,
                                           error_message)

    # Async mode
    if settings.CELERY_ENABLED:
        task = tasks.load_project_dump.delay(user, dump)
        return response.Accepted({"import_id": task.id})

    # Sync mode
    try:
        project = services.store_project_from_dict(dump, request.user)
    except err.TaigaImportError as e:
        # On Error: remove any partially created project before reporting.
        if e.project:
            e.project.delete_related_content()
            e.project.delete()
        return response.BadRequest({
            "error": e.message,
            "details": e.errors
        })

    # On Success
    project_from_qs = project_utils.attach_extra_info(
        Project.objects.all()).get(id=project.id)
    return response.Created(ProjectSerializer(project_from_qs).data)
def import_project(self, request, *args, **kwargs):
    """Kick off a Jira project import (normal or agile importer).

    BUGFIX/reconstruction: the issue-type classification loop uses
    ``project_type``, which only exists for the normal importer; it (and
    the ``types_bindings`` option) must live inside the ``else`` branch.
    A flat layout would raise NameError for ``importer_type == "agile"``.
    """
    self.check_permissions(request, "import_project", None)

    url = request.DATA.get("url", None)
    token = self._get_token(request)
    project_id = request.DATA.get("project", None)
    if not project_id:
        raise exc.WrongArguments(_("The project param is needed"))
    if not url:
        raise exc.WrongArguments(_("The url param is needed"))

    options = {
        "name": request.DATA.get("name", None),
        "description": request.DATA.get("description", None),
        "users_bindings": resolve_users_bindings(
            request.DATA.get("users_bindings", {})
        ),
        "keep_external_reference": request.DATA.get(
            "keep_external_reference", False
        ),
        "is_private": request.DATA.get("is_private", False),
    }

    importer_type = request.DATA.get("importer_type", "normal")
    if importer_type == "agile":
        importer = JiraAgileImporter(request.user, url, token)
    else:
        project_type = request.DATA.get("project_type", "scrum")
        if project_type == "kanban":
            options["template"] = "kanban"
        else:
            options["template"] = "scrum"
        importer = JiraNormalImporter(request.user, url, token)

        # Map each remote Jira issue type onto a Taiga item kind.
        types_bindings = {
            "epic": [],
            "us": [],
            "task": [],
            "issue": [],
        }
        for issue_type in importer.list_issue_types(project_id):
            if project_type in ["scrum", "kanban"]:
                # Set the type bindings
                if issue_type["subtask"]:
                    types_bindings["task"].append(issue_type)
                elif issue_type["name"].upper() == "EPIC":
                    types_bindings["epic"].append(issue_type)
                elif issue_type["name"].upper() in [
                    "US",
                    "USERSTORY",
                    "USER STORY",
                ]:
                    types_bindings["us"].append(issue_type)
                elif issue_type["name"].upper() in ["ISSUE", "BUG", "ENHANCEMENT"]:
                    types_bindings["issue"].append(issue_type)
                else:
                    types_bindings["us"].append(issue_type)
            elif project_type == "issues":
                # Set the type bindings (subtasks are skipped entirely).
                if issue_type["subtask"]:
                    continue
                types_bindings["issue"].append(issue_type)
            elif project_type == "issues-with-subissues":
                types_bindings["issue"].append(issue_type)
            else:
                raise exc.WrongArguments(
                    _("Invalid project_type {}").format(project_type)
                )
        options["types_bindings"] = types_bindings

    if settings.CELERY_ENABLED:
        task = tasks.import_project.delay(
            request.user.id, url, token, project_id, options, importer_type
        )
        return response.Accepted({"task_id": task.id})

    project = importer.import_project(project_id, options)
    project_data = {
        "slug": project.slug,
        "my_permissions": ["view_us"],
        "is_backlog_activated": project.is_backlog_activated,
        "is_kanban_activated": project.is_kanban_activated,
    }
    return response.Ok(project_data)