Example #1
    def startbulkdelete(self, request):
        if not isinstance(request.data, list):
            raise serializers.ValidationError(
                "POST data must be a list of task descriptions")

        tasks = map(partial(validate_deletion_task, request), request.data)

        job_ids = []

        for task in tasks:
            task.update({"type": "DELETECHANNEL"})
            if task["node_ids"] or task["exclude_node_ids"]:
                task["file_size"] = None
                task["total_resources"] = None
            delete_job_id = queue.enqueue(
                call_command,
                "deletecontent",
                task["channel_id"],
                track_progress=True,
                extra_metadata=task,
            )
            job_ids.append(delete_job_id)

        resp = [
            _job_to_response(queue.fetch_job(job_id)) for job_id in job_ids
        ]

        return Response(resp)
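
Judging from the keys the loop reads (channel_id, node_ids, exclude_node_ids), the request body here is a JSON list of task descriptions. A plausible payload, with illustrative values that are not from the source:

    # Hypothetical request body for startbulkdelete; the keys mirror what the
    # handler reads, the values are made up for illustration.
    payload = [
        {
            "channel_id": "95a52b386f2c485cb2f6da9b56f62dff",
            "node_ids": [],
            "exclude_node_ids": [],
        },
    ]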
Example #2
    def startdiskbulkimport(self, request):
        if not isinstance(request.data, list):
            raise serializers.ValidationError(
                "POST data must be a list of task descriptions")

        tasks = map(partial(validate_local_import_task, request), request.data)

        job_ids = []

        for task in tasks:
            task.update({"type": "DISKIMPORT"})
            import_job_id = queue.enqueue(
                _diskimport,
                task["channel_id"],
                task["datafolder"],
                drive_id=task["drive_id"],
                extra_metadata=task,
                track_progress=True,
                cancellable=True,
            )
            job_ids.append(import_job_id)

        resp = [
            _job_to_response(queue.fetch_job(job_id)) for job_id in job_ids
        ]

        return Response(resp)
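
A side note on the map(partial(...)) idiom these bulk endpoints share: in Python 3, map returns a lazy iterator, so each task is only validated when the for loop reaches it. The eager equivalent, written as a list comprehension:

    # Eager equivalent of map(partial(validate_local_import_task, request), request.data):
    tasks = [
        validate_local_import_task(request, task_data)
        for task_data in request.data
    ]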
Example #3
    def startremotebulkimport(self, request):
        if not isinstance(request.data, list):
            raise serializers.ValidationError(
                "POST data must be a list of task descriptions")

        tasks = map(partial(validate_remote_import_task, request),
                    request.data)

        job_ids = []

        for task in tasks:
            task.update({"type": "REMOTEIMPORT"})
            import_job_id = queue.enqueue(
                _remoteimport,
                task["channel_id"],
                task["baseurl"],
                peer_id=task["peer_id"],
                extra_metadata=task,
                cancellable=True,
            )
            job_ids.append(import_job_id)

        resp = [
            _job_to_response(queue.fetch_job(job_id)) for job_id in job_ids
        ]

        return Response(resp)
Example #4
    def retrieve(self, request, pk=None):
        try:
            task = _job_to_response(queue.fetch_job(pk))
            return Response(task)
        except JobNotFound:
            try:
                # Fall back to the priority queue; without this return the
                # function would silently return None on success here.
                task = _job_to_response(priority_queue.fetch_job(pk))
                return Response(task)
            except JobNotFound:
                raise Http404("Task with {pk} not found".format(pk=pk))
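
The fallback across the two queues can also be expressed as a single loop. This is a refactoring sketch only; the helper name is invented and does not appear in the source:

    def _fetch_from_any_queue(pk):
        # Hypothetical helper: try the regular queue first, then the
        # priority queue, and raise 404 if neither knows the job.
        for q in (queue, priority_queue):
            try:
                return _job_to_response(q.fetch_job(pk))
            except JobNotFound:
                pass
        raise Http404("Task with {pk} not found".format(pk=pk))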
Example #5
    def startdiskchannelimport(self, request):
        task = validate_local_import_task(request, request.data)

        task.update({"type": "DISKCHANNELIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importchannel",
            "disk",
            task["channel_id"],
            task["datafolder"],
            extra_metadata=task,
            cancellable=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))
        return Response(resp)
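
The enqueued callable here is Django's call_command, so the worker effectively runs the importchannel management command in the background; the queue-level kwargs such as extra_metadata, track_progress and cancellable are presumably consumed by the queue itself rather than forwarded to the command. The synchronous equivalent would be:

    from django.core.management import call_command

    # Run the same management command in-process instead of via the queue.
    call_command("importchannel", "disk", task["channel_id"], task["datafolder"])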
Example #6
    def startremotechannelimport(self, request):

        task = validate_remote_import_task(request, request.data)

        task.update({"type": "REMOTECHANNELIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importchannel",
            "network",
            task["channel_id"],
            baseurl=task["baseurl"],
            extra_metadata=task,
            cancellable=True,
        )
        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #7
    def startexportlogcsv(self, request):
        """
        Dumps in csv format the required logs.
        By default it will be dump contentsummarylog.

        :param: logtype: Kind of log to dump, summary or session
        :returns: An object with the job information

        """
        csv_export_filenames = {
            "session": "content_session_logs.csv",
            "summary": "content_summary_logs.csv",
        }
        log_type = request.data.get("logtype", "summary")
        if log_type in csv_export_filenames:
            logs_dir = os.path.join(conf.KOLIBRI_HOME, "log_export")
            filepath = os.path.join(logs_dir, csv_export_filenames[log_type])
        else:
            raise Http404(
                "Impossible to create a csv export file for {}".format(
                    log_type))
        if not os.path.isdir(logs_dir):
            os.mkdir(logs_dir)

        job_type = ("EXPORTSUMMARYLOGCSV"
                    if log_type == "summary" else "EXPORTSESSIONLOGCSV")

        job_metadata = {"type": job_type, "started_by": request.user.pk}

        job_id = queue.enqueue(
            call_command,
            "exportlogs",
            log_type=log_type,
            output_file=filepath,
            overwrite="true",
            extra_metadata=job_metadata,
            track_progress=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
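
The only input this endpoint takes is the optional logtype key; everything else is derived from it. For example (the KOLIBRI_HOME location varies per install):

    # Hypothetical request body and the export path the handler derives from it:
    request_body = {"logtype": "session"}
    # -> os.path.join(conf.KOLIBRI_HOME, "log_export", "content_session_logs.csv")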
Example #8
    def startdiskcontentimport(self, request):
        task = validate_local_import_task(request, request.data)

        task.update({"type": "DISKCONTENTIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importcontent",
            "disk",
            task["channel_id"],
            task["datafolder"],
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            extra_metadata=task,
            track_progress=True,
            cancellable=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #9
    def startremotecontentimport(self, request):

        task = validate_remote_import_task(request, request.data)
        task.update({"type": "REMOTECONTENTIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importcontent",
            "network",
            task["channel_id"],
            baseurl=task["baseurl"],
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            extra_metadata=task,
            track_progress=True,
            cancellable=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #10
    def startchannelupdate(self, request):

        sourcetype = request.data.pop("sourcetype", None)
        new_version = request.data.pop("new_version", None)

        if sourcetype == "remote":
            task = validate_remote_import_task(request, request.data)
            task.update({"type": "UPDATECHANNEL", "new_version": new_version})
            job_id = queue.enqueue(
                _remoteimport,
                task["channel_id"],
                task["baseurl"],
                peer_id=task["peer_id"],
                node_ids=task["node_ids"],
                is_updating=True,
                extra_metadata=task,
                track_progress=True,
                cancellable=True,
            )
        elif sourcetype == "local":
            task = validate_local_import_task(request, request.data)
            task.update({"type": "UPDATECHANNEL", "new_version": new_version})
            job_id = queue.enqueue(
                _diskimport,
                task["channel_id"],
                task["datafolder"],
                drive_id=task["drive_id"],
                node_ids=task["node_ids"],
                is_updating=True,
                extra_metadata=task,
                track_progress=True,
                cancellable=True,
            )
        else:
            raise serializers.ValidationError(
                "sourcetype must be 'remote' or 'local'")

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
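
Combining the two branches, the request body must carry sourcetype plus whatever fields the chosen validator expects. Plausible payloads, with illustrative values that are not from the source:

    # Hypothetical payloads for startchannelupdate; values are made up.
    remote_update = {
        "sourcetype": "remote",
        "new_version": 5,
        "channel_id": "95a52b386f2c485cb2f6da9b56f62dff",
        "baseurl": "http://example-content-server",
        "peer_id": None,
        "node_ids": [],
    }
    local_update = {
        "sourcetype": "local",
        "new_version": 5,
        "channel_id": "95a52b386f2c485cb2f6da9b56f62dff",
        "datafolder": "/path/to/KOLIBRI_DATA",
        "drive_id": "abc123",
        "node_ids": [],
    }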
Example #11
    def startdataportalsync(self, request):
        """
        Initiate a PUSH sync with Kolibri Data Portal.
        """
        task = {
            "facility": request.data["facility"],
            "type": "SYNCDATAPORTAL",
            "started_by": request.user.pk,
        }

        job_id = queue.enqueue(
            call_command,
            "sync",
            facility=task["facility"],
            noninteractive=True,
            extra_metadata=task,
            track_progress=False,
            cancellable=False,
        )
        # attempt to get the created Task, otherwise return pending status
        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #12
    def startdiskexport(self, request):
        """
        Export a channel to a local drive, and copy content to the drive.
        """

        task = validate_local_export_task(request, request.data)

        task.update({"type": "DISKCONTENTEXPORT"})

        task_id = queue.enqueue(
            _localexport,
            task["channel_id"],
            task["drive_id"],
            track_progress=True,
            cancellable=True,
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            extra_metadata=task,
        )

        # attempt to get the created Task, otherwise return pending status
        resp = _job_to_response(queue.fetch_job(task_id))

        return Response(resp)
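
All of these endpoints finish by passing the fetched job through _job_to_response. The helper itself is not shown in these excerpts; given how it is used, it presumably flattens the job's state and metadata into a serializable dict. A minimal sketch, where the attribute names on the job object (job_id, state, percentage_progress, cancellable, extra_metadata) are assumptions rather than confirmed API:

    def _job_to_response(job):
        # Sketch only: every attribute name here is an assumption.
        output = {
            "id": job.job_id,
            "status": job.state,
            "percentage": job.percentage_progress,
            "cancellable": job.cancellable,
        }
        # Fold in whatever the enqueue call stashed via extra_metadata.
        output.update(job.extra_metadata)
        return output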