Example #1
def begin_request_soud_sync(server, user):
    """
    Enqueue a task to request this SoUD to be
    synced with a server
    """
    info = get_device_info()
    if not info["subset_of_users_device"]:
        # this does not make sense unless this is a SoUD
        logger.warning("Only Subsets of Users Devices can do this")
        return
    queue.enqueue(request_soud_sync, server, user)
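A minimal usage sketch; both values below are hypothetical placeholders, not taken from the source.

# Hypothetical usage: the server address and the user id are placeholders.
# On a Subset of Users Device this enqueues request_soud_sync; on any
# other device it only logs a warning and returns.
begin_request_soud_sync("https://sync-server.example.com", "<user_id>")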
Example #2
    def startdiskexport(self, request):
        """
        Export a channel to a local drive, and copy content to the drive.

        """

        task = validate_local_export_task(request, request.data)

        task.update({"type": "DISKCONTENTEXPORT"})

        task_id = queue.enqueue(
            _localexport,
            task["channel_id"],
            task["drive_id"],
            track_progress=True,
            cancellable=True,
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            extra_metadata=task,
        )

        # attempt to get the created Task, otherwise return pending status
        resp = _job_to_response(queue.fetch_job(task_id))

        return Response(resp)
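A hedged sketch of a client call to this action; the endpoint path is an assumption, since the URL routing is not shown here, and both ids are placeholders.

import requests

# Hypothetical client request: the route and the ids are placeholders.
# Only the fields the export handler reads are included.
resp = requests.post(
    "http://localhost:8080/api/tasks/startdiskexport/",
    json={
        "channel_id": "<channel_id>",
        "drive_id": "<drive_id>",
        "node_ids": [],
        "exclude_node_ids": [],
    },
)
print(resp.json())  # the serialized job, or a pending status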
Example #3
    def startdiskbulkexport(self, request):
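        """
        Export multiple channels and their content to a local drive,
        one task per entry in the POSTed list.
        """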
        if not isinstance(request.data, list):
            raise serializers.ValidationError(
                "POST data must be a list of task descriptions")

        tasks = map(partial(validate_local_export_task, request), request.data)

        job_ids = []

        for task in tasks:
            task.update({"type": "DISKEXPORT"})
            export_job_id = queue.enqueue(
                _localexport,
                task["channel_id"],
                task["drive_id"],
                track_progress=True,
                cancellable=True,
                extra_metadata=task,
            )
            job_ids.append(export_job_id)

        resp = [
            _job_to_response(queue.fetch_job(job_id)) for job_id in job_ids
        ]

        return Response(resp)
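The bulk variant expects the POST body to be a JSON list; a sketch of a plausible payload, with placeholder ids.

# Hypothetical bulk payload: one task description per export.
# A body that is not a list is rejected by the isinstance check above.
payload = [
    {"channel_id": "<channel_id_1>", "drive_id": "<drive_id>"},
    {"channel_id": "<channel_id_2>", "drive_id": "<drive_id>"},
]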
Example #4
    def startdeletechannel(self, request):
        """
        Delete a channel and all its associated content from the server
        """
        task = validate_deletion_task(request, request.data)

        task.update({"type": "DELETECONTENT"})

        if task["node_ids"] or task["exclude_node_ids"]:
            task["file_size"] = None
            task["total_resources"] = None

        task_id = queue.enqueue(
            call_command,
            "deletecontent",
            task["channel_id"],
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            force_delete=task["force_delete"],
            track_progress=True,
            extra_metadata=task,
        )

        # attempt to get the created Task, otherwise return pending status
        resp = _job_to_response(queue.fetch_job(task_id))

        return Response(resp)
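A sketch of the two payload shapes this handler distinguishes; once specific nodes are named, the precomputed channel totals no longer describe the job, which is why file_size and total_resources are nulled. All ids are placeholders.

# Hypothetical payloads illustrating the branch above.
full_delete = {
    "channel_id": "<channel_id>",
    "node_ids": [],
    "exclude_node_ids": [],
    "force_delete": False,
}
partial_delete = {
    "channel_id": "<channel_id>",
    "node_ids": ["<node_id>"],  # triggers file_size/total_resources = None
    "exclude_node_ids": [],
    "force_delete": False,
}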
Example #5
    def startbulkdelete(self, request):
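        """
        Delete multiple channels and their content,
        one task per entry in the POSTed list.
        """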
        if not isinstance(request.data, list):
            raise serializers.ValidationError(
                "POST data must be a list of task descriptions")

        tasks = map(partial(validate_deletion_task, request), request.data)

        job_ids = []

        for task in tasks:
            task.update({"type": "DELETECHANNEL"})
            if task["node_ids"] or task["exclude_node_ids"]:
                task["file_size"] = None
                task["total_resources"] = None
            delete_job_id = queue.enqueue(
                call_command,
                "deletecontent",
                task["channel_id"],
                track_progress=True,
                extra_metadata=task,
            )
            job_ids.append(delete_job_id)

        resp = [
            _job_to_response(queue.fetch_job(job_id)) for job_id in job_ids
        ]

        return Response(resp)
Example #6
    def startremotebulkimport(self, request):
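        """
        Import multiple channels from a remote source,
        one task per entry in the POSTed list.
        """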
        if not isinstance(request.data, list):
            raise serializers.ValidationError(
                "POST data must be a list of task descriptions")

        tasks = map(partial(validate_remote_import_task, request),
                    request.data)

        job_ids = []

        for task in tasks:
            task.update({"type": "REMOTEIMPORT"})
            import_job_id = queue.enqueue(
                _remoteimport,
                task["channel_id"],
                task["baseurl"],
                extra_metadata=task,
                cancellable=True,
            )
            job_ids.append(import_job_id)

        resp = [
            _job_to_response(queue.fetch_job(job_id)) for job_id in job_ids
        ]

        return Response(resp)
Example #7
def startpeerfacilitysync(server, user_id):
    """
    Initiate a SYNC (PULL + PUSH) of a specific facility from another device.
    """

    user = FacilityUser.objects.get(pk=user_id)
    facility_id = user.facility.id

    device_info = get_device_info()

    extra_metadata = prepare_sync_task(
        facility_id,
        user_id,
        user.username,
        user.facility.name,
        device_info["device_name"],
        device_info["instance_id"],
        server,
        type="SYNCPEER/FULL",
    )

    job_data = prepare_peer_sync_job(server,
                                     facility_id,
                                     user.username,
                                     user.password,
                                     extra_metadata=extra_metadata)

    job_id = queue.enqueue(call_command, "sync", **job_data)

    return job_id
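The function returns only a job id; a minimal sketch of turning it back into a status dict, reusing the same helpers the handlers above use. The server URL and user id are placeholders.

# Hypothetical follow-up, using only calls that appear in this module.
job_id = startpeerfacilitysync("https://peer.example.com:8080", "<user_id>")
status = _job_to_response(queue.fetch_job(job_id))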
Example #8
    def startchannelupdate(self, request):
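        """
        Update a channel from a remote or local source,
        depending on the requested sourcetype.
        """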

        sourcetype = request.data.pop("sourcetype", None)
        new_version = request.data.pop("new_version", None)

        if sourcetype == "remote":
            task = validate_remote_import_task(request, request.data)
            task.update({"type": "UPDATECHANNEL", "new_version": new_version})
            job_id = queue.enqueue(
                _remoteimport,
                task["channel_id"],
                task["baseurl"],
                peer_id=task["peer_id"],
                node_ids=task["node_ids"],
                is_updating=True,
                extra_metadata=task,
                track_progress=True,
                cancellable=True,
            )
        elif sourcetype == "local":
            task = validate_local_import_task(request, request.data)
            task.update({"type": "UPDATECHANNEL", "new_version": new_version})
            job_id = queue.enqueue(
                _diskimport,
                task["channel_id"],
                task["datafolder"],
                drive_id=task["drive_id"],
                node_ids=task["node_ids"],
                is_updating=True,
                extra_metadata=task,
                track_progress=True,
                cancellable=True,
            )
        else:
            raise serializers.ValidationError(
                "sourcetype must be 'remote' or 'local'")

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
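Two hedged request-body sketches, one per sourcetype branch; every value is a placeholder.

# Hypothetical bodies for the two branches above.
remote_update = {
    "sourcetype": "remote",
    "new_version": 7,
    "channel_id": "<channel_id>",
    "baseurl": "<content_server_url>",
    "peer_id": "<peer_id>",
    "node_ids": [],
}
local_update = {
    "sourcetype": "local",
    "new_version": 7,
    "channel_id": "<channel_id>",
    "datafolder": "<path_to_data_folder>",
    "drive_id": "<drive_id>",
    "node_ids": [],
}
# Any other sourcetype falls through to the ValidationError.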
Example #9
    def startdiskchannelimport(self, request):
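        """
        Import channel metadata from a local drive.
        """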
        task = validate_local_import_task(request, request.data)

        task.update({"type": "DISKCHANNELIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importchannel",
            "disk",
            task["channel_id"],
            task["datafolder"],
            extra_metadata=task,
            cancellable=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))
        return Response(resp)
Example #10
    def startremotechannelimport(self, request):
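        """
        Import channel metadata from a remote source.
        """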

        task = validate_remote_import_task(request, request.data)

        task.update({"type": "REMOTECHANNELIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importchannel",
            "network",
            task["channel_id"],
            baseurl=task["baseurl"],
            extra_metadata=task,
            cancellable=True,
        )
        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #11
    def startexportlogcsv(self, request):
        """
        Dumps in csv format the required logs.
        By default it will be dump contentsummarylog.

        :param: logtype: Kind of log to dump, summary or session
        :returns: An object with the job information

        """
        csv_export_filenames = {
            "session": "content_session_logs.csv",
            "summary": "content_summary_logs.csv",
        }
        log_type = request.data.get("logtype", "summary")
        if log_type in csv_export_filenames:
            logs_dir = os.path.join(conf.KOLIBRI_HOME, "log_export")
            filepath = os.path.join(logs_dir, csv_export_filenames[log_type])
        else:
            raise Http404(
                "Cannot create a CSV export file for {}".format(log_type))
        if not os.path.isdir(logs_dir):
            os.mkdir(logs_dir)

        job_type = ("EXPORTSUMMARYLOGCSV"
                    if log_type == "summary" else "EXPORTSESSIONLOGCSV")

        job_metadata = {"type": job_type, "started_by": request.user.pk}

        job_id = queue.enqueue(
            call_command,
            "exportlogs",
            log_type=log_type,
            output_file=filepath,
            overwrite="true",
            extra_metadata=job_metadata,
            track_progress=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
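A small standalone sketch of the output-path logic above; kolibri_home stands in for whatever conf.KOLIBRI_HOME resolves to at runtime.

import os

def export_path(kolibri_home, log_type):
    # Mirrors the handler's mapping; raises KeyError for other log
    # types, where the handler raises Http404 instead.
    filenames = {
        "session": "content_session_logs.csv",
        "summary": "content_summary_logs.csv",
    }
    return os.path.join(kolibri_home, "log_export", filenames[log_type])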
Example #12
    def startdiskcontentimport(self, request):
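        """
        Import content from a local drive.
        """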
        task = validate_local_import_task(request, request.data)

        task.update({"type": "DISKCONTENTIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importcontent",
            "disk",
            task["channel_id"],
            task["datafolder"],
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            extra_metadata=task,
            track_progress=True,
            cancellable=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #13
    def startremotecontentimport(self, request):
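        """
        Import content from a remote source.
        """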

        task = validate_remote_import_task(request, request.data)
        task.update({"type": "REMOTECONTENTIMPORT"})

        job_id = queue.enqueue(
            call_command,
            "importcontent",
            "network",
            task["channel_id"],
            baseurl=task["baseurl"],
            node_ids=task["node_ids"],
            exclude_node_ids=task["exclude_node_ids"],
            extra_metadata=task,
            track_progress=True,
            cancellable=True,
        )

        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
Example #14
    def startdataportalsync(self, request):
        """
        Initiate a PUSH sync with Kolibri Data Portal.
        """
        task = {
            "facility": request.data["facility"],
            "type": "SYNCDATAPORTAL",
            "started_by": request.user.pk,
        }

        job_id = queue.enqueue(
            call_command,
            "sync",
            facility=task["facility"],
            noninteractive=True,
            extra_metadata=task,
            track_progress=False,
            cancellable=False,
        )
        # attempt to get the created Task, otherwise return pending status
        resp = _job_to_response(queue.fetch_job(job_id))

        return Response(resp)
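This handler reads a single field from the request body; a sketch of the minimal payload, with a placeholder id.

# Hypothetical request body: the only field the handler reads.
payload = {"facility": "<facility_id>"}
# The enqueued sync runs non-interactively, without progress tracking,
# and cannot be cancelled, matching the flags above.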