Example #1
    def create(self, request, exporter_pk):
        """
        Generates a Task to export the set of repositories assigned to a specific PulpExporter.
        """
        # Validate Exporter
        exporter = PulpExporter.objects.get(pk=exporter_pk).cast()
        ExporterSerializer.validate_path(exporter.path, check_is_dir=True)

        # Validate Export
        serializer = PulpExportSerializer(data=request.data,
                                          context={"exporter": exporter})
        serializer.is_valid(raise_exception=True)

        # Invoke the export
        export = PulpExport.objects.create(exporter=exporter,
                                           params=request.data)
        export.validated_versions = serializer.validated_data.get("versions", None)
        export.validated_start_versions = serializer.validated_data.get("start_versions", None)
        export.validated_chunk_size = serializer.validated_data.get("chunk_size", None)

        task = dispatch(pulp_export, [exporter], kwargs={"the_export": export})

        return OperationPostponedResponse(task, request)
Example #2
    def test_read_only_params(self):
        data = {
            "full": True,
            "dry_run": False,
            "output_file_info": {
                "bar": "blech"
            }
        }
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())

        with self.assertRaises(AttributeError):
            serializer.output_file_info["bar"]
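Note: output_file_info is evidently a read-only field here. The unknown input is ignored during validation (hence is_valid() succeeds), and since DRF serializers do not expose fields as instance attributes, the lookup raises AttributeError exactly as the test expects.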
Example #3
    def create(self, request, exporter_pk):
        """
        Generates a Task to export the set of repositories assigned to a specific PulpExporter.
        """
        # Validate Exporter
        exporter = PulpExporter.objects.get(pk=exporter_pk).cast()
        ExporterSerializer.validate_path(exporter.path, check_is_dir=True)

        # Validate Export
        serializer = PulpExportSerializer(data=request.data,
                                          context={"exporter": exporter})
        serializer.is_valid(raise_exception=True)

        # Invoke the export
        task = dispatch(
            pulp_export,
            exclusive_resources=[exporter],
            kwargs={
                "exporter_pk": str(exporter.pk),
                "params": request.data
            },
        )

        return OperationPostponedResponse(task, request)
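Note: Examples 1 and 3 are the same view from what appear to be two different pulpcore versions. The older form (Example 1) creates and validates the PulpExport in the view and hands the model instance to dispatch(); the newer form (Example 3) passes only serializable arguments (exporter_pk and the raw request data) via exclusive_resources/kwargs and defers creation and validation to the task itself, shown in Example #8 below.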
Example #4
    def test_validate_no_params(self):
        data = {}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
Example #5
    def test_chunk_size(self):
        # positive tests
        # bytes
        data = {"chunk_size": "100B"}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(100, serializer.validated_data["chunk_size"])

        # kilobytes
        data = {"chunk_size": "100KB"}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(100 * 1024, serializer.validated_data["chunk_size"])

        # megabytes
        data = {"chunk_size": "100MB"}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(100 * 1024 * 1024,
                         serializer.validated_data["chunk_size"])

        # gigabytes
        data = {"chunk_size": "100GB"}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(100 * 1024 * 1024 * 1024,
                         serializer.validated_data["chunk_size"])

        # terabytes
        data = {"chunk_size": "100TB"}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(100 * 1024 * 1024 * 1024 * 1024,
                         serializer.validated_data["chunk_size"])

        # float-units
        data = {"chunk_size": "2.4GB"}
        serializer = PulpExportSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(int(2.4 * 1024 * 1024 * 1024),
                         serializer.validated_data["chunk_size"])

        # negative tests
        # no units
        data = {"chunk_size": "100"}
        serializer = PulpExportSerializer(data=data)
        self.assertFalse(serializer.is_valid())

        # not-a-number
        data = {"chunk_size": "bazMB"}
        serializer = PulpExportSerializer(data=data)
        self.assertFalse(serializer.is_valid())

        # non-positive
        data = {"chunk_size": "0GB"}
        serializer = PulpExportSerializer(data=data)
        self.assertFalse(serializer.is_valid())

        # non-positive
        data = {"chunk_size": "-10KB"}
        serializer = PulpExportSerializer(data=data)
        self.assertFalse(serializer.is_valid())
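Read together, these assertions pin down the chunk_size semantics: a number (integer or float) followed by a mandatory unit suffix, 1024-based multipliers, and a strictly positive result. A minimal sketch of a parser consistent with the tests above; parse_chunk_size and _UNITS are hypothetical names, not pulpcore's actual implementation:

import re

_UNITS = {"B": 1, "KB": 1024, "MB": 1024 ** 2, "GB": 1024 ** 3, "TB": 1024 ** 4}

def parse_chunk_size(value):
    # Accept "100B", "100KB", ..., "2.4GB"; reject "100", "bazMB", "-10KB".
    match = re.fullmatch(r"(\d+(?:\.\d+)?)(B|KB|MB|GB|TB)", value)
    if match is None:
        raise ValueError("chunk_size must be a number followed by B/KB/MB/GB/TB")
    size = int(float(match.group(1)) * _UNITS[match.group(2)])
    if size <= 0:
        # "0GB" parses but is not a usable chunk size
        raise ValueError("chunk_size must be positive")
    return size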
Example #6
    def test_bad_params(self):
        data = {"baz": "bar"}
        serializer = PulpExportSerializer(data=data)
        self.assertFalse(serializer.is_valid())
Example #7
    def test_validate_bad_param_values(self):
        data = {"full": "bar", "dry_run": 0}
        serializer = PulpExportSerializer(data=data)
        self.assertFalse(serializer.is_valid())
Example #8
import hashlib
import json
import os
import subprocess
import tarfile
from glob import glob
from pathlib import Path

# Model/serializer imports as they exist in pulpcore; _do_export and
# _compute_hash are helpers defined in the same module as this task.
from pulpcore.app.models import PulpExport, PulpExporter, Task
from pulpcore.app.serializers import PulpExportSerializer


def pulp_export(exporter_pk, params):
    """
    Create a PulpExport to export pulp_exporter.repositories.

    1) Spit out all Artifacts, ArtifactResource.json, and RepositoryResource.json
    2) Spit out all *resource JSONs in per-repo-version directories
    3) Compute and store the sha256 and filename of the resulting tar.gz/chunks

    Args:
        exporter_pk (str): primary key of the PulpExporter whose repositories are exported
        params (dict): raw request data; validated here with PulpExportSerializer

    Raises:
        ValidationError: When path is not in the ALLOWED_EXPORT_PATHS setting,
            OR path exists and is not a directory
    """
    pulp_exporter = PulpExporter.objects.get(pk=exporter_pk)
    serializer = PulpExportSerializer(data=params,
                                      context={"exporter": pulp_exporter})
    serializer.is_valid(raise_exception=True)
    the_export = PulpExport.objects.create(exporter=pulp_exporter,
                                           params=params)
    the_export.validated_versions = serializer.validated_data.get("versions", None)
    the_export.validated_start_versions = serializer.validated_data.get("start_versions", None)
    the_export.validated_chunk_size = serializer.validated_data.get("chunk_size", None)

    try:
        the_export.task = Task.current()

        tarfile_fp = the_export.export_tarfile_path()

        path = Path(pulp_exporter.path)
        if not path.is_dir():
            path.mkdir(mode=0o775, parents=True)

        rslts = {}
        if the_export.validated_chunk_size:
            # write it into chunks
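            # Stream the tar.gz through `split`, producing files named
            # <tarfile_fp>.0000, <tarfile_fp>.0001, ...:
            #   -a 4 -> four-character suffixes
            #   -b N -> N bytes per chunk
            #   -d   -> numeric suffixes
            #   -    -> read from stdin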
            with subprocess.Popen(
                [
                    "split",
                    "-a",
                    "4",
                    "-b",
                    str(the_export.validated_chunk_size),
                    "-d",
                    "-",
                    tarfile_fp + ".",
                ],
                stdin=subprocess.PIPE,
            ) as split_process:
                try:
                    with tarfile.open(tarfile_fp,
                                      "w|gz",
                                      fileobj=split_process.stdin) as tar:
                        _do_export(pulp_exporter, tar, the_export)
                except Exception:
                    # no matter what went wrong, we can't trust the files we (may have) created.
                    # Delete the ones we can find and pass the problem up.
                    for pathname in glob(tarfile_fp + ".*"):
                        os.remove(pathname)
                    raise
            # compute the hashes
            global_hash = hashlib.sha256()
            paths = sorted([str(Path(p)) for p in glob(tarfile_fp + ".*")])
            for a_file in paths:
                a_hash = _compute_hash(a_file, global_hash)
                rslts[a_file] = a_hash
            tarfile_hash = global_hash.hexdigest()

        else:
            # write into the file
            try:
                with tarfile.open(tarfile_fp, "w:gz") as tar:
                    _do_export(pulp_exporter, tar, the_export)
            except Exception:
                # no matter what went wrong, we can't trust the file we created.
                # Delete it if it exists and pass the problem up.
                if os.path.exists(tarfile_fp):
                    os.remove(tarfile_fp)
                raise
            # compute the hash
            tarfile_hash = _compute_hash(tarfile_fp)
            rslts[tarfile_fp] = tarfile_hash

        # store the outputfile/hash info
        the_export.output_file_info = rslts

        # write outputfile/hash info to a file 'next to' the output file(s)
        output_file_info_path = tarfile_fp.replace(".tar.gz", "-toc.json")
        with open(output_file_info_path, "w") as outfile:
            if the_export.validated_chunk_size:
                chunk_size = the_export.validated_chunk_size
            else:
                chunk_size = 0
            chunk_toc = {
                "meta": {
                    "chunk_size": chunk_size,
                    "file": os.path.basename(tarfile_fp),
                    "global_hash": tarfile_hash,
                },
                "files": {},
            }
            # Build a toc with just filenames (not the path on the exporter-machine)
            for a_path in rslts.keys():
                chunk_toc["files"][os.path.basename(a_path)] = rslts[a_path]
            json.dump(chunk_toc, outfile)

        # store toc info
        toc_hash = _compute_hash(output_file_info_path)
        the_export.output_file_info[output_file_info_path] = toc_hash
        the_export.toc_info = {
            "file": output_file_info_path,
            "sha256": toc_hash
        }
    finally:
        # whatever may have happened, make sure we save the export
        the_export.save()

    # If an exception was thrown, we'll never get here - which is good, because we don't want a
    # 'failed' export to be the last_export we derive the next incremental from
    # mark it as 'last'
    pulp_exporter.last_export = the_export
    # save the exporter
    pulp_exporter.save()
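The _do_export and _compute_hash helpers are defined alongside this task and are not shown here. For orientation, a minimal sketch of what _compute_hash plausibly does, consistent with both call sites above (one argument for the unchunked tarball, two arguments to also update a running digest spanning all chunk files); this is an assumption, not pulpcore's exact code:

import hashlib

def _compute_hash(filename, global_hash=None):
    # Hash one file in 1 MB blocks; optionally feed the same bytes into a
    # caller-supplied digest that accumulates across every chunk file.
    sha = hashlib.sha256()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(1024 * 1024), b""):
            sha.update(block)
            if global_hash is not None:
                global_hash.update(block)
    return sha.hexdigest()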