Example #1
    def ensure_service_directory(self, relative_name):
        """Create a directory in PROJECT_DIR/services with the given name.

        The name should be unique to the ServiceRequirement creating the directory,
        so usually the requirement's env var.

        Args:
            relative_name (str): name to distinguish this dir from other service directories
        """
        path = _service_directory(self._local_state_file, relative_name)
        makedirs_ok_if_exists(path)
        return path
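
Every example on this page calls the same helper from anaconda_project.internal.makedirs. The library's implementation isn't shown here; below is a minimal sketch of what it presumably does (the errno check is an assumption, consistent with the Python 2.7 support visible in the later examples):

import errno
import os


def makedirs_ok_if_exists(path):
    """Create path and any missing parents; it is fine if it already exists."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:  # re-raise anything but "already exists"
            raise
    return path

On Python 3 alone, os.makedirs(path, exist_ok=True) gives the same behavior without the explicit errno check.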
Example #2
    def do_test(dirname):
        dir1 = os.path.join(dirname, "foo")
        dir2 = os.path.join(dir1, "bar")
        dir3 = os.path.join(dir2, "baz")
        assert os.path.isdir(dirname)
        assert not os.path.isdir(dir1)
        assert not os.path.isdir(dir2)
        assert not os.path.isdir(dir3)

        makedirs_ok_if_exists(dir3)

        assert os.path.isdir(dir1)
        assert os.path.isdir(dir2)
        assert os.path.isdir(dir3)
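
do_test is a nested function that expects an existing empty directory; presumably the enclosing test hands it a temporary one (for example through a helper like with_directory_contents in Example #7). An illustrative standalone driver, not the project's harness:

import tempfile

with tempfile.TemporaryDirectory() as dirname:
    do_test(dirname)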
Example #3
    def _write_a_file(self, filename):
        try:
            # 'version' is assumed to be imported at module level in the
            # original file (anaconda_project.version).
            makedirs.makedirs_ok_if_exists(os.path.dirname(filename))
            with codecs.open(filename, 'w', encoding='utf-8') as f:
                # we don't read the contents of the file for now, but we
                # record the version in it in case that is useful in the
                # future. We need to write something to the file to bump
                # its mtime if it already exists...
                f.write('{"anaconda_project_version": "%s"}\n' % version)
            return True
        except (IOError, OSError):
            # ignore errors because this is just an optimization; if we
            # fail we will survive
            return False
Example #4
def _save_file(yaml, filename):
    contents = ryaml.dump(yaml, Dumper=ryaml.RoundTripDumper)

    try:
        # This is to ensure we don't corrupt the file, even if ruamel.yaml is broken
        ryaml.load(contents, Loader=ryaml.RoundTripLoader)
    except YAMLError as e:  # pragma: no cover (should not happen)
        print("ruamel.yaml bug; it failed to parse a file that it generated.", file=sys.stderr)
        print("  the parse error was: " + str(e), file=sys.stderr)
        print("Generated file was:", file=sys.stderr)
        print(contents, file=sys.stderr)
        raise RuntimeError("Bug in ruamel.yaml library; failed to parse a file that it generated: " + str(e))

    if not os.path.isfile(filename):
        # might have to make the directory
        dirname = os.path.dirname(filename)
        makedirs_ok_if_exists(dirname)
    _atomic_replace(filename, contents)
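
_atomic_replace is not part of this excerpt. A common way to implement it, and a hedged guess at what the project does, is to write the contents to a sibling temporary file and rename it over the target, so readers never observe a half-written file:

import codecs
import os
import uuid


def _atomic_replace(filename, contents, encoding='utf-8'):
    # write to a unique sibling file so the rename stays on one filesystem
    tmp = filename + "." + str(uuid.uuid4()) + ".tmp"
    try:
        with codecs.open(tmp, 'w', encoding=encoding) as f:
            f.write(contents)
            f.flush()
            os.fsync(f.fileno())
        os.replace(tmp, filename)  # atomic on POSIX and Windows (Python 3.3+)
    finally:
        if os.path.isfile(tmp):  # only left behind if the replace failed
            os.remove(tmp)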
Example #5
def _extract_files_tar(tar_path, src_and_dest, frontend):
    with tarfile.open(tar_path, mode='r') as tf:
        for (src, dest) in src_and_dest:
            frontend.info("Unpacking %s to %s" % (src, dest))
            member = tf.getmember(src)
            # we could also use tf._extract_member here, but the
            # solution below with only the public API isn't that
            # bad.
            if member.isreg():
                makedirs_ok_if_exists(os.path.dirname(dest))
                tf.makefile(member, dest)
            else:
                assert member.isdir()  # we filtered out other types
                makedirs_ok_if_exists(dest)

            try:
                tf.chown(member, dest, False)  # pragma: no cover (python 3.5 has another param)
            except TypeError:  # pragma: no cover
                tf.chown(member, dest)  # pragma: no cover (python 2.7, 3.4)
            tf.chmod(member, dest)
            tf.utime(member, dest)
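
A usage sketch for _extract_files_tar: the frontend argument just needs an info(message) method, and the archive and member paths here are made up for illustration:

class PrintFrontend(object):
    def info(self, message):
        print(message)

_extract_files_tar("bundle.tar.gz",
                   [("project/anaconda-project.yml",
                     "/tmp/unpacked/anaconda-project.yml")],
                   PrintFrontend())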
Example #6
def _extract_files_zip(zip_path, src_and_dest, frontend):
    # the zipfile API has no way to extract to a filename of
    # our choice, so we have to unpack to a temporary location,
    # then copy those files over.
    tmpdir = tempfile.mkdtemp()
    try:
        with zipfile.ZipFile(zip_path, mode='r') as zf:
            zf.extractall(tmpdir)
            for (src, dest) in src_and_dest:
                frontend.info("Unpacking %s to %s" % (src, dest))
                src_path = os.path.join(tmpdir, src)
                if os.path.isdir(src_path):
                    makedirs_ok_if_exists(dest)
                    shutil.copystat(src_path, dest)
                else:
                    makedirs_ok_if_exists(os.path.dirname(dest))
                    shutil.copy2(src_path, dest)
    finally:
        try:
            shutil.rmtree(tmpdir)
        except (IOError, OSError):
            pass
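
The temporary directory exists because ZipFile.extract always recreates the member's archive path under the target directory. An alternative sketch (not what the project does) streams each regular file straight to its destination with ZipFile.open, at the cost of losing the copystat metadata handling above:

import os
import shutil
import zipfile

def _copy_zip_member(zip_path, src, dest):
    # write the archive member named src directly to dest
    with zipfile.ZipFile(zip_path, mode='r') as zf:
        makedirs_ok_if_exists(os.path.dirname(dest))
        with zf.open(src) as member, open(dest, 'wb') as out:
            shutil.copyfileobj(member, out)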
Example #7
import codecs
import os
import sys
try:
    from backports.tempfile import TemporaryDirectory
except ImportError:
    from tempfile import TemporaryDirectory
import zipfile
import tempfile

from anaconda_project.internal.makedirs import makedirs_ok_if_exists
from anaconda_project.local_state_file import LocalStateFile
from anaconda_project.yaml_file import _load_string
from anaconda_project.project_file import (possible_project_file_names,
                                           DEFAULT_PROJECT_FILENAME)

local_tmp = os.path.abspath("./build/tmp")
makedirs_ok_if_exists(local_tmp)


def with_directory_contents(contents, func):
    tempd = TemporaryDirectory(prefix="test-")
    dirname = tempd.name
    try:
        for filename, file_content in contents.items():
            path = os.path.join(dirname, filename)
            if file_content is None:
                # make a directory
                makedirs_ok_if_exists(path)
            else:
                makedirs_ok_if_exists(os.path.dirname(path))
                with codecs.open(path, 'w', 'utf-8') as f:
                    f.write(file_content)
        # hand the populated directory to the test function (realpath in
        # case the temporary directory lives behind a symlink), then clean up
        func(os.path.realpath(dirname))
    finally:
        tempd.cleanup()
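
Typical usage, assuming the completion above: a dict value of None creates a directory, anything else becomes a file's contents (the names and contents here are illustrative):

def check(dirname):
    print(os.listdir(dirname))

with_directory_contents(
    {"anaconda-project.yml": "name: demo\n",  # a file with this content
     "envs/default": None},                   # None means: just make a directory
    check)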
Example #8
    @gen.coroutine
    def run(self, io_loop):
        """Run the download on the given io_loop."""
        assert self._client is None

        dirname = os.path.dirname(self._filename)
        try:
            makedirs.makedirs_ok_if_exists(dirname)
        except Exception as e:
            self._errors.append("Could not create directory '%s': %s" %
                                (dirname, e))
            raise gen.Return(None)

        if self._hash_algorithm is not None:
            hasher = getattr(hashlib, self._hash_algorithm)()
        self._client = httpclient.AsyncHTTPClient(
            io_loop=io_loop,
            max_clients=1,
            # without this we buffer a huge amount
            # of stuff and then call the streaming_callback
            # once.
            max_buffer_size=1024 * 1024,
            # without this we 599 on large downloads
            max_body_size=100 * 1024 * 1024 * 1024,
            force_instance=True)

        tmp_filename = self._filename + ".part"
        try:
            _file = open(tmp_filename, 'wb')
        except EnvironmentError as e:
            self._errors.append("Failed to open %s: %s" % (tmp_filename, e))
            raise gen.Return(None)

        def cleanup_tmp():
            try:
                _file.close()
                # future: we could save it in order to try
                # resuming a failed download midstream, but
                # pointless until the download code above
                # knows how to resume.
                os.remove(tmp_filename)
            except EnvironmentError:
                pass

        def writer(chunk):
            if len(self._errors) > 0:
                return

            if self._hash_algorithm is not None:
                hasher.update(chunk)

            try:
                _file.write(chunk)
            except EnvironmentError as e:
                # we can't actually throw this error or Tornado freaks out, so instead
                # we ignore all future chunks once we have an error, which does mean
                # we continue to download bytes that we don't use. yuck.
                self._errors.append("Failed to write to %s: %s" %
                                    (tmp_filename, e))

        try:
            timeout_in_seconds = 60 * 10  # pretty long because we could be dealing with huge files
            request = httpclient.HTTPRequest(
                url=self._url,
                streaming_callback=writer,
                request_timeout=timeout_in_seconds)
            try:
                response = yield self._client.fetch(request)
            except Exception as e:
                self._errors.append("Failed download to %s: %s" %
                                    (self._filename, str(e)))
                raise gen.Return(None)

            # fetch() should have raised any error itself, not left it
            # unthrown on the response
            assert response.error is None

            if len(self._errors) == 0:
                try:
                    _file.close()  # be sure tmp_filename is flushed
                    rename.rename_over_existing(tmp_filename, self._filename)
                except EnvironmentError as e:
                    self._errors.append("Failed to rename %s to %s: %s" %
                                        (tmp_filename, self._filename, str(e)))

            if len(self._errors) == 0 and self._hash_algorithm is not None:
                self._hash = hasher.hexdigest()

            raise gen.Return(response)
        finally:
            cleanup_tmp()
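
run is a Tornado generator coroutine, so the caller drives it on an IOLoop. A minimal sketch, where downloader stands in for an instance of the (unshown) class that owns this method:

from tornado import ioloop

loop = ioloop.IOLoop.current()
response = loop.run_sync(lambda: downloader.run(loop))
# on failure run() returns None and the instance's errors list explains why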