def test_simple_store(caplog):
    with httpretty.core.httprettized():
        httpretty.HTTPretty.allow_net_connect = False

        httpretty.register_uri(
            httpretty.GET,
            'http://test.example',
            body='a body content',
            content_disposition='attachment; filename="foo.tar.gz"')
        query = {'url': 'http://test.example',
                 'sha': 'da39a3ee5e6b4b0d3255bfef95601890afd80709'}

        store = HTTPSimpleStore({}, None)
        metadata = store.get_resource_metadata(query)
        assert {metadata.url, metadata.sha} == set(query.values())

        tmpd = tempfile.mkdtemp()
        try:
            path = store.download_resource_content(metadata, tmpd)
            assert path is None
        assert any(
            'expecting da39a3ee5e6b4b0d3255bfef95601890afd80709 got'
            in c.getMessage() for c in caplog.records)

            metadata.sha = '0c8ef1a401f4564abba7b85676464ac4bbb5cb05'
            path = store.download_resource_content(metadata, tmpd)
            assert path is not None
        finally:
            rm(tmpd, True)
Example #2
    def download_file(self, url, dest, filename=None, validate=None):
        """Download a file.

        :param url: the url to GET
        :type url: str
        :param dest: local directory path for the downloaded file
        :type dest: str
        :param filename: the local path where to store this resource; by
            default, use the name provided in the ``Content-Disposition``
            header.
        :type filename: str | None
        :param validate: function to call once the download is complete for
            detecting invalid / corrupted download. Takes the local path as
            parameter and returns a boolean.
        :type validate: (str) -> bool
        :return: the path of the downloaded file, or None if there is an
            error
        :rtype: str | None
        """
        # When using stream=True, Requests cannot release the connection back
        # to the pool unless all the data is consumed or Response.close is
        # called. Force Response.close by wrapping the code in
        # contextlib.closing.

        path = None
        try:
            with contextlib.closing(
                    self.request(method='GET', url=url,
                                 stream=True)) as response:
                content_length = int(response.headers.get('content-length', 0))
                e3.log.debug(response.headers)
                if filename is None:
                    if 'content-disposition' in response.headers:
                        filename = get_filename(
                            response.headers['content-disposition'])
                    if filename is None:
                        # Generate a temporary name
                        tmpf = tempfile.NamedTemporaryFile(delete=False,
                                                           dir=dest,
                                                           prefix='download.')
                        tmpf.close()
                        filename = os.path.basename(tmpf.name)

                path = os.path.join(dest, filename)
                logger.info('downloading %s size=%s', path, content_length)

                # Number of chunks expected, used as the progress bar total
                expected_size = content_length // self.CHUNK_SIZE
                with open(path, 'wb') as fd:
                    for chunk in e3.log.progress_bar(response.iter_content(
                            self.CHUNK_SIZE),
                                                     total=expected_size):
                        fd.write(chunk)
                if validate is None or validate(path):
                    return path
                else:
                    rm(path)
        except (requests.exceptions.RequestException, HTTPError) as e:
            # An error (timeout?) occurred while downloading the file
            logger.warning('download failed')
            logger.debug(e)
            if path is not None:
                rm(path)
            return None
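
A minimal usage sketch for this API (the session object, URL, destination and digest below are hypothetical; validate receives the local path and returns a boolean, as documented above):

import hashlib

def sha1_matches(path, expected='da39a3ee5e6b4b0d3255bfef95601890afd80709'):
    # Hypothetical validator: recompute the SHA-1 of the downloaded file
    # and compare it with the digest we expect for this resource.
    with open(path, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest() == expected

# 'session' is a hypothetical instance of the class defining download_file
path = session.download_file('http://test.example/foo.tar.gz',
                             dest='/tmp/downloads',
                             validate=sha1_matches)
if path is None:
    print('download failed or did not pass validation')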
Example #3
def test_is_dir_empty():
    work_dir = os.getcwd()

    test_dir_path = os.path.join(work_dir, 'dir')
    deleted_file_path = os.path.join(test_dir_path, 'deleted2.txt')
    deleted_file2_path = os.path.join(test_dir_path, 'deleted.txt')
    mkdir(test_dir_path)

    ntfile = NTFile(test_dir_path)
    ntfile2 = NTFile(deleted_file_path)

    try:

        assert ntfile.is_dir_empty
        touch(deleted_file_path)
        touch(deleted_file2_path)
        assert not ntfile.is_dir_empty
        ntfile2.open(Access.DELETE, Share.DELETE)
        ntfile2.dispose()
        assert not ntfile.is_dir_empty
        rm(deleted_file2_path)
        assert ntfile.is_dir_empty

    finally:
        ntfile.close()
        ntfile2.close()
Example #5
    def install_package(self, p):
        if self.db.up_to_date(p):
            return
        if p in self.db:
            # p is installed but not up-to-date. We need first to uninstall it
            self.uninstall_package(p)

        package_path = self.ini.package_path(p)
        base_path = os.path.basename(package_path)
        tar_archive = base_path.rsplit('.', 1)[0]
        manifest_path = os.path.join(self.root_dir, 'etc', 'setup',
                                     p + '.lst.gz')

        with open(self.cyglog('install.log'), 'wb') as fd:
            os.chdir(self.root_dir)
            subprocess.check_call(
                    [os.path.join(setup_dir, '7z.exe'),
                     'x', '-y', package_path],
                    stdout=fd)
            file_list = self.unpack(tar_archive)
            with gzip.open(manifest_path, 'wb') as list_fd:
                list_fd.write(('\n'.join(file_list) + '\n').encode('utf-8'))
            rm(tar_archive)

        self.db += p
Example #6
    def set_up(self):
        super().set_up()
        self.env.rewrite_baselines = self.env.options.rewrite

        # Directory that contains GPR files, shared by testcases
        self.env.ada_projects_path = os.path.join(
            self.root_dir, 'ada_projects'
        )

        # Unless specifically told not to, add test programs to the environment
        if not self.env.options.no_auto_path:
            repo_root = os.path.dirname(self.root_dir)

            def in_repo(*args):
                return os.path.join(repo_root, *args)

            os.environ['PATH'] = os.path.pathsep.join([
                in_repo('lkql', 'build', 'obj-mains'),
                in_repo('lkql_checker', 'bin'),
                os.environ['PATH'],
            ])

        # Ensure the testsuite starts with an empty directory to store source
        # trace files.
        self.env.traces_dir = os.path.join(self.working_dir, 'traces')
        if self.env.options.coverage:
            rm(self.env.traces_dir)
            mkdir(self.env.traces_dir)
Example #7
def test_git_non_utf8():
    """Test with non utf-8 encoding in changelog."""
    working_tree = os.path.join(os.getcwd(), "working_tree")
    repo = GitRepository(working_tree)
    repo.init()
    os.chdir(working_tree)
    new_file = os.path.join(working_tree, "new.txt")
    commit_msg = os.path.join(working_tree, "commit.txt")

    with open(commit_msg, "wb") as fd:
        fd.write(b"\x03\xff")

    with open(new_file, "wb") as fd:
        fd.write(b"\x03\xff")

    repo.git_cmd(["add", "new.txt"])
    repo.git_cmd(["config", "user.email", "*****@*****.**"])
    repo.git_cmd(["config", "user.name", "e3 core"])
    repo.git_cmd(["commit", "-F", commit_msg])

    with closing(tempfile.NamedTemporaryFile(mode="w", delete=False)) as fd:
        repo.write_log(fd)
        tmp_filename = fd.name
    try:
        with open(tmp_filename) as fd:
            commits = list(repo.parse_log(fd, max_diff_size=1024))
    finally:
        rm(tmp_filename)

    assert "\\x03\\xff" in commits[0]["diff"]
Example #8
def remove(path):
    """Delete the file or directory subtree designated by PATH"""

    print("from : %s" % os.getcwd())
    print("remove : %s" % path)

    # To prevent big damage if the input PATH argument happens to have been
    # miscomputed, we first attempt to move it locally, then remove the local
    # instance. The absence of computation on this local name makes it a tad
    # safer to manipulate and the attempt to move locally would fail for
    # obviously bogus arguments such as anything leading to a parent of the
    # current dir (e.g. "/", or ...).

    local_name = "./old_stuff_to_be_removed"

    # Note that what we have to remove may be a regular file or an entire
    # directory subtree, and that rm(recursive=True) is not guaranteed to
    # work for regular files.

    # Start by removing the current local instance, in case the previous
    # removal failed or was interrupted somehow.

    def isdir(path):
        return os.path.isdir(path) and not os.path.islink(path)

    if os.path.exists(local_name):
        rm(local_name, recursive=isdir(local_name))

    if os.path.exists(path):
        mv(path, local_name)
        rm(local_name, recursive=isdir(local_name))
Example #9
    def create_data_dir(self, root_dir: str) -> None:
        """Create data to be pushed to bucket used by cloudformation for resources."""
        # Create directory specific to that lambda
        package_dir = os.path.join(root_dir, name_to_id(self.name), "package")

        # Install the requirements
        if self.requirement_file is not None:
            p = Run(
                python_script("pip")
                + ["install", f"--target={package_dir}", "-r", self.requirement_file],
                output=None,
            )
            assert p.status == 0

        # Copy user code
        self.populate_package_dir(package_dir=package_dir)

        # Create an archive
        create_archive(
            f"{self.name}_lambda.zip",
            from_dir=package_dir,
            dest=root_dir,
            no_root_dir=True,
        )

        # Remove temporary directory
        rm(package_dir, recursive=True)
Example #10
    def write_scripts(self) -> None:
        from setuptools.command.easy_install import get_script_args

        # Retrieve sandbox_scripts entry points
        e3_distrib = get_distribution("e3-core")

        class SandboxDist:
            def get_entry_map(self, group):  # type: ignore
                if group != "console_scripts":
                    return {}
                return e3_distrib.get_entry_map("sandbox_scripts")

            def as_requirement(self):  # type: ignore
                return e3_distrib.as_requirement()

        for script in get_script_args(dist=SandboxDist()):
            script_name = script[0]
            script_content = script[1]
            target = os.path.join(self.bin_dir, script_name)
            rm(target)
            if not script_name.endswith(".exe"):
                script_content = script_content.replace(
                    "console_scripts", "sandbox_scripts")
            with open(target, "wb") as f:
                if isinstance(script_content, str):
                    f.write(script_content.encode("utf-8"))
                else:
                    f.write(script_content)
            chmod("a+x", target)
Example #12
    def write_scripts(self):
        from setuptools.command.easy_install import get_script_args

        # Retrieve sandbox_scripts entry points
        e3_distrib = get_distribution('e3-core')

        class SandboxDist(object):
            def get_entry_map(self, group):
                if group != 'console_scripts':
                    return {}
                return e3_distrib.get_entry_map('sandbox_scripts')

            def as_requirement(self):
                return e3_distrib.as_requirement()

        for script in get_script_args(dist=SandboxDist()):
            script_name = script[0]
            script_content = script[1]
            target = os.path.join(self.bin_dir, script_name)
            rm(target)
            if not script_name.endswith('.exe'):
                script_content = script_content.replace(
                    'console_scripts', 'sandbox_scripts')
            with open(target, 'wb') as f:
                if isinstance(script_content, unicode):
                    f.write(script_content.encode('utf-8'))
                else:
                    f.write(script_content)
            chmod('a+x', target)
Example #13
    def set(self, uid, value, timeout=DEFAULT_TIMEOUT):
        # Make sure that the cache dir exists
        self._create_cache_dir()
        dest_file = self.uid_to_file(uid)

        tmp_file = tempfile.NamedTemporaryFile(dir=self.cache_dir,
                                               delete=False)
        try:
            tmp_file.write(
                pickle.dumps(self.get_expiry_time(timeout),
                             pickle.HIGHEST_PROTOCOL))
            tmp_file.write(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
        except Exception as err:
            tmp_file.close()
            e3.log.debug('error when setting %s in %s:\n%s', uid, dest_file,
                         err)
            return False
        else:
            tmp_file.close()

            if sys.platform == 'win32':  # unix: no cover
                # atomic rename does not work on windows if the dest
                # file already exists
                rm(dest_file)
            os.rename(tmp_file.name, dest_file)
            return True

        finally:
            rm(tmp_file.name)
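
The write-to-temp-then-rename sequence above is what makes the cache update atomic for readers. A standalone sketch of the same pattern, assuming Python 3, where os.replace overwrites the destination atomically and avoids the Windows-specific rm workaround:

import os
import tempfile

def atomic_write(dest_file, data):
    # Create the temporary file in the destination directory so that the
    # final rename stays on a single filesystem.
    tmp = tempfile.NamedTemporaryFile(dir=os.path.dirname(dest_file) or '.',
                                      delete=False)
    try:
        with tmp:
            tmp.write(data)
        os.replace(tmp.name, dest_file)  # atomic, even if dest_file exists
    finally:
        if os.path.exists(tmp.name):
            os.unlink(tmp.name)

atomic_write('settings.cache', b'payload')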
Example #14
    def save_fingerprint(self, uid, fingerprint):
        """See Walk.save_fingerprint."""
        filename = self.fingerprint_filename(uid)
        if fingerprint is None:
            rm(filename)
        else:
            fingerprint.save_to_file(filename)
Example #15
    def interrupt(self):
        """Interrupt the UxAS instance."""
        if self.process is not None and self.process.is_running:
            self.process.interrupt()

        if os.path.isfile(self.cfg_path):
            rm(self.cfg_path)
Example #16
    def cleanup_working_dir(self) -> None:
        """Remove the working directory tree."""
        try:
            rm(self.working_dir(), True)
        except Exception:  # no cover
            # TODO (U222-013) For mysterious reasons, on Windows hosts,
            # sometimes executable files are still visible in the filesystem
            # even after the call to "os.unlink" returned with success. As a
            # result, removing the directory that contains them fails and
            # thus we get an exception. At first we thought it could be
            # related to the system indexer
            # (https://superuser.com/questions/260375/why-would-system-continue-locking-
            # executable-file-handles-after-the-app-has-exit)
            # but this issue still occurs on systems that have it disabled.
            #
            # As far as we know (because we failed to pinpoint the exact
            # reason for this condition), these issues do not reveal any bug
            # in tests themselves, so silently ignore such errors.
            if self.env.host.os.name == "windows":
                self.result.log += (
                    f"\nError while cleaning up the working directory:"
                    f"\n{traceback.format_exc()}"
                    f"\nHost is running Windows: discarding this error...")
            else:
                raise
Example #17
def test_build_space_exists():
    """Test the BuildSpace.exists method."""
    bs_name = os.path.abspath("foo")
    bs = BuildSpace(bs_name)

    # First, verify the behavior when the buildspace directory
    # doesn't even exist.
    assert not os.path.exists(bs_name), bs_name
    assert bs.exists() is False

    # Next, create the directory, but without anything in it.
    # In particular, the marker file isn't present, so bs.exists()
    # should still return False for that directory.
    mkdir(bs_name)
    assert bs.exists() is False

    # Create the buildspace, and then verify that bs.exists()
    # returns True.
    bs.create()
    assert bs.exists() is True

    # Verify that we also return False if one of the subdirectories
    # is missing. To do that, first verify that the subdirectory
    # we picked does exist, then delete it, before observing
    # whether BuildSpace.exists now returns False or not.
    one_subdir = bs.subdir(bs.DIRS[0])
    assert os.path.isdir(one_subdir)
    rm(one_subdir, recursive=True)
    assert bs.exists() is False
Example #18
    def tear_down(self):
        """Execute operation when finalizing the testsuite.

        By default, clean the working directory in which the tests
        were run.
        """
        if self.main.args.enable_cleanup:
            rm(self.working_dir, True)
Example #19
    def update(self, url=None, revision=None, force_and_clean=False):
        """Update a working copy or checkout a new one.

        If the directory is already a checkout, try to update it. If the
        directory is not associated with a (valid) checkout, or is empty,
        perform a new checkout. Note that the --remove-unversioned option
        of the svn cleanup subcommand only exists since svn version 1.9.

        :param url: URL of a SVN repository
        :type url: str
        :param revision: specific revision (default is last)
        :type revision: str | None
        :param force_and_clean: if True, erase the content of a non-empty
            working_copy and use the '--force' option for the svn
            update/checkout command
        :type force_and_clean: bool
        :return: True if any local changes detected in the working copy
        :rtype: bool
        :raise: SVNError
        """
        def is_clean_svn_dir(dir_path):
            """Return a tuple (True if dir is SVN directory, True if clean)."""
            if os.path.exists(os.path.join(dir_path, '.svn')):
                try:
                    status = self.svn_cmd(['status'], output=PIPE).out.strip()
                except SVNError:  # defensive code
                    return False, False
                if 'warning: W' in status:
                    return False, False
                return True, status == ''
            return False, False

        def is_empty_dir(dir_path):
            """Return True if the path is a directory and is empty."""
            return os.path.isdir(dir_path) and not os.listdir(dir_path)

        options = ['--ignore-externals']
        if revision:
            options += ['-r', revision]
        if force_and_clean:
            options += ['--force']

        is_svn_dir, is_clean = is_clean_svn_dir(self.working_copy)
        if is_svn_dir and (is_clean or not force_and_clean) and \
                (not url or self.url == url):
            self.svn_cmd(['update'] + options)
            return not is_clean
        if os.path.exists(self.working_copy):
            if not is_empty_dir(self.working_copy) and not force_and_clean:
                raise SVNError('not empty {}'.format(self.working_copy),
                               origin='update')
            if is_svn_dir and not url:
                url = self.url
            rm(self.working_copy, recursive=True)

        mkdir(self.working_copy)
        self.svn_cmd(['checkout', url, '.'] + options)
        return not is_clean
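
A hedged usage sketch for update (the SVNRepository class name, its constructor argument, and the URL are assumptions inferred from how self.working_copy and self.url are used above):

repo = SVNRepository(working_copy='/tmp/my_checkout')  # hypothetical setup
# Checks out https://svn.example.org/trunk if /tmp/my_checkout is empty,
# otherwise updates the existing working copy in place.
local_changes = repo.update(url='https://svn.example.org/trunk')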
Example #20
    def RUN_CONFIG_SEQUENCE(toplev_options, toolchain_discriminant):
        """Arrange to generate the SUITE_CONFIG configuration file"""

        # In principle, this would be something like
        #
        #  gprconfig --config=C --config=Asm --config=Ada --target=powerpc-elf
        #
        # to latch the compiler selections for all the languages, plus extra
        # bits for the RTS selection.
        #
        # RTS selection by relative path (e.g.
        #   --RTS=powerpc-elf/ravenscar-full-prep) isn't supported by
        # gprconfig, however. It is supported by gprbuild though, so we
        # resort to it here.

        # We build a temporary dummy project file in the current directory,
        # specifying languages only.
        with open("suite.gpr", "w") as tempgpr:

            # Given GNAT 5.04a1 does not come with a C++ compiler, we'd
            # resort to the system one to link if C++ is in the list of
            # languages, causing compatibility issues with the toolchain.

            # We just don't add C++ to the list of languages for such
            # toolchain.

            added_languages = (', "C++"'
                               if toolchain_discriminant != "5.04a1" else '')

            tempgpr.write(
                """
                project %(prjname)s is
                   for Languages use ("Asm", "C", "Ada" %(added_languages)s);
                end %(prjname)s;
            """ % {
                    'prjname': os.path.basename(tempgpr.name).split('.')[0],
                    'added_languages': added_languages
                })

        # We now run gprbuild -Ptemp.gpr --target=bla --RTS=blo, which
        # will complain about missing sources, but only after producing
        # an automatic config file with everything we need, and nothing
        # else (no other file).
        rm(BUILDER.SUITE_CGPR)

        extraopts = ['--target={}'.format(env.target.triplet)]
        if toplev_options.RTS:
            extraopts.append('--RTS=%s' % toplev_options.RTS)

        Run([
            GPRBUILD, '-P', tempgpr.name,
            '--autoconf=%s' % BUILDER.SUITE_CGPR
        ] + extraopts)

        rm(tempgpr.name)
Example #21
    def tear_down(self) -> None:
        """Run finalization operations after a test has run.

        Subclasses can override this to run clean-ups after testcase execution.
        By default, this method removes the working directory (unless
        --disable-cleanup/--dev-temp is passed).

        See set_up's docstring for the rationale behind this API.
        """
        if self.working_dir_cleanup_enabled:
            rm(self.working_dir(), True)
Example #22
    def _is_expired(fd):
        """Determine if an open cache file has expired.

        Automatically delete the file if it has passed its expiry time.
        """
        exp = pickle.load(fd)
        if exp is not None and exp < time.time():
            fd.close()
            rm(fd.name)
            return True
        return False
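
The file layout assumed here matches what the set() methods shown earlier produce: a pickled expiry timestamp followed by the pickled value. A self-contained sketch of writing and reading that layout:

import pickle
import tempfile
import time

with tempfile.NamedTemporaryFile(delete=False) as f:
    # First record: expiry time (or None); second record: the cached value.
    pickle.dump(time.time() + 60, f, pickle.HIGHEST_PROTOCOL)
    pickle.dump({'answer': 42}, f, pickle.HIGHEST_PROTOCOL)

with open(f.name, 'rb') as fd:
    exp = pickle.load(fd)
    value = pickle.load(fd) if exp is None or exp > time.time() else None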
Example #24
    def send_event(self, event: Event) -> bool:
        def s3_cp(from_path: str, s3_url: str) -> bool:
            cmd = ["s3", "cp", f"--sse={self.sse}"]
            if self.aws_profile:
                cmd.append(f"--profile={self.aws_profile}")
            cmd += [from_path, s3_url]

            s3 = Run(python_script("aws") + cmd, output=None)
            return s3.status == 0

        # Push attachments to s3 and keep track of their url.
        s3_attachs = {}
        for name, attach in list(event.get_attachments().items()):
            attach_path = attach[0]
            # Push the attachment
            s3_url = f"{self.log_s3_url}/{self.s3_prefix(event)}{event.uid}/{name}"
            success = s3_cp(attach_path, s3_url)
            if not success:
                return False
            else:
                ctype, encoding = mimetypes.guess_type(attach_path)
                s3_attachs[name] = {
                    "s3_url": s3_url,
                    "encoding": encoding,
                    "ctype": ctype,
                }

        # Create the JSON to send on the event bucket
        s3_event = {"attachments": s3_attachs, "event": event.as_dict()}

        try:
            tempfile_name = None
            with closing(tempfile.NamedTemporaryFile(mode="w",
                                                     delete=False)) as fd:
                tempfile_name = fd.name
                json.dump(s3_event, fd)

            # Note that an event can be sent several times with a different
            # status. As a consequence the target url in s3 should be
            # different for each call to send.
            success = s3_cp(
                tempfile_name,
                f"{self.event_s3_url}/{self.s3_prefix(event)}"
                f"{event.uid}-{unique_id()}.s3",
            )

            return success
        finally:
            if tempfile_name is not None:
                rm(tempfile_name)
Example #25
    def create(self, quiet=False):
        """Create a build space.

        The function creates all the necessary directories and files to have
        a valid empty build space.

        :param quiet: do not print info messages
        :type quiet: bool
        """
        # Always clean the temp directory
        rm(self.get_subdir(name='tmp'), recursive=True)
        for d in self.dirs:
            mkdir(self.get_subdir(name=d), quiet=quiet)
Example #26
    def setup_result_dir(self):
        """Create the output directory in which the results are stored."""
        if os.path.isdir(self.old_output_dir):
            rm(self.old_output_dir, True)
        if os.path.isdir(self.output_dir):
            mv(self.output_dir, self.old_output_dir)
        mkdir(self.output_dir)

        if self.main.args.dump_environ:
            with open(os.path.join(self.output_dir, 'environ.sh'), 'w') as f:
                for var_name in sorted(os.environ):
                    f.write('export %s=%s\n' %
                            (var_name, quote_arg(os.environ[var_name])))
Example #27
    def tear_down(self, previous_values):
        failures = [v for v in previous_values.values() if
                    not isinstance(v, TestStatus) or v != TestStatus.PASS]
        if failures:
            self.result.set_status(TestStatus.FAIL,
                                   msg="%s subtests failed" % len(failures))
        else:
            self.result.set_status(TestStatus.PASS)

        self.push_result()

        if self.env.enable_cleanup:
            rm(self.test_env['working_dir'], recursive=True)
Example #29
def test_apply_patch():
    """Test SourceBuilder.apply_patch handling."""
    sb = e3.anod.package.SourceBuilder(name="d-src",
                                       fullname=lambda: "d-src.tgz",
                                       checkout=["d-git"])

    with open("my_patch", "w") as f:
        f.write("--- a_file\t2017-04-11 16:34:44.000000000 +0200\n"
                "+++ a_file\t2017-04-11 16:34:40.000000000 +0200\n"
                "@@ -0,0 +1 @@\n"
                "+new line\n")

    current_dir = os.getcwd()
    touch("a_file")
    sb.apply_patch(None, "my_patch", current_dir)

    with open("a_file") as f:
        assert f.read().strip() == "new line"

    sb = e3.anod.package.SourceBuilder(
        name="d-src",
        fullname=lambda: "d-src.tgz",
        prepare_src=lambda x, y: None,
        checkout=["d-git", "e-git"],
    )

    with pytest.raises(e3.anod.error.AnodError) as err:
        sb.apply_patch(None, "my_patch", current_dir)
    assert "no apply_patch" in str(err)

    sb = e3.anod.package.SourceBuilder(
        name="d-src",
        fullname=lambda: "d-src.tgz",
        prepare_src=lambda x, y: None,
        checkout=["d-git", "e-git"],
        apply_patch=lambda x, y, z: 42,
    )

    assert sb.apply_patch(None, None, None) == 42

    # ThirdPartySourceBuilder sets a default patch command

    rm("a_file")
    touch("a_file")
    tsb = e3.anod.package.ThirdPartySourceBuilder(name="third-src.tgz")
    tsb.apply_patch(None, "my_patch", current_dir)

    with open("a_file") as f:
        assert f.read().strip() == "new line"
Example #30
def test_move_to_trash():
    work_dir = os.getcwd()

    test_file_path = os.path.join(work_dir, 'test_mv_to_trash.txt')
    touch(test_file_path)
    ntfile = NTFile(test_file_path)
    ntfile.open(Access.READ_DATA)
    try:
        with pytest.raises(NTException):
            ntfile.move_to_trash()
    finally:
        ntfile.close()
    trash_path = ntfile.trash_path
    ntfile.move_to_trash()
    rm(trash_path)
Example #32
    def run(self):
        try:
            rm(self.data.anod_instance.build_space.build_dir,
               recursive=True)
            mkdir(self.data.anod_instance.build_space.build_dir)
            rm(self.data.anod_instance.build_space.install_dir,
               recursive=True)
            mkdir(self.data.anod_instance.build_space.install_dir)
            Env().store()
            cd(self.data.anod_instance.build_space.build_dir)
            self.data.anod_instance.jobs = Env().build.cpu.cores
            self.data.anod_instance.build()
            Env().restore()
            self.run_status = ReturnValue.success
        except Exception:
            logging.exception("got exception while building")
            self.run_status = ReturnValue.failure
Example #33
    def setup_result_dir(self) -> None:
        """Create the output directory in which the results are stored."""
        assert self.main.args

        if os.path.isdir(self.old_output_dir):
            rm(self.old_output_dir, True)
        if os.path.isdir(self.output_dir):
            mv(self.output_dir, self.old_output_dir)
        mkdir(self.output_dir)

        if self.main.args.dump_environ:
            with open(os.path.join(self.output_dir, "environ.sh"), "w") as f:
                for var_name in sorted(os.environ):
                    f.write("export {}={}\n".format(
                        var_name, quote_arg(os.environ[var_name])))
Example #34
    def update(
        self,
        vcs: Literal["git"] | Literal["svn"] | Literal["external"],
        url: str,
        revision: Optional[str] = None,
    ) -> ReturnValue:
        """Update content of the working directory.

        :param vcs: vcs kind
        :param url: repository url, when vcs is external the url is the path
             to the source directory
        :param revision: revision

        Note that when vcs is set to git or svn, the version control ignore
        setting is taken into account. Additionally, when the vcs is
        external and the source directory contains a .git subdirectory then
        git ignore setting is taken into account.
        """
        # Reset changelog file
        if os.path.isfile(self.changelog_file):
            rm(self.changelog_file)

        update: Callable[
            [str, Optional[str]], tuple[ReturnValue, Optional[str], Optional[str]]
        ]
        if vcs == "git":
            update = self.update_git
        elif vcs == "svn":
            update = self.update_svn
        elif vcs == "external":
            update = self.update_external
        else:
            assert_never(vcs)

        result, old_commit, new_commit = update(url=url, revision=revision)

        with open(self.metadata_file, "w") as fd:
            json.dump(
                {
                    "name": self.name,
                    "url": url,
                    "old_commit": old_commit,
                    "new_commit": new_commit,
                    "revision": revision,
                },
                fd,
            )
        return result
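
The metadata file written above can be read back by downstream tooling; a minimal sketch (working_dir is a hypothetical instance of the class this method belongs to):

import json

with open(working_dir.metadata_file) as fd:
    meta = json.load(fd)
print(meta['name'], meta['old_commit'], meta['new_commit'])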
Example #36
    def reset(self, keep=None):
        """Reset build space.

        The function deletes the complete buildspace. The only elements
        that are not deleted are the logs and the testsuite results.

        A call to self.create() is needed after calling this function.

        :param keep: a list of directories to keep in addition
            to results and log
        :type keep: list[str] | None
        """
        keep = set(keep) if keep is not None else set()
        keep.update(('results', 'log'))
        for d in (d for d in self.dirs if d not in keep):
            rm(self.get_subdir(name=d), True)
Example #37
    def set(self, uid, value, timeout=DEFAULT_TIMEOUT):
        # Make sure that the cache dir exists
        self._create_cache_dir()
        dest_file = self.uid_to_file(uid)

        tmp_file = tempfile.NamedTemporaryFile(
            dir=self.cache_dir, delete=False)
        try:
            tmp_file.write(pickle.dumps(self.get_expiry_time(timeout),
                                        pickle.HIGHEST_PROTOCOL))
            tmp_file.write(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
            tmp_file.close()

            os.rename(tmp_file.name, dest_file)

        except OSError as err:
            tmp_file.close()
            rm(tmp_file.name)
            e3.log.debug('error when setting %s in %s:\n%s',
                         uid, dest_file, err)
Example #38
    def restore_env():
        Env().restore()
        rm(tempd, True)
Example #39
def gplize(anod_instance, src_dir, force=False):
    """Remove GPL specific exception.

    This operates recursively on all .h, .c, .ad* and .gp* files
    present in the directory passed as parameter.

    :param anod_instance: an Anod instance
    :type anod_instance: Anod
    :param src_dir: the directory to process
    :type src_dir: str
    :param force: force transformation to gpl
    :type force: bool
    """
    def remove_paragraph(filename):
        begin = r'-- .*As a .*special .*exception.* if other '\
            r'files .*instantiate .*generics from .*(this)? .*|'\
            r'-- .*As a .*special .*exception under Section 7 of GPL '\
            r'version 3, you are.*|'\
            r' \* .*As a .*special .*exception.* if you .*link .*this'\
            r' file .*with other .*files to.*|'\
            r' \* .*As a .*special .*exception under Section 7 of GPL '\
            r'version 3, you are.*|'\
            r'\/\/ .*As a .*special .*exception.* if other files '\
            r'.*instantiate .*generics from this.*|'\
            r'\/\/ .*As a .*special .*exception under Section 7 of GPL '\
            r'version 3, you are.*'
        end = r'-- .*covered .*by .*the .*GNU Public License.*|'\
            r'-- .*version 3.1, as published by the Free Software '\
            r'Foundation.*--|'\
            r'\/\/ .*covered by the  GNU Public License.*|'\
            r'.*file .*might be covered by the  GNU Public License.*|'\
            r'\/\/ .*version 3.1, as published by the Free Software'\
            r' Foundation.*\/\/|'\
            r' \* .*version 3.1, as published by the Free Software'\
            r' Foundation.*\*'

        output = StringIO()
        state = 2
        i = 0
        try:
            with open(filename) as f:
                for line in f:
                    # Detect comment type
                    if i == 1:
                        comment = line[0:2]
                        comment1 = comment
                        comment2 = comment
                        if comment == ' *':
                            comment2 = '* '
                    i += 1
                    # Detect beginning of exception paragraph
                    if re.match(begin, line):
                        state = 0
                        output.write(
                            comment1 + (74 * " ") + comment2 + "\n")
                        continue
                    # Detect end of exception paragraph
                    if re.match(end, line):
                        if state == 0:
                            state = 1
                            output.write(
                                comment1 + (74 * " ") + comment2 + "\n")
                            continue
                    # Skip one line after the paragraph
                    if state == 1:
                        state = 3
                    # Replace exception lines with blank comment lines
                    if state == 0:
                        output.write(
                            comment1 + (74 * " ") + comment2 + "\n")
                        continue
                    # Write non exception lines
                    if (state == 2) or (state == 3):
                        output.write(line)
                if state == 0:
                    raise AnodError(
                        'gplize: End of paragraph was not detected in %s' % (
                            filename))
                with open(filename, "w") as dest_f:
                    dest_f.write(output.getvalue())
        finally:
            output.close()

    if anod_instance.sandbox.config.get('release_mode', '') == 'gpl' or force:
        anod_instance.log.debug('move files to GPL license')

        rm(os.path.join(src_dir, 'COPYING.RUNTIME'))
        gpb_files = find(src_dir, "*.gp*")
        ada_files = find(src_dir, "*.ad*")
        c_files = find(src_dir, "*.[hc]")
        java_files = find(src_dir, "*.java")

        for file_set in (gpb_files, ada_files, c_files, java_files):
            for filename in file_set:
                remove_paragraph(filename)
Example #40
    def delete(self, uid):
        rm(self.uid_to_file(uid))
Example #41
    def clear(self):
        rm(os.path.join(self.cache_dir, '*.cache'))