Example #1
def data_path(filename, prefix=Path('DATA')):
    """Computes the filename to store in the archive.

    Turns an absolute path containing '..' into a filename without '..', and
    prefixes with DATA/.

    Example:

    >>> data_path(PosixPath('/var/lib/../../../../tmp/test'))
    PosixPath(b'DATA/tmp/test')
    >>> data_path(PosixPath('/var/lib/../www/index.html'))
    PosixPath(b'DATA/var/www/index.html')
    """
    return prefix / filename.split_root()[1]
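
For reference, a minimal usage sketch based on the docstring above (the input path is illustrative; PosixPath comes from rpaths):

from rpaths import PosixPath

# Maps an absolute host path to the name stored inside the archive:
archive_name = data_path(PosixPath(b'/var/lib/../www/index.html'))
# archive_name == PosixPath(b'DATA/var/www/index.html'), as in the docstring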
Example #2
File: main.py Project: kyleam/reprozip
def combine(args):
    """combine subcommand.

    Reads in multiple trace databases and combines them into one.

    The runs from the original traces are appended ('run_id' field gets
    translated to avoid conflicts).
    """
    traces = []
    for tracepath in args.traces:
        if tracepath == '-':
            tracepath = Path(args.dir) / 'trace.sqlite3'
        else:
            tracepath = Path(tracepath)
            if tracepath.is_dir():
                tracepath = tracepath / 'trace.sqlite3'
        traces.append(tracepath)

    reprozip.traceutils.combine_traces(traces, Path(args.dir))
    reprozip.tracer.trace.write_configuration(Path(args.dir),
                                              args.identify_packages,
                                              args.find_inputs_outputs,
                                              overwrite=True)
Example #3
File: docker.py Project: stain/reprozip
def docker_download(args):
    """Gets an output file out of the container.
    """
    target = Path(args.target[0])
    files = args.file
    unpacked_info = read_dict(target)

    if 'current_image' not in unpacked_info:
        logging.critical("Image doesn't exist yet, have you run setup/build?")
        sys.exit(1)
    image = unpacked_info['current_image']
    logging.debug("Downloading from image %s", image.decode('ascii'))

    ContainerDownloader(target, files, image, all_=args.all)
Example #4
 def __init__(self, path):
     path = Path(path)
     size = None
     if path.exists():
         if path.is_link():
             self.comment = "Link to %s" % path.read_link(absolute=True)
         elif path.is_dir():
             self.comment = "Directory"
         else:
             size = path.size()
             self.comment = hsize(size)
     self.what = None
     self.runs = defaultdict(lambda: None)
     File.__init__(self, path, size)
Example #5
def chroot_destroy_dir(args):
    """Destroys the directory.
    """
    target = Path(args.target[0])
    mounted = read_dict(target / '.reprounzip', 'chroot').get('mounted', False)

    if mounted:
        logging.critical("Magic directories might still be mounted")
        sys.exit(1)

    logging.info("Removing directory %s...", target)
    signals.pre_destroy(target=target)
    rmtree_fixed(target)
    signals.post_destroy(target=target)
Example #6
File: common.py Project: lcw/reprozip
def setup_logging(tag, verbosity):
    """Sets up the logging module.
    """
    levels = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG]
    console_level = levels[min(verbosity, 3)]
    file_level = logging.INFO
    min_level = min(console_level, file_level)

    # Create formatter, with same format as C extension
    fmt = "[%s] %%(asctime)s %%(levelname)s: %%(message)s" % tag
    formatter = LoggingDateFormatter(fmt)

    # Console logger
    handler = logging.StreamHandler()
    handler.setLevel(console_level)
    handler.setFormatter(formatter)

    # Set up logger
    rootlogger = logging.root
    rootlogger.setLevel(min_level)
    rootlogger.addHandler(handler)

    # File logger
    if os.environ.get('REPROZIP_NO_LOGFILE',
                      '').lower() in ('', 'false', '0', 'off'):
        dotrpz = Path('~/.reprozip').expand_user()
        try:
            if not dotrpz.is_dir():
                dotrpz.mkdir()
            filehandler = logging.handlers.RotatingFileHandler(str(dotrpz /
                                                                   'log'),
                                                               mode='a',
                                                               delay=False,
                                                               maxBytes=400000,
                                                               backupCount=5)
        except (IOError, OSError):
            logger.warning("Couldn't create log file %s", dotrpz / 'log')
        else:
            filehandler.setFormatter(formatter)
            filehandler.setLevel(file_level)
            rootlogger.addHandler(filehandler)

            filehandler.emit(
                logging.root.makeRecord(
                    __name__.split('.', 1)[0], logging.INFO, "(log start)", 0,
                    "Log opened %s %s",
                    (datetime.now().strftime("%Y-%m-%d"), sys.argv), None))

    logging.getLogger('urllib3').setLevel(logging.INFO)
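
A hedged usage sketch (the tag and verbosity values are illustrative): verbosity=2 selects logging.INFO for the console via the levels list above, and the rotating file handler writes to ~/.reprozip/log at INFO unless REPROZIP_NO_LOGFILE is set.

import logging

setup_logging('mytool', 2)
logging.getLogger(__name__).info("goes to the console and to ~/.reprozip/log")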
Example #7
def trace(args):
    """trace subcommand.

    Simply calls reprozip.tracer.trace() with the arguments from argparse.
    """
    if args.arg0 is not None:
        argv = [args.arg0] + args.cmdline[1:]
    else:
        argv = args.cmdline
    if args.append and args.overwrite:
        logging.critical("You can't use both --continue and --overwrite")
        sys.exit(2)
    elif args.append:
        append = True
    elif args.overwrite:
        append = False
    else:
        append = None
    reprozip.tracer.trace.trace(args.cmdline[0], argv, Path(args.dir), append,
                                args.verbosity)
    reprozip.tracer.trace.write_configuration(Path(args.dir),
                                              args.identify_packages,
                                              args.find_inputs_outputs,
                                              overwrite=False)
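
The --continue/--overwrite handling above reduces to a tri-state append flag; a small sketch of that logic in isolation (the helper name is made up for illustration):

def resolve_append(continue_flag, overwrite_flag):
    # Mirrors the branches above: the two options are mutually exclusive
    if continue_flag and overwrite_flag:
        raise SystemExit(2)
    if continue_flag:
        return True        # add to the existing trace
    if overwrite_flag:
        return False       # replace the existing trace
    return None            # neither given; behaviour decided downstream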
Example #8
def docker_download(args):
    """Gets an output file out of the container.
    """
    target = Path(args.target[0])
    files = args.file
    unpacked_info = read_dict(target / '.reprounzip')

    if 'ran_container' not in unpacked_info:
        logging.critical("Container does not exist. Have you run the "
                         "experiment?")
        sys.exit(1)
    container = unpacked_info['ran_container']
    logging.debug("Downloading from container %s", container.decode('ascii'))

    ContainerDownloader(target, files, container)
Example #9
File: default.py Project: lcw/reprozip
def chroot_mount(args):
    """Mounts /dev and /proc inside the chroot directory.
    """
    target = Path(args.target[0])
    unpacked_info = metadata_read(target, 'chroot')

    # Create proc mount
    d = target / 'root/proc'
    d.mkdir(parents=True)
    subprocess.check_call(['mount', '-t', 'proc', 'none', str(d)])

    # Bind /dev from host
    for m in ('/dev', '/dev/pts'):
        d = join_root(target / 'root', Path(m))
        d.mkdir(parents=True)
        logger.info("Mounting %s on %s...", m, d)
        subprocess.check_call(['mount', '-o', 'bind', m, str(d)])

    unpacked_info['mounted'] = True
    metadata_write(target, unpacked_info, 'chroot')

    logger.warning("The host's /dev and /proc have been mounted into the "
                   "chroot. Do NOT remove the unpacked directory with "
                   "rm -rf, it WILL WIPE the host's /dev directory.")
Example #10
File: docker.py Project: kyleam/reprozip
def docker_upload(args):
    """Replaces an input file in the container.
    """
    target = Path(args.target[0])
    files = args.file
    unpacked_info = read_dict(target)
    input_files = unpacked_info.setdefault('input_files', {})

    try:
        ContainerUploader(target,
                          input_files,
                          files,
                          unpacked_info,
                          docker_cmd=args.docker_cmd.split())
    finally:
        write_dict(target, unpacked_info)
Example #11
def docker_setup_build(args):
    """Builds the container from the Dockerfile
    """
    target = Path(args.target[0])
    unpacked_info = read_dict(target)
    if 'initial_image' in unpacked_info:
        logger.critical("Image already built")
        sys.exit(1)

    if args.image_name:
        image = args.image_name[0]
        if not isinstance(image, bytes):
            image = image.encode('ascii')
    else:
        image = make_unique_name(b'reprounzip_image_')

    logger.info("Calling 'docker build'...")
    try:
        retcode = subprocess.call(args.docker_cmd.split() + ['build', '-t'] +
                                  args.docker_option + [image, '.'],
                                  cwd=target.path)
    except OSError:
        logger.critical("docker executable not found")
        sys.exit(1)
    else:
        if retcode != 0:
            logger.critical("docker build failed with code %d", retcode)
            sys.exit(1)
    logger.info("Initial image created: %s", image.decode('ascii'))

    unpacked_info['initial_image'] = image
    unpacked_info['current_image'] = image
    if 'DOCKER_MACHINE_NAME' in os.environ:
        unpacked_info['docker_host'] = {
            'type': 'docker-machine',
            'name': os.environ['DOCKER_MACHINE_NAME']
        }
    elif 'DOCKER_HOST' in os.environ:
        unpacked_info['docker_host'] = {
            'type': 'custom',
            'env': dict((k, v) for k, v in iteritems(os.environ)
                        if k.startswith('DOCKER_'))
        }

    write_dict(target, unpacked_info)
Example #12
 def _get_package_for_file(self, filename):
     p = subprocess.Popen(['dpkg', '-S', filename.path],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
     out, err = p.communicate()
     for l in out.splitlines():
         pkgname, f = l.split(b': ', 1)
         f = Path(f.strip())
         # 8-bit safe encoding, because this might be a localized error
         # message (that we don't care about)
         pkgname = (pkgname.decode('iso-8859-1')
                           .split(':', 1)[0])    # Removes :arch
         self.package_files[f] = pkgname
         if f == filename:
             if ' ' not in pkgname:
                 return pkgname
     return None
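
For clarity, here is the parsing above applied to a single captured 'dpkg -S' output line (the sample line is illustrative):

line = b'libc6:amd64: /lib/x86_64-linux-gnu/libc.so.6'
pkgname, f = line.split(b': ', 1)
pkgname = pkgname.decode('iso-8859-1').split(':', 1)[0]  # drops the ':amd64' suffix
print(pkgname, f.strip())  # libc6 b'/lib/x86_64-linux-gnu/libc.so.6'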
Example #13
def vagrant_setup_start(args):
    """Starts the vagrant-built virtual machine.
    """
    target = Path(args.target[0])
    read_dict(target / '.reprounzip')

    check_vagrant_version()

    logging.info("Calling 'vagrant up'...")
    try:
        retcode = subprocess.call(['vagrant', 'up'], cwd=target.path)
    except OSError:
        logging.critical("vagrant executable not found")
        sys.exit(1)
    else:
        if retcode != 0:
            logging.critical("vagrant up failed with code %d", retcode)
            sys.exit(1)
Example #14
    def run(self, files):
        reprounzip.common.record_usage(download_files=len(files))
        runs = self.get_runs_from_config()

        # No argument: list all the output files and exit
        if not files:
            print("Output files:")
            for i, run in enumerate(runs):
                if len(runs) > 1:
                    print("  Run %d:" % i)
                for output_name in run['output_files']:
                    print("    %s" % output_name)
            return

        self.prepare_download(files)

        # Get the path of each output file
        all_output_files = {}
        for run in runs:
            all_output_files.update(run['output_files'])

        try:
            # Download files
            for filespec in files:
                filespec_split = filespec.split(':', 1)
                if len(filespec_split) != 2:
                    logging.critical("Invalid file specification: %r",
                                     filespec)
                    sys.exit(1)
                output_name, local_path = filespec_split

                try:
                    remote_path = PosixPath(all_output_files[output_name])
                except KeyError:
                    logging.critical("Invalid output file: %r", output_name)
                    sys.exit(1)

                logging.debug("Downloading file %s", remote_path)
                if not local_path:
                    self.download_and_print(remote_path)
                else:
                    self.download(remote_path, Path(local_path))
        finally:
            self.finalize()
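
The file specifications accepted above use the "<output_name>:<local_path>" convention; an empty local path means the file is printed to stdout. A quick illustration (values are made up):

filespec = 'arg2:output1.txt'
output_name, local_path = filespec.split(':', 1)
# output_name == 'arg2', local_path == 'output1.txt'
# 'arg2:' would give local_path == '' and trigger download_and_print()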
Example #15
def make_dir_writable(directory):
    """Context-manager that sets write permission on a directory.

    This assumes that the directory belongs to you. If the u+w permission
    wasn't set, it gets set in the context, and restored to what it was when
    leaving the context. u+x also gets set on all the directories leading to
    that path.
    """
    uid = os.getuid()

    try:
        sb = directory.stat()
    except OSError:
        pass
    else:
        if sb.st_uid != uid or sb.st_mode & 0o700 == 0o700:
            yield
            return

    # These are the permissions to be restored, in reverse order
    restore_perms = []
    try:
        # Add u+x to all directories up to the target
        path = Path('/')
        for c in directory.components[1:-1]:
            path = path / c
            sb = path.stat()
            if sb.st_uid == uid and not sb.st_mode & 0o100:
                logging.debug("Temporarily setting u+x on %s", path)
                restore_perms.append((path, sb.st_mode))
                path.chmod(sb.st_mode | 0o700)

        # Add u+wx to the target
        sb = directory.stat()
        if sb.st_uid == uid and sb.st_mode & 0o700 != 0o700:
            logging.debug("Temporarily setting u+wx on %s", directory)
            restore_perms.append((directory, sb.st_mode))
            directory.chmod(sb.st_mode | 0o700)

        yield
    finally:
        for path, mod in reversed(restore_perms):
            path.chmod(mod)
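
A hedged usage sketch, assuming the generator above is wrapped with contextlib.contextmanager as its docstring implies (the directory path is illustrative):

from rpaths import Path

target = Path('/opt/readonly-dir')
with make_dir_writable(target):
    (target / 'newfile').open('w').close()  # u+w is in effect here if needed
# the original permissions are restored when the block exits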
Example #16
def main():
    parser = argparse.ArgumentParser(
        description="Adds __future__ imports to Python files")
    parser.add_argument('-v',
                        '--verbose',
                        action='count',
                        dest='verbosity',
                        default=1)
    parser.add_argument('-e',
                        '--enable',
                        action='append',
                        help="Future import to enable")
    parser.add_argument('file',
                        nargs=argparse.ONE_OR_MORE,
                        help="File or directory in which to replace")
    args = parser.parse_args()
    levels = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[args.verbosity])

    if not args.enable:
        logging.critical("Nothing to do")
        sys.exit(1)

    enable = set(to_bytes(feature) for feature in args.enable)
    unrecognized = enable - FUTURES
    if unrecognized:
        logging.critical("Error: unknown futures %s" % ', '.join(unrecognized))
        sys.exit(1)

    for target in args.file:
        target = Path(target)
        if target.is_file():
            if not target.name.endswith('.py'):
                logging.warning("File %s doesn't end with .py, processing "
                                "anyway..." % target)
            process_file(target, enable)
        elif target.is_dir():
            logging.info("Processing %s recursively..." % target)
            for filename in target.recursedir('*.py'):
                process_file(filename, enable)
        else:
            logging.warning("Skipping %s..." % target)
Example #17
 def test_get_files(self):
     files, inputs, outputs = self.do_test([
         ('proc', 0, None),
         ('open', 0, "/some/dir", True, FILE_WDIR),
         ('exec', 0, "/some/dir/ls", "/some/dir"),
         ('open', 0, "/some/otherdir/in", False, FILE_READ),
         ('open', 0, "/some/thing/created", True, FILE_WRITE),
         ('open', 0, "/some/thing/created/file", False, FILE_WRITE),
         ('open', 0, "/some/thing/created/file", False, FILE_READ),
         ('open', 0, "/some/thing/created", True, FILE_WDIR),
         ('exec', 0, "/some/thing/created/file", "/some/thing/created"),
     ])
     expected = [
         '/some/dir',
         '/some/dir/ls',
         '/some/otherdir/in',
         '/some/thing',
     ]
     self.assertEqual(set(Path(p) for p in expected),
                      set(fi.path for fi in files))
Example #18
File: common.py Project: stain/reprozip
    def extract_trace(self, target):
        """Extracts the trace database to the specified path.

        It is up to the caller to remove that file once done.
        """
        target = Path(target)
        if self.version == 1:
            member = self.tar.getmember('METADATA/trace.sqlite3')
        elif self.version == 2:
            try:
                member = self.tar.getmember('METADATA/trace.sqlite3.gz')
            except KeyError:
                member = self.tar.getmember('METADATA/trace.sqlite3')
        else:
            assert False
        member = copy.copy(member)
        member.name = str(target.components[-1])
        self.tar.extract(member, path=str(Path.cwd() / target.parent))
        target.chmod(0o644)
        assert target.is_file()
Example #19
    def extract_trace(self, target):
        """Extracts the trace database to the specified path.

        It is up to the caller to remove that file once done.
        """
        target = Path(target)
        if self.version == 1:
            member = self.tar.getmember('METADATA/trace.sqlite3')
            self._extract_file(member, target)
        elif self.version == 2:
            try:
                member = self.tar.getmember('METADATA/trace.sqlite3.gz')
            except KeyError:
                pass
            else:
                self._extract_file_gz(member, target)
                return
            member = self.tar.getmember('METADATA/trace.sqlite3')
            self._extract_file(member, target)
        else:
            assert False
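
A hedged usage sketch; rpz_pack stands for an instance of the packed-archive class this method belongs to (the class itself is not shown in the snippet):

tmp_dir = Path.tempdir()
try:
    rpz_pack.extract_trace(tmp_dir / 'trace.sqlite3')
    # ... read the extracted SQLite database here ...
finally:
    tmp_dir.rmtree()  # per the docstring, cleanup is the caller's job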
Example #20
    def post_command(self):
        # Copy all the tracing artifacts here if not present already (e.g.
        # if session was a local shell)
        if not op.exists(self.local_trace_dir):
            remote_files = ["tracer.log", "trace.sqlite3"]
            if self.session.exists(
                    op.join(self.remote_trace_dir, self.extra_trace_file)):
                remote_files.append(self.extra_trace_file)
            for fname in remote_files:
                self.session.get(op.join(self.remote_trace_dir, fname),
                                 op.join(self.local_trace_dir, fname))
            lgr.info("Copied tracing artifacts under %s", self.local_trace_dir)
        else:
            lgr.debug(
                "Not copying %s from remote session "
                "since already exists locally", self.local_trace_dir)

        from reprozip.tracer.trace import write_configuration
        from rpaths import Path

        # we rely on hardcoded paths in reprozip
        write_configuration(directory=Path(self.local_trace_dir),
                            sort_packages=False,
                            find_inputs_outputs=True)

        local_extra_trace_file = op.join(self.local_trace_dir,
                                         self.extra_trace_file)
        if op.exists(local_extra_trace_file):
            with open(local_extra_trace_file, 'r') as fp:
                extra_files = yaml.safe_load(fp)
        else:
            extra_files = None

        from reproman.api import retrace
        reproman_spec_path = op.join(self.local_trace_dir, "reproman.yml")
        retrace(spec=op.join(self.local_trace_dir, "config.yml"),
                output_file=reproman_spec_path,
                resref=self.session,
                path=extra_files)
        lgr.info("ReproMan trace %s", reproman_spec_path)
Example #21
def list_directories(conn):
    """Gets additional needed directories from the trace database.

    Returns the directories which are used as a process's working directory or
    in which files are created.
    """
    cur = conn.cursor()
    executed_files = cur.execute(
            '''
            SELECT name, mode
            FROM opened_files
            WHERE mode = ? OR mode = ?
            ''',
            (FILE_WDIR, FILE_WRITE))
    executed_files = ((Path(n).resolve(), m) for n, m in executed_files)
    # If WDIR, the name is a folder that was used as working directory
    # If WRITE, the name is a file that was written to; its directory must
    # exist
    result = set(TracedFile(n if m == FILE_WDIR else n.parent)
                 for n, m in executed_files)
    cur.close()
    return result
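
An illustrative call (the database filename is hypothetical; FILE_WDIR and FILE_WRITE are the reprozip trace constants referenced in the query above):

import sqlite3

conn = sqlite3.connect('trace.sqlite3')
extra_dirs = list_directories(conn)  # set of TracedFile for the needed directories
conn.close()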
Example #22
def find_all_links_recursive(filename, files):
    path = Path('/')
    for c in filename.components[1:]:
        # At this point, path is a canonical path, and all links in it have
        # been resolved

        # We add the next path component
        path = path / c

        # That component is possibly a link
        if path.is_link():
            # Adds the link itself
            files.add(path)

            target = path.read_link(absolute=True)
            # Here, target might contain a number of symlinks
            if target not in files:
                # Recurse on this new path
                find_all_links_recursive(target, files)
            # Restores the invariant; realpath might resolve several links here
            path = path.resolve()
    return path
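
An illustrative call (the path is made up): the files set accumulates every symlink met while walking the components, and the return value is the fully resolved path.

links = set()
resolved = find_all_links_recursive(Path('/usr/bin/python'), links)
# 'links' now holds each intermediate symlink; 'resolved' contains none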
Example #23
File: docker.py Project: Aloma/reprozip
def docker_setup_build(args):
    """Builds the container from the Dockerfile
    """
    target = Path(args.target[0])
    unpacked_info = read_dict(target / '.reprounzip')
    if 'initial_image' in unpacked_info:
        logging.critical("Image already built")
        sys.exit(1)

    image = make_unique_name(b'reprounzip_image_')

    logging.info("Calling 'docker build'...")
    retcode = subprocess.call(['docker', 'build', '-t', image, '.'],
                              cwd=target.path)
    if retcode != 0:
        logging.critical("docker build failed with code %d", retcode)
        sys.exit(1)
    logging.info("Initial image created: %s", image.decode('ascii'))

    unpacked_info['initial_image'] = image
    unpacked_info['current_image'] = image
    write_dict(target / '.reprounzip', unpacked_info)
Example #24
 def post(self):
     self._notebook_file = Path(self.get_body_argument('file'))
     name = self._notebook_file.unicodename
     if name.endswith('.ipynb'):
         name = name[:-6]
     name = u'%s_%s.rpz' % (name, datetime.now().strftime('%Y%m%d-%H%M%S'))
     self._pack_file = self._notebook_file.parent / name
     self.nbapp.log.info("reprozip: tracing request from client: file=%r",
                         self._notebook_file)
     self._tempdir = Path.tempdir()
     self.nbapp.log.info("reprozip: created temp directory %r",
                         self._tempdir)
     proc = Subprocess([
         sys.executable, '-c',
         'from reprozip_jupyter.main import main; main()', 'trace',
         '--dont-save-notebook', '-d', self._tempdir.path,
         self._notebook_file.path
     ],
                       stdin=subprocess.PIPE)
     proc.stdin.close()
     proc.set_exit_callback(self._trace_done)
     self.nbapp.log.info("reprozip: started tracing...")
Example #25
    def run(self, files):
        reprounzip.common.record_usage(download_files=len(files))
        output_files = dict(
            (n, f.path)
            for n, f in iteritems(self.get_config().inputs_outputs)
            if f.write_runs)

        # No argument: list all the output files and exit
        if not files:
            print("Output files:")
            for output_name in output_files:
                print("    %s" % output_name)
            return

        self.prepare_download(files)

        try:
            # Download files
            for filespec in files:
                filespec_split = filespec.split(':', 1)
                if len(filespec_split) != 2:
                    logging.critical("Invalid file specification: %r",
                                     filespec)
                    sys.exit(1)
                output_name, local_path = filespec_split

                try:
                    remote_path = output_files[output_name]
                except KeyError:
                    logging.critical("Invalid output file: %r", output_name)
                    sys.exit(1)

                logging.debug("Downloading file %s", remote_path)
                if not local_path:
                    self.download_and_print(remote_path)
                else:
                    self.download(remote_path, Path(local_path))
        finally:
            self.finalize()
Example #26
File: default.py Project: Aloma/reprozip
def chroot_run(args):
    """Runs the command in the chroot.
    """
    target = Path(args.target[0])
    read_dict(target / '.reprounzip', 'chroot')
    cmdline = args.cmdline

    # Loads config
    runs, packages, other_files = load_config_file(target / 'config.yml', True)

    selected_runs = get_runs(runs, args.run, cmdline)

    root = target / 'root'

    cmds = []
    for run_number in selected_runs:
        run = runs[run_number]
        cmd = 'cd %s && ' % shell_escape(run['workingdir'])
        cmd += '/usr/bin/env -i '
        cmd += ' '.join('%s=%s' % (k, shell_escape(v))
                        for k, v in iteritems(run['environ']))
        cmd += ' '
        # FIXME : Use exec -a or something if binary != argv[0]
        if cmdline is None:
            argv = [run['binary']] + run['argv'][1:]
        else:
            argv = cmdline
        cmd += ' '.join(shell_escape(a) for a in argv)
        userspec = '%s:%s' % (run.get('uid', 1000), run.get('gid', 1000))
        cmd = 'chroot --userspec=%s %s /bin/sh -c %s' % (
            userspec, shell_escape(unicode_(root)), shell_escape(cmd))
        cmds.append(cmd)
    cmds = ' && '.join(cmds)

    signals.pre_run(target=target)
    retcode = subprocess.call(cmds, shell=True)
    sys.stderr.write("\n*** Command finished, status: %d\n" % retcode)
    signals.post_run(target=target, retcode=retcode)
Example #27
def setup_logging(tag, verbosity):
    """Sets up the logging module.
    """
    levels = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG]
    console_level = levels[min(verbosity, 3)]
    file_level = logging.INFO
    min_level = min(console_level, file_level)

    # Create formatter, with same format as C extension
    fmt = "[%s] %%(asctime)s %%(levelname)s: %%(message)s" % tag
    formatter = LoggingDateFormatter(fmt)

    # Console logger
    handler = logging.StreamHandler()
    handler.setLevel(console_level)
    handler.setFormatter(formatter)

    # Set up logger
    logger = logging.root
    logger.setLevel(min_level)
    logger.addHandler(handler)

    # File logger
    dotrpz = Path('~/.reprozip').expand_user()
    try:
        if not dotrpz.is_dir():
            dotrpz.mkdir()
        filehandler = logging.handlers.RotatingFileHandler(str(dotrpz / 'log'),
                                                           mode='a',
                                                           delay=False,
                                                           maxBytes=400000,
                                                           backupCount=5)
    except (IOError, OSError):
        logging.warning("Couldn't create log file %s", dotrpz / 'log')
    else:
        filehandler.setFormatter(formatter)
        filehandler.setLevel(file_level)
        logger.addHandler(filehandler)
Example #28
File: docker.py Project: satra/reprozip
def docker_destroy_docker(args):
    """Destroys the container and images.
    """
    target = Path(args.target[0])
    unpacked_info = read_dict(target)
    if 'initial_image' not in unpacked_info:
        logging.critical("Image not created")
        sys.exit(1)

    initial_image = unpacked_info.pop('initial_image')

    if 'current_image' in unpacked_info:
        image = unpacked_info.pop('current_image')
        if image != initial_image:
            logging.info("Destroying image %s...", image.decode('ascii'))
            retcode = subprocess.call(['docker', 'rmi', image])
            if retcode != 0:
                logging.error("Error deleting image %s", image.decode('ascii'))

    logging.info("Destroying image %s...", initial_image.decode('ascii'))
    retcode = subprocess.call(['docker', 'rmi', initial_image])
    if retcode != 0:
        logging.error("Error deleting image %s", initial_image.decode('ascii'))
Example #29
    def __init__(self, enabled, target, display=None):
        self.enabled = enabled
        if not self.enabled:
            return

        self.target = target

        self.xauth = PosixPath('/.reprounzip_xauthority')
        self.display = display if display is not None else self.DISPLAY_NUMBER
        logging.debug(
            "X11 support enabled; will create Xauthority file %s "
            "for experiment. Display number is %d", self.xauth, self.display)

        # List of addresses that match the $DISPLAY variable
        possible, local_display = self._locate_display()
        tcp_portnum = ((6000 +
                        local_display) if local_display is not None else None)

        if ('XAUTHORITY' in os.environ
                and Path(os.environ['XAUTHORITY']).is_file()):
            xauthority = Path(os.environ['XAUTHORITY'])
        # Note: I'm assuming here that Xauthority has no XDG support
        else:
            xauthority = Path('~').expand_user() / '.Xauthority'

        # Read Xauthority file
        xauth_entries = {}
        if xauthority.is_file():
            with xauthority.open('rb') as fp:
                fp.seek(0, os.SEEK_END)
                size = fp.tell()
                fp.seek(0, os.SEEK_SET)
                while fp.tell() < size:
                    entry = Xauth.from_file(fp)
                    if (entry.name == 'MIT-MAGIC-COOKIE-1'
                            and entry.number == local_display):
                        if entry.family == Xauth.FAMILY_LOCAL:
                            xauth_entries[(entry.family, None)] = entry
                        elif (entry.family == Xauth.FAMILY_INTERNET
                              or entry.family == Xauth.FAMILY_INTERNET6):
                            xauth_entries[(entry.family,
                                           entry.address)] = entry
        # FIXME: this completely ignores addresses

        logging.debug("Possible X endpoints: %s", (possible, ))

        # Select socket and authentication cookie
        self.xauth_record = None
        self.connection_info = None
        for family, address in possible:
            # Checks that we have a cookie
            entry = family, (None if family is Xauth.FAMILY_LOCAL else address)
            if entry not in xauth_entries:
                continue
            if family == Xauth.FAMILY_LOCAL and hasattr(socket, 'AF_UNIX'):
                # Checks that the socket exists
                if not Path(address).exists():
                    continue
                self.connection_info = (socket.AF_UNIX, socket.SOCK_STREAM,
                                        address)
                self.xauth_record = xauth_entries[(family, None)]
                logging.debug(
                    "Will connect to local X display via UNIX "
                    "socket %s", address)
                break
            else:
                # Checks that we have a cookie; xauth_entries is keyed by the
                # Xauth family, so look it up before mapping to a socket family
                self.xauth_record = xauth_entries[(family, address)]
                family = self.X2SOCK[family]
                self.connection_info = (family, socket.SOCK_STREAM,
                                        (address, tcp_portnum))
                logging.debug("Will connect to X display %s:%d via %s/TCP",
                              address, tcp_portnum,
                              "IPv6" if family == socket.AF_INET6 else "IPv4")
                break

        # Didn't find an Xauthority record -- assume no authentication is
        # needed, but still set self.connection_info
        if self.connection_info is None:
            for family, address in possible:
                # Only try UNIX sockets, we'll use 127.0.0.1 otherwise
                if family == Xauth.FAMILY_LOCAL:
                    if not hasattr(socket, 'AF_UNIX'):
                        continue
                    self.connection_info = (socket.AF_UNIX, socket.SOCK_STREAM,
                                            address)
                    logging.debug(
                        "Will connect to X display via UNIX socket "
                        "%s, no authentication", address)
                    break
            else:
                self.connection_info = (socket.AF_INET, socket.SOCK_STREAM,
                                        ('127.0.0.1', tcp_portnum))
                logging.debug(
                    "Will connect to X display 127.0.0.1:%d via IPv4/TCP, "
                    "no authentication", tcp_portnum)

        if self.connection_info is None:
            raise RuntimeError("Couldn't determine how to connect to local X "
                               "server, DISPLAY is %s" %
                               (repr(os.environ['DISPLAY'])
                                if 'DISPLAY' in os.environ else 'not set'))
Example #30
def functional_tests(raise_warnings, interactive, run_vagrant, run_docker):
    rpz_python = [os.environ.get('REPROZIP_PYTHON', sys.executable)]
    rpuz_python = [os.environ.get('REPROUNZIP_PYTHON', sys.executable)]

    # Can't match on the SignalWarning category here because of a Python bug
    # http://bugs.python.org/issue22543
    if raise_warnings:
        rpz_python.extend(['-W', 'error:signal'])
        rpuz_python.extend(['-W', 'error:signal'])

    if 'COVER' in os.environ:
        rpz_python.extend(['-m'] + os.environ['COVER'].split(' '))
        rpuz_python.extend(['-m'] + os.environ['COVER'].split(' '))

    reprozip_main = tests.parent / 'reprozip/reprozip/main.py'
    reprounzip_main = tests.parent / 'reprounzip/reprounzip/main.py'

    verbose = ['-v'] * 3
    rpz = rpz_python + [reprozip_main.absolute().path] + verbose
    rpuz = rpuz_python + [reprounzip_main.absolute().path] + verbose

    print("Command lines are:\n%r\n%r" % (rpz, rpuz))

    # ########################################
    # testrun /bin/echo
    #

    output = check_output(rpz + ['testrun', '/bin/echo', 'outputhere'])
    assert any(b' 1 | /bin/echo outputhere ' in l for l in output.splitlines())

    output = check_output(
        rpz + ['testrun', '-a', '/fake/path/echo', '/bin/echo', 'outputhere'])
    assert any(b' 1 | (/bin/echo) /fake/path/echo outputhere ' in l
               for l in output.splitlines())

    # ########################################
    # testrun multiple commands
    #

    check_call(rpz + [
        'testrun', 'bash', '-c', 'cat ../../../../../etc/passwd;'
        'cd /var/lib;'
        'cat ../../etc/group'
    ])
    check_call(rpz +
               ['trace', '--overwrite', 'bash', '-c', 'cat /etc/passwd;echo'])
    check_call(
        rpz +
        ['trace', '--continue', 'sh', '-c', 'cat /etc/group;/usr/bin/id'])
    check_call(rpz + ['pack'])
    check_call(rpuz + ['graph', 'graph.dot'])
    check_call(rpuz + ['graph', 'graph2.dot', 'experiment.rpz'])

    sudo = ['sudo', '-E']  # -E to keep REPROZIP_USAGE_STATS

    # ########################################
    # 'simple' program: trace, pack, info, unpack
    #

    def check_simple(args, stream, infile=1):
        output = check_output(args, stream).splitlines()
        try:
            first = output.index(b"Read 6 bytes")
        except ValueError:
            stderr.write("output = %r\n" % output)
            raise
        if infile == 1:
            assert output[first + 1] == b"a = 29, b = 13"
            assert output[first + 2] == b"result = 42"
        else:  # infile == 2
            assert output[first + 1] == b"a = 25, b = 11"
            assert output[first + 2] == b"result = 36"

    # Build
    build('simple', ['simple.c'])
    # Trace
    check_call(rpz + [
        'trace', '--overwrite', '-d', 'rpz-simple', './simple',
        (tests / 'simple_input.txt').path, 'simple_output.txt'
    ])
    orig_output_location = Path('simple_output.txt').absolute()
    assert orig_output_location.is_file()
    with orig_output_location.open(encoding='utf-8') as fp:
        assert fp.read().strip() == '42'
    orig_output_location.remove()
    # Read config
    with Path('rpz-simple/config.yml').open(encoding='utf-8') as fp:
        conf = yaml.safe_load(fp)
    other_files = set(Path(f).absolute() for f in conf['other_files'])
    expected = [Path('simple'), (tests / 'simple_input.txt')]
    assert other_files.issuperset([f.resolve() for f in expected])
    # Check input and output files
    inputs_outputs = conf['inputs_outputs']
    # Exactly one input: "arg1", "...simple_input.txt"
    # Output: 'arg2', "...simple_output.txt"
    # There might be more output files: the C coverage files
    found = 0
    for fdict in inputs_outputs:
        if Path(fdict['path']).name == b'simple_input.txt':
            assert fdict['name'] == 'arg1'
            assert fdict['read_by_runs'] == [0]
            assert not fdict.get('written_by_runs')
            found |= 0x01
        elif Path(fdict['path']).name == b'simple_output.txt':
            assert fdict['name'] == 'arg2'
            assert not fdict.get('read_by_runs')
            assert fdict['written_by_runs'] == [0]
            found |= 0x02
        else:
            # No other inputs
            assert not fdict.get('read_by_runs')
    assert found == 0x03
    # Pack
    check_call(rpz + ['pack', '-d', 'rpz-simple', 'simple.rpz'])
    Path('simple').rename('simple.orig')
    # Info
    check_call(rpuz + ['info', 'simple.rpz'])
    # Show files
    check_call(rpuz + ['showfiles', 'simple.rpz'])
    # Lists packages
    check_call(rpuz + ['installpkgs', '--summary', 'simple.rpz'])
    # Unpack directory
    check_call(rpuz + ['directory', 'setup', 'simple.rpz', 'simpledir'])
    # Run directory
    check_simple(rpuz + ['directory', 'run', 'simpledir'], 'err')
    output_in_dir = join_root(Path('simpledir/root'), orig_output_location)
    with output_in_dir.open(encoding='utf-8') as fp:
        assert fp.read().strip() == '42'
    # Delete with wrong command (should fail)
    p = subprocess.Popen(rpuz + ['chroot', 'destroy', 'simpledir'],
                         stderr=subprocess.PIPE)
    out, err = p.communicate()
    assert p.poll() != 0
    err = err.splitlines()
    assert b"Wrong unpacker used" in err[0]
    assert err[1].startswith(b"usage: ")
    # Delete directory
    check_call(rpuz + ['directory', 'destroy', 'simpledir'])
    # Unpack chroot
    check_call(
        sudo + rpuz +
        ['chroot', 'setup', '--bind-magic-dirs', 'simple.rpz', 'simplechroot'])
    try:
        output_in_chroot = join_root(Path('simplechroot/root'),
                                     orig_output_location)
        # Run chroot
        check_simple(sudo + rpuz + ['chroot', 'run', 'simplechroot'], 'err')
        with output_in_chroot.open(encoding='utf-8') as fp:
            assert fp.read().strip() == '42'
        # Get output file
        check_call(sudo + rpuz +
                   ['chroot', 'download', 'simplechroot', 'arg2:output1.txt'])
        with Path('output1.txt').open(encoding='utf-8') as fp:
            assert fp.read().strip() == '42'
        # Get random file
        check_call(sudo + rpuz + [
            'chroot', 'download', 'simplechroot',
            '%s:binc.bin' % (Path.cwd() / 'simple')
        ])
        assert same_files('simple.orig', 'binc.bin')
        # Replace input file
        check_call(sudo + rpuz + [
            'chroot', 'upload', 'simplechroot',
            '%s:arg1' % (tests / 'simple_input2.txt')
        ])
        check_call(sudo + rpuz + ['chroot', 'upload', 'simplechroot'])
        # Run again
        check_simple(sudo + rpuz + ['chroot', 'run', 'simplechroot'], 'err', 2)
        with output_in_chroot.open(encoding='utf-8') as fp:
            assert fp.read().strip() == '36'
        # Reset input file
        check_call(sudo + rpuz + ['chroot', 'upload', 'simplechroot', ':arg1'])
        # Run again
        check_simple(sudo + rpuz + ['chroot', 'run', 'simplechroot'], 'err')
        with output_in_chroot.open(encoding='utf-8') as fp:
            assert fp.read().strip() == '42'
        # Replace input file via path
        check_call(sudo + rpuz + [
            'chroot', 'upload', 'simplechroot',
            '%s:%s' % (tests / 'simple_input2.txt', tests / 'simple_input.txt')
        ])
        check_call(sudo + rpuz + ['chroot', 'upload', 'simplechroot'])
        # Run again
        check_simple(sudo + rpuz + ['chroot', 'run', 'simplechroot'], 'err', 2)
        # Delete with wrong command (should fail)
        p = subprocess.Popen(rpuz + ['directory', 'destroy', 'simplechroot'],
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        assert p.poll() != 0
        err = err.splitlines()
        assert b"Wrong unpacker used" in err[0]
        assert err[1].startswith(b"usage:")
    finally:
        # Delete chroot
        check_call(sudo + rpuz + ['chroot', 'destroy', 'simplechroot'])

    # Use reprounzip-vistrails with chroot
    check_call(sudo + rpuz + [
        'chroot', 'setup', '--bind-magic-dirs', 'simple.rpz', 'simplechroot_vt'
    ])
    try:
        output_in_chroot = join_root(Path('simplechroot_vt/root'),
                                     orig_output_location)
        # Run using reprounzip-vistrails
        check_simple(
            sudo + rpuz_python + [
                '-m', 'reprounzip.plugins.vistrails', '1', 'chroot',
                'simplechroot_vt', '0', '--input-file',
                'arg1:%s' % (tests / 'simple_input2.txt'), '--output-file',
                'arg2:output_vt.txt'
            ], 'err', 2)
        with output_in_chroot.open(encoding='utf-8') as fp:
            assert fp.read().strip() == '36'
    finally:
        # Delete chroot
        check_call(sudo + rpuz + ['chroot', 'destroy', 'simplechroot_vt'])

    if not (tests / 'vagrant').exists():
        check_call([
            'sudo', 'sh', '-c',
            'mkdir %(d)s; chmod 777 %(d)s' % {
                'd': tests / 'vagrant'
            }
        ])

    # Unpack Vagrant-chroot
    check_call(rpuz + [
        'vagrant', 'setup/create', '--memory', '512', '--use-chroot',
        'simple.rpz', (tests / 'vagrant/simplevagrantchroot').path
    ])
    print("\nVagrant project set up in simplevagrantchroot")
    try:
        if run_vagrant:
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrantchroot').path
                ], 'out')
            # Get output file
            check_call(rpuz + [
                'vagrant', 'download',
                (tests / 'vagrant/simplevagrantchroot').path,
                'arg2:voutput1.txt'
            ])
            with Path('voutput1.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '42'
            # Get random file
            check_call(rpuz + [
                'vagrant', 'download', (tests /
                                        'vagrant/simplevagrantchroot').path,
                '%s:binvc.bin' % (Path.cwd() / 'simple')
            ])
            assert same_files('simple.orig', 'binvc.bin')
            # Replace input file
            check_call(rpuz + [
                'vagrant', 'upload', (tests /
                                      'vagrant/simplevagrantchroot').path,
                '%s:arg1' % (tests / 'simple_input2.txt')
            ])
            check_call(rpuz + [
                'vagrant', 'upload', (tests /
                                      'vagrant/simplevagrantchroot').path
            ])
            # Run again
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrantchroot').path
                ], 'out', 2)
            # Get output file
            check_call(rpuz + [
                'vagrant', 'download',
                (tests / 'vagrant/simplevagrantchroot').path,
                'arg2:voutput2.txt'
            ])
            with Path('voutput2.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '36'
            # Reset input file
            check_call(rpuz + [
                'vagrant', 'upload',
                (tests / 'vagrant/simplevagrantchroot').path, ':arg1'
            ])
            # Run again
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrantchroot').path
                ], 'out')
            # Get output file
            check_call(rpuz + [
                'vagrant', 'download',
                (tests / 'vagrant/simplevagrantchroot').path,
                'arg2:voutput1.txt'
            ])
            with Path('voutput1.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '42'
            # Replace input file via path
            check_call(rpuz + [
                'vagrant', 'upload', (tests /
                                      'vagrant/simplevagrantchroot').path,
                '%s:%s' %
                (tests / 'simple_input2.txt', tests / 'simple_input.txt')
            ])
            # Run again
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrantchroot').path
                ], 'out', 2)
            # Destroy
            check_call(rpuz + [
                'vagrant', 'destroy', (tests /
                                       'vagrant/simplevagrantchroot').path
            ])
        elif interactive:
            print("Test and press enter")
            sys.stdin.readline()
    finally:
        if (tests / 'vagrant/simplevagrantchroot').exists():
            (tests / 'vagrant/simplevagrantchroot').rmtree()
    # Unpack Vagrant without chroot
    check_call(rpuz + [
        'vagrant', 'setup/create', '--dont-use-chroot', 'simple.rpz',
        (tests / 'vagrant/simplevagrant').path
    ])
    print("\nVagrant project set up in simplevagrant")
    try:
        if run_vagrant:
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrant').path
                ], 'out')
            # Get output file
            check_call(rpuz + [
                'vagrant', 'download',
                (tests / 'vagrant/simplevagrant').path, 'arg2:woutput1.txt'
            ])
            with Path('woutput1.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '42'
            # Get random file
            check_call(rpuz + [
                'vagrant', 'download', (tests / 'vagrant/simplevagrant').path,
                '%s:binvs.bin' % (Path.cwd() / 'simple')
            ])
            assert same_files('simple.orig', 'binvs.bin')
            # Replace input file
            check_call(rpuz + [
                'vagrant', 'upload', (tests / 'vagrant/simplevagrant').path,
                '%s:arg1' % (tests / 'simple_input2.txt')
            ])
            check_call(
                rpuz +
                ['vagrant', 'upload', (tests / 'vagrant/simplevagrant').path])
            # Run again
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrant').path
                ], 'out', 2)
            # Get output file
            check_call(rpuz + [
                'vagrant', 'download',
                (tests / 'vagrant/simplevagrant').path, 'arg2:woutput2.txt'
            ])
            with Path('woutput2.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '36'
            # Reset input file
            check_call(rpuz + [
                'vagrant', 'upload', (tests /
                                      'vagrant/simplevagrant').path, ':arg1'
            ])
            # Run again
            check_simple(
                rpuz + [
                    'vagrant', 'run', '--no-stdin',
                    (tests / 'vagrant/simplevagrant').path
                ], 'out')
            # Get output file
            check_call(rpuz + [
                'vagrant', 'download',
                (tests / 'vagrant/simplevagrant').path, 'arg2:voutput1.txt'
            ])
            with Path('voutput1.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '42'
            # Destroy
            check_call(
                rpuz +
                ['vagrant', 'destroy', (tests / 'vagrant/simplevagrant').path])
        elif interactive:
            print("Test and press enter")
            sys.stdin.readline()
    finally:
        if (tests / 'vagrant/simplevagrant').exists():
            (tests / 'vagrant/simplevagrant').rmtree()

    # Unpack Docker
    check_call(rpuz + ['docker', 'setup/create', 'simple.rpz', 'simpledocker'])
    print("\nDocker project set up in simpledocker")
    try:
        if run_docker:
            check_call(rpuz + ['docker', 'setup/build', 'simpledocker'])
            check_simple(rpuz + ['docker', 'run', 'simpledocker'], 'out')
            # Get output file
            check_call(
                rpuz +
                ['docker', 'download', 'simpledocker', 'arg2:doutput1.txt'])
            with Path('doutput1.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '42'
            # Get random file
            check_call(rpuz + [
                'docker', 'download', 'simpledocker',
                '%s:bind.bin' % (Path.cwd() / 'simple')
            ])
            assert same_files('simple.orig', 'bind.bin')
            # Replace input file
            check_call(rpuz + [
                'docker', 'upload', 'simpledocker',
                '%s:arg1' % (tests / 'simple_input2.txt')
            ])
            check_call(rpuz + ['docker', 'upload', 'simpledocker'])
            check_call(rpuz + ['showfiles', 'simpledocker'])
            # Run again
            check_simple(rpuz + ['docker', 'run', 'simpledocker'], 'out', 2)
            # Get output file
            check_call(
                rpuz +
                ['docker', 'download', 'simpledocker', 'arg2:doutput2.txt'])
            with Path('doutput2.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '36'
            # Reset input file
            check_call(rpuz + ['docker', 'upload', 'simpledocker', ':arg1'])
            # Run again
            check_simple(rpuz + ['docker', 'run', 'simpledocker'], 'out')
            # Get output file
            check_call(
                rpuz +
                ['docker', 'download', 'simpledocker', 'arg2:doutput1.txt'])
            with Path('doutput1.txt').open(encoding='utf-8') as fp:
                assert fp.read().strip() == '42'
            # Replace input file via path
            check_call(rpuz + [
                'docker', 'upload', 'simpledocker',
                '%s:%s' %
                (tests / 'simple_input2.txt', tests / 'simple_input.txt')
            ])
            # Run again
            check_simple(rpuz + ['docker', 'run', 'simpledocker'], 'out', 2)
            # Destroy
            check_call(rpuz + ['docker', 'destroy', 'simpledocker'])
        elif interactive:
            print("Test and press enter")
            sys.stdin.readline()
    finally:
        if Path('simpledocker').exists():
            Path('simpledocker').rmtree()

    # ########################################
    # 'threads' program: testrun
    #

    # Build
    build('threads', ['threads.c'], ['-lpthread'])
    # Trace
    output = check_output(rpz + ['testrun', './threads'], 'err')
    assert any(b'successfully exec\'d /bin/./echo' in l
               for l in output.splitlines())

    # ########################################
    # 'threads2' program: testrun
    #

    # Build
    build('threads2', ['threads2.c'], ['-lpthread'])
    # Trace
    output = check_output(rpz + ['testrun', './threads2'], 'err')
    assert any(b'successfully exec\'d /bin/echo' in l
               for l in output.splitlines())

    # ########################################
    # 'segv' program: testrun
    #

    # Build
    build('segv', ['segv.c'])
    # Trace
    check_call(rpz + ['testrun', './segv'])

    # ########################################
    # 'exec_echo' program: trace, pack, run --cmdline
    #

    # Build
    build('exec_echo', ['exec_echo.c'])
    # Trace
    check_call(
        rpz +
        ['trace', '--overwrite', './exec_echo', 'originalexecechooutput'])
    # Pack
    check_call(rpz + ['pack', 'exec_echo.rpz'])
    # Unpack chroot
    check_call(sudo + rpuz +
               ['chroot', 'setup', 'exec_echo.rpz', 'echochroot'])
    try:
        # Run original command-line
        output = check_output(sudo + rpuz + ['chroot', 'run', 'echochroot'])
        assert output == b'originalexecechooutput\n'
        # Prints out command-line
        output = check_output(sudo + rpuz +
                              ['chroot', 'run', 'echochroot', '--cmdline'])
        assert any(b'./exec_echo originalexecechooutput' == s.strip()
                   for s in output.split(b'\n'))
        # Run with different command-line
        output = check_output(sudo + rpuz + [
            'chroot', 'run', 'echochroot', '--cmdline', './exec_echo',
            'changedexecechooutput'
        ])
        assert output == b'changedexecechooutput\n'
    finally:
        check_call(sudo + rpuz + ['chroot', 'destroy', 'echochroot'])

    # ########################################
    # 'exec_echo' program: testrun
    # This is built with -m32 so that we transition:
    #   python (x64) -> exec_echo (i386) -> echo (x64)
    #

    if sys.maxsize > 2**32:
        # Build
        build('exec_echo32', ['exec_echo.c'], ['-m32'])
        # Trace
        check_call(rpz + ['testrun', './exec_echo32', '42'])
    else:
        print("Can't try exec_echo transitions: not running on 64bits")

    # ########################################
    # Tracing non-existing program
    #

    ret = call(rpz + ['testrun', './doesntexist'])
    assert ret == 127

    # ########################################
    # 'connect' program: testrun
    #

    # Build
    build('connect', ['connect.c'])
    # Trace
    err = check_output(rpz + ['testrun', './connect'], 'err')
    err = err.split(b'\n')
    assert not any(b'program exited with non-zero code' in l for l in err)
    assert any(re.search(br'process connected to [0-9.]+:80', l) for l in err)

    # ########################################
    # 'vfork' program: testrun
    #

    # Build
    build('vfork', ['vfork.c'])
    # Trace
    err = check_output(rpz + ['testrun', './vfork'], 'err')
    err = err.split(b'\n')
    assert not any(b'program exited with non-zero code' in l for l in err)

    # ########################################
    # 'rename' program: trace
    #

    # Build
    build('rename', ['rename.c'])
    # Trace
    check_call(rpz +
               ['trace', '--overwrite', '-d', 'rename-trace', './rename'])
    with Path('rename-trace/config.yml').open(encoding='utf-8') as fp:
        config = yaml.safe_load(fp)
    # Check that written files were logged
    database = Path.cwd() / 'rename-trace/trace.sqlite3'
    if PY3:
        # On PY3, connect() only accepts unicode
        conn = sqlite3.connect(str(database))
    else:
        conn = sqlite3.connect(database.path)
    conn.row_factory = sqlite3.Row
    rows = conn.execute('''
        SELECT name FROM opened_files
        ''')
    files = set(Path(r[0]) for r in rows)
    for n in ('dir1/file', 'dir2/file', 'dir2/brokensymlink', 'dir2/symlink'):
        if (Path.cwd() / n) not in files:
            raise AssertionError("Missing file: %s" % (Path.cwd() / n))
    conn.close()
    # Check that created files won't be packed
    for f in config.get('other_files'):
        if 'dir2' in Path(f).parent.components:
            raise AssertionError("Created file shouldn't be packed: %s" %
                                 Path(f))

    # ########################################
    # 'readwrite' program: trace

    # Build
    build('readwrite', ['readwrite.c'])
    # Create test folder
    Path('readwrite_test').mkdir()
    with Path('readwrite_test/existing').open('w'):
        pass

    # Trace existing one
    check_call(rpz + [
        'trace', '--overwrite', '-d', 'readwrite-E-trace', './readwrite',
        'readwrite_test/existing'
    ])
    # Check that file was logged as read and written
    database = Path.cwd() / 'readwrite-E-trace/trace.sqlite3'
    if PY3:
        # On PY3, connect() only accepts unicode
        conn = sqlite3.connect(str(database))
    else:
        conn = sqlite3.connect(database.path)
    conn.row_factory = sqlite3.Row
    rows = list(
        conn.execute(
            '''
        SELECT mode FROM opened_files
        WHERE name = ?
        ''', (str(Path('readwrite_test/existing').absolute()), )))
    conn.close()
    assert rows
    assert rows[0][0] == FILE_READ | FILE_WRITE

    # Trace non-existing one
    check_call(rpz + [
        'trace', '--overwrite', '-d', 'readwrite-N-trace', './readwrite',
        'readwrite_test/nonexisting'
    ])
    # Check that file was logged as written only
    database = Path.cwd() / 'readwrite-N-trace/trace.sqlite3'
    if PY3:
        # On PY3, connect() only accepts unicode
        conn = sqlite3.connect(str(database))
    else:
        conn = sqlite3.connect(database.path)
    conn.row_factory = sqlite3.Row
    rows = list(
        conn.execute(
            '''
        SELECT mode FROM opened_files
        WHERE name = ?
        ''', (str(Path('readwrite_test/nonexisting').absolute()), )))
    conn.close()
    assert rows
    assert rows[0][0] == FILE_WRITE

    # Trace a failure: inaccessible file
    ret = call(rpz + [
        'trace', '--overwrite', '-d', 'readwrite-F-trace', './readwrite',
        'readwrite_test/non/existing/file'
    ])
    assert ret == 1

    # ########################################
    # Test shebang corner-cases
    #

    Path('a').symlink('b')
    with Path('b').open('w') as fp:
        fp.write('#!%s 0\nsome content\n' % (Path.cwd() / 'c'))
    Path('b').chmod(0o744)
    Path('c').symlink('d')
    with Path('d').open('w') as fp:
        fp.write('#!e')
    Path('d').chmod(0o744)
    with Path('e').open('w') as fp:
        fp.write('#!/bin/echo')
    Path('e').chmod(0o744)

    # Trace
    out = check_output(rpz + [
        'trace', '--overwrite', '-d', 'shebang-trace',
        '--dont-identify-packages', './a', '1', '2'
    ])
    out = out.splitlines()[0]
    assert out == ('e %s 0 ./a 1 2' % (Path.cwd() / 'c')).encode('ascii')

    # Check config
    with Path('shebang-trace/config.yml').open(encoding='utf-8') as fp:
        config = yaml.safe_load(fp)
    other_files = set(
        Path(f) for f in config['other_files']
        if f.startswith('%s/' % Path.cwd()))

    # Check database
    database = Path.cwd() / 'shebang-trace/trace.sqlite3'
    if PY3:
        # On PY3, connect() only accepts unicode
        conn = sqlite3.connect(str(database))
    else:
        conn = sqlite3.connect(database.path)
    conn.row_factory = sqlite3.Row
    rows = conn.execute('''
        SELECT name FROM opened_files
        ''')
    opened = [Path(r[0]) for r in rows if r[0].startswith('%s/' % Path.cwd())]
    rows = conn.execute('''
        SELECT name, argv FROM executed_files
        ''')
    executed = [(Path(r[0]), r[1]) for r in rows
                if Path(r[0]).lies_under(Path.cwd())]

    print("other_files: %r" % sorted(other_files))
    print("opened: %r" % opened)
    print("executed: %r" % executed)

    assert other_files == set(Path.cwd() / p
                              for p in ('a', 'b', 'c', 'd', 'e'))
    assert opened == [Path.cwd() / 'c', Path.cwd() / 'e']
    assert executed == [(Path.cwd() / 'a', './a\x001\x002\x00')]

    # ########################################
    # Test old packages
    #

    old_packages = [
        ('simple-0.4.0.rpz',
         'https://drive.google.com/uc?export=download&id=0B3ucPz7GSthBVG4xZW1V'
         'eDhXNTQ'),
        ('simple-0.6.0.rpz',
         'https://drive.google.com/uc?export=download&id=0B3ucPz7GSthBbl9SUjhr'
         'cUdtbGs'),
        ('simple-0.7.1.rpz',
         'https://drive.google.com/uc?export=download&id=0B3ucPz7GSthBRGp2Vm5V'
         'QVpWOGs'),
    ]
    for name, url in old_packages:
        print("Testing old package %s" % name)
        f = Path(name)
        if not f.exists():
            download_file_retry(url, f)
        # Info
        check_call(rpuz + ['info', name])
        # Show files
        check_call(rpuz + ['showfiles', name])
        # Lists packages
        check_call(rpuz + ['installpkgs', '--summary', name])
        # Unpack directory
        check_call(rpuz + ['directory', 'setup', name, 'simpledir'])
        # Run directory
        check_simple(rpuz + ['directory', 'run', 'simpledir'], 'err')
        output_in_dir = Path('simpledir/root/tmp')
        output_in_dir = output_in_dir.listdir('reprozip_*')[0]
        output_in_dir = output_in_dir / 'simple_output.txt'
        with output_in_dir.open(encoding='utf-8') as fp:
            assert fp.read().strip() == '42'
        # Delete with wrong command (should fail)
        p = subprocess.Popen(rpuz + ['chroot', 'destroy', 'simpledir'],
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        assert p.poll() != 0
        err = err.splitlines()
        assert b"Wrong unpacker used" in err[0]
        assert err[1].startswith(b"usage: ")
        # Delete directory
        check_call(rpuz + ['directory', 'destroy', 'simpledir'])

    # ########################################
    # Copies back coverage report
    #

    coverage = Path('.coverage')
    if coverage.exists():
        coverage.copyfile(tests.parent / '.coverage.runpy')