Example #1
def pull_directory(remote, remotedir, localdir):
    """
    Copy a remote directory to a local directory.
    """
    log.debug('Transferring archived files from %s:%s to %s',
              remote.shortname, remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    r = remote.get_tar_stream(remotedir, sudo=True)
    tar = tarfile.open(mode='r|gz', fileobj=r.stdout)
    while True:
        ti = tar.next()
        if ti is None:
            break

        if ti.isdir():
            # ignore silently; easier to just create leading dirs below
            # XXX this means empty dirs are not transferred
            pass
        elif ti.isfile():
            sub = safepath.munge(ti.name)
            safepath.makedirs(root=localdir, path=os.path.dirname(sub))
            tar.makefile(ti, targetpath=os.path.join(localdir, sub))
        else:
            if ti.isdev():
                type_ = 'device'
            elif ti.issym():
                type_ = 'symlink'
            elif ti.islnk():
                type_ = 'hard link'
            else:
                type_ = 'unknown'
            log.info('Ignoring tar entry: %r type %r', ti.name, type_)
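
This and the later pull_directory variants all route tar entry names through teuthology's safepath.munge and safepath.makedirs before touching the local filesystem, so an absolute or malicious entry name cannot escape localdir. The real helpers are not shown on this page; as a rough sketch only, with hypothetical names, such sanitization could look like:

import os


def munge_path(name):
    # Hypothetical stand-in for safepath.munge (not the teuthology code):
    # normalize a tar entry name and drop any component that could escape
    # the destination directory.
    parts = []
    for part in os.path.normpath(name).split(os.sep):
        if part in ('', '.', '..'):
            continue
        parts.append(part)
    return os.path.join(*parts) if parts else '.'


def makedirs_under(root, path):
    # Hypothetical stand-in for safepath.makedirs: create path below root,
    # tolerating directories that already exist.
    target = os.path.join(root, munge_path(path)) if path else root
    os.makedirs(target, exist_ok=True)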
Example #2
def pull_directory(remote, remotedir, localdir):
    """
    Copy a remote directory to a local directory.
    """
    log.debug('Transferring archived files from %s:%s to %s', remote.shortname,
              remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    r = remote.get_tar_stream(remotedir, sudo=True)
    tar = tarfile.open(mode='r|gz', fileobj=r.stdout)
    while True:
        ti = tar.next()
        if ti is None:
            break

        if ti.isdir():
            # ignore silently; easier to just create leading dirs below
            # XXX this means empty dirs are not transferred
            pass
        elif ti.isfile():
            sub = safepath.munge(ti.name)
            safepath.makedirs(root=localdir, path=os.path.dirname(sub))
            tar.makefile(ti, targetpath=os.path.join(localdir, sub))
        else:
            if ti.isdev():
                type_ = 'device'
            elif ti.issym():
                type_ = 'symlink'
            elif ti.islnk():
                type_ = 'hard link'
            else:
                type_ = 'unknown'
            log.info('Ignoring tar entry: %r type %r', ti.name, type_)
Example #3
def create_job_archive(job_name, job_archive_path, archive_dir):
    log.info('Creating job\'s archive dir %s', job_archive_path)
    safe_archive = safepath.munge(job_name)
    run_archive = os.path.join(archive_dir, safe_archive)
    if not os.path.exists(run_archive):
        safepath.makedirs('/', run_archive)
    safepath.makedirs('/', job_archive_path)
Example #4
def pull_directory(remote, remotedir, localdir):
    """
    Copy a remote directory to a local directory.
    """
    log.debug('Transferring archived files from %s:%s to %s', remote.shortname,
              remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    proc = remote.run(
        args=[
            'sudo',
            'tar',
            'c',
            '-f',
            '-',
            '-C',
            remotedir,
            '--',
            '.',
        ],
        stdout=run.PIPE,
        wait=False,
    )
    tar = tarfile.open(mode='r|', fileobj=proc.stdout)
    while True:
        ti = tar.next()
        if ti is None:
            break

        if ti.isdir():
            # ignore silently; easier to just create leading dirs below
            pass
        elif ti.isfile():
            sub = safepath.munge(ti.name)
            safepath.makedirs(root=localdir, path=os.path.dirname(sub))
            tar.makefile(ti, targetpath=os.path.join(localdir, sub))
        else:
            if ti.isdev():
                type_ = 'device'
            elif ti.issym():
                type_ = 'symlink'
            elif ti.islnk():
                type_ = 'hard link'
            else:
                type_ = 'unknown'
            log.info('Ignoring tar entry: %r type %r', ti.name, type_)
    proc.exitstatus.get()
Example #5
def pull_directory(remote, remotedir, localdir):
    """
    Copy a remote directory to a local directory.
    """
    log.debug('Transferring archived files from %s:%s to %s',
              remote.shortname, remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    proc = remote.run(
        args=[
            'sudo',
            'tar',
            'c',
            '-f', '-',
            '-C', remotedir,
            '--',
            '.',
            ],
        stdout=run.PIPE,
        wait=False,
        )
    tar = tarfile.open(mode='r|', fileobj=proc.stdout)
    while True:
        ti = tar.next()
        if ti is None:
            break

        if ti.isdir():
            # ignore silently; easier to just create leading dirs below
            pass
        elif ti.isfile():
            sub = safepath.munge(ti.name)
            safepath.makedirs(root=localdir, path=os.path.dirname(sub))
            tar.makefile(ti, targetpath=os.path.join(localdir, sub))
        else:
            if ti.isdev():
                type_ = 'device'
            elif ti.issym():
                type_ = 'symlink'
            elif ti.islnk():
                type_ = 'hard link'
            else:
                type_ = 'unknown'
            log.info('Ignoring tar entry: %r type %r', ti.name, type_)
    proc.exitstatus.get()
Example #6
def pull_directory(remote, remotedir, localdir, write_to=copy_fileobj):
    """
    Copy a remote directory to a local directory.

    :param remote: the remote object representing the remote host from which
                   the specified directory is pulled
    :param remotedir: the source directory on remote host
    :param localdir: the destination directory on localhost
    :param write_to: optional function used to write each file under localdir.
                     Its signature should be:
                     func(src: fileobj,
                          tarinfo: tarfile.TarInfo,
                          local_path: str)
    """
    log.debug('Transferring archived files from %s:%s to %s',
              remote.shortname, remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    r = remote.get_tar_stream(remotedir, sudo=True)
    tar = tarfile.open(mode='r|gz', fileobj=r.stdout)
    while True:
        ti = tar.next()
        if ti is None:
            break

        if ti.isdir():
            # ignore silently; easier to just create leading dirs below
            # XXX this means empty dirs are not transferred
            pass
        elif ti.isfile():
            sub = safepath.munge(ti.name)
            safepath.makedirs(root=localdir, path=os.path.dirname(sub))
            with tar.extractfile(ti) as src:
                write_to(src, ti, os.path.join(localdir, sub))
        else:
            if ti.isdev():
                type_ = 'device'
            elif ti.issym():
                type_ = 'symlink'
            elif ti.islnk():
                type_ = 'hard link'
            else:
                type_ = 'unknown'
            log.info('Ignoring tar entry: %r type %r', ti.name, type_)
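
The write_to hook above accepts any callable matching the documented func(src, tarinfo, local_path) signature. The default copy_fileobj is not shown on this page; as a sketch under that assumption, a compatible callback and a custom variant might look like:

import gzip
import shutil


def copy_fileobj(src, tarinfo, local_path):
    # Sketch of a write_to-compatible callback matching the documented
    # signature; the real default copy_fileobj may differ.
    with open(local_path, 'wb') as dest:
        shutil.copyfileobj(src, dest)


def gzip_fileobj(src, tarinfo, local_path):
    # A custom callback could post-process each file, e.g. store it compressed.
    with gzip.open(local_path + '.gz', 'wb') as dest:
        shutil.copyfileobj(src, dest)

A caller could then pass write_to=gzip_fileobj to pull_directory to store the transferred files compressed.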
Example #7
def pull_directory(remote, remotedir, localdir):
    """
    Copy a remote directory to a local directory.
    """
    log.debug('Transferring archived files from %s:%s to %s', remote.shortname,
              remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    _, local_tarfile = tempfile.mkstemp(dir=localdir)
    remote.get_tar(remotedir, local_tarfile, sudo=True)
    with open(local_tarfile, 'rb') as fb1:
        tar = tarfile.open(mode='r|gz', fileobj=fb1)
        while True:
            ti = tar.next()
            if ti is None:
                break

            if ti.isdir():
                # ignore silently; easier to just create leading dirs below
                pass
            elif ti.isfile():
                sub = safepath.munge(ti.name)
                safepath.makedirs(root=localdir, path=os.path.dirname(sub))
                tar.makefile(ti, targetpath=os.path.join(localdir, sub))
            else:
                if ti.isdev():
                    type_ = 'device'
                elif ti.issym():
                    type_ = 'symlink'
                elif ti.islnk():
                    type_ = 'hard link'
                else:
                    type_ = 'unknown'
                log.info('Ignoring tar entry: %r type %r', ti.name, type_)
    os.remove(local_tarfile)
Example #8
def pull_directory(remote, remotedir, localdir):
    """
    Copy a remote directory to a local directory.
    """
    log.debug('Transferring archived files from %s:%s to %s',
              remote.shortname, remotedir, localdir)
    if not os.path.exists(localdir):
        os.mkdir(localdir)
    _, local_tarfile = tempfile.mkstemp(dir=localdir)
    remote.get_tar(remotedir, local_tarfile, sudo=True)
    with open(local_tarfile, 'rb') as fb1:
        tar = tarfile.open(mode='r|gz', fileobj=fb1)
        while True:
            ti = tar.next()
            if ti is None:
                break

            if ti.isdir():
                # ignore silently; easier to just create leading dirs below
                pass
            elif ti.isfile():
                sub = safepath.munge(ti.name)
                safepath.makedirs(root=localdir, path=os.path.dirname(sub))
                tar.makefile(ti, targetpath=os.path.join(localdir, sub))
            else:
                if ti.isdev():
                    type_ = 'device'
                elif ti.issym():
                    type_ = 'symlink'
                elif ti.islnk():
                    type_ = 'hard link'
                else:
                    type_ = 'unknown'
                log.info('Ignoring tar entry: %r type %r', ti.name, type_)
    os.remove(local_tarfile)
Example #9
def run_job(job_config, teuth_bin_path, archive_dir, verbose):
    safe_archive = safepath.munge(job_config['name'])
    if job_config.get('first_in_suite') or job_config.get('last_in_suite'):
        if teuth_config.results_server:
            try:
                report.try_delete_jobs(job_config['name'],
                                       job_config['job_id'])
            except Exception as e:
                log.warning("Unable to delete job %s, exception occurred: %s",
                            job_config['job_id'], e)
        suite_archive_dir = os.path.join(archive_dir, safe_archive)
        safepath.makedirs('/', suite_archive_dir)
        args = [
            os.path.join(teuth_bin_path, 'teuthology-results'),
            '--archive-dir',
            suite_archive_dir,
            '--name',
            job_config['name'],
        ]
        if job_config.get('first_in_suite'):
            log.info('Generating memo for %s', job_config['name'])
            if job_config.get('seed'):
                args.extend(['--seed', job_config['seed']])
            if job_config.get('subset'):
                args.extend(['--subset', job_config['subset']])
        else:
            log.info('Generating results for %s', job_config['name'])
            timeout = job_config.get('results_timeout',
                                     teuth_config.results_timeout)
            args.extend(['--timeout', str(timeout)])
            if job_config.get('email'):
                args.extend(['--email', job_config['email']])
        # Execute teuthology-results, passing 'preexec_fn=os.setpgrp' to
        # make sure that it will continue to run if this worker process
        # dies (e.g. because of a restart)
        result_proc = subprocess.Popen(args=args, preexec_fn=os.setpgrp)
        log.info("teuthology-results PID: %s", result_proc.pid)
        return

    log.info('Creating archive dir %s', job_config['archive_path'])
    safepath.makedirs('/', job_config['archive_path'])
    log.info('Running job %s', job_config['job_id'])

    suite_path = job_config['suite_path']
    arg = [
        os.path.join(teuth_bin_path, 'teuthology'),
    ]
    # The following is for compatibility with older schedulers, from before we
    # started merging the contents of job_config['config'] into job_config
    # itself.
    if 'config' in job_config:
        inner_config = job_config.pop('config')
        if not isinstance(inner_config, dict):
            log.warning("run_job: job_config['config'] isn't a dict, it's a %s",
                        str(type(inner_config)))
        else:
            job_config.update(inner_config)

    if verbose or job_config['verbose']:
        arg.append('-v')

    arg.extend([
        '--lock',
        '--block',
        '--owner',
        job_config['owner'],
        '--archive',
        job_config['archive_path'],
        '--name',
        job_config['name'],
    ])
    if job_config['description'] is not None:
        arg.extend(['--description', job_config['description']])
    arg.append('--')

    with tempfile.NamedTemporaryFile(prefix='teuthology-worker.',
                                     suffix='.tmp',
                                     mode='w+t') as tmp:
        yaml.safe_dump(data=job_config, stream=tmp)
        tmp.flush()
        arg.append(tmp.name)
        env = os.environ.copy()
        python_path = env.get('PYTHONPATH', '')
        python_path = ':'.join([suite_path, python_path]).strip(':')
        env['PYTHONPATH'] = python_path
        log.debug("Running: %s" % ' '.join(arg))
        p = subprocess.Popen(args=arg, env=env)
        log.info("Job archive: %s", job_config['archive_path'])
        log.info("Job PID: %s", str(p.pid))

        if teuth_config.results_server:
            log.info("Running with watchdog")
            try:
                run_with_watchdog(p, job_config)
            except Exception:
                log.exception("run_with_watchdog had an unhandled exception")
                raise
        else:
            log.info("Running without watchdog")
            # This sleep() is to give the child time to start up and create the
            # archive dir.
            time.sleep(5)
            symlink_worker_log(job_config['worker_log'],
                               job_config['archive_path'])
            p.wait()

        if p.returncode != 0:
            log.error('Child exited with code %d', p.returncode)
        else:
            log.info('Success!')
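
The comment about preexec_fn=os.setpgrp in the example above relies on standard POSIX process-group behavior: because teuthology-results runs in its own process group, a signal sent to the worker's group (for example during a restart) does not reach it. A self-contained sketch of that pattern, unrelated to teuthology itself:

import os
import subprocess

# Start a long-running helper in its own process group so that signals sent
# to this process's group do not terminate it (POSIX only). On Python 3.2+,
# start_new_session=True is a common alternative that calls setsid() instead.
proc = subprocess.Popen(
    ['sleep', '300'],
    preexec_fn=os.setpgrp,
)
print('helper PID:', proc.pid, 'process group:', os.getpgid(proc.pid))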
Example #10
def worker():
    parser = argparse.ArgumentParser(
        description="""
Grab jobs from a beanstalk queue and run the teuthology tests they
describe. One job is run at a time.
"""
    )
    parser.add_argument("-v", "--verbose", action="store_true", default=None, help="be more verbose")
    parser.add_argument("--archive-dir", metavar="DIR", help="path under which to archive results", required=True)
    parser.add_argument("-l", "--log-dir", help="path in which to store logs", required=True)
    parser.add_argument("-t", "--tube", help="which beanstalk tube to read jobs from", required=True)

    ctx = parser.parse_args()

    loglevel = logging.INFO
    if ctx.verbose:
        loglevel = logging.DEBUG

    logging.basicConfig(
        level=loglevel,
        filename=os.path.join(ctx.log_dir, "worker.{pid}".format(pid=os.getpid())),
        format="%(asctime)s.%(msecs)03d %(levelname)s:%(name)s:%(message)s",
        datefmt="%Y-%m-%dT%H:%M:%S",
    )

    if not os.path.isdir(ctx.archive_dir):
        sys.exit(
            "{prog}: archive directory must exist: {path}".format(
                prog=os.path.basename(sys.argv[0]), path=ctx.archive_dir
            )
        )

    from teuthology.misc import read_config

    read_config(ctx)

    beanstalk = connect(ctx)
    beanstalk.watch(ctx.tube)
    beanstalk.ignore("default")

    while True:
        job = beanstalk.reserve(timeout=60)
        if job is None:
            continue

        # bury the job so it won't be re-run if it fails
        job.bury()
        log.debug("Reserved job %d", job.jid)
        log.debug("Config is: %s", job.body)
        job_config = yaml.safe_load(job.body)
        safe_archive = safepath.munge(job_config["name"])

        if job_config.get("last_in_suite", False):
            log.debug("Generating coverage for %s", job_config["name"])
            args = [
                os.path.join(os.path.dirname(sys.argv[0]), "teuthology-results"),
                "--timeout",
                str(job_config.get("results_timeout", 21600)),
                "--email",
                job_config["email"],
                "--archive-dir",
                os.path.join(ctx.archive_dir, safe_archive),
                "--name",
                job_config["name"],
            ]
            subprocess.Popen(args=args)
        else:
            log.debug("Creating archive dir...")
            safepath.makedirs(ctx.archive_dir, safe_archive)
            archive_path = os.path.join(ctx.archive_dir, safe_archive, str(job.jid))
            log.info("Running job %d", job.jid)
            run_job(job_config, archive_path)
        job.delete()
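
The loop above buries each job right after reserving it, so a crash between reserve() and delete() leaves the job buried for inspection rather than released back onto the tube and re-run. A condensed sketch of that lifecycle, assuming the same beanstalkc-style client used above and a hypothetical handle() callable:

def consume_one(beanstalk, handle):
    # Hypothetical helper condensing the loop body above; not teuthology code.
    job = beanstalk.reserve(timeout=60)
    if job is None:
        return
    # Bury first: if handle() raises, the job stays buried for inspection
    # instead of being released and picked up again.
    job.bury()
    handle(job.body)
    # Only jobs that were handled without raising are deleted for good.
    job.delete()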
Example #11
def archive(ctx, config):
    log.info('Creating archive directory...')
    run.wait(
        ctx.cluster.run(
            args=[
                'install', '-d', '-m0755', '--',
                '/tmp/cephtest/archive',
                ],
            wait=False,
            )
        )

    try:
        yield
    finally:
        if ctx.archive is not None:

            log.info('Transferring archived files...')
            logdir = os.path.join(ctx.archive, 'remote')
            os.mkdir(logdir)
            for remote in ctx.cluster.remotes.keys():
                path = os.path.join(logdir, remote.shortname)
                os.mkdir(path)
                log.debug('Transferring archived files from %s to %s', remote.shortname, path)
                proc = remote.run(
                    args=[
                        'tar',
                        'c',
                        '-f', '-',
                        '-C', '/tmp/cephtest/archive',
                        '--',
                        '.',
                        ],
                    stdout=run.PIPE,
                    wait=False,
                    )
                tar = tarfile.open(mode='r|', fileobj=proc.stdout)
                while True:
                    ti = tar.next()
                    if ti is None:
                        break

                    if ti.isdir():
                        # ignore silently; easier to just create leading dirs below
                        pass
                    elif ti.isfile():
                        sub = safepath.munge(ti.name)
                        safepath.makedirs(root=path, path=os.path.dirname(sub))
                        tar.makefile(ti, targetpath=os.path.join(path, sub))
                    else:
                        if ti.isdev():
                            type_ = 'device'
                        elif ti.issym():
                            type_ = 'symlink'
                        elif ti.islnk():
                            type_ = 'hard link'
                        else:
                            type_ = 'unknown'
                        log.info('Ignoring tar entry: %r type %r', ti.name, type_)
                        continue
                proc.exitstatus.get()

        log.info('Removing archive directory...')
        run.wait(
            ctx.cluster.run(
                args=[
                    'rm',
                    '-rf',
                    '--',
                    '/tmp/cephtest/archive',
                    ],
                wait=False,
                ),
            )
Example #12
def worker():
    parser = argparse.ArgumentParser(description="""
Grab jobs from a beanstalk queue and run the teuthology tests they
describe. One job is run at a time.
""")
    parser.add_argument(
        '-v', '--verbose',
        action='store_true', default=None,
        help='be more verbose',
        )
    parser.add_argument(
        '--archive-dir',
        metavar='DIR',
        help='path under which to archive results',
        required=True,
        )
    parser.add_argument(
        '-l', '--log-dir',
        help='path in which to store logs',
        required=True,
        )
    parser.add_argument(
        '-t', '--tube',
        help='which beanstalk tube to read jobs from',
        required=True,
        )

    ctx = parser.parse_args()

    loglevel = logging.INFO
    if ctx.verbose:
        loglevel = logging.DEBUG

    logging.basicConfig(
        level=loglevel,
        filename=os.path.join(ctx.log_dir, 'worker.{pid}'.format(pid=os.getpid())),
        format='%(asctime)s.%(msecs)03d %(levelname)s:%(name)s:%(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
        )

    if not os.path.isdir(ctx.archive_dir):
        sys.exit("{prog}: archive directory must exist: {path}".format(
                prog=os.path.basename(sys.argv[0]),
                path=ctx.archive_dir,
                ))

    from teuthology.misc import read_config
    read_config(ctx)

    beanstalk = connect(ctx)
    beanstalk.watch(ctx.tube)
    beanstalk.ignore('default')

    while True:
        job = beanstalk.reserve(timeout=60)
        if job is None:
            continue

        # bury the job so it won't be re-run if it fails
        job.bury()
        log.debug('Reserved job %d', job.jid)
        log.debug('Config is: %s', job.body)
        job_config = yaml.safe_load(job.body)
        safe_archive = safepath.munge(job_config['name'])
        teuthology_branch = job_config.get('config', {}).get('teuthology_branch', 'master')

        teuth_path = os.path.join(os.getenv("HOME"), 'teuthology-' + teuthology_branch, 'virtualenv', 'bin')
        if not os.path.isdir(teuth_path):
            raise Exception('Teuthology branch ' + teuthology_branch + ' not found at ' + teuth_path)
        if job_config.get('last_in_suite'):
            log.debug('Generating coverage for %s', job_config['name'])
            args = [
                os.path.join(teuth_path, 'teuthology-results'),
                '--timeout',
                str(job_config.get('results_timeout', 21600)),
                '--email',
                job_config['email'],
                '--archive-dir',
                os.path.join(ctx.archive_dir, safe_archive),
                '--name',
                job_config['name'],
                ]
            subprocess.Popen(args=args)
        else:
            log.debug('Creating archive dir...')
            safepath.makedirs(ctx.archive_dir, safe_archive)
            archive_path = os.path.join(ctx.archive_dir, safe_archive, str(job.jid))
            log.info('Running job %d', job.jid)
            run_job(job_config, archive_path, teuth_path)
        job.delete()
Example #13
def worker():
    parser = argparse.ArgumentParser(description="""
Grab jobs from a beanstalk queue and run the teuthology tests they
describe. One job is run at a time.
""")
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=None,
        help='be more verbose',
    )
    parser.add_argument(
        '--archive-dir',
        metavar='DIR',
        help='path under which to archive results',
        required=True,
    )
    parser.add_argument(
        '-l',
        '--log-dir',
        help='path in which to store logs',
        required=True,
    )

    ctx = parser.parse_args()

    loglevel = logging.INFO
    if ctx.verbose:
        loglevel = logging.DEBUG

    logging.basicConfig(
        level=loglevel,
        filename=os.path.join(ctx.log_dir,
                              'worker.{pid}'.format(pid=os.getpid())),
        format='%(asctime)s.%(msecs)03d %(levelname)s:%(name)s:%(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
    )

    if not os.path.isdir(ctx.archive_dir):
        sys.exit("{prog}: archive directory must exist: {path}".format(
            prog=os.path.basename(sys.argv[0]),
            path=ctx.archive_dir,
        ))

    from teuthology.misc import read_config
    read_config(ctx)

    beanstalk = connect(ctx)
    beanstalk.watch('teuthology')
    beanstalk.ignore('default')

    while True:
        job = beanstalk.reserve(timeout=60)
        if job is None:
            continue

        # bury the job so it won't be re-run if it fails
        job.bury()
        log.debug('Reserved job %d', job.jid)
        log.debug('Config is: %s', job.body)
        job_config = yaml.safe_load(job.body)
        safe_archive = safepath.munge(job_config['name'])

        if job_config.get('last_in_suite', False):
            log.debug('Generating coverage for %s', job_config['name'])
            args = [
                os.path.join(os.path.dirname(sys.argv[0]),
                             'teuthology-results'),
                '--timeout',
                str(job_config.get('results_timeout', 21600)),
                '--email',
                job_config['email'],
                '--archive-dir',
                os.path.join(ctx.archive_dir, safe_archive),
                '--name',
                job_config['name'],
            ]
            subprocess.Popen(args=args)
        else:
            log.debug('Creating archive dir...')
            safepath.makedirs(ctx.archive_dir, safe_archive)
            archive_path = os.path.join(ctx.archive_dir, safe_archive,
                                        str(job.jid))
            log.info('Running job %d', job.jid)
            run_job(job_config, archive_path)
        job.delete()
Example #14
def archive(ctx, config):
    log.info('Creating archive directory...')
    run.wait(
        ctx.cluster.run(
            args=[
                'install',
                '-d',
                '-m0755',
                '--',
                '/tmp/cephtest/archive',
            ],
            wait=False,
        ))

    try:
        yield
    finally:
        if ctx.archive is not None:

            log.info('Transferring archived files...')
            logdir = os.path.join(ctx.archive, 'remote')
            os.mkdir(logdir)
            for remote in ctx.cluster.remotes.keys():
                path = os.path.join(logdir, remote.shortname)
                os.mkdir(path)
                log.debug('Transferring archived files from %s to %s',
                          remote.shortname, path)
                proc = remote.run(
                    args=[
                        'tar',
                        'c',
                        '-f',
                        '-',
                        '-C',
                        '/tmp/cephtest/archive',
                        '--',
                        '.',
                    ],
                    stdout=run.PIPE,
                    wait=False,
                )
                tar = tarfile.open(mode='r|', fileobj=proc.stdout)
                while True:
                    ti = tar.next()
                    if ti is None:
                        break

                    if ti.isdir():
                        # ignore silently; easier to just create leading dirs below
                        pass
                    elif ti.isfile():
                        sub = safepath.munge(ti.name)
                        safepath.makedirs(root=path, path=os.path.dirname(sub))
                        tar.makefile(ti, targetpath=os.path.join(path, sub))
                    else:
                        if ti.isdev():
                            type_ = 'device'
                        elif ti.issym():
                            type_ = 'symlink'
                        elif ti.islnk():
                            type_ = 'hard link'
                        else:
                            type_ = 'unknown'
                        log.info('Ignoring tar entry: %r type %r', ti.name,
                                 type_)
                        continue
                proc.exitstatus.get()

        log.info('Removing archive directory...')
        run.wait(
            ctx.cluster.run(
                args=[
                    'rm',
                    '-rf',
                    '--',
                    '/tmp/cephtest/archive',
                ],
                wait=False,
            ))