    def setUp(self):
        self.src_tree = autotemp.tempdir(unique_id='utilsrc')
        self.dest_tree = autotemp.tempdir(unique_id='utilsdest')

        # empty subdirs
        os.mkdir(self.src("empty"))
        os.mkdir(self.dest("empty"))
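Below is a minimal, self-contained sketch of the tempdir lifecycle this fixture relies on. The import path is an assumption (it differs between autotest versions), and the self.src()/self.dest() helpers used above are presumed to join paths under these two trees:

import os
from autotest.client.shared import autotemp  # import path is an assumption

# create two uniquely named scratch trees, mirroring the fixture above
src_tree = autotemp.tempdir(unique_id='utilsrc')
dest_tree = autotemp.tempdir(unique_id='utilsdest')
os.mkdir(os.path.join(src_tree.name, 'empty'))
os.mkdir(os.path.join(dest_tree.name, 'empty'))

# ... exercise the code under test ...

# remove both trees; a tempdir also cleans itself up when garbage collected
src_tree.clean()
dest_tree.clean()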
Example #2
    def _send_tarball(self, pkg_name, remote_dest):
        name, pkg_type = self.job.pkgmgr.parse_tarball_name(pkg_name)
        src_dirs = []
        if pkg_type == 'test':
            for test_dir in ['site_tests', 'tests']:
                src_dir = os.path.join(self.job.clientdir, test_dir, name)
                if os.path.exists(src_dir):
                    src_dirs += [src_dir]
                    if autoserv_prebuild:
                        prebuild.setup(self.job.clientdir, src_dir)
                    break
        elif pkg_type == 'profiler':
            src_dirs += [os.path.join(self.job.clientdir, 'profilers', name)]
            if autoserv_prebuild:
                # prebuild against the profiler directory just appended
                prebuild.setup(self.job.clientdir, src_dirs[0])
        elif pkg_type == 'dep':
            src_dirs += [os.path.join(self.job.clientdir, 'deps', name)]
        elif pkg_type == 'client':
            return  # you must already have a client to hit this anyway
        else:
            return  # no other types are supported

        # iterate over src_dirs until we find one that exists, then tar it
        for src_dir in src_dirs:
            if os.path.exists(src_dir):
                try:
                    logging.info('Bundling %s into %s', src_dir, pkg_name)
                    temp_dir = autotemp.tempdir(unique_id='autoserv-packager',
                                                dir=self.job.tmpdir)
                    tarball_path = self.job.pkgmgr.tar_package(
                        pkg_name, src_dir, temp_dir.name, " .", None)
                    self.host.send_file(tarball_path, remote_dest)
                finally:
                    temp_dir.clean()
                return
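For context, pkgmgr.parse_tarball_name() returns the package's base name and type, which drives the branching above. A hypothetical illustration, assuming the usual '<type>-<name>.tar.bz2' naming convention (the file name below is made up):

# hypothetical tarball name; the '<type>-<name>.tar.bz2' scheme is an assumption
name, pkg_type = self.job.pkgmgr.parse_tarball_name('test-sleeptest.tar.bz2')
# name == 'sleeptest', pkg_type == 'test', so the 'test' branch above is taken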
Example #3
    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist.
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)
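A short usage sketch of the two construction modes the docstring describes. The enclosing class name (JobDirectory) and the example path are assumptions, not taken from this snippet:

# writable scratch directory: with path=None a tempdir backs it
scratch = JobDirectory(None, is_writable=True)
print(scratch.path)  # lives inside the autotemp-created directory

# read-only view of an existing directory: raises if it is missing
try:
    results = JobDirectory('/usr/local/autotest/results', is_writable=False)
except JobDirectory.MissingDirectoryException:
    results = None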
Example #4
    def start_master_ssh(self):
        """
        Called whenever a slave SSH connection needs to be initiated (e.g., by
        run, rsync, scp). If master SSH support is enabled and a master SSH
        connection is not active already, start a new one in the background.
        Also, clean up any zombie master SSH connections (e.g., dead due to
        reboot).
        """
        if not enable_master_ssh:
            return

        # If a previously started master SSH connection is not running
        # anymore, it needs to be cleaned up and then restarted.
        if self.master_ssh_job is not None:
            if self.master_ssh_job.sp.poll() is not None:
                logging.info("Master ssh connection to %s is down.",
                             self.hostname)
                self._cleanup_master_ssh()

        # Start a new master SSH connection.
        if self.master_ssh_job is None:
            # Create a shared socket in a temp location.
            self.master_ssh_tempdir = autotemp.tempdir(unique_id='ssh-master')
            self.master_ssh_option = ("-o ControlPath=%s/socket" %
                                      self.master_ssh_tempdir.name)

            # Start the master SSH connection in the background.
            master_cmd = self.ssh_command(options="-N -o ControlMaster=yes")
            logging.info("Starting master ssh connection '%s'" % master_cmd)
            self.master_ssh_job = utils.BgJob(master_cmd)
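Once the master connection is up, later ssh-based commands can reuse its shared socket by including the stored ControlPath option. The command assembly below is illustrative only, not the host class's actual run() implementation:

self.start_master_ssh()
# a subsequent ssh invocation attaches to the master's socket instead of
# opening a new TCP connection and re-authenticating
cmd = self.ssh_command(options=self.master_ssh_option) + ' uptime'
result = utils.run(cmd)  # utils.run as used elsewhere in autotest; illustrative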
Example #5
    def _send_and_wait(self, title, *args):
        """Send a message to the autoserv and wait for it to signal
        completion.

        @param title: An alphanumeric string to title the message.
        @param *args: Additional arbitrary alphanumeric arguments to pass
                to the server.
        """
        # create a named pipe for us to receive a signal on
        fifo_dir = autotemp.tempdir(suffix='-fifo', unique_id='harness',
                                    dir=self.job.tmpdir)
        try:
            fifo_path = os.path.join(fifo_dir.name, 'autoserv.fifo')
            os.mkfifo(fifo_path)

            # send signal to the server as title[:args]:path
            msg = ':'.join([title] + list(args) + [fifo_path]) + '\n'
            self.status.write(msg)

            # wait for the server to signal back to us
            fifo = open(fifo_path)
            fifo.read(1)
            fifo.close()
        finally:
            fifo_dir.clean()
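The handshake above only requires the peer to write (or simply open for writing and close) the fifo path it was handed; fifo.read(1) then returns and _send_and_wait unblocks. A reduced sketch of the responding side, which is not the actual autoserv code:

# illustrative responder; msg is the 'title[:args]:fifo_path' line read from
# the status stream written by _send_and_wait() above
fields = msg.rstrip('\n').split(':')
title, args, fifo_path = fields[0], fields[1:-1], fields[-1]
with open(fifo_path, 'w') as fifo:
    fifo.write('A')  # any single byte (or a bare EOF) satisfies fifo.read(1)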
Example #9
    def _send_and_wait(self, title, *args):
        """Send a message to the autoserv and wait for it to signal
        completion.

        @param title: An alphanumeric string to title the message.
        @param *args: Additional arbitrary alphanumeric arguments to pass
                to the server.
        """
        # create a named pipe for us to receive a signal on
        fifo_dir = autotemp.tempdir(suffix='-fifo',
                                    unique_id='harness',
                                    dir=self.job.tmpdir)
        try:
            fifo_path = os.path.join(fifo_dir.name, 'autoserv.fifo')
            os.mkfifo(fifo_path)

            # send signal to the server as title[:args]:path
            msg = ':'.join([title] + list(args) + [fifo_path]) + '\n'
            self.status.write(msg)

            # wait for the server to signal back to us
            fifo = open(fifo_path)
            fifo.read(1)
            fifo.close()
        finally:
            fifo_dir.clean()
Example #10
    def _send_tarball(self, pkg_name, remote_dest):
        name, pkg_type = self.job.pkgmgr.parse_tarball_name(pkg_name)
        src_dirs = []
        if pkg_type == 'test':
            test_dirs = ['site_tests', 'tests']
            # if test_dir is defined in global config
            # package the tests from there (if exists)
            settings_test_dirs = settings.get_value('COMMON', 'test_dir',
                                                    default="")
            if settings_test_dirs:
                test_dirs = settings_test_dirs.strip().split(',') + test_dirs
            for test_dir in test_dirs:
                src_dir = os.path.join(self.job.clientdir, test_dir, name)
                if os.path.exists(src_dir):
                    src_dirs += [src_dir]
                    if autoserv_prebuild:
                        prebuild.setup(self.job.clientdir, src_dir)
                    break
        elif pkg_type == 'profiler':
            src_dirs += [os.path.join(self.job.clientdir, 'profilers', name)]
            if autoserv_prebuild:
                # prebuild against the profiler directory just appended
                prebuild.setup(self.job.clientdir, src_dirs[0])
        elif pkg_type == 'dep':
            src_dirs += [os.path.join(self.job.clientdir, 'deps', name)]
        elif pkg_type == 'client':
            return  # you must already have a client to hit this anyway
        else:
            return  # no other types are supported

        # iterate over src_dirs until we find one that exists, then tar it
        for src_dir in src_dirs:
            if os.path.exists(src_dir):
                try:
                    logging.info('Bundling %s into %s', src_dir, pkg_name)
                    temp_dir = autotemp.tempdir(unique_id='autoserv-packager',
                                                dir=self.job.tmpdir)

                    exclude_paths = None
                    exclude_file_path = os.path.join(src_dir, ".pack_exclude")
                    if os.path.exists(exclude_file_path):
                        exclude_file = open(exclude_file_path)
                        exclude_paths = exclude_file.read().splitlines()
                        exclude_file.close()

                    tarball_path = self.job.pkgmgr.tar_package(
                        pkg_name, src_dir, temp_dir.name,
                        " .", exclude_paths)
                    self.host.send_file(tarball_path, remote_dest)
                finally:
                    temp_dir.clean()
                return
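The .pack_exclude hook above is just a newline-separated list of paths that ends up as tar_package()'s exclude argument. A hypothetical example of creating one next to a test's source directory (the entries shown are illustrative, not taken from autotest):

# hypothetical exclude list; each line becomes an exclude pattern for the tarball
with open(os.path.join(src_dir, '.pack_exclude'), 'w') as exclude_file:
    exclude_file.write('logs\n')
    exclude_file.write('build_artifacts\n')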
Example #12
def main():
    (options, args) = parser.parse_args()
    if len(args) < 2:
        parser.print_help()
        sys.exit(1)

    results_dirpath = path.normpath(args[0])
    if not path.exists(results_dirpath) or not path.isdir(results_dirpath):
        print 'Invalid results_dirpath:', results_dirpath
        parser.print_help()
        sys.exit(1)

    scenarios_dirpath = path.normpath(args[1])
    if not path.exists(scenarios_dirpath) or not path.isdir(scenarios_dirpath):
        print 'Invalid scenarios_dirpath:', scenarios_dirpath
        parser.print_help()
        sys.exit(1)

    results_dirname = path.basename(results_dirpath)
    # Not everything is a valid python package name, fix if necessary
    package_dirname = scenario_base.fix_package_dirname(
        options.name or results_dirname)

    scenario_package_dirpath = path.join(
        scenarios_dirpath, package_dirname)
    if path.exists(scenario_package_dirpath):
        print (
            'Scenario package already exists at path: %s' %
            scenario_package_dirpath)
        parser.print_help()
        sys.exit(1)

    # Create new scenario package
    os.mkdir(scenario_package_dirpath)

    # Create tmp_dir
    tmp_dirpath = autotemp.tempdir(unique_id='new_scenario')
    copied_dirpath = path.join(tmp_dirpath.name, results_dirname)
    # Copy results_dir
    shutil.copytree(results_dirpath, copied_dirpath)

    # scenario_base.sanitize_results_data(copied_dirpath)

    # Launch parser on copied_dirpath, collect emitted test objects.
    harness = scenario_base.new_parser_harness(copied_dirpath)
    try:
        parser_result = harness.execute()
    except Exception, e:
        parser_result = e
Example #13
    def __init__(self, address='', port=_DEFAULT_PORT, tmpdir=None):
        """
        :param address: Address on which server must be started.
        :param port: Port of server.
        :param tmpdir: Dir where pid file is saved.
        """
        if tmpdir:
            self.tmpdir = TempDir(tmpdir)
        else:
            self.tmpdir = autotemp.tempdir(unique_id='',
                                           prefix=("SyncListenServer_%d" %
                                                   port))
        self.sessions = {}
        self.exit_event = threading.Event()

        self.server_pid = parallel.fork_start(
            self.tmpdir.name, lambda: self._start_server(address, port))
Example #14
    def __init__(self, address='', port=_DEFAULT_PORT, tmpdir=None):
        """
        :param address: Address on which server must be started.
        :param port: Port of server.
        :param tmpdir: Dir where pid file is saved.
        """
        l = lambda: self._start_server(address, port)

        if tmpdir:
            self.tmpdir = TempDir(tmpdir)
        else:
            self.tmpdir = autotemp.tempdir(unique_id='',
                                           prefix="SyncListenServer_%d" % (port))
        self.sessions = {}
        self.exit_event = threading.Event()

        self.server_pid = parallel.fork_start(self.tmpdir.name, l)
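A brief usage sketch of the constructor variants above; the address, port, and directory values are made up, and shutting the server down is not shown because that API is outside this snippet:

# default: the server manages its own pid-file directory via autotemp
server = SyncListenServer()

# explicit tmpdir: a caller-supplied directory holds the pid file instead
server = SyncListenServer(address='0.0.0.0', port=14322, tmpdir='/tmp/sync_server')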
Example #15
def load_results_dir(package_dirpath):
    """Unpack results tarball in package_dirpath to temp dir.

    Args:
      package_dirpath: str; Path to scenario package directory.

    Returns:
      (tmp_dirpath, results_dirpath) tuple; the autotemp.tempdir object and
      the path of the extracted results directory inside it.
      - Or -
      (None, None); if the tarball does not exist.
    """
    tgz_filepath = path.join(package_dirpath, RESULTS_DIR_TARBALL)
    if not path.exists(tgz_filepath):
        return None, None

    tgz = tarfile.open(tgz_filepath, 'r:gz')
    tmp_dirpath = autotemp.tempdir(unique_id='scenario_base')
    results_dirname = tgz.next().name
    tgz.extract(results_dirname, tmp_dirpath.name)
    for info in tgz:
        tgz.extract(info.name, tmp_dirpath.name)
    return tmp_dirpath, path.join(tmp_dirpath.name, results_dirname)
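A usage sketch for the function above; the caller owns the returned tempdir object and should clean it when done (the package path is hypothetical):

tmp_dir, results_path = load_results_dir('/path/to/scenario_package')
if tmp_dir is not None:
    try:
        # inspect the extracted results under results_path here
        pass
    finally:
        tmp_dir.clean()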
    def setUp(self):
        self.god = mock.mock_god()
        self.god.stub_function(local_host.utils, 'run')

        self.tmpdir = autotemp.tempdir(unique_id='localhost_unittest')
Example #22
    def test_create_dir(self):
        temp_dir = autotemp.tempdir(unique_id='dir')
        self.assertTrue(os.path.exists(temp_dir.name))
        self.assertTrue(os.path.isdir(temp_dir.name))
Example #23
    def test_clean(self):
        temp_dir = autotemp.tempdir(unique_id='clean')
        name = temp_dir.name
        self.assertTrue(os.path.exists(name))
        temp_dir.clean()
        self.assertFalse(os.path.exists(name))
Example #24
    def test_del(self):
        temp_dir = autotemp.tempdir(unique_id='del')
        name = temp_dir.name
        self.assertTrue(os.path.exists(name))
        temp_dir.__del__()
        self.assertFalse(os.path.exists(name))