Esempio n. 1
0
 def test_custom_rpm_version_py_command(self):
     """Ensure the custom rpm_version command runs and reports success."""
     out, err, rc = self.run_setup('rpm_version')
     # Attach both streams so failures are diagnosable from the test log.
     for label, text in (('stdout', out), ('stderr', err)):
         self.addDetail(label, content.text_content(text))
     self.assertIn('Extracting rpm version', out)
     self.assertEqual(rc, 0)
Esempio n. 2
0
    def _run_core(self):
        """Run the blocking test body under the reactor and report the result.

        The test is considered successful only if the run itself succeeded,
        no errors were logged, no deferreds were left unhandled, and the
        reactor was left clean.
        """
        # Add an observer to trap all logged errors.
        self.case.reactor = self._reactor
        error_observer = _log_observer
        full_log = StringIO()
        full_observer = log.FileLogObserver(full_log)
        spinner = self._make_spinner()
        successful, unhandled = run_with_log_observers(
            [error_observer.gotEvent, full_observer.emit], self._blocking_run_deferred, spinner
        )

        # Attach the complete twisted log for post-mortem diagnosis.
        self.case.addDetail("twisted-log", text_content(full_log.getvalue()))

        # Any error logged during the run fails the test.
        logged_errors = error_observer.flushErrors()
        for logged_error in logged_errors:
            successful = False
            self._got_user_failure(logged_error, tb_label="logged-error")

        # Deferreds that never received a result also fail the test.
        if unhandled:
            successful = False
            for debug_info in unhandled:
                f = debug_info.failResult
                info = debug_info._getDebugTracebacks()
                if info:
                    self.case.addDetail("unhandled-error-in-deferred-debug", text_content(info))
                self._got_user_failure(f, "unhandled-error-in-deferred")

        # Leftover event sources mean the reactor was left unclean.
        junk = spinner.clear_junk()
        if junk:
            successful = False
            self._log_user_exception(UncleanReactorError(junk))

        if successful:
            self.result.addSuccess(self.case, details=self.case.getDetails())
Esempio n. 3
0
 def _setUp(self):
     """Create a dedicated Neutron network and subnet for this fixture."""
     super(NeutronNetworkFixture, self)._setUp()
     self.neutron = get_neutron_client(
         project_name=self.project_fixture.name,
         user_name=self.project_fixture.admin_user.name,
         password=self.project_fixture.admin_user_fixture.password)
     self.subnet_id = self.get_subnet_id()
     cidr = CONF.network['cidr'].format(subnet=self.subnet_id)
     # TODO: handle clashes and retry.
     self.net_name = factory.make_obj_name("network")
     self.sub_name = factory.make_obj_name("subnet")
     self.network = self.neutron.create_network(
         {"network": {"name": self.net_name}})
     network_id = self.network["network"]["id"]
     self.subnet = self.neutron.create_subnet(
         {"subnet": {"name": self.sub_name,
                     "network_id": network_id,
                     "cidr": cidr,
                     "ip_version": 4}})
     self.addCleanup(self.delete_network)
     # Record what was created so failures are easy to diagnose.
     self.addDetail(
         'NeutronNetworkFixture-network',
         text_content('Network %s created' % self.net_name))
     self.addDetail(
         'NeutronNetworkFixture-subnet',
         text_content('Subnet %s created (cidr=%s)' % (
             self.sub_name, cidr)))
    def test_bug_import_script_in_testing_mode(self):
        """The bug import script should succeed when run with --testing."""
        script = os.path.join(config.root, 'scripts', 'bug-import.py')
        cache_filename = os.path.join(self.tmpdir, 'bug-map.pickle')
        stdout, stderr, returncode = run_process(
            (script, '--testing', '--cache', cache_filename,
             self.write_example_xml()))
        self.addDetail("stdout", text_content(stdout))
        self.addDetail("stderr", text_content(stderr))
        self.assertEqual(0, returncode)

        # The script logs the created product; pull its name out of stderr.
        product_match = re.search(r'Product ([^ ]+) created', stderr)
        self.assertIsNotNone(product_match)
        product_name = product_match.group(1)

        # Likewise for the imported bug number.
        bug_match = re.search(r'Creating Launchpad bug #(\d+)', stderr)
        self.assertIsNotNone(bug_match)
        bug_id = int(bug_match.group(1))

        # Abort our transaction so we observe what the script committed.
        transaction.abort()
        bug = getUtility(IBugSet).get(bug_id)
        self.assertEqual('A test bug', bug.title)
        self.assertEqual(product_name, bug.bugtasks[0].product.name)
Esempio n. 5
0
    def test_show_redundancy_summary(self):
        """'show redundancy summary' should echo the canned command output."""

        ret, session = self.login()
        prompt = accelerated_upgrade.get_prompt(session)
        # Search window just large enough to match the prompt efficiently.
        search_window = len(prompt) + 1

        self.addDetail('prompt', text_content(prompt))

        # A bare newline should simply return us to the prompt.
        session.sendline()
        status = session.expect_exact(
            [prompt], timeout=3, searchwindowsize=search_window)
        self.assertEqual(0, status)
        self.addDetail('received-prompt', text_content(session.after))

        cmd = 'show redundancy summary'
        session.sendline(cmd)

        status = session.expect_exact(
            [prompt, INVALID_INPUT, MORE, EOF],
            timeout=5,
            searchwindowsize=search_window
        )

        # Check we got the prompt (pattern index 0) back after the command,
        # not an error, a pager prompt, or EOF.
        self.assertEqual(0, status)

        # replace LF with CR/LF (maybe different on Windows)
        output = asr_9k.commands[cmd][0].replace('\n', '\r\n')
        found = session.before.find(output)

        self.addDetail('found', text_content(str(found)))
        self.addDetail('received', text_content(session.before))
        self.addDetail('expected', text_content(output))

        # The canned output must appear in what the session captured.
        self.assertNotEqual(-1, found)
Esempio n. 6
0
    def run_apt_autoremove(self):
        """Run ``apt-get autoremove`` and attach its output as test details.

        No-op on Windows.  Failures are recorded as details rather than
        raised, unless SNAPCRAFT_APT_AUTOREMOVE_CHECK_FAIL is set in the
        environment.
        """
        if sys.platform == "win32":
            return

        # Non-interactive debconf so apt never blocks on a prompt.
        deb_env = dict(
            os.environ,
            DEBIAN_FRONTEND="noninteractive",
            DEBCONF_NONINTERACTIVE_SEEN="true",
        )

        try:
            output = subprocess.check_output(
                ["sudo", "apt-get", "autoremove", "-y"],
                stderr=subprocess.STDOUT,
                env=deb_env,
            )
        except FileNotFoundError as e:
            self.addDetail("apt-get autoremove error", content.text_content(str(e)))
        except subprocess.CalledProcessError as e:
            self.addDetail("apt-get autoremove error", content.text_content(str(e)))
            self.addDetail(
                "apt-get autoremove output",
                content.text_content(e.output.decode("utf-8")),
            )
            # NOTE(review): any non-empty value (even "0") triggers the raise.
            if os.getenv("SNAPCRAFT_APT_AUTOREMOVE_CHECK_FAIL", False):
                raise
        else:
            self.addDetail(
                "apt-get autoremove output",
                content.text_content(output.decode("utf-8")),
            )
Esempio n. 7
0
    def test_pep8(self):
        """Run pep8 on the fixture and verify the expected rule outcome."""
        # NOTE(jecarey): Add tests marked as off_by_default to enable testing
        turn_on = set(['H106'])
        if self.options.select:
            turn_on.update(self.options.select)
        self.options.select = tuple(turn_on)

        report = pep8.BaseReport(self.options)
        checker = pep8.Checker(filename=self.filename, lines=self.lines,
                               options=self.options, report=report)
        checker.check_all()
        self.addDetail('doctest', content.text_content(self.raw))

        if self.code != 'Okay':
            # A specific rule code is expected to have fired.
            self.addDetail('reason',
                           content.text_content("Failed to trigger rule %s" %
                                                self.code))
            self.assertIn(self.code, report.counters)
        else:
            # Nothing beyond the benchmark bookkeeping keys should appear.
            unexpected = [key for key in report.counters
                          if key not in self.options.benchmark_keys]
            self.assertThat(
                len(report.counters),
                matchers.Not(matchers.GreaterThan(
                    len(self.options.benchmark_keys))),
                "incorrectly found %s" % ', '.join(unexpected))
Esempio n. 8
0
    def test_recordsets_with_zone_id(self):
        """Exercise recordset create/list/get/update/delete via zone id."""
        # Random subdomain so parallel runs do not collide.
        sub = ''.join(random.choice(string.ascii_lowercase) for _ in range(6))

        zone = '%s.example2.net.' % sub
        email = '*****@*****.**'
        name = 'www.%s' % zone
        type_ = 'a'
        description = 'Test recordset'
        ttl = 3600
        records = ['192.168.1.1']

        self.addDetail('zone', content.text_content(zone))
        self.addDetail('recordset', content.text_content(name))

        # Create a zone to hold the tested recordset.
        zone_obj = self.user_cloud.create_zone(name=zone, email=email)

        # Creating a recordset should return its full representation.
        created_recordset = self.user_cloud.create_recordset(
            zone_obj['id'], name, type_, records, description, ttl)
        self.addCleanup(self.cleanup, zone, created_recordset['id'])

        expected = {
            'zone_id': zone_obj['id'],
            'name': name,
            'type': type_.upper(),
            'records': records,
            'description': description,
            'ttl': ttl,
        }
        for field, value in expected.items():
            self.assertEqual(created_recordset[field], value)

        # Recordsets should be listable by zone id.
        self.assertIsNotNone(self.user_cloud.list_recordsets(zone_obj['id']))

        # get_recordset should return the same recordset.
        fetched = self.user_cloud.get_recordset(
            zone_obj['id'], created_recordset['id'])
        self.assertEqual(fetched['id'], created_recordset['id'])

        # Updating a single field should leave every other field untouched.
        updated_recordset = self.user_cloud.update_recordset(
            zone_obj['id'],
            created_recordset['id'],
            ttl=7200)
        self.assertEqual(updated_recordset['id'], created_recordset['id'])
        expected['ttl'] = 7200
        del expected['zone_id']
        for field, value in expected.items():
            self.assertEqual(updated_recordset[field], value)

        # Deleting should report success.
        deleted_recordset = self.user_cloud.delete_recordset(
            zone, created_recordset['id'])
        self.assertTrue(deleted_recordset)
Esempio n. 9
0
    def test_logfile_contains_finish(self):
        """Confirm that logger calls in 'finish' phase recorded

        Repository layout being checked

          B---C             local/master
         /
        A---D---E           upstream/master

        """
        tree = [
            ('A', []),
            ('B', ['A']),
            ('C', ['B']),
            ('D', ['A']),
            ('E', ['D']),
        ]

        branches = {
            'head': ('master', 'C'),
            'upstream': ('upstream/master', 'E'),
        }

        self.gittree = base.BuildTree(self.testrepo, tree, branches.values())

        cmdline = self.parser.get_default('script_cmdline')

        tfile = None
        try:
            tfile = tempfile.NamedTemporaryFile(delete=False)
            # need to close to allow reopen to write
            tfile.close()

            cmdline.extend(['-v', '--log-file', tfile.name, '--log-level',
                            'debug', 'import', '--into', 'master',
                            'upstream/master'])
            try:
                output = subprocess.check_output(cmdline,
                                                 stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as cpe:
                # Attach the failing command's output before re-raising.
                self.addDetail(
                    'subprocess-output',
                    content.text_content(cpe.output.decode('utf-8')))
                raise
            self.addDetail(
                'subprocess-output',
                content.text_content(output.decode('utf-8')))

            # Use a context manager so the log-file handle is closed before
            # the finally block deletes the file (the original left the
            # handle open, which breaks deletion on Windows and leaks it
            # elsewhere).
            with open(tfile.name, 'r') as logfile:
                logfile_contents = logfile.read()
        finally:
            if tfile and os.path.exists(tfile.name):
                os.remove(tfile.name)

        # The 'finish' phase emits both of these debug messages.
        self.assertThat(
            logfile_contents,
            matchers.Contains("Merging by inverting the 'ours' strategy"))
        self.assertThat(
            logfile_contents,
            matchers.Contains("Replacing tree contents with those from"))
Esempio n. 10
0
 def refcounts(self, rc, prev):
     """Report a change in reference counts."""
     # XXX: Emit the details dict as JSON?
     self._emit_fake_test(
         self.TAG_REFCOUNTS,
         self.TAG_REFCOUNTS,
         {
             'sys-refcounts': text_content(str(rc)),
             'changes': text_content(str(rc - prev)),
         })
Esempio n. 11
0
 def detailed_refcounts(self, track, rc, prev):
     """Report a change in reference counts, with extra tracker detail."""
     delta = rc - prev
     report = {
         'sys-refcounts': text_content(str(rc)),
         'changes': text_content(str(delta)),
         'track': text_content(str(track.delta)),
     }
     self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, report)
Esempio n. 12
0
    def test_update_cluster(self):
        """update_cluster should rename the cluster and change nothing else."""
        profile_name = "test_profile"
        # Minimal senlin profile spec describing a nova server.
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": "cirros-0.4.0-x86_64-disk",
                "networks": [
                    {
                        "network": "private"
                    }
                ],
                "security_groups": [
                    "default"
                ]
            },
            "type": "os.nova.server",
            "version": 1.0
        }

        self.addDetail('profile', content.text_content(profile_name))
        # Test that we can create a profile and we get it returned

        profile = self.user_cloud.create_cluster_profile(name=profile_name,
                                                         spec=spec)

        self.addCleanup(self.cleanup_profile, profile['id'])

        cluster_name = 'example_cluster'
        desired_capacity = 0

        self.addDetail('cluster', content.text_content(cluster_name))

        # Test that we can create a cluster and we get it returned
        cluster = self.user_cloud.create_cluster(
            name=cluster_name, profile=profile,
            desired_capacity=desired_capacity)

        self.addCleanup(self.cleanup_cluster, cluster['cluster']['id'])

        # Test that we can update a field on the cluster and only that field
        # is updated

        self.user_cloud.update_cluster(cluster['cluster']['id'],
                                       new_name='new_cluster_name')

        # Wait until the cluster settles back to ACTIVE before reading it.
        wait = wait_for_status(
            self.user_cloud.get_cluster_by_id,
            {'name_or_id': cluster['cluster']['id']}, 'status', 'ACTIVE')

        self.assertTrue(wait)
        cluster_update = self.user_cloud.get_cluster_by_id(
            cluster['cluster']['id'])
        self.assertEqual(cluster_update['id'], cluster['cluster']['id'])
        self.assertEqual(cluster_update['name'], 'new_cluster_name')
        self.assertEqual(cluster_update['profile_id'],
                         cluster['cluster']['profile_id'])
        self.assertEqual(cluster_update['desired_capacity'],
                         cluster['cluster']['desired_capacity'])
    def test_yaml_snippet(self):
        """Render the YAML fixture to XML and compare with the expected XML.

        Reads the fixture pair (``in_filename`` YAML, expected XML), builds
        the declared project type if any, generates XML through the module
        registry, and doctest-diffs the prettified result.
        """
        if not self.in_filename:
            return

        if self.conf_filename is not None:
            config = configparser.ConfigParser()
            # read_file + context manager replaces the deprecated readfp()
            # call on an unclosed file handle.
            with open(self.conf_filename) as conf_fp:
                config.read_file(conf_fp)
        else:
            config = {}

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)
        if isinstance(yaml_content, list):
            yaml_content = yaml_content[0]

        # Instantiate the project implementation declared in the fixture,
        # if any; unknown project-types fall through with project = None.
        project = None
        if 'project-type' in yaml_content:
            project_type = yaml_content['project-type']
            if project_type == "maven":
                project = project_maven.Maven(None)
            elif project_type == "matrix":
                project = project_matrix.Matrix(None)
            elif project_type == "flow":
                project = project_flow.Flow(None)
            elif project_type == "multijob":
                project = project_multijob.MultiJob(None)
            elif project_type == "folder":
                project = folders.Folder(None)

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')

        plugins_info = None
        if self.plugins_info_filename is not None:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info",
                           text_content(str(plugins_info)))

        parser = YamlParser(config, plugins_info)

        pub = self.klass(parser.registry)

        # Generate the XML tree directly with modules/general.
        pub.gen_xml(parser, xml_project, yaml_content)

        # Prettify generated XML for a stable textual comparison.
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.NORMALIZE_WHITESPACE |
                                              doctest.REPORT_NDIFF)
        )
Esempio n. 14
0
    def test_get_cluster_receiver_by_id(self):
        """get_cluster_receiver_by_id should return the created receiver."""
        profile_name = "test_profile"
        # Minimal senlin profile spec describing a nova server.
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": "cirros-0.4.0-x86_64-disk",
                "networks": [
                    {
                        "network": "private"
                    }
                ],
                "security_groups": [
                    "default"
                ]
            },
            "type": "os.nova.server",
            "version": 1.0
        }

        self.addDetail('profile', content.text_content(profile_name))
        # Test that we can create a profile and we get it returned

        profile = self.user_cloud.create_cluster_profile(name=profile_name,
                                                         spec=spec)

        self.addCleanup(self.cleanup_profile, profile['id'])

        cluster_name = 'example_cluster'
        desired_capacity = 0

        self.addDetail('cluster', content.text_content(cluster_name))

        # Test that we can create a cluster and we get it returned
        cluster = self.user_cloud.create_cluster(
            name=cluster_name, profile=profile,
            desired_capacity=desired_capacity)

        self.addCleanup(self.cleanup_cluster, cluster['cluster']['id'])

        receiver_name = "example_receiver"
        receiver_type = "webhook"

        self.addDetail('receiver', content.text_content(receiver_name))

        # Test that we can create a receiver and we get it returned

        receiver = self.user_cloud.create_cluster_receiver(
            name=receiver_name, receiver_type=receiver_type,
            cluster_name_or_id=cluster['cluster']['id'],
            action='CLUSTER_SCALE_OUT')

        self.addCleanup(self.cleanup_receiver, receiver['id'])

        # Test that we get the same receiver with the get_receiver method

        receiver_get = self.user_cloud.get_cluster_receiver_by_id(
            receiver['id'])
        self.assertEqual(receiver_get['id'], receiver["id"])
    def test_recordsets(self):
        """Test DNS recordsets functionality.

        Creates a zone, then exercises create/list/get (by id and by name),
        update, and delete of a recordset within it.
        """
        zone = 'example2.net.'
        email = '*****@*****.**'
        name = 'www'
        type_ = 'a'
        description = 'Test recordset'
        ttl = 3600
        records = ['192.168.1.1']

        self.addDetail('zone', content.text_content(zone))
        self.addDetail('recordset', content.text_content(name))
        self.addCleanup(self.cleanup, zone, name)

        # Create a zone to hold the tested recordset
        zone_obj = self.user_cloud.create_zone(name=zone, email=email)

        # Test we can create a recordset and we get it returned
        created_recordset = self.user_cloud.create_recordset(zone, name, type_,
                                                             records,
                                                             description, ttl)
        self.assertEqual(created_recordset['zone_id'], zone_obj['id'])
        # The API reports the fully-qualified record name.
        self.assertEqual(created_recordset['name'], name + '.' + zone)
        self.assertEqual(created_recordset['type'], type_.upper())
        self.assertEqual(created_recordset['records'], records)
        self.assertEqual(created_recordset['description'], description)
        self.assertEqual(created_recordset['ttl'], ttl)

        # Test that we can list recordsets
        recordsets = self.user_cloud.list_recordsets(zone)
        self.assertIsNotNone(recordsets)

        # Test we get the same recordset with the get_recordset method
        get_recordset = self.user_cloud.get_recordset(zone,
                                                      created_recordset['id'])
        self.assertEqual(get_recordset['id'], created_recordset['id'])

        # Test the get method also works by name
        get_recordset = self.user_cloud.get_recordset(zone, name + '.' + zone)
        self.assertEqual(get_recordset['id'], created_recordset['id'])

        # Test we can update a field on the recordset and only that field
        # is updated
        updated_recordset = self.user_cloud.update_recordset(zone_obj['id'],
                                                             name + '.' + zone,
                                                             ttl=7200)
        self.assertEqual(updated_recordset['id'], created_recordset['id'])
        self.assertEqual(updated_recordset['name'], name + '.' + zone)
        self.assertEqual(updated_recordset['type'], type_.upper())
        self.assertEqual(updated_recordset['records'], records)
        self.assertEqual(updated_recordset['description'], description)
        self.assertEqual(updated_recordset['ttl'], 7200)

        # Test we can delete and get True returned
        deleted_recordset = self.user_cloud.delete_recordset(
            zone, name + '.' + zone)
        self.assertTrue(deleted_recordset)
Esempio n. 16
0
 def addSkip(self, test, reason=None, details=None):
     """Record *test* as skipped, folding the skip reason into its details."""
     super(TestByTestResult, self).addSkip(test, reason, details)
     self._status = 'skip'
     if details is not None:
         if reason:
             # XXX: What if details already has a 'reason' key?
             details['reason'] = text_content(reason)
     else:
         details = {'reason': text_content(reason)}
     self._details = details
Esempio n. 17
0
    def test_yaml_snippet(self):
        """Render the YAML fixture to XML and diff against the expected XML."""
        if not self.in_filename:
            return

        jjb_config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename is not None:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename", text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)

        pub = self.klass(registry)

        # Instantiate the project implementation declared in the fixture,
        # if any; unknown project-types fall through with project = None.
        project = None
        if "project-type" in yaml_content:
            if yaml_content["project-type"] == "maven":
                project = project_maven.Maven(registry)
            elif yaml_content["project-type"] == "matrix":
                project = project_matrix.Matrix(registry)
            elif yaml_content["project-type"] == "flow":
                project = project_flow.Flow(registry)
            elif yaml_content["project-type"] == "multijob":
                project = project_multijob.MultiJob(registry)
            elif yaml_content["project-type"] == "externaljob":
                project = project_externaljob.ExternalJob(registry)

        # Views, unlike projects, reject unknown types outright.
        if "view-type" in yaml_content:
            if yaml_content["view-type"] == "list":
                project = view_list.List(None)
            elif yaml_content["view-type"] == "pipeline":
                project = view_pipeline.Pipeline(None)
            else:
                raise InvalidAttributeError("view-type", yaml_content["view-type"])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element("project")

        # Generate the XML tree directly with modules/general
        pub.gen_xml(xml_project, yaml_content)

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")

        self.assertThat(
            pretty_xml, testtools.matchers.DocTestMatches(expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF)
        )
Esempio n. 18
0
    def test_yaml_snippet(self):
        """Expand the YAML fixture into jobs and diff against expected XML.

        Also verifies that output reference files live under the folder
        paths implied by each job's full name.
        """
        config = self._get_config()

        # Normalise the expected XML the same way the generated XML is
        # normalised below, so the doctest comparison is stable.
        expected_xml = self._read_utf8_content().strip() \
            .replace('<BLANKLINE>', '').replace('\n\n', '\n')

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info",
                           text_content(str(plugins_info)))

        registry = ModuleRegistry(config, plugins_info)
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        # Generate the XML tree
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)

        # Stable ordering so the concatenated output is deterministic.
        xml_jobs.sort(key=AlphanumSort)

        # check reference files are under correct path for folders
        prefix = os.path.dirname(self.in_filename)
        # split using '/' since fullname uses URL path separator
        expected_folders = list(set([
            os.path.normpath(
                os.path.join(prefix,
                             '/'.join(job_data['name'].split('/')[:-1])))
            for job_data in job_data_list
        ]))
        actual_folders = [os.path.dirname(f) for f in self.out_filenames]

        six.assertCountEqual(
            self,
            expected_folders, actual_folders,
            "Output file under wrong path, was '%s', should be '%s'" %
            (self.out_filenames[0],
                os.path.join(expected_folders[0],
                             os.path.basename(self.out_filenames[0]))))

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in xml_jobs) \
            .strip().replace('\n\n', '\n')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                            doctest.ELLIPSIS |
                                            doctest.REPORT_NDIFF))
 def test_json_renderer(self):
     """JsonRenderer resolves template paths against a JSON context."""
     context = json.loads(TEST_JSON)
     renderer = renderers.JsonRenderer()

     # A list-valued path renders as JSON text.
     rendered = renderer.render('{{a.b}}', context)
     self.addDetail('result', content.text_content(rendered))
     result_structure = json.loads(rendered)
     self.assertEqual(json.loads('[1,2,3,"foo"]'), result_structure)

     # A scalar path renders as its plain string value.
     rendered = renderer.render('{{a.c}}', context)
     self.addDetail('result', content.text_content(rendered))
     self.assertEqual(u'the quick brown fox', rendered)
Esempio n. 20
0
    def __init__(self, popen_command, code, expected, stdout, stderr):
        """Initialize this mismatch detail object."""
        super(SubprocessExitWithMismatch, self).__init__()

        # Human-readable summary of the exit-code mismatch.
        self._msg = "{0} exited with {1}, but expected {2}".format(
            " ".join(popen_command), code, expected)
        # Captured process streams, exposed as mismatch details.
        self._details = {
            "Output": text_content(stdout),
            "Errors": text_content(stderr),
        }
 def _end_test(self, detail_description):
     """Record the elapsed test time and any benchmark reference bounds."""
     self.test_time = time.time() - self.start_time
     self.addDetail("Current", test_content.text_content(
         "%s time: %ss" % (detail_description, self.test_time)))
     # Attach the stored min/max reference timings, if a benchmark exists.
     bounds = self._get_benchmark_result()
     if bounds is not None:
         self.addDetail("AWS", test_content.text_content(
             "Min time: %ss, Max time: %ss" % (bounds[0], bounds[1])))
Esempio n. 22
0
    def test_custom_build_py_command(self):
        """Test custom build_py command.

        A custom subclass of the build_py command should run when listed in
        the commands [global] option, rather than the normal build command.
        """
        out, err, rc = self.run_setup('build_py')
        # Attach both streams so failures are diagnosable from the test log.
        self.addDetail('stdout', content.text_content(out))
        self.addDetail('stderr', content.text_content(err))
        self.assertIn('Running custom build_py command.', out)
        self.assertEqual(rc, 0)
Esempio n. 23
0
    def setUp(self):
        """Attach diagnostic details, then build the git tree under test."""
        # Add the description first so it is present even if parent setUp
        # fails.
        self.addDetail('description', text_content(self.desc))

        # Builds the tree to be tested.
        super(TestUpstreamMergeBaseSearcher, self).setUp()

        # The tree must exist before commit hexshas can be resolved.
        expected = [(c, self.gittree.graph[c].hexsha)
                    for c in self.expected_changes]
        self.addDetail('expected-changes', text_content(pformat(expected)))
    def test_smoke(self):
        """The process-job-source cron script completes a package diff job."""
        diff = create_proper_job(self.factory)
        transaction.commit()
        command = "LP_DEBUG_SQL=1 cronscripts/process-job-source.py -vv %s" % (
            IPackageDiffJobSource.getName())
        out, err, exit_code = run_script(command)

        # Attach both streams so failures are diagnosable from the test log.
        self.addDetail("stdout", text_content(out))
        self.addDetail("stderr", text_content(err))
        self.assertEqual(0, exit_code)
        self.assertEqual(PackageDiffStatus.COMPLETED, diff.status)
        self.assertIsNot(None, diff.diff_content)
Esempio n. 25
0
 def execute(self, cmd, stdin=None):
     """Run the configured base command with *cmd* appended.

     Captures stdout/stderr, attaches both (decoded) as test details, and
     returns a (returncode, stdout, stderr) tuple of raw bytes.
     """
     proc = subprocess.Popen(
         self.cmd + cmd,
         stdin=subprocess.PIPE,
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
     )
     out, err = proc.communicate(stdin)
     for label, data in (('stdout', out), ('stderr', err)):
         self.addDetail(
             label, content.text_content(data.decode('utf-8', 'replace')))
     return proc.returncode, out, err
Esempio n. 26
0
    def test_yaml_snippet(self):
        """Render the YAML fixture to XML and diff against the expected XML."""
        if not self.in_filename:
            return

        config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename is not None:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info",
                           text_content(str(plugins_info)))

        parser = YamlParser(config, plugins_info)

        pub = self.klass(parser.registry)

        # Dispatch table from the declared project-type to its class;
        # unknown types leave project as None.
        project_classes = {
            'maven': project_maven.Maven,
            'matrix': project_matrix.Matrix,
            'flow': project_flow.Flow,
            'multijob': project_multijob.MultiJob,
            'externaljob': project_externaljob.ExternalJob,
        }
        project = None
        if 'project-type' in yaml_content:
            project_class = project_classes.get(yaml_content['project-type'])
            if project_class is not None:
                project = project_class(parser.registry)

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')

        # Generate the XML tree directly with modules/general.
        pub.gen_xml(parser, xml_project, yaml_content)

        # Prettify the generated XML for a stable textual comparison.
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.REPORT_NDIFF)
        )
Esempio n. 27
0
 def setUp(self):
     """Run the configured subprocess, capturing its output as details.

     Raises AssertionError when the process exits non-zero; on success
     the captured attributes are scheduled for removal at cleanup.
     """
     super(CapturedSubprocess, self).setUp()
     proc = subprocess.Popen(*self.args, **self.kwargs)
     stdout, stderr = proc.communicate()
     self.out = stdout.decode('utf-8', 'replace')
     self.err = stderr.decode('utf-8', 'replace')
     self.addDetail(self.label + '-stdout', content.text_content(self.out))
     self.addDetail(self.label + '-stderr', content.text_content(self.err))
     self.returncode = proc.returncode
     if proc.returncode:
         raise AssertionError('Failed process %s' % proc.returncode)
     for attr in ('out', 'err', 'returncode'):
         self.addCleanup(delattr, self, attr)
Esempio n. 28
0
 def test_pep8(self):
     """Check the sample lines trigger exactly the expected pep8 code."""
     report = pep8.BaseReport(self.options)
     checker = pep8.Checker(
         lines=self.lines, options=self.options, report=report)
     checker.check_all()
     self.addDetail("doctest", content.text_content(self.raw))
     if self.code == "Okay":
         # Nothing beyond the benchmark bookkeeping keys may be reported.
         unexpected = [
             key for key in report.counters
             if key not in self.options.benchmark_keys
         ]
         self.assertThat(
             len(report.counters),
             matchers.Not(matchers.GreaterThan(
                 len(self.options.benchmark_keys))),
             "incorrectly found %s" % ", ".join(unexpected),
         )
     else:
         self.addDetail("reason", content.text_content(
             "Failed to trigger rule %s" % self.code))
         self.assertIn(self.code, report.counters)
    def test_script(self):
        """bugsummary-rebuild.py folds journal rows into the summary table."""
        product = self.factory.makeProduct()
        self.factory.makeBug(target=product)
        # Precondition: the fresh bug exists only in the journal.
        self.assertEqual(0, get_bugsummary_rows(product).count())
        self.assertEqual(1, get_bugsummaryjournal_rows(product).count())
        transaction.commit()

        exit_code, out, err = run_script('scripts/bugsummary-rebuild.py')
        for label, text in (("stdout", out), ("stderr", err)):
            self.addDetail(label, text_content(text))
        self.assertEqual(0, exit_code)

        transaction.commit()
        # The rebuild moved the journal row into the summary table.
        self.assertEqual(1, get_bugsummary_rows(product).count())
        self.assertEqual(0, get_bugsummaryjournal_rows(product).count())
Esempio n. 30
0
    def install_snap(self, snap_content_dir, snap_name, version):
        """Copy the built snap into the testbed, install it, and verify.

        Registers cleanups that delete the copied file and remove the
        installed snap.  No-op when the 'skip-install' config flag is set.

        :param snap_content_dir: directory under ``self.src_dir`` holding
            the built snap file.
        :param snap_name: the snap's name (used to build the file name).
        :param version: the snap's version string.
        """
        if not config.get('skip-install', False):
            snap_file_name = '{}_{}_amd64.snap'.format(
                snap_name, version)
            snap_local_path = os.path.join(
                self.src_dir, snap_content_dir, snap_file_name)
            self.snappy_testbed.copy_file(snap_local_path, '/home/ubuntu')
            snap_path_in_testbed = os.path.join(
                '/home/ubuntu/', snap_file_name)
            # Remove the copied snap file from the testbed afterwards.
            self.addCleanup(
                self.snappy_testbed.run_command,
                ['rm', snap_path_in_testbed])
            try:
                self.snappy_testbed.run_command([
                    'sudo', 'snap', 'install', snap_path_in_testbed])
            except subprocess.CalledProcessError as e:
                self.addDetail(
                    'ssh output', content.text_content(str(e.output)))
                raise
            # Uninstall the snap from the testbed.
            # Fix: use the snap_name parameter directly instead of
            # re-deriving it via snap_file_name[:snap_file_name.index('_')],
            # which truncated any name containing an underscore at the
            # first '_'.
            self.addCleanup(
                self.snappy_testbed.run_command,
                ['sudo', 'snap', 'remove', snap_name])

            list_output = self.snappy_testbed.run_command(
                ['snap', 'list'])
            expected = '.*{}.*'.format(snap_name)
            self.assertThat(
                list_output, MatchesRegex(expected, flags=re.DOTALL))
Esempio n. 31
0
    def test_list_cluster_policies(self):
        """Create a scaling policy and check it shows up in the policy list."""
        policy_name = 'example_policy'
        # Scaling policy spec: shrink by one node per CLUSTER_SCALE_IN event.
        spec = {
            "properties": {
                "adjustment": {
                    "min_step": 1,
                    "number": 1,
                    "type": "CHANGE_IN_CAPACITY"
                },
                "event": "CLUSTER_SCALE_IN"
            },
            "type": "senlin.policy.scaling",
            "version": "1.0"
        }

        self.addDetail('policy', content.text_content(policy_name))

        # Test that we can create a policy and we get it returned

        policy = self.user_cloud.create_cluster_policy(name=policy_name,
                                                       spec=spec)

        self.addCleanup(self.cleanup_policy, policy['id'])

        policy_get = self.user_cloud.get_cluster_policy_by_id(policy['id'])

        # Test that we can list policies

        policies = self.user_cloud.list_cluster_policies()

        # Format of the created_at variable differs between policy create
        # and policy get so if we don't ignore this variable, comparison will
        # always fail
        # NOTE(review): assumes the new policy is the only one listed --
        # confirm the test account starts with no policies.
        policies[0]['created_at'] = 'ignore'
        policy_get['created_at'] = 'ignore'

        self.assertEqual(policies, [policy_get])
Esempio n. 32
0
    def install_snap(self,
                     snap_local_path,
                     snap_name,
                     version,
                     devmode=False,
                     classic=False):
        """Copy *snap_local_path* into the testbed, install it, and verify.

        Registers cleanups that delete the temporary directory and remove
        the installed snap.  No-op when the 'skip-install' config flag is
        set.

        NOTE(review): the *snap_name* and *version* parameters are unused;
        the name is re-derived from the file name below -- confirm whether
        callers rely on them.
        """
        if not config.get("skip-install", False):
            tmp_in_testbed = self.snappy_testbed.run_command(
                "mktemp -d").strip()
            self.addCleanup(self.snappy_testbed.run_command,
                            ["rm", "-rf", tmp_in_testbed])
            self.snappy_testbed.copy_file(snap_local_path, tmp_in_testbed)
            snap_file_name = os.path.basename(snap_local_path)
            snap_path_in_testbed = os.path.join(tmp_in_testbed, snap_file_name)
            # --force-dangerous allows installing an unsigned local snap.
            cmd = [
                "sudo", "snap", "install", "--force-dangerous",
                snap_path_in_testbed
            ]
            if devmode:
                cmd.append("--devmode")
            if classic:
                cmd.append("--classic")
            try:
                self.snappy_testbed.run_command(cmd)
            except subprocess.CalledProcessError as e:
                self.addDetail("ssh output",
                               content.text_content(str(e.output)))
                raise
            # Uninstall the snap from the testbed.
            # NOTE(review): truncates the file name at the first '_'; fine
            # for snapd names (no underscores allowed) but shadows the
            # snap_name parameter -- confirm.
            snap_name = snap_file_name[:snap_file_name.index("_")]
            self.addCleanup(self.snappy_testbed.run_command,
                            ["sudo", "snap", "remove", snap_name])

            list_output = self.snappy_testbed.run_command(["snap", "list"])
            expected = ".*{}.*".format(snap_name)
            self.assertThat(list_output, MatchesRegex(expected,
                                                      flags=re.DOTALL))
Esempio n. 33
0
    def test_get_cluster_profile_by_id(self):
        """Create a cluster profile and fetch the same record back by id."""
        profile_name = "test_profile"
        # Minimal os.nova.server profile spec shared across these tests.
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": base.IMAGE_NAME,
                "networks": [
                    {
                        "network": "private"
                    }
                ],
                "security_groups": [
                    "default"
                ]
            },
            "type": "os.nova.server",
            "version": 1.0
        }

        self.addDetail('profile', content.text_content(profile_name))
        # Test that we can create a profile and we get it returned

        profile = self.user_cloud.create_cluster_profile(name=profile_name,
                                                         spec=spec)

        self.addCleanup(self.cleanup_profile, profile['id'])

        profile_get = self.user_cloud.get_cluster_profile_by_id(profile['id'])

        # Test that we get the same profile with the get_profile method
        # Format of the created_at variable differs between policy create
        # and policy get so if we don't ignore this variable, comparison will
        # always fail
        profile['created_at'] = 'ignore'
        profile_get['created_at'] = 'ignore'

        self.assertEqual(profile_get, profile)
Esempio n. 34
0
 def test__command_high_verbosity(self):
     """At verbosity 3 in command mode, every level down to debug is logged."""
     verbosity, set_verbosity = self._get_log_levels(3)
     name = factory.make_name("log.name")
     logged = log_something(
         name,
         verbosity=verbosity,
         set_verbosity=set_verbosity,
         mode=LoggingMode.COMMAND,
     )
     self.addDetail("logged", text_content(logged))
     observed = find_log_lines(logged)
     # One line per emitter/level combination exercised by log_something,
     # in emission order.
     expected = [
         (name, "debug", "From `twisted.logger`."),
         (name, "info", "From `twisted.logger`."),
         (name, "warn", "From `twisted.logger`."),
         (name, "error", "From `twisted.logger`."),
         (name, "info", "From `twisted.python.log`."),
         ("logsomething", "info", "From `twisted.python.log.logfile`."),
         (name, "debug", "From `logging`."),
         (name, "info", "From `logging`."),
         (name, "warn", "From `logging`."),
         (name, "error", "From `logging`."),
         ("maas." + name, "debug", "From `get_maas_logger`."),
         ("maas." + name, "info", "From `get_maas_logger`."),
         ("maas." + name, "warn", "From `get_maas_logger`."),
         ("maas." + name, "error", "From `get_maas_logger`."),
     ]
     self.assertSequenceEqual(expected, observed)
     # stdout/stderr prints and the bare warning are not log lines but must
     # still appear in the captured output.
     self.assertThat(
         logged,
         DocTestMatches("""\
     ...
     Printing to stdout.
     Printing to stderr.
     This is a warning!
     """),
     )
Esempio n. 35
0
    def test_list_cluster_profiles(self):
        """Create a profile and check it shows up in the profile list."""
        profile_name = "test_profile"
        # Minimal os.nova.server profile spec shared across these tests.
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": base.IMAGE_NAME,
                "networks": [
                    {
                        "network": "private"
                    }
                ],
                "security_groups": [
                    "default"
                ]
            },
            "type": "os.nova.server",
            "version": 1.0
        }

        self.addDetail('profile', content.text_content(profile_name))
        # Test that we can create a profile and we get it returned

        profile = self.user_cloud.create_cluster_profile(name=profile_name,
                                                         spec=spec)

        self.addCleanup(self.cleanup_profile, profile['id'])

        # Test that we can list profiles

        # Wait until the new profile is retrievable before listing.
        wait = wait_for_create(self.user_cloud.get_cluster_profile_by_id,
                               {'name_or_id': profile['id']})

        get_profile = self.user_cloud.get_cluster_profile_by_id(profile['id'])

        profiles = self.user_cloud.list_cluster_profiles()
        # NOTE(review): assumes the new profile is the only one listed --
        # confirm the test account starts with no profiles.
        self.assertEqual(profiles, [get_profile])
        self.assertTrue(wait)
Esempio n. 36
0
    def install_snap(self,
                     snap_local_path,
                     snap_name,
                     version,
                     devmode=False,
                     classic=False):
        """Copy *snap_local_path* into the testbed, install it, and verify.

        Registers cleanups that delete the temporary directory and remove
        the installed snap.  No-op when the 'skip-install' config flag is
        set.

        NOTE(review): the *snap_name* and *version* parameters are unused;
        the name is re-derived from the file name below -- confirm whether
        callers rely on them.
        """
        if not config.get('skip-install', False):
            tmp_in_testbed = self.snappy_testbed.run_command(
                'mktemp -d').strip()
            self.addCleanup(self.snappy_testbed.run_command,
                            ['rm', '-rf', tmp_in_testbed])
            self.snappy_testbed.copy_file(snap_local_path, tmp_in_testbed)
            snap_file_name = os.path.basename(snap_local_path)
            snap_path_in_testbed = os.path.join(tmp_in_testbed, snap_file_name)
            # --force-dangerous allows installing an unsigned local snap.
            cmd = [
                'sudo', 'snap', 'install', '--force-dangerous',
                snap_path_in_testbed
            ]
            if devmode:
                cmd.append('--devmode')
            if classic:
                cmd.append('--classic')
            try:
                self.snappy_testbed.run_command(cmd)
            except subprocess.CalledProcessError as e:
                self.addDetail('ssh output',
                               content.text_content(str(e.output)))
                raise
            # Uninstall the snap from the testbed.
            # NOTE(review): truncates the file name at the first '_'; fine
            # for snapd names (no underscores allowed) but shadows the
            # snap_name parameter -- confirm.
            snap_name = snap_file_name[:snap_file_name.index('_')]
            self.addCleanup(self.snappy_testbed.run_command,
                            ['sudo', 'snap', 'remove', snap_name])

            list_output = self.snappy_testbed.run_command(['snap', 'list'])
            expected = '.*{}.*'.format(snap_name)
            self.assertThat(list_output, MatchesRegex(expected,
                                                      flags=re.DOTALL))
Esempio n. 37
0
    def _run_core(self):
        """Run the test case inside the spinner, collecting Twisted logs.

        Flushes logged errors and unhandled deferred failures into test
        details, treats leftover reactor junk as a failure, and reports
        success only when nothing went wrong.
        """
        # Add an observer to trap all logged errors.
        self.case.reactor = self._reactor
        error_observer = _log_observer
        full_log = StringIO()
        full_observer = log.FileLogObserver(full_log)
        spinner = self._make_spinner()
        successful, unhandled = run_with_log_observers(
            [error_observer.gotEvent, full_observer.emit],
            self._blocking_run_deferred, spinner)

        # Fix: full_log's file position sits at EOF after the observer
        # wrote to it, so `Content(UTF8_TEXT, full_log.readlines)` always
        # produced an empty detail (and Content expects byte chunks, not
        # str lines).  Snapshot the whole buffer instead.
        self.case.addDetail('twisted-log',
                            text_content(full_log.getvalue()))

        logged_errors = error_observer.flushErrors()
        for logged_error in logged_errors:
            successful = False
            self._got_user_failure(logged_error, tb_label='logged-error')

        if unhandled:
            successful = False
            for debug_info in unhandled:
                f = debug_info.failResult
                info = debug_info._getDebugTracebacks()
                if info:
                    self.case.addDetail('unhandled-error-in-deferred-debug',
                                        text_content(info))
                self._got_user_failure(f, 'unhandled-error-in-deferred')

        junk = spinner.clear_junk()
        if junk:
            successful = False
            self._log_user_exception(UncleanReactorError(junk))

        if successful:
            self.result.addSuccess(self.case, details=self.case.getDetails())
Esempio n. 38
0
    def run_snapcraft(
            self, command, project_dir=None, debug=True,
            pre_func=lambda: None):
        """Run snapcraft with *command* and return its combined output.

        Copies *project_dir* into the cwd first when given, runs
        *pre_func* just before invoking snapcraft, and attaches the
        failing output as a detail on error.
        """
        if project_dir:
            self.copy_project_to_cwd(project_dir)

        args = [command] if isinstance(command, str) else command
        invocation = [self.snapcraft_command] + (['-d'] if debug else [])
        try:
            pre_func()
            output = subprocess.check_output(
                invocation + args,
                stderr=subprocess.STDOUT, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            self.addDetail('output', content.text_content(e.output))
            raise

        # Keep the environment lean between tests unless explicitly opted out.
        if not os.getenv('SNAPCRAFT_IGNORE_APT_AUTOREMOVE', False):
            self.addCleanup(self.run_apt_autoremove)

        return output
Esempio n. 39
0
    def test_volumes(self):
        """Create a volume plus snapshot, list them, fetch, then delete."""
        volume_name = self.getUniqueString()
        snapshot_name = self.getUniqueString()
        self.addDetail('volume', content.text_content(volume_name))
        self.addCleanup(self.cleanup, volume_name, snapshot_name)
        volume = self.demo_cloud.create_volume(display_name=volume_name,
                                               size=1)
        snapshot = self.demo_cloud.create_volume_snapshot(
            volume['id'], display_name=snapshot_name)

        # Both the volume and its snapshot must show up in listings.
        self.assertIn(
            volume['id'],
            [v['id'] for v in self.demo_cloud.list_volumes()])
        self.assertIn(
            snapshot['id'],
            [s['id'] for s in self.demo_cloud.list_volume_snapshots()])

        fetched = self.demo_cloud.get_volume_snapshot_by_id(snapshot['id'])
        self.assertEqual(snapshot['id'], fetched['id'])

        self.demo_cloud.delete_volume_snapshot(snapshot_name, wait=True)
        self.demo_cloud.delete_volume(volume_name, wait=True)
Esempio n. 40
0
    def test_flake8(self):
        """Check the sample lines trigger exactly the expected flake8 code."""
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(''.join(self.lines))
        # Fix: remove the temp file via addCleanup so it is deleted even
        # when an assertion below fails (the old trailing os.remove was
        # skipped on failure, leaking the file).
        self.addCleanup(os.remove, f.name)

        cmd = [
            sys.executable, '-mflake8', '--isolated',
            '--select=%s' % self.code, '--ignore=F',
            '--format=%(code)s\t%(path)s\t%(row)d', f.name
        ]
        out, _ = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()

        out = out.decode('utf-8')

        if self.code == 'Okay':
            # An "Okay" sample must produce no findings at all.
            self.assertEqual('', out)
        else:
            self.addDetail(
                'reason',
                content.text_content("Failed to trigger rule %s" % self.code))
            self.assertNotEqual('', out)
            self.assertEqual(self.code, out.split('\t')[0].rstrip(':'), out)
Esempio n. 41
0
    def test_create_object(self):
        '''Test uploading small and large files.'''
        container_name = self.getUniqueString('container')
        self.addDetail('container', content.text_content(container_name))
        self.addCleanup(self.demo_cloud.delete_container, container_name)
        self.demo_cloud.create_container(container_name)
        self.assertEqual(container_name,
                         self.demo_cloud.list_containers()[0]['name'])
        # (total_size, segment_count) pairs exercising single- and
        # multi-segment uploads.
        # NOTE(review): the second entry's comment said "64MB" but
        # 64 * 1024 is 64K -- confirm whether 64 * 1024 * 1024 was intended.
        sizes = (
            (64 * 1024, 1),  # 64K, one segment
            (64 * 1024, 5)   # 64K split over 5 segments; comment said 64MB
        )
        for size, nseg in sizes:
            segment_size = int(round(size / nseg))
            with tempfile.NamedTemporaryFile() as fake_file:
                # Random latin-1-encodable payload of exactly `size` bytes.
                fake_content = ''.join(random.SystemRandom().choice(
                    string.ascii_uppercase + string.digits)
                    for _ in range(size)).encode('latin-1')

                fake_file.write(fake_content)
                fake_file.flush()
                name = 'test-%d' % size
                self.addCleanup(
                    self.demo_cloud.delete_object, container_name, name)
                self.demo_cloud.create_object(
                    container_name, name,
                    fake_file.name,
                    segment_size=segment_size,
                    metadata={'foo': 'bar'})
                self.assertFalse(self.demo_cloud.is_object_stale(
                    container_name, name,
                    fake_file.name
                    )
                )
            # Metadata set at create time must be retrievable.
            self.assertEqual(
                'bar', self.demo_cloud.get_object_metadata(
                    container_name, name)['x-object-meta-foo']
            )
            self.demo_cloud.update_object(container=container_name, name=name,
                                          metadata={'testk': 'testv'})
            self.assertEqual(
                'testv', self.demo_cloud.get_object_metadata(
                    container_name, name)['x-object-meta-testk']
            )
            try:
                self.assertIsNotNone(
                    self.demo_cloud.get_object(container_name, name))
            except exc.OpenStackCloudException as e:
                # NOTE(review): both details use the same 'failed_response'
                # name, so the second addDetail replaces the first -- confirm
                # whether distinct names were intended.
                self.addDetail(
                    'failed_response',
                    content.text_content(str(e.response.headers)))
                self.addDetail(
                    'failed_response',
                    content.text_content(e.response.text))
            self.assertEqual(
                name,
                self.demo_cloud.list_objects(container_name)[0]['name'])
            self.assertTrue(
                self.demo_cloud.delete_object(container_name, name))
        # After deleting all objects, the container is empty but still exists.
        self.assertEqual([], self.demo_cloud.list_objects(container_name))
        self.assertEqual(container_name,
                         self.demo_cloud.list_containers()[0]['name'])
        self.demo_cloud.delete_container(container_name)
Esempio n. 42
0
 def get_details(self):
     """Return the expected and actual values wrapped as text details."""
     return {
         name: content.text_content(value)
         for name, value in (('expected', self.expected),
                             ('actual', self.actual))
     }
Esempio n. 43
0
 def addDetail(self, logfile, text):
     """Attach *text* under *logfile*, wrapping plain strings as content.

     Non-string values are assumed to already be content objects and are
     passed through unchanged.
     """
     # isinstance (rather than `type(text) is str`) is the idiomatic type
     # check and also accepts str subclasses, which behave identically here.
     if isinstance(text, str):
         text = content.text_content(text)
     super(BaseTestCase, self).addDetail(logfile, text)
Esempio n. 44
0
 def _add_detail(self, detail_str):
     """Attach *detail_str* keyed by the caller's file name and line number."""
     caller = inspect.stack()[1]
     label = '%s:%s ' % (caller[1], caller[2])
     self.addDetail(label, content.text_content(detail_str))
Esempio n. 45
0
    def test_update_policy_on_cluster(self):
        """Attach a scaling policy to a cluster, then disable it via update."""
        profile_name = "test_profile"
        # Minimal os.nova.server profile spec shared across these tests.
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": base.IMAGE_NAME,
                "networks": [{
                    "network": "private"
                }],
                "security_groups": ["default"]
            },
            "type": "os.nova.server",
            "version": 1.0
        }

        self.addDetail('profile', content.text_content(profile_name))
        # Test that we can create a profile and we get it returned

        profile = self.user_cloud.create_cluster_profile(name=profile_name,
                                                         spec=spec)

        self.addCleanup(self.cleanup_profile, profile['id'])

        cluster_name = 'example_cluster'
        # Zero capacity: no nodes are actually booted for this test.
        desired_capacity = 0

        self.addDetail('cluster', content.text_content(cluster_name))

        # Test that we can create a cluster and we get it returned
        cluster = self.user_cloud.create_cluster(
            name=cluster_name,
            profile=profile,
            desired_capacity=desired_capacity)

        self.addCleanup(self.cleanup_cluster, cluster['cluster']['id'])

        policy_name = 'example_policy'
        # Scaling policy spec: shrink by one node per CLUSTER_SCALE_IN event.
        spec = {
            "properties": {
                "adjustment": {
                    "min_step": 1,
                    "number": 1,
                    "type": "CHANGE_IN_CAPACITY"
                },
                "event": "CLUSTER_SCALE_IN"
            },
            "type": "senlin.policy.scaling",
            "version": "1.0"
        }

        self.addDetail('policy', content.text_content(policy_name))

        # Test that we can create a policy and we get it returned

        policy = self.user_cloud.create_cluster_policy(name=policy_name,
                                                       spec=spec)

        self.addCleanup(self.cleanup_policy, policy['id'],
                        cluster['cluster']['id'])

        # Test that we can attach policy to cluster and get True returned

        attach_cluster = self.user_cloud.get_cluster_by_id(
            cluster['cluster']['id'])
        attach_policy = self.user_cloud.get_cluster_policy_by_id(policy['id'])

        self.user_cloud.attach_policy_to_cluster(attach_cluster,
                                                 attach_policy,
                                                 is_enabled=True)

        # Wait until the attachment is reflected on the cluster record.
        wait_attach = wait_for_status(self.user_cloud.get_cluster_by_id,
                                      {'name_or_id': cluster['cluster']['id']},
                                      'policies',
                                      ["{policy}".format(policy=policy['id'])])

        get_old_policy = self.user_cloud.get_policy_on_cluster(
            cluster['cluster']["id"], policy['id'])

        # Test that we can update the policy on cluster
        policy_update = self.user_cloud.update_policy_on_cluster(
            attach_cluster, attach_policy, is_enabled=False)

        get_old_policy.update({'enabled': False})

        # Wait for the disable to take effect before re-reading the policy.
        wait_update = wait_for_status(
            self.user_cloud.get_policy_on_cluster, {
                'name_or_id': cluster['cluster']['id'],
                'policy_name_or_id': policy['id']
            }, 'enabled', False)

        get_new_policy = self.user_cloud.get_policy_on_cluster(
            cluster['cluster']["id"], policy['id'])

        # last_op timestamps differ between reads; blank them before compare.
        get_old_policy['last_op'] = None
        get_new_policy['last_op'] = None

        self.assertTrue(policy_update)
        self.assertEqual(get_new_policy, get_old_policy)
        self.assertTrue(wait_attach)
        self.assertTrue(wait_update)
Esempio n. 46
0
    def test_delete_cluster(self):
        """Deleting a cluster succeeds with a policy and receiver attached."""
        profile_name = "test_profile"
        # Minimal os.nova.server profile spec shared across these tests.
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": base.IMAGE_NAME,
                "networks": [{
                    "network": "private"
                }],
                "security_groups": ["default"]
            },
            "type": "os.nova.server",
            "version": 1.0
        }

        self.addDetail('profile', content.text_content(profile_name))
        # Test that we can create a profile and we get it returned

        profile = self.user_cloud.create_cluster_profile(name=profile_name,
                                                         spec=spec)

        self.addCleanup(self.cleanup_profile, profile['id'])

        cluster_name = 'example_cluster'
        # Zero capacity: no nodes are actually booted for this test.
        desired_capacity = 0

        self.addDetail('cluster', content.text_content(cluster_name))

        # Test that we can create a cluster and we get it returned
        cluster = self.user_cloud.create_cluster(
            name=cluster_name,
            profile=profile,
            desired_capacity=desired_capacity)

        self.addCleanup(self.cleanup_cluster, cluster['cluster']['id'])

        policy_name = 'example_policy'
        # Scaling policy spec: shrink by one node per CLUSTER_SCALE_IN event.
        spec = {
            "properties": {
                "adjustment": {
                    "min_step": 1,
                    "number": 1,
                    "type": "CHANGE_IN_CAPACITY"
                },
                "event": "CLUSTER_SCALE_IN"
            },
            "type": "senlin.policy.scaling",
            "version": "1.0"
        }

        self.addDetail('policy', content.text_content(policy_name))

        # Test that we can create a policy and we get it returned

        policy = self.user_cloud.create_cluster_policy(name=policy_name,
                                                       spec=spec)

        self.addCleanup(self.cleanup_policy, policy['id'])

        # Test that we can attach policy to cluster and get True returned
        attach_cluster = self.user_cloud.get_cluster_by_id(
            cluster['cluster']['id'])
        attach_policy = self.user_cloud.get_cluster_policy_by_id(policy['id'])

        self.user_cloud.attach_policy_to_cluster(attach_cluster,
                                                 attach_policy,
                                                 is_enabled=True)

        receiver_name = "example_receiver"
        receiver_type = "webhook"

        self.addDetail('receiver', content.text_content(receiver_name))

        # Test that we can create a receiver and we get it returned

        self.user_cloud.create_cluster_receiver(
            name=receiver_name,
            receiver_type=receiver_type,
            cluster_name_or_id=cluster['cluster']['id'],
            action='CLUSTER_SCALE_OUT')

        # Test that we can delete cluster and get True returned
        cluster_delete = self.user_cloud.delete_cluster(
            cluster['cluster']['id'])
        self.assertTrue(cluster_delete)
Esempio n. 47
0
 def _setUp(self):
     """Deliberately broken set-up: records a detail, then raises."""
     fixtures.Fixture._setUp(self)
     detail = content.text_content("foobar")
     self.addDetail('broken', detail)
     raise Exception("_setUp broke")
Esempio n. 48
0
 def test_add_xfail_details(self):
     """addExpectedFailure forwards its details to the wrapped result."""
     detail_map = {"foo": text_content("bar")}
     self.result.startTest(self)
     self.result.addExpectedFailure(self, details=detail_map)
     self.result.stopTest(self)
     self.assertCalled(status='xfail', details=detail_map)
Esempio n. 49
0
 def _add_output_detail(self, output):
     """Record *output* (stringified) under the 'output' detail name."""
     text = content.text_content(str(output))
     self.addDetail('output', text)
Esempio n. 50
0
 def _add_reason(self, reason):
     """Record *reason* under the 'reason' detail name."""
     reason_content = content.text_content(reason)
     self.addDetail('reason', reason_content)
Esempio n. 51
0
 def setUp(self):
     """Record two details ('aaa' and 'bbb') during set-up."""
     fixtures.Fixture.setUp(self)
     for name, value in (('aaa', "foo"), ('bbb', "bar")):
         self.addDetail(name, content.text_content(value))
Esempio n. 52
0
 def test_foo(self):
     """Record a detail and then install the shared module-level fixture."""
     detail = content.text_content("foo ok")
     self.addDetail('foo_content', detail)
     self.useFixture(fixture)
Esempio n. 53
0
    # case. Use AsynchronousDeferredRunTestForBrokenTwisted, which loops the
    # reactor a couple of times after the test is done.
    async_factory = AsynchronousDeferredRunTestForBrokenTwisted.make_factory(
        timeout=timeout.total_seconds(),
        suppress_twisted_logging=False,
        store_twisted_logs=False,
    )
    return retry_flaky(async_factory)


# By default, asynchronous tests are timed out after 2 minutes.
DEFAULT_ASYNC_TIMEOUT = timedelta(minutes=2)


def _test_skipped(case, result, exception):
    """Report *case* as skipped on *result*, using the exception text as reason.

    Uses ``str`` rather than the Python 2-only ``unicode`` builtin, which
    raises NameError on Python 3.
    """
    result.addSkip(case, details={'reason': text_content(str(exception))})


class AsyncTestCase(testtools.TestCase, _MktempMixin):
    """
    Base class for asynchronous test cases.

    :ivar reactor: The Twisted reactor that the test is being run in. Set by
        ``async_runner`` and only available for the duration of the test.
    """

    run_tests_with = async_runner(timeout=DEFAULT_ASYNC_TIMEOUT)
    # See comment on TestCase.skipException.
    skipException = SkipTest

    def __init__(self, *args, **kwargs):
Esempio n. 54
0
    def test_update_cluster_receiver(self):
        """Renaming a cluster receiver updates only the name field.

        Builds the prerequisites (profile -> cluster -> webhook receiver),
        then calls update_cluster_receiver with a new name and checks that
        id, type and cluster_id are untouched while the name changed.
        """
        profile_name = "test_profile"
        spec = {
            "properties": {
                "flavor": "m1.tiny",
                "image": base.IMAGE_NAME,
                "networks": [{"network": "private"}],
                "security_groups": ["default"],
            },
            "type": "os.nova.server",
            "version": 1.0,
        }
        self.addDetail('profile', content.text_content(profile_name))

        # A profile is required before a cluster can be created.
        profile = self.user_cloud.create_cluster_profile(
            name=profile_name, spec=spec)
        self.addCleanup(self.cleanup_profile, profile['id'])

        cluster_name = 'example_cluster'
        self.addDetail('cluster', content.text_content(cluster_name))

        # An empty cluster for the receiver to attach to.
        cluster = self.user_cloud.create_cluster(
            name=cluster_name,
            profile=profile,
            desired_capacity=0)
        cluster_id = cluster['cluster']['id']
        self.addCleanup(self.cleanup_cluster, cluster_id)

        receiver_name = "example_receiver"
        self.addDetail('receiver', content.text_content(receiver_name))

        # The receiver under test: a webhook triggering a scale-out.
        receiver = self.user_cloud.create_cluster_receiver(
            name=receiver_name,
            receiver_type="webhook",
            cluster_name_or_id=cluster_id,
            action='CLUSTER_SCALE_OUT')
        self.addCleanup(self.cleanup_receiver, receiver['id'])

        # Rename the receiver; every other field must remain unchanged.
        updated = self.user_cloud.update_cluster_receiver(
            receiver['id'], new_name='new_receiver_name')
        self.assertEqual(updated['receiver']['id'], receiver['id'])
        self.assertEqual(updated['receiver']['type'], receiver['type'])
        self.assertEqual(updated['receiver']['cluster_id'],
                         receiver['cluster_id'])
        self.assertEqual(updated['receiver']['name'], 'new_receiver_name')
Esempio n. 55
0
    def test_yaml_snippet(self, mock):
        """Render the fixture YAML and compare it to the expected XML.

        Loads the test's YAML input and expected XML fixture, wires a mocked
        ``iter_entry_points`` so the module registry resolves entry points
        from the checked-out setup.cfg, builds the appropriate project/view
        wrapper from the YAML's ``project-type``/``view-type``, generates the
        XML, verifies the output path, and finally diff-matches the prettified
        XML against the expected fixture (with ELLIPSIS support).

        :param mock: patched ``pkg_resources.iter_entry_points`` (injected by
            the ``@mock.patch`` decorator on the class/method).
        """
        # Some parameterized cases have no YAML input; nothing to check.
        if not self.in_filename:
            return

        jjb_config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        # Build entry-point groups from setup.cfg so the registry sees the
        # same modules a real installation would expose.
        e = pkg_resources.EntryPoint.parse
        d = pkg_resources.Distribution()
        config = configparser.ConfigParser()
        config.read(os.path.dirname(__file__) + "/../setup.cfg")
        groups = {}
        for key in config["entry_points"]:
            groups[key] = list()
            for line in config["entry_points"][key].split("\n"):
                if "" == line.strip():
                    continue
                groups[key].append(e(line, dist=d))

        def mock_iter_entry_points(group, name=None):
            return (entry for entry in groups[group]
                    if name is None or name == entry.name)

        mock.side_effect = mock_iter_entry_points
        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)

        pub = self.klass(registry)

        # Select the project wrapper matching the declared project-type.
        project = None
        if "project-type" in yaml_content:
            if yaml_content["project-type"] == "maven":
                project = project_maven.Maven(registry)
            elif yaml_content["project-type"] == "matrix":
                project = project_matrix.Matrix(registry)
            elif yaml_content["project-type"] == "flow":
                project = project_flow.Flow(registry)
            elif yaml_content["project-type"] == "githuborg":
                project = project_githuborg.GithubOrganization(registry)
            elif yaml_content["project-type"] == "multijob":
                project = project_multijob.MultiJob(registry)
            elif yaml_content["project-type"] == "multibranch":
                project = project_multibranch.WorkflowMultiBranch(registry)
            elif yaml_content["project-type"] == "multibranch-defaults":
                project = project_multibranch.WorkflowMultiBranchDefaults(
                    registry)  # noqa
            elif yaml_content["project-type"] == "externaljob":
                project = project_externaljob.ExternalJob(registry)

        # Views use the same mechanism; an unknown view-type is an error
        # (unlike project-type, which silently falls through above).
        if "view-type" in yaml_content:
            if yaml_content["view-type"] == "all":
                project = view_all.All(registry)
            elif yaml_content["view-type"] == "delivery_pipeline":
                project = view_delivery_pipeline.DeliveryPipeline(registry)
            elif yaml_content["view-type"] == "list":
                project = view_list.List(registry)
            elif yaml_content["view-type"] == "nested":
                project = view_nested.Nested(registry)
            elif yaml_content["view-type"] == "pipeline":
                project = view_pipeline.Pipeline(registry)
            elif yaml_content["view-type"] == "sectioned":
                project = view_sectioned.Sectioned(registry)
            else:
                raise InvalidAttributeError("view-type",
                                            yaml_content["view-type"])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element("project")

        # Generate the XML tree directly with modules/general
        pub.gen_xml(xml_project, yaml_content)

        # check output file is under correct path
        if "name" in yaml_content:
            prefix = os.path.dirname(self.in_filename)
            # split using '/' since fullname uses URL path separator
            expected_folders = [
                os.path.normpath(
                    os.path.join(
                        prefix,
                        "/".join(
                            parser._getfullname(yaml_content).split("/")[:-1]),
                    ))
            ]
            actual_folders = [os.path.dirname(f) for f in self.out_filenames]

            # assertEquals is a deprecated alias removed in Python 3.12;
            # use the canonical assertEqual.
            self.assertEqual(
                expected_folders,
                actual_folders,
                "Output file under wrong path, was '%s', should be '%s'" % (
                    self.out_filenames[0],
                    os.path.join(expected_folders[0],
                                 os.path.basename(self.out_filenames[0])),
                ),
            )

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF),
        )
Esempio n. 56
0
 def setUp(self):
     """Broken setUp: attach a detail, then fail unconditionally."""
     fixtures.Fixture.setUp(self)
     detail = content.text_content("foobar")
     self.addDetail('content', detail)
     raise Exception()
Esempio n. 57
0
 def test_add_error_details(self):
     """Details passed to addError reach the wrapped result."""
     self.result.startTest(self)
     details = dict(foo=text_content("bar"))
     self.result.addError(self, details=details)
     self.result.stopTest(self)
     self.assertCalled(status='error', details=details)
Esempio n. 58
0
 def run(self, result):
     """Simulate a run that errors, attaching a traceback detail."""
     result.startTest(self)
     details = {'traceback': content.text_content("tb")}
     result.addError(self, None, details=details)
     result.stopTest(self)
Esempio n. 59
0
        # Optimization to stop running early if there's no way that we can
        # reach the minimum number of successes.
        max_fails = flaky.max_runs - flaky.min_passes
        while (successes < flaky.min_passes
               and len(results) - successes <= max_fails):
            was_successful, result_type, details = self._attempt_test(case)
            if was_successful:
                successes += 1
            results.append((result_type, details))
        successful = successes >= flaky.min_passes

        flaky_data = flaky.to_dict()
        flaky_data.update({'runs': len(results), 'passes': successes})
        flaky_details = {
            'flaky': text_content(pformat(flaky_data)),
        }
        combined_details = _combine_details([flaky_details] +
                                            list(r[1] for r in results))

        if successful:
            skip_reported = False
            for result_type, details in results:
                if result_type == _ResultType.skip:
                    result.addSkip(case, details=details)
                    skip_reported = True

            if not skip_reported:
                Message.new(
                    message_type=u"flocker:test:flaky",
                    id=case.id(),
Esempio n. 60
0
    def test_cluster_templates(self):
        '''End-to-end CRUD check for Magnum cluster templates.

        Creates a keypair and a cluster template, then exercises list, get
        (by uuid and by name), partial update, and delete, asserting the
        returned fields at each step.
        '''
        name = 'fake-cluster_template'
        # Expected defaults for fields we do not set at creation time.
        server_type = 'vm'
        public = False
        image_id = 'fedora-atomic-f23-dib'
        tls_disabled = False
        registry_enabled = False
        coe = 'kubernetes'
        keypair_id = 'testkey'

        self.addDetail('cluster_template', content.text_content(name))
        self.addCleanup(self.cleanup, name)

        # generate a keypair to add to nova
        # NOTE(review): hard-coded /tmp/.ssh path; key file is not cleaned
        # up afterwards.
        ssh_directory = '/tmp/.ssh'
        if not os.path.isdir(ssh_directory):
            os.mkdir(ssh_directory)
        subprocess.call([
            'ssh-keygen', '-t', 'rsa', '-N', '', '-f',
            '%s/id_rsa_shade' % ssh_directory
        ])

        # add keypair to nova
        with open('%s/id_rsa_shade.pub' % ssh_directory) as f:
            key_content = f.read()
            self.demo_cloud.create_keypair('testkey', key_content)

        # Test we can create a cluster_template and we get it returned
        self.ct = self.demo_cloud.create_cluster_template(
            name=name, image_id=image_id, keypair_id=keypair_id, coe=coe)
        self.assertEqual(self.ct['name'], name)
        self.assertEqual(self.ct['image_id'], image_id)
        self.assertEqual(self.ct['keypair_id'], keypair_id)
        self.assertEqual(self.ct['coe'], coe)
        self.assertEqual(self.ct['registry_enabled'], registry_enabled)
        self.assertEqual(self.ct['tls_disabled'], tls_disabled)
        self.assertEqual(self.ct['public'], public)
        self.assertEqual(self.ct['server_type'], server_type)

        # Test that we can list cluster_templates
        cluster_templates = self.demo_cloud.list_cluster_templates()
        self.assertIsNotNone(cluster_templates)

        # Test we get the same cluster_template with the
        # get_cluster_template method
        cluster_template_get = self.demo_cloud.get_cluster_template(
            self.ct['uuid'])
        self.assertEqual(cluster_template_get['uuid'], self.ct['uuid'])

        # Test the get method also works by name
        cluster_template_get = self.demo_cloud.get_cluster_template(name)
        self.assertEqual(cluster_template_get['name'], self.ct['name'])

        # Test we can update a field on the cluster_template and only that
        # field is updated
        cluster_template_update = self.demo_cloud.update_cluster_template(
            self.ct['uuid'], 'replace', tls_disabled=True)
        self.assertEqual(cluster_template_update['uuid'], self.ct['uuid'])
        self.assertEqual(cluster_template_update['tls_disabled'], True)

        # Test we can delete and get True returned
        cluster_template_delete = self.demo_cloud.delete_cluster_template(
            self.ct['uuid'])
        self.assertTrue(cluster_template_delete)