Example #1
    def test_clean_up_another_fs(self, mock_stat, mock_image_service,
                                 mock_statvfs, cache_cleanup_list_mock):
        # Not enough space, need to cleanup second cache
        mock_stat.side_effect = [mock.Mock(st_dev=1),
                                 mock.Mock(st_dev=2),
                                 mock.Mock(st_dev=1)]
        mock_show = mock_image_service.return_value.show
        mock_show.return_value = dict(size=42)
        mock_statvfs.side_effect = [
            mock.Mock(f_frsize=1, f_bavail=1),
            mock.Mock(f_frsize=1, f_bavail=1024)
        ]

        cache_cleanup_list_mock.__iter__.return_value = self.cache_cleanup_list
        image_cache.clean_up_caches(None, 'master_dir', [('uuid', 'path')])

        mock_show.assert_called_once_with('uuid')
        mock_statvfs.assert_called_with('master_dir')
        self.assertEqual(2, mock_statvfs.call_count)
        self.mock_second_cache.return_value.clean_up.assert_called_once_with(
            amount=(42 - 1))
        self.assertFalse(self.mock_first_cache.return_value.clean_up.called)

        # Since first cache exists on a different partition, it wouldn't be
        # considered for cleanup.
        mock_stat_calls_expected = [mock.call('master_dir'),
                                    mock.call('first_cache_dir'),
                                    mock.call('second_cache_dir')]
        mock_statvfs_calls_expected = [mock.call('master_dir'),
                                       mock.call('master_dir')]
        self.assertEqual(mock_stat_calls_expected, mock_stat.mock_calls)
        self.assertEqual(mock_statvfs_calls_expected, mock_statvfs.mock_calls)
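
A minimal standalone sketch of the two mock features Example #1 leans on: a
side_effect list that hands out successive return values, and comparing the
recorded calls against a list of mock.call objects. The names and paths here
are illustrative only; the behaviour is the same with the standalone mock
package or unittest.mock.

import mock

stat = mock.Mock()
stat.side_effect = [mock.Mock(st_dev=1), mock.Mock(st_dev=2)]

first = stat('/a')   # first side_effect item
second = stat('/b')  # second side_effect item

assert first.st_dev == 1 and second.st_dev == 2
# mock_calls records every call in order, so it can be compared directly
assert stat.mock_calls == [mock.call('/a'), mock.call('/b')]
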
Example #2
 def test_pruning_query_flush(self, tr_get, session):
     cache = M.repository.ModelCache(max_queries=3, max_instances=2)
     # ensure cache doesn't store None instances
     tree1 = mock.Mock(name='tree1',
                       spec=['_id', '_val'], _id='tree1', val='bar')
     tree2 = mock.Mock(name='tree2',
                       spec=['_id', '_val'], _id='tree2', val='fuz')
     tr_get.return_value = tree2
     cache.set(M.repository.Tree, {'_id': 'tree1'}, tree1)
     cache.set(M.repository.Tree, {'_id': 'tree2'}, tree2)
     cache.get(M.repository.Tree, {'query_1': 'tree2'})
     cache.get(M.repository.Tree, {'query_2': 'tree2'})
     cache.get(M.repository.Tree, {'query_3': 'tree2'})
     self.assertEqual(cache._query_cache[M.repository.Tree], {
         (('query_1', 'tree2'),): 'tree2',
         (('query_2', 'tree2'),): 'tree2',
         (('query_3', 'tree2'),): 'tree2',
     })
     self.assertEqual(cache._instance_cache[M.repository.Tree], {
         'tree1': tree1,
         'tree2': tree2,
     })
     self.assertEqual(session.call_args_list,
                      [mock.call(tree1), mock.call(tree2)])
     self.assertEqual(session.return_value.flush.call_args_list,
                      [mock.call(tree1), mock.call(tree2)])
     assert not session.return_value.expunge.called
 def test_build_box_predictor_with_mask_branch(self):
   box_predictor_proto = box_predictor_pb2.BoxPredictor()
   box_predictor_proto.mask_rcnn_box_predictor.fc_hyperparams.op = (
       hyperparams_pb2.Hyperparams.FC)
   box_predictor_proto.mask_rcnn_box_predictor.conv_hyperparams.op = (
       hyperparams_pb2.Hyperparams.CONV)
   box_predictor_proto.mask_rcnn_box_predictor.predict_instance_masks = True
   box_predictor_proto.mask_rcnn_box_predictor.mask_prediction_conv_depth = 512
   mock_argscope_fn = mock.Mock(return_value='arg_scope')
   box_predictor = box_predictor_builder.build(
       argscope_fn=mock_argscope_fn,
       box_predictor_config=box_predictor_proto,
       is_training=True,
       num_classes=90)
   mock_argscope_fn.assert_has_calls(
       [mock.call(box_predictor_proto.mask_rcnn_box_predictor.fc_hyperparams,
                  True),
        mock.call(box_predictor_proto.mask_rcnn_box_predictor.conv_hyperparams,
                  True)], any_order=True)
   self.assertFalse(box_predictor._use_dropout)
   self.assertAlmostEqual(box_predictor._dropout_keep_prob, 0.5)
   self.assertEqual(box_predictor.num_classes, 90)
   self.assertTrue(box_predictor._is_training)
   self.assertEqual(box_predictor._box_code_size, 4)
   self.assertTrue(box_predictor._predict_instance_masks)
   self.assertEqual(box_predictor._mask_prediction_conv_depth, 512)
   self.assertFalse(box_predictor._predict_keypoints)
    def checkDiffTacFile(self, quiet):
        """
        Utility function to test calling _makeBuildbotTac() on base directory
        with a buildbot.tac file that needs to be changed.

        @param quiet: the value of 'quiet' argument for _makeBuildbotTac()
        """
        # set-up mocks to simulate buildbot.tac file in basedir
        self.patch(os.path, "exists", mock.Mock(return_value=True))
        self.setUpOpen("old-tac-contents")

        # call _makeBuildbotTac()
        create_slave._makeBuildbotTac("bdir", "new-tac-contents", quiet)

        # check that buildbot.tac.new file was created with expected contents
        tac_file_path = os.path.join("bdir", "buildbot.tac")
        self.open.assert_has_calls([mock.call(tac_file_path, "rt"),
                                    mock.call(tac_file_path + ".new", "wt")])
        self.fileobj.write.assert_called_once_with("new-tac-contents")
        self.chmod.assert_called_once_with(tac_file_path + ".new", 0o600)

        # check output to the log
        if quiet:
            self.assertWasQuiet()
        else:
            self.assertLogged("not touching existing buildbot.tac",
                              "creating buildbot.tac.new instead")
Example #5
    def test_reload_allocations(self):
        exp_host_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/host'
        exp_host_data = """
00:00:80:aa:bb:cc,192-168-0-2.openstacklocal,192.168.0.2
00:00:f3:aa:bb:cc,fdca-3ba5-a17a-4ba3--2.openstacklocal,fdca:3ba5:a17a:4ba3::2
00:00:0f:aa:bb:cc,192-168-0-3.openstacklocal,192.168.0.3
00:00:0f:aa:bb:cc,fdca-3ba5-a17a-4ba3--3.openstacklocal,fdca:3ba5:a17a:4ba3::3
""".lstrip()
        exp_opt_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/opts'
        fake_v6 = 'gdca:3ba5:a17a:4ba3::1'
        fake_v6_cidr = 'gdca:3ba5:a17a:4ba3::/64'
        exp_opt_data = """
tag:tag0,option:dns-server,8.8.8.8
tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1
tag:tag0,option:router,192.168.0.1
tag:tag1,option:dns-server,%s
tag:tag1,option:classless-static-route,%s,%s""".lstrip() % (fake_v6,
                                                            fake_v6_cidr,
                                                            fake_v6)

        exp_args = ['ip', 'netns', 'exec', 'qdhcp-ns', 'kill', '-HUP', 5]

        with mock.patch('os.path.isdir') as isdir:
            isdir.return_value = True
            with mock.patch.object(dhcp.Dnsmasq, 'pid') as pid:
                pid.__get__ = mock.Mock(return_value=5)
                dm = dhcp.Dnsmasq(self.conf, FakeDualNetwork(),
                                  namespace='qdhcp-ns')
                dm.reload_allocations()

        self.safe.assert_has_calls([mock.call(exp_host_name, exp_host_data),
                                    mock.call(exp_opt_name, exp_opt_data)])
        self.execute.assert_called_once_with(exp_args, root_helper='sudo',
                                             check_exit_code=True)
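
Example #5 replaces the Dnsmasq.pid property by assigning pid.__get__ on the
patched attribute. A standalone sketch of the more common alternative,
PropertyMock; the Service class and the value 5 are illustrative, not part of
the test above.

from unittest import mock

class Service(object):
    @property
    def pid(self):
        raise RuntimeError('would read the real pid file')

with mock.patch.object(Service, 'pid', new_callable=mock.PropertyMock) as pid:
    pid.return_value = 5
    assert Service().pid == 5  # property reads go through the PropertyMock
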
Example #6
    def test_generate(self):
        with patch('jinja2.environment.Environment') as mock:
            instance = mock.return_value

            template_stream_mock = Mock()
            template_mock = Mock()
            template_mock.stream.return_value = template_stream_mock
            instance.get_template.return_value = template_mock

            generator = MetadataFileGenerator()
            generator.generate('test')

            # Environment#get_template()
            calls = [
                call('test')
            ]
            instance.get_template.assert_has_calls(calls)

            # Template#stream()
            calls = [
                call()
            ]
            template_mock.stream.assert_has_calls(calls)

            # TemplateStream#dump()
            calls = [
                call('test')
            ]
            template_stream_mock.dump.assert_has_calls(calls)
  def check_estimated_size_bytes(self, entity_bytes, timestamp, namespace=None):
    """A helper method to test get_estimated_size_bytes"""

    timestamp_req = helper.make_request(
        self._PROJECT, namespace, helper.make_latest_timestamp_query(namespace))
    timestamp_resp = self.make_stats_response(
        {'timestamp': datastore_helper.from_timestamp(timestamp)})
    kind_stat_req = helper.make_request(
        self._PROJECT, namespace, helper.make_kind_stats_query(
            namespace, self._query.kind[0].name,
            datastore_helper.micros_from_timestamp(timestamp)))
    kind_stat_resp = self.make_stats_response(
        {'entity_bytes': entity_bytes})

    def fake_run_query(req):
      if req == timestamp_req:
        return timestamp_resp
      elif req == kind_stat_req:
        return kind_stat_resp
      else:
        print(kind_stat_req)
        raise ValueError("Unknown req: %s" % req)

    self._mock_datastore.run_query.side_effect = fake_run_query
    self.assertEqual(entity_bytes, ReadFromDatastore.get_estimated_size_bytes(
        self._PROJECT, namespace, self._query, self._mock_datastore))
    self.assertEqual(self._mock_datastore.run_query.call_args_list,
                     [call(timestamp_req), call(kind_stat_req)])
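
When side_effect is a callable rather than a list, the mock invokes it with the
call's arguments and returns its result, which is how fake_run_query above maps
each request to the right canned response. A standalone sketch with
illustrative names:

from unittest import mock

datastore = mock.Mock()

def fake_run_query(req):
    # the response depends on the argument, mirroring the routing above
    return 'timestamp_resp' if req == 'timestamp_req' else 'kind_stat_resp'

datastore.run_query.side_effect = fake_run_query

assert datastore.run_query('timestamp_req') == 'timestamp_resp'
assert datastore.run_query('kind_stat_req') == 'kind_stat_resp'
assert datastore.run_query.call_args_list == [mock.call('timestamp_req'),
                                              mock.call('kind_stat_req')]
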
Example #8
    def test_run_no_messages(self, run_queue):
        """Ensure the return value is False when no messages are processed from
        the queues.
        Ensure all push queues are processed by run().
        Ensure pull queues are skipped.
        """

        from furious.test_stubs.appengine.queues import run

        queue_descs = [
            {'name': 'default', 'mode': 'push', 'bucket_size': 100},
            {'name': 'default-pull', 'mode': 'pull', 'bucket_size': 5},
            {'name': 'my_queue', 'mode': 'push', 'bucket_size': 100}]

        queue_service = Mock()
        queue_service.GetQueues.side_effect = [queue_descs]

        # Simulate that there are no messages processed from any queue.
        run_queue.return_value = 0

        run_result = run(queue_service)

        # Expect 'default' and 'my_queue' to be processed since the other one
        # is a pull queue.
        expected_call_args_list = [call(queue_service, 'default', None, None, False),
                                   call(queue_service, 'my_queue', None, None, False)]

        # Ensure run_queue tries to process the push queues.
        self.assertEqual(run_queue.call_args_list,
                         expected_call_args_list)

        # Make sure that 0 is the number of messages processed.
        self.assertEqual(0, run_result['tasks_processed'])
        self.assertEqual(1, run_result['iterations'])
Example #9
    def test_run_queue(self, _execute_task):
        """When run() is called, ensure tasks are run, and
        the queue is flushed to remove run tasks.  Also, ensure True
        is returned since messages were processed.
        """

        from furious.test_stubs.appengine.queues import run_queue

        queue_service = Mock()
        queue_service.GetTasks.return_value = ['task1', 'task2', 'task3']

        num_processed = run_queue(queue_service, 'default')

        # Expect _execute_task() to be called for each task
        expected_call_args_list = [call('task1', None, None),
                                   call('task2', None, None),
                                   call('task3', None, None)]

        self.assertEqual(_execute_task.call_args_list,
                         expected_call_args_list)

        # Make sure FlushQueue was called once to clear the queue after
        # tasks were processed
        self.assertEqual(1, queue_service.FlushQueue.call_count)

        # We should have processed tasks, so verify the num processed.
        self.assertEqual(3, num_processed)
Example #10
    def test_validate(self):
        mock_extension = self.patchobject(
            neutron.NeutronClientPlugin, 'has_extension', return_value=True)
        nc = mock.Mock()
        mock_create = self.patchobject(neutron.NeutronClientPlugin, '_create')
        mock_create.return_value = nc
        mock_find = self.patchobject(neutron.NeutronClientPlugin,
                                     'find_resourceid_by_name_or_id')
        mock_find.side_effect = [
            'foo',
            qe.NeutronClientException(status_code=404)
        ]

        constraint = self.constraint_class()
        ctx = utils.dummy_context()
        if hasattr(constraint, 'extension') and constraint.extension:
            mock_extension.side_effect = [
                False,
                True,
                True,
            ]
            ex = self.assertRaises(
                exception.EntityNotFound,
                constraint.validate_with_client, ctx.clients, "foo"
            )
            expected = ("The neutron extension (%s) could not be found." %
                        constraint.extension)
            self.assertEqual(expected, six.text_type(ex))
        self.assertTrue(constraint.validate("foo", ctx))
        self.assertFalse(constraint.validate("bar", ctx))
        mock_find.assert_has_calls(
            [mock.call(self.resource_type, 'foo',
                       cmd_resource=self.cmd_resource),
             mock.call(self.resource_type, 'bar',
                       cmd_resource=self.cmd_resource)])
Example #11
 def test_get_blkdev_error(self, mock_get_blkdev):
     mock_exec = mock.Mock()
     output = volume_utils.setup_blkio_cgroup('src', 'dst', 1,
                                              execute=mock_exec)
     self.assertIsNone(output)
     mock_get_blkdev.assert_has_calls([mock.call('src'), mock.call('dst')])
     self.assertFalse(mock_exec.called)
Example #12
    def test_commit_no_parents(self):
        mocked_repo = MagicMock()
        mocked_parent = MagicMock()

        mocked_parent.id = 1

        mocked_repo.status.return_value = True
        mocked_repo.index.write_tree.return_value = "tree"
        mocked_repo.revparse_single.return_value = mocked_parent
        mocked_repo.create_commit.return_value = "commit"

        author = ("author_1", "author_2")
        commiter = ("commiter_1", "commiter_2")

        with patch('gitfs.repository.Signature') as mocked_signature:
            mocked_signature.return_value = "signature"

            repo = Repository(mocked_repo)
            commit = repo.commit("message", author, commiter)

            assert commit == "commit"
            assert mocked_repo.status.call_count == 1
            assert mocked_repo.index.write_tree.call_count == 1
            assert mocked_repo.index.write.call_count == 1

            mocked_signature.assert_has_calls([call(*author), call(*commiter)])
            mocked_repo.revparse_single.assert_called_once_with("HEAD")
            mocked_repo.create_commit.assert_called_once_with("HEAD",
                                                              "signature",
                                                              "signature",
                                                              "message",
                                                              "tree", [1])
Example #13
    def _test_plug(self, namespace=None, mtu=None):
        def device_exists(device, namespace=None):
            return device.startswith('brq')

        root_veth = mock.Mock()
        ns_veth = mock.Mock()

        self.ip().add_veth = mock.Mock(return_value=(root_veth, ns_veth))

        self.device_exists.side_effect = device_exists
        br = interface.BridgeInterfaceDriver(self.conf)
        mac_address = 'aa:bb:cc:dd:ee:ff'
        br.plug('01234567-1234-1234-99',
                'port-1234',
                'ns-0',
                mac_address,
                namespace=namespace)

        ip_calls = [mock.call(),
                    mock.call().add_veth('tap0', 'ns-0', namespace2=namespace)]
        ns_veth.assert_has_calls([mock.call.link.set_address(mac_address)])
        if mtu:
            ns_veth.assert_has_calls([mock.call.link.set_mtu(mtu)])
            root_veth.assert_has_calls([mock.call.link.set_mtu(mtu)])

        self.ip.assert_has_calls(ip_calls)

        root_veth.assert_has_calls([mock.call.link.set_up()])
        ns_veth.assert_has_calls([mock.call.link.set_up()])
    def test_initial_delay(self, sleep_mock):
        self.num_runs = 1

        timer = loopingcall.DynamicLoopingCall(self._wait_for_zero)
        timer.start(initial_delay=3).wait()

        sleep_mock.assert_has_calls([mock.call(3), mock.call(1)])
Example #15
    def test_adding_return_value_mock(self):
        for Klass in Mock, MagicMock:
            mock = Klass()
            mock.return_value = MagicMock()

            mock()()
            self.assertEqual(mock.mock_calls, [call(), call()()])
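
The call()() entry above is mock's notation for "the return value was then
called". The same chained bookkeeping applies to method calls; a standalone
sketch (names illustrative):

from unittest.mock import MagicMock, call

m = MagicMock()
m.factory(important=True).deliver()

# the call on the return value is recorded as call.factory().deliver()
assert m.mock_calls == [call.factory(important=True),
                        call.factory().deliver()]
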
    def test_find_usage(self):
        mock_conn = Mock()

        with patch('%s.connect' % self.pb) as mock_connect:
            with patch.multiple(
                    self.pb,
                    _find_usage_vpcs=DEFAULT,
                    _find_usage_subnets=DEFAULT,
                    _find_usage_ACLs=DEFAULT,
                    _find_usage_route_tables=DEFAULT,
                    _find_usage_gateways=DEFAULT,
            ) as mocks:
                cls = _VpcService(21, 43)
                cls.conn = mock_conn
                assert cls._have_usage is False
                cls.find_usage()
        assert mock_connect.mock_calls == [call()]
        assert cls._have_usage is True
        assert mock_conn.mock_calls == []
        for x in [
                '_find_usage_vpcs',
                '_find_usage_subnets',
                '_find_usage_ACLs',
                '_find_usage_route_tables',
                '_find_usage_gateways',
        ]:
            assert mocks[x].mock_calls == [call()]
    def test_interval_adjustment(self, sleep_mock):
        self.num_runs = 2

        timer = loopingcall.DynamicLoopingCall(self._wait_for_zero)
        timer.start(periodic_interval_max=5).wait()

        sleep_mock.assert_has_calls([mock.call(5), mock.call(1)])
Example #18
    def test_create_snapshot_online_with_concurrent_delete(
            self, mock_sleep, mock_snapshot_get):
        self._driver._nova = mock.Mock()

        # Test what happens when progress is so slow that someone
        # decides to delete the snapshot while the last known status is
        # "creating".
        mock_snapshot_get.side_effect = [
            {'status': 'creating', 'progress': '42%'},
            {'status': 'creating', 'progress': '45%'},
            {'status': 'deleting'},
        ]

        with mock.patch.object(self._driver, '_do_create_snapshot') as \
                mock_do_create_snapshot:
            self.assertRaises(exception.RemoteFSConcurrentRequest,
                              self._driver._create_snapshot_online,
                              self._fake_snapshot,
                              self._fake_volume.name,
                              self._fake_snapshot_path)

        mock_do_create_snapshot.assert_called_once_with(
            self._fake_snapshot, self._fake_volume.name,
            self._fake_snapshot_path)
        self.assertEqual([mock.call(1), mock.call(1)],
                         mock_sleep.call_args_list)
        self.assertEqual(3, mock_snapshot_get.call_count)
        mock_snapshot_get.assert_called_with(self._fake_snapshot._context,
                                             self._fake_snapshot.id)
Example #19
    def _test_qemu_img_info(self, mock_basename,
                            mock_qemu_img_info, backing_file, basedir,
                            valid_backing_file=True):
        fake_vol_name = 'fake_vol_name'
        mock_info = mock_qemu_img_info.return_value
        mock_info.image = mock.sentinel.image_path
        mock_info.backing_file = backing_file

        self._driver._VALID_IMAGE_EXTENSIONS = ['vhd', 'vhdx', 'raw', 'qcow2']

        mock_basename.side_effect = [mock.sentinel.image_basename,
                                     mock.sentinel.backing_file_basename]

        if valid_backing_file:
            img_info = self._driver._qemu_img_info_base(
                mock.sentinel.image_path, fake_vol_name, basedir)
            self.assertEqual(mock_info, img_info)
            self.assertEqual(mock.sentinel.image_basename,
                             mock_info.image)
            expected_basename_calls = [mock.call(mock.sentinel.image_path)]
            if backing_file:
                self.assertEqual(mock.sentinel.backing_file_basename,
                                 mock_info.backing_file)
                expected_basename_calls.append(mock.call(backing_file))
            mock_basename.assert_has_calls(expected_basename_calls)
        else:
            self.assertRaises(exception.RemoteFSException,
                              self._driver._qemu_img_info_base,
                              mock.sentinel.image_path,
                              fake_vol_name, basedir)

        mock_qemu_img_info.assert_called_with(mock.sentinel.image_path)
 def test_low_e_open_string(self, mock_compare_notes):
     mock_compare_notes.side_effect = [0, 1]
     note = ('E', 2)
     expected = tuple(note)
     self.assertEqual(notemappings._lowest_string_with(note), expected)
     calls = [call(('E', 2), note), call(('E', 4), note)]
     mock_compare_notes.assert_has_calls(calls)
 def test_middle_e(self, mock_compare_notes):
     mock_compare_notes.side_effect = [-1, 1]
     note = ('E', 3)
     expected = ('E', 2)
     self.assertEqual(notemappings._lowest_string_with(note), expected)
     calls = [call(('E', 2), note), call(('E', 4), note)]
     mock_compare_notes.assert_has_calls(calls)
    def test_remoteGetWorkerInfo_no_info(self):
        # All remote commands tried in remoteGetWorkerInfo are unavailable.
        # This should be a really old worker...
        def side_effect(*args, **kwargs):
            if args[0] == 'print':
                return
            return defer.fail(twisted_pb.RemoteError(
                'twisted.spread.flavors.NoSuchMethod', None, None))

        self.mind.callRemote.side_effect = side_effect
        conn = pb.Connection(self.master, self.worker, self.mind)
        info = yield conn.remoteGetWorkerInfo()

        r = {}
        self.assertEqual(info, r)
        calls = [
            mock.call('getWorkerInfo'),
            mock.call('print',
                      message='buildbot-slave detected, failing back to deprecated buildslave API. '
                              '(Ignoring missing getWorkerInfo method.)'),
            mock.call('getSlaveInfo'),
            mock.call('getCommands'),
            mock.call('getVersion'),
        ]
        self.mind.callRemote.assert_has_calls(calls)
Example #23
    def test_get_unredacted_processed_with_trouble(self):
        # setup some internal behaviors and fake outs
        boto_s3_store = self.setup_mocked_s3_storage(
            TransactionExecutorWithLimitedBackoff
        )
        mocked_bucket = boto_s3_store._connect_to_endpoint.return_value \
            .get_bucket.return_value
        mocked_key = mocked_bucket.new_key.return_value
        mocked_key.get_contents_as_string \
            .side_effect = [
                self._fake_unredacted_processed_crash_as_string()
            ]
        actions = [
            mocked_bucket,
            ABadDeal('second-hit'),
            ABadDeal('first hit'),
        ]

        def temp_failure_fn(key):
            self.assertEqual(key, '120408')
            action = actions.pop()
            if isinstance(action, Exception):
                raise action
            return action

        boto_s3_store._connect_to_endpoint.return_value.get_bucket \
            .side_effect = (
                temp_failure_fn
            )
        # the tested call
        result = boto_s3_store.get_unredacted_processed(
            "936ce666-ff3b-4c7a-9674-367fe2120408"
        )

        # what should have happened internally
        self.assertEqual(boto_s3_store._calling_format.call_count, 3)
        boto_s3_store._calling_format.assert_called_with()

        self.assertEqual(boto_s3_store._connect_to_endpoint.call_count, 3)
        self.assert_s3_connection_parameters(boto_s3_store)

        self.assertEqual(
            boto_s3_store._mocked_connection.get_bucket.call_count,
            3
        )
        boto_s3_store._mocked_connection.get_bucket \
            .assert_has_calls(
                [
                    mock.call('120408'),
                    mock.call('120408'),
                    mock.call('120408'),
                ],
            )

        self.assertEqual(mocked_key.get_contents_as_string.call_count, 1)
        mocked_key.get_contents_as_string.assert_has_calls(
            [mock.call(), ],
        )

        self.assertEqual(result, self._fake_unredacted_processed_crash())
Example #24
    def test_makeRelayCrypto(self, mock_rc, mock_maes, mock_sha1, mock_hexp,
        mock_hext):
        secret_input = 'secret input'
        km = [chr(i) for i in range(96)]

        ret = ntor._makeRelayCrypto(secret_input)

        mock_hext.assert_called_once_with(
            salt='ntor-curve25519-sha256-1:key_extract',
            input_key_material='secret input',
            hash=hashlib.sha256)

        mock_hexp.assert_called_once_with(
            pseudo_random_key='prk',
            info='ntor-curve25519-sha256-1:key_expand',
            length=72,
            hash=hashlib.sha256)

        self.assertEqual(mock_sha1.call_count, 2)
        self.assertEqual(mock_sha1.call_args_list,
            [mock.call(km[:20]), mock.call(km[20:40])])
        self.assertEqual(mock_maes.call_count, 2)
        self.assertEqual(mock_maes.call_args_list,
            [mock.call(km[40:56]), mock.call(km[56:72])])
        mock_rc.assert_called_once_with(
            forward_cipher='cipher', forward_digest='sha1',
            backward_cipher='cipher', backward_digest='sha1')
        self.assertEqual(ret, 'ret')
Example #25
    def test_mysql_mode_locks_unlocks_tables(self, mock_create_dir, mock_get_lvm_info, mock_get_vol_fs_type, mock_popen):
        mock_get_vol_fs_type.return_value = 'xfs'
        mock_get_lvm_info.return_value = {
            'volgroup': 'lvm_volgroup',
            'srcvol': 'lvm_device',
            'snap_path': 'snap_path'}
        mock_process = Mock()
        mock_process.communicate.return_value = '', ''
        mock_process.returncode = 0
        mock_popen.return_value = mock_process

        backup_opt = Mock()
        backup_opt.snapshot = True
        backup_opt.lvm_auto_snap = ''
        backup_opt.path_to_backup = '/just/a/path'
        backup_opt.lvm_dirmount = '/var/mountpoint'
        backup_opt.lvm_snapperm = 'ro'
        backup_opt.mode = 'mysql'
        backup_opt.mysql_db_inst = Mock()
        mock_cursor = Mock()
        backup_opt.mysql_db_inst.cursor.return_value = mock_cursor

        self.assertTrue(lvm.lvm_snap(backup_opt))

        first_call = call('FLUSH TABLES WITH READ LOCK')
        second_call = call('UNLOCK TABLES')
        self.assertEqual(first_call, mock_cursor.execute.call_args_list[0])
        self.assertEqual(second_call, mock_cursor.execute.call_args_list[1])
Example #26
    def test_create_hm_with_vip(self):
        with self.subnet() as subnet:
            with self.health_monitor() as hm:
                with self.pool(provider='radware',
                               subnet_id=subnet['subnet']['id']) as pool:
                    with self.vip(pool=pool, subnet=subnet):

                        self.plugin_instance.create_pool_health_monitor(
                            context.get_admin_context(),
                            hm, pool['pool']['id']
                        )

                        # Test REST calls
                        calls = [
                            mock.call(
                                'POST', '/api/workflow/' + pool['pool']['id'] +
                                '/action/BaseCreate',
                                mock.ANY, driver.TEMPLATE_HEADER
                            ),
                            mock.call(
                                'POST', '/api/workflow/' + pool['pool']['id'] +
                                '/action/BaseCreate',
                                mock.ANY, driver.TEMPLATE_HEADER
                            )
                        ]
                        self.driver_rest_call_mock.assert_has_calls(
                            calls, any_order=True)

                        phm = self.plugin_instance.get_pool_health_monitor(
                            context.get_admin_context(),
                            hm['health_monitor']['id'], pool['pool']['id']
                        )
                        self.assertEqual(phm['status'], constants.ACTIVE)
Example #27
    def test_transfer_accept_invalid_volume(self, mock_notify):
        svc = self.start_service('volume', host='test_host')
        self.addCleanup(svc.stop)
        tx_api = transfer_api.API()
        volume = utils.create_volume(self.ctxt, updated_at=self.updated_at)
        transfer = tx_api.create(self.ctxt, volume.id, 'Description')
        volume = storage.Volume.get_by_id(self.ctxt, volume.id)
        self.assertEqual('awaiting-transfer', volume['status'],
                         'Unexpected state')

        calls = [mock.call(self.ctxt, mock.ANY, "transfer.create.start"),
                 mock.call(self.ctxt, mock.ANY, "transfer.create.end")]
        mock_notify.assert_has_calls(calls)
        self.assertEqual(2, mock_notify.call_count)

        volume.status = 'wrong'
        volume.save()
        self.assertRaises(exception.InvalidVolume,
                          tx_api.accept,
                          self.ctxt, transfer['id'], transfer['auth_key'])
        volume.status = 'awaiting-transfer'
        volume.save()

        # Because the InvalidVolume exception is raised in tx_api, only
        # transfer.accept.start is notified; transfer.accept.end never fires.
        calls = [mock.call(self.ctxt, mock.ANY, "transfer.accept.start")]
        mock_notify.assert_has_calls(calls)
        self.assertEqual(3, mock_notify.call_count)
    def test_remoteGetWorkerInfo_slave_2_16(self):
        """In buildslave 2.16 all information about worker is retrieved in
        a single getSlaveInfo() call."""
        def side_effect(*args, **kwargs):
            if 'getWorkerInfo' in args:
                return defer.fail(twisted_pb.RemoteError(
                    'twisted.spread.flavors.NoSuchMethod', None, None))
            if 'getSlaveInfo' in args:
                return defer.succeed({
                    'info': 'test',
                    'slave_commands': {'x': 1, 'y': 2},
                    'version': 'TheVersion',
                })
            if 'getCommands' in args:
                return defer.succeed({'x': 1, 'y': 2})
            if 'getVersion' in args:
                return defer.succeed('TheVersion')

        self.mind.callRemote.side_effect = side_effect
        conn = pb.Connection(self.master, self.worker, self.mind)
        info = yield conn.remoteGetWorkerInfo()

        r = {'info': 'test', 'worker_commands': {
            'y': 2, 'x': 1}, 'version': 'TheVersion'}
        self.assertEqual(info, r)
        calls = [
            mock.call('getWorkerInfo'),
            mock.call('print',
                      message='buildbot-slave detected, failing back to deprecated buildslave API. '
                              '(Ignoring missing getWorkerInfo method.)'),
            mock.call('getSlaveInfo'),
        ]
        self.mind.callRemote.assert_has_calls(calls)
    def _test_fdb_add(self, proxy_enabled=False):
        fdb_entries = {'net_id':
                       {'ports':
                        {'agent_ip': [constants.FLOODING_ENTRY,
                                      ['port_mac', 'port_ip']]},
                        'network_type': 'vxlan',
                        'segment_id': 1}}

        with mock.patch.object(utils, 'execute',
                               return_value='') as execute_fn, \
                mock.patch.object(ip_lib, 'add_neigh_entry',
                                  return_value='') as add_fn:
            self.lb_rpc.fdb_add(None, fdb_entries)

            expected = [
                mock.call(['bridge', 'fdb', 'show', 'dev', 'vxlan-1'],
                          run_as_root=True),
                mock.call(['bridge', 'fdb', 'add',
                           constants.FLOODING_ENTRY[0],
                           'dev', 'vxlan-1', 'dst', 'agent_ip'],
                          run_as_root=True,
                          check_exit_code=False),
                mock.call(['bridge', 'fdb', 'replace', 'port_mac', 'dev',
                           'vxlan-1', 'dst', 'agent_ip'],
                          run_as_root=True,
                          check_exit_code=False),
            ]
            execute_fn.assert_has_calls(expected)
            if proxy_enabled:
                add_fn.assert_called_with('port_ip', 'port_mac', 'vxlan-1')
            else:
                add_fn.assert_not_called()
Example #30
    def test_check_events_acquired(self, mock_datetime):
        """
        `check_events` checks events in each bucket when they are partitioned.
        """
        self.kz_partition.acquired = True
        self.scheduler_service.startService()
        self.kz_partition.__iter__.return_value = [2, 3]
        self.scheduler_service.log = mock.Mock()
        mock_datetime.utcnow.return_value = 'utcnow'

        responses = [4, 5]
        self.check_events_in_bucket.side_effect = lambda *_: defer.succeed(responses.pop(0))

        d = self.scheduler_service.check_events(100)

        self.assertEqual(self.successResultOf(d), [4, 5])
        self.assertEqual(self.kz_partition.__iter__.call_count, 1)
        self.scheduler_service.log.bind.assert_called_once_with(
            scheduler_run_id='transaction-id', utcnow='utcnow')
        log = self.scheduler_service.log.bind.return_value
        log.msg.assert_called_once_with('Got buckets {buckets}',
                                        buckets=[2, 3], path='/part_path')
        self.assertEqual(self.check_events_in_bucket.mock_calls,
                         [mock.call(log, self.mock_store, 2, 'utcnow', 100),
                          mock.call(log, self.mock_store, 3, 'utcnow', 100)])
Example #31
    def test_update(self, mock_workflow, service_checker):
        service_checker.return_value = True
        flavor = self.flavors.first()
        ngt = self.nodegroup_templates.first()
        configs = self.plugins_configs.first()
        new_name = ngt.name + '-updated'
        UPDATE_URL = reverse('horizon:project:data_processing.clusters:edit',
                             kwargs={'template_id': ngt.id})
        mock_workflow.return_value = {}

        self.mock_extension_supported.return_value = True
        self.mock_availability_zone_list.return_value = \
            self.availability_zones.list()
        self.mock_volume_type_list.return_value = []
        self.mock_flavor_list.return_value = [flavor]
        self.mock_plugin_get_version_details.return_value = configs
        self.mock_floating_ip_pools_list.return_value = []
        self.mock_security_group_list.return_value = []
        self.mock_nodegroup_template_get.return_value = ngt
        self.mock_nodegroup_template_update.return_value = True

        res = self.client.post(
            UPDATE_URL, {
                'ng_id': ngt.id,
                'nodegroup_name': new_name,
                'plugin_name': ngt.plugin_name,
                ngt.plugin_name + '_version': '1.2.1',
                'hadoop_version': ngt.hadoop_version,
                'description': ngt.description,
                'flavor': flavor.id,
                'availability_zone': None,
                'storage': 'ephemeral_drive',
                'volumes_per_node': 0,
                'volumes_size': 0,
                'volume_type': None,
                'volume_local_to_instance': False,
                'volumes_availability_zone': None,
                'floating_ip_pool': None,
                'is_proxy_gateway': False,
                'security_autogroup': True,
                'processes': 'HDFS:namenode',
                'use_autoconfig': True
            })

        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)
        self.mock_extension_supported.assert_called_once_with(
            test.IsHttpRequest(), 'AvailabilityZones')
        self.mock_availability_zone_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_volume_type_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_flavor_list.assert_called_once_with(test.IsHttpRequest())
        self.assert_mock_multiple_calls_with_same_arguments(
            self.mock_plugin_get_version_details, 5,
            mock.call(test.IsHttpRequest(), ngt.plugin_name,
                      ngt.hadoop_version))
        self.mock_floating_ip_pools_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_security_group_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_nodegroup_template_get.assert_called_once_with(
            test.IsHttpRequest(), ngt.id)
        self.mock_nodegroup_template_update.assert_called_once_with(
            request=test.IsHttpRequest(),
            ngt_id=ngt.id,
            name=new_name,
            plugin_name=ngt.plugin_name,
            hadoop_version=ngt.hadoop_version,
            flavor_id=flavor.id,
            description=ngt.description,
            volumes_per_node=0,
            volumes_size=None,
            volume_type=None,
            volume_local_to_instance=False,
            volumes_availability_zone=None,
            node_processes=['namenode'],
            node_configs={},
            floating_ip_pool=None,
            security_groups=[],
            auto_security_group=True,
            availability_zone=None,
            use_autoconfig=True,
            is_proxy_gateway=False,
            shares=[],
            is_protected=False,
            is_public=False,
            image_id=ngt.image_id,
        )
Example #32
    def test_transfer_students(self):
        """
        Verify the transfer student command works as intended.
        """
        student = UserFactory.create()
        student.set_password(self.PASSWORD)
        student.save()
        mode = 'verified'
        # Original Course
        original_course_location = locator.CourseLocator('Org0', 'Course0', 'Run0')
        course = self._create_course(original_course_location)
        # Enroll the student in 'verified'
        CourseEnrollment.enroll(student, course.id, mode='verified')

        # Create and purchase a verified cert for the original course.
        self._create_and_purchase_verified(student, course.id)

        # New Course 1
        course_location_one = locator.CourseLocator('Org1', 'Course1', 'Run1')
        new_course_one = self._create_course(course_location_one)

        # New Course 2
        course_location_two = locator.CourseLocator('Org2', 'Course2', 'Run2')
        new_course_two = self._create_course(course_location_two)
        original_key = text_type(course.id)
        new_key_one = text_type(new_course_one.id)
        new_key_two = text_type(new_course_two.id)

        # Run the actual management command
        call_command(
            'transfer_students',
            '--from', original_key,
            '--to', new_key_one, new_key_two,
        )
        self.assertTrue(self.signal_fired)

        # Confirm the analytics event was emitted.
        self.mock_tracker.emit.assert_has_calls(
            [
                call(
                    EVENT_NAME_ENROLLMENT_ACTIVATED,
                    {'course_id': original_key, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_MODE_CHANGED,
                    {'course_id': original_key, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_DEACTIVATED,
                    {'course_id': original_key, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_ACTIVATED,
                    {'course_id': new_key_one, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_MODE_CHANGED,
                    {'course_id': new_key_one, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_ACTIVATED,
                    {'course_id': new_key_two, 'user_id': student.id, 'mode': mode}
                ),
                call(
                    EVENT_NAME_ENROLLMENT_MODE_CHANGED,
                    {'course_id': new_key_two, 'user_id': student.id, 'mode': mode}
                )
            ]
        )
        self.mock_tracker.reset_mock()

        # Confirm the enrollment mode is verified on the new courses, and enrollment is enabled as appropriate.
        self.assertEqual((mode, False), CourseEnrollment.enrollment_mode_for_user(student, course.id))
        self.assertEqual((mode, True), CourseEnrollment.enrollment_mode_for_user(student, new_course_one.id))
        self.assertEqual((mode, True), CourseEnrollment.enrollment_mode_for_user(student, new_course_two.id))

        # Confirm the student has not been refunded.
        target_certs = CertificateItem.objects.filter(
            course_id=course.id, user_id=student, status='purchased', mode=mode
        )
        self.assertTrue(target_certs[0])
        self.assertFalse(target_certs[0].refund_requested_time)
        self.assertEqual(target_certs[0].order.status, 'purchased')
Example #33
def SafePopen_RemoveStdinWindows_test(*args):
    utils.SafePopen(['foo'], stdin_windows='bar')
    eq_(subprocess.Popen.call_args, call(['foo']))
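
call_args holds only the most recent call as a call() object, which is why the
comparison above works. A standalone sketch (names illustrative) of call_args
versus call_args_list:

from unittest import mock

popen = mock.Mock()
popen(['foo'])
popen(['bar'], stdin=None)

assert popen.call_args == mock.call(['bar'], stdin=None)  # last call only
assert popen.call_args_list == [mock.call(['foo']),
                                mock.call(['bar'], stdin=None)]
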
Example #34
  def testSimpleCase(self):
    self.make_requests.side_effect = iter([
        [INSTANCE_WITHOUT_EXTERNAL_ADDRESS],
        [self.project_resource],
        [],
    ])

    self.Run("""compute ssh john@instance-1 --zone zone-1 --internal-ip""")

    # Require SSH keys
    self.ensure_keys.assert_called_once_with(
        self.keys, None, allow_passphrase=True)

    # SSH Command
    self.ssh_init.assert_has_calls(
        [
            mock.call(
                mock_matchers.TypeMatcher(ssh.SSHCommand),
                remote=self.remote,
                identity_file=self.private_key_file,
                options=dict(self.options, HostKeyAlias='compute.22222'),
                remote_command=[
                    '[ `curl "http://metadata.google.internal/'
                    'computeMetadata/v1/instance/id" -H "Metadata-Flavor: '
                    'Google" -q` = 22222 ] || exit 23'],
            ),
            mock.call(
                mock_matchers.TypeMatcher(ssh.SSHCommand),
                remote=self.remote,
                identity_file=self.private_key_file,
                extra_flags=[],
                tty=None,
                options=dict(self.options, HostKeyAlias='compute.22222'),
                remote_command=None,
                iap_tunnel_args=None,
                remainder=[],
            ),
        ],
        any_order=True,
    )

    self.ssh_run.assert_has_calls([
        mock.call(mock_matchers.TypeMatcher(ssh.SSHCommand), self.env,
                  force_connect=True),
        mock.call(mock_matchers.TypeMatcher(ssh.SSHCommand), self.env,
                  force_connect=True)])
    self.CheckRequests(
        [(self.compute.instances,
          'Get',
          self.messages.ComputeInstancesGetRequest(
              instance='instance-1',
              project='my-project',
              zone='zone-1'))],
        [(self.compute.projects,
          'Get',
          self.messages.ComputeProjectsGetRequest(
              project='my-project'))],
        [(self.compute.projects,
          'SetCommonInstanceMetadata',
          self.messages.ComputeProjectsSetCommonInstanceMetadataRequest(
              metadata=self.messages.Metadata(
                  items=[
                      self.messages.Metadata.ItemsValueListEntry(
                          key='a',
                          value='b'),
                      self.messages.Metadata.ItemsValueListEntry(
                          key='ssh-keys',
                          value='john:' + self.public_key_material),
                      self.messages.Metadata.ItemsValueListEntry(
                          key='sshKeys',
                          value='me:{0}\n'.format(self.public_key_material)),
                  ]),

              project='my-project'))],
    )
    def test_setup_install_boot_images_efi(
        self, mock_machine, mock_exists, mock_open, mock_sync,
        mock_command, mock_get_grub_bios_core_loader,
        mock_get_unsigned_grub_loader, mock_get_boot_path
    ):
        mock_get_boot_path.return_value = '/boot'
        mock_get_unsigned_grub_loader.return_value = None
        mock_get_grub_bios_core_loader.return_value = None
        data = mock.Mock()
        mock_sync.return_value = data
        mock_machine.return_value = 'x86_64'
        self.firmware.efi_mode = mock.Mock(
            return_value='efi'
        )
        self.os_exists['root_dir/boot/grub2/fonts/unicode.pf2'] = False
        self.os_exists['root_dir/usr/share/grub2/unicode.pf2'] = True
        self.os_exists['root_dir/usr/share/grub2/i386-pc'] = True
        self.os_exists['root_dir/usr/share/grub2/x86_64-efi'] = True
        self.os_exists['root_dir/boot/efi/'] = False

        def side_effect(arg):
            return self.os_exists[arg]

        mock_exists.side_effect = side_effect
        context_manager_mock = mock.Mock()
        mock_open.return_value = context_manager_mock
        file_mock = mock.Mock()
        enter_mock = mock.Mock()
        exit_mock = mock.Mock()
        enter_mock.return_value = file_mock
        setattr(context_manager_mock, '__enter__', enter_mock)
        setattr(context_manager_mock, '__exit__', exit_mock)
        self.bootloader.setup_install_boot_images(self.mbrid)

        assert mock_open.call_args_list == [
            call('root_dir/boot/grub2/earlyboot.cfg', 'w'),
            call('root_dir/EFI/BOOT/earlyboot.cfg', 'w')
        ]
        assert file_mock.write.call_args_list == [
            call('set btrfs_relative_path="yes"\n'),
            call('search --file --set=root /boot/0xffffffff\n'),
            call('set prefix=($root)/boot/grub2\n'),
            call('set btrfs_relative_path="yes"\n'),
            call('search --file --set=root /boot/0xffffffff\n'),
            call('set prefix=($root)/boot/grub2\n')
        ]
        assert mock_command.call_args_list == [
            call(
                [
                    'cp', 'root_dir/usr/share/grub2/unicode.pf2',
                    'root_dir/boot/grub2/fonts/unicode.pf2'
                ]
            ),
            call(
                [
                    'grub2-mkimage', '-O', 'i386-pc',
                    '-o', 'root_dir/usr/share/grub2/i386-pc/core.img',
                    '-c', 'root_dir/boot/grub2/earlyboot.cfg',
                    '-p', '/boot/grub2',
                    '-d', 'root_dir/usr/share/grub2/i386-pc',
                    'ext2', 'iso9660', 'linux', 'echo', 'configfile',
                    'search_label', 'search_fs_file', 'search',
                    'search_fs_uuid', 'ls', 'normal', 'gzio', 'png', 'fat',
                    'gettext', 'font', 'minicmd', 'gfxterm', 'gfxmenu',
                    'all_video', 'xfs', 'btrfs', 'lvm', 'luks',
                    'gcry_rijndael', 'gcry_sha256', 'gcry_sha512',
                    'crypto', 'cryptodisk', 'test', 'true', 'part_gpt',
                    'part_msdos', 'biosdisk', 'vga', 'vbe',
                    'chain', 'boot'
                ]
            ),
            call(
                [
                    'bash', '-c', 'cat root_dir/usr/share/grub2/i386-pc/'
                    'cdboot.img root_dir/usr/share/grub2/i386-pc/core.img > '
                    'root_dir/usr/share/grub2/i386-pc/eltorito.img'
                ]
            ),
            call(
                [
                    'grub2-mkimage', '-O', 'x86_64-efi',
                    '-o', 'root_dir/EFI/BOOT/bootx64.efi',
                    '-c', 'root_dir/EFI/BOOT/earlyboot.cfg',
                    '-p', '/boot/grub2',
                    '-d', 'root_dir/usr/share/grub2/x86_64-efi',
                    'ext2', 'iso9660', 'linux', 'echo', 'configfile',
                    'search_label', 'search_fs_file', 'search',
                    'search_fs_uuid', 'ls', 'normal', 'gzio', 'png', 'fat',
                    'gettext', 'font', 'minicmd', 'gfxterm', 'gfxmenu',
                    'all_video', 'xfs', 'btrfs', 'lvm', 'luks',
                    'gcry_rijndael', 'gcry_sha256', 'gcry_sha512',
                    'crypto', 'cryptodisk', 'test', 'true', 'part_gpt',
                    'part_msdos', 'efi_gop', 'efi_uga', 'linuxefi'
                ]
            )
        ]
        assert mock_sync.call_args_list == [
            call(
                'root_dir/usr/share/grub2/i386-pc/',
                'root_dir/boot/grub2/i386-pc'
            ),
            call(
                'root_dir/usr/share/grub2/x86_64-efi/',
                'root_dir/boot/grub2/x86_64-efi'
            )
        ]
        assert data.sync_data.call_args_list == [
            call(exclude=['*.module'], options=['-a']),
            call(exclude=['*.module'], options=['-a'])
        ]

        mock_get_unsigned_grub_loader.return_value = 'custom_grub_image'
        mock_get_grub_bios_core_loader.return_value = 'custom_bios_grub_image'
        mock_command.reset_mock()
        file_mock.write.reset_mock()
        mock_open.reset_mock()
        self.bootloader.setup_install_boot_images(self.mbrid)

        assert mock_command.call_args_list == [
            call(
                [
                    'cp', 'root_dir/usr/share/grub2/unicode.pf2',
                    'root_dir/boot/grub2/fonts/unicode.pf2'
                ]
            ),
            call(
                [
                    'bash', '-c', 'cat root_dir/usr/share/grub2/i386-pc/'
                    'cdboot.img custom_bios_grub_image > '
                    'root_dir/usr/share/grub2/i386-pc/eltorito.img'
                ]
            ),
            call(
                [
                    'cp', 'custom_grub_image', 'root_dir/EFI/BOOT/bootx64.efi'
                ]
            )
        ]
        assert file_mock.write.call_args_list == [
            call('set btrfs_relative_path="yes"\n'),
            call('search --file --set=root /boot/0xffffffff\n'),
            call('set prefix=($root)/boot/grub2\n'),
            call('normal\n')
        ]
        assert mock_open.call_args_list == [
            call('root_dir/EFI/BOOT/grub.cfg', 'w'),
            call('root_dir/EFI/BOOT/grub.cfg', 'a')
        ]
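
The bootloader test above builds its own context-manager mock by attaching
__enter__ and __exit__; mock_open() packages the same behaviour for file
handles. A standalone Python 3 sketch (path and content illustrative):

from unittest import mock

with mock.patch('builtins.open', mock.mock_open()) as m_open:
    with open('earlyboot.cfg', 'w') as handle:
        handle.write('set btrfs_relative_path="yes"\n')

assert m_open.call_args_list == [mock.call('earlyboot.cfg', 'w')]
m_open.return_value.write.assert_called_once_with(
    'set btrfs_relative_path="yes"\n')
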
Example #36
def test_get_all_executables_pathsep(path, pathsep):
    with patch('thefrick.utils.Path') as Path_mock:
        get_all_executables()
        Path_mock.assert_has_calls([call(p) for p in path.split(pathsep)],
                                   any_order=True)
Example #37
    def test_main_embedded(self, wait_for_startup_mock, is_solr_local_mock,
                           is_hbase_local_mock, java_mock, exists_mock,
                           expandWebApp_mock, atlasDir_mock, executeEnvSh_mock,
                           writePid_mock, exist_pid_mock, grep_mock,
                           getConfigWithDefault_mock, getConfig_mock,
                           configure_hbase_mock, runProcess_mock):
        sys.argv = []
        exists_mock.return_value = True
        expandWebApp_mock.return_value = "webapp"
        atlasDir_mock.return_value = "atlas_home"
        is_hbase_local_mock.return_value = True
        is_solr_local_mock.return_value = True
        wait_for_startup_mock.return_value = True

        exist_pid_mock(789)
        exist_pid_mock.assert_called_with(789)
        grep_mock.return_value = "hbase"
        getConfig_mock.side_effect = self.get_config_mock_side_effect
        getConfigWithDefault_mock.side_effect = self.get_default_config_mock_side_effect

        atlas.main()
        self.assertTrue(configure_hbase_mock.called)

        if IS_WINDOWS:
            calls = [
                call([
                    'atlas_home\\hbase\\bin\\start-hbase.cmd', '--config',
                    'atlas_home\\hbase\\conf'
                ], 'atlas_home\\logs', False, True),
                call([
                    'atlas_home\\solr\\bin\\solr.cmd', 'start', '-z',
                    'localhost:9838', '-p', '9838'
                ], 'atlas_home\\logs', False, True),
                call([
                    'atlas_home\\solr\\bin\\solr.cmd', 'create', '-c',
                    'vertex_index', '-d',
                    'atlas_home\\solr\\server\\solr\\configsets\\_default\\conf',
                    '-shards', '1', '-replicationFactor', '1'
                ], 'atlas_home\\logs', False, True),
                call([
                    'atlas_home\\solr\\bin\\solr.cmd', 'create', '-c',
                    'edge_index', '-d',
                    'atlas_home\\solr\\server\\solr\\configsets\\_default\\conf',
                    '-shards', '1', '-replicationFactor', '1'
                ], 'atlas_home\\logs', False, True),
                call([
                    'atlas_home\\solr\\bin\\solr.cmd', 'create', '-c',
                    'fulltext_index', '-d',
                    'atlas_home\\solr\\server\\solr\\configsets\\_default\\conf',
                    '-shards', '1', '-replicationFactor', '1'
                ], 'atlas_home\\logs', False, True)
            ]

            runProcess_mock.assert_has_calls(calls)
        else:
            calls = [
                call([
                    'atlas_home/hbase/bin/hbase-daemon.sh', '--config',
                    'atlas_home/hbase/conf', 'start', 'master'
                ], 'atlas_home/logs', False, True),
                call([
                    'atlas_home/solr/bin/solr', 'start', '-z',
                    'localhost:9838', '-p', '9838'
                ], 'atlas_home/logs', False, True),
                call([
                    'atlas_home/solr/bin/solr', 'create', '-c', 'vertex_index',
                    '-d',
                    'atlas_home/solr/server/solr/configsets/_default/conf',
                    '-shards', '1', '-replicationFactor', '1'
                ], 'atlas_home/logs', False, True),
                call([
                    'atlas_home/solr/bin/solr', 'create', '-c', 'edge_index',
                    '-d',
                    'atlas_home/solr/server/solr/configsets/_default/conf',
                    '-shards', '1', '-replicationFactor', '1'
                ], 'atlas_home/logs', False, True),
                call([
                    'atlas_home/solr/bin/solr', 'create', '-c',
                    'fulltext_index', '-d',
                    'atlas_home/solr/server/solr/configsets/_default/conf',
                    '-shards', '1', '-replicationFactor', '1'
                ], 'atlas_home/logs', False, True)
            ]

            runProcess_mock.assert_has_calls(calls)

        self.assertTrue(java_mock.called)
        if IS_WINDOWS:

            java_mock.assert_called_with(
                'org.apache.atlas.Atlas',
                ['-app', 'atlas_home\\server\\webapp\\atlas'],
                'atlas_home\\conf;atlas_home\\server\\webapp\\atlas\\WEB-INF\\classes;atlas_home\\server\\webapp\\atlas\\WEB-INF\\lib\\*;atlas_home\\libext\\*;atlas_home\\hbase\\conf',
                [
                    '-Datlas.log.dir=atlas_home\\logs',
                    '-Datlas.log.file=application.log',
                    '-Datlas.home=atlas_home', '-Datlas.conf=atlas_home\\conf',
                    '-Xmx1024m', '-Dlog4j.configuration=atlas-log4j.xml',
                    '-Djava.net.preferIPv4Stack=true', '-server'
                ], 'atlas_home\\logs')

        else:
            java_mock.assert_called_with(
                'org.apache.atlas.Atlas',
                ['-app', 'atlas_home/server/webapp/atlas'],
                'atlas_home/conf:atlas_home/server/webapp/atlas/WEB-INF/classes:atlas_home/server/webapp/atlas/WEB-INF/lib/*:atlas_home/libext/*:atlas_home/hbase/conf',
                [
                    '-Datlas.log.dir=atlas_home/logs',
                    '-Datlas.log.file=application.log',
                    '-Datlas.home=atlas_home', '-Datlas.conf=atlas_home/conf',
                    '-Xmx1024m', '-Dlog4j.configuration=atlas-log4j.xml',
                    '-Djava.net.preferIPv4Stack=true', '-server'
                ], 'atlas_home/logs')

Example #38
0
    def test_schedule_instance_group(self, mock_get_hosts, mock_get_all_states,
                                     mock_claim):
        """Test that since the request spec object contains an instance group
        object, that upon choosing a host in the primary schedule loop,
        that we update the request spec's instance group information
        """
        num_instances = 2
        ig = objects.InstanceGroup(hosts=[])
        spec_obj = objects.RequestSpec(num_instances=num_instances,
                                       flavor=objects.Flavor(memory_mb=512,
                                                             root_gb=512,
                                                             ephemeral_gb=0,
                                                             swap=0,
                                                             vcpus=1),
                                       project_id=uuids.project_id,
                                       instance_group=ig)

        hs1 = mock.Mock(spec=host_manager.HostState,
                        host='host1',
                        uuid=uuids.cn1)
        hs2 = mock.Mock(spec=host_manager.HostState,
                        host='host2',
                        uuid=uuids.cn2)
        all_host_states = [hs1, hs2]
        mock_get_all_states.return_value = all_host_states
        mock_claim.return_value = True

        alloc_reqs_by_rp_uuid = {
            uuids.cn1: [mock.sentinel.alloc_req_cn1],
            uuids.cn2: [mock.sentinel.alloc_req_cn2],
        }

        # Simulate host 1 and host 2 being randomly returned first by
        # _get_sorted_hosts() in the two iterations for each instance in
        # num_instances
        mock_get_hosts.side_effect = ([hs2, hs1], [hs1, hs2])
        instance_uuids = [
            getattr(uuids, 'instance%d' % x) for x in range(num_instances)
        ]
        ctx = mock.Mock()
        self.driver._schedule(ctx, spec_obj, instance_uuids,
                              alloc_reqs_by_rp_uuid,
                              mock.sentinel.provider_summaries)

        # Check that we called _claim_resources() for both the first and second
        # host state
        claim_calls = [
            mock.call(ctx.elevated.return_value, spec_obj, uuids.instance0,
                      [mock.sentinel.alloc_req_cn2]),
            mock.call(ctx.elevated.return_value, spec_obj, uuids.instance1,
                      [mock.sentinel.alloc_req_cn1]),
        ]
        mock_claim.assert_has_calls(claim_calls)

        # Check that _get_sorted_hosts() is called twice and that the
        # second time, we pass it the hosts that were returned from
        # _get_sorted_hosts() the first time
        sorted_host_calls = [
            mock.call(spec_obj, all_host_states, 0),
            mock.call(spec_obj, [hs2, hs1], 1),
        ]
        mock_get_hosts.assert_has_calls(sorted_host_calls)

        # The instance group object should have both host1 and host2 in its
        # instance group hosts list and there should not be any "changes" to
        # save in the instance group object
        self.assertEqual(['host2', 'host1'], ig.hosts)
        self.assertEqual({}, ig.obj_get_changes())
Example #39
0
 def test_cluster_delete_multiple_id_success(self, mock_delete):
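     # Passing two cluster IDs should trigger one delete call per ID.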
     self._test_arg_success('cluster-delete xxx xyz')
     calls = [mock.call('xxx'), mock.call('xyz')]
     mock_delete.assert_has_calls(calls)
Example #40
0
    def test_create(self, mock_workflow, service_checker):
        service_checker.return_value = True
        mock_workflow.return_value = {}
        flavor = self.flavors.first()
        ngt = self.nodegroup_templates.first()
        configs = self.plugins_configs.first()
        new_name = ngt.name + '-new'

        self.mock_extension_supported.return_value = True
        self.mock_availability_zone_list.return_value = \
            self.availability_zones.list()
        self.mock_volume_type_list.return_value = []
        self.mock_flavor_list.return_value = [flavor]
        self.mock_plugin_get_version_details.return_value = configs
        self.mock_floating_ip_pools_list.return_value = []
        self.mock_security_group_list.return_value = []
        self.mock_nodegroup_template_create.return_value = True

        res = self.client.post(
            CREATE_URL, {
                'nodegroup_name': new_name,
                'plugin_name': ngt.plugin_name,
                ngt.plugin_name + '_version': '1.2.1',
                'hadoop_version': ngt.hadoop_version,
                'description': ngt.description,
                'flavor': flavor.id,
                'availability_zone': None,
                'storage': 'ephemeral_drive',
                'volumes_per_node': 0,
                'volumes_size': 0,
                'volume_type': None,
                'volume_local_to_instance': False,
                'volumes_availability_zone': None,
                'floating_ip_pool': None,
                'security_autogroup': True,
                'processes': 'HDFS:namenode',
                'use_autoconfig': True,
                'shares': [],
                'is_public': False,
                'is_protected': False
            })

        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)
        self.mock_extension_supported.assert_called_once_with(
            test.IsHttpRequest(), 'AvailabilityZones')
        self.mock_availability_zone_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_flavor_list.assert_called_once_with(test.IsHttpRequest())
        self.assert_mock_multiple_calls_with_same_arguments(
            self.mock_plugin_get_version_details, 4,
            mock.call(test.IsHttpRequest(), ngt.plugin_name,
                      ngt.hadoop_version))
        self.mock_floating_ip_pools_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_security_group_list.assert_called_once_with(
            test.IsHttpRequest())
        self.mock_nodegroup_template_create.assert_called_once_with(
            test.IsHttpRequest(), **{
                'name': new_name,
                'plugin_name': ngt.plugin_name,
                'hadoop_version': ngt.hadoop_version,
                'description': ngt.description,
                'flavor_id': flavor.id,
                'volumes_per_node': None,
                'volumes_size': None,
                'volume_type': None,
                'volume_local_to_instance': False,
                'volumes_availability_zone': None,
                'node_processes': ['namenode'],
                'node_configs': {},
                'floating_ip_pool': None,
                'security_groups': [],
                'image_id': None,
                'auto_security_group': True,
                'availability_zone': None,
                'is_proxy_gateway': False,
                'use_autoconfig': True,
                'shares': [],
                'is_public': False,
                'is_protected': False
            })
Example #41
0
    def test_apply_multiple_tags_to_a_matched_build(
            self,
            tagBuild_side_effect,
            expected_destination_tags_for_build,
            expected_destination_tags_for_devel_build
    ):

        # Because the rule file specifies that the destination tag uses the
        # value of requires.platform, and there are two such values in the
        # modulemd, the module build should be tagged with two tags.
        self.mock_retrieve_modulemd_content.return_value = dedent('''\
            ---
            document: modulemd
            version: 2
            data:
              name: javapackages-tools
              stream: 1
              version: 1
              context: c1
              dependencies:
              - buildrequires:
                  platform: [f29]
                requires:
                  platform: [f29, f28]
            ''')

        session = self.mock_ClientSession.return_value
        session.tagBuild.side_effect = tagBuild_side_effect

        rule_defs = read_rule_defs()
        tagging_service.handle(rule_defs, {
            'id': 1,
            'name': 'javapackages-tools',
            'stream': '1',
            'version': '1',
            'context': 'c1',
            'state_name': 'ready',
        })

        nvr = 'javapackages-tools-1-1.c1'
        nvr_devel = 'javapackages-tools-devel-1-1.c1'
        session.tagBuild.assert_has_calls([
            call('f29-modular-ursamajor', nvr),
            call('f28-modular-ursamajor', nvr),
            call('f29-modular-ursamajor', nvr_devel),
            call('f28-modular-ursamajor', nvr_devel),
        ], any_order=True)

        # 2 messages should be sent:
        # javapackages-tools: f29, f28
        # javapackages-tools-devel: f29, f28
        self.mock_publish.assert_has_calls([
            call('build.tag.requested', {
                'build': {
                    'id': 1, 'name': 'javapackages-tools',
                    'stream': '1', 'version': '1', 'context': 'c1',
                },
                'nvr': nvr,
                'destination_tags': expected_destination_tags_for_build,
            }),
            call('build.tag.requested', {
                'build': {
                    'id': 1, 'name': 'javapackages-tools-devel',
                    'stream': '1', 'version': '1', 'context': 'c1',
                },
                'nvr': nvr_devel,
                'destination_tags': expected_destination_tags_for_devel_build,
            }),
        ], any_order=True)
Example #42
0
    def testInitEngines_usesCorrectIsolationLevels(
        self,
        mock_get_secret: mock.MagicMock,
        mock_in_gcp: mock.MagicMock,
        mock_in_production: mock.MagicMock,
        mock_create_engine: mock.MagicMock,
        mock_get_states: mock.MagicMock,
    ) -> None:
        # Arrange
        mock_in_gcp.return_value = True
        mock_in_production.return_value = True
        # Pretend all secret values are just the key suffixed with '_value'
        mock_get_secret.side_effect = lambda key: f"{key}_value"

        # Act
        SQLAlchemyEngineManager.attempt_init_engines_for_server(
            set(schema_utils.SchemaType))

        # Assert
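        # One engine is expected per database below; the STATE and
        # JUSTICE_COUNTS engines are created with SERIALIZABLE isolation,
        # and the state engines additionally use NullPool.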
        self.assertEqual(
            mock_create_engine.call_args_list,
            [
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="postgres",
                        query={
                            "host":
                            "/cloudsql/sqlalchemy_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level=None,
                    poolclass=None,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="postgres",
                        query={
                            "host":
                            "/cloudsql/state_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level="SERIALIZABLE",
                    poolclass=sqlalchemy.pool.NullPool,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="postgres",
                        query={
                            "host":
                            "/cloudsql/operations_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level=None,
                    poolclass=None,
                    pool_size=2,
                    max_overflow=5,
                    pool_timeout=15,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="postgres",
                        query={
                            "host":
                            "/cloudsql/justice_counts_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level="SERIALIZABLE",
                    poolclass=None,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="postgres",
                        query={
                            "host":
                            "/cloudsql/case_triage_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level=None,
                    poolclass=None,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="us_xx_primary",
                        query={
                            "host":
                            "/cloudsql/state_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level="SERIALIZABLE",
                    poolclass=sqlalchemy.pool.NullPool,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="us_ww_primary",
                        query={
                            "host":
                            "/cloudsql/state_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level="SERIALIZABLE",
                    poolclass=sqlalchemy.pool.NullPool,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="us_xx_secondary",
                        query={
                            "host":
                            "/cloudsql/state_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level="SERIALIZABLE",
                    poolclass=sqlalchemy.pool.NullPool,
                    echo_pool=True,
                    pool_recycle=600,
                ),
                call(
                    URL.create(
                        drivername="postgresql",
                        username="******",
                        password="******",
                        database="us_ww_secondary",
                        query={
                            "host":
                            "/cloudsql/state_cloudsql_instance_id_value"
                        },
                    ),
                    isolation_level="SERIALIZABLE",
                    poolclass=sqlalchemy.pool.NullPool,
                    echo_pool=True,
                    pool_recycle=600,
                ),
            ],
        )
        mock_get_states.assert_called()
Example #43
0
def test_finalize(m_get_parent, m_hg_rebase, m_hg_get_successor, hg):
    commits = [
        {
            "rev": "1",
            "node": "aaa",
            "orig-node": "aaa"
        },
        {
            "rev": "2",
            "node": "bbb",
            "orig-node": "bbb"
        },
        {
            "rev": "3",
            "node": "ccc",
            "orig-node": "ccc"
        },
    ]

    m_get_parent.return_value = "different:than_others"
    m_hg_get_successor.return_value = (None, None)
    hg.finalize(copy.deepcopy(commits))
    assert m_hg_rebase.call_count == 2
    assert m_hg_rebase.call_args_list == [
        mock.call(
            {
                "rev": "2",
                "node": "bbb",
                "orig-node": "bbb"
            },
            {
                "rev": "1",
                "node": "aaa",
                "orig-node": "aaa"
            },
        ),
        mock.call(
            {
                "rev": "3",
                "node": "ccc",
                "orig-node": "ccc"
            },
            {
                "rev": "2",
                "node": "bbb",
                "orig-node": "bbb"
            },
        ),
    ]

    m_get_parent.side_effect = ("first", "aaa", "last")
    m_hg_rebase.reset_mock()
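    # With these parents, only "ccc" is out of order, so a single rebase
    # onto "bbb" is expected.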
    hg.finalize(commits)
    m_hg_rebase.assert_called_once_with(
        {
            "rev": "3",
            "node": "ccc",
            "orig-node": "ccc"
        },
        {
            "rev": "2",
            "node": "bbb",
            "orig-node": "bbb"
        },
    )

    m_hg_get_successor.reset_mock()
    m_get_parent.side_effect = None
    m_get_parent.return_value = "different:than_others"
    m_hg_get_successor.side_effect = [(None, None), ("4", "ddd")]
    _commits = commits[:]
    hg.finalize(_commits)
    assert m_hg_get_successor.call_count == 2
    assert m_hg_get_successor.call_args_list == [
        mock.call("bbb"), mock.call("ccc")
    ]
    assert _commits == [
        {
            "rev": "1",
            "node": "aaa",
            "orig-node": "aaa"
        },
        {
            "rev": "2",
            "node": "bbb",
            "orig-node": "bbb"
        },
        {
            "rev": "3",
            "node": "ddd",
            "orig-node": "ccc",
            "name": "4:ddd"
        },
    ]

    m_hg_rebase.reset_mock()
    m_hg_get_successor.side_effect = None
    m_hg_get_successor.return_value = (None, None)
    _commits = commits[:]
    _commits[0]["node"] = "AAA"  # node has been amended
    hg.finalize(_commits)
    assert m_hg_rebase.call_count == 2
Example #44
0
    def test_update_all_reference_data_command(self):

        # Test missing required arguments
        with self.assertRaises(CommandError) as err:
            call_command('update_all_reference_data')
        self.assertEqual(str(err.exception), 'Error: one of the arguments --omim-key --use-cached-omim --skip-omim is required')

        # Test update all gencode, no skips, fail primate_ai and mgi
        self.mock_omim.return_value = 'omim'
        call_command('update_all_reference_data', '--omim-key=test_key')
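        # update_gencode is expected once per listed release, with reset=True
        # on the latest (31).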

        calls = [
            mock.call(31, reset=True),
            mock.call(29),
            mock.call(28),
            mock.call(27),
            mock.call(19),
        ]
        self.mock_update_gencode.assert_has_calls(calls)

        self.mock_omim.assert_called_with('test_key')
        self.mock_cached_omim.assert_not_called()

        self.assertEqual(self.mock_update_records.call_count, 4)
        calls = [
            mock.call('omim'),
            mock.call('dbnsfp_gene'),
            mock.call('gene_constraint'),
            mock.call('gene_cn_sensitivity'),
        ]
        self.mock_update_records.assert_has_calls(calls)

        self.mock_update_hpo.assert_called_with()

        calls = [
            mock.call('Done'),
            mock.call('Updated: gencode, omim, dbnsfp_gene, gene_constraint, gene_cn_sensitivity, hpo'),
            mock.call('Failed to Update: primate_ai, mgi')
        ]
        self.mock_logger.info.assert_has_calls(calls)

        calls = [
            mock.call('unable to update primate_ai: Primate_AI failed'),
            mock.call('unable to update mgi: MGI failed')
        ]
        self.mock_logger.error.assert_has_calls(calls)
Example #45
0
    def test_sow_fs_and_db(self):
        """Tests sow from filesystem and database."""
        # Access to protected member: _sow
        #
        # pylint: disable=W0212
        pubsub = websocket.DirWatchPubSub(self.root)

        handler = mock.Mock()

        impl = mock.Mock()
        sow_dir = os.path.join(self.root, '.sow', 'trace')
        fs.mkdir_safe(sow_dir)

        with tempfile.NamedTemporaryFile(dir=sow_dir,
                                         delete=False,
                                         prefix='trace.db-') as temp:
            pass
        impl.sow = sow_dir
        impl.sow_table = 'trace'

        conn = sqlite3.connect(temp.name)
        conn.execute("""
            CREATE TABLE trace (
                path text, timestamp integer, data text,
                directory text, name text
            )
            """)
        conn.executemany(
            """
            INSERT INTO trace (
                path, timestamp, directory, name
            ) VALUES(?, ?, ?, ?)
            """, [('/aaa', 3, '/', 'aaa'), ('/bbb', 2, '/', 'bbb'),
                  ('/ccc', 1, '/', 'ccc')])
        conn.commit()
        conn.close()

        impl.on_event.side_effect = [
            {
                'echo': 1
            },
            {
                'echo': 2
            },
            {
                'echo': 3
            },
            {
                'echo': 4
            },
        ]
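        # on_event is expected once per sow database row plus once for the
        # file created below; each returned dict is sent to the handler
        # together with its 'when' timestamp.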

        io.open(os.path.join(self.root, 'xxx'), 'w').close()
        modified = os.stat(os.path.join(self.root, 'xxx')).st_mtime

        pubsub._sow('/', '*', 0, handler, impl)

        impl.on_event.assert_has_calls([
            mock.call('/ccc', None, None),
            mock.call('/bbb', None, None),
            mock.call('/aaa', None, None),
            mock.call('/xxx', None, ''),
        ])
        handler.send_msg.assert_has_calls([
            mock.call({
                'when': 1,
                'echo': 1
            }),
            mock.call({
                'when': 2,
                'echo': 2
            }),
            mock.call({
                'when': 3,
                'echo': 3
            }),
            mock.call({
                'when': modified,
                'echo': 4
            }),
        ])

        # Create an empty sow database; this simulates the database being
        # removed while the sow is being constructed.
        #
        with tempfile.NamedTemporaryFile(dir=sow_dir,
                                         delete=False,
                                         prefix='trace.db-') as temp:
            pass

        pubsub._sow('/', '*', 0, handler, impl)
        impl.on_event.assert_has_calls([
            mock.call('/ccc', None, None),
            mock.call('/bbb', None, None),
            mock.call('/aaa', None, None),
            mock.call('/xxx', None, ''),
        ])
        handler.send_msg.assert_has_calls([
            mock.call({
                'when': 1,
                'echo': 1
            }),
            mock.call({
                'when': 2,
                'echo': 2
            }),
            mock.call({
                'when': 3,
                'echo': 3
            }),
            mock.call({
                'when': modified,
                'echo': 4
            }),
        ])
Example #46
0
    def test_tag_build_if_match_one_rule_only(self):

        # Note that platform does not match the rule in the rule file.
        self.mock_retrieve_modulemd_content.return_value = dedent('''\
            ---
            document: modulemd
            version: 2
            data:
              name: javapackages-tools
              stream: 1
              version: 1
              context: c1
              dependencies:
              - buildrequires:
                  platform: [f29]
                requires:
                  platform: [f29]
            ''')

        session = self.mock_ClientSession.return_value
        session.tagBuild.side_effect = [1, 2, 3]
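        # The fake tagBuild returns task ids 1, 2, 3; only two tag requests
        # are expected in this test.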

        rule_defs = read_rule_defs()
        tagging_service.handle(rule_defs, {
            'id': 1,
            'name': 'javapackages-tools',
            'stream': '1',
            'version': '1',
            'context': 'c1',
            'state_name': 'ready',
        })

        nvr = 'javapackages-tools-1-1.c1'
        nvr_devel = 'javapackages-tools-devel-1-1.c1'
        session.tagBuild.assert_has_calls([
            call('f29-modular-ursamajor', nvr),
            call('f29-modular-ursamajor', nvr_devel),
        ], any_order=True)

        # 2 messages should be sent:
        # javapackages-tools: f29
        # javapackages-tools-devel: f29
        self.mock_publish.assert_has_calls([
            call('build.tag.requested', {
                'build': {
                    'id': 1, 'name': 'javapackages-tools',
                    'stream': '1', 'version': '1', 'context': 'c1',
                },
                'nvr': nvr,
                'destination_tags': [
                    {'tag': 'f29-modular-ursamajor', 'task_id': 1},
                ],
            }),
            call('build.tag.requested', {
                'build': {
                    'id': 1, 'name': 'javapackages-tools-devel',
                    'stream': '1', 'version': '1', 'context': 'c1',
                },
                'nvr': nvr_devel,
                'destination_tags': [
                    {'tag': 'f29-modular-ursamajor', 'task_id': 2},
                ],
            }),
        ], any_order=True)
Example #47
0
    def test_push_local(self):
        def get_joined(*args):
            return defer.succeed([])

        self.mock_get_joined.side_effect = get_joined

        self.presence_list = [
            {"observed_user_id": "@banana:test"},
            {"observed_user_id": "@clementine:test"},
        ]

        self.datastore.set_presence_state.return_value = defer.succeed(
            {"state": ONLINE}
        )

        # TODO(paul): Gut-wrenching
        from synapse.handlers.presence import UserPresenceCache
        self.handlers.presence_handler._user_cachemap[self.u_apple] = (
            UserPresenceCache()
        )
        self.handlers.presence_handler._user_cachemap[self.u_apple].update(
            {"presence": OFFLINE}, serial=0
        )
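        # Register banana and clementine as local push targets for apple.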
        apple_set = self.handlers.presence_handler._local_pushmap.setdefault(
                "apple", set())
        apple_set.add(self.u_banana)
        apple_set.add(self.u_clementine)

        yield self.handlers.presence_handler.set_state(self.u_apple,
            self.u_apple, {"presence": ONLINE}
        )
        yield self.handlers.presence_handler.set_state(self.u_banana,
            self.u_banana, {"presence": ONLINE}
        )

        presence = yield self.handlers.presence_handler.get_presence_list(
                observer_user=self.u_apple, accepted=True)

        self.assertEquals([
            {"observed_user": self.u_banana,
                "presence": ONLINE,
                "last_active_ago": 0,
                "displayname": "Frank",
                "avatar_url": "http://foo"},
            {"observed_user": self.u_clementine,
                "presence": OFFLINE}
        ], presence)

        self.mock_update_client.assert_has_calls([
            call(users_to_push=set([self.u_apple, self.u_banana, self.u_clementine]),
                room_ids=[],
                observed_user=self.u_apple,
                statuscache=ANY), # self-reflection
        ], any_order=True)

        statuscache = self.mock_update_client.call_args[1]["statuscache"]
        self.assertEquals({
            "presence": ONLINE,
            "last_active": 1000000, # MockClock
            "displayname": "Frank",
            "avatar_url": "http://foo",
        }, statuscache.state)

        self.mock_update_client.reset_mock()

        self.datastore.set_profile_displayname.return_value = defer.succeed(
                None)

        yield self.handlers.profile_handler.set_displayname(self.u_apple,
                self.u_apple, "I am an Apple")

        self.mock_update_client.assert_has_calls([
            call(users_to_push=set([self.u_apple, self.u_banana, self.u_clementine]),
                room_ids=[],
                observed_user=self.u_apple,
                statuscache=ANY), # self-reflection
        ], any_order=True)

        statuscache = self.mock_update_client.call_args[1]["statuscache"]
        self.assertEquals({
            "presence": ONLINE,
            "last_active": 1000000, # MockClock
            "displayname": "I am an Apple",
            "avatar_url": "http://foo",
        }, statuscache.state)
Example #48
0
def test_set_args(m_hg_hg_log, m_hg_hg_out, m_parse_config, hg):
    class Args:
        def __init__(self,
                     start="(auto)",
                     end=".",
                     safe_mode=False,
                     single=False):
            self.start_rev = start
            self.end_rev = end
            self.safe_mode = safe_mode
            self.single = single

    m_config = mozphab.config
    with pytest.raises(exceptions.Error):
        hg.set_args(Args())

    # baseline config
    hg.mercurial_version = LooseVersion("4.5")
    m_config.safe_mode = False
    m_parse_config.return_value = {
        "ui.username": "******",
        "extensions.evolve": ""
    }

    # evolve & shelve
    hg._hg = []
    hg.set_args(Args())
    assert (["--config", "extensions.rebase="] + ["--pager", "never"] +
            ["--config", "rebase.experimental.inmemory=true"]) == hg._hg
    assert hg.use_evolve
    assert not hg.has_shelve

    # inmemory rebase requires hg 4.5+
    hg.mercurial_version = LooseVersion("4.0")
    hg._hg = []
    hg.set_args(Args())
    assert (["--config", "extensions.rebase="] +
            ["--pager", "never"]) == hg._hg
    hg.mercurial_version = LooseVersion("4.5")

    # safe_mode
    safe_mode_options = (["--config", "extensions.rebase="] +
                         ["--pager", "never"] +
                         ["--config", "ui.username=username"] +
                         ["--config", "extensions.evolve="])
    hg._hg = []
    hg.set_args(Args(safe_mode=True))
    assert safe_mode_options == hg._hg

    m_config.safe_mode = True
    hg._hg = []
    hg.set_args(Args())
    assert safe_mode_options == hg._hg
    m_config.safe_mode = False

    # no evolve
    m_parse_config.return_value = {
        "ui.username": "******",
        "extensions.shelve": ""
    }
    hg._hg = []
    hg.set_args(Args())
    assert (["--config", "extensions.rebase="] + ["--pager", "never"] +
            ["--config", "rebase.experimental.inmemory=true"] +
            ["--config", "experimental.evolution.createmarkers=true"] +
            ["--config", "extensions.strip="]) == hg._hg
    assert not hg.use_evolve
    assert hg.has_shelve
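    # The revset endpoints are taken from the two hg log results below.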

    m_hg_hg_log.side_effect = [("123456789012", ), ("098765432109", )]
    hg._hg = []
    hg.set_args(Args())
    assert "123456789012::098765432109" == hg.revset

    m_hg_hg_log.side_effect = IndexError
    with pytest.raises(exceptions.Error):
        hg.set_args(Args())

    m_hg_hg_log.reset_mock()
    m_hg_hg_log.side_effect = [("123456789012", ), ("123456789012", )]
    hg.set_args(Args(single=True))
    assert "123456789012" == hg.revset
    assert m_hg_hg_log.call_args_list == [mock.call(".")]

    m_hg_hg_log.reset_mock()
    m_hg_hg_log.side_effect = [("123456789012", ), ("123456789012", )]
    hg.set_args(Args(start="start", single=True))
    assert "123456789012" == hg.revset
    assert m_hg_hg_log.call_args_list == [mock.call("start")]
Example #49
0
def PresentDialog_Confirm_Call( message ):
  """Return a mock.call object for a call to vimsupport.PresentDialog, as called
  why vimsupport.Confirm with the supplied confirmation message"""
  return call( message, [ 'Ok', 'Cancel' ] )
Example #50
0
 def _check_agent_method_called(self, calls):
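     # Each expected command should appear among the namespace execute()
     # calls; order does not matter.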
     self.mock_ip.netns.execute.assert_has_calls(
         [mock.call(call, check_exit_code=False) for call in calls],
         any_order=True)
Example #51
0
 def testOneFile(self):
     self.runTest(['/sys/kernel/mm/transparent_hugepage/enabled=always'], [
         mock.call('echo "always" | sudo tee '
                   '/sys/kernel/mm/transparent_hugepage/enabled')
     ])
Example #52
0
    def test_simple_alarm_trip(self, utcnow):
        utcnow.return_value = datetime.datetime(2015, 1, 26, 12, 57, 0, 0)
        self._set_all_alarms('ok')
        avgs = self._get_stats(
            60,
            [self.alarms[0].rule['threshold'] + v for v in moves.xrange(1, 6)])
        maxs = self._get_stats(
            300,
            [self.alarms[1].rule['threshold'] - v for v in moves.xrange(4)])
        avgs2 = self._get_stats(
            50,
            [self.alarms[2].rule['threshold'] + v for v in moves.xrange(1, 7)])

        self.requests.get.side_effect = [avgs, maxs]
        self.requests.post.side_effect = [avgs2]
        self._evaluate_all_alarms()

        expected_headers = {
            'X-Auth-Token': 'fake_token',
            'Content-Type': 'application/json'
        }

        start_alarm1 = "2015-01-26T12:51:00"
        start_alarm2 = "2015-01-26T12:32:00"
        start_alarm3 = "2015-01-26T12:51:10"
        end = "2015-01-26T12:57:00"

        self.assertEqual([
            mock.call(url='http://localhost:8041/v1/resource/instance/'
                      'my_instance/metric/cpu_util/measures',
                      params={
                          'aggregation': 'mean',
                          'start': start_alarm1,
                          'end': end
                      },
                      headers=expected_headers),
            mock.call(url='http://localhost:8041/v1/aggregation/metric',
                      params={
                          'aggregation': 'max',
                          'start': start_alarm2,
                          'end': end,
                          'metric[]': [
                              '0bb1604d-1193-4c0a-b4b8-74b170e35e83',
                              '9ddc209f-42f8-41e1-b8f1-8804f59c4053'
                          ]
                      },
                      headers=expected_headers)
        ], self.requests.get.mock_calls)
        self.assertEqual([
            mock.call(url='http://localhost:8041/v1/aggregation/resource/'
                      'instance/metric/cpu_util',
                      params={
                          'aggregation': 'mean',
                          'start': start_alarm3,
                          'end': end
                      },
                      data='{"=": {"server_group": "my_autoscaling_group"}}',
                      headers=expected_headers),
        ], self.requests.post.mock_calls)

        self._assert_all_alarms('alarm')
        expected = [
            mock.call(alarm.alarm_id, state='alarm') for alarm in self.alarms
        ]
        update_calls = self.api_client.alarms.set_state.call_args_list
        self.assertEqual(expected, update_calls)
        reasons = [
            'Transition to alarm due to 5 samples outside'
            ' threshold, most recent: %s' % avgs.values[-1],
            'Transition to alarm due to 4 samples outside'
            ' threshold, most recent: %s' % maxs.values[-1],
            'Transition to alarm due to 6 samples outside'
            ' threshold, most recent: %s' % avgs2.values[-1],
        ]
        reason_datas = [
            self._reason_data('outside', 5, avgs.values[-1]),
            self._reason_data('outside', 4, maxs.values[-1]),
            self._reason_data('outside', 6, avgs2.values[-1])
        ]
        expected = [
            mock.call(alarm, 'ok', reason,
                      reason_data) for alarm, reason, reason_data in zip(
                          self.alarms, reasons, reason_datas)
        ]
        self.assertEqual(expected, self.notifier.notify.call_args_list)
Example #53
0
def CreateTestLinuxVm():
    vm_spec = pkb_common_test_case.CreateTestVmSpec()
    return pkb_common_test_case.TestLinuxVirtualMachine(vm_spec=vm_spec)


def CreateCentos7Vm():
    vm_spec = pkb_common_test_case.CreateTestVmSpec()
    return TestCentos7VirtualMachine(vm_spec)


# /proc/cmdline on a GCP CentOS7 vm
_CENTOS7_KERNEL_COMMAND_LINE = (
    'BOOT_IMAGE=/boot/vmlinuz-3.10.0-1127.13.1.el7.x86_64 '
    'root=UUID=1-2-3-4-5 ro crashkernel=auto console=ttyS0,38400n8')

_DISABLE_YUM_CRON = mock.call('sudo systemctl disable yum-cron.service',
                              ignore_failure=True)


class TestCentos7VirtualMachine(linux_virtual_machine.CentOs7Mixin,
                                pkb_common_test_case.TestVirtualMachine):
    user_name = 'perfkit'


class TestSetFiles(pkb_common_test_case.PkbCommonTestCase):
    def runTest(self, set_files, calls):
        """Run a SetFiles test.

    Args:
      set_files: the value of FLAGS.set_files
      calls: a list of mock.call() objects giving the expected calls to
        vm.RemoteCommand() for the test.
Example #54
0
def EventNotification_FileReadyToParse_NonDiagnostic_ConfirmExtraConf_test(
    ycm ):

  # This test validates the behaviour of YouCompleteMe.HandleFileParseRequest
  # in combination with YouCompleteMe.OnFileReadyToParse when the completer
  # raises the (special) UnknownExtraConf exception
  FILE_NAME = 'a_file'
  MESSAGE = ( 'Found ' + FILE_NAME + '. Load? \n\n(Question can be '
              'turned off with options, see YCM docs)' )

  def UnknownExtraConfResponse( *args ):
    raise UnknownExtraConf( FILE_NAME )

  with patch( 'ycm.client.base_request.BaseRequest.PostDataToHandler',
              new_callable = ExtendedMock ) as post_data_to_handler:
    with MockArbitraryBuffer( 'some_filetype' ):
      with MockEventNotification( UnknownExtraConfResponse ):

        # When the user accepts the extra conf, we load it
        with patch( 'ycm.vimsupport.PresentDialog',
                    return_value = 0,
                    new_callable = ExtendedMock ) as present_dialog:
          ycm.OnFileReadyToParse()
          ok_( ycm.FileParseRequestReady() )
          ycm.HandleFileParseRequest()

          present_dialog.assert_has_exact_calls( [
            PresentDialog_Confirm_Call( MESSAGE ),
          ] )
          post_data_to_handler.assert_has_exact_calls( [
            call( { 'filepath': FILE_NAME }, 'load_extra_conf_file' )
          ] )

          # Subsequent calls don't re-raise the warning
          ycm.HandleFileParseRequest()

          present_dialog.assert_has_exact_calls( [
            PresentDialog_Confirm_Call( MESSAGE )
          ] )
          post_data_to_handler.assert_has_exact_calls( [
            call( { 'filepath': FILE_NAME }, 'load_extra_conf_file' )
          ] )

          ok_( ycm.ShouldResendFileParseRequest() )

          # But it does if a subsequent event raises again
          ycm.OnFileReadyToParse()
          ok_( ycm.FileParseRequestReady() )
          ycm.HandleFileParseRequest()

          present_dialog.assert_has_exact_calls( [
            PresentDialog_Confirm_Call( MESSAGE ),
            PresentDialog_Confirm_Call( MESSAGE ),
          ] )
          post_data_to_handler.assert_has_exact_calls( [
            call( { 'filepath': FILE_NAME }, 'load_extra_conf_file' ),
            call( { 'filepath': FILE_NAME }, 'load_extra_conf_file' )
          ] )

          ok_( ycm.ShouldResendFileParseRequest() )

        post_data_to_handler.reset_mock()

        # When the user rejects the extra conf, we reject it
        with patch( 'ycm.vimsupport.PresentDialog',
                    return_value = 1,
                    new_callable = ExtendedMock ) as present_dialog:
          ycm.OnFileReadyToParse()
          ok_( ycm.FileParseRequestReady() )
          ycm.HandleFileParseRequest()

          present_dialog.assert_has_exact_calls( [
            PresentDialog_Confirm_Call( MESSAGE ),
          ] )
          post_data_to_handler.assert_has_exact_calls( [
            call( { 'filepath': FILE_NAME }, 'ignore_extra_conf_file' )
          ] )

          # Subsequent calls don't re-raise the warning
          ycm.HandleFileParseRequest()

          present_dialog.assert_has_exact_calls( [
            PresentDialog_Confirm_Call( MESSAGE )
          ] )
          post_data_to_handler.assert_has_exact_calls( [
            call( { 'filepath': FILE_NAME }, 'ignore_extra_conf_file' )
          ] )

          ok_( ycm.ShouldResendFileParseRequest() )

          # But it does if a subsequent event raises again
          ycm.OnFileReadyToParse()
          ok_( ycm.FileParseRequestReady() )
          ycm.HandleFileParseRequest()

          present_dialog.assert_has_exact_calls( [
            PresentDialog_Confirm_Call( MESSAGE ),
            PresentDialog_Confirm_Call( MESSAGE ),
          ] )
          post_data_to_handler.assert_has_exact_calls( [
            call( { 'filepath': FILE_NAME }, 'ignore_extra_conf_file' ),
            call( { 'filepath': FILE_NAME }, 'ignore_extra_conf_file' )
          ] )

          ok_( ycm.ShouldResendFileParseRequest() )
Example #55
0
 def test_register_servicecatalog_commands(self):
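     # Registration should hook inject_commands into the
     # 'building-command-table.servicecatalog' event.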
     event_emitter = mock.Mock()
     register_servicecatalog_commands(event_emitter)
     event_emitter.register.assert_has_calls(
         [call('building-command-table.servicecatalog', inject_commands)])
Example #56
0
 def assertRemoteHostCalled(self, *calls):
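     # Helper: the expected commands must match remote_command's call list
     # exactly and in order.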
     self.assertEqual([mock.call(call) for call in calls],
                      self.remote_command.call_args_list)
Example #57
0
 def _build_calls(self, *courses):
     """ Builds a list of mock.call instances representing calls to the reindexing method """
     return [mock.call(self.store, course.id) for course in courses]
Example #58
0
    def test_listener_rejector(self, mock_UDPTendril, mock_track_tendril,
                               mock_socket):
        mock_socket.return_value.recvfrom.side_effect = [
            ('msg1', ('127.0.0.2', 8082)),
            ('msg2', ('127.0.0.3', 8083)),
            ('msg3', ('127.0.0.4', 8084)),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
            TestException(),
        ]
        tendrils = [mock.Mock(), mock.Mock(), mock.Mock()]
        mock_UDPTendril.side_effect = tendrils[:]
        acceptor = mock.Mock(side_effect=application.RejectConnection())
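        # Every connection is rejected by the acceptor, so no tendril should
        # be tracked or receive frames.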
        manager = udp.UDPTendrilManager()
        manager.running = True

        with self.assertRaises(TestException):
            manager.listener(acceptor, None)

        mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_DGRAM)
        mock_socket.return_value.assert_has_calls([
            mock.call.bind(('', 0)),
            mock.call.getsockname(),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.recvfrom(4096),
            mock.call.close(),
        ])
        self.assertEqual(manager.local_addr, ('127.0.0.1', 8080))
        mock_UDPTendril.assert_has_calls([
            mock.call(manager, ('127.0.0.1', 8080), ('127.0.0.2', 8082)),
            mock.call(manager, ('127.0.0.1', 8080), ('127.0.0.3', 8083)),
            mock.call(manager, ('127.0.0.1', 8080), ('127.0.0.4', 8084)),
        ])
        acceptor.assert_has_calls([
            mock.call(tendrils[0]),
            mock.call(tendrils[1]),
            mock.call(tendrils[2]),
        ])
        self.assertFalse(mock_track_tendril.called)
        self.assertFalse(tendrils[0]._recv_frameify.called)
        self.assertFalse(tendrils[1]._recv_frameify.called)
        self.assertFalse(tendrils[2]._recv_frameify.called)
Example #59
0
    def test_create_disk_standard_root_is_overlay(
        self, mock_rand, mock_temp, mock_getsize, mock_exists,
        mock_grub_dir, mock_command, mock_squashfs, mock_fs,
        mock_DeviceProvider
    ):
        mock_rand.return_value = 15
        self.disk_builder.root_filesystem_is_overlay = True
        self.disk_builder.volume_manager_name = None
        squashfs = Mock()
        mock_squashfs.return_value = squashfs
        mock_getsize.return_value = 1048576
        tempfile = Mock()
        tempfile.name = 'kiwi-tempname'
        mock_temp.return_value = tempfile
        mock_exists.return_value = True
        self.disk_builder.initrd_system = 'dracut'
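        # An overlay root is built as two squashfs images: an initial pass and
        # a final image that additionally excludes boot files and build
        # metadata.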

        m_open = mock_open()
        with patch('builtins.open', m_open, create=True):
            self.disk_builder.create_disk()

        assert mock_squashfs.call_args_list == [
            call(
                custom_args={'compression': None},
                device_provider=mock_DeviceProvider.return_value,
                root_dir='root_dir'
            ), call(
                custom_args={'compression': None},
                device_provider=mock_DeviceProvider.return_value,
                root_dir='root_dir'
            )
        ]
        assert squashfs.create_on_file.call_args_list == [
            call(exclude=['var/cache/kiwi'], filename='kiwi-tempname'),
            call(exclude=[
                'image', '.profile', '.kconfig', 'run/*', 'tmp/*',
                '.buildenv', 'var/cache/kiwi',
                'boot/*', 'boot/.*', 'boot/efi/*', 'boot/efi/.*'
            ], filename='kiwi-tempname')
        ]
        self.disk.create_root_readonly_partition.assert_called_once_with(11)
        assert mock_command.call_args_list[2] == call(
            ['dd', 'if=kiwi-tempname', 'of=/dev/readonly-root-device']
        )
        assert m_open.return_value.write.call_args_list == [
            call('kiwi_BootPart="1"\n'),
            call('kiwi_RootPart="1"\n'),
            call('0x0f0f0f0f\n'),
            call('boot_cmdline\n'),
            call(b'\x0f\x0f\x0f\x0f')
        ]
        assert self.boot_image_task.include_module.call_args_list == [
            call('kiwi-overlay'), call('kiwi-repart')
        ]
        self.boot_image_task.omit_module.assert_called_once_with('multipath')
        self.boot_image_task.write_system_config_file.assert_called_once_with(
            config={'modules': ['kiwi-overlay']}
        )
Example #60
0
 def test_loc_main(self):
     """Testing main."""
     with patch('sys.exit') as mocked_exit:
         loc = main(**self.default_options())
         assert_that(mocked_exit.mock_calls, equal_to([call(1)]))
         self.verify_results(loc)