Example #1
0
 def test_one_attended(self):
     """A lone student record triggers the attendance certificate only."""
     roster = [{'Curso': '', 'Nombre': '', 'Apellido': ''}]
     attendance_mock = create_autospec(Certificate)
     completion_mock = create_autospec(Certificate)
     all_students_certificates(roster, attendance_mock, completion_mock)
     self.assertTrue(attendance_mock.generate.called)
     self.assertFalse(completion_mock.generate.called)
Example #2
0
def test_read():
    """TopLevelTickStore.read concatenates the reads of every sub-library.

    Two (identical) sub-libraries are returned by _get_libraries; the result
    must be pandas.concat of both library reads, each call restricted to that
    library's own date-range intersection.
    """
    self = create_autospec(TopLevelTickStore)
    tsl = TickStoreLibrary(create_autospec(TickStore), create_autospec(DateRange))
    self._get_libraries.return_value = [tsl, tsl]
    dr = create_autospec(DateRange)
    # Patch concat so its inputs can be asserted without real DataFrames.
    with patch("pandas.concat") as concat:
        res = TopLevelTickStore.read(
            self, sentinel.symbol, dr, columns=sentinel.include_columns, include_images=sentinel.include_images
        )
    assert concat.call_args_list == [call([tsl.library.read.return_value, tsl.library.read.return_value])]
    assert res == concat.return_value
    # Same library object twice, hence two identical read() invocations.
    assert tsl.library.read.call_args_list == [
        call(
            sentinel.symbol,
            tsl.date_range.intersection.return_value,
            sentinel.include_columns,
            include_images=sentinel.include_images,
        ),
        call(
            sentinel.symbol,
            tsl.date_range.intersection.return_value,
            sentinel.include_columns,
            include_images=sentinel.include_images,
        ),
    ]
Example #3
0
    def test_create_autospec(self):
        """Instances produced from an autospecced class (or from an
        autospecced instance) must reject being called."""
        klass_mock = create_autospec(X)
        obj = klass_mock()
        self.assertRaises(TypeError, obj)

        inst_mock = create_autospec(X())
        self.assertRaises(TypeError, inst_mock)
Example #4
0
def test_mongo_date_range_query():
    """_mongo_date_range_query appears to widen the requested range so chunks
    that start before the range but overlap it are included.

    The aggregate pipeline picks, per symbol, the latest chunk starting at or
    before the range start; find_one then fetches each chunk's end time.  The
    resulting query starts at 12:00 (the chunk whose end overlaps the range)
    and keeps the requested upper bound.
    """
    self = create_autospec(TickStore)
    self._collection = create_autospec(Collection)
    self._symbol_query.return_value = {"sy": {"$in" : ["s1" , "s2"]}}
    # One "latest chunk start" row per symbol, as the aggregation would return.
    self._collection.aggregate.return_value = iter([{"_id": "s1", "start": dt(2014, 1, 1, 0, 0, tzinfo=mktz())},
                                                    {"_id": "s2", "start": dt(2014, 1, 1, 12, 0, tzinfo=mktz())}])

    # Chunk end times handed back for the two find_one lookups, in call order.
    self._collection.find_one.side_effect = [
        {'e': dt(2014, 1, 1, 15, 0, tzinfo=mktz())},
        {'e': dt(2014, 1, 2, 12, 0, tzinfo=mktz())}]

    query = TickStore._mongo_date_range_query(self, 'sym', DateRange(dt(2014, 1, 2, 0, 0, tzinfo=mktz()),
                                                                     dt(2014, 1, 3, 0, 0, tzinfo=mktz())))

    assert self._collection.aggregate.call_args_list == [call([
     {"$match": {"s": {"$lte": dt(2014, 1, 2, 0, 0, tzinfo=mktz())}, "sy": {"$in" : ["s1" , "s2"]}}},
     {"$project": {"_id": 0, "s": 1, "sy": 1}},
     {"$group": {"_id": "$sy", "start": {"$max": "$s"}}},
     {"$sort": {"start": 1}}])]

    assert self._collection.find_one.call_args_list == [
        call({'sy': 's1', 's': dt(2014, 1, 1, 0, 0, tzinfo=mktz())}, {'e': 1}),
        call({'sy': 's2', 's': dt(2014, 1, 1, 12, 0, tzinfo=mktz())}, {'e': 1})]

    assert query == {'s': {'$gte': dt(2014, 1, 1, 12, 0, tzinfo=mktz()), '$lte': dt(2014, 1, 3, 0, 0, tzinfo=mktz())}}
    def setUp(self):
        """Shared fixtures for VNIC/VLAN connection tests: mocked pyVmomi
        services, a pre-wired VNicDeviceMapper and the connector/factory
        collaborators."""
        self.vlan_id_gen = 'gen_id'
        self.name_gen = 'gen_name'
        self.spec = Mock()
        self.vm = Mock()
        self.pv_service = Mock()
        # NOTE(review): Mock(self.vm) passes the vm as the mock *spec*, not as
        # its return value -- presumably Mock(return_value=self.vm) was
        # intended; confirm against the tests that use find_by_uuid.
        self.pv_service.find_by_uuid = Mock(self.vm)
        self.si = Mock()
        self.vm_uuid = 'uuid'
        self.vlan_id = 100
        self.spec_type = Mock()

        # A fully populated mapper, as connect_by_mapping would produce.
        vnic_device_mapper = create_autospec(spec=VNicDeviceMapper)
        vnic_device_mapper.vnic = create_autospec(spec=vim.vm.device.VirtualEthernetCard)
        vnic_device_mapper.vnic.macAddress = 'AA-BB'
        vnic_device_mapper.vnic.deviceInfo = Mock()
        vnic_device_mapper.vnic.deviceInfo.label = 'AA-BB'
        vnic_device_mapper.network = Mock()
        vnic_device_mapper.network.name = 'the network'
        vnic_device_mapper.network.key = 'keyyyyyey'
        vnic_device_mapper.requested_vnic = 'requested'

        self.dv_connector = Mock()
        self.dv_connector.connect_by_mapping = Mock(return_value=[vnic_device_mapper])
        self.dv_port_name_gen = Mock()
        self.vlan_spec_factory = Mock()
        self.vlan_id_range_parser = Mock()
        self.vlan_id_range_parser.parse_vlan_id = Mock(return_value=self.vlan_id_gen)
        self.dv_port_name_gen.generate_port_group_name = Mock(return_value=self.name_gen)
        self.vlan_spec_factory.get_vlan_spec = Mock(return_value=self.spec)
Example #6
0
 def test_handle_exception_toomanyredirects(self, mock_print_function):
     """A TooManyRedirects exception is reported via exactly two prints."""
     mock_request = mock.create_autospec(Request).return_value
     mock_request.url = "htt;//mockurl.mock"
     mock_exception = mock.create_autospec(exceptions.TooManyRedirects).return_value
     mock_exception.__str__.return_value = "Number of maximum redirections exceeded"
     self.request_sender.handle_request_exception(mock_request, mock_exception)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(mock_print_function.call_count, 2)
Example #7
0
 def test_handle_exception_timeout(self, mock_print_function):
     """A Timeout exception is reported via exactly two prints."""
     mock_request = mock.create_autospec(Request).return_value
     mock_request.url = "http://mockurl.mock"
     mock_exception = mock.create_autospec(exceptions.Timeout).return_value
     mock_exception.__str__.return_value = "Timeout reached"
     self.request_sender.handle_request_exception(mock_request, mock_exception)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(mock_print_function.call_count, 2)
Example #8
0
 def test_handle_exception_httperror(self, mock_print_function):
     """An HTTPError exception is reported via exactly two prints."""
     mock_request = mock.create_autospec(Request).return_value
     mock_request.url = "http://mockurl.mock"
     mock_exception = mock.create_autospec(exceptions.HTTPError).return_value
     mock_exception.__str__.return_value = "Invalid HTTP respons"
     self.request_sender.handle_request_exception(mock_request, mock_exception)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(mock_print_function.call_count, 2)
Example #9
0
 def test_handle_exception_urlrequired(self, mock_print_function):
     """A URLRequired exception is reported via exactly two prints."""
     mock_request = mock.create_autospec(Request).return_value
     mock_request.url = "htt;//mockurl.mock"
     mock_exception = mock.create_autospec(exceptions.URLRequired).return_value
     mock_exception.__str__.return_value = "Invalid url"
     self.request_sender.handle_request_exception(mock_request, mock_exception)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(mock_print_function.call_count, 2)
Example #10
0
 def test_get_binary(self):
     """get_binary returns the raw content of the session's response."""
     mock_request = mock.create_autospec(Request).return_value
     mock_response = mock.create_autospec(Response).return_value
     mock_response.content = "<html><a href='url1'>text1</a><a href='url2'>text2</a></html>"
     self.request_sender.session.send.return_value = mock_response
     result = self.request_sender.get_binary(mock_request)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(result, mock_response.content)
Example #11
0
 def test_handle_exception_connection_error(self, mock_print_function):
     """A ConnectionError exception is reported via exactly two prints."""
     mock_request = mock.create_autospec(Request).return_value
     mock_request.url = "htt;//mockurl.mock"
     mock_exception = mock.create_autospec(exceptions.ConnectionError).return_value
     mock_exception.__str__.return_value = "DNS failure"
     self.request_sender.handle_request_exception(mock_request, mock_exception)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(mock_print_function.call_count, 2)
Example #12
0
 def setUp(self):
     """Build an ItemProcessor wired to mocked queue and writer fixtures."""
     config_stub = mock_factory.create_mock_config()
     queue_stub = mock.create_autospec(queue).return_value
     writer_stub = mock.create_autospec(ItemWriter).return_value
     self.item_processor = ItemProcessor(config_stub, queue_stub)
     self.item_processor.item_writer = writer_stub
     self.item_processor.no_items_received = True
Example #13
0
 def test_handle_exception_baseexception(self, mock_print_function):
     """Even a bare BaseException is reported via exactly two prints."""
     mock_request = mock.create_autospec(Request).return_value
     mock_request.url = "http://mockurl.mock"
     mock_exception = mock.create_autospec(BaseException).return_value
     mock_exception.__str__.return_value = "BaseException message"
     self.request_sender.handle_request_exception(mock_request, mock_exception)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(mock_print_function.call_count, 2)
Example #14
0
def test_get_verbose_status_of_marathon_app_column_alignment():
    """Each column's values start at the same index as its header, regardless
    of how wide the values are."""
    fake_app = mock.create_autospec(marathon.models.app.MarathonApp)
    fake_app.version = '2015-01-15T05:30:49.862Z'
    fake_app.id = '/fake--service'

    fake_task1 = mock.create_autospec(marathon.models.app.MarathonTask)
    fake_task1.id = 'fake_task1_id'
    fake_task1.host = 'fake_deployed_host'
    fake_task1.ports = [6666]
    fake_task1.staged_at = datetime.datetime.fromtimestamp(0)
    fake_task1.health_check_results = []

    # Second task has a much longer hostname to stress the alignment.
    fake_task2 = mock.create_autospec(marathon.models.app.MarathonTask)
    fake_task2.id = 'fake_task2_id'
    fake_task2.host = 'fake_deployed_host_with_a_really_long_name'
    fake_task2.ports = [6666]
    fake_task2.staged_at = datetime.datetime.fromtimestamp(0)
    fake_task2.health_check_results = []

    fake_app.tasks = [fake_task1, fake_task2]
    tasks, out = marathon_serviceinit.get_verbose_status_of_marathon_app(fake_app)
    # Output layout: three preamble lines, a header line, then one line per task.
    (_, _, _, headers_line, task1_line, task2_line) = out.split('\n')
    assert headers_line.index('Host deployed to') == task1_line.index('fake_deployed_host')
    assert headers_line.index('Host deployed to') == task2_line.index('fake_deployed_host_with_a_really_long_name')
    assert headers_line.index('Deployed at what localtime') == task1_line.index('1970-01-01T00:00')
    assert headers_line.index('Deployed at what localtime') == task2_line.index('1970-01-01T00:00')
def test_evaluate_initializer_with_fallback_calls_fallback():
    """When the primary initializer raises, the fallback gets the shape."""
    primary = mock.create_autospec(Initializer())
    primary.side_effect = InitializationError
    backup = mock.create_autospec(Initializer())
    backup.side_effect = lambda shape: np.array(1)
    evaluate_initializer(primary, (7, 5), backup)
    backup.assert_called_once_with((7, 5))
    def test_authenticate(self):
        """authenticate chains the five auth stages, each fed the (None)
        result of the previous one."""
        # Autospec every pipeline stage so the deferred chain proceeds with
        # None results and calls can be asserted afterwards.
        self.auth_backend._authentication_preprocessing = mock.create_autospec(
            self.auth_backend._authentication_preprocessing, return_value=None
        )
        self.auth_backend._start_authentication = mock.create_autospec(
            self.auth_backend._start_authentication, return_value=None
        )
        self.auth_backend._process_challenge = mock.create_autospec(
            self.auth_backend._process_challenge, return_value=None
        )
        self.auth_backend._extract_data = mock.create_autospec(self.auth_backend._extract_data, return_value=None)
        self.auth_backend._verify_session = mock.create_autospec(self.auth_backend._verify_session, return_value=None)

        d = self.auth_backend.authenticate(self.TEST_USER, self.TEST_PASS)

        def check(*args):
            # Runs once the deferred fires: verify each stage's arguments.
            self.auth_backend._authentication_preprocessing.assert_called_once_with(
                username=self.TEST_USER, password=self.TEST_PASS
            )
            self.auth_backend._start_authentication.assert_called_once_with(None, username=self.TEST_USER)
            self.auth_backend._process_challenge.assert_called_once_with(None, username=self.TEST_USER)
            self.auth_backend._extract_data.assert_called_once_with(None)
            self.auth_backend._verify_session.assert_called_once_with(None)

        d.addCallback(check)

        # Return the deferred so the test framework waits for check().
        return d
Example #17
0
    def setUp(self):
        """Monkey-patch libvirt image-creation helpers with autospecced mocks
        and route Backend.backend through this fixture."""
        super(ImageBackendFixture, self).setUp()

        # Mock template functions passed to cache
        self.mock_fetch_image = mock.create_autospec(libvirt_utils.fetch_image)
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.utils.fetch_image', self.mock_fetch_image))

        self.mock_fetch_raw_image = \
            mock.create_autospec(libvirt_utils.fetch_raw_image)
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.utils.fetch_raw_image',
            self.mock_fetch_raw_image))

        self.mock_create_ephemeral = \
            mock.create_autospec(driver.LibvirtDriver._create_ephemeral)
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.driver.LibvirtDriver._create_ephemeral',
            self.mock_create_ephemeral))

        self.mock_create_swap = \
            mock.create_autospec(driver.LibvirtDriver._create_swap)
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.driver.LibvirtDriver._create_swap',
            self.mock_create_swap))

        # Backend.backend creates all Image objects
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.imagebackend.Backend.backend',
            self._mock_backend))
Example #18
0
 def setup_job(self):
     """Build a Job with a mocked scheduler and run collection for context tests."""
     self.last_success = mock.Mock(run_time=datetime.datetime(2012, 3, 14))
     scheduler_mock = mock.create_autospec(scheduler.ConstantScheduler)
     runs = mock.create_autospec(
         JobRunCollection, last_success=self.last_success)
     self.job = job.Job("jobname", scheduler_mock, run_collection=runs)
     self.context = command_context.JobContext(self.job)
Example #19
0
  def testBasicCall(self):
    """do_request fetches the URL with an OAuth header built from the token."""
    api = rest_api._RestApi('scope')
    self.assertEqual(api.scopes, ['scope'])

    # Pre-resolved futures so the async token/urlfetch paths return instantly.
    fut_get_token = ndb.Future()
    fut_get_token.set_result('blah')
    api.get_token_async = mock.create_autospec(api.get_token_async,
                                               return_value=fut_get_token)

    fut_urlfetch = ndb.Future()
    fut_urlfetch.set_result(
        test_utils.MockUrlFetchResult(200, {'foo': 'bar'}, 'yoohoo'))
    api.urlfetch_async = mock.create_autospec(api.urlfetch_async,
                                              return_value=fut_urlfetch)

    res = api.do_request('http://example.com')

    # The mocked fetch result is passed through as (status, headers, body).
    self.assertEqual(res, (200, {'foo': 'bar'}, 'yoohoo'))
    api.urlfetch_async.assert_called_once_with(
        'http://example.com',
        headers={'authorization': 'OAuth blah'},
        follow_redirects=False,
        payload=None,
        method='GET',
        deadline=None,
        callback=None)
    def test_get_object_by_path_checks_childEntity(self):
        """
        Checks whether the function can grab from child entities:
        find_obj_by_path must succeed when FindChild only matches the
        folder named after pv_service.ChildEntity.
        """
        "#arrange"
        pv_service = pyVmomiService(None, None)

        def search_child(*args, **keys):
            # Only the ChildEntity-named folder is considered a match.
            if args[0].name == pv_service.ChildEntity:
                return True
            return False

        class FolderMock:
            childEntity = None

        folder = Mock(spec=FolderMock())
        folder.name = pv_service.ChildEntity
        get_folder = MagicMock(return_value=folder)
        pv_service.get_folder = get_folder

        si = create_autospec(spec=vim.ServiceInstance)
        si.RetrieveContent = Mock()
        si.content = create_autospec(spec=vim.ServiceInstanceContent())
        si.content.searchIndex = Mock()
        si.content.searchIndex.FindChild = MagicMock(side_effect=search_child)

        "#act"
        result = pv_service.find_obj_by_path(si, "", "", "")

        "#assert"
        self.assertTrue(result)
Example #21
0
 def test_create_page_processor(self, mock_item_processor_class):
     """create_item_processor builds an ItemProcessor from config and queue."""
     mock_config = mock.create_autospec(Config).return_value
     mock_item_queue = mock.create_autospec(queue).return_value
     mock_item_processor = mock.create_autospec(ItemProcessor).return_value
     mock_item_processor_class.return_value = mock_item_processor
     result = self.thread_factory.create_item_processor(mock_config, mock_item_queue)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(result, mock_item_processor)
Example #22
0
 def create_blank_response(cls, code, msg):
   """Build an autospecced Response carrying *code* and one detail *msg*."""
   # TODO(wfarner): Don't use a mock here.
   resp = create_autospec(spec=Response, instance=True)
   resp.responseCode = code
   resp.result = create_autospec(spec=Result, instance=True)
   resp.details = [ResponseDetail(message=msg)]
   return resp
Example #23
0
 def setup_resource(self):
     """Build a ConfigResource with mocked MCP/controller, yielding inside a
     patched www.respond so every test runs with the patch active."""
     self.mcp = mock.create_autospec(mcp.MasterControlProgram)
     self.resource = www.ConfigResource(self.mcp)
     self.controller = self.resource.controller = mock.create_autospec(
         controller.ConfigController)
     # The yield keeps the patch in place for the duration of the test.
     with mock.patch('tron.api.www.respond', autospec=True) as self.respond:
         yield
    def test_create_empty(self):
        """document.create({}) commits via the GAPIC and the resulting write
        result reflects an existing snapshot."""
        # Create a minimal fake GAPIC with a dummy response.
        from google.cloud.firestore_v1beta1.document import DocumentReference
        from google.cloud.firestore_v1beta1.document import DocumentSnapshot
        firestore_api = mock.Mock(spec=['commit'])
        document_reference = mock.create_autospec(DocumentReference)
        snapshot = mock.create_autospec(DocumentSnapshot)
        snapshot.exists = True
        document_reference.get.return_value = snapshot
        commit_response = mock.Mock(
            write_results=[document_reference],
            get=[snapshot],
            spec=['write_results'])
        firestore_api.commit.return_value = commit_response

        # Attach the fake GAPIC to a real client.
        client = _make_client('dignity')
        client._firestore_api_internal = firestore_api
        client.get_all = mock.MagicMock()
        client.get_all.exists.return_value = True

        # Actually make a document and call create().
        document = self._make_one('foo', 'twelve', client=client)
        document_data = {}
        write_result = document.create(document_data)
        self.assertTrue(write_result.get().exists)
Example #25
0
 def setup_resource(self):
     """Wire a ServiceResource around a fully mocked, enabled Service."""
     instance_collection = mock.create_autospec(
         serviceinstance.ServiceInstanceCollection)
     self.service = mock.create_autospec(
         service.Service,
         instances=instance_collection,
         enabled=True,
         config=mock.Mock(),
     )
     self.resource = www.ServiceResource(self.service)
     self.resource.controller = mock.create_autospec(controller.ServiceController)
Example #26
0
 def setup_jobrun(self):
     """Fixture state for rebuilding a JobRun from serialized state data."""
     self.action_graph = mock.create_autospec(actiongraph.ActionGraph)
     self.run_time = datetime.datetime(2012, 3, 14, 15, 9, 26)
     self.path = ['base', 'path']
     self.output_path = mock.create_autospec(filehandler.OutputPath)
     self.node_pool = mock.create_autospec(node.NodePool)
     # One serialized action run, as it would come out of the state store.
     self.action_run_state_data = [{
         'job_run_id': 'thejobname.22',
         'action_name': 'blingaction',
         'state': 'succeeded',
         'run_time': 'sometime',
         'start_time': 'sometime',
         'end_time': 'sometime',
         'command': 'doit',
         'node_name': 'thenode',
     }]
     # Serialized job-run envelope wrapping the action runs above.
     self.state_data = {
         'job_name': 'thejobname',
         'run_num': 22,
         'run_time': self.run_time,
         'node_name': 'thebox',
         'end_time': 'the_end',
         'start_time': 'start_time',
         'runs': self.action_run_state_data,
         'cleanup_run': None,
         'manual': True,
     }
     self.context = mock.Mock()
Example #27
0
 def test_job_runs_from_state(self):
     """Restoring four serialized runs yields four JobRun instances."""
     state_data = [
         dict(
             run_num=i,
             job_name="thename",
             run_time="sometime",
             start_time="start_time",
             end_time="sometime",
             cleanup_run=None,
             runs=[],
         ) for i in range(3, -1, -1)
     ]
     action_graph = mock.create_autospec(actiongraph.ActionGraph)
     output_path = mock.create_autospec(filehandler.OutputPath)
     context = mock.Mock()
     node_pool = mock.create_autospec(node.NodePool)
     runs = jobrun.job_runs_from_state(
         state_data,
         action_graph,
         output_path,
         context,
         node_pool,
     )
     assert len(runs) == 4
     # `type(x) is cls` (identity) is the correct exact-type check, and a
     # generator avoids building a throwaway list inside all().
     assert all(type(run) is jobrun.JobRun for run in runs)
Example #28
0
 def create_mock_scheduled_tasks(cls):
   """Return three autospecced ScheduledTasks ('foo', 'bar', 'baz'), each
   RUNNING with one RUNNING task event."""
   jobs = []
   for name in ['foo', 'bar', 'baz']:
     job = create_autospec(spec=ScheduledTask, instance=True)
     job.failure_count = 0
     job.assignedTask = create_autospec(spec=AssignedTask, instance=True)
     job.assignedTask.slaveHost = 'slavehost'
     job.assignedTask.task = create_autospec(spec=TaskConfig, instance=True)
     job.assignedTask.task.maxTaskFailures = 1
     job.assignedTask.task.executorConfig = ExecutorConfig(name='name', data='fake data')
     job.assignedTask.task.metadata = []
     job.assignedTask.task.job = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV, name=name)
     job.assignedTask.task.owner = Identity(role='mchucarroll')
     job.assignedTask.task.environment = 'test'
     job.assignedTask.task.jobName = 'woops'
     job.assignedTask.task.numCpus = 2
     job.assignedTask.task.ramMb = 2
     job.assignedTask.task.diskMb = 2
     job.assignedTask.instanceId = 4237894
     job.assignedTask.assignedPorts = None
     job.status = ScheduleStatus.RUNNING
     # A single task event so status history is non-empty.
     mockEvent = create_autospec(spec=TaskEvent, instance=True)
     mockEvent.timestamp = 28234726395
     mockEvent.status = ScheduleStatus.RUNNING
     mockEvent.message = "Hi there"
     job.taskEvents = [mockEvent]
     jobs.append(job)
   return jobs
Example #29
0
    def test_acquired(self):
        """
        Test the acquiring primitives: _acquired succeeds when our sequence
        owns the lock, fails (non-blocking) when another holder exists, and
        succeeds again after watching the blocker's delete event.
        """
        self.locker._sequence = '4'
        # (our_key, blocker): no blocker -> lock is ours.
        retval = ('/_locks/test_lock/4', None)
        self.locker._get_locker = mock.create_autospec(
            self.locker._get_locker, return_value=retval)
        self.assertTrue(self.locker._acquired())
        self.assertTrue(self.locker.is_taken)
        # A lower-sequence holder exists -> non-blocking acquire fails.
        retval = ('/_locks/test_lock/1', '/_locks/test_lock/4')
        self.locker._get_locker = mock.MagicMock(return_value=retval)
        self.assertFalse(self.locker._acquired(blocking=False))
        self.assertFalse(self.locker.is_taken)
        # Simulated etcd response: the blocking key gets deleted.
        d = {
            u'action': u'delete',
            u'node': {
                u'modifiedIndex': 190,
                u'key': u'/_locks/test_lock/1',
                u'value': self.locker.uuid
            }
        }
        self._mock_api(200, d)
        # First call sees a blocker, second (after pop) sees none.
        returns = [('/_locks/test_lock/1', '/_locks/test_lock/4'),  ('/_locks/test_lock/4', None)]

        def side_effect():
            return returns.pop()

        self.locker._get_locker = mock.create_autospec(
            self.locker._get_locker, side_effect=side_effect)
        self.assertTrue(self.locker._acquired())
Example #30
0
def test_invoke__triggers_max_errors():
    """With nine reported errors and max_errors=2, only two lines are
    reported individually and a summary comment is posted."""
    with open('imhotep/fixtures/10line.diff') as f:
        ten_diff = f.read()
    reporter = mock.create_autospec(PRReporter)
    tool = mock.create_autospec(Tool)
    manager = mock.create_autospec(RepoManager)
    # get_configs failing must not abort the run.
    tool.get_configs.side_effect = AttributeError
    # Nine flagged lines in one file -- well over the max_errors threshold.
    tool.invoke.return_value = {
        'f1.txt': {
            '1': 'there was an error',
            '2': 'there was an error',
            '3': 'there was an error',
            '4': 'there was an error',
            '5': 'there was an error',
            '6': 'there was an error',
            '7': 'there was an error',
            '8': 'there was an error',
            '9': 'there was an error',
        }
    }
    manager.clone_repo.return_value.diff_commit.return_value = ten_diff
    manager.clone_repo.return_value.tools = [tool]
    imhotep = Imhotep(
        pr_number=1,
        repo_manager=manager,
        commit_info=mock.Mock(),
    )
    imhotep.invoke(reporter=reporter, max_errors=2)

    assert reporter.report_line.call_count == 2
    assert reporter.post_comment.called
 def test_basic(self):
     """Autospeccing from the class and from an instance both yield a
     mock that satisfies the SomeClass checks."""
     for template in (SomeClass, SomeClass()):
         self._check_someclass_mock(create_autospec(template))
Example #32
0
 def setUp(self):
     """Provide a fresh autospecced Naming for every test."""
     self.naming = mock.create_autospec(naming.Naming)
Example #33
0
    def setupConfig(self, config_dict, startWorker=True):
        """Set up and start a master configured by ``config_dict``.

        @type config_dict: dict
        @param config_dict: The BuildmasterConfig dictionary.
        @param startWorker: also create a worker (pb or null protocol,
            depending on self.proto) and attach it to the started master.
        """
        # mock reactor.stop (which trial *really* doesn't
        # like test code to call!)
        stop = mock.create_autospec(reactor.stop)
        self.patch(reactor, 'stop', stop)

        if startWorker:
            if self.proto == 'pb':
                proto = {"pb": {"port": "tcp:0:interface=127.0.0.1"}}
                workerclass = worker.Worker
            elif self.proto == 'null':
                proto = {"null": {}}
                workerclass = worker.LocalWorker
            config_dict['workers'] = [workerclass("local1", password=Interpolate("localpw"), missing_timeout=0)]
            config_dict['protocols'] = proto

        m = yield getMaster(self, reactor, config_dict)
        self.master = m
        self.assertFalse(stop.called,
                         "startService tried to stop the reactor; check logs")

        if not startWorker:
            return

        if self.proto == 'pb':
            # We find out the worker port automatically
            workerPort = list(itervalues(m.pbmanager.dispatchers))[
                0].port.getHost().port

            # create a worker, and attach it to the master, it will be started, and stopped
            # along with the master
            worker_dir = FilePath(self.mktemp())
            worker_dir.createDirectory()
            sandboxed_worker_path = os.environ.get(
                "SANDBOXED_WORKER_PATH", None)
            if sandboxed_worker_path is None:
                self.w = Worker(
                    "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path,
                    False)
            else:
                # Run the worker in a sandboxed subprocess; make sure it is
                # shut down even if the test fails.
                self.w = SandboxedWorker(
                    "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path,
                    sandboxed_worker_path)
                self.addCleanup(self.w.shutdownWorker)

        elif self.proto == 'null':
            self.w = None

        if self.w is not None:
            self.w.setServiceParent(m)

        @defer.inlineCallbacks
        def dump():
            # On failure, dump the build database to aid debugging.
            if not self._passed:
                dump = StringIO()
                print(u"FAILED! dumping build db for debug", file=dump)
                builds = yield self.master.data.get(("builds",))
                for build in builds:
                    yield self.printBuild(build, dump, withLogs=True)

                raise self.failureException(dump.getvalue())
        self.addCleanup(dump)
def _create_mock(failing):
    """Return an autospecced DaemonThread whose is_alive() is ``not failing``."""
    thread = mock.create_autospec(DaemonThread)
    thread.is_alive.return_value = not failing
    return thread
Example #35
0
 class F(ModelChoiceFilter):
     # Replace get_queryset with an autospec that always returns the
     # enclosing test's qs fixture, so calls can still be asserted on.
     get_queryset = mock.create_autospec(ModelChoiceFilter.get_queryset,
                                         return_value=qs)
    def test_builtins(self):
        # used to fail with infinite recursion
        create_autospec(1)

        create_autospec(int)
        create_autospec('foo')
        create_autospec(str)
        create_autospec({})
        create_autospec(dict)
        create_autospec([])
        create_autospec(list)
        create_autospec(set())
        create_autospec(set)
        create_autospec(1.0)
        create_autospec(float)
        create_autospec(1j)
        create_autospec(complex)
        create_autospec(False)
        create_autospec(True)
    def test_create_autospec_keyword_arguments(self):
        class Foo(object):
            a = 3

        m = create_autospec(Foo, a='3')
        self.assertEqual(m.a, '3')
Example #38
0
def fake_process(**kwargs):
  """Return an autospecced psutil.Process whose accessors yield *kwargs*.

  Each keyword maps a Process method name to the value its mock returns,
  e.g. fake_process(pid=1) makes proc.pid() return 1.
  """
  proc = mock.create_autospec(psutil.Process, spec_set=True)
  # A plain loop, not a list comprehension built only for its side effects.
  for name, value in kwargs.items():
    getattr(proc, name).return_value = value
  return proc
Example #39
0
def group_service():
    """Fixture helper: a strict (spec_set) instance-like GroupService autospec."""
    return mock.create_autospec(GroupService, spec_set=True, instance=True)
Example #40
0
 def items(self):
     """Return five fresh autospecced ResultItemWidget mocks.

     A list comprehension replaces map+lambda; unlike the single-pass map
     iterator, the list can be iterated repeatedly, which is backward
     compatible for any caller that consumed the iterable.
     """
     return [mock.create_autospec(ResultItemWidget) for _ in range(5)]
Example #41
0
from cluster import *
from . import utils
import mock

# Canned proposals shared by the tests below.
PROPOSAL1 = Proposal(caller='cli', client_id=123, input='one')
PROPOSAL2 = Proposal(caller='cli', client_id=125, input='two')
PROPOSAL3 = Proposal(caller='cli', client_id=127, input='tre')

# Rebind the component classes to module-level autospecs so tests can assert
# on their construction; they are reset in Tests.setUp before each test.
Commander = mock.create_autospec(Commander)
Scout = mock.create_autospec(Scout)


class Tests(utils.ComponentTestCase):
    def setUp(self):
        """Reset the shared class mocks and build a Leader wired to them."""
        super(Tests, self).setUp()
        Scout.reset_mock()
        Commander.reset_mock()
        self.ldr = Leader(self.node, ['p1', 'p2'],
                          commander_cls=Commander,
                          scout_cls=Scout)

    def assertScoutStarted(self, ballot_num):
        """Assert one Scout was built for ballot_num and its start() called."""
        Scout.assert_called_once_with(self.node, ballot_num, ['p1', 'p2'])
        # Calling the mocked class again yields the same instance mock.
        scout = Scout(self.node, ballot_num, ['p1', 'p2'])
        scout.start.assert_called_once_with()

    def assertNoScout(self):
        """Assert the leader is not currently scouting."""
        self.assertFalse(self.ldr.scouting)

    def assertCommanderStarted(self, ballot_num, slot, proposal):
        Commander.assert_called_once_with(self.node, ballot_num, slot,
Example #42
0
    if table_name == 'IDMap' and column_names == 'identifier_id, ref_table':
        return [(u'supply_type_id', u'SupplyTypes'),
                (u'ghg_id', u'GreenhouseGases')]
    elif table_name == 'SupplyTypes' and column_names == 'id, name':
        return [(1, u'Blend'), (2, u'Conversion'), (3, u'Delivery'),
                (4, u'Import'), (5, u'Primary'), (6, u'Storage')]
    elif table_name == 'GreenhouseGases' and column_names == 'id, name':
        return [(1, u'CO2'), (2, u'CH4'), (3, u'N2O')]

    # if we've gotten this far without returning, something is amiss
    raise ValueError("Mock doesn't know how to provide this table read: " +
                     str(table_name) + ", " + str(column_names) + ", " +
                     str(filters))


# Module-level stand-in for util.sql_read_table: autospecced so call
# signatures are checked, with read_table supplying the canned rows.
mock_sql_read_table = mock.create_autospec(sql_read_table,
                                           side_effect=read_table)


@mock.patch('energyPATHWAYS.util.sql_read_table', mock_sql_read_table)
class TestIdToName(unittest.TestCase):
    """id_to_name lookups against the canned SQL tables served by the mock."""

    def test_basic_lookup(self):
        # Plain id -> name resolution for two different identifier columns.
        self.assertEqual(id_to_name('supply_type_id', 1), 'Blend')
        self.assertEqual(id_to_name('ghg_id', 2), 'CH4')

    def test_lookup_none_att(self):
        # A None id resolves to None rather than raising.
        self.assertIsNone(id_to_name('supply_type_id', None))

    def test_tuple_lookup(self):
        # 'tuple' mode pairs the stripped column name with the resolved name.
        self.assertEqual(id_to_name('supply_type_id', 1, 'tuple'),
                         ('supply_type', 'Blend'))
Example #43
0
def test_check_quota_Zero():
    """check_quota runs cleanly when the quota is set to zero."""
    binding = create_autospec(ArcticLibraryBinding)
    binding.quota = 0
    # Call the unbound method so the autospec stands in for self.
    ArcticLibraryBinding.check_quota(binding)
    def setUp(self):
        """Build a fully mocked OVS-agent environment for tunnel tests.

        Every bridge class, ip_lib helper and command executor is replaced
        with a mock so tests can drive the agent without touching the host.
        """
        super(TunnelTest, self).setUp()
        # Use the no-op firewall driver so security-group wiring stays inert.
        cfg.CONF.set_default('firewall_driver',
                             'neutron.agent.firewall.NoopFirewallDriver',
                             group='SECURITYGROUP')
        # Disable the periodic agent state-report loop.
        cfg.CONF.set_override('report_interval', 0, 'AGENT')

        # Canonical bridge names and OpenFlow port numbers reused by tests.
        self.INT_BRIDGE = 'integration_bridge'
        self.TUN_BRIDGE = 'tunnel_bridge'
        self.MAP_TUN_BRIDGE = 'tun_br_map'
        self.AUX_BRIDGE = 'ancillary_bridge'
        self.NET_MAPPING = ['net1:%s' % self.MAP_TUN_BRIDGE]
        self.INT_OFPORT = 11111
        self.TUN_OFPORT = 22222
        self.MAP_TUN_INT_OFPORT = 33333
        self.MAP_TUN_PHY_OFPORT = 44444

        # Local VLAN mappings for the three network types exercised.
        self.LVM = self.mod_agent.LocalVLANMapping(
            LV_ID, 'gre', None, LS_ID, VIF_PORTS)
        self.LVM_FLAT = self.mod_agent.LocalVLANMapping(
            LV_ID, 'flat', 'net1', LS_ID, VIF_PORTS)
        self.LVM_VLAN = self.mod_agent.LocalVLANMapping(
            LV_ID, 'vlan', 'net1', LS_ID, VIF_PORTS)

        # Fake veth endpoints handed back by IPWrapper.add_veth below.
        self.inta = mock.Mock()
        self.intb = mock.Mock()

        mock.patch.object(ovs_lib.BaseOVS, 'config',
                          new_callable=mock.PropertyMock,
                          return_value={}).start()

        # One autospecced bridge object per bridge name; lookup_br below
        # makes the patched bridge classes hand these back.
        self.ovs_bridges = {
            self.INT_BRIDGE: mock.create_autospec(
                self.br_int_cls('br-int')),
            self.TUN_BRIDGE: mock.create_autospec(
                self.br_tun_cls('br-tun')),
            self.MAP_TUN_BRIDGE: mock.create_autospec(
                self.br_phys_cls('br-phys')),
            self.AUX_BRIDGE: mock.create_autospec(
                ovs_lib.OVSBridge('br-aux')),
        }
        self.ovs_int_ofports = {
            'patch-tun': self.TUN_OFPORT,
            'int-%s' % self.MAP_TUN_BRIDGE: self.MAP_TUN_INT_OFPORT
        }

        def lookup_br(br_name, *args, **kwargs):
            # Resolve a bridge name to its pre-built autospec mock.
            return self.ovs_bridges[br_name]

        # Patch every bridge class so constructing one returns our mocks.
        self.mock_int_bridge_cls = mock.patch(self._BR_INT_CLASS,
                                              autospec=True).start()
        self.mock_int_bridge_cls.side_effect = lookup_br
        self.mock_phys_bridge_cls = mock.patch(self._BR_PHYS_CLASS,
                                               autospec=True).start()
        self.mock_phys_bridge_cls.side_effect = lookup_br
        self.mock_tun_bridge_cls = mock.patch(self._BR_TUN_CLASS,
                                              autospec=True).start()
        self.mock_tun_bridge_cls.side_effect = lookup_br
        self.mock_aux_bridge_cls = mock.patch(
            'neutron.agent.common.ovs_lib.OVSBridge',
            autospec=True).start()
        self.mock_aux_bridge_cls.side_effect = lookup_br

        # Canned behaviour for the integration bridge mock.
        self.mock_int_bridge = self.ovs_bridges[self.INT_BRIDGE]
        self.mock_int_bridge.add_port.return_value = self.MAP_TUN_INT_OFPORT
        self.mock_int_bridge.add_patch_port.side_effect = (
            lambda tap, peer: self.ovs_int_ofports[tap])
        self.mock_int_bridge.port_exists.return_value = False
        self.mock_int_bridge.get_vif_ports.return_value = []
        self.mock_int_bridge.get_ports_attributes.return_value = []
        self.mock_int_bridge.db_get_val.return_value = {}

        # Canned behaviour for the physical-mapping bridge mock.
        self.mock_map_tun_bridge = self.ovs_bridges[self.MAP_TUN_BRIDGE]
        self.mock_map_tun_bridge.br_name = self.MAP_TUN_BRIDGE
        self.mock_map_tun_bridge.add_port.return_value = (
            self.MAP_TUN_PHY_OFPORT)
        self.mock_map_tun_bridge.add_patch_port.return_value = (
            self.MAP_TUN_PHY_OFPORT)
        self.mock_map_tun_bridge.port_exists.return_value = False

        # Canned behaviour for the tunnel bridge mock.
        self.mock_tun_bridge = self.ovs_bridges[self.TUN_BRIDGE]
        self.mock_tun_bridge.add_port.return_value = self.INT_OFPORT
        self.mock_tun_bridge.add_patch_port.return_value = self.INT_OFPORT

        self.ipdevice = mock.patch.object(ip_lib, 'IPDevice').start()

        self.ipwrapper = mock.patch.object(ip_lib, 'IPWrapper').start()
        add_veth = self.ipwrapper.return_value.add_veth
        add_veth.return_value = [self.inta, self.intb]

        # BaseOVS reports exactly our four bridges...
        self.get_bridges = mock.patch.object(ovs_lib.BaseOVS,
                                             'get_bridges').start()
        self.get_bridges.return_value = [self.INT_BRIDGE,
                                         self.TUN_BRIDGE,
                                         self.MAP_TUN_BRIDGE,
                                         self.AUX_BRIDGE]
        # ...and echoes a known bridge name back as its external bridge id.
        self.get_bridge_external_bridge_id = mock.patch.object(
            ovs_lib.BaseOVS,
            'get_bridge_external_bridge_id').start()
        self.get_bridge_external_bridge_id.side_effect = (
            lambda bridge: bridge if bridge in self.ovs_bridges else None)

        self.execute = mock.patch('neutron.agent.common.utils.execute').start()

        self._define_expected_calls()
Example #45
0
def test_set_quota():
    """set_quota persists the value and resets the check countdown."""
    binding = create_autospec(ArcticLibraryBinding)
    ArcticLibraryBinding.set_quota(binding, 10000)
    binding.set_library_metadata.assert_called_once_with('QUOTA', 10000)
    assert binding.quota_countdown == 0
    assert binding.quota == 10000
Example #46
0
def test_check_quota_countdown():
    """One check_quota call decrements a positive countdown (10 -> 9)."""
    binding = create_autospec(ArcticLibraryBinding)
    binding.quota = 10
    binding.quota_countdown = 10
    ArcticLibraryBinding.check_quota(binding)
    assert binding.quota_countdown == 9
Example #47
0
 def config(self):
     """Return a spec_set autospec of Configuration with no datadog image."""
     cfg = create_autospec(Configuration([]), spec_set=True)
     cfg.datadog_container_image = None
     return cfg
Example #48
0
def test_get_quota():
    """get_quota reads the stored 'QUOTA' value from library metadata."""
    binding = create_autospec(ArcticLibraryBinding)
    binding.get_library_metadata.return_value = 42
    assert ArcticLibraryBinding.get_quota(binding) == 42
    binding.get_library_metadata.assert_called_once_with('QUOTA')
Example #49
0
 def v2(self):
     """Mock BaseTransformer; calling it yields a {"version": 3} payload."""
     return create_autospec(
         BaseTransformer, spec_set=True, return_value={"version": 3})
Example #50
0
def user_service(pyramid_config):
    """Register and return an autospecced UserService as the "user" service."""
    svc = mock.create_autospec(UserService, spec_set=True, instance=True)
    pyramid_config.register_service(svc, name="user")
    return svc
Example #51
0
    def test_export_staging_delegate_validation_failed(self) -> None:
        """A ViewExportValidationError raised by a delegate propagates.

        Two export configs are routed to two exporter delegates; the second
        delegate's export_and_validate raises, and
        export_views_with_exporters must let that error bubble up.

        Fix: the original test built and configured a first pair of
        delegate_one/delegate_two mocks and then immediately rebound both
        names to fresh mocks, so that first setup was dead code; it has
        been removed.
        """
        metric_view_one = MetricBigQueryViewBuilder(
            dataset_id="dataset",
            view_id="view1",
            view_query_template="select * from table",
            dimensions=("a", "b", "c"),
        ).build()

        export_config_one = ExportBigQueryViewConfig(
            view=metric_view_one,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket1/US_XX"),
        )
        export_config_one_staging = ExportBigQueryViewConfig(
            view=metric_view_one,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket1/staging/US_XX"),
        )

        metric_view_two = MetricBigQueryViewBuilder(
            dataset_id="dataset",
            view_id="view2",
            view_query_template="select * from view2",
            dimensions=("d", "e", "f"),
        ).build()

        export_config_two = ExportBigQueryViewConfig(
            view=metric_view_two,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table2",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket2/US_XX"),
        )
        export_config_two_staging = ExportBigQueryViewConfig(
            view=metric_view_two,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table2",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket2/staging/US_XX"),
        )

        mock_fs = create_autospec(GCSFileSystem)

        # First delegate succeeds and reports its staging output paths.
        delegate_one = create_autospec(BigQueryViewExporter)
        delegate_one_staging_paths = [
            export_config_one_staging.output_path("json"),
            export_config_two_staging.output_path("json"),
        ]
        delegate_one.export_and_validate.return_value = delegate_one_staging_paths

        # Second delegate fails validation.
        delegate_two = create_autospec(BigQueryViewExporter)
        delegate_two.export_and_validate.side_effect = ViewExportValidationError(
            "Validation failed")

        # Make the actual call
        with pytest.raises(ViewExportValidationError) as e:
            export_views_with_exporters(
                mock_fs,
                [export_config_one, export_config_two],
                {
                    ExportOutputFormatType.JSON: delegate_one,
                    ExportOutputFormatType.METRIC: delegate_two,
                },
            )

        self.assertIn("Validation failed", str(e.value))
Example #52
0
 def v3(self, app_spec):
     """Mock BaseFactory (version 3); calling it returns app_spec."""
     return create_autospec(
         BaseFactory, spec_set=True, version=3, return_value=app_spec)
Example #53
0
def mock_flash_function():
    """Return a mock object with the same API as request.session.flash()."""
    flash = DummyRequest().session.flash
    return mock.create_autospec(flash, return_value=None)
Example #54
0
    def test_export_happy_path(self) -> None:
        """End-to-end happy path for export_views_with_exporters.

        Two export configs fan out to two exporter delegates (JSON and
        METRIC). Each delegate returns staging paths; the function is then
        expected to copy every staging file to its final location and
        delete the staging copy.
        """
        metric_view_one = MetricBigQueryViewBuilder(
            dataset_id="dataset",
            view_id="view1",
            view_query_template="select * from table",
            dimensions=("a", "b", "c"),
        ).build()

        export_config_one = ExportBigQueryViewConfig(
            view=metric_view_one,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket1/US_XX"),
        )
        # Same view, but targeting the staging/ prefix.
        export_config_one_staging = ExportBigQueryViewConfig(
            view=metric_view_one,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket1/staging/US_XX"),
        )

        metric_view_two = MetricBigQueryViewBuilder(
            dataset_id="dataset",
            view_id="view2",
            view_query_template="select * from view2",
            dimensions=("d", "e", "f"),
        ).build()

        export_config_two = ExportBigQueryViewConfig(
            view=metric_view_two,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table2",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket2/US_XX"),
        )
        export_config_two_staging = ExportBigQueryViewConfig(
            view=metric_view_two,
            view_filter_clause="WHERE state_code = 'US_XX'",
            intermediate_table_name="intermediate_table2",
            output_directory=GcsfsDirectoryPath.from_absolute_path(
                "gs://bucket2/staging/US_XX"),
        )

        mock_fs = create_autospec(GCSFileSystem)

        # Staging files report as present -- presumably required before the
        # copy/delete promotion runs; confirm against the implementation.
        mock_fs.exists.return_value = True

        delegate_one = create_autospec(BigQueryViewExporter)
        delegate_one_staging_paths = [
            export_config_one_staging.output_path("json"),
            export_config_two_staging.output_path("json"),
        ]
        delegate_one.export_and_validate.return_value = delegate_one_staging_paths

        delegate_two = create_autospec(BigQueryViewExporter)
        delegate_two_staging_paths = [
            export_config_one_staging.output_path("txt"),
            export_config_two_staging.output_path("txt"),
        ]
        delegate_two.export_and_validate.return_value = delegate_two_staging_paths

        # Make the actual call
        export_views_with_exporters(
            mock_fs,
            [export_config_one, export_config_two],
            {
                ExportOutputFormatType.JSON: delegate_one,
                ExportOutputFormatType.METRIC: delegate_two,
            },
        )

        # Assert all mocks called as expected: both delegates received the
        # staging variants of the configs.
        delegate_one.export_and_validate.assert_has_calls([
            call([export_config_one_staging, export_config_two_staging]),
        ])

        delegate_two.export_and_validate.assert_has_calls([
            call([export_config_one_staging, export_config_two_staging]),
        ])

        # Every staging object is copied to its final (non-staging) path.
        mock_fs.copy.assert_has_calls(
            [
                call(
                    GcsfsFilePath(bucket_name="bucket1",
                                  blob_name="staging/US_XX/view1.json"),
                    GcsfsFilePath(bucket_name="bucket1",
                                  blob_name="US_XX/view1.json"),
                ),
                call(
                    GcsfsFilePath(bucket_name="bucket2",
                                  blob_name="staging/US_XX/view2.json"),
                    GcsfsFilePath(bucket_name="bucket2",
                                  blob_name="US_XX/view2.json"),
                ),
                call(
                    GcsfsFilePath(bucket_name="bucket1",
                                  blob_name="staging/US_XX/view1.txt"),
                    GcsfsFilePath(bucket_name="bucket1",
                                  blob_name="US_XX/view1.txt"),
                ),
                call(
                    GcsfsFilePath(bucket_name="bucket2",
                                  blob_name="staging/US_XX/view2.txt"),
                    GcsfsFilePath(bucket_name="bucket2",
                                  blob_name="US_XX/view2.txt"),
                ),
            ],
            any_order=True,
        )

        # ...and each staging copy is deleted after promotion.
        mock_fs.delete.assert_has_calls(
            [
                call(
                    GcsfsFilePath(bucket_name="bucket1",
                                  blob_name="staging/US_XX/view1.json")),
                call(
                    GcsfsFilePath(bucket_name="bucket2",
                                  blob_name="staging/US_XX/view2.json")),
                call(
                    GcsfsFilePath(bucket_name="bucket1",
                                  blob_name="staging/US_XX/view1.txt")),
                call(
                    GcsfsFilePath(bucket_name="bucket2",
                                  blob_name="staging/US_XX/view2.txt")),
            ],
            any_order=True,
        )
def _make_rpc_error(error_cls, trailing_metadata=None):
    """Build an error_cls exception wrapping a mocked grpc.Call.

    The mocked call reports *trailing_metadata* from trailing_metadata().
    """
    import grpc

    mock_call = mock.create_autospec(grpc.Call, instance=True)
    mock_call.trailing_metadata.return_value = trailing_metadata
    return error_cls("error", errors=(mock_call, ))
Example #56
0
    def test__thread_main_max_latency(self, time):
        """Verify _thread_main computes queue timeouts from max_latency.

        Note: this test is a bit brittle as it assumes the operation of
        _get_many invokes queue.get() followed by queue._get(). It fails
        the "change detector" test in that way. However, this is still a
        useful test to verify the queue timeout is appropriately calculated.
        """
        from google.cloud.logging_v2.handlers.transports import background_thread

        # Use monotonically increasing time.
        time.side_effect = range(1, 6)

        worker = self._make_one(_Logger(self.NAME),
                                max_latency=2,
                                max_batch_size=10)
        worker._queue = mock.create_autospec(queue.Queue, instance=True)

        worker._queue.get.side_effect = [
            {
                "message": 1
            },  # Single record.
            queue.Empty(),  # Emulate a queue.get() timeout.
            {
                "message": "2"
            },  # Second record.
            background_thread._WORKER_TERMINATOR,  # Stop the thread.
            queue.Empty(),  # Emulate a queue.get() timeout.
        ]

        worker._thread_main()

        self.assertEqual(worker._cloud_logger._num_batches, 2)
        self.assertTrue(worker._cloud_logger._batch.commit_called)
        self.assertEqual(worker._cloud_logger._batch.commit_count, 1)

        # Time should have been called five times:
        #
        #   Batch 1: once for the start time (1) and once for the elapsed
        #   time while grabbing the second item (2).
        #
        #   Batch 2: once for the start time (3), once for the elapsed time
        #   while grabbing the second item (4), and once for the elapsed
        #   time while grabbing the final (terminator) item (5).
        #
        # NOTE(review): the original comment also counted a call while
        # receiving the empty queue, which would make six calls, and
        # repeated "(3)" for two different calls; the assertion below pins
        # five -- confirm the intended enumeration.
        self.assertEqual(time.call_count, 5)

        # Queue.get should've been called 5 times as well, but with different
        # timeouts due to the monotonically increasing time.
        #
        #   For the first batch, it will be called once without a timeout
        #   (for the first item) and then with timeout=1, as start will be
        #   1 and now will be 2.
        #
        #   For the second batch, it will be called once without a timeout
        #   (for the first item) and then with timeout=1, as start will be
        #   3 and now will be 4, and finally with timeout=0 as start will be 3
        #   and now will be 5.
        #
        worker._queue.get.assert_has_calls([
            mock.call(),
            mock.call(timeout=1),
            mock.call(),
            mock.call(timeout=1),
            mock.call(timeout=0),
        ])
 def _next_page(self):
     """Produce a fresh autospecced page_iterator.Page instance mock."""
     page = mock.create_autospec(page_iterator.Page, instance=True)
     return page
 def __init__(self, fs: FakeGCSFileSystem):
     """Satisfy the base class with an autospec mock and keep *fs* around."""
     backing_fs_mock = create_autospec(gcsfs.GCSFileSystem)
     super().__init__(backing_fs_mock)
     self.fs = fs
Example #59
0
 def setUp(self):
   super(PcapFilter, self).setUp()
   self.naming = mock.create_autospec(naming.Naming)
Example #60
0
    def test_masterDeactivated(self):
        """Deactivating a master finishes its in-flight work and notifies.

        A build (with a step and a log) claimed by master 14 is seeded;
        _masterDeactivated is expected to finish the log/step/build with
        RETRY, invoke each resource type's _masterDeactivated hook, and
        produce a 'stopped' message.

        NOTE(review): the body uses `yield`, so this presumably runs under
        an inlineCallbacks-style decorator above this view -- confirm
        against the full file.
        """
        self.master.db.insertTestData([
            fakedb.Master(id=14, name='other', active=0, last_active=0),

            # set up a running build with some steps
            fakedb.Builder(id=77, name='b1'),
            fakedb.Worker(id=13, name='sl'),
            fakedb.Buildset(id=8822),
            fakedb.BuildRequest(id=82, builderid=77, buildsetid=8822),
            fakedb.BuildRequestClaim(brid=82, masterid=14,
                                     claimed_at=SOMETIME),
            fakedb.Build(id=13,
                         builderid=77,
                         masterid=14,
                         workerid=13,
                         buildrequestid=82,
                         number=3,
                         results=None),
            fakedb.Step(id=200, buildid=13),
            fakedb.Log(id=2000, stepid=200, num_lines=2),
            fakedb.LogChunk(logid=2000,
                            first_line=1,
                            last_line=2,
                            content=u'ab\ncd')
        ])

        # mock out the _masterDeactivated methods this will call
        for rtype in 'builder', 'scheduler', 'changesource':
            rtype_obj = getattr(self.master.data.rtypes, rtype)
            m = mock.Mock(name='%s._masterDeactivated' % rtype,
                          spec=rtype_obj._masterDeactivated)
            m.side_effect = lambda masterid: defer.succeed(None)
            rtype_obj._masterDeactivated = m

        # and the update methods..
        for meth in 'finishBuild', 'finishStep', 'finishLog':
            m = mock.create_autospec(getattr(self.master.data.updates, meth))
            m.side_effect = lambda *args, **kwargs: defer.succeed(None)
            setattr(self.master.data.updates, meth, m)

        yield self.rtype._masterDeactivated(14, 'other')

        # each resource type's hook must have been told which master stopped
        self.master.data.rtypes.builder._masterDeactivated. \
            assert_called_with(masterid=14)
        self.master.data.rtypes.scheduler._masterDeactivated. \
            assert_called_with(masterid=14)
        self.master.data.rtypes.changesource._masterDeactivated. \
            assert_called_with(masterid=14)

        # see that we finished off that build and its steps and logs
        updates = self.master.data.updates
        updates.finishLog.assert_called_with(logid=2000)
        updates.finishStep.assert_called_with(stepid=200,
                                              results=RETRY,
                                              hidden=False)
        updates.finishBuild.assert_called_with(buildid=13, results=RETRY)

        self.assertEqual(self.master.mq.productions, [
            (('masters', '14', 'stopped'),
             dict(masterid=14, name='other', active=False)),
        ])