# Example #1
    def test_resource_in_discovery_container_after_get_patch_delete(self):
        """GET populates the container, PATCH refreshes the body, DELETE evicts it."""
        resource_url = "http://{API_ENDPOINT}/resource".format(API_ENDPOINT=API_ENDPOINT)
        with mock.patch('requests.get') as mocked_get:
            original_body = {"@odata.id": "odata.id", "something": "irrelevant"}
            mocked_get_response = Mock()
            mocked_get_response.status_code = ReturnCodes.OK
            mocked_get_response.headers = {}
            mocked_get_response.text = json.dumps(original_body)
            mocked_get.return_value = mocked_get_response
            container = DiscoveryContainer()
            self.api_caller.get_resource("/resource", container)
            self.assertEqual(container[resource_url].body, original_body)

            # the PATCH flow re-reads the resource via GET, so the GET mock
            # now serves the updated body
            updated_body = {"@odata.id": "odata.id", "something": "relevant"}
            mocked_get_response.text = json.dumps(updated_body)

            with mock.patch('requests.patch') as mocked_patch:
                mocked_patch_response = Mock()
                mocked_patch_response.status_code = ReturnCodes.OK
                mocked_patch_response.headers = {}
                mocked_patch_response.text = "{}"
                mocked_patch.return_value = mocked_patch_response
                _, _, _, _ = self.api_caller.patch_resource("/resource", container)
                self.assertEqual(container[resource_url].body, updated_body)

                with mock.patch('requests.delete') as mocked_delete:
                    mocked_delete_response = Mock()
                    mocked_delete_response.status_code = ReturnCodes.NO_CONTENT
                    mocked_delete_response.headers = {}
                    mocked_delete_response.text = ""
                    mocked_delete.return_value = mocked_delete_response
                    _, _, _, _ = self.api_caller.delete_resource("/resource", container)
                    self.assertNotIn("/resource", container)
    def setUp(self):
        """Patch every PreviewItem formatter with a canned return value.

        Each formatter mock is exposed as ``self.mock_format_<suffix>`` so
        individual tests can inspect calls or override return values. The
        original version repeated the same patch/start/return_value/cleanup
        boilerplate five times; this drives it from a table instead.
        """
        super(TestMakePreviewPage, self).setUp()

        # (method name, attribute suffix, canned return value) — one entry
        # per PreviewItem formatter used by the preview page.
        formatter_specs = [
            ('format_labels', 'label', 'formatted_label'),
            ('format_descriptions', 'description', 'formatted_description'),
            ('format_item', 'item', 'formatted_item'),
            ('format_reference', 'reference', 'formatted_reference'),
            ('format_protoclaims', 'protoclaim', 'formatted_protoclaim'),
        ]
        for method_name, suffix, canned_value in formatter_specs:
            patcher = mock.patch(
                'wikidatastuff.preview_item.PreviewItem.{}'.format(method_name))
            mocked = patcher.start()
            mocked.return_value = canned_value
            setattr(self, 'mock_format_{}'.format(suffix), mocked)
            self.addCleanup(patcher.stop)
# Example #3
def test_field_rules_and_callback():
    """Validate a form twice: once passing (success callback fires) and once
    failing (error callback fires and offending fields are flagged invalid).

    Also fixes the ``== True`` / ``== False`` comparisons: booleans are
    asserted directly (PEP 8 / flake8 E712).
    """
    form = Form()
    # "description" only accepts integers
    form.add_text("description", "Description")\
        .add_rule(Validator.INTEGER, "Please provide integer")

    form.add_text("first_name", "First Name:")\
        .set_required()

    form.add_button("reset", "Reset")
    form.add_submit("save", "Save")

    # valid response: integer description, required first name present
    response = {
        "description": 1,
        "first_name": "Test",
        "save": True
    }

    with mock.patch("tests.form_test.on_success_dummy") as on_success_called:
        form.on_success.append(on_success_called)
        assert form.validate(response)
        on_success_called.assert_called_with()

    # invalid response: non-integer description, missing required field
    response = {
        "description": "test",
        "save": True
    }

    with mock.patch("tests.form_test.on_error_dummy") as on_error_called:
        form.on_error.append(on_error_called)
        assert not form.validate(response)
        assert not form["first_name"].is_valid()
        assert not form["description"].is_valid()
        on_error_called.assert_called_with()
    def test_check_supported_version(self):
        """_check_supported_version: known versions pass silently, unknown
        ones defer to the user via query_yes_no."""
        # current version matches one of the supported prefixes
        self.assertTrue(ResourcesManager._check_supported_version(
            current_version='0.4.0',
            supported_versions=['0.4', '0.3', '0.2']))

        # same, with multi-digit version components
        self.assertTrue(ResourcesManager._check_supported_version(
            current_version='11.23.0',
            supported_versions=['11.23', '12.3', '2.23']))

        # unsupported version (floats instead of strings): the user is
        # prompted and the result follows the yes/no answer
        unsupported = [0.3, 0.2]
        with mock.patch('kalliope.Utils.query_yes_no', return_value=True):
            self.assertTrue(ResourcesManager._check_supported_version(
                current_version='0.4.0',
                supported_versions=unsupported))

        with mock.patch('kalliope.Utils.query_yes_no', return_value=False):
            self.assertFalse(ResourcesManager._check_supported_version(
                current_version='0.4.0',
                supported_versions=unsupported))
    def testCallback(self):
        """Exercise the answer callback: missing or unmatched audio falls
        back to the default synapse; a matching order runs the linked one."""
        neuron_params = {
            "default": self.default,
            "from_answer_link": self.from_answer_link
        }
        with mock.patch("kalliope.core.NeuronModule.get_audio_from_stt") as stt_mock, \
                mock.patch("kalliope.core.NeuronModule.run_synapse_by_name") as run_mock:
            # instantiating the neuron immediately listens for an answer
            nt = Neurotransmitter(**neuron_params)
            stt_mock.assert_called_once()
            stt_mock.reset_mock()

            # None audio -> default synapse
            nt.callback(audio=None)
            run_mock.assert_called_once_with(self.default, high_priority=True, is_api_call=False)
            run_mock.reset_mock()

            # audio matching no known order -> default synapse again
            nt.callback(audio="try test audio ")
            run_mock.assert_called_once_with(self.default, high_priority=True, is_api_call=False)
            run_mock.reset_mock()

            # audio matching "answer one" -> linked synapse with full context
            nt.callback(audio="answer one")
            run_mock.assert_called_once_with(synapse_name="synapse2",
                                             user_order="answer one",
                                             synapse_order="answer one",
                                             high_priority=True,
                                             is_api_call=False)
    def test_pre_save(self):
        """pre_save uploads the file and copies width/height metadata onto
        the model; empty metadata leaves the dimension fields as None."""
        poll = Poll()
        poll.image = SimpleUploadedFile(TEST_IMAGE, b'')

        field = CloudinaryField('image', width_field="image_width", height_field="image_height")
        field.set_attributes_from_name('image')
        resource_with_meta = cloudinary.CloudinaryResource(
            metadata={"width": TEST_IMAGE_W, "height": TEST_IMAGE_H},
            type="upload", public_id=TEST_IMAGE, resource_type="image")

        with mock.patch('cloudinary.uploader.upload_resource', return_value=resource_with_meta) as upload_mock:
            prep_value = field.pre_save(poll, None)

        self.assertTrue(upload_mock.called)
        self.assertEqual(".png", os.path.splitext(prep_value)[1])
        self.assertEqual(TEST_IMAGE_W, poll.image_width)
        self.assertEqual(TEST_IMAGE_H, poll.image_height)

        # empty metadata must not populate the dimension fields
        poll.image = SimpleUploadedFile(TEST_IMAGE, b'')
        resource_without_meta = cloudinary.CloudinaryResource(metadata={})
        with mock.patch('cloudinary.uploader.upload_resource', return_value=resource_without_meta) as upload_mock:
            field.pre_save(poll, None)

        self.assertTrue(upload_mock.called)
        self.assertIsNone(poll.image_width)
        self.assertIsNone(poll.image_height)
# Example #7
    def test_remove_other_unseen_boundaries(self):
        # "other unseen" boundaries are boundaries that a later import never
        # touched in any way; the command is expected to delete them

        def serve_geojson(open_mock, payloads):
            # make builtins.open hand back the queued geojson payloads in order
            open_mock.return_value.__enter__ = lambda filename: filename
            open_mock.return_value.__exit__ = mock.Mock()
            open_mock.return_value.read.side_effect = lambda: payloads.pop(0)

        # seed the database with both admin levels
        with patch("builtins.open") as mock_file:
            serve_geojson(mock_file, [self.data_geojson_level_0, self.data_geojson_level_1])
            with CaptureSTDOUT():
                call_command("import_geojson", "admin_level_0_simplified.json", "admin_level_1_simplified.json")

        self.assertEqual(AdminBoundary.objects.count(), 2)

        # re-import only level 0: the level 1 boundary becomes unseen
        with patch("builtins.open") as mock_file:
            serve_geojson(mock_file, [self.data_geojson_level_0])
            with CaptureSTDOUT() as captured_output:
                call_command("import_geojson", "admin_level_0_simplified.json")

        self.assertEqual(
            captured_output.getvalue(),
            "=== parsing admin_level_0_simplified.json\n ** updating Granica (R1000)\n ** removing unseen boundaries (R1000)\nOther unseen boundaries removed: 1\n ** updating paths for all of Granica\n",
        )

        self.assertEqual(AdminBoundary.objects.count(), 1)
# Example #8
    def test_update_features_with_parent(self):
        """Re-importing both admin levels updates the existing boundaries in
        place instead of creating duplicates."""
        # import twice with identical payloads; the second run must report
        # updates (not creations) and leave the boundary count unchanged
        for _ in range(2):
            payloads = [self.data_geojson_level_0, self.data_geojson_level_1]
            with patch("builtins.open") as open_mock:
                open_mock.return_value.__enter__ = lambda filename: filename
                open_mock.return_value.__exit__ = mock.Mock()
                open_mock.return_value.read.side_effect = lambda: payloads.pop(0)

                with CaptureSTDOUT() as captured_output:
                    call_command("import_geojson", "admin_level_0_simplified.json", "admin_level_1_simplified.json")

            self.assertEqual(AdminBoundary.objects.count(), 2)

        # output of the second (update) run
        self.assertEqual(
            captured_output.getvalue(),
            "=== parsing admin_level_0_simplified.json\n ** updating Granica (R1000)\n ** removing unseen boundaries (R1000)\n=== parsing admin_level_1_simplified.json\n ** updating Međa 2 (R2000)\n ** removing unseen boundaries (R2000)\nOther unseen boundaries removed: 0\n ** updating paths for all of Granica\n",
        )
# Example #9
 def _send_presence_for_aggregated_tests(self, email, status, validate_time):
     # type: (str, str, datetime.datetime) -> Dict[str, Dict[str, Any]]
     """Post the same presence status from three clients at staggered times
     and return the ``/json/users/<email>/presence`` payload.

     Updates are posted via the website (5s before validate_time), Android
     (2s before) and iOS (7s before); the aggregated presence must reflect
     the most recent update — the Android one.
     """
     self.login(email)
     timezone_util = 'zerver.views.presence.timezone_now'
     # website client: 5 seconds before the reference time
     with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=5)):
         self.client_post("/json/users/me/presence", {'status': status})
     # Android client: 2 seconds before -- the newest of the three updates
     with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=2)):
         self.client_post("/api/v1/users/me/presence", {'status': status},
                          HTTP_USER_AGENT="ZulipAndroid/1.0",
                          **self.api_auth(email))
     # iOS client: 7 seconds before -- the oldest update, posted last to
     # prove aggregation orders by timestamp and not by arrival order
     with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=7)):
         latest_result = self.client_post("/api/v1/users/me/presence", {'status': status},
                                          HTTP_USER_AGENT="ZulipIOS/1.0",
                                          **self.api_auth(email))
     latest_result_dict = latest_result.json()
     # the aggregate must match the Android update (newest timestamp)
     self.assertDictEqual(
         latest_result_dict['presences'][email]['aggregated'],
         {
             'status': status,
             'timestamp': datetime_to_timestamp(validate_time - datetime.timedelta(seconds=2)),
             'client': 'ZulipAndroid'
         }
     )
     result = self.client_get("/json/users/%s/presence" % (email,))
     return result.json()
    def setUp(self):
        """Stub PreviewItem text helpers so formatting is deterministic and
        easy to assert on."""
        self.repo = pywikibot.Site('test', 'wikidata')

        # bold/italics wrap their input so tests can see they were applied
        bold_patcher = mock.patch(
            'wikidatastuff.preview_item.PreviewItem.make_text_bold')
        self.mock_bold = bold_patcher.start()
        self.mock_bold.side_effect = lambda val: 'bold_{}'.format(val)

        italics_patcher = mock.patch(
            'wikidatastuff.preview_item.PreviewItem.make_text_italics')
        self.mock_italics = italics_patcher.start()
        self.mock_italics.side_effect = lambda val: 'italics_{}'.format(val)

        self.addCleanup(bold_patcher.stop)
        self.addCleanup(italics_patcher.stop)

        # successive make_wikidata_template calls yield wd_template_1..4
        wd_template_patcher = mock.patch(
            'wikidatastuff.preview_item.PreviewItem.make_wikidata_template')
        self.mock_wd_template = wd_template_patcher.start()
        self.mock_wd_template.side_effect = [
            'wd_template_{}'.format(i) for i in range(1, 5)]
        self.addCleanup(wd_template_patcher.stop)
# Example #11
 def _run_composer(self, args):
     """Run ringcomposer.main(args) with captured output.

     Returns a (exit_code, stdout, stderr) tuple; main is expected to
     terminate via SystemExit.
     """
     captured_out = six.StringIO()
     captured_err = six.StringIO()
     with mock.patch("sys.stdout", captured_out), \
             mock.patch("sys.stderr", captured_err), \
             self.assertRaises(SystemExit) as cm:
         ringcomposer.main(args)
     return (cm.exception.code,
             captured_out.getvalue(),
             captured_err.getvalue())
    def test_retrieve_request_from_database(self):
        """request() replays the stored response for the first recorded id."""
        with mock.patch('cts_core.commons.replay_controller.ReplayController._read_request_ids_from_database') as read_ids:
            # NOTE(review): ReplayController().__class__() builds a throwaway
            # instance just to reach the class -- kept as-is to preserve any
            # construction side effects
            controller = ReplayController().__class__()
            read_ids.return_value = [22, 33]
            controller.initialize()

            with mock.patch('cts_framework.db.dao.http_request_dao.HttpRequestDAO.retrieve') as retrieve:
                retrieve.return_value = ('_method', 'url', '{"anything":"something"}', '_response', '_status_code')
                # the replayed response wins over any live call
                self.assertEqual('_response', controller.request("http_method", "url", anything='something'))
                retrieve.assert_called_with(22)
# Example #13
    def test_check_fail(self):
        """DependencyChecker.check() must call sys.exit when requirements in
        the dummy file cannot be satisfied.

        Fix: the old code did ``sys.exit = MagicMock()`` *inside*
        ``mock.patch('sys.exit')``, clobbering the patcher's own mock with a
        second, redundant one. Use the patcher's mock handle instead.
        """
        with mock.patch('sys.exit') as exit_mock:
            with mock.patch('qrl.core.misc.DependencyChecker.DependencyChecker') as mockDepChecker:
                test_path = os.path.dirname(os.path.abspath(__file__))
                dummy_path = os.path.join(test_path, "..", "data", 'misc', 'dummy_requirements.txt')
                # point the checker at the unsatisfiable dummy requirements
                mockDepChecker._get_requirements_path = MagicMock(return_value=dummy_path)

                DependencyChecker.check()
                exit_mock.assert_called()
# Example #14
  def executeScript(self, path, classname=None, command=None, config_file=None,
                    # common mocks for all the scripts
                    config_overrides = None,
                    shell_mock_value = (0, "OK."), 
                    os_type=('Suse','11','Final'),
                    kinit_path_local="/usr/bin/kinit"
                    ):
    """Load a stack script from the test stacks tree and run one command on
    it inside a fully mocked resource_management Environment.

    path -- stack-relative path to the script under test
    classname -- name of the Script subclass defined in that file
    command -- method name on the script class to invoke
    config_file -- JSON config (under <stack>/configs) fed to Script.get_config
    config_overrides -- optional dict of top-level keys to override
    shell_mock_value -- canned (exit_code, output) for every shell call
    os_type -- value returned by the mocked platform.linux_distribution
    kinit_path_local -- canned result of get_kinit_path

    Raises RuntimeError if the config file or script cannot be loaded.
    """
    norm_path = os.path.normpath(path)
    src_dir = RMFTestCase._getSrcFolder()
    # first path component names the stack version directory
    stack_version = norm_path.split(os.sep)[0]
    stacks_path = os.path.join(src_dir, PATH_TO_STACKS)
    configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
    script_path = os.path.join(stacks_path, norm_path)
    config_file_path = os.path.join(configs_path, config_file)

    try:
      with open(config_file_path, "r") as f:
        self.config_dict = json.load(f)
    except IOError:
      raise RuntimeError("Can not read config file: "+ config_file_path)

    # apply caller-provided overrides before freezing the config
    if config_overrides:
      for key, value in config_overrides.iteritems():
        self.config_dict[key] = value

    self.config_dict = ConfigDictionary(self.config_dict)

    # append basedir to PYTHONPATH
    scriptsdir = os.path.dirname(script_path)
    basedir = os.path.dirname(scriptsdir)
    sys.path.append(scriptsdir)
    
    # get method to execute; linux_distribution is mocked even during import
    # because some scripts inspect the OS at module load time
    try:
      with patch.object(platform, 'linux_distribution', return_value=os_type):
        script_module = imp.load_source(classname, script_path)
    except IOError:
      # NOTE(review): logger-style %s args -- RuntimeError will not
      # interpolate them, so the message prints as a tuple
      raise RuntimeError("Cannot load class %s from %s",classname, norm_path)
    
    script_class_inst = RMFTestCase._get_attr(script_module, classname)()
    method = RMFTestCase._get_attr(script_class_inst, command)
    
    # Reload params import, otherwise it won't change properties during next import
    if 'params' in sys.modules:  
      del(sys.modules["params"]) 
    
    # run
    with Environment(basedir, test_mode=True) as RMFTestCase.env:
      with patch('resource_management.core.shell.checked_call', return_value=shell_mock_value): # we must always mock any shell calls
        with patch.object(Script, 'get_config', return_value=self.config_dict): # mocking configurations
          with patch.object(Script, 'install_packages'):
            with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local):
              with patch.object(platform, 'linux_distribution', return_value=os_type):
                method(RMFTestCase.env)
    sys.path.remove(scriptsdir)
    def setUp(self):
        """Install the mail/spawn/process patches shared by every test in
        this handler suite and undo them automatically afterwards.

        Fix: the old code used list comprehensions purely for their side
        effects, building throwaway lists; a plain loop is the idiomatic form.
        """
        super(GithubNotifyMailHandlerTest, self).setUp()

        patchers = [
            patch("nxtools.hooks.services.mail.EmailService.sendemail", Mock()),
            patch("gevent.spawn", mocked_spawn),
            patch("nxtools.hooks.endpoints.webhook.github_handlers.push_notify_mail.Process", MockedProcess),
        ]

        for patcher in patchers:
            patcher.start()
            self.addCleanup(patcher.stop)
    def testJwtDecode(self):
        """has_jwt() is False when jwt.decode raises DecodeError and True
        when it returns a payload."""
        headers = {'Cookie': 'access_token=' + JwtServiceTest.ACCESS_TOKEN}

        # decoding blows up -> no usable JWT
        self.mocks.jwt_error.side_effect = DecodeError()
        with patch("jwt.decode", self.mocks.jwt_error), self.app.test_request_context(headers=headers):
            self.assertFalse(services.get(JwtService).has_jwt())
            self.assertTrue(self.mocks.jwt_error.called)

        # decoding succeeds -> JWT present
        self.mocks.jwt_decode.return_value = {}
        with patch("jwt.decode", self.mocks.jwt_decode), self.app.test_request_context(headers=headers):
            self.assertTrue(services.get(JwtService).has_jwt())
    def test_patch(self):
        """With every patchable property reporting PASSED, the overall patch
        validation passes."""
        with mock.patch('cts_core.commons.api_caller.ApiCaller.__init__') as api_caller_init:
            # neutralise ApiCaller construction performed by the validator
            api_caller_init.return_value = None

            validate_target = 'cts_core.validation.patch.metadata_patch_validator.MetadataPatchValidator._validate_patchable_property'
            with mock.patch(validate_target) as validate_property:
                validate_property.return_value = ValidationStatus.PASSED

                validator = MetadataPatchValidator(self.metadata_container, None, PatchingStrategy2_1())

                with StdoutCapture() as output:
                    self.assertEqual(ValidationStatus.PASSED, validator.validate(self.discovery_container))
# Example #18
    def setUp(self):
        """Patch the socket/HTTP layer so tests never touch the network; all
        patches are reversed automatically via addCleanup.

        Fix: replaces list comprehensions that were (ab)used purely for side
        effects with explicit loops.
        """
        super(HTTPTest, self).setUp()

        patchers = [
            patch("gevent.socket.create_connection", self.mocks.socket_connect),
            patch("httplib.HTTPConnection.request", self.mocks.http_request),
            patch("httplib.HTTPConnection.getresponse", self.mocks.http_getresponse),
            patch("geventhttpclient.httplib.HTTPResponse.read", self.mocks.httpresponse_read),
            patch("geventhttpclient.response.HTTPSocketResponse._read_headers"),
        ]

        for patcher in patchers:
            patcher.start()
            self.addCleanup(patcher.stop)
# Example #19
    def test_with_correct_script_path(self):
        """Replaying against a valid recorded script path reaches execution
        and reports progress on stdout."""
        with mock.patch('cts_framework.db.dao.script_dao.ScriptDAO.get_script_execution_details') as get_details:
            get_details.return_value = Mock(configuration="", script_path="valid_script_path")
            with mock.patch('cts_framework.tests_managing.tests_manager.TestsManager.get_packages') as get_packages:
                get_packages.return_value = TestsPackagesContainerMock()
                replay_action = ReplayActionUnderTest()
                # stub out the actual script execution layer
                with mock.patch('cts_framework.actions.execute.execute_test_scripts_action.ExecuteTestScriptsAction.execute_configuration_group') \
                        as execute_configuration_group:
                    with StdoutCapture() as output:
                        replay_action.process_action(Mock(replay_id=['62']))

                    self.assertIn('Executing', output.raw)
def test_generate_shard_prune_playbook(plan_name):
    """generate_shard_prune_playbook writes a prune playbook only when there
    is something to prune; otherwise no file may be created."""
    migration = _get_migration(plan_name)
    allocation_yml = _get_expected_yml(plan_name, 'mock_shard_allocation_post_migration.yml')
    allocation_func = get_shard_allocation_func(allocation_yml)
    with patch('commcare_cloud.commands.migrations.couchdb.get_shard_allocation', allocation_func),\
            patch('commcare_cloud.commands.migrations.couchdb.get_db_list', return_value=['commcarehq', 'commcarehq__apps']):
        nodes = generate_shard_prune_playbook(migration)

    if not nodes:
        # nothing pruned: the playbook must not exist at all
        assert not os.path.exists(migration.prune_playbook_path), migration.prune_playbook_path
    else:
        actual = _get_yml(migration.prune_playbook_path)
        expected = _get_expected_yml(plan_name, 'expected_{}'.format(PRUNE_PLAYBOOK_NAME))
        assert expected == actual, "file lists mismatch:\n\nExpected\n{}\nActual\n{}".format(expected, actual)
# Example #21
  def test_header_on_unexpected_error(self):
    """During the authorization flow 'Unauthorized' is an expected error.

    When the error is unexpected, the 'X-Expected-Error' header must NOT be
    set on the response.
    """
    with mock.patch("ggrc.gdrive.file_actions.get_gdrive_file_link") as mocked:
      # pylint: disable=unused-argument
      def raise_unauthorized(*args, **kwargs):
        raise Unauthorized("Unable to get valid credentials")

      mocked.side_effect = raise_unauthorized
      control = factories.ControlFactory()
      document_payload = [{
          "document": {
              "kind": all_models.Document.FILE,
              "source_gdrive_id": "some link",
              "link": "some link",
              "title": "some title",
              "context": None,
              "parent_obj": {
                  "id": control.id,
                  "type": "Control"
              }
          }
      }]
      response = self.api.post(all_models.Document, document_payload)
    self.assertEqual(response.status_code, 401)
    self.assertNotIn('X-Expected-Error', response.headers)
# Example #22
    def test_event_grid_mode_creates_advanced_filtered_subscription_with_multiple_events(self):
        """A policy with several event specs produces one StringIn advanced
        filter on Data.OperationName listing every resolved operation."""
        policy = self.load_policy({
            'name': 'test-azure-event',
            'resource': 'azure.vm',
            'mode': {
                'type': FUNCTION_EVENT_TRIGGER_MODE,
                'events': [
                    'VmWrite',
                    {
                        'resourceProvider': 'Microsoft.Resources/subscriptions/resourceGroups',
                        'event': 'write'
                    },
                ],
            },
        })

        with mock.patch('c7n_azure.azure_events.AzureEventSubscription.create') as create_mock:
            storage_account = StorageAccount(id=1, location='westus')
            event_mode = AzureEventGridMode(policy)
            event_mode._create_event_subscription(storage_account, 'some_queue', None)

            _, args, _ = create_mock.mock_calls[0]

            # the advanced filter must cover both resolved operation names
            event_filter = args[3].advanced_filters[0]
            self.assertEqual(event_filter.key, 'Data.OperationName')
            self.assertEqual(event_filter.values,
                             ['Microsoft.Compute/virtualMachines/write',
                              'Microsoft.Resources/subscriptions/resourceGroups/write'])
            self.assertEqual(event_filter.operator_type, 'StringIn')
  def test_unpack_archive(self):
    """unpack_archive must fully extract the dummy zip (known file/dir/byte
    counts) and surface CachingException when the directory check fails."""
    tmpdir = tempfile.mkdtemp()
    dummy_archive_name = os.path.join("ambari_agent", "dummy_files",
                                 "dummy_archive.zip")
    archive_file = open(dummy_archive_name, "rb")
    fileCache = FileCache(self.config)
    fileCache.unpack_archive(archive_file, tmpdir)
    # Count summary size of unpacked files:
    total_size = 0
    total_files = 0
    total_dirs = 0
    for dirpath, dirnames, filenames in os.walk(tmpdir):
      total_dirs += 1
      for f in filenames:
        fp = os.path.join(dirpath, f)
        total_size += os.path.getsize(fp)
        total_files += 1
    # known fingerprint of the dummy archive (Python 2 long literal)
    self.assertEquals(total_size, 51258L)
    self.assertEquals(total_files, 28)
    self.assertEquals(total_dirs, 8)
    shutil.rmtree(tmpdir)

    # Test exception handling: a failure raised from os.path.isdir must
    # reach the caller as CachingException, nothing else
    with patch("os.path.isdir") as isdir_mock:
      isdir_mock.side_effect = self.exc_side_effect
      try:
        fileCache.unpack_archive(archive_file, tmpdir)
        self.fail('CachingException not thrown')
      except CachingException:
        pass # Expected
      except Exception, e:
        self.fail('Unexpected exception thrown:' + str(e))
# Example #24
 def test_connection_failure(self):
     """A ConnectionError from the session must propagate when no retry is
     configured."""
     with mock.patch("requests.sessions.Session.get") as mock_get:
         mock_get.side_effect = requests.exceptions.ConnectionError()
         connection = rwslib.RWSConnection("https://innovate.mdsol.com")
         with self.assertRaises(requests.exceptions.ConnectionError):
             connection.send_request(rwslib.rws_requests.VersionRequest())
 def setUp(self):
     """Stub WbTime.toTimestr so itis-formatting output is deterministic."""
     super(TestFormatItis, self).setUp()
     patcher = mock.patch(
         'wikidatastuff.preview_item.pywikibot.WbTime.toTimestr')
     self.mock_format_timestring = patcher.start()
     self.mock_format_timestring.return_value = 'formatted_WbTime'
     self.addCleanup(patcher.stop)
# Example #26
  def testCpuTimes(self, cp_mock, count_mock):
    """get_cpu_times turns the mocked fractional cpu times into percentages
    (0.1 -> 10) and, off Windows, merges in the 1/5/15-minute load averages."""
    count_mock.return_value = 1

    fake_times = cp_mock.return_value
    fake_times.user = 0.1
    fake_times.system = 0.1
    fake_times.idle = 0.7
    fake_times.nice = 0.1
    fake_times.iowait = 0
    fake_times.irq = 0
    fake_times.softirq = 0
    hostinfo = HostInfo(MagicMock())

    if platform.system() == "Windows":
      # no load averages on Windows
      cpu = hostinfo.get_cpu_times()
    else:
      with patch("os.getloadavg") as avg_mock:
        avg_mock.return_value = [13, 13, 13]
        cpu = hostinfo.get_cpu_times()
        self.assertEqual(cpu['load_one'], 13)
        self.assertEqual(cpu['load_five'], 13)
        self.assertEqual(cpu['load_fifteen'], 13)

    self.assertAlmostEqual(cpu['cpu_user'], 10)
    self.assertAlmostEqual(cpu['cpu_system'], 10)
    self.assertAlmostEqual(cpu['cpu_idle'], 70)
    self.assertAlmostEqual(cpu['cpu_nice'], 10)
    self.assertAlmostEqual(cpu['cpu_wio'], 0)
    self.assertAlmostEqual(cpu['cpu_intr'], 0)
    self.assertAlmostEqual(cpu['cpu_sintr'], 0)
  def test_read_host_check_file_with_content(self, os_listdir_mock):
    """read_host_check_file parses a hostcheck result file (plus an auxiliary
    file reached via the patched os.path.join) into a property map."""
    out = StringIO.StringIO()
    # silence stdout produced by hostcleanup during parsing
    sys.stdout = out
    tmpfile = tempfile.mktemp()
    f = open(tmpfile,'w')
    f.write(hostcheck_result_fileContent)
    f.close()

    os_listdir_mock.return_value = ['111']
    # second temp file stands in for the auxiliary file that read_host_check_file
    # opens via os.path.join (patched below to always return this path)
    tf2 = tempfile.mktemp()
    f2 = open(tf2,'w')
    f2.write('java_home|hadoop')
    f2.close()
    with patch('os.path.join') as patch_join_mock:
      patch_join_mock.return_value = f2.name
      propMap = self.hostcleanup.read_host_check_file(tmpfile)

    # spot-check each parsed section of the property map
    self.assertTrue('434' in propMap["processes"])
    self.assertTrue("mysql" in propMap["users"])
    self.assertTrue("HDP-epel" in propMap["repositories"])
    self.assertTrue("/etc/hadoop" in propMap["directories"])
    self.assertTrue("hcatalog-conf" in propMap["alternatives"]["symlink_list"])
    self.assertTrue("/etc/oozie/conf.dist" in propMap["alternatives"]["target_list"])
    self.assertTrue("hadoop-libhdfs.x86_64" in propMap["packages"])
    # restore the stdout swapped out at the top of the test
    sys.stdout = sys.__stdout__
# Example #28
  def test_hardlink_or_copy_cross_device_should_copy(self):
    """When os.link fails with EXDEV (cross-device), safe_hardlink_or_copy
    must fall back to a real copy instead of a link."""
    content = b'hello'

    # simulate a cross-device link error on every os.link call
    cross_device_error = OSError()
    cross_device_error.errno = errno.EXDEV
    with mock.patch('os.link') as link_mock:
      link_mock.side_effect = cross_device_error

      with temporary_dir() as src_dir, temporary_file() as dst:
        dst.write(content)
        dst.close()

        src_path = os.path.join(src_dir, 'src')

        safe_hardlink_or_copy(dst.name, src_path)

        # destination content must match the source
        with open(src_path, 'rb') as f:
          self.assertEqual(content, f.read())

        # not a symlink...
        self.assertFalse(os.path.islink(dst.name))

        # ...and a genuinely independent copy, not a hard link
        self.assertFalse(self._is_hard_link(dst.name, src_path))
# Example #29
def test__glyph_receives_renderer_arg(arg, values):
    """Every candidate value for *arg* must reach the GlyphRenderer kwargs."""
    target = 'bokeh.plotting.helpers.GlyphRenderer'
    for candidate in values:
        with mock.patch(target, autospec=True) as renderer_mock:
            glyph_fn = _glyph_function(Marker)
            glyph_fn(Figure(), x=0, y=0, **{arg: candidate})
            _, passed_kwargs = renderer_mock.call_args
            assert arg in passed_kwargs
            assert passed_kwargs[arg] == candidate
 def setUp(self):
     """Patch PreviewItem.format_itis so every call yields a canned value."""
     super(TestFormatQual, self).setUp()
     patcher = mock.patch(
         'wikidatastuff.preview_item.PreviewItem.format_itis')
     self.mock_format_itis = patcher.start()
     self.addCleanup(patcher.stop)
     self.mock_format_itis.return_value = 'formatted_itis'
Beispiel #31
0
 def _check_proxy_csrf(self, csrf_exempt=True):
     """Building url_patterns should invoke the csrf decorator iff enabled."""
     target = "django_docker_engine.proxy.csrf_exempt_decorator"
     with mock.patch(target) as decorator_mock:
         Proxy(csrf_exempt=csrf_exempt).url_patterns()
         assert decorator_mock.called == csrf_exempt
Beispiel #32
0
    def test_shifted_conv2d(self):
        """Check shifted_conv2d against plain conv2d.

        Covers: the derived name scope, `spatial_shift`/`padding` argument
        validation, and numerical equivalence with conv2d under several
        spatial shifts and kernel sizes, in both NHWC and NCHW layouts.
        """
        assert_allclose = functools.partial(
            np.testing.assert_allclose, rtol=1e-5, atol=1e-6)
        x = np.random.normal(size=[3, 11, 13, 7]).astype(np.float32)

        def my_conv2d(*args, **kwargs):
            return conv2d(*args, **kwargs)

        # test the name scope derived by shifted_conv2d
        with tf.Graph().as_default():
            y = shifted_conv2d(
                input=x, out_channels=5, kernel_size=(1, 1),
                spatial_shift=(1, -1), conv_fn=my_conv2d
            )
            self.assertTrue(y.name.startswith('shifted_conv2d/my_conv2d/'))

        # test errors
        with pytest.raises(TypeError,
                           match='`spatial_shift` must be a tuple with two '
                                 'elements, and the elements can only be '
                                 '-1, 0 or 1'):
            _ = shifted_conv2d(input=x, out_channels=5, kernel_size=(2, 3),
                               spatial_shift=(-2, 1))
        with pytest.raises(TypeError,
                           match='`spatial_shift` must be a tuple with two '
                                 'elements, and the elements can only be '
                                 '-1, 0 or 1'):
            _ = shifted_conv2d(input=x, out_channels=5, kernel_size=(2, 3),
                               spatial_shift=(-1,))
        with pytest.raises(ValueError,
                           match='`padding` argument is not supported'):
            _ = shifted_conv2d(input=x, out_channels=5, kernel_size=(2, 3),
                               spatial_shift=(-1, 1), padding='SAME')

        with self.test_session() as sess:
            ####################################################################
            # spatial_shift == (0, 0) should correspond to conv2d SAME padding #
            ####################################################################

            # kernel_size (1, 1)
            kernel = np.random.normal(size=(1, 1, 7, 5)).astype(np.float32)
            assert_allclose(
                *sess.run([
                    shifted_conv2d(
                        input=x, out_channels=5, kernel_size=(1, 1),
                        spatial_shift=(0, 0), kernel=kernel, use_bias=False
                    ),
                    conv2d(
                        input=x, out_channels=5, kernel_size=(1, 1),
                        kernel=kernel, use_bias=False
                    )
                ])
            )

            # kernel_size (2, 3)
            kernel = np.random.normal(size=(2, 3, 7, 5)).astype(np.float32)
            assert_allclose(
                *sess.run([
                    shifted_conv2d(
                        input=x, out_channels=5, kernel_size=(2, 3),
                        spatial_shift=(0, 0), kernel=kernel, use_bias=False
                    ),
                    conv2d(
                        input=x, out_channels=5, kernel_size=(2, 3),
                        kernel=kernel, use_bias=False
                    )
                ])
            )

            ############################
            # spatial_shift == (-1, 0) #
            ############################

            # kernel_size (1, 1), no shift actually
            kernel = np.random.normal(size=(1, 1, 7, 5)).astype(np.float32)
            assert_allclose(
                *sess.run([
                    shifted_conv2d(
                        input=x, out_channels=5, kernel_size=(1, 1),
                        spatial_shift=(-1, 0), kernel=kernel, use_bias=False
                    ),
                    conv2d(
                        input=x, out_channels=5, kernel_size=(1, 1),
                        kernel=kernel, use_bias=False, padding='VALID'
                    )
                ])
            )

            # kernel_size (2, 3), shift accordingly: x2 is x zero-padded so
            # that a VALID conv over x2 equals the shifted conv over x
            kernel = np.random.normal(size=(2, 3, 7, 5)).astype(np.float32)
            x2 = np.zeros([3, 12, 15, 7], dtype=np.float32)
            x2[:, :-1, 1:-1, :] = x
            assert_allclose(
                *sess.run([
                    shifted_conv2d(
                        input=x, out_channels=5, kernel_size=(2, 3),
                        spatial_shift=(-1, 0), kernel=kernel, use_bias=False
                    ),
                    conv2d(
                        input=x2, out_channels=5, kernel_size=(2, 3),
                        kernel=kernel, use_bias=False, padding='VALID'
                    )
                ])
            )

            ############################
            # spatial_shift == (0, 1) #
            ############################

            # kernel_size (1, 1), no shift actually
            kernel = np.random.normal(size=(1, 1, 7, 5)).astype(np.float32)
            assert_allclose(
                *sess.run([
                    shifted_conv2d(
                        input=x, out_channels=5, kernel_size=(1, 1),
                        spatial_shift=(0, 1), kernel=kernel, use_bias=False
                    ),
                    conv2d(
                        input=x, out_channels=5, kernel_size=(1, 1),
                        kernel=kernel, use_bias=False, padding='VALID'
                    )
                ])
            )

            # kernel_size (2, 3), shift accordingly
            kernel = np.random.normal(size=(2, 3, 7, 5)).astype(np.float32)
            x2 = np.zeros([3, 12, 15, 7], dtype=np.float32)
            x2[:, :-1, 2:, :] = x
            assert_allclose(
                *sess.run([
                    shifted_conv2d(
                        input=x, out_channels=5, kernel_size=(2, 3),
                        spatial_shift=(0, 1), kernel=kernel, use_bias=False
                    ),
                    conv2d(
                        input=x2, out_channels=5, kernel_size=(2, 3),
                        kernel=kernel, use_bias=False, padding='VALID'
                    )
                ])
            )

            ##################################
            # spatial_shift == (-1, 1), NCHW #
            ##################################
            # switch x to channels-first layout for the channels_last=False cases
            x = np.transpose(x, [0, 3, 1, 2])

            with mock.patch('tensorflow.nn.conv2d', patched_conv2d):
                # kernel_size (1, 1), no shift actually
                kernel = np.random.normal(size=(1, 1, 7, 5)).astype(np.float32)
                assert_allclose(
                    *sess.run([
                        shifted_conv2d(
                            input=x, out_channels=5, kernel_size=(1, 1),
                            spatial_shift=(-1, 1), kernel=kernel,
                            use_bias=False, channels_last=False
                        ),
                        conv2d(
                            input=x, out_channels=5, kernel_size=(1, 1),
                            kernel=kernel, use_bias=False,
                            padding='VALID', channels_last=False
                        )
                    ])
                )

                # kernel_size (2, 3), shift accordingly
                kernel = np.random.normal(size=(2, 3, 7, 5)).astype(np.float32)
                x2 = np.zeros([3, 7, 12, 15], dtype=np.float32)
                x2[:, :, :-1, 2:] = x
                assert_allclose(
                    *sess.run([
                        shifted_conv2d(
                            input=x, out_channels=5, kernel_size=(2, 3),
                            spatial_shift=(-1, 1), kernel=kernel,
                            use_bias=False, channels_last=False
                        ),
                        conv2d(
                            input=x2, out_channels=5, kernel_size=(2, 3),
                            kernel=kernel, use_bias=False,
                            padding='VALID', channels_last=False
                        )
                    ])
                )
Beispiel #33
0
def test_get_offers_from_page(offers_gdynia):
    """get_offers_from_page should return a list for a category page.

    The page content fetch is stubbed out with the *offers_gdynia* fixture,
    so only the parsing/return type is exercised.
    """
    with mock.patch("morizon.utils.get_content_from_source") as get_content:
        get_content.return_value = offers_gdynia
        url = 'https://www.morizon.pl/do-wynajecia/mieszkania/gdynia/witomino-lesniczowka/?page=1'
        # isinstance is the idiomatic type check (type(...) == type([]) was not).
        assert isinstance(morizon.category.get_offers_from_page(url), list)
Beispiel #34
0
 def test_validate_fail_headerhash(self, time_mock):
     """validate() must fail when the recalculated headerhash differs."""
     # Force BlockHeader's headerhash recalculation to disagree with the
     # stored hash; validation is then expected to reject the header.
     patcher = mock.patch(
         'qrl.core.BlockHeader.BlockHeader.generate_headerhash',
         return_value=b'nonsense')
     with patcher:
         outcome = self.block_header.validate(
             self.fee_reward, self.coinbase_amount,
             self.block_header.tx_merkle_root)
     self.assertFalse(outcome)
Beispiel #35
0
    def test_26_periodictask(self):
        """Exercise the PeriodicTask model end to end.

        Covers creation, stringification of option values, updating an
        existing task by id (including removal of stale options/nodes),
        per-node last-run bookkeeping with timezone handling, and the
        cascading deletion of options and last-run records.
        """
        current_utc_time = datetime(2018, 3, 4, 5, 6, 8)
        # pin models' notion of "now" so last_update is deterministic
        with mock.patch('privacyidea.models.datetime') as mock_dt:
            mock_dt.utcnow.return_value = current_utc_time

            task1 = PeriodicTask("task1", False, "0 5 * * *", ["localhost"],
                                 "some.module", 2, {
                                     "key1": "value2",
                                     "KEY2": True,
                                     "key3": u"öfføff",
                                 })
            task2 = PeriodicTask("some other task", True, "0 6 * * *",
                                 ["localhost"], "some.other.module", 1,
                                 {"foo": "bar"})

        self.assertEqual(
            PeriodicTask.query.filter_by(name="task1").one(), task1)
        self.assertEqual(
            PeriodicTask.query.filter_by(name="some other task").one(), task2)
        self.assertEqual(
            PeriodicTaskOption.query.filter_by(periodictask_id=task1.id,
                                               key="KEY2").one().value, "True")
        # Values are converted to strings
        self.assertEqual(
            task1.get(),
            {
                "id": task1.id,
                "name": "task1",
                "active": False,
                "interval": "0 5 * * *",
                # we get a timezone-aware datetime here
                "last_update": current_utc_time.replace(tzinfo=tzutc()),
                "nodes": ["localhost"],
                "taskmodule": "some.module",
                "ordering": 2,
                "options": {
                    "key1": "value2",
                    "KEY2": "True",
                    "key3": u"öfføff",
                },
                "last_runs": {}
            })

        # register a run
        task1.set_last_run("localhost", datetime(2018, 3, 4, 5, 6, 7))

        # assert we can update the task
        later_utc_time = current_utc_time + timedelta(seconds=1)
        with mock.patch('privacyidea.models.datetime') as mock_dt:
            mock_dt.utcnow.return_value = later_utc_time
            # same id => updates task1 in place (rename, new options/nodes)
            PeriodicTask("task one",
                         True,
                         "0 8 * * *", ["localhost", "otherhost"],
                         "some.module",
                         3, {
                             "KEY2": "value number 2",
                             "key 4": 1234
                         },
                         id=task1.id)
        # the first run for otherhost
        task1.set_last_run("otherhost", datetime(2018, 8, 9, 10, 11, 12))
        result = PeriodicTask.query.filter_by(name="task one").one().get()
        self.assertEqual(
            result, {
                "id": task1.id,
                "active": True,
                "name": "task one",
                "interval": "0 8 * * *",
                "last_update": later_utc_time.replace(tzinfo=tzutc()),
                "nodes": ["localhost", "otherhost"],
                "taskmodule": "some.module",
                "ordering": 3,
                "options": {
                    "KEY2": "value number 2",
                    "key 4": "1234"
                },
                "last_runs": {
                    "localhost": datetime(2018, 3, 4, 5, 6, 7, tzinfo=tzutc()),
                    "otherhost": datetime(
                        2018, 8, 9, 10, 11, 12, tzinfo=tzutc()),
                }
            })
        # assert all old options are removed
        self.assertEqual(
            PeriodicTaskOption.query.filter_by(periodictask_id=task1.id,
                                               key="key3").count(), 0)
        # the second run for localhost
        task1.set_last_run("localhost", datetime(2018, 3, 4, 5, 6, 8))
        result = PeriodicTask.query.filter_by(name="task one").one().get()
        self.assertEqual(
            result, {
                "id": task1.id,
                "active": True,
                "name": "task one",
                "interval": "0 8 * * *",
                "last_update": later_utc_time.replace(tzinfo=tzutc()),
                "nodes": ["localhost", "otherhost"],
                "taskmodule": "some.module",
                "ordering": 3,
                "options": {
                    "KEY2": "value number 2",
                    "key 4": "1234"
                },
                "last_runs": {
                    "localhost": datetime(2018, 3, 4, 5, 6, 8, tzinfo=tzutc()),
                    "otherhost": datetime(
                        2018, 8, 9, 10, 11, 12, tzinfo=tzutc()),
                }
            })

        # remove "localhost", assert the last run is removed
        PeriodicTask("task one",
                     True,
                     "0 8 * * *", ["otherhost"],
                     "some.module",
                     4, {"foo": "bar"},
                     id=task1.id)
        self.assertEqual(
            PeriodicTaskOption.query.filter_by(
                periodictask_id=task1.id).count(), 1)
        self.assertEqual(
            PeriodicTaskLastRun.query.filter_by(
                periodictask_id=task1.id).one().node, "otherhost")
        # naive timestamp in the database
        self.assertEqual(
            PeriodicTaskLastRun.query.filter_by(
                periodictask_id=task1.id).one().timestamp,
            datetime(2018, 8, 9, 10, 11, 12, tzinfo=None))
        self.assertEqual(
            PeriodicTaskLastRun.query.filter_by(
                periodictask_id=task1.id).one().aware_timestamp,
            datetime(2018, 8, 9, 10, 11, 12, tzinfo=tzutc()))

        # remove the tasks, everything is removed
        task1.delete()
        self.assertEqual(PeriodicTaskOption.query.count(), 1)  # from task2
        self.assertEqual(PeriodicTaskLastRun.query.count(), 0)
        task2.delete()
        self.assertEqual(PeriodicTaskOption.query.count(), 0)
Beispiel #36
0
def patched_get_block_devs_lsblk():
    """Yield a mock standing in for ceph_volume.util.disk.get_block_devs_lsblk."""
    target = 'ceph_volume.util.disk.get_block_devs_lsblk'
    with patch(target) as lsblk_mock:
        yield lsblk_mock
Beispiel #37
0
def test_get_url(category, region, filters):
    """get_url must pass through the URL produced by get_url_for_filters."""
    expected = "http://trojmiasto.pl/ogloszenia/nieruchomosci-mam-do-wynajecia/ai,300_,dw,1d,s,Gdansk.html"
    with mock.patch(
            "trojmiastopl.utils.get_url_for_filters") as url_builder:
        url_builder.return_value = expected
        assert trojmiastopl.utils.get_url(category, region, **filters) == expected
def patch_insights_support(old_function):
    """Decorate *old_function* so InsightsSupport is patched while it runs."""
    return patch("insights.client.phase.v1.InsightsSupport")(old_function)
    def test_patch_nontrivial_boot_property(self):
        """Patch-validating a ComputerSystem with a non-trivial Boot property
        must PASS and must not emit the 'Skipping non-trivial property'
        warning."""
        metadata_manager = MetadataManager(["qualifier"])
        this_dir = os.path.dirname(os.path.realpath(__file__))
        metadata_dir = os.path.join(this_dir, 'metadata')
        self.assertTrue(metadata_manager.read_metadata_from_dir(metadata_dir))
        metadata_container = metadata_manager.metadata_container

        # Redfish ComputerSystem fixture; the entity to be patched.
        entity_json = {
            "@odata.context":
            "/redfish/v1/$metadata#ComputerSystem.ComputerSystem",
            "@odata.id": "/redfish/v1/Systems/34",
            "@odata.type": "#ComputerSystem.v1_3_0.ComputerSystem",
            "Id": "34",
            "Name": "Computer System",
            "Description": "Computer System description",
            "SystemType": "Physical",
            "AssetTag": "AZaz09[]!@#$%^&*()_+",
            "Manufacturer": None,
            "Model": None,
            "SKU": None,
            "SerialNumber": None,
            "PartNumber": None,
            "UUID": "1d7a1040-05e4-11e6-a108-54ab3a8ec80f",
            "HostName": None,
            "Status": {
                "State": "Enabled",
                "Health": "OK",
                "HealthRollup": "OK"
            },
            "IndicatorLED": None,
            "PowerState": "Off",
            "BiosVersion": "F20A3A03",
            # NOTE(review): the "*****@*****.**" keys below look like scrubbed
            # "...@Redfish.AllowableValues" annotations — confirm against the
            # original fixture.
            "Boot": {
                "@odata.type":
                "#ComputerSystem.v1_1_0.Boot",
                "BootSourceOverrideEnabled":
                "Disabled",
                "BootSourceOverrideTarget":
                "None",
                "*****@*****.**":
                ["Hdd", "Pxe", "None", "RemoteDrive"],
                "BootSourceOverrideMode":
                "Legacy",
                "*****@*****.**":
                ["Legacy", "UEFI"]
            },
            "ProcessorSummary": {
                "Count": 2,
                "Model": "Intel(R) Xeon(R)",
                "Status": {
                    "State": "Enabled",
                    "Health": "OK",
                    "HealthRollup": "OK"
                }
            },
            "MemorySummary": {
                "TotalSystemMemoryGiB": 30.52,
                "Status": {
                    "State": "Enabled",
                    "Health": "OK",
                    "HealthRollup": "OK"
                }
            },
            "Processors": {
                "@odata.id": "/redfish/v1/Systems/34/Processors"
            },
            "EthernetInterfaces": {
                "@odata.id": "/redfish/v1/Systems/34/EthernetInterfaces"
            },
            "NetworkInterfaces": {
                "@odata.id": "/redfish/v1/Systems/34/NetworkInterfaces"
            },
            "Storage": {
                "@odata.id": "/redfish/v1/Systems/34/Storage"
            },
            "Memory": {
                "@odata.id": "/redfish/v1/Systems/34/Memory"
            },
            "PCIeDevices": [],
            "PCIeFunctions": [],
            "TrustedModules": [],
            "Links": {
                "@odata.type": "#ComputerSystem.v1_2_0.Links",
                "Chassis": [{
                    "@odata.id": "/redfish/v1/Chassis/38"
                }],
                "Endpoints": [],
                "ManagedBy": [{
                    "@odata.id": "/redfish/v1/Managers/38"
                }],
                "Oem": {}
            },
            "Actions": {
                "#ComputerSystem.Reset": {
                    "target":
                    "/redfish/v1/Systems/34/Actions/ComputerSystem.Reset",
                    "*****@*****.**": [
                        "On", "ForceOff", "GracefulShutdown",
                        "GracefulRestart", "ForceRestart"
                    ]
                },
                "Oem": {
                    "#Intel.Oem.ChangeTPMState": {
                        "target":
                        "/redfish/v1/Systems/34/Actions/Oem/Intel.Oem.ChangeTPMState",
                        "*****@*****.**": []
                    }
                }
            },
            "Oem": {
                "Intel_RackScale": {
                    "@odata.type": "#Intel.Oem.ComputerSystem",
                    "PciDevices": [],
                    "PCIeConnectionId": [],
                    "ProcessorSockets": 2,
                    "MemorySockets": 16,
                    "DiscoveryState": "Basic",
                    "UserModeEnabled": None,
                    "TrustedExecutionTechnologyEnabled": None,
                    "Metrics": {
                        "@odata.id": "/redfish/v1/Systems/34/Metrics"
                    }
                }
            }
        }

        self.discovery_container.add_resource(
            ApiResource("/redfish/v1/Systems/34", "https://localhost:8443",
                        entity_json, "#ComputerSystem.v1_3_0.ComputerSystem"))

        # Stub out ApiCaller construction so no real connection is made.
        with mock.patch('cts_core.commons.api_caller.ApiCaller.__init__'
                        ) as api_caller_init_mock:
            api_caller_init_mock.return_value = None
            validator = MetadataPatchValidator(metadata_container, None,
                                               PatchingStrategy2_2())

            # this ValidationStatus.BLOCKED should be not affect final ValidationStatus
            validator._verify_property = MagicMock(
                return_value=(ValidationStatus.PASSED, None))
            validator._restore_property = MagicMock(
                return_value=ValidationStatus.PASSED)
            validator._patch_property = MagicMock(
                return_value=(True, True, ValidationStatus.PASSED))

            with mock.patch(
                    'cts_core.commons.api_caller.ApiCaller.patch_resource'
            ) as api_caller_patch_resource:
                api_caller_patch_resource.return_value = (
                    RequestStatus.SUCCESS, 200, None, None)
                with mock.patch(
                        'cts_core.commons.api_caller.ApiCaller.get_resource'
                ) as api_caller_get_resource:
                    api_caller_get_resource.return_value = (Link(
                        "/redfish/v1/Systems/34",
                        "https://localhost:8443"), RequestStatus.SUCCESS, 200,
                                                            entity_json, None)

                    with StdoutCapture() as out:
                        self.assertEqual(
                            ValidationStatus.PASSED,
                            validator.validate(self.discovery_container))

                    self.assertNotIn('WARNING::Skipping non-trivial property',
                                     out.raw)
Beispiel #40
0
    def test_add_4(self, mock_difficulty_tracker_get):
        """Mine four consecutive blocks and verify lattice-key handling.

        Registers a LatticePublicKey transaction in block 1, exchanges an
        ephemeral message encrypted with the registered Kyber key (decrypting
        it to check the payload round-trips), then mines blocks 2-4 and
        asserts the address state reflects the lattice keys and the fee.
        """
        with set_data_dir('no_data'):
            with State() as state:
                with set_wallet_dir("test_wallet"):
                    with mocked_genesis() as custom_genesis:
                        chain_manager = ChainManager(state)

                        # fix the difficulty so nonce mining terminates quickly
                        chain_manager._difficulty_tracker = Mock()
                        tmp_difficulty = StringToUInt256('2')
                        tmp_target = DifficultyTracker.get_target(
                            tmp_difficulty)
                        mock_difficulty_tracker_get.return_value = [
                            tmp_difficulty, tmp_target
                        ]

                        alice_xmss = get_alice_xmss()
                        slave_xmss = XMSS(
                            XmssFast(alice_xmss.seed, alice_xmss.height))
                        random_xmss1 = get_random_xmss()
                        random_kyber1 = Kyber()
                        random_dilithium1 = Dilithium()
                        random_xmss2 = get_random_xmss()
                        random_kyber2 = Kyber()
                        random_dilithium2 = Dilithium()

                        message = b'Hello World How are you?'
                        prf512_seed = b'10192'

                        # fund both test addresses in the genesis block
                        custom_genesis.genesis_balance.extend([
                            qrl_pb2.GenesisBalance(
                                address=random_xmss1.address,
                                balance=65000000000000000)
                        ])
                        custom_genesis.genesis_balance.extend([
                            qrl_pb2.GenesisBalance(
                                address=random_xmss2.address,
                                balance=65000000000000000)
                        ])
                        chain_manager.load(custom_genesis)

                        with mock.patch(
                                'qrl.core.misc.ntp.getTime') as time_mock:
                            time_mock.return_value = 1615270948

                            lattice_public_key_txn = LatticePublicKey.create(
                                fee=1,
                                kyber_pk=random_kyber1.getPK(),
                                dilithium_pk=random_dilithium1.getPK(),
                                xmss_pk=random_xmss1.pk)
                            lattice_public_key_txn._data.nonce = 1
                            lattice_public_key_txn.sign(random_xmss1)

                            tmp_block1 = Block.create(
                                block_number=1,
                                prevblock_headerhash=GenesisBlock().headerhash,
                                transactions=[lattice_public_key_txn],
                                miner_address=slave_xmss.address)

                            #  Mine the nonce
                            while not PoWValidator().validate_mining_nonce(
                                    state, tmp_block1.blockheader, False):
                                tmp_block1.set_nonces(
                                    tmp_block1.mining_nonce + 1, 0)

                            res = chain_manager.add_block(block=tmp_block1)
                            self.assertTrue(res)

                            # Need to move forward the time to align with block times
                            time_mock.return_value += config.dev.minimum_minting_delay * 2

                            encrypted_eph_message = create_ephemeral_channel(
                                msg_id=lattice_public_key_txn.txhash,
                                ttl=time_mock.return_value,
                                ttr=0,
                                addr_from=random_xmss2.address,
                                kyber_pk=random_kyber2.getPK(),
                                kyber_sk=random_kyber2.getSK(),
                                receiver_kyber_pk=random_kyber1.getPK(),
                                dilithium_pk=random_dilithium2.getPK(),
                                dilithium_sk=random_dilithium2.getSK(),
                                prf512_seed=prf512_seed,
                                data=message,
                                nonce=1)

                            chain_manager.state.update_ephemeral(
                                encrypted_eph_message)
                            eph_metadata = chain_manager.state.get_ephemeral_metadata(
                                lattice_public_key_txn.txhash)

                            # Decrypting Payload

                            encrypted_eph_message = eph_metadata.encrypted_ephemeral_message_list[
                                0]
                            encrypted_payload = encrypted_eph_message.payload

                            # recover the AES key via Kyber KEM, then decrypt
                            random_kyber1.kem_decode(encrypted_eph_message.
                                                     channel.enc_aes256_symkey)
                            aes_key = bytes(random_kyber1.getMyKey())
                            myAES = AES(aes_key)
                            decrypted_payload = myAES.decrypt(
                                encrypted_payload)
                            ephemeral_channel_payload = EphemeralChannelPayload.from_json(
                                decrypted_payload)

                            self.assertEqual(
                                ephemeral_channel_payload.prf512_seed,
                                b'10192')
                            self.assertEqual(ephemeral_channel_payload.data,
                                             b'Hello World How are you?')

                            # TODO (cyyber): Add Ephemeral Testing code using Naive RNG

                            tmp_block2 = Block.create(
                                block_number=2,
                                prevblock_headerhash=tmp_block1.headerhash,
                                transactions=[],
                                miner_address=slave_xmss.address)

                            #  Mine the nonce
                            while not PoWValidator().validate_mining_nonce(
                                    state, tmp_block2.blockheader, False):
                                tmp_block2.set_nonces(
                                    tmp_block2.mining_nonce + 1, 0)

                            res = chain_manager.add_block(block=tmp_block2)
                            self.assertTrue(res)

                            # Need to move forward the time to align with block times
                            time_mock.return_value += config.dev.minimum_minting_delay * 2

                            tmp_block3 = Block.create(
                                block_number=3,
                                prevblock_headerhash=tmp_block2.headerhash,
                                transactions=[],
                                miner_address=slave_xmss.address)

                            #  Mine the nonce
                            while not PoWValidator().validate_mining_nonce(
                                    state, tmp_block3.blockheader, False):
                                tmp_block3.set_nonces(
                                    tmp_block3.mining_nonce + 1, 0)

                            res = chain_manager.add_block(block=tmp_block3)
                            self.assertTrue(res)

                            time_mock.return_value += config.dev.minimum_minting_delay

                            tmp_block4 = Block.create(
                                block_number=4,
                                prevblock_headerhash=tmp_block3.headerhash,
                                transactions=[],
                                miner_address=slave_xmss.address)

                            #  Mine the nonce
                            while not PoWValidator().validate_mining_nonce(
                                    state, tmp_block4.blockheader, False):
                                tmp_block4.set_nonces(
                                    tmp_block4.mining_nonce + 1, 0)

                            res = chain_manager.add_block(block=tmp_block4)
                            self.assertTrue(res)

                            address_state = chain_manager.get_address(
                                random_xmss1.address)

                            self.assertEqual(
                                address_state.latticePK_list[0].kyber_pk,
                                lattice_public_key_txn.kyber_pk)
                            self.assertEqual(
                                address_state.latticePK_list[0].dilithium_pk,
                                lattice_public_key_txn.dilithium_pk)

                            self.assertEqual(address_state.address,
                                             lattice_public_key_txn.addr_from)

                            random_xmss1_state = chain_manager.get_address(
                                random_xmss1.address)

                            # genesis balance (65e15) minus the 1-shor tx fee
                            self.assertEqual(64999999999999999,
                                             random_xmss1_state.balance)
Beispiel #41
0
class RMFTestCase(TestCase):
    # provides more verbose output when comparing assertion failures
    maxDiff = None

    # (default) build all paths to test stack scripts
    TARGET_STACKS = 'TARGET_STACKS'

    # (default) build all paths to test custom action scripts
    TARGET_CUSTOM_ACTIONS = 'TARGET_CUSTOM_ACTIONS'

    # build all paths to test common services scripts
    TARGET_COMMON_SERVICES = 'TARGET_COMMON_SERVICES'

    # build all paths to test common services scripts
    TARGET_STACK_HOOKS = 'TARGET_STACK_HOOKS'

    def executeScript(
            self,
            path,
            classname=None,
            command=None,
            config_file=None,
            config_dict=None,
            # common mocks for all the scripts
            config_overrides=None,
            stack_version=None,
            checked_call_mocks=itertools.cycle([(0, "OK.")]),
            call_mocks=itertools.cycle([(0, "OK.")]),
            os_type=('Suse', '11', 'Final'),
            kinit_path_local="/usr/bin/kinit",
            os_env={'PATH': '/bin'},
            target=TARGET_STACKS,
            mocks_dict={},
            try_install=False,
            command_args=[],
            log_out_files=False,
            available_packages_in_repos=[]):

        norm_path = os.path.normpath(path)

        if target == self.TARGET_STACKS:
            stack_version = norm_path.split(os.sep)[0]

        base_path, configs_path = self._get_test_paths(target, stack_version)
        script_path = os.path.join(base_path, norm_path)

        if config_file is not None and config_dict is None:
            self.config_dict = self.get_config_file(configs_path, config_file)
        elif config_dict is not None and config_file is None:
            self.config_dict = config_dict
        else:
            raise RuntimeError(
                "Please specify either config_file or config_dict parameter")

        # add the stack tools & features from the stack if the test case's JSON file didn't have them
        if "stack_tools" not in self.config_dict["configurations"][
                "cluster-env"]:
            self.config_dict["configurations"]["cluster-env"][
                "stack_tools"] = RMFTestCase.get_stack_tools()

        if "stack_features" not in self.config_dict["configurations"][
                "cluster-env"]:
            self.config_dict["configurations"]["cluster-env"][
                "stack_features"] = RMFTestCase.get_stack_features()

        if "stack_packages" not in self.config_dict["configurations"][
                "cluster-env"]:
            self.config_dict["configurations"]["cluster-env"][
                "stack_packages"] = RMFTestCase.get_stack_packages()

        if config_overrides:
            for key, value in config_overrides.iteritems():
                self.config_dict[key] = value

        self.config_dict = ConfigDictionary(self.config_dict)

        # append basedir to PYTHONPATH
        scriptsdir = os.path.dirname(script_path)
        basedir = os.path.dirname(scriptsdir)
        sys.path.append(scriptsdir)

        # get method to execute
        try:
            with patch.object(platform,
                              'linux_distribution',
                              return_value=os_type):
                script_module = imp.load_source(classname, script_path)
                Script.instance = None
                script_class_inst = RMFTestCase._get_attr(
                    script_module, classname)()
                script_class_inst.log_out_files = log_out_files
                script_class_inst.available_packages_in_repos = available_packages_in_repos
                Script.repository_util = RepositoryUtil(
                    self.config_dict, set())
                method = RMFTestCase._get_attr(script_class_inst, command)
        except IOError, err:
            raise RuntimeError("Cannot load class %s from %s: %s" %
                               (classname, norm_path, err.message))

        # Reload params import, otherwise it won't change properties during next import
        if 'params' in sys.modules:
            del (sys.modules["params"])

        if 'params_windows' in sys.modules:
            del (sys.modules["params_windows"])

        if 'params_linux' in sys.modules:
            del (sys.modules["params_linux"])

        # Reload status_params import, otherwise it won't change properties during next import
        if 'status_params' in sys.modules:
            del (sys.modules["status_params"])

        with Environment(basedir, test_mode=True) as RMFTestCase.env:
            with patch('resource_management.core.shell.checked_call',
                       side_effect=checked_call_mocks
                       ) as mocks_dict['checked_call']:
                with patch('resource_management.core.shell.call',
                           side_effect=call_mocks) as mocks_dict['call']:
                    with patch.object(Script,
                                      'get_config',
                                      return_value=self.config_dict
                                      ) as mocks_dict['get_config']:
                        with patch.object(Script,
                                          'get_tmp_dir',
                                          return_value="/tmp"
                                          ) as mocks_dict['get_tmp_dir']:
                            with patch.object(
                                    Script,
                                    'post_start') as mocks_dict['post_start']:
                                with patch(
                                        'resource_management.libraries.functions.get_kinit_path',
                                        return_value=kinit_path_local
                                ) as mocks_dict['get_kinit_path']:
                                    with patch.object(
                                            platform,
                                            'linux_distribution',
                                            return_value=os_type
                                    ) as mocks_dict['linux_distribution']:
                                        with patch(
                                                'resource_management.libraries.functions.stack_select.is_package_supported',
                                                return_value=True):
                                            with patch(
                                                    'resource_management.libraries.functions.stack_select.get_supported_packages',
                                                    return_value=MagicMock()):
                                                with patch.object(
                                                        os, "environ",
                                                        new=os_env
                                                ) as mocks_dict['environ']:
                                                    with patch(
                                                            'resource_management.libraries.functions.stack_select.unsafe_get_stack_versions',
                                                            return_value=((
                                                                "", 0, []))):
                                                        if not try_install:
                                                            with patch.object(
                                                                    Script,
                                                                    'install_packages'
                                                            ) as install_mock_value:
                                                                method(
                                                                    RMFTestCase
                                                                    .env,
                                                                    *command_args
                                                                )
                                                        else:
                                                            method(
                                                                RMFTestCase.
                                                                env,
                                                                *command_args)

        sys.path.remove(scriptsdir)
Beispiel #42
0
def qrlnode_with_mock_blockchain(num_blocks):
    """Yield a QRLNode backed by a freshly mined in-memory blockchain.

    Mines ``num_blocks - 1`` blocks on top of genesis at a fixed (mocked)
    difficulty, with both wall-clock time and NTP time mocked so block
    timestamps advance deterministically by 60s per block.  Block 1 carries
    a single SlaveTransaction from Alice authorising Bob's key.
    """
    start_time = time.time()
    # Both time sources must be patched together: block validation compares
    # block timestamps against the (mocked) NTP time.
    with mock.patch('qrl.core.misc.ntp.getTime') as ntp_mock, \
            set_data_dir('no_data'), \
            State() as state, \
            mock.patch('time.time') as time_mock:  # noqa
        time_mock.return_value = start_time
        ntp_mock.return_value = start_time

        state.get_measurement = MagicMock(return_value=10000000)

        # XMSS tree height must be even and large enough to sign one block
        # per level, hence ceil(log2(num_blocks)) rounded up to even.
        required_height = ceil(log(num_blocks, 2))
        required_height = int(required_height + required_height % 2)

        alice_xmss = get_alice_xmss(xmss_height=required_height)
        bob_xmss = get_bob_xmss()

        genesis_block = GenesisBlock()
        chain_manager = ChainManager(state)
        chain_manager.load(genesis_block)

        # Pin the difficulty tracker to a constant, very low difficulty so
        # the PoW loop below terminates quickly.
        chain_manager._difficulty_tracker = Mock()
        dt = DifficultyTracker()
        tmp_difficulty = StringToUInt256('2')
        tmp_boundary = dt.get_target(tmp_difficulty)

        chain_manager._difficulty_tracker.get = MagicMock(
            return_value=(tmp_difficulty, tmp_boundary))

        block_prev = state.get_block(genesis_block.headerhash)

        for block_idx in range(1, num_blocks):
            transactions = []
            if block_idx == 1:
                # First block grants Bob slave-key access on Alice's address.
                slave_tx = SlaveTransaction.create(
                    addr_from=alice_xmss.address,
                    slave_pks=[bob_xmss.pk],
                    access_types=[0],
                    fee=0,
                    xmss_pk=alice_xmss.pk)
                slave_tx.sign(alice_xmss)
                slave_tx._data.nonce = 2
                transactions = [slave_tx]

            # Advance mocked time BEFORE creating the block so its timestamp
            # satisfies the minimum block interval.
            time_mock.return_value = time_mock.return_value + 60
            ntp_mock.return_value = ntp_mock.return_value + 60

            block_new = Block.create(
                block_number=block_idx,
                prevblock_headerhash=block_prev.headerhash,
                transactions=transactions,
                signing_xmss=alice_xmss,
                master_address=alice_xmss.address,
                nonce=block_idx)

            # Brute-force a valid mining nonce (cheap at difficulty '2').
            while not PoWValidator().validate_mining_nonce(
                    state, block_new.blockheader, False):
                block_new.set_mining_nonce(block_new.mining_nonce + 1)

            chain_manager.add_block(block_new)
            block_prev = block_new

        qrlnode = QRLNode(state, slaves=[])
        qrlnode.set_chain_manager(chain_manager)

        # Yield inside the with-block so the mocks and State stay alive for
        # the duration of the consuming test.
        yield qrlnode
Beispiel #43
0
        # If file exists, should rethrow exception
        with patch("os.path.isfile") as isfile_mock:
            isfile_mock.return_value = True
            with patch("__builtin__.open") as open_mock:
                open_mock.side_effect = self.exc_side_effect
                try:
                    resource_files_keeper.read_hash_sum("path-to-directory")
                    self.fail('KeeperException not thrown')
                except KeeperException:
                    pass  # Expected
                except Exception, e:
                    self.fail('Unexpected exception thrown:' + str(e))

        # Test exception handling
        # If file does not exist, should ignore exception
        with patch("os.path.isfile") as isfile_mock:
            isfile_mock.return_value = False
            with patch("__builtin__.open") as open_mock:
                open_mock.side_effect = self.exc_side_effect
                res = resource_files_keeper.read_hash_sum("path-to-directory")
                self.assertEqual(res, None)
        pass

    def test_write_hash_sum(self):
        """write_hash_sum followed by read_hash_sum must round-trip the value."""
        NEW_HASH = "new_hash"
        resource_files_keeper = ResourceFilesKeeper(
            self.TEST_RESOURCES_DIR, self.DUMMY_UNCHANGEABLE_PACKAGE)
        resource_files_keeper.write_hash_sum(self.DUMMY_UNCHANGEABLE_PACKAGE,
                                             NEW_HASH)
        hash_sum = resource_files_keeper.read_hash_sum(
            self.DUMMY_UNCHANGEABLE_PACKAGE)
        # BUGFIX: the computed hash_sum was never checked, so the test could
        # not fail even if write/read were broken.  Assert the round-trip.
        self.assertEqual(hash_sum, NEW_HASH)
Beispiel #44
0
def test_get_offer_data(offer_markup):
    """get_offer_data should parse an offer page into a dict."""
    with mock.patch("morizon.utils.get_content_from_source") as get_content:
        get_content.return_value = offer_markup
        # isinstance is the idiomatic type check; `type(x) == type({})`
        # needlessly rejects dict subclasses and is flagged by linters (E721).
        assert isinstance(morizon.offer.get_offer_data('https://www.morizon.pl/oferta/wynajem-mieszkanie-szczecin-pogodno-somosierry-51m2-mzn2028886916'), dict)
Beispiel #45
0
    def test_add_3(self):
        """Build a 3-block staking chain on top of genesis and verify that
        each block (together with its validator votes) is accepted by the
        buffered chain."""
        with State() as state:
            with set_wallet_dir("test_wallet"):
                chain = Chain(state)
                buffered_chain = BufferedChain(chain)

                alice_xmss = get_alice_xmss()
                # Slave tree derived from Alice's seed; used to sign votes.
                slave_xmss = XMSS(alice_xmss.height, alice_xmss.get_seed())

                staking_address = bytes(alice_xmss.get_address().encode())

                # FIXME: Replace this with a call to create a hash_chain
                # h3 is the hashchain terminator; h2/h1/h0 are revealed in
                # reverse order as blocks 1/2/3 are produced.
                h0 = sha256(b'hashchain_seed')
                h1 = sha256(h0)
                h2 = sha256(h1)
                h3 = sha256(h2)

                with mocked_genesis() as custom_genesis:
                    # Fund Alice in genesis so she can stake.
                    custom_genesis.genesis_balance.extend([
                        qrl_pb2.GenesisBalance(
                            address=alice_xmss.get_address(), balance=100)
                    ])

                    res = buffered_chain.add_block(block=GenesisBlock())
                    self.assertTrue(res)
                    stake_transaction = StakeTransaction.create(
                        activation_blocknumber=1,
                        xmss=alice_xmss,
                        slavePK=slave_xmss.pk(),
                        hashchain_terminator=h3)
                    stake_transaction._data.nonce = 1  # FIXME: The test needs private access.. This is an API issue
                    stake_transaction.sign(alice_xmss)

                    # Vote on the genesis block (block 0) with the slave key.
                    vote = Vote.create(
                        addr_from=alice_xmss.get_address().encode(),
                        blocknumber=0,
                        headerhash=GenesisBlock().headerhash,
                        xmss=slave_xmss)
                    vote.sign(slave_xmss)
                    buffered_chain.add_vote(vote)
                    vote_metadata = buffered_chain.get_consensus(0)

                    # Register Alice as a stake validator from block 1 on.
                    chain.pstate.stake_validators_tracker.add_sv(
                        balance=100,
                        stake_txn=stake_transaction,
                        blocknumber=1)

                    sv = chain.pstate.stake_validators_tracker.sv_dict[
                        staking_address]
                    self.assertEqual(0, sv.nonce)

                    tmp_block1 = Block.create(
                        staking_address=staking_address,
                        block_number=1,
                        reveal_hash=h2,
                        prevblock_headerhash=GenesisBlock().headerhash,
                        transactions=[stake_transaction],
                        duplicate_transactions=OrderedDict(),
                        vote=vote_metadata,
                        signing_xmss=alice_xmss,
                        nonce=1)

                    res = buffered_chain.add_block(block=tmp_block1)
                    self.assertTrue(res)

                    # Need to move forward the time to align with block times
                    with mock.patch('qrl.core.ntp.getTime') as time_mock:
                        time_mock.return_value = tmp_block1.timestamp + config.dev.minimum_minting_delay

                        vote = Vote.create(
                            addr_from=alice_xmss.get_address().encode(),
                            blocknumber=1,
                            headerhash=tmp_block1.headerhash,
                            xmss=slave_xmss)
                        vote.sign(slave_xmss)
                        buffered_chain.add_vote(vote)
                        vote_metadata = buffered_chain.get_consensus(1)

                        tmp_block2 = Block.create(
                            staking_address=staking_address,
                            block_number=2,
                            reveal_hash=h1,
                            prevblock_headerhash=tmp_block1.headerhash,
                            transactions=[],
                            duplicate_transactions=OrderedDict(),
                            vote=vote_metadata,
                            signing_xmss=alice_xmss,
                            nonce=2)

                    # NOTE(review): add_block runs OUTSIDE the ntp.getTime
                    # patch above — presumably only Block.create needs the
                    # advanced timestamp; confirm this is intentional.
                    res = buffered_chain.add_block(block=tmp_block2)
                    self.assertTrue(res)

                    # Need to move forward the time to align with block times
                    with mock.patch('qrl.core.ntp.getTime') as time_mock:
                        time_mock.return_value = tmp_block2.timestamp + config.dev.minimum_minting_delay

                        vote = Vote.create(
                            addr_from=alice_xmss.get_address().encode(),
                            blocknumber=2,
                            headerhash=tmp_block2.headerhash,
                            xmss=slave_xmss)
                        vote.sign(slave_xmss)
                        buffered_chain.add_vote(vote)
                        vote_metadata = buffered_chain.get_consensus(2)

                        tmp_block3 = Block.create(
                            staking_address=staking_address,
                            block_number=3,
                            reveal_hash=h0,
                            prevblock_headerhash=tmp_block2.headerhash,
                            transactions=[],
                            duplicate_transactions=OrderedDict(),
                            vote=vote_metadata,
                            signing_xmss=alice_xmss,
                            nonce=3)

                    res = buffered_chain.add_block(block=tmp_block3)
                    self.assertTrue(res)
Beispiel #46
0
def test_get_max_page(offers_szczecin):
    """The fixture listing markup should report exactly 3 result pages."""
    listing_url = 'https://www.morizon.pl/do-wynajecia/mieszkania/gdynia/witomino-lesniczowka/?page=1&ps%5Bnumber_of_rooms_from%5D=2&'
    with mock.patch("morizon.utils.get_content_from_source") as content_mock:
        content_mock.return_value = offers_szczecin
        max_page = morizon.utils.get_max_page(listing_url)
    assert max_page == 3
Beispiel #47
0
    def test_resnet_general_block(self):
        x = np.random.normal(size=[2, 3, 4, 5, 6]).astype(np.float32)
        x_tensor = tf.convert_to_tensor(x)
        x_tensor.tag = 'input'
        kernel_regularizer = l2_regularizer(0.001)

        # test error arguments
        for arg_name in ('kernel', 'kernel_mask', 'bias'):
            with pytest.raises(ValueError,
                               match='`{}` argument is not allowed for a '
                               'resnet block'.format(arg_name)):
                _ = resnet_general_block(conv_fn=conv2d,
                                         input=x_tensor,
                                         in_channels=6,
                                         out_channels=8,
                                         kernel_size=(3, 2),
                                         channels_last=True,
                                         **{arg_name: object()})

        # test direct shortcut, without norm, act, dropout
        my_conv2d = Mock(wraps=conv2d)
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0':
            TensorFunction(self,
                           my_conv2d,
                           'input',
                           'conv_0',
                           expected_kwargs={
                               'out_channels': 6,
                               'kernel_size': (3, 2),
                               'strides': 1,
                               'channels_last': True,
                               'use_bias': True,
                               'scope': 'conv_0',
                               'kernel_regularizer': kernel_regularizer
                           }),
            'conv_1':
            TensorFunction(self,
                           my_conv2d,
                           'conv_0',
                           'conv_1',
                           expected_kwargs={
                               'out_channels': 6,
                               'kernel_size': (3, 2),
                               'strides': 1,
                               'channels_last': True,
                               'use_bias': True,
                               'scope': 'conv_1',
                               'kernel_regularizer': kernel_regularizer
                           })
        })

        with mock.patch(
                'tfsnippet.layers.convolutional.resnet.'
                'resnet_add_shortcut_residual',
                TensorOperator(self, operator.add, 'input', 'conv_1',
                               'output')):
            y = resnet_general_block(conv_fn=conv_fn,
                                     input=x_tensor,
                                     in_channels=6,
                                     out_channels=6,
                                     kernel_size=(3, 2),
                                     channels_last=True,
                                     kernel_regularizer=kernel_regularizer)
            self.assertEqual(y.tag, 'output')
            self.assertEqual(my_conv2d.call_count, 2)

        # test conv shortcut because of strides != 1, without norm, act, dropout
        my_conv2d = Mock(wraps=conv2d)
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0':
            TensorFunction(self,
                           my_conv2d,
                           'input',
                           'conv_0',
                           expected_kwargs={
                               'out_channels': 4,
                               'kernel_size': 1,
                               'strides': 1,
                               'channels_last': False,
                               'use_bias': False,
                               'scope': 'conv_0',
                               'kernel_regularizer': kernel_regularizer
                           }),
            'conv_1':
            TensorFunction(self,
                           my_conv2d,
                           'conv_0',
                           'conv_1',
                           expected_kwargs={
                               'out_channels': 4,
                               'kernel_size': 1,
                               'strides': (2, 2),
                               'channels_last': False,
                               'use_bias': False,
                               'scope': 'conv_1',
                               'kernel_regularizer': kernel_regularizer
                           })
        })
        shortcut_conv_fn = TensorFunction(self,
                                          my_conv2d,
                                          'input',
                                          'shortcut',
                                          expected_kwargs={
                                              'out_channels':
                                              4,
                                              'kernel_size':
                                              1,
                                              'strides': (2, 2),
                                              'channels_last':
                                              False,
                                              'use_bias':
                                              True,
                                              'scope':
                                              'shortcut',
                                              'kernel_regularizer':
                                              kernel_regularizer
                                          })

        with mock.patch(
                'tfsnippet.layers.convolutional.resnet.'
                'resnet_add_shortcut_residual',
                TensorOperator(self, operator.add, 'shortcut', 'conv_1',
                               'output')):
            y = resnet_general_block(conv_fn=conv_fn,
                                     input=x_tensor,
                                     in_channels=4,
                                     out_channels=4,
                                     kernel_size=1,
                                     strides=(2, 2),
                                     channels_last=False,
                                     shortcut_conv_fn=shortcut_conv_fn,
                                     resize_at_exit=True,
                                     use_bias=False,
                                     kernel_regularizer=kernel_regularizer)
            self.assertEqual(y.tag, 'output')
            self.assertEqual(my_conv2d.call_count, 3)

        # test conv shortcut because of channel mismatch, w norm, act, dropout
        my_conv2d = Mock(wraps=conv2d)
        tensor_processor = ScopeTensorFunctionMap({
            'norm_0':
            TensorFunction(self, tf.identity, 'input', 'norm_0'),
            'activation_0':
            TensorFunction(self, tf.identity, 'norm_0', 'activation_0'),
            'after_conv_0':
            TensorFunction(self, tf.identity, 'conv_0', 'after_conv_0'),
            'dropout':
            TensorFunction(self, tf.identity, 'after_conv_0', 'dropout'),
            'norm_1':
            TensorFunction(self, tf.identity, 'dropout', 'norm_1'),
            'activation_1':
            TensorFunction(self, tf.identity, 'norm_1', 'activation_1'),
            'after_conv_1':
            TensorFunction(self, tf.identity, 'conv_1', 'after_conv_1'),
        })
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0':
            TensorFunction(self,
                           my_conv2d,
                           'activation_0',
                           'conv_0',
                           expected_kwargs={
                               'out_channels': 8,
                               'kernel_size': 2,
                               'strides': 1,
                               'channels_last': True,
                               'use_bias': False,
                               'scope': 'conv_0',
                               'kernel_regularizer': kernel_regularizer
                           }),
            'conv_1':
            TensorFunction(self,
                           my_conv2d,
                           'activation_1',
                           'conv_1',
                           expected_kwargs={
                               'out_channels': 16,
                               'kernel_size': 2,
                               'strides': 1,
                               'channels_last': True,
                               'use_bias': False,
                               'scope': 'conv_1',
                               'kernel_regularizer': kernel_regularizer
                           })
        })
        shortcut_conv_fn = TensorFunction(self,
                                          my_conv2d,
                                          'input',
                                          'shortcut',
                                          expected_kwargs={
                                              'out_channels':
                                              8,
                                              'kernel_size': (3, 2),
                                              'strides':
                                              1,
                                              'channels_last':
                                              True,
                                              'use_bias':
                                              True,
                                              'scope':
                                              'shortcut',
                                              'kernel_regularizer':
                                              kernel_regularizer,
                                          })

        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_general_block_apply_gate',
                        TensorFunction(
                            self, resnet_general_block_apply_gate,
                            'after_conv_1', 'apply_gate',
                            expected_kwargs={
                                'gate_sigmoid_bias': 1.1,
                                'axis': -1,
                            }
                        )), \
                mock.patch('tfsnippet.layers.convolutional.resnet.'
                           'resnet_add_shortcut_residual',
                           TensorOperator(
                               self, operator.add, 'shortcut', 'apply_gate',
                               'output'
                           )):
            y = resnet_general_block(conv_fn=conv_fn,
                                     input=x_tensor,
                                     in_channels=6,
                                     out_channels=8,
                                     kernel_size=2,
                                     channels_last=True,
                                     shortcut_conv_fn=shortcut_conv_fn,
                                     shortcut_kernel_size=(3, 2),
                                     resize_at_exit=False,
                                     after_conv_0=tensor_processor,
                                     after_conv_1=tensor_processor,
                                     activation_fn=tensor_processor,
                                     normalizer_fn=tensor_processor,
                                     dropout_fn=tensor_processor,
                                     gated=True,
                                     gate_sigmoid_bias=1.1,
                                     kernel_regularizer=kernel_regularizer)
            self.assertEqual(y.tag, 'output')
            self.assertEqual(y.get_shape()[-1], 8)
            self.assertEqual(my_conv2d.call_count, 3)

        # test conv shortcut because of use_shortcut_conv = True
        my_conv2d = Mock(wraps=conv2d)
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0':
            TensorFunction(self,
                           my_conv2d,
                           'input',
                           'conv_0',
                           expected_kwargs={
                               'out_channels': 4,
                               'kernel_size': 1,
                               'strides': 1,
                               'channels_last': False,
                               'use_bias': True,
                               'scope': 'conv_0',
                               'kernel_regularizer': kernel_regularizer
                           }),
            'conv_1':
            TensorFunction(self,
                           my_conv2d,
                           'conv_0',
                           'conv_1',
                           expected_kwargs={
                               'out_channels': 8,
                               'kernel_size': 1,
                               'strides': 1,
                               'channels_last': False,
                               'use_bias': True,
                               'scope': 'conv_1',
                               'kernel_regularizer': kernel_regularizer
                           })
        })
        shortcut_conv_fn = TensorFunction(self,
                                          my_conv2d,
                                          'input',
                                          'shortcut',
                                          expected_kwargs={
                                              'out_channels':
                                              4,
                                              'kernel_size':
                                              1,
                                              'strides':
                                              1,
                                              'channels_last':
                                              False,
                                              'use_bias':
                                              True,
                                              'scope':
                                              'shortcut',
                                              'kernel_regularizer':
                                              kernel_regularizer
                                          })

        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_general_block_apply_gate',
                        TensorFunction(
                            self, resnet_general_block_apply_gate,
                            'conv_1', 'apply_gate',
                            expected_kwargs={
                                'gate_sigmoid_bias': 1.2,
                                'axis': -3,
                            }
                        )), \
                mock.patch('tfsnippet.layers.convolutional.resnet.'
                           'resnet_add_shortcut_residual',
                           TensorOperator(
                               self, operator.add, 'shortcut', 'apply_gate',
                               'output'
                           )):
            y = resnet_general_block(conv_fn=conv_fn,
                                     input=x_tensor,
                                     in_channels=4,
                                     out_channels=4,
                                     kernel_size=1,
                                     strides=1,
                                     channels_last=False,
                                     use_shortcut_conv=True,
                                     shortcut_conv_fn=shortcut_conv_fn,
                                     resize_at_exit=True,
                                     gated=True,
                                     gate_sigmoid_bias=1.2,
                                     kernel_regularizer=kernel_regularizer)
            self.assertEqual(y.tag, 'output')
            self.assertEqual(my_conv2d.call_count, 3)
    def test_osdisks_blacklist(self, isfile_mock, chk_writable_mount_mock):
        """Hardware.osdisks() must report only real, writable, non-blacklisted mounts.

        The fake ``df`` output mixes file mounts (/etc/*), a read-only mount
        (/run/secrets), a blacklisted mount plus its sub-directory (filtered
        via the agent ``ignore_mount_points`` setting), tmpfs/shm pseudo
        mounts, and '/'.  Only '/' is expected to survive the filtering.
        """
        df_output = \
          """Filesystem                                                                                        Type  1024-blocks     Used Available Capacity Mounted on
      /dev/mapper/docker-253:0-4980899-d45c264d37ab18c8ed14f890f4d59ac2b81e1c52919eb36a79419787209515f3 xfs      31447040  1282384  30164656       5% /
      tmpfs                                                                                             tmpfs    32938336        4  32938332       1% /dev
      tmpfs                                                                                             tmpfs    32938336        0  32938336       0% /sys/fs/cgroup
      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /etc/resolv.conf
      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /etc/hostname
      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /etc/hosts
      shm                                                                                               tmpfs       65536        0     65536       0% /dev/shm
      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /run/secrets
      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /mnt/blacklisted_mount
      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /mnt/blacklisted_mount/sub-dir
      """

        def isfile_side_effect(path):
            # These mount points are regular files, not directories.
            assume_files = ["/etc/resolv.conf", "/etc/hostname", "/etc/hosts"]
            return path in assume_files

        def chk_writable_mount_side_effect(path):
            # /run/secrets is treated as a read-only mount.
            assume_read_only = ["/run/secrets"]
            return path not in assume_read_only

        isfile_mock.side_effect = isfile_side_effect
        chk_writable_mount_mock.side_effect = chk_writable_mount_side_effect

        config_dict = {
            "agent": {
                "ignore_mount_points": "/mnt/blacklisted_mount"
            }
        }

        with patch("subprocess.Popen") as open_mock:
            # Make the subprocess used by osdisks() return the canned df output.
            proc_mock = Mock()
            attr = {'communicate.return_value': [df_output]}
            proc_mock.configure_mock(**attr)
            open_mock.return_value = proc_mock

            def conf_get(section, key, default=""):
                if section in config_dict and key in config_dict[section]:
                    return config_dict[section][key]

                return default

            def has_option(section, key):
                return section in config_dict and key in config_dict[section]

            conf = Mock()
            attr = {
                'get.side_effect': conf_get,
                'has_option.side_effect': has_option
            }
            conf.configure_mock(**attr)

            result = Hardware.osdisks(conf)

        # assertEqual: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(1, len(result))

        expected_mounts_left = ["/"]
        mounts_left = [item["mountpoint"] for item in result]

        self.assertEqual(expected_mounts_left, mounts_left)
Beispiel #49
0
def mocked_genesis():
    """Yield a mutable deep copy of the genesis block installed as the singleton.

    While the generator is suspended, ``GenesisBlock.instance`` points at the
    copy, so tests may tamper with genesis data freely; the patch context
    restores the original singleton when the generator is closed.
    """
    genesis_copy = deepcopy(GenesisBlock())
    with mock.patch('qrl.core.GenesisBlock.GenesisBlock.instance'):
        GenesisBlock.instance = genesis_copy
        yield genesis_copy
Beispiel #50
0
    def test_resnet_deconv2d_block(self):
        """resnet_deconv2d_block must delegate to resnet_general_block correctly.

        Wraps ``resnet_general_block`` in a Mock and checks two scenarios --
        a defaults-only NHWC call and a fully customized NCHW call with
        strides / output_shape -- verifying both the output static shape and
        the exact kwargs forwarded to the wrapped function.
        """
        with mock.patch(
                'tfsnippet.layers.convolutional.resnet.'
                'resnet_general_block',
                Mock(wraps=resnet_general_block)) as fn:
            # Identity stand-ins: the test only checks that the very same
            # callables are forwarded, not what they compute.
            normalizer_fn = lambda x: x
            activation_fn = lambda x: x
            dropout_fn = lambda x: x
            after_conv_0 = lambda x: x
            after_conv_1 = lambda x: x
            my_deconv2d = Mock(
                wraps=lambda *args, **kwargs: deconv2d(*args, **kwargs))
            shortcut_conv_fn = Mock(
                wraps=lambda *args, **kwargs: deconv2d(*args, **kwargs))
            kernel_regularizer = l2_regularizer(0.001)

            # test NHWC
            # NOTE(review): rank-5 input -- the trailing dim (5) is channels
            # here (channels_last defaults to True per the expected kwargs
            # below); the leading dims presumably act as batch-like axes.
            input = tf.constant(np.random.random(size=[17, 11, 32, 31, 5]),
                                dtype=tf.float32)
            output = resnet_deconv2d_block(
                input=input,
                out_channels=7,
                kernel_size=3,
                name='deconv_layer',
                kernel_regularizer=kernel_regularizer,
            )
            self.assertEqual(get_static_shape(output), (17, 11, 32, 31, 7))
            # No conv_fn was passed, so the block supplies its own; drop it
            # before comparing the remaining forwarded kwargs exactly.
            kwargs = dict(fn.call_args[1])
            kwargs.pop('conv_fn')
            self.assertDictEqual(
                kwargs, {
                    'input': input,
                    'in_channels': 5,
                    'out_channels': 7,
                    'kernel_size': 3,
                    'strides': (1, 1),
                    'channels_last': True,
                    'use_shortcut_conv': None,
                    'shortcut_conv_fn': None,
                    'shortcut_kernel_size': (1, 1),
                    'resize_at_exit': False,
                    'after_conv_0': None,
                    'after_conv_1': None,
                    'activation_fn': None,
                    'normalizer_fn': None,
                    'dropout_fn': None,
                    'gated': False,
                    'gate_sigmoid_bias': 2.,
                    'use_bias': None,
                    'name': 'deconv_layer',
                    'scope': None,
                    'kernel_regularizer': kernel_regularizer,
                })

            # test NCHW
            input = tf.constant(np.random.random(size=[17, 11, 5, 32, 31]),
                                dtype=tf.float32)
            output_shape = (17, 11, 7, 64, 63)
            output = resnet_deconv2d_block(
                input=input,
                out_channels=7,
                kernel_size=(3, 3),
                conv_fn=my_deconv2d,
                strides=2,
                output_shape=output_shape[-2:],
                channels_last=False,
                use_shortcut_conv=True,
                shortcut_conv_fn=shortcut_conv_fn,
                shortcut_kernel_size=(2, 2),
                resize_at_exit=True,
                after_conv_0=after_conv_0,
                after_conv_1=after_conv_1,
                activation_fn=activation_fn,
                normalizer_fn=normalizer_fn,
                dropout_fn=dropout_fn,
                gated=True,
                gate_sigmoid_bias=1.2,
                use_bias=True,
                scope='deconv_layer_2',
                kernel_regularizer=kernel_regularizer,
            )
            self.assertEqual(get_static_shape(output), output_shape)
            kwargs = dict(fn.call_args[1])
            # The supplied conv callables are wrapped by the block, so the
            # forwarded objects must NOT be the very same objects passed in.
            self.assertIsNot(kwargs.pop('conv_fn'), my_deconv2d)
            self.assertIsNot(kwargs.pop('shortcut_conv_fn'), shortcut_conv_fn)
            self.assertDictEqual(
                kwargs, {
                    'input': input,
                    'in_channels': 5,
                    'out_channels': 7,
                    'kernel_size': (3, 3),
                    'strides': 2,
                    'channels_last': False,
                    'use_shortcut_conv': True,
                    'shortcut_kernel_size': (2, 2),
                    'resize_at_exit': True,
                    'after_conv_0': after_conv_0,
                    'after_conv_1': after_conv_1,
                    'activation_fn': activation_fn,
                    'normalizer_fn': normalizer_fn,
                    'dropout_fn': dropout_fn,
                    'gated': True,
                    'gate_sigmoid_bias': 1.2,
                    'use_bias': True,
                    'name': 'resnet_deconv2d_block',
                    'scope': 'deconv_layer_2',
                    'kernel_regularizer': kernel_regularizer,
                })
            # Two body convolutions plus one shortcut convolution.
            self.assertEqual(my_deconv2d.call_count, 2)
            self.assertEqual(shortcut_conv_fn.call_count, 1)
Beispiel #51
0
class RMFTestCase(TestCase):
  """Base TestCase for Resource Management Framework script tests.

  Loads a stack / custom-action / common-services script by path, patches
  the usual collaborators (shell calls, Script config, platform probes,
  os.environ), and executes one of the script's commands inside a test
  Environment.
  """

  # (default) build all paths to test stack scripts
  TARGET_STACKS = 'TARGET_STACKS'

  # (default) build all paths to test custom action scripts
  TARGET_CUSTOM_ACTIONS = 'TARGET_CUSTOM_ACTIONS'

  # build all paths to test common services scripts
  TARGET_COMMON_SERVICES = 'TARGET_COMMON_SERVICES'

  def executeScript(self, path, classname=None, command=None, config_file=None,
                    config_dict=None,
                    # common mocks for all the scripts
                    config_overrides = None,
                    hdp_stack_version = None,
                    checked_call_mocks = itertools.cycle([(0, "OK.")]),
                    call_mocks = itertools.cycle([(0, "OK.")]),
                    os_type=('Suse','11','Final'),
                    kinit_path_local="/usr/bin/kinit",
                    os_env=None,
                    target=TARGET_STACKS,
                    mocks_dict=None,
                    try_install=False,
                    command_args=None):
    """Load the script at *path* and run *command* under the standard mocks.

    Exactly one of config_file / config_dict must be given; the resulting
    configuration (plus optional config_overrides) is served through
    Script.get_config.  The patch objects created here are exposed to the
    caller via *mocks_dict*.  Raises RuntimeError for a bad *target*, an
    unreadable config file, ambiguous config arguments, or a load failure.
    """
    # Mutable default arguments ({} / []) would be shared across calls and
    # leak mocks/env from one test into the next; create fresh ones per call.
    if os_env is None:
      os_env = {'PATH': '/bin'}
    if mocks_dict is None:
      mocks_dict = {}
    if command_args is None:
      command_args = []

    norm_path = os.path.normpath(path)
    src_dir = RMFTestCase.get_src_folder()
    if target == self.TARGET_STACKS:
      stack_version = norm_path.split(os.sep)[0]
      base_path = os.path.join(src_dir, PATH_TO_STACKS)
      configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
    elif target == self.TARGET_CUSTOM_ACTIONS:
      base_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTIONS)
      configs_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTION_TESTS, "configs")
    elif target == self.TARGET_COMMON_SERVICES:
      base_path = os.path.join(src_dir, PATH_TO_COMMON_SERVICES)
      configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, hdp_stack_version, "configs")
    else:
      # BUG FIX: RuntimeError("... %s", target) never interpolated the
      # message; format it explicitly instead.
      raise RuntimeError("Wrong target value %s" % target)
    script_path = os.path.join(base_path, norm_path)
    if config_file is not None and config_dict is None:
      config_file_path = os.path.join(configs_path, config_file)
      try:
        with open(config_file_path, "r") as f:
          self.config_dict = json.load(f)
      except IOError:
        raise RuntimeError("Can not read config file: "+ config_file_path)
    elif config_dict is not None and config_file is None:
      self.config_dict = config_dict
    else:
      raise RuntimeError("Please specify either config_file_path or config_dict parameter")

    if config_overrides:
      # .items() (not the Python-2-only .iteritems()) keeps this portable.
      for key, value in config_overrides.items():
        self.config_dict[key] = value

    self.config_dict = ConfigDictionary(self.config_dict)

    # append basedir to PYTHONPATH
    scriptsdir = os.path.dirname(script_path)
    basedir = os.path.dirname(scriptsdir)
    sys.path.append(scriptsdir)
    try:
      # get method to execute
      try:
        with patch.object(platform, 'linux_distribution', return_value=os_type):
          script_module = imp.load_source(classname, script_path)
          script_class_inst = RMFTestCase._get_attr(script_module, classname)()
          method = RMFTestCase._get_attr(script_class_inst, command)
      except IOError as err:
        # 'except IOError, err' and err.message are Python-2-only spellings.
        raise RuntimeError("Cannot load class %s from %s: %s" % (classname, norm_path, str(err)))

      # Reload params import, otherwise it won't change properties during next import
      if 'params' in sys.modules:
        del(sys.modules["params"])

      if 'params_windows' in sys.modules:
        del(sys.modules["params_windows"])

      if 'params_linux' in sys.modules:
        del(sys.modules["params_linux"])

      # Reload status_params import, otherwise it won't change properties during next import
      if 'status_params' in sys.modules:
        del(sys.modules["status_params"])

      with Environment(basedir, test_mode=True) as RMFTestCase.env:
        with patch('resource_management.core.shell.checked_call', side_effect=checked_call_mocks) as mocks_dict['checked_call']:
          with patch('resource_management.core.shell.call', side_effect=call_mocks) as mocks_dict['call']:
            with patch.object(Script, 'get_config', return_value=self.config_dict) as mocks_dict['get_config']: # mocking configurations
              with patch.object(Script, 'get_tmp_dir', return_value="/tmp") as mocks_dict['get_tmp_dir']:
                with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local) as mocks_dict['get_kinit_path']:
                  with patch.object(platform, 'linux_distribution', return_value=os_type) as mocks_dict['linux_distribution']:
                    with patch.object(os, "environ", new=os_env) as mocks_dict['environ']:
                      if not try_install:
                        with patch.object(Script, 'install_packages') as install_mock_value:
                          method(RMFTestCase.env, *command_args)
                      else:
                        method(RMFTestCase.env, *command_args)
    finally:
      # Always undo the sys.path mutation, even when loading or running the
      # script raises -- previously an exception left scriptsdir on the path.
      sys.path.remove(scriptsdir)
Beispiel #52
0
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''

from unittest import TestCase
import logging
import unittest
import subprocess
import socket
from mock.mock import patch
from mock.mock import MagicMock
from mock.mock import create_autospec

with patch("platform.linux_distribution",
           return_value=('redhat', '11', 'Final')):
    from ambari_agent.HostCheckReportFileHandler import HostCheckReportFileHandler
    from ambari_agent.PackagesAnalyzer import PackagesAnalyzer
    from ambari_agent.HostInfo import HostInfo
    from ambari_agent.Hardware import Hardware
    from ambari_agent.AmbariConfig import AmbariConfig
    from resource_management.core.system import System
    from ambari_commons import OSCheck, Firewall, FirewallChecks, OSConst


@patch.object(System, "os_family", new='redhat')
class TestHostInfo(TestCase):

    logger = logging.getLogger()

    @patch.object(PackagesAnalyzer, 'hasZypper')
Beispiel #53
0
from ambari_server.properties import Properties
import platform

from ambari_commons import os_utils
os_utils.search_file = MagicMock(return_value="/tmp/ambari.properties")
import shutil
project_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                           os.path.normpath("../../../../"))
shutil.copyfile(project_dir + "/ambari-server/conf/unix/ambari.properties",
                "/tmp/ambari.properties")

with patch.object(platform,
                  "linux_distribution",
                  return_value=MagicMock(return_value=('Redhat', '6.4',
                                                       'Final'))):
    with patch("os.path.isdir", return_value=MagicMock(return_value=True)):
        with patch("os.access", return_value=MagicMock(return_value=True)):
            with patch.object(
                    os_utils,
                    "parse_log4j_file",
                    return_value={'ambari.log.dir': '/var/log/ambari-server'}):
                from ambari_server.dbConfiguration import get_jdbc_driver_path, get_native_libs_path
                from ambari_server.serverConfiguration import get_conf_dir
                from ambari_server.serverClassPath import ServerClassPath, AMBARI_SERVER_LIB, SERVER_CLASSPATH_KEY, JDBC_DRIVER_PATH_PROPERTY


@patch.object(platform,
              "linux_distribution",
              new=MagicMock(return_value=('Redhat', '6.4', 'Final')))
@patch("os.path.isdir", new=MagicMock(return_value=True))
@patch("os.access", new=MagicMock(return_value=True))
Beispiel #54
0
 def test_get_url_contents_checks_cache(self, mock_get_key):
     """Fetching a URL must first probe the cache under the derived key."""
     mock_get_key.return_value = "abc"
     subject = Crawler(self.cache, Mock())
     with patch('crawler.requests'):
         subject._get_url_contents("myurl")
         self.cache.exists.assert_called_once_with('abc')
 def decorator(old_function):
     """Wrap *old_function* so InsightsUploadConf.validate_gpg_sig is patched.

     NOTE(review): ``valid`` comes from the enclosing scope (the decorator
     factory this closure belongs to) and is used as the mocked return value
     of the GPG signature check.
     """
     patcher = patch(
         "insights.client.collection_rules.InsightsUploadConf.validate_gpg_sig",
         return_value=valid)
     return patcher(old_function)
Beispiel #56
0
    def test_daemonize_and_stop(self, exists_mock, sleep_mock):
        """daemonize() records our pid in the pid file; stop_agent() exits 0.

        Covers both stop paths: a graceful kill -15 followed by a failing
        kill -0 probe (process already gone), and escalation to kill -9 when
        the pid file still exists after the graceful attempts.
        """
        from ambari_commons.shell import shellRunnerLinux

        oldpid = ProcessHelper.pidfile
        pid = str(os.getpid())
        _, tmpoutfile = tempfile.mkstemp()
        ProcessHelper.pidfile = tmpoutfile

        # Test daemonization
        main.daemonize()
        # Read via a context manager so the handle is closed
        # (the original open(...).read() leaked the file object).
        with open(ProcessHelper.pidfile, 'r') as pid_file:
            saved = pid_file.read()
        self.assertEqual(pid, saved)

        main.GRACEFUL_STOP_TRIES = 1
        with patch("ambari_commons.shell.shellRunnerLinux.run") as kill_mock:
            # Reuse pid file when testing agent stop
            # Testing normal exit
            exists_mock.return_value = False
            # kill -15 succeeds, then kill -0 fails => process terminated.
            kill_mock.side_effect = [{
                'exitCode': 0,
                'output': '',
                'error': ''
            }, {
                'exitCode': 1,
                'output': '',
                'error': ''
            }]
            try:
                main.stop_agent()
                raise Exception("main.stop_agent() should raise sys.exit(0).")
            except SystemExit as e:
                # assertEqual: assertEquals is a deprecated alias.
                self.assertEqual(0, e.code)

            kill_mock.assert_has_calls([
                call(['ambari-sudo.sh', 'kill', '-15', pid]),
                call(['ambari-sudo.sh', 'kill', '-0', pid])
            ])

            # Restore
            kill_mock.reset_mock()
            # All kills "succeed" => process lingers, forcing the -9 path.
            kill_mock.side_effect = [{
                'exitCode': 0,
                'output': '',
                'error': ''
            }, {
                'exitCode': 0,
                'output': '',
                'error': ''
            }, {
                'exitCode': 0,
                'output': '',
                'error': ''
            }]

            # Testing exit when failed to remove pid file
            exists_mock.return_value = True
            try:
                main.stop_agent()
                raise Exception("main.stop_agent() should raise sys.exit(0).")
            except SystemExit as e:
                self.assertEqual(0, e.code)

            kill_mock.assert_has_calls([
                call(['ambari-sudo.sh', 'kill', '-15', pid]),
                call(['ambari-sudo.sh', 'kill', '-0', pid]),
                call(['ambari-sudo.sh', 'kill', '-9', pid])
            ])

        # Restore
        ProcessHelper.pidfile = oldpid
        os.remove(tmpoutfile)
Beispiel #57
0
 def test_accumulated_first_retention_with_surpassed_minimum(self):
     """With the accumulated amount above the minimum, 9000 yields a 200 retention."""
     ganancias = self.env.ref('l10n_ar_retentions.retention_retention_ganancias_efectuada')
     with mock.patch(accumulated_amount) as accumulated_mock:
         accumulated_mock.return_value = 101000
         result = ganancias.calculate_profit_retention(self.partner, 9000)
         assert result[1] == 200
Beispiel #58
0
    "DownloadSource", "UnknownConfigurationMock", "FunctionMock",
    "CallFunctionMock"
]

from unittest import TestCase
import json
import os
import imp
import sys
import pprint
import itertools
from mock.mock import MagicMock, patch
import platform
import re

with patch("platform.linux_distribution",
           return_value=('Suse', '11', 'Final')):
    with patch("os.geteuid", return_value=45000
               ):  # required to mock sudo and run tests with right scenario
        from resource_management.core import sudo
        from resource_management.core.environment import Environment
        from resource_management.libraries.script.config_dictionary import ConfigDictionary
        from resource_management.libraries.script.script import Script
        from resource_management.libraries.script.config_dictionary import UnknownConfiguration
        from resource_management.libraries.functions.repository_util import RepositoryUtil

# Locations of the scripts under test and of their JSON configs, expressed
# relative to the source folder (they are joined with the src dir when
# resolving script and config paths).
PATH_TO_STACKS = "main/resources/stacks/HDP"
PATH_TO_STACK_TESTS = "test/python/stacks/"

PATH_TO_COMMON_SERVICES = "main/resources/common-services"
PATH_TO_STACK_HOOKS = "main/resources/stack-hooks"
def patch_insights_client(old_function):
    """Decorator that runs *old_function* with InsightsClient mocked out."""
    return patch("insights.client.phase.v1.InsightsClient")(old_function)
    def test_patch_nontrivial_roles(self):
        """Patch validation over Role resources must end with status PASSED.

        Loads metadata from the adjacent 'metadata' directory, registers the
        three predefined Redfish roles plus one custom (IsPredefined=False)
        role, and runs MetadataPatchValidator with PatchingStrategy2_2 and
        all low-level steps mocked to PASSED.  Per the final assertion,
        _validate_resource runs exactly once -- only for the custom role.
        """
        metadata_manager = MetadataManager(["qualifier"])
        this_dir = os.path.dirname(os.path.realpath(__file__))
        metadata_dir = os.path.join(this_dir, 'metadata')
        self.assertTrue(metadata_manager.read_metadata_from_dir(metadata_dir))
        metadata_container = metadata_manager.metadata_container

        role_administrator_json = {
            "@odata.type":
            "#Role.v1_0_0.Role",
            "Description":
            "Administrator User Role",
            "OemPrivileges": ["OemClearLog", "OemPowerControl"],
            "@odata.id":
            "/redfish/v1/AccountService/Roles/Administrator",
            "@odata.context":
            "/redfish/v1/$metadata#AccountService/Roles/Administrator/$entity",
            "AssignedPrivileges": [
                "Login", "ConfigureManager", "ConfigureUsers", "ConfigureSelf",
                "ConfigureComponents"
            ],
            "Id":
            "Administrator",
            "IsPredefined":
            True,
            "Name":
            "User Role"
        }

        role_operator_json = {
            "@odata.type": "#Role.v1_0_0.Role",
            "Description": "Operator User Role",
            "OemPrivileges": [],
            "@odata.id": "/redfish/v1/AccountService/Roles/Operator",
            "@odata.context":
            "/redfish/v1/$metadata#AccountService/Roles/Operator/$entity",
            "AssignedPrivileges":
            ["Login", "ConfigureSelf", "ConfigureComponents"],
            "Id": "Operator",
            "IsPredefined": True,
            "Name": "User Role"
        }

        role_readonly_json = {
            "@odata.type": "#Role.v1_0_0.Role",
            "Description": "ReadOnly User Role",
            "OemPrivileges": [],
            "@odata.id": "/redfish/v1/AccountService/Roles/ReadOnly",
            "@odata.context":
            "/redfish/v1/$metadata#AccountService/Roles/ReadOnly/$entity",
            "AssignedPrivileges": ["Login", "ConfigureSelf"],
            "Id": "ReadOnly",
            "IsPredefined": True,
            "Name": "User Role"
        }

        role_custom_json = {
            "@odata.type": "#Role.v1_0_0.Role",
            "Description": "Custom User Role",
            "OemPrivileges": [],
            "@odata.id": "/redfish/v1/AccountService/Roles/Custom",
            "@odata.context":
            "/redfish/v1/$metadata#AccountService/Roles/ReadOnly/$entity",
            "AssignedPrivileges": ["Login", "ConfigureSelf"],
            "Id": "ReadOnly",
            "IsPredefined": False,
            "Name": "Custom Role"
        }

        self.discovery_container.add_resource(
            ApiResource("/redfish/v1/AccountService/Roles/Administrator",
                        "https://localhost:8443", role_administrator_json,
                        "#Role.v1_0_0.Role"))

        self.discovery_container.add_resource(
            ApiResource("/redfish/v1/AccountService/Roles/Operator",
                        "https://localhost:8443", role_operator_json,
                        "#Role.v1_0_0.Role"))

        self.discovery_container.add_resource(
            ApiResource("/redfish/v1/AccountService/Roles/ReadOnly",
                        "https://localhost:8443", role_readonly_json,
                        "#Role.v1_0_0.Role"))

        self.discovery_container.add_resource(
            ApiResource("/redfish/v1/AccountService/Roles/Custom",
                        "https://localhost:8443", role_custom_json,
                        "#Role.v1_0_0.Custom"))

        with mock.patch('cts_core.commons.api_caller.ApiCaller.__init__'
                        ) as api_caller_init_mock:
            # Skip real API setup; the validator never talks to a service here.
            api_caller_init_mock.return_value = None
            validator = MetadataPatchValidator(metadata_container, None,
                                               PatchingStrategy2_2())

            # this ValidationStatus.BLOCKED should be not affect final ValidationStatus
            validator._verify_property = MagicMock(
                return_value=(ValidationStatus.PASSED, None))
            validator._restore_property = MagicMock(
                return_value=ValidationStatus.PASSED)
            validator._patch_property = MagicMock(
                return_value=(True, True, ValidationStatus.PASSED))

            with mock.patch(
                    'cts_core.validation.patch.metadata_patch_validator.MetadataPatchValidator'
                    '._validate_resource') as validate_resource:
                with StdoutCapture() as out:
                    self.assertEqual(
                        ValidationStatus.PASSED,
                        validator.validate(self.discovery_container))

            # assertEqual: assertEquals is a deprecated alias.
            self.assertEqual(
                1, validate_resource.call_count)  # only for 'custom' role