def test_collect(self, urllib):
    alert_meta = {
      'name': 'alert1',
      'label': 'label1',
      'serviceName': 'service1',
      'componentName': 'component1',
      'uuid': '123',
      'enabled': 'true'
    }
    alert_source_meta = {
      'jmx': {
        'property_list': [
          'x/y'
        ]
      },
      'uri': {
        'http': '192.168.0.10:8080',
        'https_property': '{{hdfs-site/dfs.http.policy}}',
        'https_property_value': 'HTTPS_ONLY'
      },
      "reporting": {
        "ok": {
          "text": "OK: {0}"
        },
        "warning": {
          "text": "Warn: {0}",
          "value": 2
        },
        "critical": {
          "text": "Crit: {0}",
          "value": 5
        }
      }
    }
    cluster = 'c1'
    host = 'host1'
    expected_text = 'Warn: 4'

    def collector_side_effect(clus, data):
      self.assertEquals(data['name'], alert_meta['name'])
      self.assertEquals(data['label'], alert_meta['label'])
      self.assertEquals(data['text'], expected_text)
      self.assertEquals(data['service'], alert_meta['serviceName'])
      self.assertEquals(data['component'], alert_meta['componentName'])
      self.assertEquals(data['uuid'], alert_meta['uuid'])
      self.assertEquals(data['enabled'], alert_meta['enabled'])
      self.assertEquals(data['cluster'], cluster)
      self.assertEquals(clus, cluster)

    response = Mock()
    urllib.return_value = response
    response.read = Mock(return_value='{"beans": [{"y": 4}]}')
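    # The mocked JMX response yields y=4 for the 'x/y' property, which meets the
    # warning threshold (2) but not the critical one (5), hence 'Warn: 4'.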
    mock_collector = MagicMock()
    mock_collector.put = Mock(side_effect=collector_side_effect)

    alert = MetricAlert(alert_meta, alert_source_meta)
    alert.set_helpers(mock_collector, {'foo-site/bar': 12, 'foo-site/baz': 'asd'})
    alert.set_cluster(cluster, host)

    alert.collect()
  def test_repeatRegistration(self,
                              start_mock, installMock, buildMock):

    registerAndHeartbeat = MagicMock(name="registerAndHeartbeat")

    self.controller.registerAndHeartbeat = registerAndHeartbeat
    self.controller.run()
    self.assertTrue(installMock.called)
    self.assertTrue(buildMock.called)
    self.assertTrue(start_mock.called)
    self.controller.registerAndHeartbeat.assert_called_once_with()

    calls = []
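    # switchBool requests a repeat registration on the first call but immediately
    # clears it, so the second run() adds exactly one more heartbeat call.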
    def switchBool():
      if len(calls) == 0:
        self.controller.repeatRegistration = True
        calls.append(1)
      self.controller.repeatRegistration = False

    registerAndHeartbeat.side_effect = switchBool
    self.controller.run()
    self.assertEqual(2, registerAndHeartbeat.call_count)

    self.controller.registerAndHeartbeat = \
      Controller.Controller.registerAndHeartbeat
  def test_action_create_simple_xml_config(self,
                                           time_asctime_mock,
                                           os_path_isdir_mock,
                                           os_path_exists_mock,
                                           open_mock,
                                           ensure_mock):
    """
    Tests that the 'create' action creates a new, non-existent XML file and writes the proper data
    where configurations={"Some conf":"Some value"}
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 'Wed 2014-02'

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      XmlConfig('file.xml',
                conf_dir='/dir/conf',
                configurations={'property1': 'value1'}
      )

    open_mock.assert_called_with('/dir/conf/file.xml', 'wb')
    result_file.__enter__().write.assert_called_with(u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n    </property>\n    \n  </configuration>\n')
    def test_report_data(self):
        mock = MagicMock()
        mock.couch_user = self.web_user
        mock.GET = {
            'location_id': 'd1',
            'startdate': '2017-11-01',
            'enddate': '2017-11-30'
        }
        mock.datespan = DateSpan(datetime(2017, 11, 1), datetime(2017, 11, 30))

        fiche_report = FicheConsommationReport(request=mock, domain='test-domain')
        header = fiche_report.headers
        rows = fiche_report.rows

        self.assertEqual(
            [column_group.html for column_group in header],
            ['', 'Product 1', 'Product 2', 'Product 3']
        )
        self.assertEqual(
            rows,
            [
                [
                    'PPS 1',
                    {'sort_key': 10, 'html': 10}, {'sort_key': 5, 'html': 5}, {'sort_key': 5, 'html': 5},
                    {'sort_key': 2, 'html': 2}, {'sort_key': 2, 'html': 2}, {'sort_key': 0, 'html': 0},
                    {'sort_key': 6, 'html': 6}, {'sort_key': 4, 'html': 4}, {'sort_key': 2, 'html': 2}
                ],
                [
                    'PPS 2',
                    {'sort_key': 13, 'html': 13}, {'sort_key': 11, 'html': 11}, {'sort_key': 2, 'html': 2},
                    {'sort_key': 0, 'html': 0}, {'sort_key': 0, 'html': 0}, {'sort_key': 0, 'html': 0},
                    {'sort_key': 150, 'html': 150}, {'sort_key': 11, 'html': 11}, {'sort_key': 139, 'html': 139}
                ]
            ]
        )
  def test_run(self, ActionQueue_mock, installMock, buildMock):
    aq = MagicMock()
    ActionQueue_mock.return_value = aq

    buildMock.return_value = "opener"
    registerAndHeartbeat = MagicMock(name="registerAndHeartbeat")
    calls = []
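    # The first heartbeat call requests a repeat registration, so run() should
    # invoke registerAndHeartbeat twice before returning.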
    def side_effect():
      if len(calls) == 0:
        self.controller.repeatRegistration = True
      calls.append(1)
    registerAndHeartbeat.side_effect = side_effect
    self.controller.registerAndHeartbeat = registerAndHeartbeat

    # repeat registration
    self.controller.run()

    self.assertTrue(buildMock.called)
    installMock.assert_called_with("opener")
    self.assertEqual(2, registerAndHeartbeat.call_count)

    # one call, +1
    registerAndHeartbeat.side_effect = None
    self.controller.run()
    self.assertEqual(3, registerAndHeartbeat.call_count)

    # Action queue should be started during calls
    self.assertTrue(ActionQueue_mock.called)
    self.assertTrue(aq.start.called)
  def test_action_create_empty_properties_with_dir(self,
                                                   time_asctime_mock,
                                                   os_path_isdir_mock,
                                                   os_path_exists_mock,
                                                   open_mock,
                                                   ensure_mock):
    """
    Tests that 'action_create' creates a new, non-existent file and writes the proper data
    1) properties={}
    2) dir='Some directory that exists'
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 'Some other day'

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      PropertiesFile('file.txt',
                     dir="/dir/and/dir",
                     properties={},
      )

    open_mock.assert_called_with('/dir/and/dir/file.txt', 'wb')
    result_file.__enter__().write.assert_called_with(u'# Generated by Apache Ambari. Some other day\n    \n    \n')
    self.assertEqual(open_mock.call_count, 1)
    ensure_mock.assert_called()
  def test_action_create_xml_config_with_metacharacters(self,
                                                        time_asctime_mock,
                                                        os_path_isdir_mock,
                                                        os_path_exists_mock,
                                                        open_mock,
                                                        ensure_mock):
    """
    Tests that the 'create' action creates a new, non-existent XML file and writes the proper data
    where configurations={"Some conf":"Some metacharacters"}
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 'Wed 2014-02'

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      XmlConfig('file.xml',
                conf_dir='/dir/conf',
                configurations={"": "",
                                "prop.1": "'.'yyyy-MM-dd-HH",
                                "prop.3": "%d{ISO8601} %5p %c{1}:%L - %m%n",
                                "prop.2": "INFO, openjpa",
                                "prop.4": "${oozie.log.dir}/oozie.log",
                                "prop.empty": "",
                },
      )

    open_mock.assert_called_with('/dir/conf/file.xml', 'wb')
    result_file.__enter__().write.assert_called_with(u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n    </property>\n    \n  </configuration>\n')
  def test_action_create_properties_rewrite_content(self,
                                                    time_asctime_mock,
                                                    os_path_isdir_mock,
                                                    os_path_exists_mock,
                                                    open_mock,
                                                    ensure_mock):
    """
    Tests that 'action_create' rewrites a file that already exists
    1) properties={"Some property":"Some value"}
    2) dir="Some dir"
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = True
    time_asctime_mock.return_value = 777

    result_file = MagicMock()
    result_file.read.return_value = 'old-content'
    open_mock.return_value = result_file

    with Environment('/') as env:
      PropertiesFile('new_file',
                     dir='/dir1',
                     properties={'property_1': 'value1'},
      )

    result_file.read.assert_called()
    open_mock.assert_called_with('/dir1/new_file', 'wb')
    result_file.__enter__().write.assert_called_with(u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    \n')
    self.assertEqual(open_mock.call_count, 2)
    ensure_mock.assert_called()
  def test_action_create_empty_properties_without_dir(self,
                                                      time_asctime_mock,
                                                      os_path_isdir_mock,
                                                      os_path_exists_mock,
                                                      open_mock,
                                                      ensure_mock):
    """
    Tests that 'action_create' creates a new, non-existent file and writes the proper data
    1) properties={}
    2) dir=None
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 'Today is Wednesday'

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      PropertiesFile('/somewhere_in_system/one_file.properties',
                     dir=None,
                     properties={}
      )

    open_mock.assert_called_with('/somewhere_in_system/one_file.properties', 'wb')
    result_file.__enter__().write.assert_called_with( u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n')
    self.assertEqual(open_mock.call_count, 1)
    ensure_mock.assert_called()
Example 10
  def test_action_create_properties_simple(self,
                                           time_asctime_mock,
                                           os_path_isdir_mock,
                                           os_path_exists_mock,
                                           open_mock,
                                           ensure_mock):
    """
    Tests that 'action_create' creates a new, non-existent file and writes the proper data
    1) properties={"Some property":"Some value"}
    2) dir=None
    """

    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 777

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      PropertiesFile('/dir/new_file',
                     properties={'property1': 'value1'},
      )

    open_mock.assert_called_with('/dir/new_file',
                                 'wb')
    result_file.__enter__().write.assert_called_with(u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    \n')
    self.assertEqual(open_mock.call_count, 1)
    ensure_mock.assert_called()
Example 11
  def test_action_create_properties_with_metacharacters(self,
                                                        time_asctime_mock,
                                                        os_path_isdir_mock,
                                                        os_path_exists_mock,
                                                        open_mock,
                                                        ensure_mock):
    """
    Tests that 'action_create' creates a new, non-existent file and writes the proper data
    1) properties={"":"", "Some property":"Metacharacters: -%{} ${a.a}/"}
    2) dir=None
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 777

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      PropertiesFile('/dir/new_file',
                     properties={"": "",
                                 "prop.1": "'.'yyyy-MM-dd-HH",
                                 "prop.3": "%d{ISO8601} %5p %c{1}:%L - %m%n",
                                 "prop.2": "INFO, openjpa",
                                 "prop.4": "${oozie.log.dir}/oozie.log",
                                 "prop.empty": "",
                     },
      )

    open_mock.assert_called_with('/dir/new_file','wb')
    result_file.__enter__().write.assert_called_with(u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n")
    self.assertEqual(open_mock.call_count, 1)
    ensure_mock.assert_called()
  def test_execute_status_command(self, CustomServiceOrchestrator_mock,
                                  build_mock, execute_command_mock, requestComponentSecurityState_mock,
                                  requestComponentStatus_mock, read_stack_version_mock,
                                  status_update_callback):
    CustomServiceOrchestrator_mock.return_value = None
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(AmbariConfig(), dummy_controller)

    build_mock.return_value = {'dummy report': '' }

    dummy_controller.recovery_manager = RecoveryManager(tempfile.mktemp())

    requestComponentStatus_mock.reset_mock()
    requestComponentStatus_mock.return_value = {'exitcode': 0 }

    requestComponentSecurityState_mock.reset_mock()
    requestComponentSecurityState_mock.return_value = 'UNKNOWN'

    actionQueue.execute_status_command(self.status_command)
    report = actionQueue.result()
    expected = {'dummy report': '',
                'securityState' : 'UNKNOWN'}

    self.assertEqual(len(report['componentStatus']), 1)
    self.assertEqual(report['componentStatus'][0], expected)
    self.assertTrue(requestComponentStatus_mock.called)
Example 13
 def test_extract_value_from_report_table_row_value_input_dict(self):
     mock = MagicMock()
     mock.couch_user = self.user
     mock.GET = {
         'location_id': '',
         'program': '',
         'month_start': '10',
         'year_start': '2017',
         'month_end': '3',
         'year_end': '2018',
     }
     dashboard1_report = Dashboard1Report(request=mock, domain='test-pna')
     report_table = {
         'fix_column': False,
         'comment': 'test_comment',
         'rows': [],
         'datatables': False,
         'title': 'test_title',
         'total_row': [
             {'html': 'row_0'},
             {'html': 'row_1'},
             {'html': 'row_2'},
             {'html': 'row_3'}
         ],
         'slug': 'disponibilite',
         'default_rows': 10
     }
     report_table_value = dashboard1_report._extract_value_from_report_table_row_value(report_table)
     self.assertEqual(report_table_value, ['row_0', 'row_1', 'row_2', 'row_3'])
  def test_watchdog_1(self, kill_process_with_children_mock):
    """
    Tests whether watchdog works
    """
    subproc_mock = self.Subprocess_mockup()
    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
    _, tmpoutfile = tempfile.mkstemp()
    _, tmperrfile = tempfile.mkstemp()
    _, tmpstrucout = tempfile.mkstemp()
    PYTHON_TIMEOUT_SECONDS = 0.1
    kill_process_with_children_mock.side_effect = lambda pid : subproc_mock.terminate()

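    # Substitute the real subprocess launcher with the mock so the watchdog
    # timeout path can be exercised deterministically.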
    def launch_python_subprocess_method(command, tmpout, tmperr):
      subproc_mock.tmpout = tmpout
      subproc_mock.tmperr = tmperr
      return subproc_mock
    executor.launch_python_subprocess = launch_python_subprocess_method
    runShellKillPgrp_method = MagicMock()
    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
    executor.runShellKillPgrp = runShellKillPgrp_method
    subproc_mock.returncode = None
    callback_method = MagicMock()
    thread = Thread(target =  executor.run_file, args = ("fake_puppetFile",
      ["arg1", "arg2"], tmpoutfile, tmperrfile,
      PYTHON_TIMEOUT_SECONDS, tmpstrucout, callback_method, '1'))
    thread.start()
    time.sleep(0.1)
    subproc_mock.finished_event.wait()
    self.assertEquals(subproc_mock.was_terminated, True, "Subprocess should be terminated due to timeout")
    self.assertTrue(callback_method.called)
  def test_watchdog_2(self):
    """
    Tries to catch false positive watchdog invocations
    """
    subproc_mock = self.Subprocess_mockup()
    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
    _, tmpoutfile = tempfile.mkstemp()
    _, tmperrfile = tempfile.mkstemp()
    _, tmpstrucout = tempfile.mkstemp()
    PYTHON_TIMEOUT_SECONDS =  5

    def launch_python_subprocess_method(command, tmpout, tmperr):
      subproc_mock.tmpout = tmpout
      subproc_mock.tmperr = tmperr
      return subproc_mock
    executor.launch_python_subprocess = launch_python_subprocess_method
    runShellKillPgrp_method = MagicMock()
    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
    executor.runShellKillPgrp = runShellKillPgrp_method
    subproc_mock.returncode = 0
    callback_method = MagicMock()
    thread = Thread(target =  executor.run_file, args = ("fake_puppetFile", ["arg1", "arg2"],
                                                      tmpoutfile, tmperrfile,
                                                      PYTHON_TIMEOUT_SECONDS, tmpstrucout,
                                                      callback_method, "1-1"))
    thread.start()
    time.sleep(0.1)
    subproc_mock.should_finish_event.set()
    subproc_mock.finished_event.wait()
    self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
    self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
    self.assertTrue(callback_method.called)
  def test_watchdog_2(self):
    """
    Tries to catch false positive watchdog invocations
    """
    # Test hangs on Windows TODO
    if IS_WINDOWS:
      return
    subproc_mock = self.Subprocess_mockup()
    executor = PythonExecutor("/tmp", AgentConfig("", ""), self.agentToggleLogger)
    _, tmpoutfile = tempfile.mkstemp()
    _, tmperrfile = tempfile.mkstemp()
    _, tmpstrucout = tempfile.mkstemp()
    PYTHON_TIMEOUT_SECONDS =  5

    environment_vars = [("PYTHONPATH", "a:b")]
    def launch_python_subprocess_method(command, tmpout, tmperr, environment_vars):
      subproc_mock.tmpout = tmpout
      subproc_mock.tmperr = tmperr
      return subproc_mock
    executor.launch_python_subprocess = launch_python_subprocess_method
    runShellKillPgrp_method = MagicMock()
    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
    executor.runShellKillPgrp = runShellKillPgrp_method
    subproc_mock.returncode = 0
    thread = Thread(target =  executor.run_file, args = ("fake_puppetFile", ["arg1", "arg2"],
                                                      tmpoutfile, tmperrfile,
                                                      PYTHON_TIMEOUT_SECONDS, tmpstrucout, "INFO"))
    thread.start()
    time.sleep(0.1)
    subproc_mock.should_finish_event.set()
    subproc_mock.finished_event.wait()
    self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
    self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
  def test_execution_results(self):
    subproc_mock = self.Subprocess_mockup()
    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
    _, tmpoutfile = tempfile.mkstemp()
    _, tmperrfile = tempfile.mkstemp()
    
    tmp_file = tempfile.NamedTemporaryFile()    # the structured out file should be preserved across calls to the hooks and script.
    tmpstructuredoutfile = tmp_file.name
    tmp_file.close()

    PYTHON_TIMEOUT_SECONDS =  5

    def launch_python_subprocess_method(command, tmpout, tmperr):
      subproc_mock.tmpout = tmpout
      subproc_mock.tmperr = tmperr
      return subproc_mock
    executor.launch_python_subprocess = launch_python_subprocess_method
    runShellKillPgrp_method = MagicMock()
    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
    executor.runShellKillPgrp = runShellKillPgrp_method
    subproc_mock.returncode = 0
    subproc_mock.should_finish_event.set()
    callback_method = MagicMock()
    result = executor.run_file("file", ["arg1", "arg2"],
                               tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS,
                               tmpstructuredoutfile, callback_method, "1-1")
    self.assertEquals(result, {'exitcode': 0, 'stderr': '', 'stdout': '',
                               'structuredOut': {}})
    self.assertTrue(callback_method.called)
  def test_execution_results(self):
    self.maxDiff = None
    subproc_mock = self.Subprocess_mockup()
    executor = PythonExecutor("/tmp", AgentConfig("", ""), self.agentToggleLogger)
    _, tmpoutfile = tempfile.mkstemp()
    _, tmperrfile = tempfile.mkstemp()
    _, tmpstroutfile = tempfile.mkstemp()
    if IS_WINDOWS:
      if os.path.exists(tmpstroutfile):
        tmpstroutfile = tmpstroutfile + "_t"
    PYTHON_TIMEOUT_SECONDS =  5

    def launch_python_subprocess_method(command, tmpout, tmperr, environment_vars):
      subproc_mock.tmpout = tmpout
      subproc_mock.tmperr = tmperr
      return subproc_mock
    executor.launch_python_subprocess = launch_python_subprocess_method
    runShellKillPgrp_method = MagicMock()
    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
    executor.runShellKillPgrp = runShellKillPgrp_method
    subproc_mock.returncode = 0
    subproc_mock.should_finish_event.set()
    result = executor.run_file("file", ["arg1", "arg2"], tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS, tmpstroutfile, "INFO", True, None)
    self.assertEquals(result, {'exitcode': 0, 'stderr': 'Dummy err', 'stdout': 'Dummy output',
                               'structuredOut': {}})
  def test_watchdog_1(self, kill_process_with_children_mock):
    """
    Tests whether watchdog works
    """
    # Test hangs on Windows TODO
    if IS_WINDOWS:
      return
    subproc_mock = self.Subprocess_mockup()
    executor = PythonExecutor("/tmp", AgentConfig("", ""), self.agentToggleLogger)
    _, tmpoutfile = tempfile.mkstemp()
    _, tmperrfile = tempfile.mkstemp()
    _, tmpstrucout = tempfile.mkstemp()
    PYTHON_TIMEOUT_SECONDS = 0.1
    kill_process_with_children_mock.side_effect = lambda pid : subproc_mock.terminate()

    def launch_python_subprocess_method(command, tmpout, tmperr, environment_vars):
      subproc_mock.tmpout = tmpout
      subproc_mock.tmperr = tmperr
      return subproc_mock
    executor.launch_python_subprocess = launch_python_subprocess_method
    runShellKillPgrp_method = MagicMock()
    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
    executor.runShellKillPgrp = runShellKillPgrp_method
    subproc_mock.returncode = None
    thread = Thread(target =  executor.run_file, args = ("fake_puppetFile",
      ["arg1", "arg2"], tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS, tmpstrucout,"INFO"))
    thread.start()
    time.sleep(0.1)
    subproc_mock.finished_event.wait()
    self.assertEquals(subproc_mock.was_terminated, True, "Subprocess should be terminated due to timeout")
  def test_cancel_backgound_command(self, read_stack_version_mock, resolve_hook_script_path_mock, resolve_script_path_mock, FileCache_mock,
                                      kill_process_with_children_mock):
    FileCache_mock.return_value = None
    FileCache_mock.cache_dir = MagicMock()
    resolve_hook_script_path_mock.return_value = None
#     shell.kill_process_with_children = MagicMock()
    dummy_controller = MagicMock()
    cfg = AmbariConfig().getConfig()
    cfg.set('agent', 'tolerate_download_failures', 'true')
    cfg.set('agent', 'prefix', '.')
    cfg.set('agent', 'cache_dir', 'background_tasks')

    actionQueue = ActionQueue(cfg, dummy_controller)

    dummy_controller.actionQueue = actionQueue
    orchestrator = CustomServiceOrchestrator(cfg, dummy_controller)
    orchestrator.file_cache = MagicMock()
    def f (a, b):
      return ""
    orchestrator.file_cache.get_service_base_dir = f
    actionQueue.customServiceOrchestrator = orchestrator

    import TestActionQueue
    import copy

    TestActionQueue.patch_output_file(orchestrator.python_executor)
    orchestrator.python_executor.prepare_process_result = MagicMock()
    orchestrator.dump_command_to_json = MagicMock()

    lock = threading.RLock()
    complete_done = threading.Condition(lock)
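    # The completion callback blocks on this condition so cancel_command() runs
    # while the background command is still registered as in progress.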

    complete_was_called = {}
    def command_complete_w(process_condenced_result, handle):
      with lock:
        complete_was_called['visited']= ''
        complete_done.wait(3)

    actionQueue.on_background_command_complete_callback = TestActionQueue.wraped(actionQueue.on_background_command_complete_callback, command_complete_w, None)
    execute_command = copy.deepcopy(TestActionQueue.TestActionQueue.background_command)
    actionQueue.put([execute_command])
    actionQueue.processBackgroundQueueSafeEmpty()

    time.sleep(.1)

    orchestrator.cancel_command(19,'')
    self.assertTrue(kill_process_with_children_mock.called)
    kill_process_with_children_mock.assert_called_with(33)

    with lock:
      complete_done.notifyAll()

    with lock:
      self.assertTrue(complete_was_called.has_key('visited'))

    time.sleep(.1)

    runningCommand = actionQueue.commandStatuses.get_command_status(19)
    self.assertTrue(runningCommand is not None)
    self.assertEqual(runningCommand['status'], ActionQueue.FAILED_STATUS)
Example 21
  def test_action_create_xml_config_sorted_by_key(self,
                                                  time_asctime_mock,
                                                  os_path_isdir_mock,
                                                  os_path_exists_mock,
                                                  open_mock,
                                                  ensure_mock):
    """
    Tests that the 'create' action creates a new, non-existent XML file and writes the proper data
    where configurations={"Key": "Value"} entries are written in sorted-by-key order
    """
    os_path_isdir_mock.side_effect = [False, True]
    os_path_exists_mock.return_value = False
    time_asctime_mock.return_value = 'Wed 2014-02'

    result_file = MagicMock()
    open_mock.return_value = result_file

    with Environment('/') as env:
      XmlConfig('file.xml',
                conf_dir='/dir/conf',
                configurations={"": "",
                                "third": "should be third",
                                "first": "should be first",
                                "z_last": "should be last",
                                "second": "should be second",
                                },
                configuration_attributes={}
                )

    open_mock.assert_called_with('/dir/conf/file.xml', 'wb')
    result_file.__enter__().write.assert_called_with(u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>\n')
Example 22
  def test_attribute_command_escaping(self, popen_mock):
    expected_command0 = "arg1 arg2 'quoted arg'"
    expected_command1 = "arg1 arg2 'command \"arg\"'"
    expected_command2 = 'arg1 arg2 \'command \'"\'"\'arg\'"\'"\'\''
    expected_command3 = "arg1 arg2 'echo `ls /root`'"
    expected_command4 = "arg1 arg2 '$ROOT'"
    expected_command5 = "arg1 arg2 '`ls /root`'"
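    # Each tuple passed to Execute should be flattened into a single, safely
    # shell-quoted command string (argument index 3 of the corresponding Popen call).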

    subproc_mock = MagicMock()
    subproc_mock.returncode = 0
    popen_mock.return_value = subproc_mock

    with Environment("/") as env:
      Execute(('arg1', 'arg2', 'quoted arg'),
      )
      Execute(('arg1', 'arg2', 'command "arg"'),
      )
      Execute(('arg1', 'arg2', "command 'arg'"),
      )
      Execute(('arg1', 'arg2', "echo `ls /root`"),
      )
      Execute(('arg1', 'arg2', "$ROOT"),
      )
      Execute(('arg1', 'arg2', "`ls /root`"),
      )

    self.assertEqual(popen_mock.call_args_list[0][0][0][3], expected_command0)
    self.assertEqual(popen_mock.call_args_list[1][0][0][3], expected_command1)
    self.assertEqual(popen_mock.call_args_list[2][0][0][3], expected_command2)
    self.assertEqual(popen_mock.call_args_list[3][0][0][3], expected_command3)
    self.assertEqual(popen_mock.call_args_list[4][0][0][3], expected_command4)
    self.assertEqual(popen_mock.call_args_list[5][0][0][3], expected_command5)
Example 23
  def test_ParseOptions(self, open_mock, backup_file_mock, modify_action_mock, option_parser_mock):
    class options(object):
      user = "******"
      hostname = "127.0.0.1"
      clustername = "test1"
      password = "******"
      upgrade_json = "catalog_file"
      from_stack = "0.0"
      to_stack = "1.3"
      logfile = "test.log"
      report = "report.txt"
      warnings = []
      printonly = False

    args = ["update-configs"]
    modify_action_mock.return_value = MagicMock()
    backup_file_mock.return_value = MagicMock()
    test_mock = MagicMock()
    test_mock.parse_args = lambda: (options, args)
    option_parser_mock.return_value = test_mock

    upgradeHelper.main()
    self.assertEqual(backup_file_mock.call_count, 0)
    self.assertEqual(modify_action_mock.call_count, 1)
    self.assertEqual({"user": options.user, "pass": options.password}, upgradeHelper.Options.API_TOKENS)
    self.assertEqual(options.clustername, upgradeHelper.Options.CLUSTER_NAME)
Example 24
def test_rss(container_node, other_container_node, content_node, collections, home_root, guest_user, root):

    everybody_rule = get_or_add_everybody_rule()

    root.children.append(collections)

    collections.access_rule_assocs.append(NodeToAccessRule(ruletype=u"read", rule=everybody_rule))
    collections.container_children.append(container_node)

    container_node.container_children.append(other_container_node)
    other_container_node.content_children.append(content_node)

    struct = {"nodelist": [other_container_node, content_node], "build_response_start": time.time()}
    params = {}
    req = MagicMock()
    req.get_header = lambda x: "localhost:8081"
    req.fullpath = ""
    req.query = ""

    res = struct2rss(req, "", params, None, struct=struct)
    print res
    # TODO: find some way to check XML content properly
    assert res.startswith("""<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" version="2.0">
<channel>""")
    assert "document/testschema" in res
    assert "http://localhost:8081/node?id=" in res
Example 25
  def test_repeatRegistration(self, get_os_version_mock, get_os_type_mock,
                              run_mock, installMock, buildMock, Popen_mock):

    registerAndHeartbeat = MagicMock(name="registerAndHeartbeat")
    get_os_type_mock.return_value = "suse"
    get_os_version_mock.return_value = "11"

    self.controller.registerAndHeartbeat = registerAndHeartbeat
    self.controller.run()
    self.assertTrue(installMock.called)
    self.assertTrue(buildMock.called)
    self.controller.registerAndHeartbeat.assert_called_once_with()

    calls = []
    def switchBool():
      if len(calls) == 0:
        self.controller.repeatRegistration = True
        calls.append(1)
      self.controller.repeatRegistration = False

    registerAndHeartbeat.side_effect = switchBool
    self.controller.run()
    self.assertEqual(2, registerAndHeartbeat.call_count)

    self.controller.registerAndHeartbeat = \
      Controller.Controller.registerAndHeartbeat
Example 26
    def test_extract_value_from_report_table_row_value_input_string(self):
        mock = MagicMock()
        mock.couch_user = self.user
        mock.GET = {
            'location_id': '',
            'program': '',
            'month_start': '10',
            'year_start': '2017',
            'month_end': '3',
            'year_end': '2018',
        }
        dashboard1_report = Dashboard1Report(request=mock, domain='test-pna1')
        report_table = {
            'fix_column': False,
            'comment': 'test_comment',
            'rows': [['0', '4', '8', '12'],
                     ['1', '5', '9', '13'],
                     ['2', '6', '10', '14'],
                     ['3', '7', '11', '15']],
            'datatables': False,
            'title': 'test_title',
            'total_row': ['row_0', 'row_1', 'row_2', 'row_3'],
            'slug': 'disponibilite',
            'default_rows': 10
        }
        report_table_value = dashboard1_report._sanitize_single_row(report_table['total_row'])
        self.assertEqual(report_table_value, ['row_0', 'row_1', 'row_2', 'row_3'])

        all_rows = dashboard1_report._sanitize_all_rows(report_table)
        self.assertEqual(all_rows, [['0', '4', '8', '12'],
                                    ['1', '5', '9', '13'],
                                    ['2', '6', '10', '14'],
                                    ['3', '7', '11', '15']])
Example 27
def xml_fixture(parent_node, content_node):
    global mock
    parent_node["testvalue"] = "1001"
    content_node["testvalue"] = "1002"
    struct = {"nodelist": [parent_node, content_node],
              "build_response_start": time.time(),
              "status": "ok",
              "dataready": "0.1",
              "retrievaldate": datetime.datetime.now().isoformat(),
              "sortfield": "sortfield",
              "sortdirection": "up",
              "timetable": [],
              "result_shortlist": []}
    params = {}
    if not mock:
        req = MagicMock()
        mock = req
    else:
        req = mock
    req.get_header = lambda x: "localhost:8081"
    req.fullpath = ""
    req.query = ""

    MetadatatypeFactory(name=u"directory")
    MetadatatypeFactory(name=u"testschema")

    return struct, req, params
  def test_start(self):
    execution_commands = [
      {
        'clusterName': 'cluster',
        'hostName': 'host',
        'alertDefinition': {
          'name': 'alert1'
        }
      }
    ]

    scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, 5, None, None, None)
    alert_mock = MagicMock()
    alert_mock.interval = Mock(return_value=5)
    alert_mock.collect = Mock()
    alert_mock.set_helpers = Mock()
    scheduler.schedule_definition = MagicMock()
    scheduler._AlertSchedulerHandler__scheduler = MagicMock()
    scheduler._AlertSchedulerHandler__scheduler.running = False
    scheduler._AlertSchedulerHandler__scheduler.start = Mock()
    scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
    scheduler._AlertSchedulerHandler__config_maps = {
      'cluster': {}
    }
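    # With the scheduler stopped, start() should launch it and schedule the
    # single parsed definition via the mocked alert.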

    scheduler.start()

    self.assertTrue(scheduler._AlertSchedulerHandler__scheduler.start.called)
    scheduler.schedule_definition.assert_called_with(alert_mock)
Example 29
File: TestMain.py, Project: duxia/ambari
  def test_main(self, ping_port_init_mock, ping_port_start_mock, data_clean_init_mock,data_clean_start_mock,
                parse_args_mock, join_mock, start_mock, Controller_init_mock, try_to_connect_mock,
                update_log_level_mock, daemonize_mock, perform_prestart_checks_mock,
                resolve_ambari_config_mock, stop_mock, bind_signal_handlers_mock,
                setup_logging_mock, socket_mock):
    data_clean_init_mock.return_value = None
    Controller_init_mock.return_value = None
    ping_port_init_mock.return_value = None
    options = MagicMock()
    parse_args_mock.return_value = (options, MagicMock)

    #testing call without command-line arguments
    main.main()

    self.assertTrue(setup_logging_mock.called)
    self.assertTrue(bind_signal_handlers_mock.called)
    self.assertTrue(stop_mock.called)
    self.assertTrue(resolve_ambari_config_mock.called)
    self.assertTrue(perform_prestart_checks_mock.called)
    self.assertTrue(daemonize_mock.called)
    self.assertTrue(update_log_level_mock.called)
    try_to_connect_mock.assert_called_once_with(ANY, -1, ANY)
    self.assertTrue(start_mock.called)
    self.assertTrue(data_clean_init_mock.called)
    self.assertTrue(data_clean_start_mock.called)
    self.assertTrue(ping_port_init_mock.called)
    self.assertTrue(ping_port_start_mock.called)

    perform_prestart_checks_mock.reset_mock()

    # Testing call with --expected-hostname parameter
    options.expected_hostname = "test.hst"
    main.main()
    perform_prestart_checks_mock.assert_called_once_with(options.expected_hostname)
  def test_request(self, connect_mock):
    httpsconn_mock = MagicMock(create = True)
    self.cachedHTTPSConnection.httpsconn = httpsconn_mock

    dummy_request = MagicMock(create = True)
    dummy_request.get_method.return_value = "dummy_get_method"
    dummy_request.get_full_url.return_value = "dummy_full_url"
    dummy_request.get_data.return_value = "dummy_get_data"
    dummy_request.headers = "dummy_headers"

    responce_mock = MagicMock(create = True)
    responce_mock.read.return_value = "dummy responce"
    httpsconn_mock.getresponse.return_value = responce_mock

    # Testing normal case
    responce = self.cachedHTTPSConnection.request(dummy_request)

    self.assertEqual(responce, responce_mock.read.return_value)
    httpsconn_mock.request.assert_called_once_with(
      dummy_request.get_method.return_value,
      dummy_request.get_full_url.return_value,
      dummy_request.get_data.return_value,
      dummy_request.headers)

    # Testing case of exception
    try:
      def side_eff():
        raise Exception("Dummy exception")
      httpsconn_mock.read.side_effect = side_eff
      responce = self.cachedHTTPSConnection.request(dummy_request)
      self.fail("Should raise IOError")
    except Exception, err:
      # Expected
      pass
Example 31
 def test_get_filtered_datasets(self):
     """ Shall  call filter function from form class once """
     form = MagicMock()
     IndexView.get_filtered_datasets(form)
     form.filter.assert_called_once()
Example 32
 def open_subporcess_files_win(fout, ferr, f):
   return MagicMock(), MagicMock()
Example 33
  def test_execute_command(self, status_update_callback_mock, open_mock):
    # Make file read calls visible
    def open_side_effect(file, mode):
      if mode == 'r':
        file_mock = MagicMock()
        file_mock.read.return_value = "Read from " + str(file)
        return file_mock
      else:
        return self.original_open(file, mode)
    open_mock.side_effect = open_side_effect

    config = AmbariConfig()
    tempdir = tempfile.gettempdir()
    config.set('agent', 'prefix', tempdir)
    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
    config.set('agent', 'tolerate_download_failures', "true")
    dummy_controller = MagicMock()
    actionQueue = ActionQueue(config, dummy_controller)
    unfreeze_flag = threading.Event()
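    # runCommand blocks on this event so the IN_PROGRESS report can be inspected
    # before the command is allowed to complete.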
    python_execution_result_dict = {
      'stdout': 'out',
      'stderr': 'stderr',
      'structuredOut' : ''
      }
    def side_effect(command, tmpoutfile, tmperrfile):
      unfreeze_flag.wait()
      return python_execution_result_dict
    def patched_aq_execute_command(command):
      # We have to perform patching for separate thread in the same thread
      with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
          runCommand_mock.side_effect = side_effect
          actionQueue.execute_command(command)
    ### Test install/start/stop command ###
    ## Test successful execution with configuration tags
    python_execution_result_dict['status'] = 'COMPLETE'
    python_execution_result_dict['exitcode'] = 0
    # We call method in a separate thread
    execution_thread = Thread(target = patched_aq_execute_command ,
                              args = (self.datanode_install_command, ))
    execution_thread.start()
    #  check in progress report
    # wait until ready
    while True:
      time.sleep(0.1)
      report = actionQueue.result()
      if len(report['reports']) != 0:
        break
    expected = {'status': 'IN_PROGRESS',
                'stderr': 'Read from {0}/errors-3.txt'.format(tempdir),
                'stdout': 'Read from {0}/output-3.txt'.format(tempdir),
                'structuredOut' : 'Read from {0}/structured-out-3.json'.format(tempdir),
                'clusterName': u'cc',
                'roleCommand': u'INSTALL',
                'serviceName': u'HDFS',
                'role': u'DATANODE',
                'actionId': '1-1',
                'taskId': 3,
                'exitCode': 777}
    self.assertEqual(report['reports'][0], expected)
    # Continue command execution
    unfreeze_flag.set()
    # wait until ready
    while report['reports'][0]['status'] == 'IN_PROGRESS':
      time.sleep(0.1)
      report = actionQueue.result()
    # check report
    configname = os.path.join(tempdir, 'config.json')
    expected = {'status': 'COMPLETED',
                'stderr': 'stderr',
                'stdout': 'out',
                'clusterName': u'cc',
                'structuredOut': '""',
                'roleCommand': u'INSTALL',
                'serviceName': u'HDFS',
                'role': u'DATANODE',
                'actionId': '1-1',
                'taskId': 3,
                'configurationTags': {'global': {'tag': 'v1'}},
                'exitCode': 0}
    self.assertEqual(len(report['reports']), 1)
    self.assertEqual(report['reports'][0], expected)
    self.assertTrue(os.path.isfile(configname))
    # Check that we had 2 status update calls ( IN_PROGRESS and COMPLETE)
    self.assertEqual(status_update_callback_mock.call_count, 2)
    os.remove(configname)

    # now should not have reports (read complete/failed reports are deleted)
    report = actionQueue.result()
    self.assertEqual(len(report['reports']), 0)

    ## Test failed execution
    python_execution_result_dict['status'] = 'FAILED'
    python_execution_result_dict['exitcode'] = 13
    # We call method in a separate thread
    execution_thread = Thread(target = patched_aq_execute_command ,
                              args = (self.datanode_install_command, ))
    execution_thread.start()
    unfreeze_flag.set()
    #  check in progress report
    # wait until ready
    report = actionQueue.result()
    while len(report['reports']) == 0 or \
                    report['reports'][0]['status'] == 'IN_PROGRESS':
      time.sleep(0.1)
      report = actionQueue.result()
      # check report
    expected = {'status': 'FAILED',
                'stderr': 'stderr',
                'stdout': 'out',
                'clusterName': u'cc',
                'structuredOut': '""',
                'roleCommand': u'INSTALL',
                'serviceName': u'HDFS',
                'role': u'DATANODE',
                'actionId': '1-1',
                'taskId': 3,
                'exitCode': 13}
    self.assertEqual(len(report['reports']), 1)
    self.assertEqual(report['reports'][0], expected)

    # now should not have reports (read complete/failed reports are deleted)
    report = actionQueue.result()
    self.assertEqual(len(report['reports']), 0)

    ### Test upgrade command ###
    python_execution_result_dict['status'] = 'COMPLETE'
    python_execution_result_dict['exitcode'] = 0
    execution_thread = Thread(target = patched_aq_execute_command ,
                              args = (self.datanode_upgrade_command, ))
    execution_thread.start()
    unfreeze_flag.set()
    # wait until ready
    report = actionQueue.result()
    while len(report['reports']) == 0 or \
                    report['reports'][0]['status'] == 'IN_PROGRESS':
      time.sleep(0.1)
      report = actionQueue.result()
    # check report
    expected = {'status': 'COMPLETED',
                'stderr': 'stderr',
                'stdout': 'out',
                'clusterName': 'clusterName',
                'structuredOut': '""',
                'roleCommand': 'UPGRADE',
                'serviceName': 'serviceName',
                'role': 'role',
                'actionId': 17,
                'taskId': 'taskId',
                'exitCode': 0}
    self.assertEqual(len(report['reports']), 1)
    self.assertEqual(report['reports'][0], expected)

    # now should not have reports (read complete/failed reports are deleted)
    report = actionQueue.result()
    self.assertEqual(len(report['reports']), 0)
Example 34
class TestHookBeforeInstall(RMFTestCase):
    TMP_PATH = '/tmp/hbase-hbase'

    @patch("os.path.isfile")
    @patch.object(getpass, "getuser", new=MagicMock(return_value='some_user'))
    @patch("tempfile.mkdtemp", new=MagicMock(return_value='/tmp/jdk_tmp_dir'))
    @patch("os.path.exists")
    def test_hook_default(self, os_path_exists_mock, os_path_isfile_mock):
        def side_effect(path):
            if path == "/etc/hadoop/conf":
                return True
            return False

        os_path_exists_mock.side_effect = side_effect
        os_path_isfile_mock.side_effect = [False, True, True, True, True]

        self.executeScript("2.0.6/hooks/before-ANY/scripts/hook.py",
                           classname="BeforeAnyHook",
                           command="hook",
                           config_file="default.json",
                           call_mocks=itertools.cycle([(0, "1000")]))
        self.assertResourceCalled(
            'Group',
            'hadoop',
        )
        self.assertResourceCalled(
            'Group',
            'nobody',
        )
        self.assertResourceCalled(
            'Group',
            'users',
        )

        self.assertResourceCalled(
            'User',
            'hive',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'oozie',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop', u'users'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'nobody',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop', u'nobody'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'ambari-qa',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop', u'users'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'flume',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'hdfs',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'storm',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'mapred',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'hbase',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'tez',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop', u'users'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'zookeeper',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'falcon',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop', u'users'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'sqoop',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'yarn',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'hcat',
            gid='hadoop',
            uid=None,
            groups=[u'hadoop'],
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'File',
            '/tmp/changeUid.sh',
            content=StaticFile('changeToSecureUid.sh'),
            mode=0555,
        )
        self.assertResourceCalled(
            'Execute',
            '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0',
            not_if='(test $(id -u ambari-qa) -gt 1000) || (false)',
        )
        self.assertResourceCalled(
            'Directory',
            '/tmp/hbase-hbase',
            owner='hbase',
            create_parents=True,
            mode=0775,
            cd_access='a',
        )
        self.assertResourceCalled(
            'File',
            '/tmp/changeUid.sh',
            content=StaticFile('changeToSecureUid.sh'),
            mode=0555,
        )
        self.assertResourceCalled(
            'File',
            '/tmp/changeUid.sh',
            content=StaticFile('changeToSecureUid.sh'),
            mode=0555,
        )
        self.assertResourceCalled(
            'Execute',
            '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1000',
            not_if='(test $(id -u hbase) -gt 1000) || (false)',
        )
        self.assertResourceCalled(
            'User',
            'test_user1',
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'User',
            'test_user2',
            fetch_nonlocal_groups=True,
        )
        self.assertResourceCalled(
            'Group',
            'hdfs',
        )
        self.assertResourceCalled(
            'Group',
            'test_group',
        )
        self.assertResourceCalled(
            'User',
            'hdfs',
            fetch_nonlocal_groups=True,
            groups=[u'hadoop', u'hdfs', u'test_group'],
        )
        self.assertResourceCalled(
            'Directory',
            '/etc/hadoop',
            mode=0755,
        )
        self.assertResourceCalled(
            'Directory',
            '/etc/hadoop/conf.empty',
            owner='root',
            create_parents=True,
            group='hadoop',
        )
        self.assertResourceCalled(
            'Link',
            '/etc/hadoop/conf',
            not_if='ls /etc/hadoop/conf',
            to='/etc/hadoop/conf.empty',
        )
        self.assertResourceCalled(
            'File',
            '/etc/hadoop/conf/hadoop-env.sh',
            content=InlineTemplate(
                self.getConfig()['configurations']['hadoop-env']['content']),
            owner='hdfs',
            group='hadoop')
        self.assertResourceCalled(
            'Directory',
            '/tmp/hadoop_java_io_tmpdir',
            owner='hdfs',
            group='hadoop',
            mode=01777,
        )
        self.assertResourceCalled(
            'Directory',
            '/tmp/AMBARI-artifacts/',
            create_parents=True,
        )
        self.assertResourceCalled(
            'File',
            '/tmp/jdk-7u67-linux-x64.tar.gz',
            content=DownloadSource(
                'http://c6401.ambari.apache.org:8080/resources//jdk-7u67-linux-x64.tar.gz'
            ),
            not_if='test -f /tmp/jdk-7u67-linux-x64.tar.gz',
        )
        self.assertResourceCalled(
            'File',
            '/tmp/jdk-7u67-linux-x64.tar.gz',
            mode=0755,
        )
        self.assertResourceCalled(
            'Directory',
            '/usr/jdk64',
        )
        self.assertResourceCalled(
            'Execute',
            ('chmod', 'a+x', u'/usr/jdk64'),
            sudo=True,
        )
        self.assertResourceCalled(
            'Execute',
            'cd /tmp/jdk_tmp_dir && tar -xf /tmp/jdk-7u67-linux-x64.tar.gz && ambari-sudo.sh cp -rp /tmp/jdk_tmp_dir/* /usr/jdk64',
        )
        self.assertResourceCalled(
            'Directory',
            '/tmp/jdk_tmp_dir',
            action=['delete'],
        )
        self.assertResourceCalled(
            'File',
            '/usr/jdk64/jdk1.7.0_45/bin/java',
            mode=0755,
            cd_access='a',
        )
        self.assertResourceCalled(
            'Execute',
            ('chmod', '-R', '755', u'/usr/jdk64/jdk1.7.0_45'),
            sudo=True,
        )
        self.assertNoMoreResources()
Example 35

from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, call, patch
from resource_management import Hook
import itertools
import getpass
import os


@patch.object(Hook, "run_custom_hook", new=MagicMock())
class TestHookBeforeInstall(RMFTestCase):
    TMP_PATH = '/tmp/hbase-hbase'

    @patch("os.path.isfile")
    @patch.object(getpass, "getuser", new=MagicMock(return_value='some_user'))
    @patch("tempfile.mkdtemp", new=MagicMock(return_value='/tmp/jdk_tmp_dir'))
    @patch("os.path.exists")
    def test_hook_default(self, os_path_exists_mock, os_path_isfile_mock):
        def side_effect(path):
            if path == "/etc/hadoop/conf":
                return True
            return False

        os_path_exists_mock.side_effect = side_effect
        os_path_isfile_mock.side_effect = [False, True, True, True, True]
import json
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *

from only_for_platform import not_for_platform, PLATFORM_WINDOWS

@not_for_platform(PLATFORM_WINDOWS)
@patch("resource_management.libraries.functions.get_hdp_version", new=MagicMock(return_value="2.3.2.0-1597"))
class TestSparkThriftServer(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.0.2.2/package"
  STACK_VERSION = "2.3"
  DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']

  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
  def test_configure_default(self, copy_to_hdfs_mock):
    copy_to_hdfs_mock.return_value = True
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/spark_thrift_server.py",
                   classname = "SparkThriftServer",
                   command = "configure",
                   config_file="spark_default.json",
                   hdp_stack_version = self.STACK_VERSION,
                   target = RMFTestCase.TARGET_COMMON_SERVICES
    )
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *

@patch("os.path.exists", new = MagicMock(return_value=True))
@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
class TestZookeeperClient(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "ZOOKEEPER/3.4.5/package"
  STACK_VERSION = "2.0.6"

  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zookeeper_client.py",
                       classname = "ZookeeperClient",
                       command = "configure",
                       config_file = "default.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )

    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
Example #38

from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *


@patch("os.path.exists", new=MagicMock(return_value=True))
class TestHookAfterInstall(RMFTestCase):
    def test_hook_default(self):
        self.executeScript("1.3.2/hooks/after-INSTALL/scripts/hook.py",
                           classname="AfterInstallHook",
                           command="hook",
                           config_file="default.json")
        self.assertResourceCalled(
            'Directory',
            '/etc/hadoop/conf.empty',
            owner='root',
            group='root',
            recursive=True,
        )
        self.assertResourceCalled('Link',
                                  '/etc/hadoop/conf',
Example #39
import json
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *


@patch("os.path.isfile", new=MagicMock(return_value=True))
@patch("glob.glob", new=MagicMock(return_value=["one", "two"]))
class TestWebHCatServer(RMFTestCase):
    COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
    STACK_VERSION = "2.0.6"

    def test_configure_default(self):
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR +
                           "/scripts/webhcat_server.py",
                           classname="WebHCatServer",
                           command="configure",
                           config_file="default.json",
                           hdp_stack_version=self.STACK_VERSION,
                           target=RMFTestCase.TARGET_COMMON_SERVICES)
        self.assert_configure_default()
        self.assertNoMoreResources()
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *

from only_for_platform import not_for_platform, PLATFORM_WINDOWS


@not_for_platform(PLATFORM_WINDOWS)
@patch("resource_management.libraries.functions.get_stack_version",
       new=MagicMock(return_value="2.3.2.0-1597"))
class TestSparkThriftServer(RMFTestCase):
    COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.0.2.2/package"
    STACK_VERSION = "2.3"
    DEFAULT_IMMUTABLE_PATHS = [
        '/apps/hive/warehouse', '/apps/falcon', '/mr-history/done',
        '/app-logs', '/tmp'
    ]

    @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
    def test_configure_default(self, copy_to_hdfs_mock):
        copy_to_hdfs_mock.return_value = True
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR +
                           "/scripts/spark_thrift_server.py",
                           classname="SparkThriftServer",
                           command="configure",
Example #41

import json
import os

from stacks.utils.RMFTestCase import *

from mock.mock import MagicMock, patch
from resource_management.libraries import functions
from resource_management.core.logger import Logger
from resource_management.libraries.script.config_dictionary import UnknownConfiguration
from hive_server_interactive import HiveServerInteractiveDefault
from resource_management.libraries.script.script import Script
from resource_management.core import shell

@patch("resource_management.libraries.Script.get_tmp_dir", new=MagicMock(return_value=('/var/lib/ambari-agent/tmp')))
@patch.object(functions, "get_stack_version", new=MagicMock(return_value="2.0.0.0-1234"))
@patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
@patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output",
       new=MagicMock(return_value=(0, '123', '')))
class TestHiveServerInteractive(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
  STACK_VERSION = "2.0.6"
  UPGRADE_STACK_VERSION = "2.2"
  DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']

  def setUp(self):
    Logger.logger = MagicMock()
    self.testDirectory = os.path.dirname(os.path.abspath(__file__))
    # llap state related tests.
    self.hsi = HiveServerInteractiveDefault()
Example #42
from ambari_commons import OSCheck

from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, patch, call
import time
from resource_management.core import sudo
import glob


@patch.object(glob, "glob", new=MagicMock(return_value=["/tmp"]))
@patch.object(sudo,
              "read_file",
              new=MagicMock(return_value='{"interpreterSettings":[]}'))
class TestZeppelinMaster(RMFTestCase):
    COMMON_SERVICES_PACKAGE_DIR = "ZEPPELIN/0.6.0.2.5/package"
    STACK_VERSION = "2.5"

    def assert_configure_default(self):
        self.assertResourceCalled(
            'Directory',
            '/var/log/zeppelin',
            owner='zeppelin',
            group='zeppelin',
            create_parents=True,
            mode=0755,
Example #43
class TestHostname(TestCase):
    @patch.object(OSCheck,
                  "os_distribution",
                  new=MagicMock(return_value=os_distro_value))
    def test_hostname(self):
        hostname.cached_hostname = None
        hostname.cached_public_hostname = None
        config = AmbariConfig()
        self.assertEquals(hostname.hostname(config),
                          socket.getfqdn().lower(),
                          "hostname should equal the socket-based hostname")
        pass

    def test_server_hostnames(self):
        hostname.cached_server_hostnames = []
        config = AmbariConfig()
        default_server_hostname = config.get('server', 'hostname')
        config.set('server', 'hostname', 'ambari-host')
        server_hostnames = hostname.server_hostnames(config)
        self.assertEquals(['ambari-host'], server_hostnames,
                          "expected host name ['ambari-host']; got {0}".format(
                              server_hostnames))
        config.set('server', 'hostname', default_server_hostname)
        pass

    def test_server_hostnames_multiple(self):
        hostname.cached_server_hostnames = []
        config = AmbariConfig()
        default_server_hostname = config.get('server', 'hostname')
        config.set('server', 'hostname',
                   'ambari-host, ambari-host2, ambari-host3')
        server_hostnames = hostname.server_hostnames(config)
        self.assertEquals(len(server_hostnames), 3)
        self.assertEquals(['ambari-host', 'ambari-host2', 'ambari-host3'],
                          server_hostnames,
                          "expected host names ['ambari-host', 'ambari-host2', "
                          "'ambari-host3']; got {0}".format(server_hostnames))
        config.set('server', 'hostname', default_server_hostname)
        pass

    @not_for_platform(PLATFORM_WINDOWS)
    def test_server_hostnames_override(self):
        hostname.cached_server_hostnames = []
        fd = tempfile.mkstemp(text=True)
        tmpname = fd[1]
        os.close(fd[0])
        os.chmod(tmpname, os.stat(tmpname).st_mode | stat.S_IXUSR)

        tmpfile = file(tmpname, "w+")
        config = AmbariConfig()
        try:
            tmpfile.write("#!/bin/sh\n\necho 'test.example.com'")
            tmpfile.close()

            config.set('server', 'hostname_script', tmpname)

            server_hostnames = hostname.server_hostnames(config)
            self.assertEquals(
                server_hostnames, ['test.example.com'],
                "expected hostname ['test.example.com']; got {0}".format(
                    server_hostnames))
        finally:
            os.remove(tmpname)
            config.remove_option('server', 'hostname_script')
        pass

    @not_for_platform(PLATFORM_WINDOWS)
    def test_server_hostnames_multiple_override(self):
        hostname.cached_server_hostnames = []
        fd = tempfile.mkstemp(text=True)
        tmpname = fd[1]
        os.close(fd[0])
        os.chmod(tmpname, os.stat(tmpname).st_mode | stat.S_IXUSR)

        tmpfile = file(tmpname, "w+")
        config = AmbariConfig()
        try:
            tmpfile.write(
                "#!/bin/sh\n\necho 'host1.example.com, host2.example.com, host3.example.com'"
            )
            tmpfile.close()

            config.set('server', 'hostname_script', tmpname)

            expected_hostnames = [
                'host1.example.com', 'host2.example.com', 'host3.example.com'
            ]
            server_hostnames = hostname.server_hostnames(config)
            self.assertEquals(
                server_hostnames, expected_hostnames,
                "expected hostnames {0}; got {1}".format(
                    expected_hostnames, server_hostnames))
        finally:
            os.remove(tmpname)
            config.remove_option('server', 'hostname_script')
        pass

    @not_for_platform(PLATFORM_WINDOWS)
    def test_hostname_override(self):
        hostname.cached_hostname = None
        hostname.cached_public_hostname = None
        fd = tempfile.mkstemp(text=True)
        tmpname = fd[1]
        os.close(fd[0])
        os.chmod(tmpname, os.stat(tmpname).st_mode | stat.S_IXUSR)

        tmpfile = file(tmpname, "w+")
        config = AmbariConfig()
        try:
            tmpfile.write("#!/bin/sh\n\necho 'test.example.com'")
            tmpfile.close()

            config.set('agent', 'hostname_script', tmpname)

            self.assertEquals(hostname.hostname(config), 'test.example.com',
                              "expected hostname 'test.example.com'")
        finally:
            os.remove(tmpname)
            config.remove_option('agent', 'hostname_script')
        pass

    @not_for_platform(PLATFORM_WINDOWS)
    def test_public_hostname_override(self):
        hostname.cached_hostname = None
        hostname.cached_public_hostname = None
        fd = tempfile.mkstemp(text=True)
        tmpname = fd[1]
        os.close(fd[0])
        os.chmod(tmpname, os.stat(tmpname).st_mode | stat.S_IXUSR)

        tmpfile = file(tmpname, "w+")

        config = AmbariConfig()
        try:
            tmpfile.write("#!/bin/sh\n\necho 'test.example.com'")
            tmpfile.close()

            config.set('agent', 'public_hostname_script', tmpname)

            self.assertEquals(hostname.public_hostname(config),
                              'test.example.com',
                              "expected hostname 'test.example.com'")
        finally:
            os.remove(tmpname)
            config.remove_option('agent', 'public_hostname_script')
        pass

    @patch.object(OSCheck,
                  "os_distribution",
                  new=MagicMock(return_value=os_distro_value))
    @patch.object(socket, "getfqdn")
    def test_caching(self, getfqdn_mock):
        hostname.cached_hostname = None
        hostname.cached_public_hostname = None
        config = AmbariConfig()
        getfqdn_mock.side_effect = ["test.example.com", "test2.example.com"]
        self.assertEquals(hostname.hostname(config), "test.example.com")
        self.assertEquals(hostname.hostname(config), "test.example.com")
        self.assertEqual(getfqdn_mock.call_count, 1)
        pass
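
The caching test above relies on two stock MagicMock features: side_effect supplying successive return values and call_count confirming that socket.getfqdn is consulted only once. A minimal standalone sketch of that pattern using plain mock (none of the Ambari hostname code is involved; cached_lookup is a stand-in for hostname.hostname):

from mock.mock import MagicMock  # unittest.mock behaves the same way

fqdn = MagicMock(side_effect=["test.example.com", "test2.example.com"])
cache = {}

def cached_lookup():
    # Only the first call reaches the mock; later calls reuse the cached value.
    if "hostname" not in cache:
        cache["hostname"] = fqdn()
    return cache["hostname"]

assert cached_lookup() == "test.example.com"
assert cached_lookup() == "test.example.com"
assert fqdn.call_count == 1
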
Example #44
    def test_heartbeatWithServer(self, dumpsMock, loadsMock, sleepMock,
                                 event_mock):

        out = StringIO.StringIO()
        sys.stdout = out

        heartbeat = MagicMock()
        self.controller.heartbeat = heartbeat

        dumpsMock.return_value = "data"

        sendRequest = MagicMock(name="sendRequest")
        self.controller.sendRequest = sendRequest

        self.controller.responseId = 1
        response = {"responseId": "2", "restartAgent": "false"}
        loadsMock.return_value = response

        def one_heartbeat(*args, **kwargs):
            self.controller.DEBUG_STOP_HEARTBEATING = True
            return "data"

        sendRequest.side_effect = one_heartbeat

        actionQueue = MagicMock()
        actionQueue.isIdle.return_value = True

        # one successful request, after stop
        self.controller.actionQueue = actionQueue
        self.controller.heartbeatWithServer()
        self.assertTrue(sendRequest.called)

        calls = []

        def retry(*args, **kwargs):
            if len(calls) == 0:
                calls.append(1)
                response["responseId"] = "3"
                raise Exception()
            if len(calls) > 0:
                self.controller.DEBUG_STOP_HEARTBEATING = True
            return "data"

        # exception, retry, successful and stop
        sendRequest.side_effect = retry
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        self.assertEqual(1, self.controller.DEBUG_SUCCESSFULL_HEARTBEATS)

        # retry registration
        response["registrationCommand"] = "true"
        sendRequest.side_effect = one_heartbeat
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        self.assertTrue(self.controller.repeatRegistration)

        # components are not mapped
        response["registrationCommand"] = "false"
        response["hasMappedComponents"] = False
        sendRequest.side_effect = one_heartbeat
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        self.assertFalse(self.controller.hasMappedComponents)

        # components are mapped
        response["hasMappedComponents"] = True
        sendRequest.side_effect = one_heartbeat
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        self.assertTrue(self.controller.hasMappedComponents)

        # 'hasMappedComponents' absent from the response => still treated as mapped
        del response["hasMappedComponents"]
        sendRequest.side_effect = one_heartbeat
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        self.assertTrue(self.controller.hasMappedComponents)

        # wrong responseId => restart
        response = {"responseId": "2", "restartAgent": "false"}
        loadsMock.return_value = response

        restartAgent = MagicMock(name="restartAgent")
        self.controller.restartAgent = restartAgent
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        restartAgent.assert_called_once_with()

        # executionCommands, statusCommands
        self.controller.responseId = 1
        addToQueue = MagicMock(name="addToQueue")
        self.controller.addToQueue = addToQueue
        response["executionCommands"] = "executionCommands"
        response["statusCommands"] = "statusCommands"
        self.controller.DEBUG_STOP_HEARTBEATING = False
        self.controller.heartbeatWithServer()

        addToQueue.assert_has_calls(
            [call("executionCommands"),
             call("statusCommands")])

        # restartAgent command
        self.controller.responseId = 1
        self.controller.DEBUG_STOP_HEARTBEATING = False
        response["restartAgent"] = "true"
        restartAgent = MagicMock(name="restartAgent")
        self.controller.restartAgent = restartAgent
        self.controller.heartbeatWithServer()

        restartAgent.assert_called_once_with()

        # actionQueue not idle
        self.controller.responseId = 1
        self.controller.DEBUG_STOP_HEARTBEATING = False
        actionQueue.isIdle.return_value = False
        response["restartAgent"] = "false"
        self.controller.heartbeatWithServer()

        sleepMock.assert_called_with(
            self.controller.netutil.MINIMUM_INTERVAL_BETWEEN_HEARTBEATS)

        sys.stdout = sys.__stdout__
        self.controller.sendRequest = Controller.Controller.sendRequest
        self.controller.addToQueue = Controller.Controller.addToQueue
Example #45
class TestMapReduce2Client(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
  STACK_VERSION = "2.0.6"

  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                       classname = "MapReduce2Client",
                       command = "configure",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )

    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
      owner = 'yarn',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
      owner = 'yarn',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
      owner = 'yarn',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
      owner = 'yarn',
      create_parents = True,
      ignore_failures = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
      owner = 'hdfs',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['core-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
      owner = 'hdfs',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['hdfs-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
      owner = 'yarn',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['mapred-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
      owner = 'yarn',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['yarn-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
      owner = 'yarn',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['capacity-scheduler'],
      configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
      owner = 'yarn',
      group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
      content = Template('yarn.conf.j2'),
      mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
      content = Template('mapreduce.conf.j2'),
      mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
      content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
      owner = 'yarn',
      group = 'hadoop',
      mode = 0755,
    )
    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
                              group = 'hadoop',
                              mode = 02050,
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
                              content = Template('container-executor.cfg.j2'),
                              group = 'hadoop',
                              mode = 0644,
                              )
    self.assertResourceCalled('Directory', '/cgroups_test/cpu',
                              group = 'hadoop',
                              create_parents = True,
                              mode = 0755,
                              cd_access="a"
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
                              content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
                              mode = 0755,
                              owner = 'hdfs',
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
                              content = Template('taskcontroller.cfg.j2'),
                              owner = 'hdfs',
                              )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              owner = 'mapred',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['mapred-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
                              )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
                              owner = 'mapred',
                              group = 'hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
                              owner = 'mapred',
                              group = 'hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
                              owner = 'mapred',
                              group = 'hadoop',
                              )
    self.assertNoMoreResources()

  def test_configure_secured(self):

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                       classname = "MapReduce2Client",
                       command = "configure",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
      owner = 'yarn',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
      owner = 'yarn',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
      owner = 'yarn',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
      owner = 'mapred',
      group = 'hadoop',
      create_parents = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
      owner = 'yarn',
      create_parents = True,
      ignore_failures = True,
      cd_access = 'a',
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
      owner = 'hdfs',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['core-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
      owner = 'hdfs',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['hdfs-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
      owner = 'yarn',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['mapred-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
      owner = 'yarn',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['yarn-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
      owner = 'yarn',
      group = 'hadoop',
      mode = 0644,
      conf_dir = '/etc/hadoop/conf',
      configurations = self.getConfig()['configurations']['capacity-scheduler'],
      configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
      owner = 'yarn',
      group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
      content = Template('yarn.conf.j2'),
      mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
      content = Template('mapreduce.conf.j2'),
      mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
      content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
      owner = 'yarn',
      group = 'hadoop',
      mode = 0755,
    )
    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
      group = 'hadoop',
      mode = 06050,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
      content = Template('container-executor.cfg.j2'),
      group = 'hadoop',
      mode = 0644,
    )
    self.assertResourceCalled('Directory', '/cgroups_test/cpu',
                              group = 'hadoop',
                              create_parents = True,
                              mode = 0755,
                              cd_access="a"
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
                              content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
                              mode = 0755,
                              owner = 'root',
                              )
    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
                              owner = 'root',
                              group = 'hadoop',
                              mode = 06050,
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
                              content = Template('taskcontroller.cfg.j2'),
                              owner = 'root',
                              group = 'hadoop',
                              mode = 0644,
                              )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              owner = 'mapred',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['mapred-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
                              )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
                              owner = 'mapred',
                              group = 'hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
                              owner = 'mapred',
                              group = 'hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
                              owner = 'mapred',
                              group = 'hadoop',
                              )
    self.assertNoMoreResources()

  @patch.object(functions, "get_hdp_version", new=MagicMock(return_value="2.2.0.0-2041"))
  def test_upgrade(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                   classname = "MapReduce2Client",
                   command = "restart",
                   config_file="client-upgrade.json",
                   hdp_stack_version = self.STACK_VERSION,
                   target = RMFTestCase.TARGET_COMMON_SERVICES
    )

    # for now, it's enough that hdp-select is confirmed
    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.2.1.0-2067'), sudo=True)


  def test_pre_upgrade_restart_23(self):
    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/client-upgrade.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version

    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                       classname = "MapReduce2Client",
                       command = "pre_upgrade_restart",
                       config_dict = json_content,
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0, None, ''), (0, None)],
                       mocks_dict = mocks_dict)

    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
    self.assertNoMoreResources()

    self.assertEquals(1, mocks_dict['call'].call_count)
    self.assertEquals(1, mocks_dict['checked_call'].call_count)
    self.assertEquals(
      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
       mocks_dict['checked_call'].call_args_list[0][0][0])
    self.assertEquals(
      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
       mocks_dict['call'].call_args_list[0][0][0])
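
The last two assertions above index into call_args_list; a minimal sketch with a bare MagicMock (no RMFTestCase machinery, illustrative arguments only) showing what each level of [0][0][0] selects:

from mock.mock import MagicMock

checked_call = MagicMock()
# An illustrative call: one positional argument (a command tuple) plus a keyword argument.
checked_call(('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir'), sudo=True)

first_call = checked_call.call_args_list[0]      # the first recorded call
positional_args = first_call[0]                  # its positional-argument tuple
assert positional_args[0] == ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir')
assert first_call[1] == {'sudo': True}           # its keyword arguments
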
Example #46
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
import os

origin_exists = os.path.exists


@patch.object(os.path,
              "exists",
              new=MagicMock(side_effect=lambda *args: origin_exists(args[0])
                            if args[0][-2:] == "j2" else True))
class TestNodeManager(RMFTestCase):
    def test_configure_default(self):
        self.executeScript(
            "2.0.6/services/YARN/package/scripts/nodemanager.py",
            classname="Nodemanager",
            command="configure",
            config_file="default.json")
        self.assert_configure_default()
        self.assertNoMoreResources()

    def test_start_default(self):
        self.executeScript(
            "2.0.6/services/YARN/package/scripts/nodemanager.py",
            classname="Nodemanager",
            command="start",
Example #47

from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, call, patch
from resource_management import Hook
import json

@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
@patch("os.path.exists", new = MagicMock(return_value=True))
@patch.object(Hook, "run_custom_hook", new = MagicMock())
class TestHookBeforeStart(RMFTestCase):
  def test_hook_default(self):
    self.executeScript("2.0.6/hooks/before-START/scripts/hook.py",
                       classname="BeforeStartHook",
                       command="hook",
                       config_file="default.json"
    )
    self.assertResourceCalled('Execute', ('setenforce', '0'),
                              only_if = 'test -f /selinux/enforce',
                              not_if = "(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)",
                              sudo=True,
                              )
    self.assertResourceCalled('Directory', '/var/log/hadoop',
Example #48
from ambari_commons.exceptions import FatalException

os.environ["ROOT"] = ""

from only_for_platform import get_platform, not_for_platform, only_for_platform, os_distro_value, PLATFORM_LINUX, PLATFORM_WINDOWS
from ambari_commons import os_utils

import shutil
project_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                           os.path.normpath("../../../../"))
shutil.copyfile(project_dir + "/ambari-server/conf/unix/ambari.properties",
                "/tmp/ambari.properties")

# We have to use this import HACK because the filename contains a dash
_search_file = os_utils.search_file
os_utils.search_file = MagicMock(return_value="/tmp/ambari.properties")
with patch.object(platform,
                  "linux_distribution",
                  return_value=MagicMock(return_value=('Redhat', '6.4',
                                                       'Final'))):
    with patch("os.path.isdir", return_value=MagicMock(return_value=True)):
        with patch("os.access", return_value=MagicMock(return_value=True)):
            with patch.object(
                    os_utils,
                    "parse_log4j_file",
                    return_value={'ambari.log.dir': '/var/log/ambari-server'}):
                with patch("platform.linux_distribution",
                           return_value=os_distro_value):
                    with patch("os.symlink"):
                        with patch("glob.glob",
                                   return_value=['/etc/init.d/postgresql-9.3'
Example #49

import json
import os

from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
from resource_management.libraries.functions.constants import Direction
from resource_management.libraries.script.script import Script


@patch("platform.linux_distribution", new=MagicMock(return_value="Linux"))
@patch(
    "resource_management.libraries.functions.get_user_call_output.get_user_call_output",
    new=MagicMock(return_value=(0, '123', '')))
class TestHiveMetastore(RMFTestCase):
    COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
    STACK_VERSION = "2.0.6"

    def test_configure_default(self):
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR +
                           "/scripts/hive_metastore.py",
                           classname="HiveMetastore",
                           command="configure",
                           config_file="../../2.1/configs/default.json",
                           stack_version=self.STACK_VERSION,
                           target=RMFTestCase.TARGET_COMMON_SERVICES)
Example #50
import json
from mock.mock import MagicMock, call, patch
from resource_management.libraries.functions import version
from stacks.utils.RMFTestCase import *
import os
from resource_management.libraries import functions

origin_exists = os.path.exists
@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
@patch.object(os.path, "exists", new=MagicMock(
  side_effect=lambda *args: origin_exists(args[0])
  if args[0][-2:] == "j2" else True))
@patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.2.0.0-1234"))
class TestMapReduce2Client(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
  STACK_VERSION = "2.0.6"

  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                       classname = "MapReduce2Client",
                       command = "configure",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
Example #51
    def test_cjk_forms(self):
        """
        Check form widgets work with CJK characters.
        """
        # Create a dummy screen.
        screen = MagicMock(spec=Screen, colours=8, unicode_aware=False)
        scene = MagicMock(spec=Scene)
        canvas = Canvas(screen, 10, 40, 0, 0)

        # Create the form we want to test.
        form = Frame(canvas, canvas.height, canvas.width, has_border=False)
        layout = Layout([100], fill_frame=True)
        mc_list = MultiColumnListBox(
            4,
            [3, 5, 0],
            [
                (["1", "2", "3"], 1),
                ([u"你", u"確", u"定"], 2),
            ],
            titles=[u"你確定嗎?", u"你確定嗎?", u"你確定嗎?"])
        text = Text()
        text_box = TextBox(3)
        form.add_layout(layout)
        layout.add_widget(mc_list)
        layout.add_widget(text)
        layout.add_widget(text_box)
        form.fix()
        form.register_scene(scene)
        form.reset()

        # Set some interesting values...
        text.value = u"你確定嗎? 你確定嗎? 你確定嗎?"
        text_box.value = [u"你確定嗎", u"?"]

        # Check that the CJK characters render correctly - no really this is correctly aligned!
        self.maxDiff = None
        form.update(0)
        self.assert_canvas_equals(
            canvas,
            u"你你 你你確確 你你確確定定嗎嗎??                      \n" +
            u"1  2    3                               \n" +
            u"你你 確確   定定                              \n" +
            u"                                        \n" +
            u"你你確確定定嗎嗎?? 你你確確定定嗎嗎?? 你你確確定定嗎嗎??        \n" +
            u"你你確確定定嗎嗎                                \n" +
            u"??                                      \n" +
            u"                                        \n" +
            u"                                        \n" +
            u"                                        \n")

        # Check that mouse input takes into account the glyph width
        self.process_mouse(form, [(5, 4, MouseEvent.LEFT_CLICK)])
        self.process_keys(form, ["b"])
        self.process_mouse(form, [(2, 4, MouseEvent.LEFT_CLICK)])
        self.process_keys(form, ["p"])
        form.save()
        self.assertEqual(text.value, u"你p確b定嗎? 你確定嗎? 你確定嗎?")

        self.process_mouse(form, [(2, 5, MouseEvent.LEFT_CLICK)])
        self.process_keys(form, ["p"])
        self.process_mouse(form, [(1, 6, MouseEvent.LEFT_CLICK)])
        self.process_keys(form, ["b"])
        form.save()
        self.assertEqual(text_box.value, [u"你p確定嗎", u"b?"])
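
The doubled glyphs in the expected canvas above are deliberate: each full-width CJK character occupies two screen cells, which is why the expected strings list every such character twice. A minimal stdlib sketch, independent of asciimatics, of the width classification behind this:

# -*- coding: utf-8 -*-
import unicodedata

# Full-width CJK characters are classified 'W' (wide) and take two terminal cells;
# ordinary Latin characters are 'Na' (narrow) and take one.
assert unicodedata.east_asian_width(u"你") == "W"
assert unicodedata.east_asian_width(u"嗎") == "W"
assert unicodedata.east_asian_width(u"A") == "Na"
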
Example #52
from ambari_commons.os_check import OSCheck

from resource_management.core import Environment
from resource_management.core.system import System
from resource_management.core.source import StaticFile
from resource_management.core.source import DownloadSource
from resource_management.core.source import Template
from resource_management.core.source import InlineTemplate

from ambari_jinja2 import UndefinedError, TemplateNotFound
import urllib2


@patch.object(OSCheck,
              "os_distribution",
              new=MagicMock(return_value=os_distro_value))
class TestContentSources(TestCase):
    @patch.object(os.path, "isfile")
    @patch.object(os.path, "join")
    def test_static_file_absolute_path(self, join_mock, is_file_mock):
        """
    Testing StaticFile source with absolute path
    """
        sudo.read_file = lambda path: 'content'
        is_file_mock.return_value = True

        with Environment("/base") as env:
            static_file = StaticFile("/absolute/path/file")
            content = static_file.get_content()

        self.assertEqual('content', content)
Example #53
    def test_textbox_input(self):
        """
        Check TextBox input works as expected.
        """
        screen = MagicMock(spec=Screen, colours=8, unicode_aware=False)
        canvas = Canvas(screen, 10, 40, 0, 0)
        form = TestFrame(canvas)
        form.reset()

        # Check basic movement keys
        self.process_keys(form,  ["ABC", Screen.KEY_LEFT, "D"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDC"])
        self.process_keys(form,  [Screen.KEY_RIGHT, Screen.KEY_RIGHT, "E"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDCE"])
        self.process_keys(form,  ["\nFGH", Screen.KEY_UP, Screen.KEY_UP, "I"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICE", "FGH"])
        self.process_keys(form,  [Screen.KEY_DOWN, Screen.KEY_DOWN, "J"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICE", "FGHJ"])
        self.process_keys(form,  [Screen.KEY_HOME, "K"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICE", "KFGHJ"])
        self.process_keys(form,  [Screen.KEY_END, "L"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICE", "KFGHJL"])

        # Backspace - normal and wrapping lines
        self.process_keys(form,  [Screen.KEY_BACK])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICE", "KFGHJ"])
        self.process_keys(form,  [Screen.KEY_HOME, Screen.KEY_BACK])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICEKFGHJ"])

        # Check cursor line-wrapping
        self.process_keys(form,  ["\n"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICE", "KFGHJ"])
        self.process_keys(form,  [Screen.KEY_LEFT, "M"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICEM", "KFGHJ"])
        self.process_keys(form,  [Screen.KEY_RIGHT, "N"])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICEM", "NKFGHJ"])

        # Delete - normal and wrapping lines and at end of all data.
        self.process_keys(form,  [Screen.KEY_DELETE])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICEM", "NFGHJ"])
        self.process_keys(form,
                          [Screen.KEY_UP, Screen.KEY_END, Screen.KEY_DELETE])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICEMNFGHJ"])
        self.process_keys(form, [Screen.KEY_END, Screen.KEY_DELETE])
        form.save()
        self.assertEqual(form.data["TA"], ["ABDICEMNFGHJ"])

        # Check that the current focus ignores unknown events.
        event = object()
        self.assertEqual(event, form.process_event(event))
Example #54
    def setUp(self):
        HostCleanup.logger = MagicMock()
        self.hostcleanup = HostCleanup.HostCleanup()
        # disable stdout
        out = StringIO.StringIO()
        sys.stdout = out
Example #55
class TestScript(RMFTestCase):
    def setUp(self):
        # disable stdout
        out = StringIO.StringIO()
        sys.stdout = out

    @patch("__builtin__.open")
    def test_structured_out(self, open_mock):
        script = Script()
        script.stroutfile = ''
        self.assertEqual(Script.structuredOut, {})

        script.put_structured_out({"1": "1"})
        self.assertEqual(Script.structuredOut, {"1": "1"})
        self.assertTrue(open_mock.called)

        script.put_structured_out({"2": "2"})
        self.assertEqual(open_mock.call_count, 2)
        self.assertEqual(Script.structuredOut, {"1": "1", "2": "2"})

        #Overriding
        script.put_structured_out({"1": "3"})
        self.assertEqual(open_mock.call_count, 3)
        self.assertEqual(Script.structuredOut, {"1": "3", "2": "2"})

    @patch("__builtin__.open")
    def test_status_commands_clear_structured_out(self, open_mock):
        """
    Tests that status commands will clear any stored structured output from prior status commands.
    :param open_mock: 
    :return: 
    """
        class MagicFile(object):
            def read(self):
                return "{}"

            def write(self, data):
                pass

            def __exit__(self, exc_type, exc_val, exc_tb):
                pass

            def __enter__(self):
                return self

        sys.argv = ["", "status", "foo.py", "", "", "INFO", ""]
        open_mock.side_effect = [MagicFile()]

        try:
            with Environment(".", test_mode=True) as env:
                script = Script()
                Script.structuredOut = {"version": "old_version"}
                script.execute()
        except:
            pass

        self.assertTrue(open_mock.called)
        self.assertEquals({}, Script.structuredOut)

    @patch.object(Logger, "error", new=MagicMock())
    @patch.object(Script, "put_structured_out")
    @patch(
        "resource_management.libraries.functions.version_select_util.get_component_version_from_symlink",
        new=MagicMock(return_value=None))
    @patch(
        "resource_management.libraries.functions.stack_select.get_package_name",
        new=MagicMock(return_value="foo-package"))
    @patch(
        "resource_management.libraries.functions.stack_select.unsafe_get_stack_versions",
        new=MagicMock(return_value=("", 0, ["2.6.0.0-1234"])))
    def test_save_version_structured_out_stack_select(self, pso_mock):
        """
    Tests that when writing out the version of the component to the structure output,
    if all else fails, we'll invoke the stack-select tool to see if there are any versions
    reported.
    :param pso_mock:
    :return:
    """
        script = Script()
        script.stroutfile = ''
        script.save_component_version_to_structured_out("start")

        self.assertEqual(pso_mock.call_count, 1)
        self.assertEquals(pso_mock.call_args[0][0],
                          {'version': '2.6.0.0-1234'})

    def tearDown(self):
        # enable stdout
        sys.stdout = sys.__stdout__
Example #56
    def test_multi_column_list_box(self):
        """
        Check MultiColumnListBox works as expected.
        """
        # Create a dummy screen.
        screen = MagicMock(spec=Screen, colours=8, unicode_aware=False)
        scene = MagicMock(spec=Scene)
        canvas = Canvas(screen, 10, 40, 0, 0)

        # Create the form we want to test.
        form = Frame(canvas, canvas.height, canvas.width, has_border=False)
        layout = Layout([100], fill_frame=True)
        mc_list = MultiColumnListBox(
            Widget.FILL_FRAME,
            [3, "4", ">4", "<4", ">10%", "100%"],
            [
                (["1", "2", "3", "4", "5", "6"], 1),
                (["11", "222", "333", "444", "555", "6"], 2),
                (["111", "2", "3", "4", "5", "6"], 3),
                (["1", "2", "33333", "4", "5", "6"], 4),
                (["1", "2", "3", "4", "5", "6666666666666666666666"], 5),
            ],
            titles=["A", "B", "C", "D", "E", "F"],
            name="mc_list")
        form.add_layout(layout)
        layout.add_widget(mc_list)
        form.fix()
        form.register_scene(scene)
        form.reset()

        # Check we have a default value for our list.
        form.save()
        self.assertEqual(form.data, {"mc_list": 1})

        # Check that UP/DOWN change selection.
        self.process_keys(form, [Screen.KEY_DOWN])
        form.save()
        self.assertEqual(form.data, {"mc_list": 2})
        self.process_keys(form, [Screen.KEY_UP])
        form.save()
        self.assertEqual(form.data, {"mc_list": 1})

        # Check that PGUP/PGDN change selection.
        self.process_keys(form, [Screen.KEY_PAGE_DOWN])
        form.save()
        self.assertEqual(form.data, {"mc_list": 5})
        self.process_keys(form, [Screen.KEY_PAGE_UP])
        form.save()
        self.assertEqual(form.data, {"mc_list": 1})

        # Check that the widget is rendered correctly.
        form.update(0)
        self.assert_canvas_equals(
            canvas,
            "A  B      C D      E F                  \n" +
            "1  2      3 4      5 6                  \n" +
            "11 222  333 444  555 6                  \n" +
            "...2      3 4      5 6                  \n" +
            "1  2   3... 4      5 6                  \n" +
            "1  2      3 4      5 6666666666666666666\n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n")

        # Check that mouse input changes selection.
        self.process_mouse(form, [(2, 2, MouseEvent.LEFT_CLICK)])
        form.save()
        self.assertEqual(form.data, {"mc_list": 2})
        self.process_mouse(form, [(2, 1, MouseEvent.LEFT_CLICK)])
        form.save()
        self.assertEqual(form.data, {"mc_list": 1})

        # Check that the start_line can be read and set - and enforces good behaviour
        mc_list.start_line = 0
        self.assertEqual(mc_list.start_line, 0)
        mc_list.start_line = len(mc_list.options) - 1
        self.assertEqual(mc_list.start_line, len(mc_list.options) - 1)
        mc_list.start_line = 10000000
        self.assertEqual(mc_list.start_line, len(mc_list.options) - 1)

        # Check that options can be read and set.
        mc_list.options = [(["a", "b", "c", "d", "e", "f"], 0)]
        self.assertEqual(mc_list.options, [(["a", "b", "c", "d", "e", "f"], 0)])
        mc_list.options = []
        self.assertEqual(mc_list.options, [])

        # Check that the form re-renders correctly afterwards.
        form.update(1)
        self.assert_canvas_equals(
            canvas,
            "A  B      C D      E F                  \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n" +
            "                                        \n")

        # Check that the current focus ignores unknown events.
        event = object()
        self.assertEqual(event, form.process_event(event))
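
The `process_keys` and `process_mouse` helpers used above belong to the surrounding test case rather than to this snippet. A minimal sketch of how they might be implemented, assuming asciimatics' `KeyboardEvent` and `MouseEvent` event types (the real helpers may differ in detail):

from asciimatics.event import KeyboardEvent, MouseEvent

def process_keys(form, values):
    # Deliver each key code to the form as a KeyboardEvent.
    for value in values:
        form.process_event(KeyboardEvent(value))

def process_mouse(form, values):
    # Deliver each (x, y, buttons) tuple to the form as a MouseEvent.
    for x, y, buttons in values:
        form.process_event(MouseEvent(x, y, buttons))

In the test above they are instance methods (called as `self.process_keys(...)`), so the real versions also take `self`; the event flow is the same.
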
Example #57
 def test_district_functionality_report(self, mock_run_query):
     mock = MagicMock()
     mock.couch_user = self.user
     mock.GET = {
         'lang': '',
         'sf': 'sf5',
         'startdate': '2018-01-01',
         'enddate': '2018-01-31',
         'hierarchy_af': '646eb23165f2f3ee9966b0512efc9494',
         'month': '01',
         'year': '2018',
         'hierarchy_district': 'kaushambi',
         'hierarchy_block': 'Chail',
     }
     district_functionality_report = DistrictFunctionalityReport(
         request=mock, domain='up-nrhm')
     rows = district_functionality_report.rows
     expected = ([
         [
             'Newborn visits within first day of birth in case of home deliveries',
             {
                 'sort_key': '1.4%',
                 'html': '1.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '1.6%',
                 'html': '1.6%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '3.2%',
                 'html': '3.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '2.1%',
                 'html': '2.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '3.0%',
                 'html': '3.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '3.2%',
                 'html': '3.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '4.7%',
                 'html': '4.7%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Set of home visits for newborn care as specified in the HBNC guidelines<br/>(six visits'
             ' in case of Institutional delivery and seven in case of a home delivery)',
             {
                 'sort_key': '18.3%',
                 'html': '18.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '16.3%',
                 'html': '16.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '14.1%',
                 'html': '14.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '18.5%',
                 'html': '18.5%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '16.4%',
                 'html': '16.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '20.1%',
                 'html': '20.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '17.5%',
                 'html': '17.5%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '17.1%',
                 'html': '17.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Attending VHNDs/Promoting immunization', {
                 'sort_key': '26.8%',
                 'html': '26.8%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '22.5%',
                 'html': '22.5%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '20.2%',
                 'html': '20.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '26.6%',
                 'html': '26.6%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '25.4%',
                 'html': '25.4%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '27.6%',
                 'html': '27.6%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '25.4%',
                 'html': '25.4%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '24.8%',
                 'html': '24.8%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Supporting institutional delivery', {
                 'sort_key': '20.4%',
                 'html': '20.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '20.9%',
                 'html': '20.9%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '18.2%',
                 'html': '18.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '18.5%',
                 'html': '18.5%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '20.6%',
                 'html': '20.6%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '21.6%',
                 'html': '21.6%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '18.3%',
                 'html': '18.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '16.3%',
                 'html': '16.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Management of childhood illness - especially diarrhea and pneumonia',
             {
                 'sort_key': '21.1%',
                 'html': '21.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '15.5%',
                 'html': '15.5%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '20.2%',
                 'html': '20.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '16.9%',
                 'html': '16.9%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '14.3%',
                 'html': '14.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '10.4%',
                 'html': '10.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '21.4%',
                 'html': '21.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '18.6%',
                 'html': '18.6%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Household visits with nutrition counseling', {
                 'sort_key': '16.9%',
                 'html': '16.9%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '7.0%',
                 'html': '7.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '9.1%',
                 'html': '9.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '7.9%',
                 'html': '7.9%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.7%',
                 'html': '0.7%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '12.4%',
                 'html': '12.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Fever cases seen/malaria slides made in malaria endemic area',
             {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '1.0%',
                 'html': '1.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Acting as DOTS provider', {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '5.4%',
                 'html': '5.4%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '4.0%',
                 'html': '4.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '4.0%',
                 'html': '4.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '3.7%',
                 'html': '3.7%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '2.2%',
                 'html': '2.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '6.2%',
                 'html': '6.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Holding or attending village/VHSNC meeting', {
                 'sort_key': '7.7%',
                 'html': '7.7%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '1.6%',
                 'html': '1.6%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '5.1%',
                 'html': '5.1%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '7.3%',
                 'html': '7.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '0.0%',
                 'html': '0.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '6.7%',
                 'html': '6.7%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '4.0%',
                 'html': '4.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '10.9%',
                 'html': '10.9%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             'Successful referral of the IUD, female sterilization or male sterilization cases and/or '
             'providing OCPs/Condoms', {
                 'sort_key': '26.1%',
                 'html': '26.1%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '23.3%',
                 'html': '23.3%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '24.2%',
                 'html': '24.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '24.2%',
                 'html': '24.2%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '21.7%',
                 'html': '21.7%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '25.4%',
                 'html': '25.4%'
             }, {
                 'sort_key': 'C',
                 'html': 'C'
             }, {
                 'sort_key': '23.8%',
                 'html': '23.8%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': '24.0%',
                 'html': '24.0%'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ],
         [
             '<b>Total number of ASHAs who are functional on at least 60% of the tasks</b>',
             {
                 'sort_key': 21,
                 'html': '30/142 (21%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 18,
                 'html': '24/129 (18%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 18,
                 'html': '18/100 (18%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 20,
                 'html': '26/124 (20%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 19,
                 'html': '37/189 (19%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 23,
                 'html': '31/134 (23%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 21,
                 'html': '27/126 (21%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }, {
                 'sort_key': 21,
                 'html': '28/129 (21%)'
             }, {
                 'sort_key': 'D',
                 'html': 'D'
             }
         ]
     ], 0)
     self.assertEqual(len(rows), len(expected))
     self.assertEqual(len(rows[0]), len(expected[0]))
     for i in range(len(rows[0])):
         self.assertEqual(len(rows[0][i]), len(expected[0][i]))
         for record in expected[0][i]:
             self.assertIn(record, rows[0][i])
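
The report tests in this section all build the same mocked request by hand. A small factory along these lines (an assumed helper, not part of the original suite) would cover the shared setup using only the already-imported `MagicMock`:

from mock.mock import MagicMock

def make_report_request(user, **get_params):
    # Stand-in for an HTTP request: the report reads the logged-in user from
    # `couch_user` and its filters (dates, hierarchy, serial filter) from GET.
    request = MagicMock()
    request.couch_user = user
    request.GET = dict({'lang': ''}, **get_params)
    return request

With it, the setup above reduces to passing `make_report_request(self.user, sf='sf5', startdate='2018-01-01', enddate='2018-01-31', ...)` straight to `DistrictFunctionalityReport`.
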
Example #58
from stacks.utils.RMFTestCase import *
import resource_management.libraries.functions
from mock.mock import MagicMock, call, patch


@patch("glob.glob",
       new=MagicMock(return_value="/usr/something/oozie-client/lib"))
@patch("platform.linux_distribution", new=MagicMock(return_value="Linux"))
class TestServiceCheck(RMFTestCase):
    COMMON_SERVICES_PACKAGE_DIR = "OOZIE/4.0.0.2.0/package"
    STACK_VERSION = "2.0.6"

    def test_service_check_default(self):
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR +
                           "/scripts/service_check.py",
                           classname="OozieServiceCheck",
                           command="service_check",
                           config_file="default.json",
                           hdp_stack_version=self.STACK_VERSION,
                           target=RMFTestCase.TARGET_COMMON_SERVICES)

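        # executeScript runs the service_check command against the JSON config and
        # records each resource the script declares, so they can be checked in
        # order with assertResourceCalled.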
        self.assertResourceCalled(
Example #59
 def test_block_level_af_report(self, mock_run_query):
     mock = MagicMock()
     mock.couch_user = self.user
     mock.GET = {
         'lang': '',
         'sf': 'sf4',
         'startdate': '2018-01-01',
         'enddate': '2018-01-31',
         'hierarchy_af': '646eb23165f2f3ee9966b0512efc9494',
         'month': '01',
         'year': '2018',
         'hierarchy_district': 'kaushambi',
         'hierarchy_block': 'Chail',
     }
     block_level_af_report = BlockLevelAFReport(request=mock,
                                                domain='up-nrhm')
     rows = block_level_af_report.rows
     expected = ([
         [
             'Newborn visits within first day of birth in case of home deliveries',
             {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': 2.0,
                 'html': 2.0
             }
         ],
         [
             'Set of home visits for newborn care as specified in the HBNC guidelines<br/>(six visits '
             'in case of Institutional delivery and seven in case of a home delivery)',
             {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': 26.0,
                 'html': 26.0
             }
         ],
         [
             'Attending VHNDs/Promoting immunization', {
                 'sort_key': '5',
                 'html': '5'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '5',
                 'html': '5'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '5',
                 'html': '5'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': 38.0,
                 'html': 38.0
             }
         ],
         [
             'Supporting institutional delivery', {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '1',
                 'html': '1'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': 29.0,
                 'html': 29.0
             }
         ],
         [
             'Management of childhood illness - especially diarrhea and pneumonia',
             {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': 30.0,
                 'html': 30.0
             }
         ],
         [
             'Household visits with nutrition counseling', {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '2',
                 'html': '2'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': 24.0,
                 'html': 24.0
             }
         ],
         [
             'Fever cases seen/malaria slides made in malaria endemic area',
             {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': 0.0,
                 'html': 0.0
             }
         ],
         [
             'Acting as DOTS provider', {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': 0.0,
                 'html': 0.0
             }
         ],
         [
             'Holding or attending village/VHSNC meeting', {
                 'sort_key': '1',
                 'html': '1'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '0',
                 'html': '0'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': 11.0,
                 'html': 11.0
             }
         ],
         [
             'Successful referral of the IUD, female sterilization or male sterilization cases and/or '
             'providing OCPs/Condoms', {
                 'sort_key': '5',
                 'html': '5'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '5',
                 'html': '5'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': '3',
                 'html': '3'
             }, {
                 'sort_key': '4',
                 'html': '4'
             }, {
                 'sort_key': 37.0,
                 'html': 37.0
             }
         ],
         [
             '<b>Total number of ASHAs who are functional on at least 60% of the tasks</b>',
             {
                 'sort_key': 25.925925925925927,
                 'html': '4/18 (25%)'
             }, {
                 'sort_key': 24.444444444444443,
                 'html': '3/15 (24%)'
             }, {
                 'sort_key': 25.0,
                 'html': '4/16 (25%)'
             }, {
                 'sort_key': 24.444444444444443,
                 'html': '3/15 (24%)'
             }, {
                 'sort_key': 24.444444444444443,
                 'html': '3/15 (24%)'
             }, {
                 'sort_key': 25.49019607843137,
                 'html': '4/17 (25%)'
             }, {
                 'sort_key': 19.607843137254903,
                 'html': '3/17 (19%)'
             }, {
                 'sort_key': 23.809523809523814,
                 'html': '3/14 (23%)'
             }, {
                 'sort_key': 24.444444444444443,
                 'html': '3/15 (24%)'
             }, {
                 'sort_key': 21,
                 'html': '30/142 (21%)'
             }
         ],
         [
             '<b>Total number of ASHAs who did not report/not known</b>', {
                 'sort_key': '13',
                 'html': '13'
             }, {
                 'sort_key': '11',
                 'html': '11'
             }, {
                 'sort_key': '11',
                 'html': '11'
             }, {
                 'sort_key': '11',
                 'html': '11'
             }, {
                 'sort_key': '11',
                 'html': '11'
             }, {
                 'sort_key': '12',
                 'html': '12'
             }, {
                 'sort_key': '13',
                 'html': '13'
             }, {
                 'sort_key': '11',
                 'html': '11'
             }, {
                 'sort_key': '11',
                 'html': '11'
             }, {
                 'sort_key': 104.0,
                 'html': 104.0
             }
         ],
         [
             '<b>Total Number of ASHAs under each Facilitator</b>', {
                 'sort_key': 18,
                 'html': 18
             }, {
                 'sort_key': 15,
                 'html': 15
             }, {
                 'sort_key': 16,
                 'html': 16
             }, {
                 'sort_key': 15,
                 'html': 15
             }, {
                 'sort_key': 15,
                 'html': 15
             }, {
                 'sort_key': 17,
                 'html': 17
             }, {
                 'sort_key': 17,
                 'html': 17
             }, {
                 'sort_key': 14,
                 'html': 14
             }, {
                 'sort_key': 15,
                 'html': 15
             }, {
                 'sort_key': 142,
                 'html': 142
             }
         ]
     ], 142)
     self.assertEqual(len(rows), len(expected))
     self.assertEqual(len(rows[0]), len(expected[0]))
     for i in range(len(rows[0])):
         self.assertEqual(len(rows[0][i]), len(expected[0][i]))
         for record in expected[0][i]:
             self.assertIn(record, rows[0][i])
Example #60
 def test_asha_facilitators_report(self, mock_run_query):
     mock = MagicMock()
     mock.couch_user = self.user
     mock.GET = {
         'lang': '',
         'sf': 'sf2',
         'startdate': '2018-01-01',
         'enddate': '2018-01-31',
         'hierarchy_af': '646eb23165f2f3ee9966b0512efc9494',
         'month': '01',
         'year': '2018',
     }
     asha_facilitators_report = ASHAFacilitatorsReport(request=mock,
                                                       domain='up-nrhm')
     rows = asha_facilitators_report.rows
     expected = ([
         [
             'Newborn visits within first day of birth in case of home deliveries',
             '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Set of home visits for newborn care as specified in the HBNC guidelines<br/>(six visits '
             'in case of Institutional delivery and seven in case of a home delivery)',
             '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Attending VHNDs/Promoting immunization', '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Supporting institutional delivery', '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Management of childhood illness - especially diarrhea and pneumonia',
             '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Household visits with nutrition counseling', '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Fever cases seen/malaria slides made in malaria endemic area',
             '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Acting as DOTS provider', '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Holding or attending village/VHSNC meeting', '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             'Successful referral of the IUD, female sterilization or male sterilization cases and/or '
             'providing OCPs/Condoms', '--', {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ],
         [
             '<b>Total number of ASHAs who are functional on at least 60% of the tasks</b>',
             {
                 'sort_key': 0.0,
                 'html': '0/17 (0%)'
             }, {
                 'sort_key': 17,
                 'html': 17
             }, ''
         ]
     ], 17, 0)
     self.assertEqual(len(rows), len(expected))
     self.assertEqual(len(rows[0]), len(expected[0]))
     for i in range(len(rows[0])):
         self.assertEqual(len(rows[0][i]), len(expected[0][i]))
         for record in expected[0][i]:
             self.assertIn(record, rows[0][i])
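
All three report tests end with the same row-by-row, cell-by-cell comparison. A shared helper capturing that tail (again an assumed addition, not present in the source) might look like:

def assert_first_element_rows_match(test_case, rows, expected):
    # Both `rows` and `expected` are tuples whose first element holds the row
    # data; compare that element row by row, then cell by cell.
    test_case.assertEqual(len(rows), len(expected))
    test_case.assertEqual(len(rows[0]), len(expected[0]))
    for actual_row, expected_row in zip(rows[0], expected[0]):
        test_case.assertEqual(len(actual_row), len(expected_row))
        for cell in expected_row:
            test_case.assertIn(cell, actual_row)

Called as `assert_first_element_rows_match(self, rows, expected)`, it performs exactly the checks written out at the end of each test.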