Example #1
0
  def test_execute_alert_from_extension(self):
    """An alert execution command is routed through __json_to_callable and collect() runs."""
    commands = [{
      'clusterName': 'cluster',
      'hostName': 'host',
      'publicHostName': 'host',
      'alertDefinition': {'name': 'alert1'}
    }]

    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    # Stub the alert factory so no real alert implementation is needed.
    fake_alert = MagicMock()
    fake_alert.collect = Mock()
    fake_alert.set_helpers = Mock()
    handler._AlertSchedulerHandler__json_to_callable = Mock(return_value=fake_alert)
    handler._AlertSchedulerHandler__config_maps = {'cluster': {}}

    handler.execute_alert(commands)

    handler._AlertSchedulerHandler__json_to_callable.assert_called_with(
      'cluster', 'host', 'host', {'name': 'alert1'})
    self.assertTrue(fake_alert.collect.called)
Example #2
0
  def __test_start(self):
    """Disabled test: start() must boot the scheduler and schedule loaded definitions."""
    commands = [{
      'clusterName': 'cluster',
      'hostName': 'host',
      'publicHostName': 'host',
      'alertDefinition': {'name': 'alert1'}
    }]

    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    # Replace the alert factory and the underlying APScheduler with mocks.
    fake_alert = MagicMock()
    fake_alert.interval = Mock(return_value=5)
    fake_alert.collect = Mock()
    fake_alert.set_helpers = Mock()
    handler.schedule_definition = MagicMock()
    handler._AlertSchedulerHandler__scheduler = MagicMock()
    handler._AlertSchedulerHandler__scheduler.running = False
    handler._AlertSchedulerHandler__scheduler.start = Mock()
    handler._AlertSchedulerHandler__json_to_callable = Mock(return_value=fake_alert)
    handler._AlertSchedulerHandler__config_maps = {'cluster': {}}

    handler.start()

    self.assertTrue(handler._AlertSchedulerHandler__scheduler.start.called)
    handler.schedule_definition.assert_called_with(fake_alert)
Example #3
0
  def test_load_definitions(self):
    """A PORT-type definition in the cluster cache materializes as a PortAlert."""
    cached = {
      'alertDefinitions': [
        {'source': {'type': 'PORT'}}
      ]
    }
    init_mod = InitializerModule()
    init_mod.init()
    init_mod.alert_definitions_cache.rewrite_cluster_cache('0', cached)

    handler = AlertSchedulerHandler(init_mod)
    handler._AlertSchedulerHandler__config_maps = {'cluster': {}}

    loaded = handler._AlertSchedulerHandler__load_definitions()

    self.assertTrue(isinstance(loaded[0], PortAlert))
Example #4
0
  def test_execute_background_command(self, CustomServiceOrchestrator_mock,
                                  runCommand_mock,
                                  ):
    """A background command is started and tracked as IN_PROGRESS with one report."""
    CustomServiceOrchestrator_mock.return_value = None
    CustomServiceOrchestrator.runCommand.return_value = {
      'exitcode' : 0,
      'stdout': 'out-11',
      'stderr' : 'err-13'}

    init_mod = InitializerModule()
    init_mod.init()

    queue = ActionQueue(init_mod)
    command = copy.deepcopy(self.background_command)
    queue.put([command])
    queue.processBackgroundQueueSafeEmpty()

    # The python executor must have been started for the background command.
    self.assertTrue(runCommand_mock.called)
    running = queue.commandStatuses.current_state.get(command['taskId'])
    self.assertTrue(running is not None)
    self.assertEqual(running[1]['status'], ActionQueue.IN_PROGRESS_STATUS)

    reports = queue.commandStatuses.generate_report()[CLUSTER_ID]
    self.assertEqual(len(reports), 1)
Example #5
0
    def test_dump_command_to_json_with_retry(self, FileCache_mock, unlink_mock,
                                             isfile_mock, hostname_mock):
        """dump_command_to_json writes the command to a json file named after
        the taskId with owner-only (0600) permissions, for both the execution
        dump and the status dump, and unlinks any pre-existing file.

        Fix: the Python-2-only octal literals ``0777`` were replaced with the
        ``0o777`` form, which parses on both Python 2.6+ and Python 3; the
        deprecated ``assertNotEquals`` alias became ``assertNotEqual``.
        """
        FileCache_mock.return_value = None
        hostname_mock.return_value = "test.hst"
        command = {
            'commandType': 'EXECUTION_COMMAND',
            'role': u'DATANODE',
            'roleCommand': u'INSTALL',
            'commandId': '1-1',
            'taskId': 3,
            'clusterName': u'cc',
            'serviceName': u'HDFS',
            'configurations': {
                'global': {}
            },
            'configurationTags': {
                'global': {
                    'tag': 'v1'
                }
            },
            'clusterHostInfo': {
                'namenode_host': ['1'],
                'slave_hosts': ['0', '1'],
                'all_racks': [u'/default-rack:0'],
                'ambari_server_host': 'a.b.c',
                'ambari_server_port': '123',
                'ambari_server_use_ssl': 'false',
                'all_ipv4_ips': [u'192.168.12.101:0'],
                'all_hosts': ['h1.hortonworks.com', 'h2.hortonworks.com'],
                'all_ping_ports': ['8670:0,1']
            },
            'hostLevelParams': {}
        }

        tempdir = tempfile.gettempdir()
        initializer_module = InitializerModule()
        initializer_module.init()
        initializer_module.config.set('agent', 'prefix', tempdir)
        orchestrator = CustomServiceOrchestrator(initializer_module)
        isfile_mock.return_value = True
        # Test dumping EXECUTION_COMMAND
        json_file = orchestrator.dump_command_to_json(command)
        self.assertTrue(os.path.exists(json_file))
        self.assertTrue(os.path.getsize(json_file) > 0)
        if get_platform() != PLATFORM_WINDOWS:
            # The dump may contain secrets; it must be readable by the agent user only.
            self.assertEqual(oct(os.stat(json_file).st_mode & 0o777), '0600')
        self.assertTrue(json_file.endswith("command-3.json"))
        os.unlink(json_file)
        # Test dumping STATUS_COMMAND
        json_file = orchestrator.dump_command_to_json(command, True)
        self.assertTrue(os.path.exists(json_file))
        self.assertTrue(os.path.getsize(json_file) > 0)
        if get_platform() != PLATFORM_WINDOWS:
            self.assertEqual(oct(os.stat(json_file).st_mode & 0o777), '0600')
        self.assertTrue(json_file.endswith("command-3.json"))
        os.unlink(json_file)
        # Side effect: clusterHostInfo must survive the dump untouched.
        self.assertNotEqual(command['clusterHostInfo'], {})
        self.assertTrue(unlink_mock.called)
Example #6
0
  def __test_execute_python_executor(self, resolve_script_path_mock,
                                   get_py_executor_mock):
    """Disabled test: a background command runs through PythonExecutor and its
    completion callback records a COMPLETED status with the process output.

    Fix: the final result list was bound to ``report`` while the assertions
    below referenced the undefined name ``reports`` (NameError); it is now
    bound to ``reports``.
    """
    dummy_controller = MagicMock()
    cfg = AmbariConfig()
    cfg.set('agent', 'tolerate_download_failures', 'true')
    cfg.set('agent', 'prefix', '.')
    cfg.set('agent', 'cache_dir', 'background_tasks')

    initializer_module = InitializerModule()
    initializer_module.init()
    initializer_module.config = cfg
    initializer_module.metadata_cache.cache_update({CLUSTER_ID:{'clusterLevelParams':{}}}, 'abc')
    initializer_module.configurations_cache.cache_update({CLUSTER_ID:{}}, 'abc')
    initializer_module.host_level_params_cache.cache_update({CLUSTER_ID:{}}, 'abc')
    CustomServiceOrchestrator.runCommand = default_run_command

    actionQueue = ActionQueue(initializer_module)
    pyex = PythonExecutor(actionQueue.customServiceOrchestrator.tmp_dir, actionQueue.customServiceOrchestrator.config)
    patch_output_file(pyex)
    get_py_executor_mock.return_value = pyex
    actionQueue.customServiceOrchestrator.dump_command_to_json = MagicMock()

    result = {}
    lock = threading.RLock()
    complete_done = threading.Condition(lock)

    def command_complete_w(process_condensed_result, handle):
      # Capture the callback arguments plus the recorded command status so the
      # main thread can assert on them after being notified.
      with lock:
        result['command_complete'] = {'condensed_result' : copy.copy(process_condensed_result),
                                      'handle' : copy.copy(handle),
                                      'command_status' : actionQueue.commandStatuses.get_command_status(handle.command['taskId'])
                                      }
        complete_done.notifyAll()

    actionQueue.on_background_command_complete_callback = wraped(actionQueue.on_background_command_complete_callback,
                                                                 None, command_complete_w)
    actionQueue.put([self.background_command])
    actionQueue.processBackgroundQueueSafeEmpty()

    with lock:
      complete_done.wait(0.1)

      finished_status = result['command_complete']['command_status']
      self.assertEqual(finished_status['status'], ActionQueue.COMPLETED_STATUS)
      self.assertEqual(finished_status['stdout'], 'process_out')
      self.assertEqual(finished_status['stderr'], 'process_err')
      self.assertEqual(finished_status['exitCode'], 0)

    runningCommand = actionQueue.commandStatuses.current_state.get(self.background_command['taskId'])
    self.assertTrue(runningCommand is not None)

    # Fix: bind to 'reports' (was 'report'), matching the assertions below.
    reports = actionQueue.result()
    self.assertEqual(len(reports), 1)
    self.assertEqual(reports[0]['stdout'], 'process_out')
Example #7
0
  def test_load_definitions_noFile(self):
    """With nothing in the definitions cache, __load_definitions yields an empty list."""
    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    handler._AlertSchedulerHandler__config_maps = {'cluster': {}}

    self.assertEquals(handler._AlertSchedulerHandler__load_definitions(), [])
Example #8
0
 def test_cancel(self, CustomServiceOrchestrator_mock,
                      get_mock, process_command_mock, gpeo_mock):
   """reset() drains every queued command and the queue thread stops cleanly.

   Fix: InitializerModule was constructed and initialized twice in a row;
   the redundant second construction was removed.
   """
   CustomServiceOrchestrator_mock.return_value = None

   initializer_module = InitializerModule()
   initializer_module.init()

   dummy_controller = MagicMock(initializer_module)
   config = MagicMock()
   gpeo_mock.return_value = 0
   config.get_parallel_exec_option = gpeo_mock

   actionQueue = ActionQueue(initializer_module)
   actionQueue.start()
   actionQueue.put([self.datanode_install_command, self.hbase_install_command])
   self.assertEqual(2, actionQueue.commandQueue.qsize())
   # reset() must throw away everything that was queued.
   actionQueue.reset()
   self.assertTrue(actionQueue.commandQueue.empty())
   time.sleep(0.1)
   initializer_module.stop_event.set()
   actionQueue.join()
   self.assertEqual(actionQueue.is_alive(), False, 'Action queue is not stopped.')
Example #9
0
  def test_execute_alert_emptyCommands(self):
    """An empty command list must not trigger any alert collection."""
    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    fake_alert = Mock()
    handler._AlertSchedulerHandler__json_to_callable = Mock(return_value=fake_alert)

    handler.execute_alert([])

    self.assertFalse(fake_alert.collect.called)
Example #10
0
 def test_parallel_exec_no_retry(self, CustomServiceOrchestrator_mock,
                        process_command_mock, gpeo_mock, threading_mock):
   """With parallel execution enabled but non-retriable commands, both
   commands are processed on the queue thread and no worker thread is spawned.

   Fixes: ``assert_any_calls`` is not a Mock assertion method (it auto-created
   an attribute and verified nothing, and raises AttributeError on modern
   mock) -- replaced with ``assert_any_call`` for the two commands that were
   actually enqueued; the duplicated InitializerModule setup was removed.
   """
   CustomServiceOrchestrator_mock.return_value = None

   initializer_module = InitializerModule()
   initializer_module.init()

   dummy_controller = MagicMock(initializer_module)
   config = MagicMock()
   gpeo_mock.return_value = 1
   config.get_parallel_exec_option = gpeo_mock

   actionQueue = ActionQueue(initializer_module)
   actionQueue.put([self.datanode_install_no_retry_command, self.snamenode_install_command])
   self.assertEqual(2, actionQueue.commandQueue.qsize())
   actionQueue.start()
   time.sleep(1)
   initializer_module.stop_event.set()
   actionQueue.join()
   self.assertEqual(actionQueue.is_alive(), False, 'Action queue is not stopped.')
   self.assertEqual(2, process_command_mock.call_count)
   # No extra thread may be created for non-retriable commands.
   self.assertEqual(0, threading_mock.call_count)
   process_command_mock.assert_any_call(self.datanode_install_no_retry_command)
   process_command_mock.assert_any_call(self.snamenode_install_command)
Example #11
0
  def test_json_to_callable_none(self):
    """An unknown source type cannot be mapped to an alert implementation."""
    definition = {'source': {'type': 'SOMETHING'}}

    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    outcome = handler._AlertSchedulerHandler__json_to_callable(
      'cluster', 'host', 'host', copy.deepcopy(definition))

    self.assertTrue(outcome is None)
Example #12
0
  def test_process_command(self, execute_command_mock, log_exc_mock):
    """process_command dispatches EXECUTION_COMMANDs, ignores unknown command
    types, and logs (rather than propagates) exceptions raised while executing.

    Fix: a duplicated back-to-back ``reset_mock()`` pair was removed -- the
    mocks were reset twice in a row with no assertions in between.
    """
    dummy_controller = MagicMock()
    config = AmbariConfig()
    config.set('agent', 'tolerate_download_failures', "true")

    initializer_module = InitializerModule()
    initializer_module.init()

    actionQueue = ActionQueue(initializer_module)
    execution_command = {
      'commandType' : ActionQueue.EXECUTION_COMMAND,
    }
    status_command = {
      'commandType' : ActionQueue.STATUS_COMMAND,
    }
    wrong_command = {
      'commandType' : "SOME_WRONG_COMMAND",
    }
    # Unknown command type: nothing executed, nothing logged.
    actionQueue.process_command(wrong_command)
    self.assertFalse(execute_command_mock.called)
    self.assertFalse(log_exc_mock.called)

    execute_command_mock.reset_mock()
    log_exc_mock.reset_mock()
    # Normal execution path.
    actionQueue.process_command(execution_command)
    self.assertTrue(execute_command_mock.called)
    self.assertFalse(log_exc_mock.called)

    execute_command_mock.reset_mock()
    log_exc_mock.reset_mock()

    # Exceptions raised during execution must be logged, not propagated.
    def side_effect(self):
      raise Exception("TerribleException")
    execute_command_mock.side_effect = side_effect
    actionQueue.process_command(execution_command)
    self.assertTrue(log_exc_mock.called)

    log_exc_mock.reset_mock()

    actionQueue.process_command(execution_command)
    self.assertTrue(log_exc_mock.called)
Example #13
0
    def test_runCommand_custom_action(self, get_custom_actions_base_dir_mock,
                                      FileCache_mock, run_file_mock,
                                      dump_command_to_json_mock,
                                      ambari_config_get,
                                      get_configuration_mock):
        """Running a custom action invokes run_file exactly once (no hooks)."""
        ambari_config_get.return_value = "0"
        FileCache_mock.return_value = None
        get_custom_actions_base_dir_mock.return_value = "some path"
        _, script = tempfile.mkstemp()
        command = {
            'role': 'any',
            'commandParams': {
                'script_type': 'PYTHON',
                'script': 'some_custom_action.py',
                'command_timeout': '600',
            },
            'ambariLevelParams': {'jdk_location': 'some_location'},
            'taskId': '3',
            'roleCommand': 'ACTIONEXECUTE',
            'clusterId': '-1',
        }
        get_configuration_mock.return_value = command

        initializer_module = InitializerModule()
        initializer_module.config = self.config
        initializer_module.init()

        orchestrator = CustomServiceOrchestrator(initializer_module)
        # Pretend a unix process is already tracked for this task.
        orchestrator.commands_in_progress = {command['taskId']: 111}

        # Normal run case.
        run_file_mock.return_value = {
            'stdout': 'sss',
            'stderr': 'eee',
            'exitcode': 0,
        }
        result = orchestrator.runCommand(command, "out.txt", "err.txt")
        self.assertEqual(result['exitcode'], 0)
        self.assertTrue(run_file_mock.called)
        # Hooks are not supported for custom actions,
        # so run_file() should be called only once.
        self.assertEqual(run_file_mock.call_count, 1)
Example #14
0
    def test_runCommand_background_action(self,
                                          get_custom_actions_base_dir_mock,
                                          FileCache_mock,
                                          dump_command_to_json_mock,
                                          get_py_executor_mock,
                                          get_configuration_mock):
        """A BACKGROUND_EXECUTION_COMMAND returns the in-progress exit code 777."""
        FileCache_mock.return_value = None
        get_custom_actions_base_dir_mock.return_value = "some path"
        _, script = tempfile.mkstemp()
        command = {
            'role': 'any',
            'commandParams': {
                'script_type': 'PYTHON',
                'script': 'some_custom_action.py',
                'command_timeout': '600',
            },
            'ambariLevelParams': {'jdk_location': 'some_location'},
            'clusterId': '-1',
            'taskId': '13',
            'roleCommand': 'ACTIONEXECUTE',
            'commandType': 'BACKGROUND_EXECUTION_COMMAND',
            '__handle': BackgroundCommandExecutionHandle(
                {'taskId': '13'}, 13, MagicMock(), MagicMock()),
        }
        initializer_module = InitializerModule()
        initializer_module.init()
        orchestrator = CustomServiceOrchestrator(initializer_module)

        # Use a patched real PythonExecutor so the background machinery runs.
        import TestActionQueue
        pyex = PythonExecutor(orchestrator.tmp_dir, orchestrator.config)
        TestActionQueue.patch_output_file(pyex)
        pyex.condenseOutput = MagicMock()
        get_py_executor_mock.return_value = pyex
        orchestrator.dump_command_to_json = MagicMock()

        ret = orchestrator.runCommand(command, "out.txt", "err.txt")
        self.assertEqual(ret['exitcode'], 777)
Example #15
0
 def test_ActionQueueStartStop(self, CustomServiceOrchestrator_mock,
                               get_mock, process_command_mock, get_parallel_exec_option_mock):
   """The queue thread keeps consuming commands while running and stops on stop_event."""
   CustomServiceOrchestrator_mock.return_value = None
   dummy_controller = MagicMock()
   config = MagicMock()
   get_parallel_exec_option_mock.return_value = 0
   config.get_parallel_exec_option = get_parallel_exec_option_mock

   initializer_module = InitializerModule()
   initializer_module.init()

   queue = ActionQueue(initializer_module)
   queue.start()
   time.sleep(0.1)
   initializer_module.stop_event.set()
   queue.join()
   self.assertEqual(queue.is_alive(), False, 'Action queue is not stopped.')
   # The loop must have polled for commands more than once while alive.
   self.assertTrue(process_command_mock.call_count > 1)
Example #16
0
  def test_json_to_callable_ams(self):
    """An AMS-type source definition maps to an AmsAlert carrying the metadata."""
    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    definition = {'source': {'type': 'AMS'}}

    alert = handler._AlertSchedulerHandler__json_to_callable(
      'cluster', 'host', 'host', copy.deepcopy(definition))

    self.assertTrue(alert is not None)
    self.assertTrue(isinstance(alert, AmsAlert))
    self.assertEquals(alert.alert_meta, definition)
    self.assertEquals(alert.alert_source_meta, definition['source'])
Example #17
0
  def test_do_not_log_execution_commands(self, command_status_dict_mock,
                                         cso_runCommand_mock, mock_log_command_output):
    """A command whose logging is disabled completes normally but its output
    is never passed to log_command_output, even with log_command_executes on.

    Fix: ``assert_not_called`` takes no arguments (passing a call list raised
    TypeError at runtime); the intended no-logging check is the bare
    ``assert_not_called()``.
    """
    custom_service_orchestrator_execution_result_dict = {
      'stdout': 'out',
      'stderr': 'stderr',
      'structuredOut': '',
      'exitcode': 0
    }
    cso_runCommand_mock.return_value = custom_service_orchestrator_execution_result_dict

    config = AmbariConfig()
    tempdir = tempfile.gettempdir()
    config.set('agent', 'prefix', tempdir)
    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
    config.set('agent', 'tolerate_download_failures', "true")
    # Logging of command executes is globally on; the command itself opts out.
    config.set('logging', 'log_command_executes', 1)
    dummy_controller = MagicMock()

    initializer_module = InitializerModule()
    initializer_module.init()

    actionQueue = ActionQueue(initializer_module)
    actionQueue.execute_command(self.datanode_restart_command_no_logging)
    reports = actionQueue.commandStatuses.generate_report()[CLUSTER_ID]
    expected = {'status': 'COMPLETED',
                'stderr': 'stderr',
                'stdout': 'out\n\nCommand completed successfully!\n',
                'clusterId': CLUSTER_ID,
                'structuredOut': '""',
                'roleCommand': u'CUSTOM_COMMAND',
                'serviceName': u'HDFS',
                'role': u'DATANODE',
                'actionId': '1-1',
                'taskId': 9,
                'exitCode': 0}
    # The command opted out of logging, so nothing may reach log_command_output.
    mock_log_command_output.assert_not_called()
    self.assertEqual(len(reports), 1)
    self.assertEqual(expected, reports[0])
Example #18
0
 def test_resolve_script_path(self, FileCache_mock, exists_mock):
     """resolve_script_path joins base and script when the file exists and
     raises AgentException when it does not."""
     FileCache_mock.return_value = None
     dummy_controller = MagicMock()
     config = AmbariConfig()
     initializer_module = InitializerModule()
     initializer_module.init()
     orchestrator = CustomServiceOrchestrator(initializer_module)

     # Existing path: resolved location is base/package + script path.
     exists_mock.return_value = True
     resolved = orchestrator.resolve_script_path(
         os.path.join("HBASE", "package"),
         os.path.join("scripts", "hbase_master.py"))
     self.assertEqual(
         os.path.join("HBASE", "package", "scripts", "hbase_master.py"),
         resolved)

     # Missing path: an AgentException is expected.
     exists_mock.return_value = False
     self.assertRaises(AgentException, orchestrator.resolve_script_path,
                       "/HBASE", os.path.join("scripts", "hbase_master.py"))
Example #19
0
  def test_store_config_tags_on_install_client_command(self, command_status_dict_mock,
      cso_runCommand_mock):
    """An INSTALL of a client component executes via the mocked orchestrator result."""
    cso_runCommand_mock.return_value = {
      'stdout': 'out',
      'stderr': 'stderr',
      'structuredOut' : '',
      'exitcode' : 0
    }

    tez_client_install_command = {
      'commandType': 'EXECUTION_COMMAND',
      'role': u'TEZ_CLIENT',
      'roleCommand': u'INSTALL',
      'commandId': '1-1',
      'taskId': 9,
      'clusterName': u'cc',
      'serviceName': u'TEZ',
      'configurations': {'global' : {}},
      'configurationTags': {'global' : { 'tag': 'v123' }},
      'hostLevelParams': {},
      'clusterId': CLUSTER_ID,
    }
    # Register TEZ_CLIENT as a known client component.
    LiveStatus.CLIENT_COMPONENTS = ({'serviceName': 'TEZ', 'componentName': 'TEZ_CLIENT'},)

    config = AmbariConfig()
    config.set('agent', 'prefix', tempfile.gettempdir())
    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
    config.set('agent', 'tolerate_download_failures', "true")
    dummy_controller = MagicMock()

    initializer_module = InitializerModule()
    initializer_module.init()

    actionQueue = ActionQueue(initializer_module)
    actionQueue.execute_command(tez_client_install_command)
Example #20
0
 def test_resolve_hook_script_path(self, FileCache_mock, isfile_mock):
     """resolve_hook_script_path returns None when there is no hooks dir or
     hook file, and a (script path, base dir) pair when the hook exists."""
     FileCache_mock.return_value = None
     dummy_controller = MagicMock()
     initializer_module = InitializerModule()
     initializer_module.init()
     orchestrator = CustomServiceOrchestrator(initializer_module)

     # No hooks directory at all -> None.
     self.assertEqual(
         orchestrator.resolve_hook_script_path(None, "prefix", "command",
                                               "script_type"),
         None)

     # Hook script present on disk -> (full script path, hook base dir).
     isfile_mock.return_value = True
     expected = (
         os.path.join('hooks_dir', 'prefix-command', 'scripts', 'hook.py'),
         os.path.join('hooks_dir', 'prefix-command'))
     self.assertEqual(
         orchestrator.resolve_hook_script_path("hooks_dir", "prefix",
                                               "command", "script_type"),
         expected)

     # Hook script missing -> None.
     isfile_mock.return_value = False
     self.assertEqual(
         orchestrator.resolve_hook_script_path("hooks_dir", "prefix",
                                               "command", "script_type"),
         None)
Example #21
0
    def test_requestComponentStatus(self, FileCache_mock, runCommand_mock):
        """requestComponentStatus passes runCommand's result dict through unchanged."""
        FileCache_mock.return_value = None
        status_command = {
            "serviceName": 'HDFS',
            "commandType": "STATUS_COMMAND",
            "clusterName": "",
            "componentName": "DATANODE",
            'configurations': {}
        }
        dummy_controller = MagicMock()
        initializer_module = InitializerModule()
        initializer_module.init()
        orchestrator = CustomServiceOrchestrator(initializer_module)

        # Component alive: exit code 0 is returned as-is.
        runCommand_mock.return_value = {"exitcode": 0}
        self.assertEqual(runCommand_mock.return_value,
                         orchestrator.requestComponentStatus(status_command))

        # Component dead: exit code 1 is returned as-is.
        runCommand_mock.return_value = {"exitcode": 1}
        self.assertEqual(runCommand_mock.return_value,
                         orchestrator.requestComponentStatus(status_command))
Example #22
0
  def test_refresh_queues_custom_command(self, command_status_dict_mock,
                                                            cso_runCommand_mock):
    """A YARN refresh-queues custom command completes and reports COMPLETED."""
    cso_runCommand_mock.return_value = {
      'stdout': 'out',
      'stderr': 'stderr',
      'structuredOut' : '',
      'exitcode' : 0
    }

    config = AmbariConfig()
    config.set('agent', 'prefix', tempfile.gettempdir())
    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
    config.set('agent', 'tolerate_download_failures', "true")
    dummy_controller = MagicMock()

    initializer_module = InitializerModule()
    initializer_module.init()

    actionQueue = ActionQueue(initializer_module)
    actionQueue.execute_command(self.yarn_refresh_queues_custom_command)

    reports = actionQueue.commandStatuses.generate_report()[CLUSTER_ID]
    self.assertEqual(len(reports), 1)
    self.assertEqual(
      {'status': 'COMPLETED',
       'stderr': 'stderr',
       'stdout': 'out\n\nCommand completed successfully!\n',
       'clusterId': CLUSTER_ID,
       'structuredOut': '""',
       'roleCommand': u'CUSTOM_COMMAND',
       'serviceName': u'YARN',
       'role': u'RESOURCEMANAGER',
       'actionId': '1-1',
       'taskId': 9,
       'exitCode': 0},
      reports[0])
Example #23
0
  def test_job_context_injector(self, reconfigure_urllib2_opener_mock):
    """The job context injector reconfigures urllib2 only when the system
    proxy setting is disabled."""
    # Proxy setting disabled -> the opener must be reconfigured.
    self.config.use_system_proxy_setting = lambda: False

    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    handler._job_context_injector(self.config)
    self.assertTrue(reconfigure_urllib2_opener_mock.called)

    reconfigure_urllib2_opener_mock.reset_mock()

    # Proxy setting enabled -> the opener is left untouched.
    self.config.use_system_proxy_setting = lambda: True

    init_mod = InitializerModule()
    init_mod.init()

    handler = AlertSchedulerHandler(init_mod)
    handler._job_context_injector(self.config)
    self.assertFalse(reconfigure_urllib2_opener_mock.called)
Example #24
0
    def test_mock_server_can_start(self, runCommand_mock):
        """End-to-end agent/mock-server handshake: register over STOMP, receive
        the initial caches, execute commands, and shut every thread down.

        NOTE(review): the frames_queue reads below depend on the exact order in
        which the agent subscribes/sends -- do not reorder them.
        """
        # Every command execution "fails" with exit code 1 so the status
        # reporter produces both in-progress and failed frames per command.
        runCommand_mock.return_value = {
            'stdout': '...',
            'stderr': '...',
            'structuredOut': '{}',
            'exitcode': 1
        }

        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        # Start the agent-side threads: heartbeat, action queue, alerts, status.
        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        action_queue = self.initializer_module.action_queue
        action_queue.start()
        self.initializer_module.alert_scheduler_handler.start()

        component_status_executor = ComponentStatusExecutor(
            self.initializer_module)
        component_status_executor.start()

        # First frames from the agent: connect, user subscription, registration.
        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # server sends registration response
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '0'
                  },
                  body=self.get_json("registration_response.json"))
        self.server.topic_manager.send(f)

        # response to /initial_topology
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '1'
                  },
                  body=self.get_json("topology_create.json"))
        self.server.topic_manager.send(f)

        # correlationId 2: initial metadata after registration.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '2'
                  },
                  body=self.get_json("metadata_after_registration.json"))
        self.server.topic_manager.send(f)

        # correlationId 3: initial configurations.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '3'
                  },
                  body=self.get_json("configurations_update.json"))
        self.server.topic_manager.send(f)

        # correlationId 4: initial host level params.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '4'
                  },
                  body=self.get_json("host_level_params.json"))
        self.server.topic_manager.send(f)

        # correlationId 5: initial alert definitions.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '5'
                  },
                  body=self.get_json("alert_definitions.json"))
        self.server.topic_manager.send(f)

        # Drain the agent's initial cache requests (order mirrors the sends above).
        initial_topology_request = self.server.frames_queue.get()
        initial_metadata_request = self.server.frames_queue.get()
        initial_configs_request = self.server.frames_queue.get()
        initial_host_level_params_request = self.server.frames_queue.get()
        initial_alert_definitions_request = self.server.frames_queue.get()

        # Wait until the agent considers itself registered.
        while not self.initializer_module.is_registered:
            time.sleep(0.1)

        command_status_reporter = CommandStatusReporter(
            self.initializer_module)
        command_status_reporter.start()

        host_status_reporter = HostStatusReporter(self.initializer_module)
        host_status_reporter.start()

        # Push a batch of execution commands at the agent.
        f = Frame(frames.MESSAGE,
                  headers={'destination': '/user/commands'},
                  body=self.get_json("execution_commands.json"))
        self.server.topic_manager.send(f)

        # Post-registration subscriptions, heartbeat, then per-command status
        # frames (each command yields an in-progress and a failed report since
        # runCommand is mocked to exit 1), and finally the host status report.
        commands_subscribe_frame = self.server.frames_queue.get()
        configurations_subscribe_frame = self.server.frames_queue.get()
        metadata_subscribe_frame = self.server.frames_queue.get()
        topologies_subscribe_frame = self.server.frames_queue.get()
        host_level_params_subscribe_frame = self.server.frames_queue.get()
        alert_definitions_subscribe_frame = self.server.frames_queue.get()
        heartbeat_frame = self.server.frames_queue.get()
        dn_install_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        dn_install_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        zk_install_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        zk_install_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        action_status_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        action_status_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        dn_recovery_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        dn_recovery_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        host_status_report = json.loads(self.server.frames_queue.get().body)

        # Begin shutdown; the final server message unblocks any pending reporter.
        self.initializer_module.stop_event.set()

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '6'
                  },
                  body=json.dumps({'id': '1'}))
        self.server.topic_manager.send(f)

        # All agent threads must terminate cleanly.
        command_status_reporter.join()
        heartbeat_thread.join()
        component_status_executor.join()
        host_status_reporter.join()
        action_queue.join()

        self.assertTrue('mounts' in host_status_report)
        self.assertEquals(
            self.initializer_module.topology_cache['0']['hosts'][0]
            ['hostName'], 'c6401.ambari.apache.org')
        self.assertEquals(
            self.initializer_module.metadata_cache['0']
            ['status_commands_to_run'], ('STATUS', ))
        self.assertEquals(
            self.initializer_module.configurations_cache['0']['configurations']
            ['zoo.cfg']['clientPort'], '2181')
        self.assertEquals(
            dn_install_in_progress_frame['clusters']['0'][0]['roleCommand'],
            'INSTALL')
        self.assertEquals(
            dn_install_in_progress_frame['clusters']['0'][0]['role'],
            'DATANODE')
        self.assertEquals(
            dn_install_in_progress_frame['clusters']['0'][0]['status'],
            'IN_PROGRESS')
        self.assertEquals(
            dn_install_failed_frame['clusters']['0'][0]['status'], 'FAILED')
        self.assertEquals(
            dn_recovery_in_progress_frame['clusters']['0'][0]['roleCommand'],
            'INSTALL')
        self.assertEquals(
            dn_recovery_in_progress_frame['clusters']['0'][0]['role'],
            'DATANODE')
        self.assertEquals(
            dn_recovery_in_progress_frame['clusters']['0'][0]['status'],
            'IN_PROGRESS')

        #============================================================================================
        #============================================================================================

        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        self.server.frames_queue.queue.clear()

        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        action_queue = self.initializer_module.action_queue
        action_queue.start()
        self.initializer_module.alert_scheduler_handler.start()

        component_status_executor = ComponentStatusExecutor(
            self.initializer_module)
        component_status_executor.start()

        command_status_reporter = CommandStatusReporter(
            self.initializer_module)
        command_status_reporter.start()

        host_status_reporter = HostStatusReporter(self.initializer_module)
        host_status_reporter.start()

        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # server sends registration response
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '0'
                  },
                  body=self.get_json("registration_response.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '1'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '2'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '3'
                  },
                  body='{"timestamp":1510577217}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '4'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '5'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        commands_subscribe_frame = self.server.frames_queue.get()
        configurations_subscribe_frame = self.server.frames_queue.get()
        metadata_subscribe_frame = self.server.frames_queue.get()
        topologies_subscribe_frame = self.server.frames_queue.get()
        heartbeat_frame = self.server.frames_queue.get()
        status_reports_frame = self.server.frames_queue.get()

        self.initializer_module.stop_event.set()

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '6'
                  },
                  body=json.dumps({'id': '1'}))
        self.server.topic_manager.send(f)

        heartbeat_thread.join()
        component_status_executor.join()
        command_status_reporter.join()
        host_status_reporter.join()
        action_queue.join()
Example #25
0
        # Launch Controller communication
        run_threads(initializer_module)

      #
      # If Ambari Agent connected to the server or
      # Ambari Agent was stopped using stop event
      # Clean up if not Windows OS
      #
      if connected or stopped:
        ExitHelper().exit()
        logger.info("finished")
        break
    pass # for server_hostname in server_hostnames
  pass # while not (connected or stopped)

  return active_server

if __name__ == "__main__":
  # Script entry point: wire up signal handling and hand control to main().
  # NOTE(review): is_logger_setup is presumably flipped to True inside main()
  # once logging is configured (not visible here) — confirm before relying on
  # the exception log below ever firing.
  is_logger_setup = False
  try:
    initializer_module = InitializerModule()
    # Install signal handlers so SIGTERM/SIGINT trip the shared stop event.
    heartbeat_stop_callback = bind_signal_handlers(agentPid, initializer_module.stop_event)

    main(initializer_module, heartbeat_stop_callback)
  except SystemExit:
    # Let deliberate exits (sys.exit) propagate untouched.
    raise
  except BaseException:
    # Log only if logging was already configured; re-raise so the process
    # still terminates with a non-zero status.
    if is_logger_setup:
      logger.exception("Exiting with exception:")
    raise
Example #26
0
    def test_alert_definitions_update_and_delete(self):
        """Verify the agent's alert-definition cache after add/edit/delete.

        Registers the agent against the mock STOMP server, seeds a small set
        of alert definitions, pushes add/edit/delete events, and checks the
        resulting cache against a golden file.
        """
        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        def push(headers, body):
            # Shorthand for emitting one server->agent STOMP message.
            self.server.topic_manager.send(
                Frame(frames.MESSAGE, headers=headers, body=body))

        def reply(correlation_id, body):
            # Server response frame correlated with a pending agent request.
            push({'destination': '/user/',
                  'correlationId': correlation_id}, body)

        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        # Drain the agent's connect / subscribe / registration frames.
        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # Registration response followed by the initial cache payloads
        # (topology, metadata, configs, host-level params are empty here).
        reply('0', self.get_json("registration_response.json"))
        reply('1', '{}')
        reply('2', '{}')
        reply('3', '{"timestamp":1510577217}')
        reply('4', '{}')
        reply('5', self.get_json("alert_definitions_small.json"))

        # The agent issues one request per initial cache; consume them all.
        initial_topology_request = self.server.frames_queue.get()
        initial_metadata_request = self.server.frames_queue.get()
        initial_configs_request = self.server.frames_queue.get()
        initial_host_level_params_request = self.server.frames_queue.get()
        initial_alert_definitions_request = self.server.frames_queue.get()

        # Busy-wait until the agent reports itself registered.
        while not self.initializer_module.is_registered:
            time.sleep(0.1)

        # Exercise the three mutation paths against the live cache.
        for event_file in ("alert_definitions_add.json",
                           "alert_definitions_edit.json",
                           "alert_definitions_delete.json"):
            push({'destination': '/user/alert_definitions'},
                 self.get_json(event_file))

        def is_json_equal():
            # Compare the (immutable) cache against the golden expectation.
            self.assertEquals(
                Utils.get_mutable_copy(
                    self.initializer_module.alert_definitions_cache),
                self.get_dict_from_file("alert_definition_expected.json"))

        self.assert_with_retries(is_json_equal, tries=80, try_sleep=0.1)

        self.initializer_module.stop_event.set()

        # Final heartbeat response lets the thread observe the stop event.
        reply('6', json.dumps({'id': '1'}))

        heartbeat_thread.join()
Example #27
0
class TestAgentStompResponses(BaseStompServerTestCase):
    def setUp(self):
        """Reset on-disk agent state before each test."""
        # Drop any cluster caches left behind by a previous run.
        stale_files = [
            '/tmp/cluster_cache/configurations.json',
            '/tmp/cluster_cache/metadata.json',
            '/tmp/cluster_cache/topology.json',
            '/tmp/host_level_params.json',
            '/tmp/cluster_cache/alert_definitions.json',
        ]
        self.remove_files(stale_files)

        # The agent expects its home directory with a version marker present.
        agent_dir = "/tmp/ambari-agent"
        if not os.path.exists(agent_dir):
            os.mkdir(agent_dir)
        with open(agent_dir + "/version", "w") as fp:
            fp.write("2.5.0.0")

        return super(TestAgentStompResponses, self).setUp()

    @patch.object(CustomServiceOrchestrator, "runCommand")
    def test_mock_server_can_start(self, runCommand_mock):
        """End-to-end agent lifecycle against the mock STOMP server.

        Phase 1: register, seed caches, run execution commands (all mocked to
        fail with exitcode 1) and assert the status frames the agent emits.
        Phase 2: re-register a fresh InitializerModule with empty cache
        responses to exercise the reconnect path, then shut everything down.
        """
        # Every orchestrated command "fails" (exitcode 1), so the agent
        # produces both IN_PROGRESS and FAILED status reports per command.
        runCommand_mock.return_value = {
            'stdout': '...',
            'stderr': '...',
            'structuredOut': '{}',
            'exitcode': 1
        }

        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        action_queue = self.initializer_module.action_queue
        action_queue.start()
        self.initializer_module.alert_scheduler_handler.start()

        component_status_executor = ComponentStatusExecutor(
            self.initializer_module)
        component_status_executor.start()

        # Drain the agent's connect / subscribe / registration frames; the
        # queue yields frames in the order the agent sent them.
        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # server sends registration response
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '0'
                  },
                  body=self.get_json("registration_response.json"))
        self.server.topic_manager.send(f)

        # response to /initial_topology
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '1'
                  },
                  body=self.get_json("topology_create.json"))
        self.server.topic_manager.send(f)

        # Initial metadata cache payload.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '2'
                  },
                  body=self.get_json("metadata_after_registration.json"))
        self.server.topic_manager.send(f)

        # Initial configurations cache payload.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '3'
                  },
                  body=self.get_json("configurations_update.json"))
        self.server.topic_manager.send(f)

        # Initial host-level parameters payload.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '4'
                  },
                  body=self.get_json("host_level_params.json"))
        self.server.topic_manager.send(f)

        # Initial alert definitions payload.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '5'
                  },
                  body=self.get_json("alert_definitions.json"))
        self.server.topic_manager.send(f)

        # The agent issues one request per initial cache; consume them all.
        initial_topology_request = self.server.frames_queue.get()
        initial_metadata_request = self.server.frames_queue.get()
        initial_configs_request = self.server.frames_queue.get()
        initial_host_level_params_request = self.server.frames_queue.get()
        initial_alert_definitions_request = self.server.frames_queue.get()

        # Busy-wait until the agent reports itself registered.
        while not self.initializer_module.is_registered:
            time.sleep(0.1)

        command_status_reporter = CommandStatusReporter(
            self.initializer_module)
        command_status_reporter.start()

        host_status_reporter = HostStatusReporter(self.initializer_module)
        host_status_reporter.start()

        # Dispatch the execution commands; runCommand is mocked above.
        f = Frame(frames.MESSAGE,
                  headers={'destination': '/user/commands'},
                  body=self.get_json("execution_commands.json"))
        self.server.topic_manager.send(f)

        # Consume the agent's subscriptions, then the per-command status
        # reports in their emission order: DATANODE install, ZOOKEEPER
        # install, action command, DATANODE recovery — each reported first as
        # IN_PROGRESS and then (because exitcode is 1) as FAILED.
        commands_subscribe_frame = self.server.frames_queue.get()
        configurations_subscribe_frame = self.server.frames_queue.get()
        metadata_subscribe_frame = self.server.frames_queue.get()
        topologies_subscribe_frame = self.server.frames_queue.get()
        host_level_params_subscribe_frame = self.server.frames_queue.get()
        alert_definitions_subscribe_frame = self.server.frames_queue.get()
        heartbeat_frame = self.server.frames_queue.get()
        dn_install_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        dn_install_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        zk_install_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        zk_install_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        action_status_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        action_status_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        dn_recovery_in_progress_frame = json.loads(
            self.server.frames_queue.get().body)
        dn_recovery_failed_frame = json.loads(
            self.server.frames_queue.get().body)
        host_status_report = json.loads(self.server.frames_queue.get().body)

        self.initializer_module.stop_event.set()

        # Final heartbeat response so the threads observe the stop event.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '6'
                  },
                  body=json.dumps({'id': '1'}))
        self.server.topic_manager.send(f)

        command_status_reporter.join()
        heartbeat_thread.join()
        component_status_executor.join()
        host_status_reporter.join()
        action_queue.join()

        # Sanity-check the cached state and the reported command statuses.
        self.assertTrue('mounts' in host_status_report)
        self.assertEquals(
            self.initializer_module.topology_cache['0']['hosts'][0]
            ['hostName'], 'c6401.ambari.apache.org')
        self.assertEquals(
            self.initializer_module.metadata_cache['0']
            ['status_commands_to_run'], ('STATUS', ))
        self.assertEquals(
            self.initializer_module.configurations_cache['0']['configurations']
            ['zoo.cfg']['clientPort'], '2181')
        self.assertEquals(
            dn_install_in_progress_frame['clusters']['0'][0]['roleCommand'],
            'INSTALL')
        self.assertEquals(
            dn_install_in_progress_frame['clusters']['0'][0]['role'],
            'DATANODE')
        self.assertEquals(
            dn_install_in_progress_frame['clusters']['0'][0]['status'],
            'IN_PROGRESS')
        self.assertEquals(
            dn_install_failed_frame['clusters']['0'][0]['status'], 'FAILED')
        self.assertEquals(
            dn_recovery_in_progress_frame['clusters']['0'][0]['roleCommand'],
            'INSTALL')
        self.assertEquals(
            dn_recovery_in_progress_frame['clusters']['0'][0]['role'],
            'DATANODE')
        self.assertEquals(
            dn_recovery_in_progress_frame['clusters']['0'][0]['status'],
            'IN_PROGRESS')

        #============================================================================================
        # Phase 2: fresh InitializerModule re-registers against the same
        # server; all cache responses are empty to exercise the reconnect path.
        #============================================================================================

        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        self.server.frames_queue.queue.clear()

        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        action_queue = self.initializer_module.action_queue
        action_queue.start()
        self.initializer_module.alert_scheduler_handler.start()

        component_status_executor = ComponentStatusExecutor(
            self.initializer_module)
        component_status_executor.start()

        command_status_reporter = CommandStatusReporter(
            self.initializer_module)
        command_status_reporter.start()

        host_status_reporter = HostStatusReporter(self.initializer_module)
        host_status_reporter.start()

        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # server sends registration response
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '0'
                  },
                  body=self.get_json("registration_response.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '1'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '2'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '3'
                  },
                  body='{"timestamp":1510577217}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '4'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '5'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        commands_subscribe_frame = self.server.frames_queue.get()
        configurations_subscribe_frame = self.server.frames_queue.get()
        metadata_subscribe_frame = self.server.frames_queue.get()
        topologies_subscribe_frame = self.server.frames_queue.get()
        heartbeat_frame = self.server.frames_queue.get()
        status_reports_frame = self.server.frames_queue.get()

        self.initializer_module.stop_event.set()

        # Final heartbeat response for the second session's shutdown.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '6'
                  },
                  body=json.dumps({'id': '1'}))
        self.server.topic_manager.send(f)

        heartbeat_thread.join()
        component_status_executor.join()
        command_status_reporter.join()
        host_status_reporter.join()
        action_queue.join()

    def test_topology_update_and_delete(self):
        """Verify the agent's topology cache after a series of update events.

        Registers the agent, seeds the topology cache, pushes add/delete
        events for components, hosts and a whole cluster, and compares the
        resulting cache against a golden file.
        """
        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        # Drain the agent's connect / subscribe / registration frames.
        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # server sends registration response
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '0'
                  },
                  body=self.get_json("registration_response.json"))
        self.server.topic_manager.send(f)

        # response to /initial_topology
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '1'
                  },
                  body=self.get_json("topology_create.json"))
        self.server.topic_manager.send(f)

        # Remaining initial caches are empty for this scenario.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '2'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '3'
                  },
                  body='{"timestamp":1510577217}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '4'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '5'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        # The agent issues one request per initial cache; consume them all.
        initial_topology_request = self.server.frames_queue.get()
        initial_metadata_request = self.server.frames_queue.get()
        initial_configs_request = self.server.frames_queue.get()
        initial_host_level_params_request = self.server.frames_queue.get()
        initial_alert_definitions_request = self.server.frames_queue.get()

        # Busy-wait until the agent reports itself registered.
        while not self.initializer_module.is_registered:
            time.sleep(0.1)

        # Exercise every topology mutation path in sequence.
        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_add_component.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_add_component_host.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_add_host.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_delete_host.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_delete_component.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_delete_component_host.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/events/topologies'},
                  body=self.get_json("topology_delete_cluster.json"))
        self.server.topic_manager.send(f)

        def is_json_equal():
            # Retried below: events are applied asynchronously, so the cache
            # may lag the sends.
            #json_topology = json.dumps(self.initializer_module.topology_cache, indent=2, sort_keys=True)
            #json_excepted_lopology = json.dumps(self.get_dict_from_file("topology_cache_expected.json"), indent=2, sort_keys=True)
            #print json_topology
            #print json_excepted_lopology
            self.assertEquals(
                Utils.get_mutable_copy(self.initializer_module.topology_cache),
                self.get_dict_from_file("topology_cache_expected.json"))

        self.assert_with_retries(is_json_equal, tries=80, try_sleep=0.1)

        self.initializer_module.stop_event.set()

        # Final heartbeat response lets the thread observe the stop event.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '6'
                  },
                  body=json.dumps({'id': '1'}))
        self.server.topic_manager.send(f)

        heartbeat_thread.join()

    def test_alert_definitions_update_and_delete(self):
        """Verify the alert-definition cache after add/edit/delete events.

        Registers the agent, seeds a small set of alert definitions, pushes
        add/edit/delete events and compares the resulting cache against a
        golden file.
        """
        self.initializer_module = InitializerModule()
        self.initializer_module.init()

        heartbeat_thread = HeartbeatThread.HeartbeatThread(
            self.initializer_module)
        heartbeat_thread.start()

        # Drain the agent's connect / subscribe / registration frames.
        connect_frame = self.server.frames_queue.get()
        users_subscribe_frame = self.server.frames_queue.get()
        registration_frame = self.server.frames_queue.get()

        # server sends registration response
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '0'
                  },
                  body=self.get_json("registration_response.json"))
        self.server.topic_manager.send(f)

        # response to /initial_topology
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '1'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        # Empty metadata / host-level-params caches for this scenario.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '2'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '3'
                  },
                  body='{"timestamp":1510577217}')
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '4'
                  },
                  body='{}')
        self.server.topic_manager.send(f)

        # Seed the alert-definition cache with the starting definitions.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '5'
                  },
                  body=self.get_json("alert_definitions_small.json"))
        self.server.topic_manager.send(f)

        # The agent issues one request per initial cache; consume them all.
        initial_topology_request = self.server.frames_queue.get()
        initial_metadata_request = self.server.frames_queue.get()
        initial_configs_request = self.server.frames_queue.get()
        initial_host_level_params_request = self.server.frames_queue.get()
        initial_alert_definitions_request = self.server.frames_queue.get()

        # Busy-wait until the agent reports itself registered.
        while not self.initializer_module.is_registered:
            time.sleep(0.1)

        # Exercise the add, edit and delete mutation paths in sequence.
        f = Frame(frames.MESSAGE,
                  headers={'destination': '/user/alert_definitions'},
                  body=self.get_json("alert_definitions_add.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/user/alert_definitions'},
                  body=self.get_json("alert_definitions_edit.json"))
        self.server.topic_manager.send(f)

        f = Frame(frames.MESSAGE,
                  headers={'destination': '/user/alert_definitions'},
                  body=self.get_json("alert_definitions_delete.json"))
        self.server.topic_manager.send(f)

        def is_json_equal():
            # Retried below: events are applied asynchronously, so the cache
            # may lag the sends.
            #json_alert_definitions = json.dumps(self.initializer_module.alert_definitions_cache, indent=2, sort_keys=True)
            #json_excepted_definitions = json.dumps(self.get_dict_from_file("alert_definition_expected.json"), indent=2, sort_keys=True)
            #print json_definitions
            #print json_excepted_definitions
            self.assertEquals(
                Utils.get_mutable_copy(
                    self.initializer_module.alert_definitions_cache),
                self.get_dict_from_file("alert_definition_expected.json"))

        self.assert_with_retries(is_json_equal, tries=80, try_sleep=0.1)

        self.initializer_module.stop_event.set()

        # Final heartbeat response lets the thread observe the stop event.
        f = Frame(frames.MESSAGE,
                  headers={
                      'destination': '/user/',
                      'correlationId': '6'
                  },
                  body=json.dumps({'id': '1'}))
        self.server.topic_manager.send(f)

        heartbeat_thread.join()
    def test_public_fqdn(self):
        """ConfigurationBuilder should expose the agent host's public FQDN."""
        init_module = InitializerModule()
        builder = ConfigurationBuilder(init_module)

        self.assertEqual('c6401.ambari.apache.org', builder.public_fqdn)
Example #29
0
    def test_cancel_command(
            self, FileCache_mock, run_file_mock, dump_command_to_json_mock,
            get_hook_base_dir_mock, get_service_base_dir_mock,
            get_host_scripts_base_dir_mock, resolve_hook_script_path_mock,
            resolve_script_path_mock, kill_process_with_children_mock,
            get_configuration_mock):
        """Cancelling an in-flight command kills its process tree, appends an
        abort message to stdout/stderr, and removes the task from
        ``commands_in_progress``.

        The command is started on a worker thread via ``runCommand``; the
        mocked ``run_file`` sleeps long enough for the main thread to issue
        ``cancel_command`` mid-execution.
        """
        FileCache_mock.return_value = None
        command = {
            'role': 'REGION_SERVER',
            'clusterLevelParams': {
                'stack_name': 'HDP',
                'stack_version': '2.0.7'
            },
            'ambariLevelParams': {
                'jdk_location': 'some_location'
            },
            'commandParams': {
                'script_type': 'PYTHON',
                'script': 'scripts/hbase_regionserver.py',
                'command_timeout': '600',
                'service_package_folder': 'HBASE'
            },
            'taskId': '3',
            'roleCommand': 'INSTALL',
            'clusterId': '-1'
        }
        get_configuration_mock.return_value = command

        get_host_scripts_base_dir_mock.return_value = "/host_scripts"
        get_service_base_dir_mock.return_value = "/basedir/"
        resolve_script_path_mock.return_value = "/basedir/scriptpath"
        resolve_hook_script_path_mock.return_value = \
          ('/hooks_dir/prefix-command/scripts/hook.py',
           '/hooks_dir/prefix-command')
        initializer_module = InitializerModule()
        initializer_module.init()
        orchestrator = CustomServiceOrchestrator(initializer_module)
        unix_process_id = 111
        # Pre-register the task as running so cancel_command has a pid to kill.
        orchestrator.commands_in_progress = {
            command['taskId']: unix_process_id
        }
        get_hook_base_dir_mock.return_value = "/hooks/"
        run_file_mock_return_value = {
            'stdout': 'killed',
            'stderr': 'killed',
            'exitcode': 1,
        }

        def side_effect(*args, **kwargs):
            # Keep the command "running" long enough for the cancel below.
            time.sleep(0.2)
            return run_file_mock_return_value

        run_file_mock.side_effect = side_effect

        _, out = tempfile.mkstemp()
        _, err = tempfile.mkstemp()
        pool = ThreadPool(processes=1)
        async_result = pool.apply_async(orchestrator.runCommand,
                                        (command, out, err))

        time.sleep(0.1)
        orchestrator.cancel_command(command['taskId'], 'reason')

        ret = async_result.get()

        self.assertEqual(ret['exitcode'], 1)
        # assertEquals was removed in Python 3.12; use assertEqual.
        self.assertEqual(ret['stdout'],
                         'killed\nCommand aborted. Reason: \'reason\'')
        self.assertEqual(ret['stderr'],
                         'killed\nCommand aborted. Reason: \'reason\'')

        self.assertTrue(kill_process_with_children_mock.called)
        self.assertFalse(
            command['taskId'] in orchestrator.commands_in_progress.keys())
        self.assertTrue(os.path.exists(out))
        self.assertTrue(os.path.exists(err))
        # Best-effort cleanup of the temp files; only file-system errors
        # are expected here, so don't swallow unrelated exceptions.
        try:
            os.remove(out)
            os.remove(err)
        except OSError:
            pass
Example #30
0
    def test_cancel_backgound_command(self, resolve_hook_script_path_mock,
                                      resolve_script_path_mock, FileCache_mock,
                                      kill_process_with_children_mock,
                                      get_py_executor_mock,
                                      get_configuration_mock):
        FileCache_mock.return_value = None
        FileCache_mock.cache_dir = MagicMock()
        resolve_hook_script_path_mock.return_value = None
        dummy_controller = MagicMock()
        cfg = AmbariConfig()
        cfg.set('agent', 'tolerate_download_failures', 'true')
        cfg.set('agent', 'prefix', '.')
        cfg.set('agent', 'cache_dir', 'background_tasks')

        initializer_module = InitializerModule()
        initializer_module.init()

        actionQueue = ActionQueue(initializer_module)
        orchestrator = CustomServiceOrchestrator(initializer_module)

        initializer_module.actionQueue = actionQueue

        orchestrator.file_cache = MagicMock()

        def f(a, b):
            return ""

        orchestrator.file_cache.get_service_base_dir = f
        actionQueue.customServiceOrchestrator = orchestrator

        import TestActionQueue
        import copy

        pyex = PythonExecutor(actionQueue.customServiceOrchestrator.tmp_dir,
                              actionQueue.customServiceOrchestrator.config)
        TestActionQueue.patch_output_file(pyex)
        pyex.prepare_process_result = MagicMock()
        get_py_executor_mock.return_value = pyex
        orchestrator.dump_command_to_json = MagicMock()

        lock = threading.RLock()
        complete_done = threading.Condition(lock)

        complete_was_called = {}

        def command_complete_w(process_condenced_result, handle):
            with lock:
                complete_was_called['visited'] = ''
                complete_done.wait(3)

        actionQueue.on_background_command_complete_callback = TestActionQueue.wraped(
            actionQueue.on_background_command_complete_callback,
            command_complete_w, None)
        execute_command = copy.deepcopy(
            TestActionQueue.TestActionQueue.background_command)
        get_configuration_mock.return_value = execute_command

        actionQueue.put([execute_command])
        actionQueue.processBackgroundQueueSafeEmpty()

        time.sleep(.1)

        orchestrator.cancel_command(19, 'reason')
        self.assertTrue(kill_process_with_children_mock.called)
        kill_process_with_children_mock.assert_called_with(33)

        with lock:
            complete_done.notifyAll()

        with lock:
            self.assertTrue(complete_was_called.has_key('visited'))

        time.sleep(.1)

        runningCommand = actionQueue.commandStatuses.get_command_status(19)
        self.assertTrue(runningCommand is not None)
        self.assertEqual(runningCommand['status'], ActionQueue.FAILED_STATUS)