Example #1
    def setUp(self):
        super(TestJobManager, self).setUp()
        # Work around a pretty-printing bug in xml.dom.minidom
        p.patch_minidom_writexml()
        # Restrict the loaded plugins to the 'fake' test plugin
        self.override_config('plugins', ['fake'])
        pb.setup_plugins()
        # Validate the castellan key-manager configuration
        castellan.validate_config()
        # Register the available data source types
        ds_manager.setup_data_sources()
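The aliases used in these snippets (p, pb, castellan, ds_manager, jb_manager) are not shown with their imports. A minimal sketch of the imports they most likely correspond to in the sahara tree; the exact module paths are an assumption, not taken from these examples:

    # Assumed import aliases for the setUp() snippets on this page;
    # module paths are a best guess at the sahara source layout.
    from sahara.plugins import base as pb
    from sahara.service.castellan import config as castellan
    from sahara.service.edp.data_sources import manager as ds_manager
    from sahara.service.edp.job_binaries import manager as jb_manager
    from sahara.utils import patches as p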
Example #2
    def setUp(self):
        super(TestStorm, self).setUp()

        self.master_host = "master"
        self.master_inst = "6789"
        self.storm_topology_name = "MyJob_ed8347a9-39aa-477c-8108-066202eb6130"
        self.workflow_dir = "/wfdir"

        jb_manager.setup_job_binaries()
        ds_manager.setup_data_sources()
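The storm_topology_name fixture above follows a "<job name>_<uuid>" pattern. A hypothetical way such a name could be composed; this is illustrative, not sahara's actual code:

    # Illustrative only: build a Storm topology name from a job name and
    # a random UUID, matching the "MyJob_<uuid>" fixture above.
    import uuid

    def make_topology_name(job_name):
        return "%s_%s" % (job_name, uuid.uuid4())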
Example #3
    def setUp(self):
        super(TestSparkShellEngine, self).setUp()
        self.master_host = "master"
        self.master_port = 7077
        self.master_instance_id = "6789"
        self.spark_pid = "12345"
        self.spark_home = "/opt/spark"
        self.workflow_dir = "/wfdir"

        ds_manager.setup_data_sources()
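The master_host and master_port fixtures correspond to a Spark standalone master endpoint; 7077 is Spark's default standalone master port. Illustratively:

    # Illustrative only: the standalone Spark master URL implied by the
    # host/port fixtures above ("spark://master:7077").
    master_url = "spark://%s:%s" % ("master", 7077)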
Example #4
File: base.py Project: openstack/sahara
    def setUp(self):
        super(TestSpark, self).setUp()

        # These variables are initialized in subclasses because their
        # values depend on the plugin
        self.master_host = None
        self.engine_class = None
        self.spark_user = None
        self.spark_submit = None
        self.master = None
        self.deploy_mode = None

        self.master_port = 7077
        self.master_inst = "6789"
        self.spark_pid = "12345"
        self.spark_home = "/opt/spark"
        self.workflow_dir = "/wfdir"
        self.driver_cp = "/usr/lib/hadoop-mapreduce/hadoop-openstack.jar:"

        ds_manager.setup_data_sources()
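Per the comment in the snippet, the None attributes are filled in by plugin-specific subclasses. A hypothetical subclass sketch; the class name and values are assumptions for illustration, not taken from sahara:

    # Hypothetical plugin-specific subclass; names and values are
    # illustrative assumptions, not sahara's actual test code.
    class TestVanillaSpark(TestSpark):
        def setUp(self):
            super(TestVanillaSpark, self).setUp()
            self.master_host = "master"
            self.spark_user = "spark"
            self.spark_submit = "/opt/spark/bin/spark-submit"
            self.master = "spark://master:7077"
            self.deploy_mode = "client"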
Example #5
    def setUp(self):
        super(TestOozieEngine, self).setUp()
        pb.setup_plugins()
        jb_manager.setup_job_binaries()
        ds_manager.setup_data_sources()
Example #6
    def setUp(self):
        super(TestOozieEngine, self).setUp()
        self.override_config('plugins', ['fake'])
        pb.setup_plugins()
        jb_manager.setup_job_binaries()
        ds_manager.setup_data_sources()
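The override_config helper comes from the test base class. A plausible shape for such a helper built on oslo.config; the helper body is an assumption about sahara's base class, though set_override and clear_override are real oslo.config APIs:

    # A plausible override_config() helper; sahara's real base class may
    # differ, but CONF.set_override/clear_override are oslo.config APIs.
    from oslo_config import cfg

    CONF = cfg.CONF

    def override_config(self, name, override, group=None):
        CONF.set_override(name, override, group)
        self.addCleanup(CONF.clear_override, name, group)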