def setUp(self):
    """Create one node-group fixture per vanilla (v1) process type."""
    super(TestUtils, self).setUp()
    self.plugin = p.VanillaProvider()
    self.ng_manager = tu.make_ng_dict(
        'mng', 'f1', ['manager'], 1,
        [tu.make_inst_dict('mng1', 'manager')])
    self.ng_namenode = tu.make_ng_dict(
        'nn', 'f1', ['namenode'], 1,
        [tu.make_inst_dict('nn1', 'namenode')])
    self.ng_jobtracker = tu.make_ng_dict(
        'jt', 'f1', ['jobtracker'], 1,
        [tu.make_inst_dict('jt1', 'jobtracker')])
    self.ng_datanode = tu.make_ng_dict(
        'dn', 'f1', ['datanode'], 2,
        [tu.make_inst_dict('dn1', 'datanode-1'),
         tu.make_inst_dict('dn2', 'datanode-2')])
    self.ng_tasktracker = tu.make_ng_dict(
        'tt', 'f1', ['tasktracker'], 2,
        [tu.make_inst_dict('tt1', 'tasktracker-1'),
         tu.make_inst_dict('tt2', 'tasktracker-2')])
    self.ng_oozie = tu.make_ng_dict(
        'ooz1', 'f1', ['oozie'], 1,
        [tu.make_inst_dict('ooz1', 'oozie')])
    self.ng_hiveserver = tu.make_ng_dict(
        'hs', 'f1', ['hiveserver'], 1,
        [tu.make_inst_dict('hs1', 'hiveserver')])
    self.ng_secondarynamenode = tu.make_ng_dict(
        'snn', 'f1', ['secondarynamenode'], 1,
        [tu.make_inst_dict('snn1', 'secondarynamenode')])
def setUp(self):
    """Create one node-group fixture per vanilla (v2/YARN) process type."""
    super(TestUtils, self).setUp()
    self.plugin = p.VanillaProvider()
    self.ng_manager = tu.make_ng_dict(
        'mng', 'f1', ['manager'], 1,
        [tu.make_inst_dict('mng1', 'manager')])
    self.ng_namenode = tu.make_ng_dict(
        'nn', 'f1', ['namenode'], 1,
        [tu.make_inst_dict('nn1', 'namenode')])
    self.ng_resourcemanager = tu.make_ng_dict(
        'jt', 'f1', ['resourcemanager'], 1,
        [tu.make_inst_dict('jt1', 'resourcemanager')])
    self.ng_datanode = tu.make_ng_dict(
        'dn', 'f1', ['datanode'], 2,
        [tu.make_inst_dict('dn1', 'datanode-1'),
         tu.make_inst_dict('dn2', 'datanode-2')])
    self.ng_nodemanager = tu.make_ng_dict(
        'tt', 'f1', ['nodemanager'], 2,
        [tu.make_inst_dict('tt1', 'nodemanager-1'),
         tu.make_inst_dict('tt2', 'nodemanager-2')])
    self.ng_oozie = tu.make_ng_dict(
        'ooz1', 'f1', ['oozie'], 1,
        [tu.make_inst_dict('ooz1', 'oozie')])
    self.ng_hiveserver = tu.make_ng_dict(
        'hs', 'f1', ['hiveserver'], 1,
        [tu.make_inst_dict('hs1', 'hiveserver')])
    self.ng_secondarynamenode = tu.make_ng_dict(
        'snn', 'f1', ['secondarynamenode'], 1,
        [tu.make_inst_dict('snn1', 'secondarynamenode')])
def get_fake_cluster(**kwargs):
    """Build a fake CDH 5 cluster: one manager, one master, three workers.

    Extra keyword arguments are forwarded to tu.create_cluster.
    """
    mng = tu.make_inst_dict('id1', 'manager_inst', management_ip='1.2.3.4')
    mng_ng = tu.make_ng_dict('manager_ng', 1, ['MANAGER'], 1, [mng])

    mst = tu.make_inst_dict('id2', 'master_inst', management_ip='1.2.3.5')
    master_processes = ['NAMENODE', 'SECONDARYNAMENODE', 'RESOURCEMANAGER',
                        'JOBHISTORY', 'OOZIE_SERVER']
    mst_ng = tu.make_ng_dict('master_ng', 1, master_processes, 1, [mst])

    wkrs = _get_workers()
    wkrs_ng = tu.make_ng_dict('worker_ng', 1, ['DATANODE', 'NODEMANAGER'],
                              len(wkrs), wkrs)

    return tu.create_cluster('test_cluster', 1, 'cdh', '5',
                             [mng_ng, mst_ng, wkrs_ng], **kwargs)
def test_edp_main_class_spark(self, job_get, cluster_get):
    """Spark jobs must declare edp.java.main_class in their configs."""
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_SPARK, interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    cluster_get.return_value = tu.create_cluster(
        "cluster", "tenant1", "spark", "1.0.0", [ng])

    # No main class supplied: validation must reject the request.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {"configs": {}, "params": {}, "args": []},
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_SPARK))

    # Main class supplied: validation passes.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {
                "configs": {"edp.java.main_class": "org.me.myclass"},
                "params": {},
                "args": [],
            },
        })
def _get_context(self):
    """Build a cc.Context around a one-node MapR 4.0.1.mrv1 test cluster.

    Also stashes the node group and its single instance on self for use
    by the tests.
    """
    i1 = tu.make_inst_dict('id_1', 'instance_1', '1.1.1.1')
    master_proc = [
        yarn.RESOURCE_MANAGER.ui_name,
        yarn.NODE_MANAGER.ui_name,
        yarn.HISTORY_SERVER.ui_name,
        maprfs.CLDB.ui_name,
        maprfs.FILE_SERVER.ui_name,
        oozie.OOZIE.ui_name,
        management.ZOOKEEPER.ui_name,
    ]
    master_ng = tu.make_ng_dict('master', 'large', master_proc, 1, [i1])
    cluster_configs = {
        'Service': {
            'key': 'value',
            'Service Version': '1.1',
        },
    }
    cluster = tu.create_cluster(
        name='test_cluster',
        tenant='large',
        plugin='mapr',
        version='4.0.1.mrv1',
        node_groups=[master_ng],
        cluster_configs=cluster_configs,
    )
    self.ng = cluster.node_groups[0]
    self.instance = self.ng.instances[0]
    return cc.Context(cluster, handler.VersionHandler())
def test_edp_main_class_spark(self, job_get, cluster_get):
    """Spark jobs must declare edp.java.main_class in their configs."""
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_SPARK, interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    cluster_get.return_value = tu.create_cluster(
        "cluster", "tenant1", "spark", "1.0.0", [ng])

    # Empty configs: the main class is missing, expect a validation error.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {"configs": {}, "params": {}, "args": []},
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_SPARK))

    # With the main class present the request validates cleanly.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {
                "configs": {"edp.java.main_class": "org.me.myclass"},
                "params": {},
                "args": [],
            },
        })
def test_check_edp_job_support_spark(self, get_job, get_cluster):
    """A Spark cluster supports EDP out of the box (master + slaves >= 1)."""
    # utils.start_patch would install a vanilla cluster as the default for
    # get_cluster, but this test needs a Spark cluster. Patch get_job and
    # get_cluster directly and call wrap_it() instead of
    # assert_create_object_validation(), which would call start_patch()
    # and override these settings.
    get_job.return_value = mock.Mock(type=edp.JOB_TYPE_SPARK,
                                     mains=["main"], interface=[])
    ng = tu.make_ng_dict('master', 42, [], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "spark", "2.2", [ng])

    wrap_it(data={
        "cluster_id": uuidutils.generate_uuid(),
        "job_configs": {
            "configs": {"edp.java.main_class": "org.me.class"},
        },
    })
def _get_cluster(self):
    """Build and return a one-node MapR 4.0.1.mrv1 test cluster.

    Also stashes the node group and its single instance on self for use
    by the tests.
    """
    i1 = tu.make_inst_dict('id_1', 'instance_1', '1.1.1.1')
    master_proc = [
        yarn.RESOURCE_MANAGER.ui_name,
        yarn.NODE_MANAGER.ui_name,
        yarn.HISTORY_SERVER.ui_name,
        maprfs.CLDB.ui_name,
        maprfs.FILE_SERVER.ui_name,
        oozie.OOZIE.ui_name,
        management.ZOOKEEPER.ui_name,
    ]
    master_ng = tu.make_ng_dict('master', 'large', master_proc, 1, [i1])
    cluster_configs = {
        'Service': {
            'key': 'value',
            'Service Version': '1.1',
        },
    }
    cluster = tu.create_cluster(
        name='test_cluster',
        tenant='large',
        plugin='mapr',
        version='4.0.1.mrv1',
        node_groups=[master_ng],
        cluster_configs=cluster_configs,
    )
    self.ng = cluster.node_groups[0]
    self.instance = self.ng.instances[0]
    return cluster
def test_data_sources_differ(self, get_job, get_data_source, get_cluster):
    """Input and output data sources must not point at the same URL."""
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])

    ds1_id = six.text_type(uuid.uuid4())
    ds2_id = six.text_type(uuid.uuid4())
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]

    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.6.0", [ng])

    # Distinct URLs: validation succeeds.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "args": [],
            },
        })

    # Make both sources resolve to the same URL: validation must fail.
    data_sources[ds2_id].url = "http://swift/test"
    err_msg = ("Provided input and output DataSources reference the "
               "same location: %s" % data_sources[ds2_id].url)
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "args": [],
            },
        },
        bad_req_i=(1, "INVALID_DATA", err_msg))
def _get_context(self):
    """Build a cc.Context around a one-node MapR 5.2.0.mrv2 test cluster.

    The single instance carries both a management and an internal IP, and
    the cluster configs pin an explicit Oozie version. The node group and
    instance are stashed on self for the tests.
    """
    i1 = tu.make_inst_dict('id_1', 'instance_1', MANAGEMENT_IP)
    i1['internal_ip'] = INTERNAL_IP
    master_proc = [
        yarn.RESOURCE_MANAGER.ui_name,
        yarn.NODE_MANAGER.ui_name,
        yarn.HISTORY_SERVER.ui_name,
        maprfs.CLDB.ui_name,
        maprfs.FILE_SERVER.ui_name,
        oozie.OOZIE.ui_name,
        management.ZOOKEEPER.ui_name,
    ]
    master_ng = tu.make_ng_dict('master', 'large', master_proc, 1, [i1])
    cluster_configs = {
        'Service': {
            'key': 'value',
            'Service Version': '1.1',
        },
        'Oozie': {
            'Oozie Version': '4.2.0',
        }
    }
    cluster = tu.create_cluster(
        name='test_cluster',
        tenant='large',
        plugin='mapr',
        version='5.2.0.mrv2',
        node_groups=[master_ng],
        cluster_configs=cluster_configs,
    )
    self.ng = cluster.node_groups[0]
    self.instance = self.ng.instances[0]
    return cc.Context(cluster, handler.VersionHandler())
def _get_workers():
    """Return three worker instance dicts with sequential ids, names, IPs."""
    return [
        tu.make_inst_dict('id0%d' % i, 'worker-0%d' % i,
                          management_ip='1.2.3.1%d' % i)
        for i in range(3)
    ]
def setUp(self):
    """Build a three-node-group test cluster shared by the tests."""
    super(GeneralUtilsTest, self).setUp()
    i1 = tu.make_inst_dict("i1", "master")
    i2 = tu.make_inst_dict("i2", "worker1")
    i3 = tu.make_inst_dict("i3", "worker2")
    i4 = tu.make_inst_dict("i4", "worker3")
    i5 = tu.make_inst_dict("i5", "sn")

    ng1 = tu.make_ng_dict("master", "f1", ["jt", "nn"], 1, [i1])
    ng2 = tu.make_ng_dict("workers", "f1", ["tt", "dn"], 3, [i2, i3, i4])
    ng3 = tu.make_ng_dict("sn", "f1", ["dn"], 1, [i5])

    self.c1 = tu.create_cluster("cluster1", "tenant1", "general", "2.6.0",
                                [ng1, ng2, ng3])
    self.ng1 = self.c1.node_groups[0]
    self.ng2 = self.c1.node_groups[1]
    self.ng3 = self.c1.node_groups[2]
def setUp(self):
    """Build a three-node-group test cluster shared by the tests."""
    # NOTE(review): unlike the sibling setUp implementations, this one does
    # not call super().setUp() -- confirm the base class needs no setup.
    i1 = tu.make_inst_dict('i1', 'master')
    i2 = tu.make_inst_dict('i2', 'worker1')
    i3 = tu.make_inst_dict('i3', 'worker2')
    i4 = tu.make_inst_dict('i4', 'worker3')
    i5 = tu.make_inst_dict('i5', 'sn')

    ng1 = tu.make_ng_dict("master", "f1", ["jt", "nn"], 1, [i1])
    ng2 = tu.make_ng_dict("workers", "f1", ["tt", "dn"], 3, [i2, i3, i4])
    ng3 = tu.make_ng_dict("sn", "f1", ["dn"], 1, [i5])

    self.c1 = tu.create_cluster("cluster1", "tenant1", "general", "1.2.1",
                                [ng1, ng2, ng3])
    self.ng1 = self.c1.node_groups[0]
    self.ng2 = self.c1.node_groups[1]
    self.ng3 = self.c1.node_groups[2]
def setUp(self):
    """Build a three-node-group test cluster shared by the tests."""
    super(GeneralUtilsTest, self).setUp()
    i1 = tu.make_inst_dict('i1', 'master')
    i2 = tu.make_inst_dict('i2', 'worker1')
    i3 = tu.make_inst_dict('i3', 'worker2')
    i4 = tu.make_inst_dict('i4', 'worker3')
    i5 = tu.make_inst_dict('i5', 'sn')

    ng1 = tu.make_ng_dict("master", "f1", ["jt", "nn"], 1, [i1])
    ng2 = tu.make_ng_dict("workers", "f1", ["tt", "dn"], 3, [i2, i3, i4])
    ng3 = tu.make_ng_dict("sn", "f1", ["dn"], 1, [i5])

    self.c1 = tu.create_cluster("cluster1", "tenant1", "general", "2.6.0",
                                [ng1, ng2, ng3])
    self.ng1 = self.c1.node_groups[0]
    self.ng2 = self.c1.node_groups[1]
    self.ng3 = self.c1.node_groups[2]
def test_streaming(self, get_job, get_data_source, get_cluster):
    """MapReduce.Streaming jobs must specify both mapper and reducer."""
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])

    ds1_id = uuidutils.generate_uuid()
    ds2_id = uuidutils.generate_uuid()
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]

    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.7.1", [ng])

    # No mapper/reducer configured: validation must fail.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {},
                "params": {},
                "args": [],
                "job_execution_info": {},
            },
        },
        bad_req_i=(1, "INVALID_DATA",
                   "MapReduce.Streaming job "
                   "must specify streaming mapper "
                   "and reducer"))

    # Both streaming configs present: validation succeeds.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "job_execution_info": {},
                "args": [],
            },
        })
def test_data_sources_differ(self, get_job, get_data_source, get_cluster):
    """Input and output data sources must not point at the same URL."""
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])

    ds1_id = six.text_type(uuid.uuid4())
    ds2_id = six.text_type(uuid.uuid4())
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]

    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.7.1", [ng])

    # Distinct URLs: validation succeeds.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "job_execution_info": {},
                "args": [],
            },
        })

    # Same URL for both sources: validation must fail.
    data_sources[ds2_id].url = "http://swift/test"
    err_msg = ("Provided input and output DataSources reference the "
               "same location: %s" % data_sources[ds2_id].url)
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "job_execution_info": {},
                "args": [],
            },
        },
        bad_req_i=(1, "INVALID_DATA", err_msg))
def make_ng_dict_with_inst(counter, name, flavor, processes, count,
                           instances=None, volumes_size=None,
                           node_configs=None, **kwargs):
    """Build a node-group dict, generating fake instances when none given.

    Each generated instance draws the next value from *counter* to form
    its id, name, and management IP.
    """
    if not instances:
        instances = []
        for _ in range(count):
            n = six.next(counter)
            instances.append(tu.make_inst_dict(
                "id{0}".format(n),
                "fake_inst{0}".format(n),
                management_ip='1.2.3.{0}'.format(n)))
    return tu.make_ng_dict(name, flavor, processes, count, instances,
                           volumes_size, node_configs, **kwargs)
def make_ng_dict_with_inst(counter, name, flavor, processes, count,
                           instances=None, **kwargs):
    """Build a node-group dict, generating fake instances when none given.

    Each generated instance draws the next value from *counter* to form
    its id, name, and management IP.
    """
    if not instances:
        instances = []
        for _ in range(count):
            n = six.next(counter)
            instances.append(tu.make_inst_dict(
                "id{0}".format(n),
                "fake_inst{0}".format(n),
                management_ip='1.2.3.{0}'.format(n)))
    return tu.make_ng_dict(name, flavor, processes, count, instances,
                           **kwargs)
def test_edp_main_class_java(self, job_get, cluster_get):
    """Java jobs must declare a non-empty edp.java.main_class."""
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_JAVA, interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode', 'oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    cluster_get.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.7.1", [ng])

    # Missing main class: rejected.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {},
                "params": {},
                "args": [],
                "job_execution_info": {},
            },
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))

    # Empty-string main class: also rejected.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {"edp.java.main_class": ""},
                "params": {},
                "args": [],
                "job_execution_info": {},
            },
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))

    # Proper main class: accepted.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {"edp.java.main_class": "org.me.myclass"},
                "params": {},
                "job_execution_info": {},
                "args": [],
            },
        })
def test_edp_main_class_java(self, job_get, cluster_get):
    """Java jobs must declare edp.java.main_class in their configs."""
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_JAVA, interface=[])
    ng = tu.make_ng_dict("master", 42, ["namenode", "oozie"], 1,
                         instances=[tu.make_inst_dict("id", "name")])
    cluster_get.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.6.0", [ng])

    # Missing main class: rejected.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {"configs": {}, "params": {}, "args": []},
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))

    # Proper main class: accepted.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {
                "configs": {"edp.java.main_class": "org.me.myclass"},
                "params": {},
                "args": [],
            },
        })
def test_streaming(self, get_job, get_data_source, get_cluster):
    """MapReduce.Streaming jobs must specify both mapper and reducer."""
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])

    ds1_id = uuidutils.generate_uuid()
    ds2_id = uuidutils.generate_uuid()
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]

    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "fake", "0.1", [ng])

    # No mapper/reducer configured: validation must fail.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {},
                "params": {},
                "args": [],
                "job_execution_info": {},
            },
        },
        bad_req_i=(1, "INVALID_DATA",
                   "MapReduce.Streaming job "
                   "must specify streaming mapper "
                   "and reducer"))

    # Both streaming configs present: validation succeeds.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "job_execution_info": {},
                "args": [],
            },
        })
def test_check_edp_job_support(self, get_job, get_cluster):
    """EDP requires an oozie process; a cluster with one passes the check."""
    get_job.return_value = FakeJob()

    # Default patched cluster has no oozie: expect a component-count error.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": six.text_type(uuid.uuid4()),
            "output_id": six.text_type(uuid.uuid4()),
        },
        bad_req_i=(1, "INVALID_COMPONENT_COUNT",
                   "Hadoop cluster should contain 1 oozie component(s). "
                   "Actual oozie count is 0"))

    # A cluster that runs oozie passes the support check.
    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "1.2.1", [ng])
    validation_base.check_edp_job_support('some_id')
def test_check_edp_no_oozie(self, get_job, get_cluster):
    """A cluster without an oozie process must fail EDP validation."""
    get_job.return_value = mock.Mock(type=edp.JOB_TYPE_PIG, libs=[])
    ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "1.2.1", [ng])

    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": six.text_type(uuid.uuid4()),
            "output_id": six.text_type(uuid.uuid4()),
        },
        bad_req_i=(1, "INVALID_COMPONENT_COUNT",
                   "Hadoop cluster should contain 1 oozie component(s). "
                   "Actual oozie count is 0"))
def test_check_edp_no_oozie(self, get_job, get_cluster):
    """A cluster without an oozie process must fail EDP validation."""
    get_job.return_value = mock.Mock(type=edp.JOB_TYPE_PIG, libs=[],
                                     interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.7.1", [ng])

    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": six.text_type(uuid.uuid4()),
            "output_id": six.text_type(uuid.uuid4()),
        },
        bad_req_i=(1, "INVALID_COMPONENT_COUNT",
                   "Hadoop cluster should contain 1 oozie component(s). "
                   "Actual oozie count is 0"))
def test_edp_main_class_java(self, job_get, cluster_get):
    """Java jobs must declare a non-empty edp.java.main_class."""
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_JAVA, interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode', 'oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    cluster_get.return_value = tu.create_cluster(
        "cluster", "tenant1", "fake", "0.1", [ng])

    # Missing main class: rejected.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {},
                "params": {},
                "args": [],
                "job_execution_info": {},
            },
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))

    # Empty-string main class: also rejected.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {"edp.java.main_class": ""},
                "params": {},
                "args": [],
                "job_execution_info": {},
            },
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))

    # Proper main class: accepted.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {"edp.java.main_class": "org.me.myclass"},
                "params": {},
                "job_execution_info": {},
                "args": [],
            },
        })
def test_check_edp_job_support(self, get_job, get_cluster):
    """EDP requires an oozie process; a cluster with one passes the check."""
    get_job.return_value = FakeJob()

    # Default patched cluster has no oozie: expect a component-count error.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": six.text_type(uuid.uuid4()),
            "output_id": six.text_type(uuid.uuid4()),
        },
        bad_req_i=(1, "INVALID_COMPONENT_COUNT",
                   "Hadoop cluster should contain 1 oozie components. "
                   "Actual oozie count is 0"))

    # A cluster that runs oozie passes the support check.
    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "1.2.1", [ng])
    validation_base.check_edp_job_support('some_id')
def test_check_edp_job_support_spark(self, get_job, get_cluster):
    """A Spark cluster supports EDP out of the box (master + slaves >= 1)."""
    # utils.start_patch would install a vanilla cluster as the default for
    # get_cluster, but this test needs a Spark cluster. Patch get_job and
    # get_cluster directly and call wrap_it() instead of
    # assert_create_object_validation(), which would call start_patch()
    # and override these settings.
    get_job.return_value = mock.Mock(type=edp.JOB_TYPE_SPARK,
                                     mains=["main"], interface=[])
    ng = tu.make_ng_dict('master', 42, [], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "spark", "1.3.1", [ng])

    wrap_it(data={
        "cluster_id": six.text_type(uuid.uuid4()),
        "job_configs": {
            "configs": {"edp.java.main_class": "org.me.class"},
        },
    })
def test_streaming(self, get_job, get_data_source, get_cluster):
    """MapReduce.Streaming jobs must specify both mapper and reducer."""
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])

    ds1_id = six.text_type(uuid.uuid4())
    ds2_id = six.text_type(uuid.uuid4())
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]

    ng = tu.make_ng_dict("master", 42, ["oozie"], 1,
                         instances=[tu.make_inst_dict("id", "name")])
    get_cluster.return_value = tu.create_cluster(
        "cluster", "tenant1", "vanilla", "2.6.0", [ng])

    # No mapper/reducer configured: validation must fail.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {"configs": {}, "params": {}, "args": []},
        },
        bad_req_i=(1, "INVALID_DATA",
                   "MapReduce.Streaming job "
                   "must specify streaming mapper "
                   "and reducer"))

    # Both streaming configs present: validation succeeds.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc",
                },
                "params": {},
                "args": [],
            },
        })
def make_inst_dict(inst_id, inst_name, management_ip='1.2.3.4', **kwargs):
    """Delegate to testutils.make_inst_dict with a default management IP.

    NOTE(review): **kwargs are accepted but not forwarded to
    testutils.make_inst_dict -- confirm no caller relies on them.
    """
    return testutils.make_inst_dict(inst_id, inst_name, management_ip)