Example 1
    def test_possible_configs(self):
        res = w_f.get_possible_job_config(edp.JOB_TYPE_MAPREDUCE)
        sample_config_property = {
            'name': 'mapred.map.tasks',
            'value': '2',
            'description': 'The default number of map tasks per job.'
            'Ignored when mapred.job.tracker is "local".  '
        }
        self.assertIn(sample_config_property, res['job_config']["configs"])

        res = w_f.get_possible_job_config(edp.JOB_TYPE_HIVE)
        sample_config_property = {
            "description": "The serde used by FetchTask to serialize the "
                           "fetch output.",
            "name": "hive.fetch.output.serde",
            "value": "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"
        }
        self.assertIn(sample_config_property, res["job_config"]['configs'])
        res = w_f.get_possible_job_config("impossible_config")
        self.assertIsNone(res)
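For orientation, the nesting these assertions walk through can be read off directly: get_possible_job_config returns a dict whose 'job_config' entry holds a 'configs' list of name/value/description dicts, and it returns None for an unrecognized job type. The sketch below merely restates that shape as inferred from the test above; it is not taken from the Sahara sources, and the placeholder strings are stand-ins.

    # Result shape inferred from the assertions in Example 1; real results
    # contain many entries, and 'job_config' may carry other keys as well.
    inferred_result_shape = {
        'job_config': {
            'configs': [
                # One hint per configurable property.
                {'name': '<property name>',
                 'value': '<default value>',
                 'description': '<description text>'},
            ],
        },
    }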
Example 2
    def test_possible_configs(self):
        res = w_f.get_possible_job_config(edp.JOB_TYPE_MAPREDUCE)
        sample_config_property = {
            'name': 'mapreduce.jobtracker.expire.trackers.interval',
            'value': '600000',
            'description': "Expert: The time-interval, in miliseconds, after "
                           "whicha tasktracker is declared 'lost' if it "
                           "doesn't send heartbeats."
        }
        self.assertIn(sample_config_property, res['job_config']["configs"])

        res = w_f.get_possible_job_config(edp.JOB_TYPE_HIVE)
        sample_config_property = {
            "description": "The serde used by FetchTask to serialize the "
                           "fetch output.",
            "name": "hive.fetch.output.serde",
            "value": "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"
        }
        self.assertIn(sample_config_property, res["job_config"]['configs'])
        res = w_f.get_possible_job_config("impossible_config")
        self.assertIsNone(res)
Example 3
    def test_possible_configs(self):
        res = w_f.get_possible_job_config(edp.JOB_TYPE_MAPREDUCE)
        sample_config_property = {
            'name':
            'mapreduce.jobtracker.expire.trackers.interval',
            'value':
            '600000',
            'description':
            "Expert: The time-interval, in miliseconds, after "
            "whicha tasktracker is declared 'lost' if it "
            "doesn't send heartbeats."
        }
        self.assertIn(sample_config_property, res['job_config']["configs"])

        res = w_f.get_possible_job_config(edp.JOB_TYPE_HIVE)
        sample_config_property = {
            "description": "The serde used by FetchTask to serialize the "
            "fetch output.",
            "name": "hive.fetch.output.serde",
            "value": "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"
        }
        self.assertIn(sample_config_property, res["job_config"]['configs'])
        res = w_f.get_possible_job_config("impossible_config")
        self.assertIsNone(res)
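Examples 1 through 3 repeat the same pattern: fetch the hints for a job type and assert that one known sample property appears in the 'configs' list. A hypothetical helper along these lines (the helper and its name are not part of the original tests) makes that pattern explicit:

    def assert_config_hint_present(self, job_type, expected_property):
        # Fetch the config hints for the job type and check that the
        # expected sample property is among them.
        res = w_f.get_possible_job_config(job_type)
        self.assertIn(expected_property, res['job_config']['configs'])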
Example 4
def get_possible_job_config(job_type):
    return workflow_factory.get_possible_job_config(job_type)
Example 5
def get_possible_job_config(job_type):
    # TODO(tmckay): when config hints are fixed to be relative
    # to the plugin, this may move into the job engines as
    # an abstract method
    return workflow_factory.get_possible_job_config(job_type)
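Finally, a rough usage sketch for callers of the thin wrapper shown in Examples 4 and 5. The import paths are assumptions (they vary between Sahara releases), and list_default_configs is a made-up helper; the result structure and the None return for unknown job types follow from the tests above.

# A minimal sketch, assuming Sahara-style modules; the exact import paths
# differ between releases and are an assumption here.
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.utils import edp


def list_default_configs(job_type):
    """Map config-hint names to their default values for a job type."""
    res = workflow_factory.get_possible_job_config(job_type)
    if res is None:
        # Unsupported job types yield None, as the tests above assert.
        return {}
    return {c['name']: c['value'] for c in res['job_config']['configs']}


# Hypothetical usage:
# defaults = list_default_configs(edp.JOB_TYPE_MAPREDUCE)
# print(defaults.get('mapred.map.tasks'))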