Example #1
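These are method-level excerpts from a Document1-to-Document2 converter test class, so they are not runnable on their own. A minimal sketch of the context they assume (module paths follow the Hue code base, but exact locations, the nose helpers, and the add_node utility are assumptions that may vary by version):

from datetime import datetime

from nose.tools import assert_equal, assert_false

from desktop.conf import IS_HUE_4
from desktop.converters import DocumentConverter
from desktop.models import Document, Document2
from oozie.models import Link, Workflow
from oozie.tests import add_node  # assumed location of the test helper

Each method also expects a test-case class that provides self.user (a Django User) and self.client (a Django test client).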
  def test_convert_mapreduce(self):
    wf = Workflow.objects.new_workflow(self.user)
    wf.save()
    Workflow.objects.initialize(wf)
    Link.objects.filter(parent__workflow=wf).delete()
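    # Insert a single MapReduce action between the start and end nodes.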
    action = add_node(wf, 'action-name-1', 'mapreduce', [wf.start], {
      'description': 'Test MR job design',
      'files': '[]',
      'jar_path': '/user/hue/oozie/examples/lib/hadoop-examples.jar',
      'job_properties': '[{"name": "sleep.job.map.sleep.time", "value": "5"}, {"name": "sleep.job.reduce.sleep.time", "value": "10"}]',
      'prepares': '[{"value":"${output}","type":"delete"},{"value":"/test","type":"mkdir"}]',
      'archives': '[]',
    })
    Link(parent=action, child=wf.end, name="ok").save()

    # Set doc.last_modified to an older date
    doc = Document.objects.get(id=wf.doc.get().id)
    Document.objects.filter(id=doc.id).update(last_modified=datetime.strptime('2000-01-01T00:00:00Z', '%Y-%m-%dT%H:%M:%SZ'))
    doc = Document.objects.get(id=doc.id)

    try:
      # Test that a corresponding doc2 is created after conversion
      assert_false(Document2.objects.filter(owner=self.user, type='query-mapreduce').exists())

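      # Run the one-shot converter; it migrates this user's Document1 objects to Document2.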
      converter = DocumentConverter(self.user)
      converter.convert()

      doc2 = Document2.objects.get(owner=self.user, type='query-mapreduce')

      # Verify snippet values
      assert_equal('ready', doc2.data_dict['snippets'][0]['status'])
      assert_equal('/user/hue/oozie/examples/lib/hadoop-examples.jar', doc2.data_dict['snippets'][0]['properties']['app_jar'])
      assert_equal(['sleep.job.map.sleep.time=5', 'sleep.job.reduce.sleep.time=10'], doc2.data_dict['snippets'][0]['properties']['hadoopProperties'])
    finally:
      wf.delete()
Example #2
  def test_convert_shell(self):
    wf = Workflow.objects.new_workflow(self.user)
    wf.save()
    Workflow.objects.initialize(wf)
    Link.objects.filter(parent__workflow=wf).delete()
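    # Insert a single Shell action between the start and end nodes.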
    action = add_node(wf, 'action-name-1', 'shell', [wf.start], {
      u'job_xml': 'my-job.xml',
      u'files': '["hello.py"]',
      u'name': 'Shell',
      u'job_properties': '[{"name": "mapred.job.queue.name", "value": "test"}]',
      u'capture_output': 'on',
      u'command': 'hello.py',
      u'archives': '[{"dummy": "", "name": "test.zip"}]',
      u'prepares': '[]',
      u'params': '[{"type": "argument", "value": "baz"}, {"type": "env-var", "value": "foo=bar"}]',
      u'description': 'Execute a Python script printing its arguments'
    })
    Link(parent=action, child=wf.end, name="ok").save()

    # Set doc.last_modified to an older date (the converter should preserve it)
    doc = Document.objects.get(id=wf.doc.get().id)
    Document.objects.filter(id=doc.id).update(last_modified=datetime.strptime('2000-01-01T00:00:00Z', '%Y-%m-%dT%H:%M:%SZ'))
    doc = Document.objects.get(id=doc.id)

    try:
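      # Hue 4 converts the action into an editor document ('query-shell');
      # earlier versions convert the workflow into a link document ('link-workflow').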
      if IS_HUE_4.get():
        # Test that a corresponding doc2 is created after conversion
        assert_false(Document2.objects.filter(owner=self.user, type='query-shell').exists())

        converter = DocumentConverter(self.user)
        converter.convert()

        doc2 = Document2.objects.get(owner=self.user, type='query-shell')

        # Verify snippet values
        assert_equal('ready', doc2.data_dict['snippets'][0]['status'])
        assert_equal('hello.py', doc2.data_dict['snippets'][0]['properties']['command_path'])
        assert_equal(['baz'], doc2.data_dict['snippets'][0]['properties']['arguments'])
        assert_equal(['foo=bar'], doc2.data_dict['snippets'][0]['properties']['env_var'])
        assert_equal(['mapred.job.queue.name=test'], doc2.data_dict['snippets'][0]['properties']['hadoopProperties'])
        assert_equal(['test.zip'], doc2.data_dict['snippets'][0]['properties']['archives'])
        assert_equal([{'type': 'file', 'path': 'hello.py'}], doc2.data_dict['snippets'][0]['properties']['files'])
        assert_equal(True, doc2.data_dict['snippets'][0]['properties']['capture_output'])
      else:
        # Test that a corresponding doc2 is created after conversion
        assert_false(Document2.objects.filter(owner=self.user, type='link-workflow').exists())

        converter = DocumentConverter(self.user)
        converter.convert()

        doc2 = Document2.objects.get(owner=self.user, type='link-workflow')

        # Verify the absolute URL resolves and the original last_modified is preserved
        response = self.client.get(doc2.get_absolute_url())
        assert_equal(200, response.status_code)
        assert_equal(doc.last_modified.strftime('%Y-%m-%dT%H:%M:%S'), doc2.last_modified.strftime('%Y-%m-%dT%H:%M:%S'))
    finally:
      wf.delete()
Example #3
  def test_convert_mapreduce(self):
    wf = Workflow.objects.new_workflow(self.user)
    wf.save()
    Workflow.objects.initialize(wf)
    Link.objects.filter(parent__workflow=wf).delete()
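    # Insert a single MapReduce action between the start and end nodes.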
    action = add_node(wf, 'action-name-1', 'mapreduce', [wf.start], {
      'description': 'Test MR job design',
      'files': '[]',
      'jar_path': '/user/hue/oozie/examples/lib/hadoop-examples.jar',
      'job_properties': '[{"name": "sleep.job.map.sleep.time", "value": "5"}, {"name": "sleep.job.reduce.sleep.time", "value": "10"}]',
      'prepares': '[{"value":"${output}","type":"delete"},{"value":"/test","type":"mkdir"}]',
      'archives': '[]',
    })
    Link(parent=action, child=wf.end, name="ok").save()

    # Set doc.last_modified to an older date (the converter should preserve it)
    doc = Document.objects.get(id=wf.doc.get().id)
    Document.objects.filter(id=doc.id).update(last_modified=datetime.strptime('2000-01-01T00:00:00Z', '%Y-%m-%dT%H:%M:%SZ'))
    doc = Document.objects.get(id=doc.id)

    try:
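      # Hue 4 converts the action into an editor document ('query-mapreduce');
      # earlier versions convert the workflow into a link document ('link-workflow').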
      if IS_HUE_4.get():
        # Test that a corresponding doc2 is created after conversion
        assert_false(Document2.objects.filter(owner=self.user, type='query-mapreduce').exists())

        converter = DocumentConverter(self.user)
        converter.convert()

        doc2 = Document2.objects.get(owner=self.user, type='query-mapreduce')

        # Verify snippet values
        assert_equal('ready', doc2.data_dict['snippets'][0]['status'])
        assert_equal('/user/hue/oozie/examples/lib/hadoop-examples.jar', doc2.data_dict['snippets'][0]['properties']['app_jar'])
        assert_equal(['sleep.job.map.sleep.time=5', 'sleep.job.reduce.sleep.time=10'], doc2.data_dict['snippets'][0]['properties']['hadoopProperties'])
      else:
        # Test that a corresponding doc2 is created after conversion
        assert_false(Document2.objects.filter(owner=self.user, type='link-workflow').exists())

        converter = DocumentConverter(self.user)
        converter.convert()

        doc2 = Document2.objects.get(owner=self.user, type='link-workflow')

        # Verify the absolute URL resolves and the original last_modified is preserved
        response = self.client.get(doc2.get_absolute_url())
        assert_equal(200, response.status_code)
        assert_equal(doc.last_modified.strftime('%Y-%m-%dT%H:%M:%S'), doc2.last_modified.strftime('%Y-%m-%dT%H:%M:%S'))
    finally:
      wf.delete()
Example #4
  def test_convert_java(self):
    wf = Workflow.objects.new_workflow(self.user)
    wf.save()
    Workflow.objects.initialize(wf)
    Link.objects.filter(parent__workflow=wf).delete()
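    # Insert a single Java action (TeraGen) between the start and end nodes.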
    action = add_node(wf, 'action-name-1', 'java', [wf.start], {
      'name': 'MyTeragen',
      "description": "Generate N number of records",
      "main_class": "org.apache.hadoop.examples.terasort.TeraGen",
      "args": "1000 ${output_dir}/teragen",
      "files": '["my_file","my_file2"]',
      "job_xml": "",
      "java_opts": "-Dexample-property=natty",
      "jar_path": "/user/hue/oozie/workspaces/lib/hadoop-examples.jar",
      'job_properties': '[{"name": "mapred.job.queue.name", "value": "test"}]',
      "prepares": '[{"value":"/test","type":"mkdir"}]',
      "archives": '[{"dummy":"","name":"my_archive"},{"dummy":"","name":"my_archive2"}]',
      "capture_output": True,
    })
    Link(parent=action, child=wf.end, name="ok").save()

    # Set doc.last_modified to an older date
    doc = Document.objects.get(id=wf.doc.get().id)
    Document.objects.filter(id=doc.id).update(
      last_modified=datetime.strptime('2000-01-01T00:00:00Z', '%Y-%m-%dT%H:%M:%SZ'))
    doc = Document.objects.get(id=doc.id)

    try:
      # Test that a corresponding doc2 is created after conversion
      assert_false(Document2.objects.filter(owner=self.user, type='query-java').exists())

      converter = DocumentConverter(self.user)
      converter.convert()

      doc2 = Document2.objects.get(owner=self.user, type='query-java')

      # Verify snippet values
      assert_equal('ready', doc2.data_dict['snippets'][0]['status'])
      assert_equal('/user/hue/oozie/workspaces/lib/hadoop-examples.jar', doc2.data_dict['snippets'][0]['properties']['app_jar'])
      assert_equal('org.apache.hadoop.examples.terasort.TeraGen', doc2.data_dict['snippets'][0]['properties']['class'])
      assert_equal('1000 ${output_dir}/teragen', doc2.data_dict['snippets'][0]['properties']['args'])
      assert_equal('-Dexample-property=natty', doc2.data_dict['snippets'][0]['properties']['java_opts'])
      assert_equal(['mapred.job.queue.name=test'], doc2.data_dict['snippets'][0]['properties']['hadoopProperties'])
      assert_equal(['my_archive', 'my_archive2'], doc2.data_dict['snippets'][0]['properties']['archives'])
      assert_equal([{'type': 'file', 'path': 'my_file'}, {'type': 'file', 'path': 'my_file2'}], doc2.data_dict['snippets'][0]['properties']['files'])
      assert_equal(True, doc2.data_dict['snippets'][0]['properties']['capture_output'])
    finally:
      wf.delete()
Example #5
  def test_convert_shell(self):
    wf = Workflow.objects.new_workflow(self.user)
    wf.save()
    Workflow.objects.initialize(wf)
    Link.objects.filter(parent__workflow=wf).delete()
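    # Insert a single Shell action between the start and end nodes.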
    action = add_node(wf, 'action-name-1', 'shell', [wf.start], {
      u'job_xml': 'my-job.xml',
      u'files': '["hello.py"]',
      u'name': 'Shell',
      u'job_properties': '[{"name": "mapred.job.queue.name", "value": "test"}]',
      u'capture_output': True,
      u'command': 'hello.py',
      u'archives': '[{"dummy": "", "name": "test.zip"}]',
      u'prepares': '[]',
      u'params': '[{"type": "argument", "value": "baz"}, {"type": "env-var", "value": "foo=bar"}]',
      u'description': 'Execute a Python script printing its arguments'
    })
    Link(parent=action, child=wf.end, name="ok").save()

    # Set doc.last_modified to an older date
    doc = Document.objects.get(id=wf.doc.get().id)
    Document.objects.filter(id=doc.id).update(last_modified=datetime.strptime('2000-01-01T00:00:00Z', '%Y-%m-%dT%H:%M:%SZ'))
    doc = Document.objects.get(id=doc.id)

    try:
      # Test that a corresponding doc2 is created after conversion
      assert_false(Document2.objects.filter(owner=self.user, type='query-shell').exists())

      converter = DocumentConverter(self.user)
      converter.convert()

      doc2 = Document2.objects.get(owner=self.user, type='query-shell')

      # Verify snippet values
      assert_equal('ready', doc2.data_dict['snippets'][0]['status'])
      assert_equal('hello.py', doc2.data_dict['snippets'][0]['properties']['command_path'])
      assert_equal(['baz'], doc2.data_dict['snippets'][0]['properties']['arguments'])
      assert_equal(['foo=bar'], doc2.data_dict['snippets'][0]['properties']['env_var'])
      assert_equal(['mapred.job.queue.name=test'], doc2.data_dict['snippets'][0]['properties']['hadoopProperties'])
      assert_equal(['test.zip'], doc2.data_dict['snippets'][0]['properties']['archives'])
      assert_equal([{'type': 'file', 'path': 'hello.py'}], doc2.data_dict['snippets'][0]['properties']['files'])
      assert_equal(True, doc2.data_dict['snippets'][0]['properties']['capture_output'])
    finally:
      wf.delete()
Example #6
    def test_convert_java(self):
        wf = Workflow.objects.new_workflow(self.user)
        wf.save()
        Workflow.objects.initialize(wf)
        Link.objects.filter(parent__workflow=wf).delete()
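        # Insert a single Java action (TeraGen) between the start and end nodes.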
        action = add_node(
            wf, 'action-name-1', 'java', [wf.start], {
                'name': 'MyTeragen',
                "description": "Generate N number of records",
                "main_class": "org.apache.hadoop.examples.terasort.TeraGen",
                "args": "1000 ${output_dir}/teragen",
                "files": '["my_file","my_file2"]',
                "job_xml": "",
                "java_opts": "-Dexample-property=natty",
                "jar_path":
                "/user/hue/oozie/workspaces/lib/hadoop-examples.jar",
                'job_properties':
                '[{"name": "mapred.job.queue.name", "value": "test"}]',
                "prepares": '[{"value":"/test","type":"mkdir"}]',
                "archives":
                '[{"dummy":"","name":"my_archive"},{"dummy":"","name":"my_archive2"}]',
                "capture_output": True,
            })
        Link(parent=action, child=wf.end, name="ok").save()

        # Set doc.last_modified to an older date
        doc = Document.objects.get(id=wf.doc.get().id)
        Document.objects.filter(id=doc.id).update(
            last_modified=datetime.strptime('2000-01-01T00:00:00Z', '%Y-%m-%dT%H:%M:%SZ'))
        doc = Document.objects.get(id=doc.id)

        try:
            # Test that a corresponding doc2 is created after conversion
            assert_false(
                Document2.objects.filter(owner=self.user,
                                         type='query-java').exists())

            converter = DocumentConverter(self.user)
            converter.convert()

            doc2 = Document2.objects.get(owner=self.user, type='query-java')

            # Verify snippet values
            assert_equal('ready', doc2.data_dict['snippets'][0]['status'])
            assert_equal(
                '/user/hue/oozie/workspaces/lib/hadoop-examples.jar',
                doc2.data_dict['snippets'][0]['properties']['app_jar'])
            assert_equal('org.apache.hadoop.examples.terasort.TeraGen',
                         doc2.data_dict['snippets'][0]['properties']['class'])
            assert_equal('1000 ${output_dir}/teragen',
                         doc2.data_dict['snippets'][0]['properties']['args'])
            assert_equal(
                '-Dexample-property=natty',
                doc2.data_dict['snippets'][0]['properties']['java_opts'])
            assert_equal(
                ['mapred.job.queue.name=test'],
                doc2.data_dict['snippets'][0]['properties']['hadoopProperties'])
            assert_equal(
                ['my_archive', 'my_archive2'],
                doc2.data_dict['snippets'][0]['properties']['archives'])
            assert_equal(
                [{'type': 'file', 'path': 'my_file'}, {'type': 'file', 'path': 'my_file2'}],
                doc2.data_dict['snippets'][0]['properties']['files'])
            assert_equal(
                True,
                doc2.data_dict['snippets'][0]['properties']['capture_output'])
        finally:
            wf.delete()