def testStart_Prefix(self):
    """Start sub-command unit test with long prefix."""
    with mock.patch('gce_cluster.GceCluster'):
        hadoop_cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
        base_args = ['start', 'project-piyo', 'bucket-bar', '--prefix']

        # Acceptable prefixes parse without error.
        for good_prefix in ('a6b-c', 'ends-with-dash-'):
            hadoop_cluster.ParseArgumentsAndExecute(base_args + [good_prefix])

        # Invalid patterns: too long, upper case, illegal character,
        # leading digit.  Each must make argparse bail out via SystemExit.
        for bad_prefix in ('insanely-long-prefix', 'upperCase',
                           'invalid*char', '0number'):
            self.assertRaises(SystemExit,
                              hadoop_cluster.ParseArgumentsAndExecute,
                              base_args + [bad_prefix])
 def testMapReduce_NoInputOutput(self):
     """MapReduce sub-command unit test."""
     with mock.patch('gce_cluster.GceCluster'):
         cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
         # Without --input/--output the mapreduce sub-command is rejected.
         args = ['mapreduce', 'project-name', 'bucket-name']
         self.assertRaises(SystemExit, cluster.ParseArgumentsAndExecute, args)
 def testSetUp_NoBucket(self):
     """Setup sub-command unit test with no bucket option."""
     with mock.patch('gce_cluster.GceCluster'):
         cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
         # Missing the required bucket positional -> argparse exits.
         self.assertRaises(SystemExit, cluster.ParseArgumentsAndExecute,
                           ['setup', 'project-name'])
 def testShutdown_InvalidOption(self):
     """Shutdown sub-command unit test with invalid optional param."""
     with mock.patch('gce_cluster.GceCluster'):
         cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
         # --image is not an option of the shutdown sub-command.
         bad_args = ['shutdown', 'project-name', '--image', 'foo']
         self.assertRaises(SystemExit, cluster.ParseArgumentsAndExecute,
                           bad_args)
 def testShutdown_MissingParamValue(self):
     """Shutdown sub-command unit test with missing param value."""
     with mock.patch('gce_cluster.GceCluster'):
         cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
         # --prefix with no value following it -> argparse exits.
         bad_args = ['shutdown', 'project-name', '--prefix']
         self.assertRaises(SystemExit, cluster.ParseArgumentsAndExecute,
                           bad_args)
 def testStart_InvalidExternalIp(self):
     """Start sub-command unit test with no bucket option."""
     with mock.patch('gce_cluster.GceCluster'):
         cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
         # 'foo' is not an accepted value for --external-ip.
         bad_args = ['start', 'project-piyo', 'bucket-bar',
                     '--external-ip', 'foo']
         self.assertRaises(SystemExit, cluster.ParseArgumentsAndExecute,
                           bad_args)
    def testShutdown(self):
        """Shutdown sub-command unit test."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            cluster.ParseArgumentsAndExecute(['shutdown', 'project-name'])

            # GceCluster must be constructed exactly once with the parsed
            # flags, and teardown invoked on that instance.
            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            self.assertEqual('project-name', parsed.project)
            mock_cluster.return_value.TeardownCluster.assert_called_once_with()
    def testSetUp(self):
        """Setup sub-command unit test."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            cluster.ParseArgumentsAndExecute(
                ['setup', 'project-name', 'bucket-name'])

            # One GceCluster construction, flags carry both positionals,
            # and the environment setup ran on the constructed instance.
            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            self.assertEqual('project-name', parsed.project)
            self.assertEqual('bucket-name', parsed.bucket)
            mock_cluster.return_value.EnvironmentSetUp.assert_called_once_with()
# Example #9 (stray scrape artifact neutralized)
    def testStart_DefaultClusterSize(self):
        """Start sub-command unit test with default cluster size."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            cluster.ParseArgumentsAndExecute(
                ['start', 'project-foo', 'bucket-bar'])

            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            self.assertEqual('project-foo', parsed.project)
            self.assertEqual('bucket-bar', parsed.bucket)
            # Omitting the worker-count argument defaults to 5 workers.
            self.assertEqual(5, parsed.num_workers)
            mock_cluster.return_value.StartCluster.assert_called_once_with()
    def testStart(self):
        """Start sub-command unit test."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            cluster.ParseArgumentsAndExecute(
                ['start', 'project-name', 'bucket-name', '10'])

            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            self.assertEqual('project-name', parsed.project)
            self.assertEqual('bucket-name', parsed.bucket)
            # Explicit worker count is parsed as an int; external IP
            # defaults to 'all'.
            self.assertEqual(10, parsed.num_workers)
            self.assertEqual('all', parsed.external_ip)
            mock_cluster.return_value.StartCluster.assert_called_once_with()
    def testShutdown_OptionalParams(self):
        """Shutdown sub-command unit test with optional params."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            cluster.ParseArgumentsAndExecute(
                ['shutdown', 'project-name',
                 '--prefix', 'foo', '--zone', 'abc'])

            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            # Optional --prefix/--zone values are carried through to flags.
            self.assertEqual('project-name', parsed.project)
            self.assertEqual('foo', parsed.prefix)
            self.assertEqual('abc', parsed.zone)
            mock_cluster.return_value.TeardownCluster.assert_called_once_with()
    def testMapReduce(self):
        """MapReduce sub-command unit test."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            input_uri = 'gs://some-bucket/inputs'
            output_uri = 'gs://some-bucket/outputs'
            cluster.ParseArgumentsAndExecute(
                ['mapreduce', 'project-name', 'bucket-name',
                 '--input', input_uri, '--output', output_uri])

            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            self.assertEqual('project-name', parsed.project)
            self.assertEqual('bucket-name', parsed.bucket)
            # Input/output GCS URIs survive parsing untouched.
            self.assertEqual(input_uri, parsed.input)
            self.assertEqual(output_uri, parsed.output)
            mock_cluster.return_value.StartMapReduce.assert_called_once_with()
# Example #13 (stray scrape artifact neutralized)
    def testStart_OptionalParams(self):
        """Start sub-command unit test with optional params."""
        with mock.patch('gce_cluster.GceCluster') as mock_cluster:
            cluster = compute_cluster_for_hadoop.ComputeClusterForHadoop()
            cluster.ParseArgumentsAndExecute(
                ['start', 'project-name', 'bucket-name',
                 '--prefix', 'fuga', '--zone', 'piyo',
                 '--command', '"additional command"'])

            self.assertEqual(1, mock_cluster.call_count)
            parsed = self._GetFlags(mock_cluster)
            self.assertEqual('project-name', parsed.project)
            self.assertEqual('bucket-name', parsed.bucket)
            # Worker count still defaults to 5 when not given explicitly.
            self.assertEqual(5, parsed.num_workers)
            self.assertEqual('fuga', parsed.prefix)
            self.assertEqual('piyo', parsed.zone)
            # Quotes in the --command value are preserved verbatim.
            self.assertEqual('"additional command"', parsed.command)
            mock_cluster.return_value.StartCluster.assert_called_once_with()