Code example #1
 def test_get_newdirs(self):
     # Feed a temp file of replay directories to ReplayHandler and check
     # that get_newdirs() parses out the expected directory list.
     tf = make_tempfile()
     tempfile_write(tf, self.replay_file_contents)
     rh = trh.ReplayHandler(datacfg_file="foo",
                            envcfg_file="bar",
                            resources_file="baz.zip",
                            replaydirs_file=tf.name)
     self.assertEqual(rh.get_newdirs(), self.newdirs)
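
All of the examples rely on make_tempfile and tempfile_write helpers that are not shown in the excerpts. A minimal sketch of what they might look like, assuming make_tempfile returns a NamedTemporaryFile (the tests pass tf.name to the handlers) and tempfile_write flushes and rewinds after writing (code example #10 below reads the file back through a patched open()):

import tempfile

def make_tempfile():
    # Assumed helper: a named temp file kept open so tests can both pass
    # tf.name to the code under test and read the file back afterwards.
    return tempfile.NamedTemporaryFile(mode="w+")

def tempfile_write(tf, contents):
    # Assumed helper: write the fixture contents, flush so code opening
    # tf.name sees the data on disk, and rewind so reads from tf start at 0.
    tf.write(contents)
    tf.flush()
    tf.seek(0)
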
Code example #2
 def test_execute(self, mock_lh_execute):
     # mock_lh_execute is injected by a @mock.patch decorator not shown in
     # this excerpt; execute() should kick off a load with load_type="replay".
     tf = make_tempfile()
     tempfile_write(tf, self.replay_file_contents)
     rh = trh.ReplayHandler(datacfg_file="foo",
                            envcfg_file="bar",
                            resources_file="baz.zip",
                            replaydirs_file=tf.name)
     rh.execute()
     mock_lh_execute.assert_called_with(load_type="replay")
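
Several of the test methods here take extra mock_* parameters (mock_lh_execute above, mock_vcopy, mock_open, mock_materialize below), which implies stacked @mock.patch decorators that were trimmed from the excerpts. A self-contained illustration of that injection mechanism, using os.listdir as a stand-in patch target rather than the project's real handlers:

import os
import unittest
from unittest import mock

class PatchInjectionDemo(unittest.TestCase):
    # mock.patch creates a MagicMock for the target and appends it to the
    # decorated test's argument list, which is why the excerpted signatures
    # have extra parameters with no visible decorator.
    @mock.patch("os.listdir")
    def test_patch_injects_mock(self, mock_listdir):
        mock_listdir.return_value = ["a", "b"]
        self.assertEqual(os.listdir("/tmp"), ["a", "b"])
        mock_listdir.assert_called_with("/tmp")

if __name__ == "__main__":
    unittest.main()
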
Code example #3
 def test_execute_VerticaManager_clone_schema_call(self):
     # execute() should ask VerticaManager to clone the configured table
     # into a "__rollback__"-suffixed table.
     cv = self.config_value
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     rh.execute()
     vmgr = self.mock_vtica.return_value
     vmgr.clone_schema.assert_called_with(cv, cv, cv, "%s__rollback__" % cv)
Code example #4
 def test_execute_exception(self):
     # If VerticaManager.truncate raises, execute() should propagate the
     # exception rather than swallow it.
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     vmgr = self.mock_vtica.return_value
     vmgr.truncate.side_effect = Exception()
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     with self.assertRaises(Exception):
         rh.execute()
Code example #5
 def test_execute_HdfsManager_path_exists_calls(self):
     # execute() should check HDFS path existence exactly once per partition,
     # in the same order as the partitions file.
     cv = self.config_value
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     rh.execute()
     hdfs_paths = ["%s/%s" % (cv, ptn) for ptn in self.ptns]
     path_exist_calls = [mock.call(s) for s in hdfs_paths]
     hdfs_mgr = self.mock_hdfs.return_value
     self.assertEqual(path_exist_calls, hdfs_mgr.path_exists.call_args_list)
Code example #6
 def test_execute_MetadataManager_delete_partition_calls(self):
     # Each rolled-back partition should produce a metadata delete keyed on
     # the "hive_last_partition" column.
     cv = self.config_value
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     rh.execute()
     md_delete_calls = [mock.call(cv, mdcolname="hive_last_partition", mdcolvalue=p)
                        for p in self.ptns]
     md_mgr = self.mock_mm.return_value
     md_mgr.delete.assert_has_calls(md_delete_calls)
Code example #7
 def test_execute_HiveManager_drop_partition_calls(self):
     # Partition paths ("year/month/day/hour/part") must be reformatted into
     # the "year=..., month=..., ..." spec passed to drop_partition.
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     rh.execute()
     ptn_strs = ["year=%s, month=%s, day=%s, hour=%s, part=%s" \
                 % tuple(p.split("/")) for p in self.ptns]
     drop_ptn_calls = [mock.call(ps) for ps in ptn_strs]
     hive_mgr = self.mock_hive.return_value
     hive_mgr.drop_partition.assert_has_calls(drop_ptn_calls)
Code example #8
 def test_execute_VerticaManager_truncate_calls(self):
     # The "__rollback__" table should be truncated once per partition in
     # the partitions file.
     cv = self.config_value
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     src_schema, src_table, rb_schema, rb_table = cv, cv, cv, "%s__rollback__" % cv
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     rh.execute()
     truncate_calls = [mock.call(rb_schema, rb_table) for p in self.ptns]
     vmgr = self.mock_vtica.return_value
     vmgr.truncate.assert_has_calls(truncate_calls)
Code example #9
 def test_execute_LoadHandler_vload_copy_direct_calls(self, mock_vcopy):
     # mock_vcopy is injected by a @mock.patch decorator not shown here; each
     # partition should be copied into the rollback table in "direct" mode.
     cv = self.config_value
     tf = make_tempfile()
     tempfile_write(tf, self.ptn_file_contents)
     src_schema, src_table, rb_schema, rb_table = cv, cv, cv, "%s__rollback__" % cv
     rh = trh.RollbackHandler(datacfg_file="foo", envcfg_file="bar",
                              resources_file="baz.zip", partitions_file=tf.name)
     rh.execute()
     copy_calls = [mock.call(p, rb_schema, rb_table, mode="direct")
                   for p in self.ptns]
     mock_vcopy.assert_has_calls(copy_calls)
Code example #10
 def test_make_oozie_workflow(self, mock_open, mock_materialize):
     # Both mocks come from @mock.patch decorators not shown here: open() is
     # patched so reading the workflow template yields mock_template_str, and
     # the substitutions handed to the materialize step are then verified.
     mock_template_str = "templatefoo"
     tf = make_tempfile()
     tempfile_write(tf, mock_template_str)
     mock_open.return_value = tf
     mock_joinval = "foo/script/foo"
     mock_outfile = "foo/foo"
     mock_subs = {"@MAPPER": "foo", "@HDFS_PATH": mock_joinval,
                  "@CODEC": "foo"}
     self.sh.make_oozie_workflow()
     mock_materialize.assert_called_with(mock_template_str, mock_subs,
                                         mock_outfile)
Code example #11
    def test__make_schema(self):
        # _make_schema reads column definitions and a DDL template, applies
        # the @-style substitutions, and writes the rendered schema to the
        # output file, which is compared to the expected DDL via squeeze().
        tf_cols = make_tempfile()
        tf_template = make_tempfile()
        cols = "col1 int\ncol2 float\ncol3 varchar(20)"
        template = """
        use @DATABASE;
        create external table @TABLE (
            @COLUMNMAPPINGS
        )
          partitioned by (year string, month string, day string, hour string, part string)
          row format delimited
          fields terminated by '\u0001'
          null defined as '';
        """
        tempfile_write(tf_cols, cols)
        tempfile_write(tf_template, template)
        substitutions = {
            "@DATABASE": "dbfoo",
            "@COLUMNMAPPINGS": cols,
            "@TABLE": "tablefoo"
        }

        tf_outfile = make_tempfile()
        self.sh._make_schema(tf_cols.name, tf_template.name, tf_outfile.name,
                             substitutions)
        tf_outfile.seek(0)

        expected = \
            """
            use dbfoo;
                    create external table tablefoo (
                        col1 int,
                        col2 float,
                        col3 varchar(20)
                    )
                      partitioned by (year string, month string, day string, hour string, part string)
                      row format delimited
                      fields terminated by '\u0001'
                      null defined as '';

            """
        self.assertEqual(squeeze(expected), squeeze(tf_outfile.read()))
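
The final assertion runs both the expected DDL and the generated file through a squeeze helper that is also not shown. It presumably collapses whitespace so the comparison ignores the differing indentation of the two strings; a sketch under that assumption:

import re

def squeeze(text):
    # Assumed helper: collapse every whitespace run (spaces, newlines, tabs)
    # to a single space and strip the ends, so only the token sequence of
    # the generated schema is compared, not its layout.
    return re.sub(r"\s+", " ", text).strip()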