Example #1
0
 def test_hive_arguments(self):
   """Deserialize ApiHiveReplicationArguments from JSON: scalar fields,
   the nested hdfsArguments wrapper, and the tableFilters list."""
   RAW = '''{
     "sourceService" : {
       "peerName" : "vst2",
       "clusterName" : "Cluster 1 - CDH4",
       "serviceName" : "HIVE-1"
     },
     "force" : true,
     "replicateData" : true,
     "hdfsArguments" : {
       "mapreduceServiceName" : "MAPREDUCE-1",
       "dryRun" : false,
       "abortOnError" : false,
       "removeMissingFiles" : false,
       "preserveReplicationCount" : false,
       "preserveBlockSize" : false,
       "preservePermissions" : false
     },
     "tableFilters" : [
       { "database" : "db1", "tableName" : "table1" }
     ],
     "dryRun" : false
   }'''
   args = utils.deserialize(RAW, ApiHiveReplicationArguments)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('vst2', args.sourceService.peerName)
   self.assertEqual('Cluster 1 - CDH4', args.sourceService.clusterName)
   self.assertEqual('HIVE-1', args.sourceService.serviceName)
   self.assertTrue(args.force)
   self.assertTrue(args.replicateData)
   # Nested objects come back as typed wrappers, not plain dicts.
   self.assertIsInstance(args.hdfsArguments, ApiHdfsReplicationArguments)
   self.assertIsInstance(args.tableFilters, list)
   self.assertEqual(1, len(args.tableFilters))
   self.assertIsInstance(args.tableFilters[0], ApiHiveTable)
   self.assertEqual("db1", args.tableFilters[0].database)
   self.assertEqual("table1", args.tableFilters[0].tableName)
Example #2
0
 def test_hdfs_arguments(self):
   """Deserialize ApiHdfsReplicationArguments and verify every scalar
   field, including the boolean flags."""
   RAW = '''{
     "sourceService" : {
       "peerName" : "vst2",
       "clusterName" : "Cluster 1 - CDH4",
       "serviceName" : "HDFS-1"
     },
     "sourcePath" : "/data",
     "destinationPath" : "/copy/data2",
     "mapreduceServiceName" : "MAPREDUCE-1",
     "schedulerPoolName" : "medium",
     "userName" : "systest",
     "dryRun" : false,
     "abortOnError" : true,
     "removeMissingFiles" : false,
     "preserveReplicationCount" : true,
     "preserveBlockSize" : true,
     "preservePermissions" : false
   }'''
   args = utils.deserialize(RAW, ApiHdfsReplicationArguments)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('vst2', args.sourceService.peerName)
   self.assertEqual('Cluster 1 - CDH4', args.sourceService.clusterName)
   self.assertEqual('HDFS-1', args.sourceService.serviceName)
   self.assertEqual('/data', args.sourcePath)
   self.assertEqual('/copy/data2', args.destinationPath)
   self.assertEqual('MAPREDUCE-1', args.mapreduceServiceName)
   self.assertEqual('medium', args.schedulerPoolName)
   self.assertEqual('systest', args.userName)
   self.assertFalse(args.dryRun)
   self.assertTrue(args.abortOnError)
   self.assertFalse(args.removeMissingFiles)
   self.assertTrue(args.preserveBlockSize)
   self.assertFalse(args.preservePermissions)
   self.assertTrue(args.preserveReplicationCount)
Example #3
0
    def test_hdfs_snapshot_result(self):
        """Deserialize ApiHdfsSnapshotResult: counts, path lists, nested
        created/deleted snapshot objects, creation errors, and empty lists."""
        RAW = '''{
      "processedPathCount" : 5,
      "processedPaths"     : ["/t1", "/t2", "/t3", "/t4", "/t5"],
      "unprocessedPathCount" : "2",
      "unprocessedPaths" : ["nt1", "nt2"],
      "createdSnapshotCount" : 5,
      "createdSnapshots" : [
          {"snapshotName" : "sn1",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t1",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn2",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t2",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn3",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t3",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn4",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t4",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn5",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t5",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "deletedSnapshotCount" : 1,
      "deletedSnapshots" : [
          {"snapshotName" : "dn1",
          "path" : "/t1",
          "snapshotPath" : "/t1/.snapshot/dn1",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "creationErrorCount" : 1,
      "creationErrors" : [{
          "snapshotName" : "sn1",
          "path" : "/t1",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "error" : "bad snapshot"}],
      "deletionErrorCount" : 0,
      "deletionErrors" : []
       }'''

        args = utils.deserialize(RAW, ApiHdfsSnapshotResult)
        # assertEqual replaces the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(5, args.processedPathCount)
        self.assertEqual(["/t1", "/t2", "/t3", "/t4", "/t5"],
                         args.processedPaths)
        # unprocessedPathCount is a JSON string in the payload and stays a str.
        self.assertEqual('2', args.unprocessedPathCount)
        self.assertEqual(['nt1', 'nt2'], args.unprocessedPaths)
        self.assertEqual(5, args.createdSnapshotCount)
        self.assertEqual('/t3', args.createdSnapshots[2].path)
        self.assertEqual(1, args.deletedSnapshotCount)
        self.assertEqual('dn1', args.deletedSnapshots[0].snapshotName)
        self.assertEqual(1, args.creationErrorCount)
        self.assertEqual("bad snapshot", args.creationErrors[0].error)
        self.assertEqual(0, args.deletionErrorCount)
        self.assertEqual([], args.deletionErrors)
Example #4
0
  def test_hdfs_snapshot_result(self):
    """Deserialize ApiHdfsSnapshotResult and spot-check counts, lists,
    nested snapshot objects, and creation errors."""
    RAW = '''{
      "processedPathCount" : 5,
      "processedPaths"     : ["/t1", "/t2", "/t3", "/t4", "/t5"],
      "unprocessedPathCount" : "2",
      "unprocessedPaths" : ["nt1", "nt2"],
      "createdSnapshotCount" : 5,
      "createdSnapshots" : [
          {"snapshotName" : "sn1",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t1",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn2",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t2",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn3",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t3",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn4",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t4",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn5",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "path" : "/t5",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "deletedSnapshotCount" : 1,
      "deletedSnapshots" : [
          {"snapshotName" : "dn1",
          "path" : "/t1",
          "snapshotPath" : "/t1/.snapshot/dn1",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "creationErrorCount" : 1,
      "creationErrors" : [{
          "snapshotName" : "sn1",
          "path" : "/t1",
          "snapshotPath" : "/t1/.snapshot/sn1",
          "error" : "bad snapshot"}],
      "deletionErrorCount" : 0,
      "deletionErrors" : []
       }'''

    args = utils.deserialize(RAW, ApiHdfsSnapshotResult)
    # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(5, args.processedPathCount)
    self.assertEqual(["/t1", "/t2", "/t3", "/t4", "/t5"], args.processedPaths)
    # unprocessedPathCount arrives as a JSON string, hence '2' not 2.
    self.assertEqual('2', args.unprocessedPathCount)
    self.assertEqual(['nt1', 'nt2'], args.unprocessedPaths)
    self.assertEqual(5, args.createdSnapshotCount)
    self.assertEqual('/t3', args.createdSnapshots[2].path)
    self.assertEqual(1, args.deletedSnapshotCount)
    self.assertEqual('dn1', args.deletedSnapshots[0].snapshotName)
    self.assertEqual(1, args.creationErrorCount)
    self.assertEqual("bad snapshot", args.creationErrors[0].error)
    self.assertEqual(0, args.deletionErrorCount)
    self.assertEqual([], args.deletionErrors)
Example #5
0
 def test_hbase_snapshot_error(self):
   """Deserialize ApiHBaseSnapshotError and check all three fields."""
   RAW = '''{
     "snapshotName" : "sn1",
     "tableName" : "table1",
     "error" : "bad snapshot" }'''
   args = utils.deserialize(RAW, ApiHBaseSnapshotError)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('sn1', args.snapshotName)
   self.assertEqual('table1', args.tableName)
   self.assertEqual('bad snapshot', args.error)
Example #6
0
 def test_hbase_snapshot(self):
   """Deserialize ApiHBaseSnapshot; creationTime must parse to a datetime
   equal to the helper-parsed timestamp."""
   RAW = '''{
     "snapshotName" : "sn1",
     "tableName" : "table1",
     "creationTime" : "2012-12-10T23:11:31.041Z" }'''
   args = utils.deserialize(RAW, ApiHBaseSnapshot)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('sn1', args.snapshotName)
   self.assertEqual('table1', args.tableName)
   self.assertEqual(self._parse_time("2012-12-10T23:11:31.041Z"),
                    args.creationTime)
Example #7
0
 def test_hbase_snapshot_error(self):
     """Deserialize ApiHBaseSnapshotError and check all three fields."""
     RAW = '''{
   "snapshotName" : "sn1",
   "tableName" : "table1",
   "error" : "bad snapshot" }'''
     args = utils.deserialize(RAW, ApiHBaseSnapshotError)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('sn1', args.snapshotName)
     self.assertEqual('table1', args.tableName)
     self.assertEqual('bad snapshot', args.error)
Example #8
0
 def test_hbase_snapshot(self):
     """Deserialize ApiHBaseSnapshot, including the ISO-8601 creationTime."""
     RAW = '''{
   "snapshotName" : "sn1",
   "tableName" : "table1",
   "creationTime" : "2012-12-10T23:11:31.041Z" }'''
     args = utils.deserialize(RAW, ApiHBaseSnapshot)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('sn1', args.snapshotName)
     self.assertEqual('table1', args.tableName)
     self.assertEqual(self._parse_time("2012-12-10T23:11:31.041Z"),
                      args.creationTime)
Example #9
0
 def test_hdfs_snapshot(self):
   """Deserialize ApiHdfsSnapshot: path fields plus parsed creationTime."""
   RAW = '''{
     "path" : "/abc",
     "snapshotName" : "sn1",
     "snapshotPath" : "/abc/.snapshot/sn1",
     "creationTime" : "2012-12-10T23:11:31.041Z" }'''
   args = utils.deserialize(RAW, ApiHdfsSnapshot)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('/abc', args.path)
   self.assertEqual('sn1', args.snapshotName)
   self.assertEqual('/abc/.snapshot/sn1', args.snapshotPath)
   self.assertEqual(self._parse_time("2012-12-10T23:11:31.041Z"),
                    args.creationTime)
Example #10
0
 def test_hdfs_snapshot_error(self):
     """Deserialize ApiHdfsSnapshotError and check all four fields."""
     RAW = '''{
   "snapshotPath" : "/abc/.snapshot/sn1",
   "snapshotName" : "sn1",
   "path" : "/abc",
   "error" : "bad snapshot" }'''
     args = utils.deserialize(RAW, ApiHdfsSnapshotError)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('/abc/.snapshot/sn1', args.snapshotPath)
     self.assertEqual('/abc', args.path)
     self.assertEqual('sn1', args.snapshotName)
     self.assertEqual('bad snapshot', args.error)
Example #11
0
 def test_hdfs_snapshot_error(self):
   """Deserialize ApiHdfsSnapshotError and check all four fields."""
   RAW = '''{
     "snapshotPath" : "/abc/.snapshot/sn1",
     "snapshotName" : "sn1",
     "path" : "/abc",
     "error" : "bad snapshot" }'''
   args = utils.deserialize(RAW, ApiHdfsSnapshotError)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('/abc/.snapshot/sn1', args.snapshotPath)
   self.assertEqual('/abc', args.path)
   self.assertEqual('sn1', args.snapshotName)
   self.assertEqual('bad snapshot', args.error)
Example #12
0
 def test_hdfs_snapshot(self):
     """Deserialize ApiHdfsSnapshot: path fields plus parsed creationTime."""
     RAW = '''{
   "path" : "/abc",
   "snapshotName" : "sn1",
   "snapshotPath" : "/abc/.snapshot/sn1",
   "creationTime" : "2012-12-10T23:11:31.041Z" }'''
     args = utils.deserialize(RAW, ApiHdfsSnapshot)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('/abc', args.path)
     self.assertEqual('sn1', args.snapshotName)
     self.assertEqual('/abc/.snapshot/sn1', args.snapshotPath)
     self.assertEqual(self._parse_time("2012-12-10T23:11:31.041Z"),
                      args.creationTime)
Example #13
0
  def test_hbase_snapshot_result(self):
    """Deserialize ApiHBaseSnapshotResult: table counts/lists, nested
    snapshot objects, creation errors (with storage), and empty lists."""
    RAW = '''{
      "processedTableCount" : 5,
      "processedTables"     : ["t1", "t2", "t3", "t4", "t5"],
      "unprocessedTableCount" : "2",
      "unprocessedTables" : ["nt1", "nt2"],
      "createdSnapshotCount" : 5,
      "createdSnapshots" : [
          {"snapshotName" : "sn1",
          "tableName" : "t1",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn2",
          "tableName" : "t2",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn3",
          "tableName" : "t3",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn4",
          "tableName" : "t4",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn5",
          "tableName" : "t5",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "deletedSnapshotCount" : 1,
      "deletedSnapshots" : [
          {"snapshotName" : "dn1",
          "tableName" : "t1",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "creationErrorCount" : 1,
      "creationErrors" : [{
          "snapshotName" : "sn1",
          "tableName" : "table1",
          "error" : "bad snapshot",
          "storage" : "LOCAL"}],
      "deletionErrorCount" : 0,
      "deletionErrors" : []
       }'''

    args = utils.deserialize(RAW, ApiHBaseSnapshotResult)
    # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
    self.assertEqual(5, args.processedTableCount)
    self.assertEqual(["t1", "t2", "t3", "t4", "t5"], args.processedTables)
    # unprocessedTableCount arrives as a JSON string, hence '2' not 2.
    self.assertEqual('2', args.unprocessedTableCount)
    self.assertEqual(['nt1', 'nt2'], args.unprocessedTables)
    self.assertEqual(5, args.createdSnapshotCount)
    self.assertEqual('t3', args.createdSnapshots[2].tableName)
    self.assertEqual(1, args.deletedSnapshotCount)
    self.assertEqual('dn1', args.deletedSnapshots[0].snapshotName)
    self.assertEqual(1, args.creationErrorCount)
    self.assertEqual("bad snapshot", args.creationErrors[0].error)
    self.assertEqual("LOCAL", args.creationErrors[0].storage)
    self.assertEqual(0, args.deletionErrorCount)
    self.assertEqual([], args.deletionErrors)
Example #14
0
    def test_hbase_snapshot_result(self):
        """Deserialize ApiHBaseSnapshotResult and spot-check counts, table
        lists, nested snapshots, and the creation error (with storage)."""
        RAW = '''{
      "processedTableCount" : 5,
      "processedTables"     : ["t1", "t2", "t3", "t4", "t5"],
      "unprocessedTableCount" : "2",
      "unprocessedTables" : ["nt1", "nt2"],
      "createdSnapshotCount" : 5,
      "createdSnapshots" : [
          {"snapshotName" : "sn1",
          "tableName" : "t1",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn2",
          "tableName" : "t2",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn3",
          "tableName" : "t3",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn4",
          "tableName" : "t4",
          "creationTime" : "2012-12-10T23:11:31.041Z"},
          {"snapshotName" : "sn5",
          "tableName" : "t5",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "deletedSnapshotCount" : 1,
      "deletedSnapshots" : [
          {"snapshotName" : "dn1",
          "tableName" : "t1",
          "creationTime" : "2012-12-10T23:11:31.041Z"}],
      "creationErrorCount" : 1,
      "creationErrors" : [{
          "snapshotName" : "sn1",
          "tableName" : "table1",
          "error" : "bad snapshot",
          "storage" : "LOCAL"}],
      "deletionErrorCount" : 0,
      "deletionErrors" : []
       }'''

        args = utils.deserialize(RAW, ApiHBaseSnapshotResult)
        # assertEqual replaces the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(5, args.processedTableCount)
        self.assertEqual(["t1", "t2", "t3", "t4", "t5"], args.processedTables)
        # unprocessedTableCount is a JSON string in the payload.
        self.assertEqual('2', args.unprocessedTableCount)
        self.assertEqual(['nt1', 'nt2'], args.unprocessedTables)
        self.assertEqual(5, args.createdSnapshotCount)
        self.assertEqual('t3', args.createdSnapshots[2].tableName)
        self.assertEqual(1, args.deletedSnapshotCount)
        self.assertEqual('dn1', args.deletedSnapshots[0].snapshotName)
        self.assertEqual(1, args.creationErrorCount)
        self.assertEqual("bad snapshot", args.creationErrors[0].error)
        self.assertEqual("LOCAL", args.creationErrors[0].storage)
        self.assertEqual(0, args.deletionErrorCount)
        self.assertEqual([], args.deletionErrors)
Example #15
0
 def test_hive_results(self):
     """Deserialize ApiHiveReplicationResult: tables, impala/hive UDF
     lists, raw error dicts, nested data replication result, and flags."""
     RAW = '''{
   "phase" : "EXPORT",
   "tableCount" : 1,
   "tables" : [
     { "database" : "db1", "tableName" : "table1" }
   ],
   "impalaUDFCount" : 1,
   "impalaUDFs" : [
     { "database" : "db1", "signature" : "func1(STRING)" }
   ],
   "hiveUDFCount" : 2,
   "hiveUDFs" : [
     { "database" : "db1", "signature" : "func1(STRING)" },
     { "database" : "db2", "signature" : "func2(STRING)" }
   ],
   "errorCount" : 1,
   "errors" : [
     { "database" : "db1", "tableName" : "table2",
       "impalaUDF" : "func2(INT)", "error" : "message" }
   ],
   "dataReplicationResult" : {
     "progress" : 50
   },
   "dryRun" : false,
   "runAsUser" : "systest"
 }'''
     res = utils.deserialize(RAW, ApiHiveReplicationResult)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('EXPORT', res.phase)
     self.assertEqual(1, res.tableCount)
     self.assertEqual(1, len(res.tables))
     self.assertEqual('db1', res.tables[0].database)
     self.assertEqual('table1', res.tables[0].tableName)
     self.assertEqual(1, res.impalaUDFCount)
     self.assertEqual(1, len(res.impalaUDFs))
     self.assertEqual('db1', res.impalaUDFs[0].database)
     self.assertEqual('func1(STRING)', res.impalaUDFs[0].signature)
     self.assertEqual(2, res.hiveUDFCount)
     self.assertEqual(2, len(res.hiveUDFs))
     self.assertEqual('db1', res.hiveUDFs[0].database)
     self.assertEqual('func1(STRING)', res.hiveUDFs[0].signature)
     self.assertEqual('db2', res.hiveUDFs[1].database)
     self.assertEqual('func2(STRING)', res.hiveUDFs[1].signature)
     self.assertEqual(1, res.errorCount)
     # Errors stay as plain dicts (keyed access), unlike the typed lists above.
     self.assertEqual('db1', res.errors[0]['database'])
     self.assertEqual('table2', res.errors[0]['tableName'])
     self.assertEqual('func2(INT)', res.errors[0]['impalaUDF'])
     self.assertEqual('message', res.errors[0]['error'])
     self.assertEqual(50, res.dataReplicationResult.progress)
     self.assertFalse(res.dryRun)
     self.assertEqual(res.runAsUser, 'systest')
Example #16
0
 def test_hive_results(self):
   """Deserialize ApiHiveReplicationResult and verify tables, UDF lists,
   raw error dicts, the nested dataReplicationResult, and flags."""
   RAW = '''{
     "phase" : "EXPORT",
     "tableCount" : 1,
     "tables" : [
       { "database" : "db1", "tableName" : "table1" }
     ],
     "impalaUDFCount" : 1,
     "impalaUDFs" : [
       { "database" : "db1", "signature" : "func1(STRING)" }
     ],
     "hiveUDFCount" : 2,
     "hiveUDFs" : [
       { "database" : "db1", "signature" : "func1(STRING)" },
       { "database" : "db2", "signature" : "func2(STRING)" }
     ],
     "errorCount" : 1,
     "errors" : [
       { "database" : "db1", "tableName" : "table2",
         "impalaUDF" : "func2(INT)", "error" : "message" }
     ],
     "dataReplicationResult" : {
       "progress" : 50
     },
     "dryRun" : false,
     "runAsUser" : "systest"
   }'''
   res = utils.deserialize(RAW, ApiHiveReplicationResult)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('EXPORT', res.phase)
   self.assertEqual(1, res.tableCount)
   self.assertEqual(1, len(res.tables))
   self.assertEqual('db1', res.tables[0].database)
   self.assertEqual('table1', res.tables[0].tableName)
   self.assertEqual(1, res.impalaUDFCount)
   self.assertEqual(1, len(res.impalaUDFs))
   self.assertEqual('db1', res.impalaUDFs[0].database)
   self.assertEqual('func1(STRING)', res.impalaUDFs[0].signature)
   self.assertEqual(2, res.hiveUDFCount)
   self.assertEqual(2, len(res.hiveUDFs))
   self.assertEqual('db1', res.hiveUDFs[0].database)
   self.assertEqual('func1(STRING)', res.hiveUDFs[0].signature)
   self.assertEqual('db2', res.hiveUDFs[1].database)
   self.assertEqual('func2(STRING)', res.hiveUDFs[1].signature)
   self.assertEqual(1, res.errorCount)
   # Errors are plain dicts (keyed access), unlike the typed lists above.
   self.assertEqual('db1', res.errors[0]['database'])
   self.assertEqual('table2', res.errors[0]['tableName'])
   self.assertEqual('func2(INT)', res.errors[0]['impalaUDF'])
   self.assertEqual('message', res.errors[0]['error'])
   self.assertEqual(50, res.dataReplicationResult.progress)
   self.assertFalse(res.dryRun)
   self.assertEqual(res.runAsUser, 'systest')
Example #17
0
  def test_serde(self):
    """Round-trip a Parent object through utils.deserialize: nested child,
    heterogeneous children list, microsecond-precision date, and readOnly
    flag; then a second payload checks an empty children list."""
    JSON = '''
      {
        "child" : { "value" : "string1" },
        "children" : [
          { "value" : 1 },
          { "value" : "2" }
        ],
        "date" : "2013-02-12T12:17:15.831765Z",
        "readOnly" : true
      }
    '''
    obj = utils.deserialize(JSON, Parent)
    self.assertIsInstance(obj.child, Child)
    self.assertEqual('string1', obj.child.value)
    self.assertIsInstance(obj.children, list)
    self.assertEqual(2, len(obj.children))
    # Child values keep their JSON types: int 1 vs string "2".
    self.assertEqual(1, obj.children[0].value)
    self.assertEqual('2', obj.children[1].value)
    self.assertIsInstance(obj.date, datetime.datetime)
    self.assertEqual(2013, obj.date.year)
    self.assertEqual(2, obj.date.month)
    self.assertEqual(12, obj.date.day)
    self.assertEqual(12, obj.date.hour)
    self.assertEqual(17, obj.date.minute)
    self.assertEqual(15, obj.date.second)
    self.assertEqual(831765, obj.date.microsecond)
    self.assertTrue(obj.readOnly)

    JSON = '''
      {
        "children" : [ ]
      }
    '''
    obj = utils.deserialize(JSON, Parent)
    self.assertEquals([], obj.children)
    # NOTE(review): nested duplicate definition below — it shadows nothing,
    # is never invoked, and looks like a paste/merge artifact; confirm and
    # remove in a follow-up.
    def test_serde(self):
        JSON = '''
      {
        "child" : { "value" : "string1" },
        "children" : [
          { "value" : 1 },
          { "value" : "2" }
        ],
        "date" : "2013-02-12T12:17:15.831765Z",
        "readOnly" : true
      }
    '''
        obj = utils.deserialize(JSON, Parent)
        self.assertIsInstance(obj.child, Child)
        self.assertEqual('string1', obj.child.value)
        self.assertIsInstance(obj.children, list)
        self.assertEqual(2, len(obj.children))
        self.assertEqual(1, obj.children[0].value)
        self.assertEqual('2', obj.children[1].value)
        self.assertIsInstance(obj.date, datetime.datetime)
        self.assertEqual(2013, obj.date.year)
        self.assertEqual(2, obj.date.month)
        self.assertEqual(12, obj.date.day)
        self.assertEqual(12, obj.date.hour)
        self.assertEqual(17, obj.date.minute)
        self.assertEqual(15, obj.date.second)
        self.assertEqual(831765, obj.date.microsecond)
        self.assertTrue(obj.readOnly)

        JSON = '''
      {
        "children" : [ ]
      }
    '''
        obj = utils.deserialize(JSON, Parent)
        self.assertEquals([], obj.children)
Example #19
0
 def test_hdfs_cloud_arguments(self):
     """Deserialize ApiHdfsCloudReplicationArguments: inherited HDFS
     fields, cloud-specific sourceAccount, and absent destinationAccount."""
     RAW = '''{
   "sourceService" : {
     "peerName" : "vst2",
     "clusterName" : "Cluster 1 - CDH4",
     "serviceName" : "HDFS-1"
   },
   "sourcePath" : "/data",
   "destinationPath" : "/copy/data2",
   "mapreduceServiceName" : "MAPREDUCE-1",
   "schedulerPoolName" : "medium",
   "userName" : "systest",
   "dryRun" : false,
   "abortOnError" : true,
   "removeMissingFiles" : false,
   "preserveReplicationCount" : true,
   "preserveBlockSize" : true,
   "preservePermissions" : false,
   "skipTrash" : false,
   "replicationStrategy" : "DYNAMIC",
   "logPath" : "/tmp",
   "bandwidthPerMap" : "20",
   "preserveXAttrs" : false,
   "exclusionFilters" : ["ac"],
   "sourceAccount" : "someTestAccount"
 }'''
     args = utils.deserialize(RAW, ApiHdfsCloudReplicationArguments)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('vst2', args.sourceService.peerName)
     self.assertEqual('Cluster 1 - CDH4', args.sourceService.clusterName)
     self.assertEqual('HDFS-1', args.sourceService.serviceName)
     self.assertEqual('/data', args.sourcePath)
     self.assertEqual('/copy/data2', args.destinationPath)
     self.assertEqual('MAPREDUCE-1', args.mapreduceServiceName)
     self.assertEqual('medium', args.schedulerPoolName)
     self.assertEqual('systest', args.userName)
     self.assertFalse(args.dryRun)
     self.assertTrue(args.abortOnError)
     self.assertFalse(args.removeMissingFiles)
     self.assertTrue(args.preserveBlockSize)
     self.assertFalse(args.preservePermissions)
     self.assertTrue(args.preserveReplicationCount)
     self.assertFalse(args.skipTrash)
     self.assertEqual('DYNAMIC', args.replicationStrategy)
     self.assertFalse(args.preserveXAttrs)
     self.assertEqual('someTestAccount', args.sourceAccount)
     # destinationAccount is absent from the payload, so it stays None.
     self.assertIsNone(args.destinationAccount)
Example #20
0
 def test_hdfs_cloud_arguments(self):
   """Deserialize ApiHdfsCloudReplicationArguments and verify inherited
   HDFS fields plus the cloud-specific account fields."""
   RAW = '''{
     "sourceService" : {
       "peerName" : "vst2",
       "clusterName" : "Cluster 1 - CDH4",
       "serviceName" : "HDFS-1"
     },
     "sourcePath" : "/data",
     "destinationPath" : "/copy/data2",
     "mapreduceServiceName" : "MAPREDUCE-1",
     "schedulerPoolName" : "medium",
     "userName" : "systest",
     "dryRun" : false,
     "abortOnError" : true,
     "removeMissingFiles" : false,
     "preserveReplicationCount" : true,
     "preserveBlockSize" : true,
     "preservePermissions" : false,
     "skipTrash" : false,
     "replicationStrategy" : "DYNAMIC",
     "logPath" : "/tmp",
     "bandwidthPerMap" : "20",
     "preserveXAttrs" : false,
     "exclusionFilters" : ["ac"],
     "sourceAccount" : "someTestAccount"
   }'''
   args = utils.deserialize(RAW, ApiHdfsCloudReplicationArguments)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('vst2', args.sourceService.peerName)
   self.assertEqual('Cluster 1 - CDH4', args.sourceService.clusterName)
   self.assertEqual('HDFS-1', args.sourceService.serviceName)
   self.assertEqual('/data', args.sourcePath)
   self.assertEqual('/copy/data2', args.destinationPath)
   self.assertEqual('MAPREDUCE-1', args.mapreduceServiceName)
   self.assertEqual('medium', args.schedulerPoolName)
   self.assertEqual('systest', args.userName)
   self.assertFalse(args.dryRun)
   self.assertTrue(args.abortOnError)
   self.assertFalse(args.removeMissingFiles)
   self.assertTrue(args.preserveBlockSize)
   self.assertFalse(args.preservePermissions)
   self.assertTrue(args.preserveReplicationCount)
   self.assertFalse(args.skipTrash)
   self.assertEqual('DYNAMIC', args.replicationStrategy)
   self.assertFalse(args.preserveXAttrs)
   self.assertEqual('someTestAccount', args.sourceAccount)
   # destinationAccount is absent from the payload, so it stays None.
   self.assertIsNone(args.destinationAccount)
Example #21
0
 def test_hbase_arguments(self):
     """Deserialize ApiHBaseSnapshotPolicyArguments (tableRegExps, storage)."""
     RAW = '''{"tableRegExps" : "table1", "storage" : "LOCAL"}'''
     args = utils.deserialize(RAW, ApiHBaseSnapshotPolicyArguments)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('table1', args.tableRegExps)
     self.assertEqual('LOCAL', args.storage)
Example #22
0
 def test_hdfs_arguments(self):
     """Deserialize ApiHdfsSnapshotPolicyArguments (pathPatterns only)."""
     RAW = '''{"pathPatterns" : "/user/oozie"}'''
     args = utils.deserialize(RAW, ApiHdfsSnapshotPolicyArguments)
     # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
     self.assertEqual('/user/oozie', args.pathPatterns)
Example #23
0
 def test_hdfs_arguments(self):
   """Deserialize ApiHdfsSnapshotPolicyArguments (pathPatterns only)."""
   RAW = '''{"pathPatterns" : "/user/oozie"}'''
   args = utils.deserialize(RAW, ApiHdfsSnapshotPolicyArguments)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('/user/oozie', args.pathPatterns)
Example #24
0
 def test_hbase_arguments(self):
   """Deserialize ApiHBaseSnapshotPolicyArguments (tableRegExps, storage)."""
   RAW = '''{"tableRegExps" : "table1", "storage" : "LOCAL"}'''
   args = utils.deserialize(RAW, ApiHBaseSnapshotPolicyArguments)
   # assertEqual replaces the deprecated assertEquals alias (removed in 3.12).
   self.assertEqual('table1', args.tableRegExps)
   self.assertEqual('LOCAL', args.storage)
Example #25
0
  def test_schedule(self):
    """Deserialize a full ApiReplicationSchedule: schedule scalars, the
    history list of ApiReplicationCommand entries (with nested hiveResult
    and dataReplicationResult), alert flags, and hiveArguments."""
    RAW = '''{
      "id" : 39,
      "startTime" : "2012-12-10T23:11:31.041Z",
      "interval" : 1,
      "intervalUnit" : "DAY",
      "paused" : false,
      "nextRun" : "2013-01-15T23:11:31.041Z",
      "history" : [ {
        "id" : 738,
        "name" : "HiveReplicationCommand",
        "startTime" : "2013-01-15T18:28:24.895Z",
        "endTime" : "2013-01-15T18:30:49.446Z",
        "active" : false,
        "success" : true,
        "resultMessage" : "Hive Replication Finished Successfully.",
        "resultDataUrl" : "/cmf/command/738/download",
        "serviceRef" : {
          "clusterName" : "Cluster 1 - CDH4",
          "serviceName" : "HIVE-1"
        },
        "hiveResult" : {
          "tables" : [ {
            "database" : "default",
            "tableName" : "repl_test_1"
          }, {
            "database" : "default",
            "tableName" : "sample_07"
          }, {
            "database" : "default",
            "tableName" : "sample_08"
          } ],
          "errors" : [ ],
          "dataReplicationResult" : {
            "progress" : 100,
            "numFilesCopied" : 0,
            "numBytesCopied" : 0,
            "numFilesSkipped" : 3,
            "numBytesSkipped" : 92158,
            "numFilesDeleted" : 0,
            "numFilesCopyFailed" : 0,
            "numBytesCopyFailed" : 0,
            "dryRun" : false
          },
          "dryRun" : false
        }
      } ],
      "alertOnStart" : false,
      "alertOnSuccess" : false,
      "alertOnFail" : false,
      "alertOnAbort" : false,
      "hiveArguments" : {
        "sourceService" : {
          "peerName" : "vst2",
          "clusterName" : "Cluster 1 - CDH4",
          "serviceName" : "HIVE-1"
        },
        "force" : true,
        "replicateData" : true,
        "hdfsArguments" : {
          "mapreduceServiceName" : "MAPREDUCE-1",
          "dryRun" : false,
          "abortOnError" : false,
          "removeMissingFiles" : false,
          "preserveReplicationCount" : false,
          "preserveBlockSize" : false,
          "preservePermissions" : false
        },
        "dryRun" : false
      }
    }'''
    sched = utils.deserialize(RAW, ApiReplicationSchedule)
    # Top-level schedule fields; timestamps compare against the same
    # ISO-8601 strings parsed through the shared helper.
    self.assertEqual(39, sched.id)
    self.assertEqual(self._parse_time("2012-12-10T23:11:31.041Z"), sched.startTime)
    self.assertEqual('DAY', sched.intervalUnit)
    self.assertEqual(1, sched.interval)
    self.assertFalse(sched.paused)
    self.assertEqual(self._parse_time("2013-01-15T23:11:31.041Z"), sched.nextRun)
    self.assertFalse(sched.alertOnStart)
    self.assertIsNotNone(sched.hiveArguments)

    # History entries deserialize into typed ApiReplicationCommand objects,
    # including the deeply nested hiveResult / dataReplicationResult.
    self.assertEqual(1, len(sched.history))
    self.assertIsInstance(sched.history[0], ApiReplicationCommand)
    self.assertEqual('default', sched.history[0].hiveResult.tables[0].database)
    self.assertEqual(92158, sched.history[0].hiveResult.dataReplicationResult.numBytesSkipped)