Example #1
File: test_hdfs.py Project: epam/Merlin
 def test_delete_dir(self):
     local = LocalFS(os.path.dirname(os.path.realpath(__file__)))
     hdfs_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     local.copy_to_hdfs(hdfs_file.path)
     self.assertTrue(hdfs_file.exists(), "Target HDFS dir does not exist")
     hdfs_file.delete(recursive=True)
     self.assertFalse(hdfs_file.exists(), "Target HDFS dir was not deleted")
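Nearly every test in this listing follows the same shape: build a uniquely named path under /tmp, exercise it, and clean it up in a finally block. As a minimal sketch (not code from the Merlin project, and the temp_hdfs_path name is made up), that pattern could be factored into a reusable context manager; it assumes only the HDFS wrapper behaviour visible in these examples, namely exists() and delete(recursive=True).

import os
import uuid
from contextlib import contextmanager

from merlin.fs.hdfs import HDFS  # same wrapper class used throughout these examples


@contextmanager
def temp_hdfs_path(prefix="/tmp"):
    """Yield a unique HDFS path under prefix and remove it afterwards."""
    node = HDFS(os.path.join(prefix, str(uuid.uuid4())))
    try:
        yield node
    finally:
        if node.exists():
            node.delete(recursive=True)

A test could then write "with temp_hdfs_path() as hdfs_file:" instead of repeating the try/finally cleanup by hand.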
Example #2
    def test_apply_hdfs_snapshot(self):
        _config_file = os.path.join(os.path.dirname(__file__),
                                    'resources',
                                    'bootsrap',
                                    'bootstrap.ini')
        _raw_sales_dir = HDFS('/tmp/raw/sales')
        _raw_users_dir = HDFS('/tmp/raw/users')
        _raw_tmp_dir = HDFS('/tmp/raw/tmp')
        try:
            # run bootstrap script
            metastore = IniFileMetaStore(file=_config_file)
            _config = Configuration.load(metastore)
            apply_hdfs_snapshot(_config)

            # asserts
            # assert directories were created
            self.assertTrue(_raw_sales_dir.exists(), "Directory '/tmp/raw/sales' was not created")
            self.assertTrue(_raw_users_dir.exists(), "Directory '/tmp/raw/users' was not created")
            self.assertTrue(_raw_tmp_dir.exists(), "Directory '/tmp/raw/tmp' was not created")
            # assert acls were applied
            sales_dir_acls = _raw_sales_dir.get_acls()
            users_dir_acls = _raw_users_dir.get_acls()

            self.assertIsNotNone(sales_dir_acls, '/tmp/raw/sales : ACLs were not applied')
            self.assertTrue('group:sys-pii:r-x' in sales_dir_acls, '/tmp/raw/sales : pii acl was not applied')
            self.assertTrue('group:sales:r--' in sales_dir_acls, '/tmp/raw/sales : sales acl was not applied')

            self.assertIsNotNone(users_dir_acls, '/tmp/raw/users : ACLs were not applied')
            self.assertTrue('group:sys-pii:r-x' in users_dir_acls, '/tmp/raw/users : pii acl was not applied')
        finally:
            _test_basedir = HDFS('/tmp/raw')
            _test_basedir.delete_directory()
            self.assertFalse(_test_basedir.exists(), "ERROR: clean up failed")
Example #3
File: test_hdfs.py Project: epam/Merlin
 def _create_non_empty_dir_(self, path):
     _dir = HDFS(path)
     _dir.create_directory()
     self.assertTrue(_dir.exists(), "source directory not found")
     for i in range(5):
         _file = HDFS(os.path.join(path, str(uuid.uuid4())))
         # alternate between creating subdirectories and plain files
         _file.create(directory=(i % 2 == 0))
         self.assertTrue(_file.exists(), "File/directory was not created")
     return _dir
Example #4
File: test_hdfs.py Project: epam/Merlin
 def test_create_directory(self):
     new_dir = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     self.assertFalse(new_dir.exists(), "Directory already exists")
     try:
         new_dir.create_directory()
         self.assertTrue(new_dir.exists(), "Directory was not created")
         self.assertTrue(new_dir.is_directory())
     finally:
         new_dir.delete(recursive=True)
         self.assertFalse(new_dir.exists(), "Directory was not removed")
Example #5
File: test_hdfs.py Project: epam/Merlin
 def test_create_file(self):
     new_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     self.assertFalse(new_file.exists(), "File already exists")
     try:
         new_file.create_file()
         self.assertTrue(new_file.exists(), "File was not created")
         self.assertFalse(new_file.is_directory(), "New file should not be a folder")
     finally:
         new_file.delete()
         self.assertFalse(new_file.exists(), "File was not removed")
Example #6
File: test_hdfs.py Project: epam/Merlin
 def test_list_files(self):
     basedir = HDFS("/tmp")
     new_file = HDFS("/tmp/test.txt")
     try:
         new_file.create(directory=False)
         self.assertTrue(new_file.exists(), "File was not created")
         files = basedir.list_files()
         self.assertTrue(new_file in files)
     finally:
         new_file.delete()
         self.assertFalse(new_file.exists(), "File was not deleted")
Example #7
File: test_hdfs.py Project: epam/Merlin
 def test_get_replicas(self):
     self.assertEqual("0", HDFS("/").replicas(), "Root dir replicas should be 0")
     self.assertNotEqual("0", HDFS("/tmp").replicas(), "dir replicas should be 0")
     name = uuid.uuid4()
     hdfs_file = HDFS("/tmp/{0}".format(name))
     hdfs_file.create_file()
     shell.execute_shell_command("hadoop dfs", "-setrep -w 1 /tmp/{0}".format(name))
     if hdfs_file.exists():
         self.assertEqual("1", hdfs_file.replicas(), "Number replicas of file must be 1")
         hdfs_file.delete()
         self.assertFalse(hdfs_file.exists())
Example #8
File: test_hdfs.py Project: epam/Merlin
 def should_create_file_recursively(self):
     _base_dir = os.path.join("/tmp", str(uuid.uuid4()))
     _path = os.path.join(_base_dir, str(uuid.uuid4()), str(uuid.uuid4()), "file.txt")
     _file = HDFS(_path)
     self.assertFalse(_file.exists(), "File already exists")
     try:
         _file.create_file(recursive=True)
         self.assertTrue(_file.exists(), "File was not created")
         self.assertFalse(_file.is_directory(), "New file should not be a directory")
     finally:
         HDFS(_base_dir).delete_directory()
         self.assertFalse(_file.exists(), "File was not removed")
         self.assertFalse(HDFS(_base_dir).exists(), "Base dir was not removed")
Example #9
File: test_hdfs.py Project: epam/Merlin
 def test_copy_to_local(self):
     new_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     local_path = os.path.join("/tmp", "copied_from_hdfs")
     self.assertFalse(os.path.exists(local_path))
     try:
         new_file.create_file()
         self.assertTrue(new_file.exists(), "File was not created")
         new_file.copy_to_local(local_path)
         self.assertTrue(os.path.exists(local_path), "File was not copied from HDFS")
     finally:
         new_file.delete()
         self.assertFalse(new_file.exists(), "File was not removed")
         os.remove(local_path)
         self.assertFalse(os.path.exists(local_path))
Example #10
File: test_hdfs.py Project: epam/Merlin
 def test_copy_empty_dir(self):
     _dir = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     dst = HDFS("/tmp/dst_" + str(uuid.uuid4()))
     try:
         _dir.create(directory=True)
         self.assertTrue(_dir.exists(), "directory not found")
         self.assertFalse(dst.exists(), "dst directory already exists")
         _dir.copy(dst)
         self.assertTrue(dst.exists(), "directory was not copied")
     finally:
         _dir.delete(True)
         dst.delete(True)
         self.assertFalse(_dir.exists(), "File was not deleted")
         self.assertFalse(dst.exists(), "File was not deleted")
Example #11
File: test_hdfs.py Project: epam/Merlin
 def test_get_modification_time(self):
     now = datetime.now().strftime("%Y-%m-%d")
     _dir = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     _file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     try:
         _dir.create_directory()
         _file.create_file()
         self.assertTrue(_dir.exists(), "Dir was not created")
         self.assertTrue(_file.exists(), "File was not created")
         self.assertEqual(now, _dir.modification_time().strftime("%Y-%m-%d"), "Error: dir modification time")
         self.assertEqual(now, _file.modification_time().strftime("%Y-%m-%d"), "Error: File modification time")
     finally:
         _dir.delete_directory()
         _file.delete()
Example #12
File: test_hdfs.py Project: epam/Merlin
 def test_move_non_empty_dir(self):
     dst = HDFS("/tmp/dst_" + str(uuid.uuid4()))
     _dir = None
     try:
         _dir = self._create_non_empty_dir_(os.path.join("/tmp", str(uuid.uuid4())))
         self.assertFalse(dst.exists(), "dst directory already exists")
         _dir.move(dst.path)
         self.assertFalse(_dir.exists(), "original directory should be deleted")
         self.assertTrue(dst.exists(), "directory move operation failed")
     finally:
         if _dir:
             _dir.delete_directory()
             self.assertFalse(_dir.exists(), "Folder was not deleted")
         dst.delete_directory()
         self.assertFalse(dst.exists(), "Dst Folder was not deleted")
Example #13
def clean_resources():
    """
    Cleans up resources left over from a previous flow run.
    """
    hdfs_file = HDFS("{0}/data_to_export".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete(recursive=True)

    hdfs_file = HDFS("{0}/data_from_import".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete(recursive=True)

    hdfs_file = HDFS("{0}/rdbms.password".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete()
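The three blocks above differ only in the target path and the recursion flag, so the same cleanup can be expressed as one loop. This is a sketch under the same assumptions as the example above (BASE_DIR and the HDFS wrapper with exists() and delete()); it is not code taken from the project.

def clean_resources():
    """
    Clean up resources left over from a previous flow run (loop form).
    """
    # (path, delete recursively?) pairs; BASE_DIR as defined by the flow module.
    targets = [("{0}/data_to_export".format(BASE_DIR), True),
               ("{0}/data_from_import".format(BASE_DIR), True),
               ("{0}/rdbms.password".format(BASE_DIR), False)]
    for path, recursive in targets:
        hdfs_file = HDFS(path)
        if hdfs_file.exists():
            if recursive:
                hdfs_file.delete(recursive=True)
            else:
                hdfs_file.delete()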
Example #14
File: test_hdfs.py Project: epam/Merlin
 def test_recursive_list_files(self):
     basedir = HDFS("/tmp")
     new_folder = HDFS("/tmp/test123")
     new_file = HDFS("/tmp/test123/test.txt")
     try:
         new_folder.create(directory=True)
         self.assertTrue(new_folder.exists(), "Folder was not created")
         new_file.create(directory=False)
         self.assertTrue(new_file.exists(), "File was not created")
         files = basedir.recursive_list_files()
         self.assertTrue(new_file in files)
         self.assertTrue(new_folder in files)
     finally:
         new_folder.delete(recursive=True)
         self.assertFalse(new_folder.exists(), "Folder was not deleted")
         self.assertFalse(new_file.exists(), "File inside the folder was not deleted")
Example #15
File: test_hdfs.py Project: epam/Merlin
 def test_distcp(self):
     directory = HDFS("/tmp/bar")
     directory.create()
     new_file = HDFS("/tmp/test_dist.txt")
     new_file.create(directory=False)
     _host = "sandbox.hortonworks.com"
     try:
         self.assertTrue(new_file.exists(), "File was not created")
         _file = HDFS("hdfs://{host}:8020/tmp/test_dist.txt".format(host=_host))
         _file.distcp(dest="hdfs://{host}:8020/tmp/bar/test_dist.txt".format(host=_host))
         file_after_copy = HDFS("/tmp/bar/test_dist.txt")
         self.assertTrue(file_after_copy.exists(), "File was not copied")
     finally:
         new_file.delete()
         directory.delete(recursive=True)
         self.assertFalse(new_file.exists(), "File was not deleted")
         self.assertFalse(directory.exists(), "Directory was not deleted")
Example #16
File: test_hdfs.py Project: epam/Merlin
 def should_raise_error_mkdir_not_recursive(self):
     _base_dir = os.path.join("/tmp", str(uuid.uuid4()))
     _path = os.path.join(_base_dir, str(uuid.uuid4()), str(uuid.uuid4()))
     _dir = HDFS(_path)
     self.assertFalse(HDFS(_base_dir).exists(), "Folder already exists")
     try:
         self.assertRaises(FileSystemException, _dir.create_directory, recursive=False)
     finally:
         self.assertFalse(_dir.exists(), "File was created")
Example #17
File: test_hdfs.py Project: epam/Merlin
 def test_get_permissions(self):
     self.assertEqual("drwxr-xr-x", HDFS("/").permissions(), "Root dir permissions should be 'drwxr-xr-x'")
     # Permissions to '/tmp' folder are different on different CDH versions
     # self.assertEqual("drwxrwxrwt", HDFS("/tmp").permissions(), "Tmp dir permissions should be 'drwxrwxrwt'")
     hbase_file = HDFS("/hbase/hbase.id")
     if hbase_file.exists():
         self.assertEqual(
             "-rw-r--r--", hbase_file.permissions(), "/hbase/hbase.id permissions should be '-rw-r--r--'"
         )
Example #18
File: test_hdfs.py Project: epam/Merlin
 def test_file_size(self):
     local = LocalFS(os.path.realpath(__file__))
     hdfs_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     try:
         local.copy_to_hdfs(hdfs_file.path)
         self.assertTrue(hdfs_file.exists(), "Local file was not copied to HDFS")
         self.assertEqual(hdfs_file.size(), local.size())
     finally:
         hdfs_file.delete()
Example #19
File: test_hdfs.py Project: epam/Merlin
 def test_create(self):
     new_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     new_dir = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     # test new file creation
     try:
         new_file.create(directory=False)
         self.assertTrue(new_file.exists(), "File was not created")
         self.assertFalse(new_file.is_directory(), "New file should not be a directory")
     finally:
         new_file.delete()
         self.assertFalse(new_file.exists(), "File was not removed")
     # test new folder creation
     try:
         new_dir.create(directory=True)
         self.assertTrue(new_dir.exists(), "Directory was not created")
         self.assertTrue(new_dir.is_directory(), "New file should be a directory")
     finally:
         new_dir.delete(recursive=True)
         self.assertFalse(new_dir.exists(), "Directory was not removed")
Example #20
File: test_hdfs.py Project: epam/Merlin
 def test_dir_size(self):
     local_basedir = os.path.dirname(os.path.realpath(__file__))
     local = LocalFS(os.path.join(local_basedir, "resources", "test_dir_size"))
     hdfs_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     try:
         local.copy_to_hdfs(hdfs_file.path)
         self.assertTrue(hdfs_file.exists(), "Local file was not copied to HDFS")
         expected_fsize = local.size()
         self.assertEqual(hdfs_file.size(), expected_fsize)
     finally:
         hdfs_file.delete(recursive=True)
Example #21
File: test_hdfs.py Project: epam/Merlin
 def test_merge(self):
     basedir = os.path.dirname(os.path.realpath(__file__))
     local = LocalFS(os.path.join(basedir, "resources", "test_merge"))
     hdfs_file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     merged_file = LocalFS(os.path.join(basedir, "resources", "merged.txt"))
     try:
         local.copy_to_hdfs(hdfs_file.path)
         self.assertTrue(hdfs_file.exists(), "Local file was not copied to HDFS")
         hdfs_file.merge(merged_file.path)
         self.assertTrue(merged_file.exists(), "merged file was not copied to local fs")
     finally:
         hdfs_file.delete_directory()
         merged_file.delete()
Example #22
    def test_mr_job_command_generation_with_arguments(self):
        _job_name = "test_mr_job_%s" % uuid.uuid4()

        _base_dir = HDFS(os.path.join("/tmp", _job_name))
        _base_dir.create_directory()
        try:
            jar = os.path.join(os.path.dirname(__file__), 'resources', 'mapreduce', 'hadoop-mapreduce-examples.jar')
            # configure job inputs
            _job_input = HDFS(os.path.join(_base_dir.path, "input"))
            _job_input.create_directory()
            LocalFS(os.path.join(
                os.path.dirname(__file__),
                'resources',
                'mapreduce', 'raw-data.txt')
            ).copy_to_hdfs(
                _job_input.path
            )

            # configure job output
            _job_output = HDFS(os.path.join(_base_dir.path, "output"))
            if not os.path.exists(jar):
                self.skipTest("'%s' not found" % jar)

            job = MapReduce.prepare_mapreduce_job(jar=jar,
                                                  main_class="wordcount",
                                                  name=_job_name) \
                .with_config_option("split.by", "'\\t'") \
                .with_number_of_reducers(3) \
                .with_arguments(_job_input.path, _job_output.path)
            _command_submission_result = job.run()
            _command_submission_result.if_failed_raise(AssertionError("Cannot run MR job"))
            _job_status = job.status()
            self.assertTrue(_job_status is not None and _job_status.is_succeeded(), "MR job Failed")
            self.assertTrue(_job_output.exists(), "Error: empty job output")
            # check counters
            self.assertEqual(6, _job_status.counter(group='File System Counters',
                                                    counter='HDFS: Number of write operations'))
            self.assertEqual(1, _job_status.counter(group='Job Counters', counter='Launched map tasks'))
            self.assertEqual(3, _job_status.counter(group='Job Counters', counter='Launched reduce tasks'))
            self.assertEqual(2168, _job_status.counter(group='File Input Format Counters', counter='Bytes Read'))
        finally:
            _base_dir.delete_directory()
Example #23
File: test_hdfs.py Project: epam/Merlin
 def test_move_empty_dir(self):
     _dir = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     dst = HDFS("/tmp/dst_" + str(uuid.uuid4()))
     try:
         _dir.create(directory=True)
         self.assertTrue(_dir.exists(), "directory not found")
         self.assertFalse(dst.exists(), "destination directory already exists")
         _dir.move(dst.path)
         self.assertFalse(_dir.exists(), "Original directory was not removed")
         self.assertTrue(dst.exists(), "destination directory was not created")
     finally:
         _dir.delete(True)
         dst.delete(True)
         self.assertFalse(_dir.exists(), "File was not deleted")
         self.assertFalse(dst.exists(), "File was not deleted")
Example #24
File: test_hdfs.py Project: epam/Merlin
 def test_move_file(self):
     _file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     dst = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     try:
         _file.create_file()
         self.assertTrue(_file.exists(), "File was not created")
         self.assertFalse(dst.exists(), "Destination file should not exist")
         _file.move(dst.path)
         self.assertFalse(_file.exists(), "Original file should be deleted")
         self.assertTrue(dst.exists(), "Destination file should be created")
     finally:
         _file.delete()
         dst.delete()
         self.assertFalse(_file.exists(), "File was not deleted")
         self.assertFalse(dst.exists(), "destination file was not deleted")
Example #25
File: test_hdfs.py Project: epam/Merlin
 def test_copy_file(self):
     _file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     dst = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     try:
         _file.create_file()
         self.assertTrue(_file.exists(), "original file not found")
         self.assertFalse(dst.exists(), "destination file already exists")
         _file.create()
         _file.copy(dst)
         self.assertTrue(dst.exists(), "file was not copied")
         self.assertTrue(_file.exists(), "original file should not be deleted")
     finally:
         _file.delete()
         dst.delete()
         self.assertFalse(_file.exists(), "File was not deleted")
         self.assertFalse(dst.exists(), "destination file was not deleted")
Example #26
File: test_sqoop.py Project: epam/Merlin
    def test_import_to_hive(self):
        _path = HDFS(os.path.join('/user', getpass.getuser(), 'table_name'))
        try:
            if _path.exists():
                _path.delete(recursive=_path.is_directory())
                # shell.execute_shell_command('hadoop fs', '-rm -r /user/', getpass.getuser(), '/table_name')
            cmd = Sqoop.import_data().from_rdbms(
                host=MYSQL_SERVER,
                rdbms="mysql",
                username="******",
                password_file="{0}/rdbms.password".format(BASE_DIR),
                database="sqoop_tests"
            ).table(
                table="table_name"
            ).to_hive().run()

            # self.assertEquals(cmd.status, 0, cmd.stderr)
            # result = shell.execute_shell_command('hadoop fs', '-du -s /user/hive/warehouse/table_name/part-m-*')
            # self.assertNotEqual(result.stdout.split(' ')[0], '0', result.stdout)
        finally:

            shell.execute_shell_command('hive', "-e 'DROP TABLE IF EXISTS table_name'")
Example #27
    def _template_streaming_job_(self, base_dir="/tmp", map_only_job=False):
        if not os.path.exists(HADOOP_STREAMING_JAR):
            self.skip("Cannot allocate %s" % HADOOP_STREAMING_JAR)
        _hdfs_basdir = HDFS(base_dir)
        if not _hdfs_basdir.exists():
            _hdfs_basdir.create_directory()
        _job_input = HDFS(os.path.join(_hdfs_basdir.path, "input"))
        _job_input.create_directory()
        _job_output = HDFS(os.path.join(_hdfs_basdir.path, "output"))
        home = os.path.dirname(__file__)
        _mapper = os.path.join(home, 'resources', 'mapreduce', 'mapper.py')
        _reducer = os.path.join(home, 'resources', 'mapreduce', 'reducer.py')

        LocalFS(
            os.path.join(os.path.dirname(__file__), 'resources', 'mapreduce', 'raw-data.txt')
        ).copy_to_hdfs(
            _job_input.path
        )

        return MapReduce.prepare_streaming_job(name="test-mr-streaming-job{}".format(str(uuid.uuid4())), jar=HADOOP_STREAMING_JAR) \
            .take(_job_input.path) \
            .process_with(mapper=_mapper, reducer=None if map_only_job else _reducer) \
            .save(_job_output.path)
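The helper above only builds the streaming job; a caller still has to submit it and check the outcome. A sketch of how that might look, reusing the run() / if_failed_raise() / status() calls shown in the wordcount example earlier in this listing (the test name and base_dir value are made up for illustration, and cleanup of the base directory is omitted):

    def test_streaming_job(self):
        # Build the job with the helper above, then submit it and verify the result.
        _job = self._template_streaming_job_(base_dir="/tmp/streaming_test")
        _submission_result = _job.run()
        _submission_result.if_failed_raise(AssertionError("Cannot run streaming MR job"))
        _job_status = _job.status()
        self.assertTrue(_job_status is not None and _job_status.is_succeeded(), "Streaming MR job failed")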
Example #28
# for additional information regarding copyright ownership and licensing.
#

from merlin.tools.hive import Hive
from ConfigParser import RawConfigParser
from merlin.fs.localfs import LocalFS
from merlin.fs.hdfs import HDFS
from merlin.fs.ftp import ftp_client
import os

BASE_DIR = "/tmp"

if __name__ == "__main__":

    hdfs_file = HDFS("{0}/raw".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete(recursive=True)

    config = RawConfigParser()
    config.read(os.path.join(os.path.dirname(__file__), "resources/ftp_config.ini"))
    host_download = config.get("ftp", "host.download")
    user_name = config.get("ftp", "user.name")
    password = config.get("ftp", "password")
    path = config.get("ftp", "path")
    ftp = ftp_client(host=host_download,
                     login=user_name,
                     password=password,
                     path="/tmp")

    if ftp.exists():
        ftp.delete(recursive=True)
Example #29
File: flow.py Project: Mbaroudi/Merlin
def on_flow_failed(context):
    hdfs_file = HDFS("{0}/raw".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete(recursive=True)
Example #30
File: test_hdfs.py Project: epam/Merlin
 def test_get_owner(self):
     self.assertEqual("hdfs", HDFS("/").owner(), "ERROR: Root dir owner")
     self.assertEqual("hdfs", HDFS("/tmp").owner(), "ERROR: /tmp dir owner")
     hbase_file = HDFS("/hbase/hbase.id")
     if hbase_file.exists():
         self.assertEqual("hbase", HDFS("/hbase/hbase.id").owner(), "ERROR: /hbase/hbase.id dir owner")
Example #31
File: flow.py Project: epam/Merlin
def on_flow_failed(context):
    hdfs_file = HDFS("{0}/raw".format(BASE_DIR))
    if hdfs_file.exists():
        hdfs_file.delete(recursive=True)
Example #32
File: test_hdfs.py Project: epam/Merlin
 def test_delete_file(self):
     _file = HDFS(os.path.join("/tmp", str(uuid.uuid4())))
     _file.create_file()
     self.assertTrue(_file.exists(), "Target file can not be found")
     _file.delete()
     self.assertFalse(_file.exists(), "Target file was not deleted")