Example #1
def test_equals_file_string(self):
    # loading queries from a string and from a file should yield the same run status
    files = self.temp_file()
    try:
        expend_q = Hive.load_queries_from_string("show tables").use_database("default")
        expend_f = Hive.load_queries_from_file(files).use_database("default")
        self.assertEqual(expend_q.run().is_ok(), expend_f.run().is_ok())
    finally:
        self.delete_local(files)
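
The pattern in this test also works outside a test harness: build a runner from an inline query, point it at a database, run it, and inspect the status. A minimal sketch, assuming the Hive client used above; the import path is an assumption and may differ in the actual project layout:

    # Minimal sketch, assuming Merlin's Hive client; the import path is assumed.
    from merlin.tools.hive import Hive

    result = Hive.load_queries_from_string("show tables").use_database("default").run()
    print("query succeeded:", result.is_ok())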
Example #2
def test_exist_table_file(self):
    # queries written to a temp file should load and run successfully
    files, msg = self.create_database()
    msg_f = self.temp_file(msg=msg)
    try:
        res = Hive.load_queries_from_file(msg_f)
        self.assertEqual(True, res.run().is_ok())
    finally:
        self.delete_local(msg_f)
        self.delete_local(files)
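
The helpers used by these tests (temp_file, delete_local, create_database) are not shown in the snippets. A hypothetical stand-in for the first two, using only the standard library, to illustrate what the fixtures are expected to do:

    # Hypothetical stand-ins for the test fixtures above; not the project's actual helpers.
    import os
    import tempfile

    def temp_file(msg=""):
        """Write msg to a temporary file and return its path."""
        fd, path = tempfile.mkstemp(suffix=".hql")
        with os.fdopen(fd, "w") as f:
            f.write(msg)
        return path

    def delete_local(path):
        """Remove a local file if it still exists."""
        if os.path.exists(path):
            os.remove(path)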
Example #3
File: setup.py  Project: Mbaroudi/Merlin
                                       "resources/file_12.11.2014_.txt"))
    ftp.upload(local_path=os.path.join(os.path.dirname(__file__),
                                       "resources/file_13.11.2014_.txt"))
    ftp.upload(local_path=os.path.join(os.path.dirname(__file__),
                                       "resources/file_14.11.2014_.txt"))

    # create directories in HDFS and upload the files
    hdfs_file = HDFS("{0}/raw/12.11.2014".format(BASE_DIR))
    hdfs_file.create(directory=True)
    local_file = LocalFS(path=os.path.join(os.path.dirname(__file__),
                                           'resources/file_12.11.2014_.txt'))
    local_file.copy_to_hdfs(hdfs_path="{0}/raw/12.11.2014".format(BASE_DIR))

    hdfs_file = HDFS("{0}/raw/13.11.2014".format(BASE_DIR))
    hdfs_file.create(directory=True)
    local_file = LocalFS(path=os.path.join(os.path.dirname(__file__),
                                           'resources/file_13.11.2014_.txt'))
    local_file.copy_to_hdfs(hdfs_path="{0}/raw/13.11.2014".format(BASE_DIR))

    # create empty local directory 'tmp' in folder 'resources'
    local_file = LocalFS(
        path=os.path.join(os.path.dirname(__file__), 'resources/tmp'))
    if local_file.exists():
        local_file.delete_directory()
    local_file.create(directory=True)

    # create HIVE external table with partition
    hive = Hive.load_queries_from_file(
        path=os.path.join(os.path.dirname(__file__), "resources/script.hql"))
    hive.run()
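
The upload block above repeats the same three steps per date: create the HDFS directory, wrap the local file, copy it in. A sketch that factors the repetition into a helper, using only the HDFS and LocalFS calls shown in the example; the import paths are assumptions:

    # Sketch only: import paths for HDFS/LocalFS are assumed, not confirmed here.
    import os
    from merlin.fs.hdfs import HDFS
    from merlin.fs.localfs import LocalFS

    def stage_raw_file(day, base_dir):
        # create the dated HDFS directory and copy the matching local resource into it
        hdfs_dir = "{0}/raw/{1}".format(base_dir, day)
        HDFS(hdfs_dir).create(directory=True)
        local = LocalFS(path=os.path.join(os.path.dirname(__file__),
                                          "resources/file_{0}_.txt".format(day)))
        local.copy_to_hdfs(hdfs_path=hdfs_dir)

    # BASE_DIR comes from the surrounding setup script
    for day in ("12.11.2014", "13.11.2014"):
        stage_raw_file(day, BASE_DIR)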
Example #4
File: setup.py  Project: epam/Merlin
    # upload file to directory on FTP
    ftp.upload(local_path=os.path.join(os.path.dirname(__file__), "resources/file_12.11.2014_.txt"))
    ftp.upload(local_path=os.path.join(os.path.dirname(__file__), "resources/file_13.11.2014_.txt"))
    ftp.upload(local_path=os.path.join(os.path.dirname(__file__), "resources/file_14.11.2014_.txt"))

    # create directories in HDFS and upload the files
    hdfs_file = HDFS("{0}/raw/12.11.2014".format(BASE_DIR))
    hdfs_file.create(directory=True)
    local_file = LocalFS(path=os.path.join(os.path.dirname(__file__),
                                           'resources/file_12.11.2014_.txt'))
    local_file.copy_to_hdfs(hdfs_path="{0}/raw/12.11.2014".format(BASE_DIR))

    hdfs_file = HDFS("{0}/raw/13.11.2014".format(BASE_DIR))
    hdfs_file.create(directory=True)
    local_file = LocalFS(path=os.path.join(os.path.dirname(__file__),
                                           'resources/file_13.11.2014_.txt'))
    local_file.copy_to_hdfs(hdfs_path="{0}/raw/13.11.2014".format(BASE_DIR))

    # create empty local directory 'tmp' in folder 'resources'
    local_file = LocalFS(path=os.path.join(os.path.dirname(__file__),
                                           'resources/tmp'))
    if local_file.exists():
        local_file.delete_directory()
    local_file.create(directory=True)

    # create HIVE external table with partition
    hive = Hive.load_queries_from_file(path=os.path.join(os.path.dirname(__file__), "resources/script.hql"))
    hive.run()
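
The setup ends with hive.run() without checking the outcome. The same run().is_ok() check used in the test examples could confirm that the external-table script actually succeeded; a minimal sketch:

    # Minimal sketch: verify the HQL script ran, reusing the run().is_ok() pattern above.
    hive = Hive.load_queries_from_file(
        path=os.path.join(os.path.dirname(__file__), "resources/script.hql"))
    if not hive.run().is_ok():
        raise RuntimeError("resources/script.hql did not run successfully")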