def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() hive.data2table(filepath=options['path'], table=options['table'], overwrite=options['path'], # TODO partition=options['partition'])
def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() if options['path']: for path in (hive.check_for_content(options['path'], flags['r'])): grass.message(path)
def main(): conn = ConnectionManager() conn.get_current_connection(options["conn_type"]) hive = conn.get_hook() result = hive.execute(options['hql'], options['fatch']) if flags['f']: for i in result: print(i)
def main():
    if not options['columns'] and not options['struct']:
        grass.fatal("Either <columns> or <struct> parameter must be defined")
    conn = ConnectionManager()
    conn.get_current_connection(options["driver"])
    hive = conn.get_hook()
    hive.create_geom_table(table=options['table'],
                           field=options['columns'],
                           stored=options['stored'],
                           serde=options['serde'],
                           outputformat=options['outformat'],
                           external=flags['e'],
                           recreate=flags['d'],
                           filepath=options['jsonpath'],
                           overwrite=flags['o'])
def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() hive.create_csv_table(table=options['table'], field=options['columns'], partition=options['partition'], delimiter=options['delimeter'], stored=options['stored'], serde=options['serde'], outputformat=options['outputformat'], external=flags['e'], recreate=flags['d'], filepath=options['csvpath'], overwrite=flags['o'])
def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() if not options['schema']: options['schema'] = 'default' out = hive.get_results(hql=options['hql'], schema=options['schema']) if options['out']: with open(out, 'rw') as io: io.writelines(out) io.close() else: print out
def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() if not options["schema"]: options["schema"] = "default" out = hive.get_results(hql=options["hql"], schema=options["schema"]) if options["out"]: with open(out, "rw") as io: io.writelines(out) io.close() else: print out
def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() if flags["p"]: hive.show_tables() if flags["d"]: if not options["table"]: grass.fatal("With flag <d> table must be defined") hive.describe_table(options["table"], True) if flags["h"]: if not options["table"]: grass.fatal("With flag <h> table must be defined") print(hive.find_table_location(options["table"])) if options["path"]: hive.check_for_content(options["path"])
def main(): conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() hive.create_csv_table( table=options["table"], field=options["columns"], partition=options["partition"], delimiter=options["delimeter"], stored=options["stored"], serde=options["serde"], outputformat=options["outputformat"], external=flags["e"], recreate=flags["d"], filepath=options["csvpath"], overwrite=flags["o"], )
def main(): if not options["columns"] and not options["struct"]: grass.fatal("Must be defined <attributes> or <struct> parameter") conn = ConnectionManager() conn.get_current_connection(options["driver"]) hive = conn.get_hook() hive.create_geom_table( table=options["table"], field=options["columns"], stored=options["stored"], serde=options["serde"], outputformat=options["outformat"], external=flags["e"], recreate=flags["d"], filepath=options["jsonpath"], overwrite=flags["o"], )
def main():
    tmp_dir = os.path.join(get_tmp_folder(), options['out'])
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    transf = GrassHdfs(options['driver'])
    table_path = options['hdfs']
    if options['table']:
        conn = ConnectionManager()
        if not conn.get_current_connection('hiveserver2'):
            grass.fatal("Cannot connect to hive for table description. "
                        "Use param hdfs without param table")
        hive = conn.get_hook()
        table_path = hive.find_table_location(options['table'])
        tmp_dir = os.path.join(tmp_dir, options['table'])
    if not transf.download(hdfs=table_path, fs=tmp_dir):
        return
    files = os.listdir(tmp_dir)
    map_string = ''
    for block in files:
        map_name = '%s_%s' % (options['out'], block)  # renamed from 'map' to avoid shadowing the builtin
        block = os.path.join(tmp_dir, block)
        map_build = GrassMapBuilderEsriToEsri(block,
                                              map_name,
                                              options['attributes'])
        try:
            map_build.build()
            map_string += '%s,' % map_name
        except Exception as e:
            grass.warning("Error: %s\n Map < %s > conversion failed" % (e, block))
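# All of the main() functions above rely on module-level options/flags
# dictionaries. In GRASS addons these come from the standard parser
# boilerplate; a minimal sketch of that entry point, following the usual
# GRASS Python scripting convention (not taken from these modules):
import grass.script as grass

if __name__ == "__main__":
    options, flags = grass.parser()  # fills the dicts from the module's declared interface
    main()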