def config_home():
    '''
    Receive a JSON mapping of config file names to new contents and write
    each file out via write_to_file().

    :return: '{"success": 1}' on success, '{"success": 0, "msg": [...]}' on
             error, or the API-key failure message
    '''
    log.info("\nChanging Configuration\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    try:
        payload = json.loads(request.data.decode())
        # Each key is a target filename, each value its full new contents.
        for filename, final_content in payload.items():
            write_to_file(filename, final_content)
    except Exception as e:
        log.error("\nError while changing configuration\n")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
    log.info("\nSuccessfully changed configuration\n")
    return '{"success": 1}'
# Example #2
def dfs_stop():
    '''
    Stops dfs services (namenode, datanode, zkfc, journalnode).

    :return: JSON string: the stop-command result, or '{"success": 1}' if no
             DFS service was RUNNING, or the API-key failure message
    '''
    log.info("\nStopping DFS Services\n")
    header_key = request.headers.get('API-KEY')
    api_status = check_apiKey(header_key)
    if api_status != 'success':
        return api_status
    conn = get_postgres_connection()
    cur = conn.cursor()
    sql = "select status from hdfs_hdfs where type=1"
    cur.execute(sql)
    rows = cur.fetchall()
    cur.close()
    conn.close()
    status_list = [' '.join(item) for item in rows]
    if "RUNNING" not in status_list:
        # Nothing to stop; still refresh the recorded namenode state.
        update_namenode_info()
        return '{"success": 1}'
    result = run_services.run_bigdata_services(dfs_stop_cmd, 'Stopped DFS Services', 'Error Stopping DFS Services')
    if json.loads(result)["success"]:
        update_namenode_info()
    # BUG FIX: previously the function fell through and returned None when
    # the stop command reported failure; always propagate the result.
    return result
# Example #3
def stop_all_hbase():
    '''
    Stop the HBase services if any are recorded as RUNNING, then re-check
    the recorded statuses to confirm the stop took effect.

    :return: JSON string: the stop-command result, '{"success": 2, ...}' if
             HBase is still RUNNING after the stop, '{"success": 1}' if
             nothing was running, or the API-key failure message
    '''
    header_key = request.headers.get('API-KEY')
    api_status = check_apiKey(header_key)
    if api_status != 'success':
        return api_status
    conn = get_postgres_connection()
    cur = conn.cursor()
    sql = "select status from hbase_hbase where type=1"
    cur.execute(sql)
    rows = cur.fetchall()
    status_list = [' '.join(item) for item in rows]
    try:
        if "RUNNING" not in status_list:
            return '{"success": 1}'
        result = run_services.run_bigdata_services(
            hbase_stop_cmd, 'HBase Service Stopped',
            'Error Stopping HBase Services')
        update_hbase_master()
        if not json.loads(result)["success"]:
            return result
        # Re-query the statuses to verify the services actually stopped.
        cur.execute(sql)
        rows = cur.fetchall()
        status_list = [' '.join(item) for item in rows]
        if "RUNNING" in status_list:
            return '{"success": 2, "msg": ["HBase Service not Stopped!!!"]}'
        return result
    finally:
        # BUG FIX: the "nothing running" branch previously leaked the cursor
        # and connection; close them on every exit path.
        cur.close()
        conn.close()
# Example #4
def kill():
    '''
    Force-kill a service and record the resulting status in postgres.

    Expects a JSON body with 'service_name', 'node_id' and 'table_name'.

    :return: JSON string describing success or failure, or the API-key
             failure message
    '''
    try:
        updated_at = int(time.time())
        header_key = request.headers.get('API-KEY')
        api_status = check_apiKey(header_key)
        if api_status != 'success':
            return api_status
        conn = get_postgres_connection()
        cur = conn.cursor()
        loaded_json = json.loads(request.data.decode())
        service_name = loaded_json['service_name']
        node_id = loaded_json['node_id']
        table_name = loaded_json['table_name']
        kill_result = json.loads(kill_service(service_name, user_pass))
        # SECURITY FIX: values are now bound as query parameters instead of
        # being %-interpolated into the SQL string.
        # NOTE(review): table_name still comes straight from the request body
        # and is interpolated as an identifier (identifiers cannot be bound);
        # it should be validated against a whitelist of known tables.
        if kill_result["success"]:
            sql = "UPDATE %s set status='SHUTDOWN', updated_at=%%s where id=%%s;" % table_name
            cur.execute(sql, (updated_at, int(node_id)))
            msg = '{"success": 1, "msg": ["%s killed forcefully"]}' % service_name
        else:
            sql = "UPDATE %s set status='RUNNING' where id=%%s;" % table_name
            cur.execute(sql, (int(node_id),))
            msg = '{"success": 0, "msg": ["Error killing %s forcefully"]}' % service_name
        conn.commit()
        cur.close()
        conn.close()
        return msg
    except Exception as e:
        log.error("Error in kill()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #5
def hregionserver_restart():
    '''
    Restarts hbase regionserver: stop it (with a 20 second timeout in a
    worker pool), then start it again.

    :return: JSON string on success/timeout, or the parsed dict from the
             stop/start helpers on failure (matching original behaviour),
             or the API-key failure message
    '''
    log.info("\nRestarting Hbase Regionserver\n")
    header_key = request.headers.get('API-KEY')
    api_status = check_apiKey(header_key)
    if api_status != 'success':
        return api_status
    with Pool(processes=1) as pool:
        loaded_json = json.loads(request.data.decode())
        cluster_id = loaded_json['cluster_id']

        mul_pool = pool.apply_async(regionserver_stop, args=[cluster_id])
        try:
            result = mul_pool.get(timeout=20)
            stop = json.loads(result)
            if stop["success"] == 1:
                start = json.loads(regionserver_start(cluster_id))
                if start["success"]:
                    return '{"success": 1, "msg": ["Successfully Restarted HBase Regionserver"]}'
                return start
            # BUG FIX: removed the dead re-assignment stop["success"] = 2 in
            # the success==2 branch; any non-1 outcome just propagates `stop`.
            return stop
        except Exception:
            # Timeout (or any worker failure) — kill the pool and report.
            pool.terminate()
            return '{"success": 2, "msg": ["Took more than 20 seconds to restart regionserver!!!"]}'
def stop_spark():
    '''
    Stops spark.

    :return: JSON string: the stop-command result, or '{"success": 1}' if
             spark was not RUNNING, or the API-key failure message
    '''
    log.info("\nStopping spark\n")
    header_key = request.headers.get('API-KEY')
    api_status = check_apiKey(header_key)
    if api_status != 'success':
        return api_status
    conn = get_postgres_connection()
    cur = conn.cursor()
    sql = "select status from spark_spark where type=1"
    cur.execute(sql)
    rows = cur.fetchall()
    status_list = [' '.join(item) for item in rows]
    if "RUNNING" not in status_list:
        cur.close()
        conn.close()
        return '{"success": 1}'
    result = run_services.run_bigdata_services(spark_stop_cmd,
                                               'spark Stopped',
                                               'Error Stopping spark')
    if json.loads(result)["success"]:
        sql = """UPDATE spark_spark set status='SHUTDOWN' where type=1 and state=1;"""
        cur.execute(sql)
        conn.commit()
    # BUG FIX: previously returned None (and leaked the cursor/connection)
    # when the stop command failed; always close and propagate the result.
    cur.close()
    conn.close()
    return result
# Example #7
def get_system_statistics_history():
    '''
    Return historical system statistics for a requested window.

    The JSON body must contain exactly one key:
      - "time": look back that many hours from now
      - "days": look back that many days from now

    History lives in one file per day ("<YYYY-MM-DD>.bin") under
    system_statistics_history/; each line is a binary-string-encoded
    python-literal dict that is decoded and collected.

    :return: "{'final_data': [...]}" string of collected dicts, a
             'No such key ...' message, an error payload, or the API-key
             failure message
    '''
    try:
        header_key = request.headers.get('API-KEY')
        api_status = check_apiKey(header_key)
        if api_status == 'success':
            if not parent_dir.endswith('/'):
                history_dir = parent_dir + '/system_statistics_history/'
            else:
                history_dir = parent_dir + 'system_statistics_history/'

            system_data = []

            loaded_json = json.loads(request.data.decode())
            key = list(loaded_json.keys())[0]

            current_time = datetime.utcfromtimestamp(time.time())

            if key == 'time':
                requested_hour = int(loaded_json["time"])
                requested_time = current_time - timedelta(hours=requested_hour)

            elif key == 'days':
                days = int(loaded_json["days"])
                requested_time = current_time - timedelta(days=days)
            else:
                return 'No such key "%s" found!!!' % key

            while requested_time <= current_time:
                requested_date = requested_time.strftime('%Y-%m-%d')

                if not os.path.exists("%s.bin" %
                                      (history_dir + requested_date)):
                    # NOTE(review): in 'time' mode a missing file advances by
                    # the full requested hour span, which can skip whole days
                    # — looks suspicious, confirm this is intended.
                    if key == 'time':
                        requested_time = requested_time + timedelta(
                            hours=requested_hour)
                    elif key == 'days':
                        requested_time = requested_time + timedelta(days=1)
                    continue
                with open("%s.bin" % (history_dir + requested_date),
                          "r") as history_file:
                    contents = history_file.read()
                    contents = contents.strip().split("\n")
                    for content in contents:
                        # Each line is a string of '0'/'1' bits; rebuild the
                        # bytes, decode to text, then parse the dict literal.
                        content = int(content, 2)
                        content = content.to_bytes(
                            (content.bit_length() + 7) // 8, 'big').decode()
                        content_dict = ast.literal_eval(content)
                        system_data.append(content_dict)
                requested_time = requested_time + timedelta(days=1)

            # NOTE(review): single-quoted output is not valid JSON; kept as-is
            # because callers may rely on the current format.
            return '{\'final_data\': %s}' % system_data

        else:
            return api_status
    except Exception as e:
        # BUG FIX: log message previously said "list_dir()" (copy-paste).
        log.error("Error in get_system_statistics_history()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #8
def get_system_statistics():
    '''
    Return current system statistics from system_stats().

    :return: the system_stats() payload, an error payload, or the API-key
             failure message
    '''
    log.info("\nGetting System Statistics\n")
    try:
        header_key = request.headers.get('API-KEY')
        api_status = check_apiKey(header_key)
        if api_status == 'success':
            return system_stats()
        else:
            return api_status
    except Exception as e:
        # BUG FIX: log message previously said "list_dir()" (copy-paste).
        log.error("Error in get_system_statistics()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
def stop_spark_slave():
    '''
    Stop the spark slave for the cluster named in the request body.

    :return: result of sps_stop(), or the API-key failure message
    '''
    log.info("\nStopping spark slave\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    return sps_stop(body['cluster_id'])
# Example #10
def hregionserver_start():
    '''
    Start the hbase regionserver for the cluster named in the request body.

    :return: result of regionserver_start(), or the API-key failure message
    '''
    log.info("\nStarting HRegionserver\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    return regionserver_start(body['cluster_id'])
# Example #11
def datanode_stop():
    '''
    Stop the datanode service for the cluster named in the request body.

    :return: result of dn_stop(), or the API-key failure message
    '''
    log.info("\nStopping Datanode\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    return dn_stop(body['cluster_id'])
def nodemanager_stop():
    '''
    Stop the yarn nodemanager for the cluster named in the request body.

    :return: result of nm_stop(), or the API-key failure message
    '''
    log.info("\nStopping Nodemanager\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    return nm_stop(body['cluster_id'])
def elasticsearch_start():
    '''
    Start elasticsearch for the cluster named in the request body.

    :return: result of es_start(), or the API-key failure message
    '''
    log.info("\nStarting elasticsearch\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    return es_start(body['cluster_id'])
# Example #14
def dfs_start():
    '''
    Start the dfs services (namenode, datanode, zkfc, journalnode) and
    refresh the recorded namenode state afterwards.

    :return: the start-command result, or the API-key failure message
    '''
    log.info("\nStarting DFS Services\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    outcome = run_services.run_bigdata_services(dfs_start_cmd, 'Started DFS Service', 'Error Starting DFS Services')
    update_namenode_info()
    return outcome
# Example #15
def tail():
    '''
    Return the last `nlines` lines of the file named in the request body.

    :return: result of file_read_from_tail(), an error payload, or the
             API-key failure message
    '''
    try:
        header_key = request.headers.get('API-KEY')
        api_status = check_apiKey(header_key)
        if api_status == 'success':
            loaded_json = json.loads(request.data.decode())
            file_path = loaded_json['file_path']

            return file_read_from_tail(file_path, nlines, user_pass)
        else:
            return api_status
    except Exception as e:
        # BUG FIX: log message previously said "tail_view()"; this is tail().
        log.error("Error in tail()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
def yarn_start():
    '''
    Start the yarn services (nodemanager, resourcemanager) and refresh the
    recorded resourcemanager state afterwards.

    :return: the start-command result, or the API-key failure message
    '''
    log.info("\nStarting Yarn\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    outcome = run_services.run_bigdata_services(
        yarn_start_cmd, 'Started Yarn Services',
        'Error Starting Yarn Services')
    update_rm_info()
    return outcome
# Example #17
def kill_running_service():
    '''
    Force-kill a tracked running service identified by the 'table_name'
    field of the request body.

    :return: result of kill_service_process(), an error payload if no
             matching process is tracked, or the API-key failure message
    '''
    header_key = request.headers.get('API-KEY')
    api_status = check_apiKey(header_key)
    if api_status != 'success':
        return api_status
    loaded_json = json.loads(request.data.decode())
    table_name = loaded_json['table_name']
    if any(table_name in a for a in my_service_process):
        # Find the first tracked entry whose dict contains table_name.
        service_index, service_entry = next(
            (i, d) for i, d in enumerate(my_service_process)
            if '%s' % table_name in d)
        return kill_service_process(service_index, service_entry[table_name])
    # BUG FIX: previously fell through and returned None when nothing
    # matched; return an explicit error payload instead.
    return '{"success": 0, "msg": ["No running process found for %s"]}' % table_name
# Example #18
def remove_files():
    '''
    Delete the files listed in the request body from the given source dir.

    :return: result of delete(), an error payload, or the API-key failure
             message
    '''
    try:
        api_status = check_apiKey(request.headers.get('API-KEY'))
        if api_status != 'success':
            return api_status
        body = json.loads(request.data.decode())
        # 'files' arrives as a python-literal string; parse it to a list.
        return delete(body['source'], ast.literal_eval(body['files']), user_pass)
    except Exception as e:
        log.error("Error in remove_files()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #19
def change_mode():
    '''
    Apply the requested permission mode to the file named in the body.

    :return: result of chmod(), an error payload, or the API-key failure
             message
    '''
    try:
        api_status = check_apiKey(request.headers.get('API-KEY'))
        if api_status != 'success':
            return api_status
        body = json.loads(request.data.decode())
        return chmod(body['file_path'], body['mode'], user_pass)
    except Exception as e:
        log.error("Error in change_mode()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #20
def get_total_space():
    '''
    Report the host's total memory and total disk space.

    :return: '{"total_memory": <GB>, "total_disk": <GB>}' JSON string, an
             error payload, or the API-key failure message
    '''
    log.info("\nGetting Total Memory and Disk space\n")
    try:
        header_key = request.headers.get('API-KEY')
        api_status = check_apiKey(header_key)
        if api_status == 'success':
            total_memory = get_total_memory()
            total_disk = get_total_disk()
            return '{"total_memory": %d, "total_disk": %d}' % (total_memory,
                                                               total_disk)
        else:
            return api_status
    except Exception as e:
        # BUG FIX: log message previously said "list_dir()" (copy-paste).
        log.error("Error in get_total_space()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #21
def copy_files():
    '''
    Copy the listed files from a source directory to a destination.

    :return: result of copy(), an error payload, or the API-key failure
             message
    '''
    try:
        api_status = check_apiKey(request.headers.get('API-KEY'))
        if api_status != 'success':
            return api_status
        body = json.loads(request.data.decode())
        # 'file_list' arrives as a python-literal string; parse it to a list.
        names = ast.literal_eval(body['file_list'])
        return copy(body['source'], names, body['destination'], user_pass)
    except Exception as e:
        log.error("Error in copy_files()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #22
def list_dir():
    '''
    List the contents of a local directory; the special path 'user' maps
    to the current user's home directory.

    :return: result of browse_local(), an error payload, or the API-key
             failure message
    '''
    try:
        api_status = check_apiKey(request.headers.get('API-KEY'))
        if api_status != 'success':
            return api_status
        body = json.loads(request.data.decode())
        target = body['path']
        if target == 'user':
            target = '/home/%s' % username
        return browse_local(target)
    except Exception as e:
        log.error("Error in list_dir()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #23
def hbase_start():
    '''
    Start the hbase services (hbase master, hbase regionserver) and refresh
    the recorded master state afterwards.

    :return: the start-command result, a failure payload from
             update_hbase_master(), or the API-key failure message
    '''
    log.info("\nStarting HBase\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    outcome = run_services.run_bigdata_services(
        hbase_start_cmd, 'HBase Services Started',
        'Error Starting HBase Services')
    up_status = update_hbase_master()
    # A failed master update takes precedence over the start result.
    if up_status is not None and up_status["success"] == 0:
        return json.dumps(up_status)
    return outcome
# Example #24
def extract():
    '''
    Extract an archive inside the given root folder, optionally overriding
    existing files.

    :return: result of extract_archive(), an error payload, or the API-key
             failure message
    '''
    try:
        api_status = check_apiKey(request.headers.get('API-KEY'))
        if api_status != 'success':
            return api_status
        body = json.loads(request.data.decode())
        root_folder = body['root_folder']
        # Normalise to a trailing slash so paths join cleanly downstream.
        if not root_folder.endswith('/'):
            root_folder = root_folder + '/'
        return extract_archive(body['file_name'], root_folder,
                               body['override'], user_pass)
    except Exception as e:
        log.error("Error in extract()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #25
def make_dir():
    '''
    Create a new folder under the given root folder.

    :return: result of make_directory(), an error payload, or the API-key
             failure message
    '''
    try:
        header_key = request.headers.get('API-KEY')
        api_status = check_apiKey(header_key)
        if api_status == 'success':
            loaded_json = json.loads(request.data.decode())
            root_folder = loaded_json['root_folder']
            folder_name = loaded_json['folder_name']

            # Normalise to a trailing slash so the join below is clean.
            if not root_folder.endswith('/'):
                root_folder = root_folder + '/'

            folder_to_create = '%s%s' % (root_folder, folder_name)
            return make_directory(folder_to_create, user_pass)
        else:
            return api_status
    except Exception as e:
        # BUG FIX: log message previously said "mkdir_dir()"; this is make_dir().
        log.error("Error in make_dir()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
# Example #26
def dfs_restart():
    '''
    Restart the dfs services: stop them, then start them again.

    :return: a JSON string describing the outcome, or the API-key failure
             message
    '''
    log.info("\nRestarting DFS Services\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    if not json.loads(dfs_stop())["success"]:
        return '{"success": 0, "msg": ["Error restarting dfs services!!!"]}'
    if not json.loads(dfs_start())["success"]:
        return '{"success": 0, "msg": ["Error restarting dfs services!!!"]}'
    return '{"success": 1, "msg": ["Successfully restarted dfs service"]}'
def elasticsearch_restart():
    '''
    Restart elasticsearch for the cluster named in the request body:
    stop it, then start it again.

    :return: a JSON string describing the outcome, or the API-key failure
             message
    '''
    log.info("\nRestarting ES\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    cluster_id = body['cluster_id']
    if not json.loads(es_stop(cluster_id))["success"]:
        return '{"success": 0, "msg": ["Error Restarting Elasticsearch!!!"]}'
    if not json.loads(es_start(cluster_id))["success"]:
        return '{"success": 0, "msg": ["Error Restarting Elasticsearch!!!"]}'
    return '{"success": 1, "msg": ["Successfully Restarted Elasticsearch"]}'
def restart_spark():
    '''
    Restart spark: stop it, then start it again.

    :return: a JSON string describing the outcome, or the API-key failure
             message
    '''
    log.info("\nRestarting spark\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    if not json.loads(stop_spark())["success"]:
        return '{"success": 0, "msg": ["Error restarting spark services!!!"]}'
    if not json.loads(start_spark())["success"]:
        return '{"success": 0, "msg": ["Error restarting spark services!!!"]}'
    return '{"success": 1, "msg": ["Successfully restarted spark service"]}'
def spark_master_restart():
    '''
    Restart the spark master for the cluster named in the request body:
    stop it, then start it again.

    :return: a JSON string describing the outcome, or the API-key failure
             message
    '''
    log.info("\nRestarting Spark Master\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    cluster_id = body['cluster_id']
    if not json.loads(spm_stop(cluster_id))["success"]:
        return '{"success": 0, "msg": ["Error Restarting Spark Master!!!"]}'
    if not json.loads(spm_start(cluster_id))["success"]:
        return '{"success": 0, "msg": ["Error Restarting Spark Master!!!"]}'
    return '{"success": 1, "msg": ["Successfully Restarted Spark Master"]}'
# Example #30
def hregionserver_stop():
    '''
    Stop the hbase regionserver, aborting if it takes longer than 20
    seconds (the stop runs in a single-worker pool so it can be timed out).

    :return: result of regionserver_stop(), a timeout payload, or the
             API-key failure message
    '''
    log.info("\nStopping HRegionserver\n")
    api_status = check_apiKey(request.headers.get('API-KEY'))
    if api_status != 'success':
        return api_status
    body = json.loads(request.data.decode())
    with Pool(processes=1) as pool:
        pending = pool.apply_async(regionserver_stop, args=[body['cluster_id']])
        try:
            return pending.get(timeout=20)
        except Exception:
            pool.terminate()
            return '{"success": 2, "msg": ["Took more than 20 seconds to stop regionserver!!!"]}'