def get(self):
    """Return workflow metadata for the requested id as a JSON response.

    Reads the optional ``id`` query parameter, fetches the matching
    workflows via ``Metadata.select_workflows``, and wraps the outcome
    in a Flask ``Response``. Any failure is logged and reported as a
    generic 500 payload.
    """
    return_status = None
    result = {}
    # Bind up front so the cleanup in ``finally`` is always safe; the
    # original ``del etl_metadata_obj`` raised NameError when Metadata()
    # itself failed.
    etl_metadata_obj = None
    try:
        etl_metadata_obj = Metadata()
        # Renamed from ``id`` to avoid shadowing the builtin.
        workflow_id = request.args.get("id")
        data = etl_metadata_obj.select_workflows(workflow_id)
        return_status = 200
        result = {"status": "success", "data": data}
        log.info("__")
    except Exception:  # top-level API boundary; was a bare except
        result = {}
        log.exception('Exception while submitting file processing Request')
        return_status = 500
        result['status'] = 0
        result['message'] = 'Internal Error has Occurred while processing the File request'
    finally:
        resp = Response(json.dumps(result),
                        status=return_status,
                        mimetype="application/json")
        if etl_metadata_obj is not None:
            del etl_metadata_obj
        return resp
def post(self):
    """Handle a file-source POST by delegating to FilesourceMiddleware.

    The middleware returns the HTTP status and payload; this wrapper only
    serializes them into a JSON ``Response`` and converts any unexpected
    failure into a generic 500 payload.
    """
    return_status = None
    result = {}
    try:
        log.info("api Request Initiated")
        fp = FilesourceMiddleware(filesource_parser)
        return_status, result = fp.run(request)
        log.info("__")
    except Exception:  # top-level API boundary; was a bare except
        result = {}
        log.exception('Exception while submitting file processing Request')
        return_status = 500
        result['status'] = 0
        result['message'] = 'Internal Error has Occurred while processing the File request'
    finally:
        resp = Response(json.dumps(result),
                        status=return_status,
                        mimetype="application/json")
        return resp
def run(self, request):
    """Dispatch a file-processing request to metadata, pivot, or sample mode.

    Rewrites the payload's ``file_name`` to point at ``<cwd>/data/<name>.csv``,
    validates the file type, then routes on the ``metadata_ind`` / ``pivot_ind``
    query flags.

    Returns:
        tuple[int, dict]: (HTTP status, result payload). 400 on validation
        failure, 500 on any unexpected error.
    """
    return_status = None
    result = {}
    try:
        log.info(request.json)
        payload = request.json
        # The original concatenated "\\data\\" by hand, which only works on
        # Windows; os.path.join builds the same path portably.
        upload_folder = os.path.join(os.getcwd(), "data")
        base_name = payload['file_name'].split('.')[0]
        payload['file_name'] = os.path.join(upload_folder, base_name + ".csv")
        etl_obj = Fileprocess(payload)
        file_type = etl_obj.validate()
        if file_type not in ("csv", "xlsx", "pdf"):
            raise ValueError(file_type)
        if request.args.get("metadata_ind", None) == "yes":
            data = etl_obj.file_metadata_process(file_type)
        elif request.args.get("pivot_ind", None) == "yes":
            # Pivot mode: forward the pivot specification from the payload.
            data = etl_obj.file_process(
                file_type,
                request.json.get("file_name", None),
                request.json.get("filter", None),
                request.json.get("columns", None),
                request.json.get("rows", None),
                request.json.get("values", None),
                request.json.get("download_ind", None),
            )
        else:
            data = etl_obj.sample_data(file_type)
        return_status = 200
        result['status'] = "successful"
        result['data'] = data
    except ValueError as e:
        result = {}
        log.exception("Value Exception while submitting feedback")
        result['status'] = "failed"
        return_status = 400
        result['message'] = e.args[0]
    except Exception:  # boundary handler; was a bare except
        result = {}
        log.exception("Exception while submitting feedback")
        return_status = 500
        result['status'] = "failed"
        result['message'] = 'internal error has occured while processing the request'
    return return_status, result
def save_db_details(self):
    """Persist this object's connection details via ``Connections.insert``.

    Returns:
        str: "successful" when the insert completes, "failed" otherwise
        (the failure is logged with its exception).
    """
    connections = Connections()
    try:
        connections.insert(self.connection_name, self.database, self.host,
                           self.username, self.password, self.port)
    except Exception as exc:
        log.exception("%s DB connection error " % exc)
        return "failed"
    return "successful"
def run(self, request):
    """Validate an uploaded file payload and process it as excel or non-excel.

    Returns:
        tuple[int, dict]: (HTTP status, result payload). 400 when validation
        fails, 500 on any unexpected error.
    """
    return_status = None
    result = {}
    # The original f-string was malformed and logged the literal text
    # "args: (str(self.args)"; log the intended value, defensively in case
    # ``self.args`` is not set on this instance.
    log.info("args: %s" % getattr(self, "args", None))
    try:
        # validate payload
        log.info(request.args)
        log.info(f"input file: {request.json['file']}")
        etl_obj = Fileprocess(request.json)
        # The original called validate() twice; call once and reuse the result.
        msg = etl_obj.validate()
        if msg != "successful":
            raise ValueError(msg)
        if request.args["file_source_type"] == "excel":
            data = etl_obj.excel_file_process()
        else:
            data = etl_obj.non_excel_file_process()
        return_status = 200
        result['status'] = 1
        result['data'] = data
    except ValueError as e:
        result = {}
        log.exception("Value Exception while submitting feedback")
        result['status'] = 0
        return_status = 400
        result['message'] = e.args[0]
    except Exception:  # boundary handler; was a bare except
        result = {}
        log.exception("Exception while submitting feedback")
        return_status = 500
        result['status'] = 0
        result['message'] = 'internal error has occured while processing the request'
    return return_status, result
def upload():
    """Persist the raw request body to UPLOAD_FOLDER and return a 10-row preview.

    The filename comes from the ``filename`` query parameter; its extension
    selects the reader (csv vs excel). The preview is returned as a JSON
    list of records.
    """
    return_status = None
    result = None
    try:
        filename = request.args['filename']
        file_type = filename.split('.')[-1]
        file_path = os.path.join(UPLOAD_FOLDER, filename)
        with open(file_path, "wb") as binary_file:
            binary_file.write(request.data)
        # Both branches only differ in the reader; share the preview logic.
        if file_type == "csv":
            frame = pd.read_csv(file_path, nrows=50)
        else:
            frame = pd.read_excel(file_path, nrows=50)
        result = json.loads(frame.head(10).to_json(orient="records"))
        return_status = 200
    except Exception:  # boundary handler; was a bare except
        result = {}
        log.exception('Exception while uploading the file ')
        return_status = 500
        result['status'] = 0
        result['message'] = 'Internal Error has Occurred while processing the File request'
    finally:
        resp = Response(json.dumps(result),
                        status=return_status,
                        mimetype="application/json")
        print(resp)
        # Removed: unreachable statements after this return that referenced
        # an undefined name ``data``.
        return resp
def post(self):
    """Extract the requested columns from a CSV in chunks and write them
    to a local download file.

    Expects ``file_name`` and ``columns`` in the JSON body. Responds with
    the path of the written file on success, or a generic 500 payload on
    failure.
    """
    return_status = None
    result = {}
    try:
        file_name = request.json["file_name"]
        columns = request.json["columns"]
        if not file_name:
            # The original fell through to a NameError (masked as a 500)
            # when file_name was empty; fail explicitly instead.
            raise ValueError("file_name is required")
        download_file = file_name.split('\\')[-1]
        # NOTE(review): destination directory is hard-coded to a user profile;
        # should come from configuration. The original raw string doubled the
        # backslashes (r"C:\\Users\\..."); os.path.join yields the intended path.
        download_file = os.path.join(r"C:\Users\venkats_mandadapu", download_file)
        # Stream the CSV in 1M-row chunks to bound memory usage.
        chunk_size = 1000000
        reader = pd.read_csv(file_name, header=0, iterator=True)
        chunks = []
        while True:
            try:
                chunks.append(reader.get_chunk(chunk_size)[columns])
            except StopIteration:
                print("Iteration is stopped")
                break
        df_ac = pd.concat(chunks, ignore_index=True)
        df_ac.to_csv(download_file, index=False)
        return_status = 200
        result = {"status": "success", "download_file": download_file}
        log.info("__")
    except Exception:  # boundary handler; was a bare except
        result = {}
        log.exception('Exception while submitting file processing Request')
        return_status = 500
        result['status'] = 0
        result['message'] = 'Internal Error has Occurred while processing the File request'
    finally:
        resp = Response(json.dumps(result),
                        status=return_status,
                        mimetype="application/json")
        return resp
def upload():
    """Save an uploaded file, convert it to CSV via tabula, preview it, and
    mirror the cleaned data to Postgres.

    The ``filename`` query parameter names the upload; tabula converts every
    page to ``<name>.csv`` in UPLOAD_FOLDER. The first CSV row is promoted to
    the header, duplicate columns are dropped, and the result is loaded into
    a table named after the file.
    """
    return_status = None
    result = None
    try:
        filename = request.args['filename']
        csv_filename = filename.split('.')[0] + ".csv"
        final_file = UPLOAD_FOLDER + csv_filename
        file_type = csv_filename.split('.')[-1]  # always "csv" by construction
        filedata = request.data
        file_path = os.path.join(UPLOAD_FOLDER, filename)
        with open(file_path, "wb") as binary_file:
            binary_file.write(filedata)
        print(file_path)
        tabula.convert_into(file_path, final_file, output_format="csv", pages='all')
        print(final_file)
        if file_type == "csv":
            raw_data = pd.read_csv(final_file, nrows=50)
            # tabula leaves the real header in the first data row: promote it,
            # then drop the two leading junk rows.
            data = raw_data.rename(columns=raw_data.iloc[0])
            df = data.drop([0, 1])
            df.columns = ['OCC' if x == '%Occ' else x for x in df.columns]
            final_data = df.loc[:, ~df.columns.duplicated()]
            result = json.loads(final_data.to_json(orient="records"))
            db_Table = filename.split('.')[0]
            # SECURITY(review): database credentials are embedded in source;
            # move the connection URL to configuration/environment variables.
            db = create_engine(
                "postgres://*****:*****@35.226.209.188:5432/guide_database"
            )
            final_data.to_sql(db_Table, db, if_exists='replace')
            print(db_Table + " Table creaated in database")
        else:
            result = json.loads(
                pd.read_excel(file_path, nrows=50).head(10).to_json(orient="records"))
        return_status = 200
    except Exception:  # boundary handler; was a bare except
        result = {}
        log.exception('Exception while uploading the file ')
        return_status = 500
        result['status'] = 0
        result['message'] = 'Internal Error has Occurred while processing the File request'
    finally:
        resp = Response(json.dumps(result),
                        status=return_status,
                        mimetype="application/json")
        # Removed: unreachable statements after this return that rebuilt a
        # second Response from the intermediate ``data`` frame.
        return resp
def run(self, request, id=None, connectionid=None, post_request=False,
        active_connections_ind=False):
    """Dispatch datasource/connection requests to the appropriate handler.

    Modes (checked in order):
      * ``post_request``            — validate a user-supplied connection.
      * ``active_connections_ind``  — list active connection details.
      * ``connectionid``            — page through schema/table/column names
                                      for one connection.
      * otherwise                   — fetch a datasource by ``id`` (or all).

    Returns:
        tuple[int, dict]: (HTTP status, result payload). 400 for bad input,
        500 on any unexpected error.
    """
    return_status = None
    result = {}
    try:
        log.info(request.args)
        if post_request:
            data = self.etl_obj.validate_user_connection(request.json)
        elif active_connections_ind:
            data = self.etl_obj.get_connection_details()
        elif connectionid:
            # Maps the query "type" onto the metadata column to project.
            metadata_dict = {
                'schemas': 'TABLE_SCHEMA',
                'tables': 'TABLE_NAME',
                'columns': 'COLUMN_NAME',
            }
            obj_type = request.args.get("type", None)
            schema = request.args.get("schema", None)
            table = request.args.get("table", None)
            if obj_type not in metadata_dict:
                # The original left ``data`` unbound here, so an unknown type
                # surfaced as a NameError masked as a 500; report it as a 400.
                raise ValueError("unsupported metadata type: %s" % obj_type)
            metadata = self.etl_obj.get_db_object_metadata(
                connectionid, obj_type, schema, table)
            if len(metadata) > 0:
                column = metadata_dict[obj_type]
                # Deduplicate the projected names, then apply offset/limit
                # paging from the query string.
                names = list({row[column] for row in metadata})
                data = names[int(request.args['offset']):int(request.args['limit'])]
            else:
                data = 'No data rows was found'
        else:
            source_id = int(id) if id else None
            data = self.etl_obj.get_datasource(source_id)
        return_status = 200
        result['status'] = 1
        result['data'] = data
    except ValueError as e:
        result = {}
        log.exception("Value Exception while submitting feedback")
        result['status'] = 0
        return_status = 400
        result['message'] = e.args[0]
    except Exception:
        result = {}
        log.exception("Exception while submitting feedback")
        return_status = 500
        result['status'] = 0
        result['message'] = (
            'Internal Error has occurred while processing the request')
    return return_status, result