def get_gdrive_file(file_data):
  """Get text/csv data from gdrive file.

  Args:
    file_data: dict describing the gdrive file; only the 'id' key is read.

  Returns:
    Parsed csv data as produced by read_csv_file.

  Raises:
    BadRequest: the file content has a wrong format, or the Drive API
        answered 400.
    NotFound: the Drive API answered 404 for the given file id.
    Unauthorized: the Drive API answered 401 (stale credentials).
    InternalServerError: any other Drive API status or unexpected failure.
  """
  http_auth = get_http_auth()
  try:
    drive_service = discovery.build('drive', 'v3', http=http_auth)
    # check file type
    file_meta = drive_service.files().get(fileId=file_data['id']).execute()
    if file_meta.get("mimeType") == "text/csv":
      file_data = drive_service.files().get_media(
          fileId=file_data['id']).execute()
    else:
      # Non-csv files (e.g. native Google Sheets) are exported as csv.
      file_data = drive_service.files().export_media(
          fileId=file_data['id'], mimeType='text/csv').execute()
    csv_data = read_csv_file(StringIO(file_data))
  except AttributeError:
    # when file_data has no splitlines() method
    raise BadRequest("Wrong file format.")
  except HttpError as e:
    # Default the "error" lookup to {} so a malformed error payload cannot
    # raise AttributeError while we are building the error response.
    message = json.loads(e.content).get("error", {}).get("message")
    if e.resp.status == 404:
      raise NotFound(message)
    if e.resp.status == 401:
      raise Unauthorized("{} Try to reload /import page".format(message))
    if e.resp.status == 400:
      raise BadRequest(message + " Probably the file is of a wrong type.")
    raise InternalServerError(message)
  except Exception:  # pylint: disable=broad-except
    # A bare "except:" also swallows SystemExit/KeyboardInterrupt;
    # Exception is broad enough for the fallback error response.
    raise InternalServerError("Import failed due to internal server error.")
  return csv_data
def get_gdrive_file_data(file_data):
  """Get text/csv data from gdrive file.

  Args:
    file_data: dict describing the gdrive file; only the 'id' key is read.

  Returns:
    Tuple (csv_data, raw file content, file name) where csv_data is the
    parsed result of read_csv_file.

  Raises:
    BadRequest: the file content has a wrong format.
    InternalServerError: unexpected failure while fetching the file.
    Plus whatever handle_token_error / hande_http_error raise for token
    refresh and Drive API errors.
  """
  http_auth = get_http_auth()
  try:
    drive_service = discovery.build(API_SERVICE_NAME, API_VERSION,
                                    http=http_auth)
    # check file type
    file_meta = drive_service.files().get(fileId=file_data['id']).execute()
    if file_meta.get('mimeType') == 'text/csv':
      file_data = drive_service.files().get_media(
          fileId=file_data['id']).execute()
    else:
      # Non-csv files (e.g. native Google Sheets) are exported as csv.
      file_data = drive_service.files().export_media(
          fileId=file_data['id'], mimeType='text/csv').execute()
    csv_data = read_csv_file(StringIO(file_data))
  except AttributeError:
    # when file_data has no splitlines() method
    raise BadRequest('Wrong file format.')
  except HttpAccessTokenRefreshError:
    handle_token_error('Try to reload /import page')
  except HttpError as ex:
    # NOTE(review): "hande_http_error" looks misspelled but is presumably
    # the project helper's actual name — confirm before renaming.
    hande_http_error(ex)
  except Exception as ex:  # pylint: disable=broad-except
    # logger.exception keeps the full traceback; ex.message is deprecated
    # and not defined for every exception type.
    logger.exception(ex)
    raise InternalServerError(
        'Import failed due to internal server error.')
  return csv_data, file_data, file_meta.get('name')
def get_gdrive_file_data(file_data):
  """Get text/csv data from gdrive file"""
  http_auth = get_http_auth()
  try:
    drive_service = discovery.build(API_SERVICE_NAME, API_VERSION,
                                    http=http_auth)
    files_resource = drive_service.files()
    # Fetch the metadata first so we know how to download the content.
    file_meta = files_resource.get(fileId=file_data['id']).execute()
    if file_meta.get('mimeType') == 'text/csv':
      media_request = files_resource.get_media(fileId=file_data['id'])
    else:
      # Anything that is not already csv gets exported as csv.
      media_request = files_resource.export_media(fileId=file_data['id'],
                                                  mimeType='text/csv')
    file_data = media_request.execute()
    csv_data = read_csv_file(StringIO(file_data))
  except AttributeError:
    # raised when file_data has no splitlines() method
    raise BadRequest(errors.WRONG_FILE_FORMAT)
  except HttpAccessTokenRefreshError:
    handle_token_error('Try to reload /import page')
  except HttpError as ex:
    hande_http_error(ex)
  except Exception as ex:  # pylint: disable=broad-except
    logger.error(ex.message)
    raise InternalServerError(errors.INTERNAL_SERVER_ERROR)
  return csv_data, file_data, file_meta.get('name')
def get_gdrive_file(file_data):
  """Get text/csv data from gdrive file.

  Args:
    file_data: dict describing the gdrive file; only the 'id' key is read.

  Returns:
    Parsed csv data as produced by read_csv_file.

  Raises:
    BadRequest: the file content has a wrong format.
    Unauthorized: credentials were rejected or could not be refreshed.
    InternalServerError: any other unexpected failure.
    Plus whatever hande_http_error raises for Drive API errors.
  """
  http_auth = get_http_auth()
  try:
    drive_service = discovery.build(API_SERVICE_NAME, API_VERSION,
                                    http=http_auth)
    # check file type
    file_meta = drive_service.files().get(fileId=file_data['id']).execute()
    if file_meta.get("mimeType") == "text/csv":
      file_data = drive_service.files().get_media(
          fileId=file_data['id']).execute()
    else:
      # Non-csv files (e.g. native Google Sheets) are exported as csv.
      file_data = drive_service.files().export_media(
          fileId=file_data['id'], mimeType='text/csv').execute()
    csv_data = read_csv_file(StringIO(file_data))
  except AttributeError:
    # when file_data has no splitlines() method
    raise BadRequest("Wrong file format.")
  except Unauthorized as ex:
    raise Unauthorized("{} Try to reload /import page".format(ex.message))
  except HttpAccessTokenRefreshError:
    # drive_service can rise this exception in case of invalid token, that why
    # we need to handle it here
    del flask.session['credentials']
    raise Unauthorized('Unable to get valid credentials.'
                       ' Try to reload /import page')
  except HttpError as ex:
    hande_http_error(ex)
  except Exception:  # pylint: disable=broad-except
    # A bare "except:" also swallows SystemExit/KeyboardInterrupt;
    # Exception is broad enough for the fallback error response.
    raise InternalServerError(
        "Import failed due to internal server error.")
  return csv_data
def run_import_phases(ie_id, user_id, url_root):  # noqa: ignore=C901
  """Execute import phases.

  Runs the two-phase import state machine for job ``ie_id`` inside an app
  context: a dry-run "Analysis" pass first, then (unless the job was
  stopped, failed or produced warnings) the real "In Progress" pass.
  Job status transitions are committed to the DB and the user is notified
  by email on failure/blocking/completion.
  """
  with app.app_context():
    try:
      user = person.Person.query.get(user_id)
      # Impersonate the job owner for the duration of the import.
      setattr(g, '_current_user', user)
      ie_job = import_export.get(ie_id)
      check_for_previous_run()
      csv_data = read_csv_file(StringIO(ie_job.content.encode("utf-8")))
      if ie_job.status == "Analysis":
        # Phase 1: dry run (second arg True) — nothing is persisted.
        info = make_import(csv_data, True)
        db.session.rollback()
        # Re-read the job row: the user may have stopped it meanwhile.
        db.session.refresh(ie_job)
        if ie_job.status == "Stopped":
          return
        ie_job.results = json.dumps(info)
        for block_info in info:
          if block_info["block_errors"] or block_info["row_errors"]:
            ie_job.status = "Analysis Failed"
            db.session.commit()
            job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                  url_root, ie_job.title)
            return
        for block_info in info:
          if block_info["block_warnings"] or block_info["row_warnings"]:
            # Warnings block the import until the user confirms.
            ie_job.status = "Blocked"
            db.session.commit()
            job_emails.send_email(job_emails.IMPORT_BLOCKED, user.email,
                                  url_root, ie_job.title)
            return
        ie_job.status = "In Progress"
        db.session.commit()
      if ie_job.status == "In Progress":
        # Phase 2: real import (dry_run=False).
        info = make_import(csv_data, False)
        ie_job.results = json.dumps(info)
        for block_info in info:
          if block_info["block_errors"] or block_info["row_errors"]:
            ie_job.status = "Analysis Failed"
            job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                  url_root, ie_job.title)
            db.session.commit()
            return
        ie_job.status = "Finished"
        db.session.commit()
        job_emails.send_email(job_emails.IMPORT_COMPLETED, user.email,
                              url_root, ie_job.title)
    except Exception as e:  # pylint: disable=broad-except
      logger.exception("Import failed: %s", e.message)
      try:
        # Best effort: mark the job failed and notify the user.
        ie_job.status = "Failed"
        ie_job.end_date = datetime.now()
        db.session.commit()
        job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                              url_root, ie_job.title)
      except Exception as e:  # pylint: disable=broad-except
        # The job row itself may be gone or the DB unreachable.
        logger.exception("Failed to set job status: %s", e.message)
def run_import_phases(ie_id, user_id, url_root):  # noqa: ignore=C901
  """Execute import phases.

  Duplicate of the phase runner above: performs the dry-run "Analysis"
  pass, then the real "In Progress" import for job ``ie_id``, committing
  each status transition and emailing the user on failure, blocking or
  completion.
  """
  with app.app_context():
    try:
      user = person.Person.query.get(user_id)
      # Impersonate the job owner for the duration of the import.
      setattr(g, '_current_user', user)
      ie_job = import_export.get(ie_id)
      check_for_previous_run()
      csv_data = read_csv_file(StringIO(ie_job.content.encode("utf-8")))
      if ie_job.status == "Analysis":
        # Phase 1: dry run (second arg True) — nothing is persisted.
        info = make_import(csv_data, True)
        db.session.rollback()
        # Re-read the job row: the user may have stopped it meanwhile.
        db.session.refresh(ie_job)
        if ie_job.status == "Stopped":
          return
        ie_job.results = json.dumps(info)
        for block_info in info:
          if block_info["block_errors"] or block_info["row_errors"]:
            ie_job.status = "Analysis Failed"
            db.session.commit()
            job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                  url_root, ie_job.title)
            return
        for block_info in info:
          if block_info["block_warnings"] or block_info["row_warnings"]:
            # Warnings block the import until the user confirms.
            ie_job.status = "Blocked"
            db.session.commit()
            job_emails.send_email(job_emails.IMPORT_BLOCKED, user.email,
                                  url_root, ie_job.title)
            return
        ie_job.status = "In Progress"
        db.session.commit()
      if ie_job.status == "In Progress":
        # Phase 2: real import (dry_run=False).
        info = make_import(csv_data, False)
        ie_job.results = json.dumps(info)
        for block_info in info:
          if block_info["block_errors"] or block_info["row_errors"]:
            ie_job.status = "Analysis Failed"
            job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                  url_root, ie_job.title)
            db.session.commit()
            return
        ie_job.status = "Finished"
        db.session.commit()
        job_emails.send_email(job_emails.IMPORT_COMPLETED, user.email,
                              url_root, ie_job.title)
    except Exception as e:  # pylint: disable=broad-except
      logger.exception("Import failed: %s", e.message)
      try:
        # Best effort: mark the job failed and notify the user.
        ie_job.status = "Failed"
        ie_job.end_date = datetime.now()
        db.session.commit()
        job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                              url_root, ie_job.title)
      except Exception as e:  # pylint: disable=broad-except
        # The job row itself may be gone or the DB unreachable.
        logger.exception("Failed to set job status: %s", e.message)
def parse_import_request():
  """Validate the import request and extract its csv payload.

  Returns a (dry_run, csv_data) tuple; raises via check_required_headers
  or check_import_file when the request is malformed.
  """
  check_required_headers({
      "X-Requested-By": ["gGRC"],
      "X-test-only": ["true", "false"],
  })
  dry_run = request.headers["X-test-only"] == "true"
  csv_data = read_csv_file(check_import_file())
  return dry_run, csv_data
def parse_import_request():
  """Validate the import request and extract its csv payload.

  Returns a (dry_run, csv_data) tuple; raises via check_required_headers
  or check_import_file when the request is malformed.
  """
  check_required_headers({
      "X-Requested-By": ["GGRC"],
      "X-test-only": ["true", "false"],
  })
  dry_run = request.headers["X-test-only"] == "true"
  csv_data = read_csv_file(check_import_file())
  return dry_run, csv_data
def read_imported_file(file_data):  # pylint: disable=unused-argument
  """Parse the csv file attached to the current request.

  The file_data argument is ignored; the file comes from
  check_import_file().
  """
  return read_csv_file(check_import_file())
def run_import_phases(task):
  """Execute import phases.

  Deferred-task variant of the phase runner: the job id comes from
  ``task.parameters`` and the acting user from the current login. Runs
  the dry-run "Analysis" pass, then the real "In Progress" import,
  committing each status transition, stamping end_at, and emailing the
  user. Always returns a simple HTTP response for the task queue.
  """
  ie_id = task.parameters.get("ie_id")
  user = login.get_current_user()
  try:
    ie_job = import_export.get(ie_id)
    csv_data = import_helper.read_csv_file(
        StringIO(ie_job.content.encode("utf-8"))
    )
    if ie_job.status == "Analysis":
      # Phase 1: dry run (second arg True) — nothing is persisted.
      info = make_import(csv_data, True, ie_job)
      db.session.rollback()
      # Re-read the job row: the user may have stopped it meanwhile.
      db.session.refresh(ie_job)
      if ie_job.status == "Stopped":
        return utils.make_simple_response()
      ie_job.results = json.dumps(info)
      for block_info in info:
        if block_info["block_errors"] or block_info["row_errors"]:
          ie_job.status = "Analysis Failed"
          ie_job.end_at = datetime.utcnow()
          db.session.commit()
          job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                ie_job.title)
          return utils.make_simple_response()
      for block_info in info:
        if block_info["block_warnings"] or block_info["row_warnings"]:
          # Warnings block the import until the user confirms.
          ie_job.status = "Blocked"
          db.session.commit()
          job_emails.send_email(job_emails.IMPORT_BLOCKED, user.email,
                                ie_job.title)
          return utils.make_simple_response()
      ie_job.status = "In Progress"
      db.session.commit()
    if ie_job.status == "In Progress":
      # Phase 2: real import (dry_run=False).
      info = make_import(csv_data, False, ie_job)
      ie_job.results = json.dumps(info)
      for block_info in info:
        if block_info["block_errors"] or block_info["row_errors"]:
          ie_job.status = "Analysis Failed"
          ie_job.end_at = datetime.utcnow()
          job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                                ie_job.title)
          db.session.commit()
          return utils.make_simple_response()
      ie_job.status = "Finished"
      ie_job.end_at = datetime.utcnow()
      db.session.commit()
      job_emails.send_email(job_emails.IMPORT_COMPLETED, user.email,
                            ie_job.title)
  except Exception as e:  # pylint: disable=broad-except
    logger.exception(e.message)
    # Re-fetch the job: the session may have been rolled back above.
    ie_job = import_export.get(ie_id)
    try:
      # Best effort: mark the job failed and notify the user.
      ie_job.status = "Failed"
      ie_job.end_at = datetime.utcnow()
      db.session.commit()
      job_emails.send_email(job_emails.IMPORT_FAILED, user.email,
                            ie_job.title)
      return utils.make_simple_response(e.message)
    except Exception as e:  # pylint: disable=broad-except
      # The job row itself may be gone or the DB unreachable.
      logger.exception("%s: %s",
                       app_errors.STATUS_SET_FAILED, e.message)
      return utils.make_simple_response(e.message)
  return utils.make_simple_response()
def read_imported_file(file_data):  # pylint: disable=unused-argument
  """Return the parsed csv payload of the current request.

  file_data is accepted for interface compatibility but never used.
  """
  csv_file = check_import_file()
  parsed = read_csv_file(csv_file)
  return parsed