def onboard(activatorOnboardDetails):
    """
    Onboard an activator: post its repo details to the DAC for cloning and,
    on success, mark the activator as Locked and store the git snapshot.

    :param activatorOnboardDetails: dict with the activator "id" to onboard
    :return: 200 on success, 401 if the caller is not an MC admin,
             406 if the activator does not exist, 500 if the DAC call failed
    """
    onboarded_state = "Locked"
    oid = activatorOnboardDetails["id"]
    response = False
    activator_name = "not found"
    try:
        with db_session() as dbs:
            user = security.get_valid_user_from_token(dbsession=dbs)
            logger.debug(f"Logged in user {user}")
            if not (user and user.isMCAdmin):
                return abort(
                    401, "JWT not valid or user is not a Mission Control Admin"
                )
            ret = post_repo_data_to_dac(oid, user.id)
            response = ret[0]
            activator_name = ret[1]
    except Exception as ex:
        logger.debug("exception encountered running post_repo_data_to_dac")
        logger.exception(ex)

    if response and 200 <= response.status_code <= 299:
        logger.debug("response.status_code in range " + str(response.status_code))
        with db_session() as dbs:
            act = dbs.query(Activator).filter(Activator.id == oid).one_or_none()
            if act:
                act.status = onboarded_state
                act.gitSnapshotJson = str(response.json())
                dbs.merge(act)
                dbs.commit()
            else:
                # fix: this branch means the activator row is missing, not a
                # clone failure — the old message said "Unable to clone"
                logger.debug("Unable to find activator, return 406")
                abort(406, "Unable to find activator.")
        # fix: log message claimed 201 but the handler returns 200
        logger.debug("Success, return 200")
        payload = {
            "message": "Activator {0} has been successfully onboarded".format(
                activator_name
            ),
            "onboardingState": onboarded_state,
            "id": oid,
        }
        return make_response(payload, 200)
    else:
        logger.debug("Unable to clone repository, return 500")
        abort(500, "Unable to clone repository")
def create(activatorDetails):
    """
    Create a new activator from the supplied payload.

    :param activatorDetails: activator to create in the activator list
    :return: 201 on success, 406 on activator exists
    """
    with db_session() as dbs:
        # The id column is generated by the database, so drop any
        # caller-supplied value.
        if "id" in activatorDetails:
            del activatorDetails["id"]
        extra_fields = activator_extension.refine_activator_details(activatorDetails)
        new_activator = ActivatorSchema().load(activatorDetails, session=dbs)
        dbs.add(new_activator)
        dbs.flush()
        # Persist rows in every table associated with this activator.
        activator_extension.create_activator_associations(
            extra_fields, new_activator, dbs
        )
        # Return the fully expanded activator in the response body.
        expanded = activator_extension.expand_activator(new_activator, dbs)
        data = ExtendedActivatorSchema(many=False).dump(expanded)
        return data, 201
def delete(oid):
    """
    Soft-delete an activator from the activators list.

    :param oid: id of the activator to delete
    :return: 200 on successful delete, 404 if not found
    """
    with db_session() as dbs:
        activator = (
            dbs.query(Activator).filter(Activator.id == oid).one_or_none()
        )
        # Guard clause: nothing to delete with this id.
        if activator is None:
            abort(404, f"Activator id {oid} not found")
        # Deactivate rather than physically remove the row.
        activator.isActive = False
        dbs.merge(activator)
        dbs.flush()
        # Remove the Activator-CI relationship entries as well.
        activator_extension.delete_activator_associations(activator.id, dbs)
        return make_response(f"Activator id {oid} successfully deleted", 200)
def create(activatorByURLDetails):
    """
    Create an activator from a GitHub repository URL.

    Steps:
      1. Connect to the GitHub repo via the GitHub API and download
         activator_metadata.yml.
      2. Read the contents of activator_metadata.yml.
      3. Insert an entry into the 'activator' table.
      4. Insert an entry into the 'activatorMetadata' table.
      5. Read 'Platforms' from the yaml and insert rows into the
         'activatorPlatform' table.
      6. Insert 'mandatoryVariables' into 'activatorVariables' with
         isOptional=False.
      7. Insert 'optionalVariables' into 'activatorVariables' with
         isOptional=True.

    :param activatorByURLDetails: dict with the repo "url"
    :return: 200 with the expanded activator, 404 if the created activator
             cannot be read back, 500 on any failure
    """
    try:
        with db_session() as dbs:
            user = security.get_valid_user_from_token(dbsession=dbs)
            if not (user and user.isMCAdmin):
                # NOTE(review): this abort raises inside the try, so the broad
                # except below converts the 401 into a 500 — TODO confirm and
                # consider re-raising HTTPException before the generic handler.
                return abort(
                    401, "JWT not valid or user is not a Mission Control Admin")
            github_credentials = systemsettings.get_github_credentials(user.id)
            # get Yaml from github and read the contents of the yaml file
            act_metadata_yml_dict = get_file_from_repo(
                activatorByURLDetails["url"], github_credentials)
            activator_id = create_activator(dbs, act_metadata_yml_dict,
                                            activatorByURLDetails["url"])
            activator_metadata = create_activator_metadata(
                dbs, act_metadata_yml_dict, activator_id,
                activatorByURLDetails["url"])
            create_activator_metadata_platforms(dbs, act_metadata_yml_dict,
                                                activator_metadata.id)
            mandatoryVariables = act_metadata_yml_dict["mandatoryVariables"]
            create_activator_metadata_variables(dbs, activator_metadata.id,
                                                mandatoryVariables, False)
            optionalVariables = act_metadata_yml_dict["optionalVariables"]
            create_activator_metadata_variables(dbs, activator_metadata.id,
                                                optionalVariables, True)
            # return the newly created, fully expanded activator
            act = (dbs.query(Activator).filter(
                Activator.id == activator_id, ).one_or_none())
            if act is not None:
                # Expand Activator
                act = activator_extension.expand_activator(act, dbs)
                schema = ExtendedActivatorSchema(many=False)
                data = schema.dump(act)
                return data, 200
    except Exception as ex:
        logger.exception(ex)
        abort(500, "Internal Server Error")
    # fix: previously fell through and returned None (implicit) when the
    # activator could not be read back; surface it as an explicit 404.
    abort(404, "Created activator could not be found")
def read_one(oid):
    """
    Respond to a request for /api/activator/{key} with one matching activator.

    :param oid: id of the activator to find
    :return: expanded activator matching the id and 200, 404 if not found
    """
    with db_session() as dbs:
        act = (
            dbs.query(Activator)
            .filter(
                Activator.id == oid,
            )
            .one_or_none()
        )
        if act is not None:
            # Expand Activator
            act = activator_extension.expand_activator(act, dbs)
            schema = ExtendedActivatorSchema(many=False)
            data = schema.dump(act)
            return data, 200
        else:
            # fix: dropped the redundant .format(id=oid) chained on an f-string
            abort(404, f"Activator with id {oid} not found")
def notify_user(message, activatorId, toUserId, importance=1):
    """Send an activator notification to one user, skipping duplicates."""
    logger.debug(
        "notify_users fromUserId: %s message: %s activatorId: %s",
        toUserId,
        message,
        activatorId,
    )
    # Notify all user
    with db_session() as dbs:
        # Only notify if the user has no identical active message already.
        duplicates = (
            dbs.query(Notification)
            .filter(
                Notification.message == message,
                Notification.toUserId == toUserId,
                Notification.isActive,
            )
            .count()
        )
        logger.debug("existing_notifications: %s", duplicates)
        if duplicates == 0:
            payload = {
                "activatorId": activatorId,
                "message": message,
                "toUserId": toUserId,
                "importance": importance,
            }
            notification.create(payload, typeId=1, dbsession=dbs)
            # Auto-dismiss the user's previous notification for this activator.
            notification.dismiss(
                fromUserId=toUserId, activatorId=activatorId, dbsession=dbs
            )
def notify_admins(message, activatorId, fromUserId, importance=1):
    """Send an activator notification to every active MC admin, skipping duplicates."""
    logger.debug(
        "notify_admins fromUserId: %s message: %s activatorId: %s",
        fromUserId,
        message,
        activatorId,
    )
    # Notify all admins
    payload = {
        "activatorId": activatorId,
        "message": message,
        "fromUserId": fromUserId,
        "toUserId": 0,
        "importance": importance,
    }
    # TODO: Send admin notifications to teammember.isTeamAdmin,
    # joining with activator.businessUnitId when that become available.
    with db_session() as dbs:
        admins = dbs.query(User).filter(User.isMCAdmin, User.isActive).all()
        for admin in admins:
            # Skip admins that already have an identical active message.
            duplicates = (
                dbs.query(Notification)
                .filter(
                    Notification.message == message,
                    Notification.toUserId == admin.id,
                    Notification.isActive,
                )
                .count()
            )
            logger.debug("existing_notifications: %s", duplicates)
            if duplicates == 0:
                payload["toUserId"] = admin.id
                notification.create(payload, typeId=1, dbsession=dbs)
def update(key, keyValueDetails):
    """
    Update an existing lzmetadata entry.

    :param key: key of the lzmetadata to update
    :param keyValueDetails: lzmetadata payload to apply
    :return: the updated lzmetadata and 200, 400 on key mismatch,
             404 if the key does not exist
    """
    if "key" in keyValueDetails and keyValueDetails["key"] != key:
        abort(400, "Key mismatch in path and body")
    else:
        keyValueDetails["key"] = key
    # Does the lzmetadata exist in lzmetadata list?
    with db_session() as dbs:
        existing_lzmetadata = (
            dbs.query(LZMetadata).filter(LZMetadata.key == key).one_or_none()
        )
        # Does lzmetadata exist?
        if existing_lzmetadata is not None:
            schema = LZMetadataSchema(many=False)
            # fix: load into the same session used for merge/commit below
            # (was db.session, which mixed two sessions in one transaction)
            update_lzmetadata = schema.load(keyValueDetails, session=dbs)
            dbs.merge(update_lzmetadata)
            dbs.commit()
            # return the updated obj in the response
            data = schema.dump(update_lzmetadata)
            app.logger.debug(data)
            return data, 200
        else:
            # otherwise, nope, it doesn't exist, so that's an error
            abort(404, f"LZMetadata with key {key} not found")
    abort(500, "Problem encountered updating lzmetadata.")
def update(oid, activatorDetails):
    """
    Update an existing activator in the activators list.

    :param oid: id of the activator to update
    :param activatorDetails: activator payload to apply; any "id" in the body
                             must match the path id
    :return: the updated, expanded activator and 200; 400 on id mismatch,
             404 if the activator does not exist
    """
    logger.debug("update")
    logger.debug("id")
    logger.debug(oid)
    logger.debug("activator")
    logger.debug(pformat(activatorDetails))
    if "id" in activatorDetails and activatorDetails["id"] != oid:
        abort(400, "Key mismatch in path and body")
    with db_session() as dbs:
        # Does the activators exist in activators list?
        existing_activator = (
            dbs.query(Activator).filter(Activator.id == oid).one_or_none()
        )
        # Does activator exist?
        if existing_activator is not None:
            # schema = ActivatorSchema()
            activatorDetails["id"] = oid
            logger.info("activatorDetails: %s", activatorDetails)
            # Split out fields (e.g. associations) that are not plain
            # Activator columns; they are re-applied after the merge below.
            extraFields = activator_extension.refine_activator_details(activatorDetails)
            schema = ActivatorSchema(many=False, session=dbs)
            updatedActivator = schema.load(activatorDetails)
            logger.info("updatedActivator: %s", updatedActivator)
            dbs.merge(updatedActivator)
            # Update CI list in activatorCI table
            # return the updated activator in the response
            # flush so the merged row is visible to the association helper
            dbs.flush()
            response = activator_extension.create_activator_associations(
                extraFields, updatedActivator, dbs
            )
            # A truthy response is an error descriptor from the helper.
            if response:
                abort(response["code"], response["message"])
            # Expand activator
            updatedActivator = activator_extension.expand_activator(
                updatedActivator, dbs
            )
            schema = ExtendedActivatorSchema(many=False)
            data = schema.dump(updatedActivator)
            return data, 200
        # otherwise, nope, deployment doesn't exist, so that's an error
        else:
            abort(404, f"Activator id {oid} not found")
def deployment_read_all():
    """Return every application deployment that has a deployment state set."""
    with db_session() as dbs:
        deployments = (
            dbs.query(ApplicationDeployment)
            .filter(ApplicationDeployment.deploymentState != "")
            .all()
        )
        # Attach the landing-zone environment to each deployment row.
        for deployment in deployments:
            deployment.lzEnvironment = (
                dbs.query(LZEnvironment)
                .filter(LZEnvironment.id == deployment.lzEnvironmentId)
                .one_or_none()
            )
        schema = ExtendedApplicationDeploymentSchema(many=True)
        data = schema.dump(deployments)
        # logger.debug("deployment_read_all::applications data: %s", data)
        return data, 200
def start_deployment(applicationId):
    """
    Poll-drive an application's per-environment deployments to completion.

    Loops until every ApplicationDeployment row for the application reaches
    SUCCESS or FAILURE: rows without a DAC task id are submitted via
    deploy_application, rows with one are polled for results. Notifies the
    user once everything has completed.

    :param applicationId: id of the application being deployed
    :return: True when the deployment loop has finished
    """
    logger.info("start_deployment::applicationId: %s", applicationId)
    # can only deploy an application if the solution it belong's to has already been
    # deployed successfully.
    with db_session() as dbs:
        deployment_complete = False
        while deployment_complete is False:
            # Next deployment row that has not yet reached a terminal state.
            app_dep = (dbs.query(ApplicationDeployment).filter(
                ApplicationDeployment.applicationId == applicationId,
                ApplicationDeployment.deploymentState.notin_(
                    (DeploymentStatus.SUCCESS, DeploymentStatus.FAILURE)),
            ).first())
            logger.debug("start_deployment::app_dep *** %s", app_dep)
            if app_dep:
                app_id = app_dep.applicationId
                task_id = app_dep.taskId
                logger.debug(
                    "start_deployment: deploymentState: %s, app_id: %s, workspaceProjectId %s, task_id %s",
                    app_dep.deploymentState,
                    app_id,
                    app_dep.workspaceProjectId,
                    task_id,
                )
                if task_id is None or task_id == "":
                    # No DAC task yet: submit the deployment.
                    response = deploy_application(app_dep, dbsession=dbs)
                    dbs.flush()
                    logger.debug(
                        "start_deployment::deploy_application: app_id: %s", app_id)
                    logger.debug(pformat(response))
                else:
                    # Task submitted earlier: poll the DAC for results.
                    logger.debug(
                        "start_deployment::polling_results_from_the_DaC: app_id: %s task_id: %s",
                        app_id,
                        task_id,
                    )
                    get_application_results_from_the_dac(
                        app_id=app_id,
                        lzEnvId=app_dep.lzEnvironmentId,
                        task_id=task_id,
                        dbsession=dbs,
                    )
                    dbs.flush()
                # fix: use the module logger instead of a stray print()
                logger.debug("Sleep 2")
                time.sleep(2)
            else:
                deployment_complete = True
                logger.debug(
                    "start_deployment::deployment complete for Application: %s",
                    applicationId)
                notify_user(applicationId=applicationId)
    return True
def deployment_create(applicationDeploymentDetails):
    """
    Queue a deployment for an application by forwarding the request to the DaC.

    :param applicationDeploymentDetails: dict with the application "id"
    :return: 200 with PENDING state on success
           : 404 if application not found
           : 400 if the owning solution has not deployed successfully
    """
    logger.debug("deployment_create: %s", pformat(applicationDeploymentDetails))
    app_id = applicationDeploymentDetails["id"]
    with db_session() as dbs:
        app = dbs.query(Application).filter(
            Application.id == app_id).one_or_none()
        if not app:
            # fix: abort() takes the status code first — arguments were
            # swapped as abort(message, 404)
            abort(404, "This application doesn't exist.")
        sol = (dbs.query(Solution).filter(
            Application.id == app_id,
            Application.solutionId == Solution.id).one_or_none())
        if sol and sol.deploymentState != DeploymentStatus.SUCCESS:
            logger.warning(
                "Cannot deploy an application if the solution deployment has not completed successfully."
            )
            abort(
                400,
                "Cannot deploy an application if the solution deployment has not completed successfully.",
            )
        # Create a deployment row per active environment of the solution.
        sol_envs = (dbs.query(LZEnvironment).filter(
            SolutionEnvironment.environmentId == LZEnvironment.id,
            SolutionEnvironment.solutionId == sol.id,
            SolutionEnvironment.isActive,
            LZEnvironment.isActive,
        ).all())
        for lzenv in sol_envs:
            lzenv_app_deployment(lzenv, dbs, sol, app_id, app)
    # above db transaction should be complete before the next steps
    executor.submit(start_deployment, app_id)
    return make_response(
        {
            "id": app_id,
            "deploymentState": DeploymentStatus.PENDING
        }, 200)
def categories():
    """
    :return: distinct list of activator categories.
    """
    with db_session() as dbs:
        rows = dbs.execute(
            "select category from activatorMetadata group by category"
        )
        # Build one {"category": ...} entry per distinct category row.
        categories_arr = [{"category": row["category"]} for row in rows]
        schema = ExtendedActivatorCategorySchema(many=True)
        data = schema.dump(categories_arr)
        return data, 200
def get_gcp_project_id(projectId):
    """
    Return the GCP project URL metadata with {{project_id}} substituted.

    :param projectId: project id to substitute into the stored URL template
    :return: the rendered key/value pair and 200, 500 if the metadata is missing
    """
    local_key = "GCP_PROJECT_URL"
    # fix: was logging local_key under the projectId= label
    logger.debug("get_gcp_project_id: projectId=%s", projectId)
    with db_session() as dbs:
        lzmetadata = (
            dbs.query(LZMetadata).filter(LZMetadata.key == local_key).one_or_none()
        )
        logger.debug("lzmetadata: %s", lzmetadata)
        if lzmetadata:
            val = {}
            val["key"] = lzmetadata.key
            val["value"] = lzmetadata.value
            # Render the stored URL template with the caller's project id.
            val["value"] = val["value"].replace("{{project_id}}", projectId)
            schema = LZMetadataSchema(many=False)
            data = schema.dump(val)
            return data, 200
    abort(500, "Problem encountered getting the GCP Project URL.")
def deploy_folders_and_solution(sol_deployment):
    """Create the folder hierarchy for a solution, then hand the solution to the DAC."""
    logger.debug("deploy_folders_and_solution")
    with db_session() as dbs:
        solution = solution_extension.expand_solution(sol_deployment, dbsession=dbs)
        folder_resp = create_folders(solution)
        folder_id = folder_resp.get("deploymentFolderId")
        state = folder_resp.get("status")
        logger.debug(
            "deploy_folders_and_solution::deploymentFolderId: %s status: %s",
            folder_id,
            state,
        )
        # Only forward to the DAC once the folder hierarchy exists.
        if folder_id and state == DeploymentStatus.SUCCESS:
            solution.deploymentFolderId = folder_id
            state = send_solution_deployment_to_the_dac(solution, dbsession=dbs)
        return state, 200
def read(key):
    """
    Respond to /api/lzmetadata/{key} with the matching lzmetadata entry.

    :param key: key of the lzmetadata to find
    :return: LZMetadata matching the key and 200, 404 if not found
    """
    with db_session() as dbs:
        record = dbs.query(LZMetadata).filter(LZMetadata.key == key).one_or_none()
        if record is None:
            abort(404, f"LZMetadata with key {key} not found")
        # Serialize the record for the response.
        data = LZMetadataSchema().dump(record)
        app.logger.debug(data)
        return data, 200
def read_one(oid):
    """
    Respond to a request for /api/solution/{oid} with one matching solution.

    :param oid: id of the solution to find
    :return: expanded solution matching the id and 200, 404 if not found
    """
    with db_session() as dbs:
        sol = (dbs.query(Solution).filter(Solution.id == oid, ).one_or_none())
        if sol is not None:
            solution = solution_extension.expand_solution(sol, dbsession=dbs)
            # Serialize the data for the response
            solution_schema = ExtendedSolutionSchema()
            data = solution_schema.dump(solution)
            return data, 200
        else:
            # fix: dropped the redundant .format(id=oid) chained on an f-string
            abort(404, f"Solution with id {oid} not found")
def post_repo_data_to_dac(oid: int, userId: int):
    """
    Post repository details to the DAC so it can clone the activator's repo.

    :param oid: id of the activator whose repo should be cloned
    :param userId: id of the logged-in user, used to look up GitHub credentials
    :return: tuple of (requests response from the DAC, derived activator name)
    :raises Exception: if the activator row is missing or its repo URL is
                       invalid (e.g. the activator is still a draft)
    """
    logger.debug("running post_repo_data_to_dac")
    activator_name: str = "not found"
    with db_session() as dbs:
        github_credentials = systemsettings.get_github_credentials(userId)
        logger.debug(f"GitHub Credentials {github_credentials}")
        act = dbs.query(Activator).filter(Activator.id == oid).one_or_none()
        if act:
            logger.debug("DB entry found")
            repo_url = act.gitRepoUrl
            url_valid = check_url_valid(repo_url)
            if url_valid:
                # Derive the repo name the DAC should clone under.
                activator_name = generate_name_from_repo_url(repo_url)
            else:
                logger.debug("repo name from url invalid, is activator 'draft'?")
                raise Exception("repo name from url invalid, is activator 'draft'?")
        else:
            logger.debug("DB entry not found")
            raise Exception("Error retrieving data from db")
        payload = {
            "repo": {"name": activator_name, "url": repo_url},
            "cred": {
                "user": github_credentials.username,
                "token": github_credentials.token,
            },
        }
        logger.debug("post_repo_data_to_dac sending post")
        headers = {"Content-Type": "application/json"}
        response = requests.post(
            onboard_repo_url, headers=headers, data=json.dumps(payload, indent=4)
        )
        logger.debug("post_repo_data_to_dac response received")
        return response, activator_name
def update(oid, solutionDetails):
    """
    Update an existing solution in the solutions list.

    :param oid: id of the solution to update
    :param solutionDetails: solution payload to apply; "environments" is
                            stripped and persisted separately in the same
                            transaction
    :return: the updated, expanded solution and 200, 404 if not found
    """
    logger.debug("update::solutionDetails: %s", solutionDetails)
    with db_session() as dbs:
        # Does the solutions exist in solutions list?
        existing_solution = dbs.query(Solution).filter(
            Solution.id == oid).one_or_none()
        # Does solutions exist?
        if existing_solution is not None:
            solutionDetails["id"] = oid
            envs = solutionDetails.get("environments")
            # Remove envs as it's processed separately, but in the same transaction.
            if "environments" in solutionDetails:
                del solutionDetails["environments"]
            solution_extension.create_solution_environments(oid, envs, dbsession=dbs)
            schema = SolutionSchema(many=False)
            new_solution = schema.load(solutionDetails, session=dbs)
            new_solution.lastUpdated = ModelTools.get_utc_timestamp()
            dbs.merge(new_solution)
            # Commit before expanding so related rows read back consistently.
            dbs.commit()
            new_solution = solution_extension.expand_solution(new_solution, dbsession=dbs)
            # return the updated solution in the response
            schema = ExtendedSolutionSchema(many=False)
            data = schema.dump(new_solution)
            logger.debug("data: %s", data)
            return data, 200
        # otherwise, nope, the solution doesn't exist, so that's an error
        else:
            abort(404, f"Solution {oid} not found")
def meta(typeId=None, isRead=None, isActive=None):
    """
    Respond to /api/notificationsMeta/ with the count of notifications
    addressed to the logged-in user.

    :param typeId: restrict to one notification type when given
    :param isRead: restrict by read flag when given
    :param isActive: restrict by active flag when given
    :return: {"count": <total>} and 200, 404 if no valid user
    """
    with db_session() as dbs:
        user = security.get_valid_user_from_token(dbsession=dbs)
        if not user:
            abort(404, "No valid user found!")
        toUserId = user.id
        # Each clause collapses to True (a no-op filter) when its
        # parameter is None.
        total = (
            dbs.query(Notification)
            .filter(
                (typeId is None or Notification.typeId == typeId),
                (toUserId is None or Notification.toUserId == toUserId),
                (isRead is None or Notification.isRead == isRead),
                (isActive is None or Notification.isActive == isActive),
            )
            .count()
        )
        return {"count": total}, 200
def notify_user(solutionId):
    """
    Notify the user the solution deployment has completed.

    Args:
        solutionId ([int]): [The solution id]
    """
    with db_session() as dbs:
        user = security.get_valid_user_from_token(dbsession=dbs)
        logger.debug("user: %s", user)
        if user:
            sol = dbs.query(Solution).filter(
                Solution.id == solutionId).one_or_none()
            if sol:
                deploymentState = sol.deploymentState
                if deploymentState == DeploymentStatus.SUCCESS:
                    message = f"Your Solution {sol.id} ({sol.name}) deployment has completed successfully"
                else:
                    message = (
                        f"Your Solution {sol.id} ({sol.name}) deployment has failed."
                    )
                payload = {
                    "isActive": True,
                    "toUserId": user.id,
                    "importance": 1,
                    "message": message,
                    "isRead": False,
                    "solutionId": sol.id,
                }
                notification.create(notification=payload, typeId=4, dbsession=dbs)
            else:
                # fix: sol is None in this branch, so logging sol.id raised
                # AttributeError; log the requested solutionId instead.
                logger.warning(
                    "notify_user::Cannot send notification, unable to find the solution (%s).",
                    solutionId,
                )
        else:
            logger.warning(
                "notify_user::Cannot send notification, unable to validate the token."
            )
def notify_user(applicationId):
    """
    Notify the user the application deployment has completed.

    Args:
        applicationId ([int]): [The application id]
    """
    with db_session() as dbs:
        user = security.get_valid_user_from_token(dbsession=dbs)
        logger.debug("user: %s", user)
        if user:
            # fix: one_or_none() may return None, and unpacking None into a
            # tuple raised TypeError; bind the row first, then unpack.
            row = (dbs.query(
                Application, ApplicationDeployment).filter(
                    ApplicationDeployment.applicationId == applicationId,
                    ApplicationDeployment.applicationId == Application.id,
                ).one_or_none())
            if row is not None:
                app, app_deploy = row
                deploymentState = app_deploy.deploymentState
                if deploymentState == DeploymentStatus.SUCCESS:
                    message = f"Your Application {applicationId} ({app.name}) deployment has completed successfully"
                else:
                    message = f"Your Application {applicationId} ({app.name}) deployment has failed."
                payload = {
                    "isActive": True,
                    "toUserId": user.id,
                    "importance": 1,
                    "message": message,
                    "isRead": False,
                    "applicationId": app.id,
                }
                notification.create(notification=payload, typeId=3, dbsession=dbs)
            else:
                # fix: app was None here, so logging app.id raised
                # AttributeError; log the requested applicationId instead.
                logger.warning(
                    "Cannot send notification, unable to find the application (%s).",
                    applicationId,
                )
        else:
            logger.warning(
                "Cannot send notification, unable to validate the token.")
def delete(oid):
    """
    Soft-delete a solution from the solutions list.

    :param oid: id of the solution to delete
    :return: 200 on successful delete, 404 if not found
    """
    with db_session() as dbs:
        solution = dbs.query(Solution).filter(
            Solution.id == oid).one_or_none()
        # Guard clause: nothing to delete with this id.
        if solution is None:
            abort(404, f"Solution {oid} not found")
        # Deactivate rather than physically remove the row.
        solution.isActive = False
        dbs.merge(solution)
        dbs.commit()
        return make_response(f"Solution {oid} successfully deleted", 200)
def create(keyValueDetails):
    """
    Create a new lzmetadata entry from the passed-in data.

    :param keyValueDetails: lzmetadata payload to create
    :return: the created lzmetadata and 201
    """
    # Remove id as it's created automatically
    if "id" in keyValueDetails:
        del keyValueDetails["id"]
    with db_session() as dbs:
        schema = LZMetadataSchema()
        new_lzmetadata = schema.load(keyValueDetails, session=dbs)
        # fix: add/commit on the same session the entity was loaded into
        # (was db.session, a different session from dbs)
        dbs.add(new_lzmetadata)
        dbs.commit()
        # Serialize and return the newly created lzmetadata
        # in the response
        data = schema.dump(new_lzmetadata)
        app.logger.debug(data)
        return data, 201
    # fix: abort status code must be an int (was the string "500")
    abort(500, "Problem encountered creating an LZMetadata.")
def create_all(
    notificationListDetails,
    typeId,
    isRead=None,
    isActive=None,
    page=None,
    page_size=None,
    sort=None,
):
    """Create a batch of notifications, then return the refreshed listing."""
    logger.debug("create_all: %s", notificationListDetails)
    with db_session() as dbs:
        # Persist each notification in the batch within one session.
        for details in notificationListDetails:
            create(details, typeId, dbsession=dbs)
        data, resp_code = read_all(
            typeId=typeId,
            isRead=isRead,
            isActive=isActive,
            page=page,
            page_size=page_size,
            sort=sort,
        )
        logger.debug("data: %s, resp_code: %s", data, resp_code)
        return data, 201
def setActivatorStatus(activatorDetails):
    """
    Update the status of an activator and send the related access
    notifications.

    :param activatorDetails: dict with at least "id" and the new "status"
    :return: the updated, expanded activator and 200, 404 if not found
    """
    logger.info(pformat(activatorDetails))
    with db_session() as dbs:
        # Does the activator to delete exist?
        existing_activator = (
            dbs.query(Activator)
            .filter(Activator.id == activatorDetails["id"], Activator.isActive)
            .one_or_none()
        )
        # if found?
        if existing_activator is not None:
            schema = ActivatorSchema()
            updated_activator = schema.load(activatorDetails, session=dbs)
            updated_activator.lastUpdated = ModelTools.get_utc_timestamp()
            dbs.merge(updated_activator)
            # Expand Activator
            updated_activator = activator_extension.expand_activator(
                updated_activator, dbs
            )
            activator_schema = ExtendedActivatorSchema()
            data = activator_schema.dump(updated_activator)
            # Create notifications
            if (
                updated_activator.status != "Available"
                and updated_activator.accessRequestedById
            ):
                # Access was requested: notify the MC admins who can grant it.
                full_name = (
                    (updated_activator.accessRequestedBy.firstName or "")
                    + " "
                    + (updated_activator.accessRequestedBy.lastName or "")
                )
                message = f"{full_name} has requested access to activator #{updated_activator.id}"
                notify_admins(
                    message=message,
                    activatorId=updated_activator.id,
                    fromUserId=updated_activator.accessRequestedById,
                )
            elif (
                updated_activator.status == "Available"
                and updated_activator.accessRequestedById
            ):
                # Access was granted: notify the requesting user.
                activator_name = (
                    f"Activator {updated_activator.id} ({updated_activator.name})"
                )
                message = f"Access to {activator_name} has been granted."
                notify_user(
                    message,
                    activatorId=updated_activator.id,
                    toUserId=updated_activator.accessRequestedById,
                )
            return data, 200
        # Otherwise, nope, activator to update was not found
        else:
            actid = activatorDetails["id"]
            abort(404, f"Activator id {actid} not found")
def read_all(
    isActive=None,
    isFavourite=None,
    category=None,
    status=None,
    environment=None,
    platform=None,
    type=None,
    source=None,
    sensitivity=None,
    page=None,
    page_size=None,
    sort=None,
):
    """
    Respond to a request for /api/activators with the filtered list of
    activators.

    :param isActive/isFavourite/status/environment/source/sensitivity:
        optional Activator column filters
    :param category/type: optional ActivatorMetadata filters, applied via the
        matching activator ids
    :param platform: accepted but currently unused — NOTE(review): confirm
        whether platform filtering was intended here
    :param page/page_size: optional pagination
    :param sort: optional list of "column[:asc|desc]" instructions
    :return: json list of expanded activators and 200, 404 if none match
    """
    # Create the list of activators from our data
    logger.debug(
        "Parameters: isActive: %s, isFavourite: %s, category: %s, status: %s, environment: %s, platform: %s, "
        "type: %s, source: %s, sensitivity: %s, page: %s, page_size: %s, sort: %s",
        isActive,
        isFavourite,
        category,
        status,
        environment,
        platform,
        type,
        source,
        sensitivity,
        page,
        page_size,
        sort,
    )
    with db_session() as dbs:
        # pre-process sort instructions
        if sort is None:
            activator_query = dbs.query(Activator).order_by(Activator.id)
        else:
            try:
                # Each instruction is "column[:direction]"; default asc.
                sort_inst = [si.split(":") for si in sort]
                orderby_arr = []
                for si in sort_inst:
                    si1 = si[0]
                    if len(si) > 1:
                        si2 = si[1]
                    else:
                        si2 = "asc"
                    orderby_arr.append(f"{si1} {si2}")
                # print("orderby: {}".format(orderby_arr))
                activator_query = dbs.query(Activator).order_by(
                    literal_column(", ".join(orderby_arr))
                )
            except SQLAlchemyError as e:
                logger.warning(e)
                activator_query = dbs.query(Activator).order_by(Activator.id)
        # Resolve category/type filters to a set of activator ids.
        activator_metadatas = (
            dbs.query(ActivatorMetadata)
            .filter(
                (category is None or ActivatorMetadata.category == category),
                (type is None or ActivatorMetadata.typeId == type),
            )
            .all()
        )
        act_ids = None
        if activator_metadatas:
            act_ids = [am.activatorId for am in activator_metadatas]
        # Each clause collapses to True (a no-op filter) when its
        # parameter is None.
        activator_query = activator_query.filter(
            (status is None or Activator.status == status),
            (environment is None or Activator.envs.like('%"{}"%'.format(environment))),
            (source is None or Activator.sourceControl.like('%"{}"%'.format(source))),
            (sensitivity is None or Activator.sensitivity == sensitivity),
            (isActive is None or Activator.isActive == isActive),
            (isFavourite is None or Activator.isFavourite == isFavourite),
            (act_ids is None or Activator.id.in_(act_ids)),
        )
        # No metadata rows matched at all: report "none found" below.
        if act_ids is None:
            activators = None
        elif page is None or page_size is None:
            activators = activator_query.all()
        else:
            activators = activator_query.limit(page_size).offset(page * page_size).all()
        if activators:
            # Expand all Activators
            for act in activators:
                act = activator_extension.expand_activator(act, dbs)
            activator_schema = ExtendedActivatorSchema(many=True)
            data = activator_schema.dump(activators)
            logger.debug("read_all")
            logger.debug(pformat(data))
            return data, 200
        else:
            abort(404, "No Activators found with matching criteria")
def check_credentials(login_details):
    """
    Respond to a request for /api/login.

    When a bearer token is supplied, its claims are decoded and a user record
    is auto-provisioned on first login; the credentials are then validated
    against the EC_PASSWORD environment variable.

    :param login_details: dict with "username" and "password" (overwritten
                          from the token claims when an Authorization header
                          is present)
    :return: json string of user details and 200, 401 on failure
    """
    authorization = connexion.request.headers.get("Authorization")
    if authorization:
        logger.debug("Authorization: %s", authorization)
        # Header format is "Bearer <token>".
        token = authorization.split(" ")[1]
        claims = security.decode_token(token)
        logger.debug("Claims: %s", claims)
        # NOTE(review): this lookup uses db.session while the rest of the
        # function uses db_session() — confirm the sessions are compatible.
        existing_user = (
            db.session.query(User)
            .filter(User.email == claims.get("email"))
            .one_or_none()
        )
        if not existing_user:
            # First login: auto-provision a user record from the token claims.
            userDetails = {
                "email": claims.get("email"),
                "firstName": claims.get("given_name"),
                "lastName": claims.get("family_name"),
            }
            with db_session() as dbs:
                schema = UserSchema()
                new_user = schema.load(userDetails, session=dbs)
                dbs.add(new_user)
        login_details["username"] = claims.get("email")
        login_details["password"] = os.environ.get("EC_PASSWORD", pw_backup)
    logger.info(
        "Login Details: {}".format(pformat(ModelTools.redact_dict(login_details)))
    )
    username = login_details.get("username")
    password = login_details.get("password")
    is_active_user = False
    with db_session() as dbs:
        user = (
            dbs.query(User).filter(User.email == username, User.isActive).one_or_none()
        )
        if user:
            is_active_user = True
        is_valid_password = False
        # Password is checked against the shared EC_PASSWORD value.
        if os.environ.get("EC_PASSWORD", pw_backup) == password:
            is_valid_password = True
        schema = ExtendedLoginSchema(many=False)
        if is_active_user and is_valid_password:
            logger.debug("LOGIN accepted!")
            teams_resp = team.read_list_by_user_id(user.id)
            if teams_resp[1] == HTTPStatus.OK:
                user.teams = teams_resp[0]
            else:
                # NOTE(review): missing f-prefix — this logs the literal
                # "{user.id}" rather than the id.
                logger.info("No teams found for user {user.id}")
            data = schema.dump(user)
            return data, 200
    logger.warning("LOGIN FAILED!")
    abort(401, "Unauthorised! {}".format(ModelTools.redact_dict(login_details)))
def read_all(
    isActive=None,
    isFavourite=None,
    isSandbox=None,
    namesonly=None,
    page=None,
    page_size=None,
    sort=None,
):
    """
    Respond to a request for /api/solutions with the list of solutions.

    :param isActive/isFavourite/isSandbox: optional boolean filters
    :param namesonly: when True, serialize only the solution names
    :param page/page_size: optional pagination
    :param sort: optional list of "column[:asc|desc]" instructions
    :return: json list of solutions and 200
    """
    logger.debug("solution.read_all")
    logger.debug(
        "Parameters: isActive: %s, isFavourite: %s, isSandbox: %s, namesonly: %s, page: %s, page_size: %s, sort: %s",
        isActive,
        isFavourite,
        isSandbox,
        namesonly,
        page,
        page_size,
        sort,
    )
    with db_session() as dbs:
        # pre-process sort instructions
        if sort is None:
            solution_query = dbs.query(Solution).order_by(Solution.id)
        else:
            try:
                # Each instruction is "column[:direction]"; default asc.
                sort_inst = [si.split(":") for si in sort]
                orderby_arr = []
                for si in sort_inst:
                    si1 = si[0]
                    if len(si) > 1:
                        si2 = si[1]
                    else:
                        si2 = "asc"
                    orderby_arr.append(f"{si1} {si2}")
                # print("orderby: {}".format(orderby_arr))
                solution_query = dbs.query(Solution).order_by(
                    literal_column(", ".join(orderby_arr)))
            except SQLAlchemyError as e:
                logger.warning("Exception: %s", e)
                solution_query = dbs.query(Solution).order_by(Solution.id)
        # Create the list of solutions from our data.
        # Each clause collapses to True (a no-op filter) when its
        # parameter is None.
        solution_query = solution_query.filter(
            (isActive is None or Solution.isActive == isActive),
            (isFavourite is None or Solution.isFavourite == isFavourite),
            (isSandbox is None or Solution.isSandbox == isSandbox),
        )
        # do limit and offset last
        if page is None or page_size is None:
            solutions = solution_query.all()
        else:
            # fix: materialize the paged query with .all() for consistency
            # with the unpaged branch and the other read_all handlers
            # (previously left as a lazy Query object)
            solutions = solution_query.limit(page_size).offset(page * page_size).all()
        if namesonly is True:
            # Serialize the data for the response
            schema = SolutionNamesOnlySchema(many=True)
            data = schema.dump(solutions)
        else:
            for sol in solutions:
                sol = solution_extension.expand_solution(sol, dbsession=dbs)
            schema = ExtendedSolutionSchema(many=True)
            data = schema.dump(solutions)
        logger.debug("read_all: %s", data)
        return data, 200
def read_all(typeId=None,
             isRead=None,
             isActive=None,
             page=None,
             page_size=None,
             sort=None):
    """
    Return the logged-in user's notifications, newest first by default.

    :param typeId: restrict to one notification type when given
    :param isRead/isActive: optional boolean filters
    :param page/page_size: optional pagination
    :param sort: optional list of "column[:asc|desc]" instructions
    :return: json list of notifications (with type and per-type detail row)
             and 200, 404 if no valid user
    """
    logger.debug("read_all: %s", typeId)
    with db_session() as dbs:
        # pre-process sort instructions
        if sort is None:
            # fix: use .desc() — the previous `column + " desc"` built a SQL
            # string concatenation expression, not a descending ORDER BY
            notifications_query = dbs.query(Notification).order_by(
                Notification.lastUpdated.desc())
        else:
            try:
                # Each instruction is "column[:direction]"; default asc.
                sort_inst = [si.split(":") for si in sort]
                orderby_arr = []
                for si in sort_inst:
                    si1 = si[0]
                    if len(si) > 1:
                        si2 = si[1]
                    else:
                        si2 = "asc"
                    orderby_arr.append(f"{si1} {si2}")
                # print("orderby: {}".format(orderby_arr))
                notifications_query = dbs.query(Notification).order_by(
                    literal_column(", ".join(orderby_arr)))
            except SQLAlchemyError as e:
                logger.warning("Exception: %s", e)
                notifications_query = dbs.query(Notification).order_by(
                    Notification.lastUpdated.desc())
        user = security.get_valid_user_from_token(dbsession=dbs)
        if not user:
            abort(404, "No valid user found!")
        toUserId = user.id
        # Each clause collapses to True (a no-op filter) when its
        # parameter is None.
        notifications_query = notifications_query.filter(
            (typeId is None or Notification.typeId == typeId),
            (toUserId is None or Notification.toUserId == toUserId),
            (isRead is None or Notification.isRead == isRead),
            (isActive is None or Notification.isActive == isActive),
        )
        # do limit and offset last
        if page is None or page_size is None:
            notifications = notifications_query.all()
        else:
            # fix: materialize the paged query with .all() for consistency
            # with the unpaged branch
            notifications = notifications_query.limit(page_size).offset(
                page * page_size).all()
        # Attach the notification type and its per-type detail record.
        for n in notifications:
            n.type = (dbs.query(NotificationType).filter(
                NotificationType.id == n.typeId).one_or_none())
            if n.typeId == 1:
                n.details = (dbs.query(NotificationActivator).filter(
                    n.id == NotificationActivator.notificationId,
                    Activator.id == NotificationActivator.activatorId,
                ).one_or_none())
            elif n.typeId == 2:
                n.details = (dbs.query(NotificationTeam).filter(
                    n.id == NotificationTeam.notificationId,
                    Team.id == NotificationTeam.teamId,
                ).one_or_none())
            elif n.typeId == 3:
                n.details = (
                    dbs.query(NotificationApplicationDeployment).filter(
                        n.id == NotificationApplicationDeployment.notificationId,
                        Application.id ==
                        NotificationApplicationDeployment.applicationId,
                    ).one_or_none())
            elif n.typeId == 4:
                n.details = (dbs.query(NotificationSolutionDeployment).filter(
                    n.id == NotificationSolutionDeployment.notificationId,
                    Solution.id == NotificationSolutionDeployment.solutionId,
                ).one_or_none())
        schema = ExtendedNotificationSchema(many=True)
        data = schema.dump(notifications)
        return data, 200