def process_user(db_session, datasource, payload):
    """Persist a newly seen Slack user and, if the user is external,
    trigger NEW_USER policy validation and record an activity event.

    :param db_session: active SQLAlchemy session
    :param datasource: DataSource model row for the Slack workspace
    :param payload: raw Slack user payload (dict) -- schema assumed from
                    entities.SlackUser; TODO confirm against caller
    """
    datasource_credentials = get_datasource_credentials(
        db_session, datasource.datasource_id)
    # Without credentials we cannot resolve the domain, so nothing is stored.
    if datasource_credentials:
        domain_id = datasource_credentials['domain_id']
        userObj = entities.SlackUser(domain_id, datasource.datasource_id, payload)
        user_model_obj = userObj.get_model()
        # INSERT IGNORE so a duplicate user row is silently skipped.
        db_session.execute(
            DomainUser.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(DomainUser, user_model_obj)))
        db_session.commit()
        #check if new external member is added to a team
        if user_model_obj.member_type == constants.EntityExposureType.EXTERNAL.value:
            # NOTE: rebinding `payload` discards the original Slack payload;
            # from here on it is the policy-validation request body.
            payload = {}
            payload["user"] = json.dumps(user_model_obj, cls=alchemy_encoder())
            policy_params = {
                'dataSourceId': datasource.datasource_id,
                'policy_trigger': constants.PolicyTriggerType.NEW_USER.value
            }
            Logger().info("new_user : payload : {}".format(payload))
            messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                         constants.INTERNAL_SECRET,
                                         policy_params, payload, "slack")
            # Audit trail: external user joined the workspace.
            activity_db().add_event(
                domain_id=datasource.domain_id,
                connector_type=constants.ConnectorTypes.SLACK.value,
                event_type='USER_ADDED',
                actor=user_model_obj.email,
                tags={
                    "exposure_type": user_model_obj.member_type,
                    "user_email": user_model_obj.email
                })
def export_to_csv(auth_token, payload):
    """Export users to CSV, asynchronously by default.

    When the caller does not specify ``is_async``, the export is queued via
    messaging and a 202 is returned; otherwise the CSV is written inline.

    :param auth_token: caller's auth token, forwarded to the export endpoint
    :param payload: export request parameters (mutated: ``is_async`` may be set)
    :returns: ResponseMessage(202, ...) for the async path; the synchronous
              path returns whatever ``write_to_csv`` returns (presumably None
              -- TODO confirm at the caller).
    """
    # Idiomatic membership test (was `not 'is_async' in payload`).
    if 'is_async' not in payload:
        payload['is_async'] = True
        messaging.trigger_post_event(urls.USERS_EXPORT, auth_token, None, payload)
        return ResponseMessage(202, "Your download request is in process, you shall receive an email with the download link soon...")
    else:
        write_to_csv(auth_token, payload)
def process_group_related_activities(datasource_id, actor_email, event):
    """Handle a GSuite admin activity event; currently only ADD_GROUP_MEMBER.

    Records the new group membership, creates a DomainUser row for a
    previously unknown external member, triggers NEW_USER policy validation
    for external members, and logs an activity event.

    :param datasource_id: id of the GSuite datasource
    :param actor_email: email of the admin who performed the action
    :param event: activity event dict with 'name' and 'parameters' keys
                  (GSuite admin reports format -- TODO confirm)
    """
    event_name = event['name']
    if event_name == 'ADD_GROUP_MEMBER':
        activity_events_parameters = event['parameters']
        group_email = None
        user_email = None
        # Extract the group and member emails from the event parameter list.
        for param in activity_events_parameters:
            name = param['name']
            if name == 'GROUP_EMAIL':
                group_email = param['value']
            elif name == 'USER_EMAIL':
                user_email = param['value']
        user_directory_struct = DirectoryStructure()
        user_directory_struct.datasource_id = datasource_id
        user_directory_struct.member_email = user_email
        user_directory_struct.parent_email = group_email
        user_directory_struct.member_role = 'MEMBER'
        user_directory_struct.member_type = 'USER'  # TODO : check whether type is group or user
        db_session = db_connection().get_session()
        # INSERT IGNORE: membership may already exist.
        db_session.execute(DirectoryStructure.__table__.insert().prefix_with("IGNORE").
                           values(db_utils.get_model_values(DirectoryStructure, user_directory_struct)))
        if user_email:
            datasource_obj = get_datasource(datasource_id)
            domain_id = datasource_obj.domain_id
            exposure_type = utils.check_if_external_user(db_session, domain_id, user_email)
            if exposure_type == constants.EntityExposureType.EXTERNAL.value:
                # check if external user present in domain user table
                existing_user = db_session.query(DomainUser).filter(
                    and_(DomainUser.datasource_id == datasource_id,
                         DomainUser.email == user_email)).first()
                external_user = None
                if not existing_user:
                    # First time we see this external user: create a stub row.
                    external_user = DomainUser()
                    external_user.datasource_id = datasource_id
                    external_user.email = user_email
                    external_user.member_type = constants.EntityExposureType.EXTERNAL.value
                    external_user.type = 'USER'
                    # TODO: find the first name and last name of external user
                    external_user.first_name = ""
                    external_user.last_name = ""
                    db_session.add(external_user)
                user_obj = existing_user if existing_user else external_user
                # Fire NEW_USER policy validation for the external member.
                payload = {}
                payload["user"] = json.dumps(user_obj, cls=alchemy_encoder())
                policy_params = {'dataSourceId': datasource_id, 'policy_trigger':
                                 constants.PolicyTriggerType.NEW_USER.value}
                Logger().info("new_user : payload : {}".format(payload))
                messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                             constants.INTERNAL_SECRET,
                                             policy_params, payload, "gsuite")
        # Audit trail for the membership change (internal or external member).
        datasource_obj = get_datasource(datasource_id)
        tags = {"group_email": group_email, "user_email": user_email}
        activity_db().add_event(domain_id=datasource_obj.domain_id,
                                connector_type=constants.ConnectorTypes.GSUITE.value,
                                event_type='ADD_GROUP_MEMBER', actor=actor_email, tags=tags)
        db_connection().commit()
def request_scanner_data(auth_token, query_params):
    """Fetch one page of entities for a Slack scanner and fan out processing.

    Queries the scanner processor for a page, updates scanner counters,
    requests the next page (if any) via a GET event, and posts the fetched
    entities in chunks of 30 for asynchronous processing.

    :param auth_token: caller's auth token, forwarded on every event
    :param query_params: dict with at least 'dataSourceId' and 'scannerId'
                         (mutated: 'nextPageNumber' is set for follow-ups)
    """
    #try:
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return
    response = get_scanner_processor(scanner.scanner_type).query(
        auth_token, query_params, scanner)
    next_page_token = response["nextPageNumber"]
    if next_page_token:
        # More pages remain: persist the token and request the next page.
        scanner.page_token = str(next_page_token)
        query_params["nextPageNumber"] = scanner.page_token
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    query_params, "slack")
    else:
        scanner.page_token = ""
    entities_list = response["payload"]
    fetched_entities_count = len(entities_list)
    # An empty page means this scanner has nothing left to do.
    in_progress = 0 if fetched_entities_count < 1 else 1
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.total_count: DatasourceScanners.total_count + fetched_entities_count,
                DatasourceScanners.query_status: DatasourceScanners.query_status + 1})
    if in_progress == 0:
        # Nothing fetched: mark the scanner done and notify scan update.
        db_session.query(DatasourceScanners).filter(
            and_(DatasourceScanners.datasource_id == datasource_id,
                 DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: in_progress})
        db_connection().commit()
        messaging.trigger_post_event(urls.SCAN_SLACK_UPDATE, auth_token,
                                     query_params, {}, "slack")
        return
    datasource_metric_column = get_datasource_column(scanner.scanner_type)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + fetched_entities_count})
    db_connection().commit()
    #datasource = db_session.query(DataSource).filter(and_(DataSource.datasource_id == datasource_id, DataSource.is_async_delete == False)).first()
    #messaging.send_push_notification("adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))
    #db_connection().close_connection()
    # Post the fetched entities for processing in fixed chunks of 30.
    sent_member_count = 0
    while sent_member_count < fetched_entities_count:
        scanner_data = {}
        scanner_data["entities"] = entities_list[
            sent_member_count:sent_member_count + 30]
        #If this is the last set of users, in the process call, send the next page number as empty
        if fetched_entities_count - sent_member_count <= 30 and not scanner.page_token:
            query_params["nextPageNumber"] = ""
        messaging.trigger_post_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                     query_params, scanner_data, "slack")
        sent_member_count += 30
def call_validate_policies_for_admin_user(user_obj, datasource_id):
    """Trigger NEW_USER policy validation when the given user is an admin.

    Non-admin users (or a missing user object) are ignored.

    :param user_obj: DomainUser model instance (serialized for the request)
    :param datasource_id: id of the GSuite datasource the user belongs to
    """
    # Guard clause: only admins are of interest here.
    if not (user_obj and user_obj.is_admin):
        return
    request_body = {"user": json.dumps(user_obj, cls=alchemy_encoder())}
    trigger_params = {
        'dataSourceId': datasource_id,
        'policy_trigger': constants.PolicyTriggerType.NEW_USER.value,
    }
    Logger().info("new_user : payload : {}".format(request_body))
    messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                 constants.INTERNAL_SECRET, trigger_params,
                                 request_body, "gsuite")
def request_scanner_data(auth_token, query_params):
    """Fetch one page of entities for a GitHub scanner and fan out processing.

    Like the Slack variant, but with exception handling around the query and
    a connector-provided batch size instead of a hard-coded chunk of 30.

    :param auth_token: caller's auth token, forwarded on every event
    :param query_params: dict with at least 'dataSourceId' and 'scannerId'
                         (mutated: 'nextPageNumber' is set for follow-ups)
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(
        DatasourceScanners.datasource_id == datasource_id,
        DatasourceScanners.id == scanner_id).first()
    if not scanner:
        return
    response = None
    try:
        response = get_scanner_processor(scanner.scanner_type).query(
            auth_token, query_params, scanner)
    except Exception as ex:
        # On query failure, mark the scanner as no longer in progress so the
        # scan does not appear stuck, then bail out.
        Logger().exception("Exception occurred while querying scan data for - {} - {} ".format(query_params, ex))
        db_session.query(DatasourceScanners).filter(
            and_(DatasourceScanners.datasource_id == datasource_id,
                 DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: 0})
        db_connection().commit()
        return
    next_page_token = response["nextPageNumber"] if "nextPageNumber" in response else None
    if next_page_token:
        # More pages remain: persist the token and request the next page.
        scanner.next_page_token = str(next_page_token)
        query_params["nextPageNumber"] = scanner.next_page_token
        messaging.trigger_get_event(urls.GITHUB_SCAN_ENTITIES, auth_token,
                                    query_params, "github")
    else:
        scanner.next_page_token = ""
    entities_list = response["payload"]
    fetched_entities_count = len(entities_list)
    # An empty page means this scanner has nothing left to do.
    in_progress = 0 if fetched_entities_count < 1 else 1
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.total_count: DatasourceScanners.total_count + fetched_entities_count,
                DatasourceScanners.query_status: DatasourceScanners.query_status + 1})
    if in_progress == 0:
        # Nothing fetched: mark the scanner done and notify scan update.
        db_session.query(DatasourceScanners).filter(
            and_(DatasourceScanners.datasource_id == datasource_id,
                 DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: in_progress})
        db_connection().commit()
        messaging.trigger_post_event(urls.GITHUB_SCAN_UPDATE, auth_token,
                                     query_params, {}, "github")
        return
    datasource_metric_column = get_datasource_column(scanner.scanner_type)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + fetched_entities_count})
    db_connection().commit()
    # Post the fetched entities for processing in connector-sized batches.
    # NOTE(review): a response with "batchSize" of 0 would loop forever --
    # presumably connectors never send that; verify upstream.
    sent_member_count = 0
    batch_size = response["batchSize"] if "batchSize" in response else fetched_entities_count
    while sent_member_count < fetched_entities_count:
        scanner_data = {}
        scanner_data["entities"] = entities_list[sent_member_count:sent_member_count + batch_size]
        #If this is the last set of users, in the process call, send the next page number as empty
        if fetched_entities_count - sent_member_count <= batch_size and not scanner.next_page_token:
            query_params["nextPageNumber"] = ""
        messaging.trigger_post_event(urls.GITHUB_SCAN_ENTITIES, auth_token,
                                     query_params, scanner_data, "github")
        sent_member_count += batch_size
def initiate_action(auth_token, action_payload):
    """Validate, audit and execute a requested remediation action.

    Resolves the caller's domain from the auth token, validates the action
    configuration and parameters, creates (or re-uses) an audit-log entry,
    executes the action, and re-queues the payload when execution reports
    an accepted/in-progress status.

    :param auth_token: LoginUser auth token identifying the caller
    :param action_payload: dict with 'key', 'initiated_by', 'parameters' and
                           optional 'datasource_id' / 'log_id'
    :returns: ResponseMessage; 400 on validation failure, 500 on exception
    """
    try:
        action_key = action_payload['key']
        initiated_by = action_payload['initiated_by']
        action_parameters = action_payload['parameters']
        datasource_id = action_payload[
            'datasource_id'] if 'datasource_id' in action_payload else 'MANUAL'
        db_session = db_connection().get_session()
        login_user_info = db_session.query(LoginUser).filter(
            LoginUser.auth_token == auth_token).first()
        domain_id = login_user_info.domain_id
        action_config = get_action(action_key)
        if not action_config or not validate_action_parameters(
                action_config, action_parameters):
            return ResponseMessage(
                400, "Failed to execute action - Validation failed")
        # A supplied log_id means this is a re-entrant call for an action
        # already audited; otherwise create a fresh audit entry.
        log_id = action_payload[
            'log_id'] if 'log_id' in action_payload else None
        if log_id:
            log_entry = db_session.query(AuditLog).filter(
                and_(AuditLog.log_id == log_id)).first()
        else:
            log_entry = audit_action(domain_id, datasource_id, initiated_by,
                                     action_config, action_parameters)
        execution_status = execute_action(auth_token, domain_id, datasource_id,
                                          action_config, action_payload,
                                          log_entry)
        db_connection().commit()
        Logger().info("initiate_action : response body - {}".format(
            execution_status.get_response_body()))
        # Round-trip through JSON to get a plain, mutable copy of the body.
        response_body = json.loads(
            json.dumps(execution_status.get_response_body()))
        response_body['id'] = log_entry.log_id
        if execution_status.response_code == constants.ACCEPTED_STATUS_CODE:
            # Accepted => more work pending; re-post with the log id attached.
            action_payload['log_id'] = log_entry.log_id
            messaging.trigger_post_event(urls.INITIATE_ACTION_PATH, auth_token,
                                         None, action_payload)
        return ResponseMessage(execution_status.response_code, None,
                               response_body)
    except Exception as e:
        Logger().exception(
            "Exception occurred while initiating action using payload " +
            str(action_payload))
        return ResponseMessage(500, "Failed to execute action - {}".format(e))
def modify_group_membership(auth_token, datasource_id, action_name, action_parameters, log_entry):
    """Add or remove a user from a group via the connector, then mirror the
    change in the local DirectoryStructure table.

    The connector is called synchronously; on failure the audit log entry is
    marked FAILED and the connector's error is propagated.

    :param auth_token: caller's auth token, forwarded to the connector
    :param datasource_id: id of the datasource the group belongs to
    :param action_name: ADD_USER_TO_GROUP or REMOVE_USER_FROM_GROUP
    :param action_parameters: dict with 'user_email' and 'group_email'
    :param log_entry: AuditLog row updated with the outcome
    :returns: response_messages.ResponseMessage with the final status
    """
    user_email = action_parameters["user_email"]
    group_email = action_parameters["group_email"]
    db_session = db_connection().get_session()
    status_message = "Action completed successfully"
    datasource_obj = get_datasource(datasource_id)
    datasource_type = datasource_obj.datasource_type
    payload = {
        "log_id": str(log_entry.log_id),
        "action_type": action_name,
        "user_email": user_email,
        "group_email": group_email,
        'datasource_id': datasource_id,
        "domain_id": datasource_obj.domain_id
    }
    # Synchronous call so the connector result can be inspected inline.
    response = messaging.trigger_post_event(
        datasource_execute_action_map[datasource_type], auth_token, None,
        payload, connector_servicename_map[datasource_type],
        constants.TriggerType.SYNC.value)
    if response and action_name == action_constants.ActionNames.REMOVE_USER_FROM_GROUP.value:
        if response.response_code != constants.SUCCESS_STATUS_CODE:
            log_entry.status = action_constants.ActionStatus.FAILED.value
            # NOTE(review): unlike the ADD branch below, this assumes
            # response_body['error'] always carries a 'message' key -- confirm.
            status_message = 'Action failed with error - ' + \
                response.response_body['error']['message']
            log_entry.message = status_message
            return response_messages.ResponseMessage(response.response_code,
                                                     status_message)
        # Success: drop the membership row locally.
        db_session.query(DirectoryStructure).filter(
            and_(DirectoryStructure.datasource_id == datasource_id,
                 DirectoryStructure.parent_email == group_email,
                 DirectoryStructure.member_email == user_email)).delete()
    elif response and action_name == action_constants.ActionNames.ADD_USER_TO_GROUP.value:
        if response.response_code != constants.SUCCESS_STATUS_CODE:
            log_entry.status = action_constants.ActionStatus.FAILED.value
            message = response.response_body['error']['message'] if 'message' in \
                response.response_body['error'] else response.response_body['error']
            status_message = 'Action failed with error - ' + message
            log_entry.message = status_message
            return response_messages.ResponseMessage(response.response_code,
                                                     status_message)
        # Success: record the new membership with the role/type/id the
        # connector reported.
        response_body = response.response_body
        dirstructure = DirectoryStructure()
        dirstructure.datasource_id = datasource_id
        dirstructure.member_email = user_email
        dirstructure.parent_email = group_email
        dirstructure.member_type = response_body['type']
        dirstructure.member_role = response_body['role']
        dirstructure.member_id = response_body['id']
        db_session.add(dirstructure)
    log_entry.status = action_constants.ActionStatus.SUCCESS.value
    log_entry.message = status_message
    db_connection().commit()
    return response_messages.ResponseMessage(200, status_message)
def delete_repository(auth_token, datasource_id, action_key, action_parameters, log_entry):
    """Submit a repository-deletion action to the datasource's connector.

    The connector call is fire-and-forget; the audit log entry is marked
    SUCCESS as soon as the event has been posted.

    :param auth_token: caller's auth token, forwarded to the connector
    :param datasource_id: id of the datasource owning the repository
    :param action_key: action identifier, echoed to the connector
    :param action_parameters: dict containing 'resource_name'
    :param log_entry: AuditLog row to update with the submission result
    :returns: response_messages.ResponseMessage(200, ...)
    """
    status_message = "Action submitted successfully"
    source = get_datasource(datasource_id)
    connector_type = source.datasource_type
    request_body = {
        "resource_name": action_parameters["resource_name"],
        "action_type": action_key,
        "datasource_id": datasource_id,
    }
    messaging.trigger_post_event(datasource_execute_action_map[connector_type],
                                 auth_token, None, request_body,
                                 connector_servicename_map[connector_type])
    log_entry.status = action_constants.ActionStatus.SUCCESS.value
    log_entry.message = status_message
    db_connection().commit()
    return response_messages.ResponseMessage(200, status_message)
def validate_apps_installed_policy(db_session, auth_token, datasource_id, policy, application):
    """Evaluate an app-install policy against a newly installed application.

    All conditions are AND-ed (bitwise & on 0/1 results). If violated, the
    policy's actions run: SEND_EMAIL queues a notification, REVERT removes
    the app from the domain; finally an alert is posted.

    :param db_session: active SQLAlchemy session (unused here but kept for
                       signature parity with sibling validators)
    :param auth_token: caller's auth token, forwarded to actions/alerts
    :param datasource_id: id of the datasource the app was installed on
    :param policy: Policy row with .conditions and .actions collections
    :param application: dict with 'display_text', 'score', 'is_whitelisted',
                        'id' keys (schema assumed from usage -- TODO confirm)
    """
    Logger().info("validating_policy : application : {}".format(application))
    is_violated = 1
    for policy_condition in policy.conditions:
        if policy_condition.match_type == constants.PolicyMatchType.APP_NAME.value:
            is_violated = is_violated & check_value_violation(
                policy_condition, application["display_text"])
        elif policy_condition.match_type == constants.PolicyMatchType.APP_RISKINESS.value:
            is_violated = is_violated & check_value_violation(
                policy_condition, application["score"])
        # Bug fix: was `.vlaue`, which raised AttributeError whenever a policy
        # had an IS_APP_WHITELISTED condition.
        elif policy_condition.match_type == constants.PolicyMatchType.IS_APP_WHITELISTED.value:
            is_violated = is_violated & check_value_violation(
                policy_condition, application["is_whitelisted"])
    send_email_action = []
    is_reverted = False
    if is_violated:
        Logger().info(
            "Policy \"{}\" is violated, so triggering corresponding actions".
            format(policy.name))
        for action in policy.actions:
            if action.action_type == constants.PolicyActionType.SEND_EMAIL.value:
                send_email_action.append(policy)
            elif action.action_type == constants.PolicyActionType.REVERT.value:
                # Uninstall the offending app before sending any email so the
                # notification can report that it was reverted.
                remove_app_for_domain(auth_token, application["id"])
                is_reverted = True
        if len(send_email_action) > 0:
            to_address = json.loads(send_email_action[0].config)["to"]
            adya_emails.send_app_install_policy_violate_email(
                to_address, policy, application, is_reverted)
        # Raise an alert describing the violation.
        payload = {}
        payload["datasource_id"] = datasource_id
        payload["name"] = policy.name
        payload["policy_id"] = policy.policy_id
        payload["severity"] = policy.severity
        payload[
            "description_template"] = "New app install \"{{display_text}}\" for \"{{user_email}}\" has violated policy \"{{policy_name}}\""
        payload["payload"] = application
        messaging.trigger_post_event(urls.ALERTS_PATH, auth_token, None, payload)
def scan_complete_processing(db_session, auth_token, datasource_id):
    """Post-scan fixups and notifications for a Slack datasource.

    Rewrites app-user associations from user ids to emails, pushes a scan
    update notification, records app licenses, and seeds default policies
    and scheduled reports.

    :param db_session: active SQLAlchemy session
    :param auth_token: caller's auth token, forwarded to the seed endpoints
    :param datasource_id: id of the Slack datasource that finished scanning
    """
    Logger().info("Scan completed")
    # During scan, associations stored the Slack user id in user_email;
    # correlate with DomainUser to replace it with the real email address.
    db_session.query(ApplicationUserAssociation).filter(
        ApplicationUserAssociation.datasource_id == datasource_id,
        DomainUser.datasource_id == datasource_id,
        DomainUser.user_id == ApplicationUserAssociation.user_email). \
        update({ApplicationUserAssociation.user_email: DomainUser.email},
               synchronize_session = 'fetch')
    db_connection().commit()
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))
    utils.add_license_for_scanned_app(db_session, datasource)
    # Seed default policies and scheduled reports for the datasource.
    body = {'datasource_id': datasource_id, "is_default": True}
    messaging.trigger_post_event(urls.POLICIES_PATH, auth_token, {}, body)
    messaging.trigger_post_event(urls.GET_SCHEDULED_REPORT_PATH, auth_token, {}, body)
def process_scanner_data(auth_token, query_params, scanner_data):
    """Process one batch of fetched Slack entities and update scan progress.

    Delegates to the scanner-type-specific processor, bumps progress
    counters, pushes a scan-update notification, and on the final batch
    runs post-processing and posts the scan-update event.

    :param auth_token: caller's auth token, forwarded on events
    :param query_params: dict with 'dataSourceId', 'scannerId',
                         'nextPageNumber'
    :param scanner_data: batch payload containing the entities to process
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    next_page_token = query_params["nextPageNumber"]
    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return
    scanner_processor = get_scanner_processor(scanner.scanner_type)
    processed_results = scanner_processor.process(db_session, auth_token,
                                                  query_params, scanner_data)
    # An empty next-page token marks this as the final batch.
    in_progress = 1
    if not next_page_token:
        in_progress = 0
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.in_progress: in_progress})
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.process_status: DatasourceScanners.process_status + 1,
                DatasourceScanners.processed_count: DatasourceScanners.processed_count + processed_results,
                DatasourceScanners.updated_at: datetime.utcnow()})
    datasource_metric_column = get_datasource_column(scanner.scanner_type, False)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + processed_results})
    db_connection().commit()
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))
    if in_progress == 0:
        # Final batch: let the processor finish up, then announce completion.
        scanner_processor.post_process(db_session, auth_token, query_params)
        messaging.trigger_post_event(urls.SCAN_SLACK_UPDATE, auth_token,
                                     query_params, {}, "slack")
def create_watch_report(auth_token, datasource_id, action_payload, log_entry):
    """Create a weekly scheduled activity report watching a single user.

    Posts a report definition (initially inactive, cron for Mondays 09:00)
    to the scheduled-report endpoint and marks the audit entry SUCCESS.

    :param auth_token: caller's auth token, forwarded to the report endpoint
    :param datasource_id: id of the datasource the watched user belongs to
    :param action_payload: dict with 'parameters' (containing 'user_email')
                           and 'initiated_by'
    :param log_entry: AuditLog row updated with the outcome
    :returns: ResponseMessage(201, ...)
    """
    watched_email = str(action_payload['parameters']['user_email'])
    report_title = "Activity for " + watched_email
    report_definition = {
        'name': report_title,
        'description': report_title,
        'frequency': "cron(0 9 ? * 2 *)",
        'receivers': action_payload['initiated_by'],
        'report_type': "Activity",
        'selected_entity_type': "user",
        'selected_entity': watched_email,
        'selected_entity_name': watched_email,
        'is_active': 0,
        'datasource_id': datasource_id,
    }
    messaging.trigger_post_event(urls.GET_SCHEDULED_REPORT_PATH, auth_token,
                                 None, report_definition)
    log_entry.status = action_constants.ActionStatus.SUCCESS.value
    log_entry.message = 'Action completed successfully'
    return ResponseMessage(201, "Watch report created for {}".format(watched_email))
def update_user(db_session, domain_id, datasource_id, user_info):
    """Update a Slack user's row and react to admin-role promotion.

    Overwrites the stored DomainUser with the latest Slack profile; if the
    user has just become an admin, triggers NEW_USER policy validation and
    records a ROLE_CHANGED activity event.

    :param db_session: active SQLAlchemy session
    :param domain_id: caller-supplied domain id (overridden by the
                      datasource credentials' domain_id when available)
    :param datasource_id: id of the Slack datasource
    :param user_info: raw Slack user payload (dict) consumed by
                      entities.SlackUser
    """
    datasource_credentials = get_datasource_credentials(
        db_session, datasource_id)
    # Without credentials we cannot trust the domain; skip the update.
    if datasource_credentials:
        domain_id = datasource_credentials['domain_id']
        slack_user = entities.SlackUser(domain_id, datasource_id, user_info)
        user_obj = slack_user.get_model()
        existing_user_info = db_session.query(DomainUser).\
            filter(and_(DomainUser.datasource_id == datasource_id,
                        DomainUser.email == user_obj.email)).first()
        Logger().info("Existing user info - {} ".format(existing_user_info))
        Logger().info("updated user info - {}".format(user_obj))
        #update the existing user info
        db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_obj.email)).update(
                     db_utils.get_model_values(DomainUser, user_obj))
        #check for new admin creation
        if existing_user_info and (
                not existing_user_info.is_admin) and user_obj.is_admin:
            payload = {}
            payload["user"] = json.dumps(user_obj, cls=alchemy_encoder())
            policy_params = {
                'dataSourceId': datasource_id,
                'policy_trigger': constants.PolicyTriggerType.NEW_USER.value
            }
            Logger().info("new_user : payload : {}".format(payload))
            messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                         constants.INTERNAL_SECRET,
                                         policy_params, payload, "slack")
            # Audit trail: the user was promoted to admin.
            activity_db().add_event(
                domain_id=domain_id,
                connector_type=constants.ConnectorTypes.SLACK.value,
                event_type='ROLE_CHANGED',
                actor=user_obj.email,
                tags={"is_admin": user_obj.is_admin})
def process_scanner_data(auth_token, query_params, scanner_data):
    """Process one batch of fetched GitHub entities and update scan progress.

    Like the Slack variant, but the processor call is wrapped in a
    try/except and 'nextPageNumber' is optional in query_params.

    :param auth_token: caller's auth token, forwarded on events
    :param query_params: dict with 'dataSourceId', 'scannerId' and optional
                         'nextPageNumber'
    :param scanner_data: batch payload containing the entities to process
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    next_page_token = query_params["nextPageNumber"] if "nextPageNumber" in query_params else None
    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return
    scanner_processor = get_scanner_processor(scanner.scanner_type)
    processed_results = 0
    try:
        processed_results = scanner_processor.process(db_session, auth_token,
                                                      query_params, scanner_data)
    except Exception as ex:
        # Processing failure: log and bail without touching the counters.
        Logger().exception("Exception occurred while processing scan data for - {} - {}".format(query_params, ex))
        return
    # An empty next-page token marks this as the final batch.
    in_progress = 1
    if not next_page_token:
        in_progress = 0
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.in_progress: in_progress})
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.process_status: DatasourceScanners.process_status + 1,
                DatasourceScanners.processed_count: DatasourceScanners.processed_count + processed_results,
                DatasourceScanners.updated_at: datetime.utcnow()})
    datasource_metric_column = get_datasource_column(scanner.scanner_type, False)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + processed_results})
    db_connection().commit()
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification("adya-scan-update",
                                     json.dumps(datasource, cls=alchemy_encoder()))
    if in_progress == 0:
        # Final batch: let the processor finish up, then announce completion.
        scanner_processor.post_process(db_session, auth_token, query_params)
        messaging.trigger_post_event(urls.GITHUB_SCAN_UPDATE, auth_token,
                                     query_params, {}, "github")
def transfer_ownership(auth_token, datasource_id, action_name, action_parameters, log_entry):
    """Submit an ownership-transfer action to the datasource's connector.

    The connector call is fire-and-forget; the audit log entry is marked
    SUCCESS as soon as the event has been posted.

    :param auth_token: caller's auth token, forwarded to the connector
    :param datasource_id: id of the datasource holding the resources
    :param action_name: action identifier, echoed to the connector
    :param action_parameters: dict with 'old_owner_email' and
                              'new_owner_email'
    :param log_entry: AuditLog row updated with the submission result
    :returns: response_messages.ResponseMessage(200, ...)
    """
    status_message = "Action submitted successfully"
    source = get_datasource(datasource_id)
    connector_type = source.datasource_type
    request_body = {
        "log_id": str(log_entry.log_id),
        "action_type": action_name,
        "user_email": action_parameters["old_owner_email"],
        "new_owner_email": action_parameters["new_owner_email"],
        'datasource_id': datasource_id,
        "domain_id": source.domain_id,
    }
    messaging.trigger_post_event(datasource_execute_action_map[connector_type],
                                 auth_token, None, request_body,
                                 connector_servicename_map[connector_type])
    log_entry.status = action_constants.ActionStatus.SUCCESS.value
    log_entry.message = status_message
    db_connection().commit()
    return response_messages.ResponseMessage(200, status_message)
def validate_new_user_policy(db_session, auth_token, datasource_id, policy, user):
    """Evaluate a NEW_USER policy against a newly added user.

    All conditions are AND-ed (bitwise & on 0/1 results). If violated, any
    SEND_EMAIL actions fire and an alert is posted.

    :param db_session: active SQLAlchemy session (unused here but kept for
                       signature parity with sibling validators)
    :param auth_token: caller's auth token, forwarded to alerts
    :param datasource_id: id of the datasource the user was added to
    :param policy: Policy row with .conditions and .actions collections
    :param user: dict with 'member_type' and 'is_admin' keys (serialized
                 DomainUser -- TODO confirm schema at the caller)
    """
    Logger().info("validating_policy for new user : {} ".format(user))
    is_violated = 1
    for policy_condition in policy.conditions:
        if policy_condition.match_type == constants.PolicyMatchType.USER_TYPE.value:
            is_violated = is_violated & check_value_violation(
                policy_condition, user['member_type'])
        elif policy_condition.match_type == constants.PolicyMatchType.USER_ROLE.value:
            # NOTE(review): this mutates the condition object in place,
            # replacing the stored 'Admin' string with a boolean so it can be
            # compared against user['is_admin'] -- confirm the session never
            # flushes this change back to the policy table.
            if policy_condition.match_value == 'Admin':
                policy_condition.match_value = True
            else:
                policy_condition.match_value = False
            is_violated = is_violated & check_value_violation(
                policy_condition, user['is_admin'])
    if is_violated:
        Logger().info(
            "Policy \"{}\" is violated, so triggering corresponding actions".
            format(policy.name))
        for action in policy.actions:
            if action.action_type == constants.PolicyActionType.SEND_EMAIL.value:
                to_address = json.loads(action.config)["to"]
                # TODO: add proper email template
                Logger().info("validate_policy : send email")
                adya_emails.send_new_user_policy_violate_email(
                    to_address, policy, user)
        # Raise an alert describing the violation.
        payload = {}
        payload["datasource_id"] = datasource_id
        payload["name"] = policy.name
        payload["policy_id"] = policy.policy_id
        payload["severity"] = policy.severity
        payload[
            "description_template"] = "New user {{user_email}} added has violated policy \"{{policy_name}}\""
        payload["payload"] = user
        messaging.trigger_post_event(urls.ALERTS_PATH, auth_token, None, payload)
def execute_batch_delete(auth_token, datasource_id, user_email, initiated_by,
                         permissions_to_update, log_entry, action_type,
                         more_to_execute=False):
    """Synchronously ask the connector to delete a batch of permissions.

    :param auth_token: caller's auth token, forwarded to the connector
    :param datasource_id: id of the datasource owning the permissions
    :param user_email: email of the user whose permissions are affected
    :param initiated_by: email of the user who initiated the action
    :param permissions_to_update: permission rows serialized into the payload
    :param log_entry: AuditLog row whose id is echoed to the connector
    :param action_type: action identifier, echoed to the connector
    :param more_to_execute: True when further batches follow this one
    :returns: the connector's response on failure; otherwise 202 while
              batches remain, 200 once this was the last batch
    """
    source = get_datasource(datasource_id)
    connector_type = source.datasource_type
    serialized_permissions = json.dumps(permissions_to_update,
                                        cls=alchemy_encoder())
    request_body = {
        "permissions": serialized_permissions,
        "datasource_id": datasource_id,
        "domain_id": source.domain_id,
        "more_to_execute": 1 if more_to_execute else 0,
        "initiated_by_email": initiated_by,
        "log_id": str(log_entry.log_id),
        "user_email": user_email,
        "action_type": action_type,
    }
    sync_response = messaging.trigger_post_event(
        datasource_execute_action_map[connector_type], auth_token, None,
        request_body, connector_servicename_map[connector_type],
        constants.TriggerType.SYNC.value)
    # Propagate connector failures verbatim.
    if sync_response.response_code != constants.SUCCESS_STATUS_CODE:
        return sync_response
    if more_to_execute:
        return response_messages.ResponseMessage(
            constants.ACCEPTED_STATUS_CODE, 'Action submitted successfully')
    return response_messages.ResponseMessage(
        200, 'Action completed successfully')
def add_resource_permission(auth_token, datasource_id, action_payload, log_entry):
    """Grant a new permission on a resource via the datasource's connector.

    Builds a ResourcePermission from the action parameters and posts it
    synchronously to the connector's execute-action endpoint.

    :param auth_token: caller's auth token, forwarded to the connector
    :param datasource_id: id of the datasource owning the resource
    :param action_payload: dict with 'parameters' (new_permission_role,
                           resource_id, resource_owner_id, user_email),
                           'initiated_by' and 'key'
    :param log_entry: AuditLog row whose id is echoed to the connector
    :returns: response_messages.ResponseMessage with the connector outcome
    """
    action_parameters = action_payload['parameters']
    new_permission_role = action_parameters['new_permission_role']
    resource_id = action_parameters['resource_id']
    resource_owner = action_parameters['resource_owner_id']
    permission = ResourcePermission()
    permission.datasource_id = datasource_id
    permission.resource_id = resource_id
    permission.email = action_parameters['user_email']
    permission.permission_type = new_permission_role
    datasource_obj = get_datasource(datasource_id)
    datasource_type = datasource_obj.datasource_type
    body = json.dumps([permission], cls=alchemy_encoder())
    payload = {
        "permissions": body,
        "datasource_id": datasource_id,
        "domain_id": datasource_obj.domain_id,
        "initiated_by_email": action_payload['initiated_by'],
        "log_id": str(log_entry.log_id),
        "user_email": resource_owner,
        "action_type": action_payload['key']
    }
    response = messaging.trigger_post_event(
        datasource_execute_action_map[datasource_type], auth_token, None,
        payload, connector_servicename_map[datasource_type],
        constants.TriggerType.SYNC.value)
    if response and response.response_code == constants.SUCCESS_STATUS_CODE:
        return response_messages.ResponseMessage(
            constants.SUCCESS_STATUS_CODE, 'Action completed successfully')
    # Bug fix: the original else-branch dereferenced `response` even when the
    # sync trigger returned None/falsy, raising AttributeError instead of
    # reporting the failure.
    if not response:
        return response_messages.ResponseMessage(
            500, 'Failed to execute action - no response from connector')
    return response_messages.ResponseMessage(
        response.response_code, response.response_body['message'])
def scan_complete_processing(db_session, auth_token, datasource_id):
    """Post-scan fixups and notifications for a GDrive/GSuite datasource.

    Clears stale external-exposure rows, seeds default policies and
    scheduled reports, subscribes to GDrive push notifications, notifies the
    UI, records app licenses, emails the user, and recomputes resource
    exposure types.

    :param db_session: active SQLAlchemy session
    :param auth_token: caller's auth token, forwarded to downstream endpoints
    :param datasource_id: id of the datasource that finished scanning
    """
    Logger().info("Scan completed")
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    # Drop previously computed external exposures; they will be rebuilt.
    db_session.query(ExternalExposure).filter(
        ExternalExposure.domain_id == datasource.domain_id).delete()
    # Seed default policies and scheduled reports for the datasource.
    body = {"datasource_id": datasource_id, "is_default": True}
    messaging.trigger_post_event(urls.POLICIES_PATH, auth_token, {}, body)
    messaging.trigger_post_event(urls.GET_SCHEDULED_REPORT_PATH, auth_token, {}, body)
    #Subscribe for push notifications
    query_params = {'domainId': datasource.domain_id, 'dataSourceId': datasource_id}
    Logger().info("Trying for push notification subscription for domain_id: {} datasource_id: {}".format(
        datasource.domain_id, datasource_id))
    messaging.trigger_post_event(urls.SUBSCRIBE_GDRIVE_NOTIFICATIONS_PATH,
                                 auth_token, query_params, {}, "gsuite")
    messaging.send_push_notification("adya-scan-update",
                                     json.dumps(datasource, cls=alchemy_encoder()))
    utils.add_license_for_scanned_app(db_session, datasource)
    Logger().info("Send email after scan complete")
    adya_emails.send_gdrive_scan_completed_email(auth_token, datasource)
    update_resource_exposure_type(db_session, datasource.domain_id, datasource_id)
def update_resource(db_session, datasource_id, user_email, updated_resource):
    """Upsert a GSuite resource and reconcile its permission rows.

    Updates (or inserts) the Resource row, diffs the incoming permissions
    against the stored ones (update / delete / insert), bulk-inserts any newly
    discovered external users, bumps the datasource file counters for a brand
    new resource, pushes an incremental-change notification to the UI and
    finally triggers PERMISSION_CHANGE policy validation.

    Args:
        db_session: Active SQLAlchemy session.
        datasource_id: Id of the gsuite datasource.
        user_email: Email of the user the change notification belongs to.
        updated_resource: Raw resource dict from the gdrive change feed.
    """
    is_new_resource = False
    gsuite_resource = GsuiteResource(datasource_id, updated_resource)
    db_resource = gsuite_resource.get_model()
    external_users = gsuite_resource.get_external_users()
    count = db_session.query(Resource).filter(
        and_(Resource.datasource_id == datasource_id,
             Resource.resource_id == db_resource.resource_id)).update(
        db_utils.get_model_values(Resource, db_resource))
    if count < 1:
        # Resource does not exist, so insert
        is_new_resource = True
        db_session.execute(
            Resource.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(Resource, db_resource)))
    # Incoming permissions keyed by permission_id for the diff below.
    new_permissions_map = {
        new_permission.permission_id: new_permission
        for new_permission in db_resource.permissions
    }
    # Update resource permissions
    existing_permissions = db_session.query(ResourcePermission).filter(
        and_(ResourcePermission.datasource_id == datasource_id,
             ResourcePermission.resource_id == db_resource.resource_id)).all()
    # Snapshot of the pre-change permissions for the policy payload.
    existing_permissions_dump = json.dumps(existing_permissions, cls=alchemy_encoder())
    for existing_permission in existing_permissions:
        if existing_permission.permission_id in new_permissions_map:
            # Permission still present - update it in place.
            db_session.query(ResourcePermission).filter(
                and_(ResourcePermission.datasource_id == datasource_id,
                     ResourcePermission.resource_id == db_resource.resource_id,
                     ResourcePermission.permission_id == existing_permission.permission_id)) \
                .update(db_utils.get_model_values(
                    ResourcePermission,
                    new_permissions_map[existing_permission.permission_id]))
            new_permissions_map.pop(existing_permission.permission_id, None)
        else:
            # Permission no longer on the resource - delete it.
            db_session.delete(existing_permission)
    # Now add all the other new permissions.
    # NOTE: the original code also computed an unused `event_name`
    # (FILE_SHARE_PUBLIC / FILE_SHARE_ANYONEWITHLINK / FILE_SHARE_EXTERNAL)
    # per inserted permission; it was never read, so that dead code has been
    # removed. If an activity event was intended here it was never wired up.
    for new_permission in new_permissions_map.values():
        db_session.execute(
            ResourcePermission.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(ResourcePermission, new_permission)))
    # Update external users
    if len(external_users) > 0:
        external_users_values = [
            db_utils.get_model_values(DomainUser, external_user)
            for external_user in external_users
        ]
        db_session.execute(DomainUser.__table__.insert().prefix_with(
            "IGNORE").values(external_users_values))
    db_connection().commit()
    if is_new_resource:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({DataSource.processed_file_count: DataSource.processed_file_count + 1,
                    DataSource.total_file_count: DataSource.total_file_count + 1})
    messaging.send_push_notification(
        "adya-" + datasource_id,
        json.dumps({
            "type": "incremental_change",
            "datasource_id": datasource_id,
            "email": user_email,
            "resource": updated_resource
        }))
    # Trigger the policy validation now
    payload = {}
    payload["old_permissions"] = existing_permissions_dump
    payload["resource"] = json.dumps(db_resource, cls=alchemy_encoder())
    payload["new_permissions"] = json.dumps(db_resource.permissions,
                                            cls=alchemy_encoder())
    policy_params = {
        'dataSourceId': datasource_id,
        'policy_trigger': constants.PolicyTriggerType.PERMISSION_CHANGE.value
    }
    #Logger().info("update_resource : payload : {}".format(payload))
    messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                 constants.INTERNAL_SECRET, policy_params,
                                 payload, "gsuite")
def process_app(db_session, domain_id, datasource_id, payload):
    """Process a Slack app change event (install or uninstall).

    Looks up the app in the team's integration logs, then either removes the
    app and its user associations (uninstall) and logs OAUTH_REVOKE, or — for
    a reinstall of an app we no longer track — re-scans it, runs APP_INSTALL
    policy validation for non-trusted apps, and logs OAUTH_GRANT.

    Args:
        db_session: Active SQLAlchemy session.
        domain_id: Domain the event belongs to.
        datasource_id: Slack datasource id.
        payload: Slack event payload; 'profile.api_app_id' identifies the app
            and 'deleted' marks an uninstall.
    """
    slack_client = slack_utils.get_slack_client(datasource_id)
    app_id = payload["profile"]["api_app_id"]
    apps_logs = slack_client.api_call("team.integrationLogs", limit=150,
                                      app_id=app_id)
    app_name = None
    # Bug fix: default to an empty list (was None) so the reinstall branch
    # below does not raise TypeError when the Slack API returns nothing.
    logs = []
    user_id = ''
    if apps_logs:
        logs = apps_logs['logs']
        # Pick the first log entry that carries an app_type; it also gives us
        # the acting user.
        for log_data in logs:
            app_name = log_data['app_type']
            if app_name:
                user_id = log_data['user_id']
                break
    if payload['deleted']:
        # app is deleted
        app_info = db_session.query(Application).filter(
            and_(Application.domain_id == domain_id,
                 Application.display_text == app_name)).first()
        if app_info:
            db_session.query(ApplicationUserAssociation).filter(
                and_(ApplicationUserAssociation.datasource_id == datasource_id,
                     ApplicationUserAssociation.application_id == app_info.id)).delete()
            db_connection().commit()
            db_session.delete(app_info)
            activity_db().add_event(
                domain_id=domain_id,
                connector_type=constants.ConnectorTypes.SLACK.value,
                event_type='OAUTH_REVOKE',
                actor=user_id,
                tags={"display_text": app_info.display_text})
    else:
        app_info = db_session.query(Application).filter(
            and_(Application.domain_id == domain_id,
                 Application.display_text == app_name)).first()
        if not app_info:
            #reinstallation
            app_added_log_info = None
            for log_data in logs:
                if log_data['change_type'] == slack_constants.AppChangedTypes.ADDED.value:
                    app_added_log_info = log_data
                    break
            if app_added_log_info:
                query_params = {"dataSourceId": datasource_id}
                scanner_data = {"entities": [app_added_log_info]}
                # add in application and userappassociation table
                apps_scanner.process(db_session, None, query_params, scanner_data)
                user_id = app_added_log_info['user_id']
                display_text = app_added_log_info["app_type"] \
                    if "app_type" in app_added_log_info \
                    else app_added_log_info["service_type"]
                max_score = 0
                if 'scope' in app_added_log_info:
                    scopes = app_added_log_info["scope"]
                    max_score = slack_utils.get_app_score(scopes)
                #check for trusted apps
                check_app_is_trusted = False
                trusted_apps_list = (get_trusted_entity_for_domain(
                    db_session, domain_id))['trusted_apps']
                if display_text in trusted_apps_list:
                    check_app_is_trusted = True
                #validate policy if apps are not trusted
                if not check_app_is_trusted:
                    policy_params = {
                        'dataSourceId': datasource_id,
                        'policy_trigger': constants.PolicyTriggerType.APP_INSTALL.value
                    }
                    app_payload = {}
                    app_payload['display_text'] = display_text
                    app_payload['score'] = max_score
                    user_info = db_session.query(DomainUser).filter(
                        and_(DomainUser.datasource_id == datasource_id,
                             DomainUser.user_id == user_id)).first()
                    # Bug fix: the installing user may not exist in the
                    # DomainUser table yet; the original crashed here.
                    app_payload['user_email'] = user_info.email if user_info else None
                    policy_payload = {}
                    policy_payload['application'] = json.dumps(
                        app_payload, cls=alchemy_encoder())
                    Logger().info(
                        "added_app : payload : {}".format(app_payload))
                    messaging.trigger_post_event(
                        urls.SLACK_POLICIES_VALIDATE_PATH,
                        constants.INTERNAL_SECRET, policy_params,
                        policy_payload, "slack")
                    activity_db().add_event(
                        domain_id=domain_id,
                        connector_type=constants.ConnectorTypes.SLACK.value,
                        event_type='OAUTH_GRANT',
                        actor=user_id,
                        tags={
                            "score": max_score,
                            "display_text": display_text
                        })
def update_or_delete_resource_permission(auth_token, datasource_id, action_payload, log_entry):
    """Update or remove an existing permission on a resource via the connector.

    Loads the stored permission for (resource_id, user_email); if none exists,
    a CHANGE_OWNER_OF_FILE action falls back to adding a new permission, and
    any other action fails with 400. Otherwise the permission's role is
    replaced with the requested one (empty role means delete, handled by the
    connector) and a synchronous execute-action event is posted.

    Args:
        auth_token: Caller's auth token, forwarded to the connector service.
        datasource_id: Id of the datasource the resource belongs to.
        action_payload: Dict with 'parameters' (new_permission_role,
            resource_id, resource_owner_id, user_email, resource_name),
            'initiated_by' and 'key' (the action name).
        log_entry: Action log row; updated on failure, log_id forwarded.

    Returns:
        ResponseMessage / response_messages.ResponseMessage with the outcome.

    Note: the original also computed `user_type`, `current_time`, a
    `query_param` dict and a placeholder `response` string that were never
    used; that dead code has been removed.
    """
    action_parameters = action_payload['parameters']
    new_permission_role = action_parameters['new_permission_role']
    resource_id = action_parameters['resource_id']
    resource_owner = action_parameters['resource_owner_id']
    user_email = action_parameters['user_email']
    initiated_user = action_payload['initiated_by']

    db_session = db_connection().get_session()
    existing_permission = db_session.query(ResourcePermission).filter(
        and_(ResourcePermission.resource_id == resource_id,
             ResourcePermission.datasource_id == datasource_id,
             ResourcePermission.email == user_email)).first()
    # Ownership transfer to a user with no recorded permission: add one.
    if not existing_permission and action_payload[
            'key'] == action_constants.ActionNames.CHANGE_OWNER_OF_FILE.value:
        Logger().info("add a new permission ")
        response = add_resource_permission(auth_token, datasource_id,
                                           action_payload, log_entry)
        return response
    if not existing_permission:
        status_message = "Bad Request - Permission not found in records"
        Logger().info(status_message)
        log_entry.status = action_constants.ActionStatus.FAILED.value
        log_entry.message = status_message
        return ResponseMessage(400, status_message)

    # Round-trip through JSON so we can swap the role without mutating the
    # SQLAlchemy row itself.
    existing_permission_json = json.loads(
        json.dumps(existing_permission, cls=alchemy_encoder()))
    existing_permission_json["permission_type"] = new_permission_role
    body = json.dumps([existing_permission_json], cls=alchemy_encoder())

    datasource_obj = get_datasource(datasource_id)
    datasource_type = datasource_obj.datasource_type
    payload = {
        "permissions": body,
        "datasource_id": datasource_id,
        "domain_id": datasource_obj.domain_id,
        "initiated_by_email": initiated_user,
        "log_id": str(log_entry.log_id),
        "user_email": resource_owner,
        "action_type": action_payload['key'],
        "resource_name": action_parameters["resource_name"]
    }
    response = messaging.trigger_post_event(
        datasource_execute_action_map[datasource_type], auth_token, None,
        payload, connector_servicename_map[datasource_type],
        constants.TriggerType.SYNC.value)
    if response and response.response_code == constants.SUCCESS_STATUS_CODE:
        return response_messages.ResponseMessage(
            constants.SUCCESS_STATUS_CODE, 'Action completed successfully')
    # Bug fix: the original dereferenced `response` in the failure branch even
    # when it was None/falsy, raising AttributeError.
    if response:
        return response_messages.ResponseMessage(
            response.response_code, response.response_body['message'])
    return response_messages.ResponseMessage(
        500, 'Action failed - no response from connector service')
def process_application(db_session, datasource_id, payload):
    """Handle a Slack app-added event.

    Fetches the team's ADDED integration logs for the app, records the app and
    its user association via the apps scanner, and — when the app is not in
    the domain's trusted list — triggers APP_INSTALL policy validation and
    logs an OAUTH_GRANT activity event.

    Args:
        db_session: Active SQLAlchemy session.
        datasource_id: Slack datasource id.
        payload: Slack event payload; 'profile.api_app_id' identifies the app.
    """
    client = slack_utils.get_slack_client(datasource_id)
    api_app_id = payload['profile']['api_app_id']
    integration_logs = client.api_call(
        "team.integrationLogs",
        limit=150,
        change_type=slack_constants.AppChangedTypes.ADDED.value,
        app_id=api_app_id)
    added_app_logs = integration_logs["logs"]
    if not added_app_logs:
        return

    installer_id = ''
    display_text = ''
    max_score = 0
    for app_log in added_app_logs:
        # add in application and userappassociation table
        apps_scanner.process(db_session, None,
                             {"dataSourceId": datasource_id},
                             {"entities": [app_log]})
        installer_id = app_log['user_id']
        if "app_type" in app_log:
            display_text = app_log["app_type"]
        else:
            display_text = app_log["service_type"]
        if 'scope' in app_log:
            # Score is only refreshed when a log entry carries scopes, so the
            # last scoped entry wins.
            max_score = slack_utils.get_app_score(app_log["scope"])

    #check for trusted apps
    datasource_obj = get_datasource(datasource_id)
    domain_id = datasource_obj.domain_id
    trusted_apps = (get_trusted_entity_for_domain(db_session,
                                                  domain_id))['trusted_apps']
    is_trusted = display_text in trusted_apps

    # policy check and send alert if apps are not trusted
    if not is_trusted:
        installer = db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.user_id == installer_id)).first()
        app_payload = {
            'display_text': display_text,
            'score': max_score,
            'user_email': installer.email
        }
        policy_params = {
            'dataSourceId': datasource_id,
            'policy_trigger': constants.PolicyTriggerType.APP_INSTALL.value
        }
        policy_payload = {
            'application': json.dumps(app_payload, cls=alchemy_encoder())
        }
        Logger().info("added_app : payload : {}".format(app_payload))
        messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                     constants.INTERNAL_SECRET, policy_params,
                                     policy_payload, "slack")
        activity_db().add_event(
            domain_id=domain_id,
            connector_type=constants.ConnectorTypes.SLACK.value,
            event_type='OAUTH_GRANT',
            actor=installer_id,
            tags={
                "score": max_score,
                "display_text": display_text
            })
def update_resource(db_session, datasource_id, updated_resource):
    """Upsert a Slack resource and reconcile its permission rows.

    Updates (or inserts) the Resource row, diffs the incoming permissions
    against the stored ones (update / delete / insert), commits, and finally
    triggers PERMISSION_CHANGE policy validation with both the old and new
    permission sets.

    Args:
        db_session: Active SQLAlchemy session.
        datasource_id: Slack datasource id.
        updated_resource: Raw resource dict from the Slack event feed.
    """
    slack_resource = SlackFile(datasource_id, updated_resource)
    db_resource = slack_resource.get_model()
    # Bulk UPDATE returns the number of matched rows; 0 means not seen before.
    count = db_session.query(Resource).filter(
        and_(Resource.datasource_id == datasource_id,
             Resource.resource_id == db_resource.resource_id)).update(
        db_utils.get_model_values(Resource, db_resource))
    if count < 1:
        # insert new entry in resource table
        db_session.execute(
            Resource.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(Resource, db_resource)))
    # permissions map from update or newly shared resource entry.
    new_permissions_map = {}
    for new_permission in db_resource.permissions:
        new_permissions_map[new_permission.permission_id] = new_permission
    # Update resource permissions
    existing_permissions = db_session.query(ResourcePermission).filter(
        and_(ResourcePermission.datasource_id == datasource_id,
             ResourcePermission.resource_id == db_resource.resource_id)).all()
    # Snapshot taken BEFORE any mutation; forwarded as old_permissions below.
    existing_permissions_dump = json.dumps(existing_permissions,
                                           cls=alchemy_encoder())
    for existing_permission in existing_permissions:
        if existing_permission.permission_id in new_permissions_map:
            # Update the permission
            db_session.query(ResourcePermission).filter(
                and_(ResourcePermission.datasource_id == datasource_id,
                     ResourcePermission.resource_id == db_resource.resource_id,
                     ResourcePermission.permission_id == existing_permission.permission_id)) \
                .update(
                db_utils.get_model_values(ResourcePermission,
                                          new_permissions_map[existing_permission.permission_id]))
            # Pop so only genuinely new permissions remain for insertion.
            new_permissions_map.pop(existing_permission.permission_id, None)
        else:
            # Delete the permission
            db_session.delete(existing_permission)
    # Now add all the other new permissions
    for new_permission in new_permissions_map.values():
        db_session.execute(
            ResourcePermission.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(ResourcePermission, new_permission)))
    db_connection().commit()
    # trigger policy
    payload = {}
    payload["old_permissions"] = existing_permissions_dump
    payload["resource"] = json.dumps(db_resource, cls=alchemy_encoder())
    payload["new_permissions"] = json.dumps(db_resource.permissions,
                                            cls=alchemy_encoder())
    policy_params = {
        'dataSourceId': datasource_id,
        'policy_trigger': constants.PolicyTriggerType.PERMISSION_CHANGE.value
    }
    Logger().info("update_resource : payload : {}".format(payload))
    messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                 constants.INTERNAL_SECRET, policy_params,
                                 payload, "slack")
def validate_permission_change_policy(db_session, auth_token, datasource_obj, policy, resource, new_permissions):
    """Check a resource's new permissions against one policy and act on it.

    Each permission violates the policy only if ALL of the policy's conditions
    match (AND semantics). On violation: an activity event is logged for the
    highest exposure reached, REVERT actions post a synchronous
    remove-external-access call to the connector, SEND_EMAIL actions notify the
    configured address, and an alert is raised.

    Args:
        db_session: Active SQLAlchemy session (unused directly here but kept
            for signature parity with the other validators).
        auth_token: Forwarded to connector/alert calls.
        datasource_obj: DataSource row the resource belongs to.
        policy: Policy row with .conditions and .actions collections.
        resource: Dict with resource_id / resource_name / resource_owner_id.
        new_permissions: List of permission dicts (email, exposure_type,
            permission_type, ...).
    """
    datasource_id = datasource_obj.datasource_id
    Logger().info(
        "validating_policy : resource : {} , new permission : {} ".format(
            resource, new_permissions))
    is_policy_violated = False
    violated_permissions = []
    new_permissions_left = []
    highest_exposure_type = constants.EntityExposureType.PRIVATE.value
    for permission in new_permissions:
        # Acts as a boolean accumulator: starts truthy and is AND-ed (&) with
        # every condition's result, so one non-matching condition clears it.
        is_permission_violated = 1
        for policy_condition in policy.conditions:
            if policy_condition.match_type == constants.PolicyMatchType.DOCUMENT_NAME.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, resource["resource_name"])
            elif policy_condition.match_type == constants.PolicyMatchType.DOCUMENT_OWNER.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, resource["resource_owner_id"])
            elif policy_condition.match_type == constants.PolicyMatchType.DOCUMENT_EXPOSURE.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, permission["exposure_type"])
            elif policy_condition.match_type == constants.PolicyMatchType.PERMISSION_EMAIL.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, permission["email"])
        if is_permission_violated:
            is_policy_violated = True
            # Owner permissions are never reverted; they stay in the
            # "left over" bucket reported in the email.
            if not permission["permission_type"] == constants.Role.OWNER.value:
                violated_permissions.append(permission)
                highest_exposure_type = get_highest_exposure_type(
                    permission["exposure_type"], highest_exposure_type)
            else:
                new_permissions_left.append(permission)
        else:
            new_permissions_left.append(permission)
    send_email_action = []
    check_if_revert_action = False
    if is_policy_violated:
        if highest_exposure_type in constants.permission_exposure_to_event_constants:
            tags = {
                "resource_id": resource["resource_id"],
                "resource_name": resource["resource_name"],
                "new_permissions": violated_permissions
            }
            activity_db().add_event(
                domain_id=datasource_obj.domain_id,
                connector_type=datasource_obj.datasource_type,
                event_type=constants.
                permission_exposure_to_event_constants[highest_exposure_type],
                actor=resource["resource_owner_id"],
                tags=tags)
        Logger().info(
            "Policy \"{}\" is violated, so triggering corresponding actions".
            format(policy.name))
        for action in policy.actions:
            if action.action_type == constants.PolicyActionType.SEND_EMAIL.value:
                # Collected and handled after the loop so a revert (if any)
                # happens first.
                send_email_action.append(action)
            elif action.action_type == constants.PolicyActionType.REVERT.value and len(
                    violated_permissions) > 0:
                Logger().info(
                    "violated permissions : {}".format(violated_permissions))
                check_if_revert_action = True
                datasource_type = datasource_obj.datasource_type
                body = json.dumps(violated_permissions, cls=alchemy_encoder())
                payload = {
                    "permissions": body,
                    "datasource_id": datasource_id,
                    "domain_id": datasource_obj.domain_id,
                    "user_email": resource["resource_owner_id"],
                    "action_type": action_constants.ActionNames.
                    REMOVE_EXTERNAL_ACCESS_TO_RESOURCE.value
                }
                response = messaging.trigger_post_event(
                    datasource_execute_action_map[datasource_type], auth_token,
                    None, payload, connector_servicename_map[datasource_type],
                    constants.TriggerType.SYNC.value)
                # NOTE(review): on a FAILED revert the list is cleared, so the
                # email below will not claim these permissions were reverted.
                # A None/missing response leaves the list intact - confirm
                # that is intended.
                if response and not response.response_code == constants.SUCCESS_STATUS_CODE:
                    violated_permissions = []
        if len(send_email_action) > 0:
            # Only the first SEND_EMAIL action's config is honored.
            to_address = json.loads(send_email_action[0].config)["to"]
            Logger().info("validate_policy : send email")
            # None signals "no revert was attempted" to the email template,
            # as opposed to [] meaning "revert attempted but failed".
            if not check_if_revert_action:
                violated_permissions = None
            adya_emails.send_permission_change_policy_violate_email(
                to_address, policy, resource, new_permissions,
                violated_permissions, new_permissions_left)
        payload = {}
        payload["datasource_id"] = datasource_id
        payload["name"] = policy.name
        payload["policy_id"] = policy.policy_id
        payload["severity"] = policy.severity
        payload["description_template"] = "Permission changes on {{resource_owner_id}}'s document \"{{resource_name}}\" has violated policy \"{{policy_name}}\""
        payload["payload"] = resource
        messaging.trigger_post_event(urls.ALERTS_PATH, auth_token, None, payload)
def create_trusted_entities_for_a_domain(auth_token, payload):
    """Create or update the trusted domains/apps list for a domain.

    Two modes, selected by payload['more_to_execute']:
      * Initial call: diffs the requested trusted domains/apps against the
        stored ones, removes dropped trusts, adds new ones (re-posting to
        TRUSTED_ENTITIES when a helper reports more work), whitelists trusted
        apps, and upserts the TrustedEntities row.
      * Continuation call: carries 'datasource_ids' plus either 'add_domain'
        or 'remove_domain' and keeps re-posting itself until the helper
        reports completion.

    Args:
        auth_token: Caller's auth token, forwarded to helpers and events.
        payload: Request body as described above.

    Returns:
        The payload (update case), the new TrustedEntities row (insert case),
        or None on continuation calls.
    """
    db_session = db_connection().get_session()
    if payload:
        more_to_execute = False if not 'more_to_execute' in payload else payload['more_to_execute']
        existing_domains = []
        existing_apps = []
        datasource_ids = []
        if not more_to_execute:
            domain_id = payload['domain_id']
            new_domains = payload['trusted_domains']
            new_apps = payload['trusted_apps']
            # NOTE(review): elsewhere in this file get_datasource is called
            # with a datasource_id; here it is passed the auth token and
            # iterated - confirm it resolves all datasources for the caller.
            datasources = get_datasource(auth_token)
            for datasource in datasources:
                datasource_ids.append(datasource.datasource_id)
            all_trusted_entities_for_domain = get_all_trusted_entities(domain_id)
            #Try to remove trusts which existed earlier and removed in the update
            if all_trusted_entities_for_domain:
                existing_domains = all_trusted_entities_for_domain['trusted_domains']
                existing_apps = all_trusted_entities_for_domain['trusted_apps']
                remove_domains = set(existing_domains) - set(new_domains)
                if len(remove_domains) > 0:
                    for domain_name in remove_domains:
                        more_to_execute, domain_name = delete_trusted_entities_for_domain(
                            auth_token, domain_id, datasource_ids, domain_name, None)
                        # Helper processes in batches; re-post to continue.
                        if more_to_execute:
                            entity_payload = {"more_to_execute": more_to_execute,
                                              "datasource_ids": datasource_ids,
                                              "remove_domain": domain_name}
                            messaging.trigger_post_event(urls.TRUSTED_ENTITIES,
                                                         auth_token, None,
                                                         entity_payload)
                remove_apps = set(existing_apps) - set(new_apps)
                if len(remove_apps) > 0:
                    for apps_name in remove_apps:
                        delete_trusted_entities_for_domain(
                            auth_token, domain_id, datasource_ids, None, apps_name)
            #Now add the new trusts
            add_domains = set(new_domains) - set(existing_domains)
            add_apps = set(new_apps) - set(existing_apps)
            if len(add_domains) > 0:
                for new_trusted_domain in add_domains:
                    more_to_execute, add_trusted_domain = update_data_for_trusted_domains(
                        auth_token, db_session, datasource_ids, new_trusted_domain)
                    if more_to_execute:
                        entity_payload = {"more_to_execute": more_to_execute,
                                          "datasource_ids": datasource_ids,
                                          "add_domain": add_trusted_domain}
                        messaging.trigger_post_event(urls.TRUSTED_ENTITIES,
                                                     auth_token, None,
                                                     entity_payload)
            if len(add_apps) > 0:
                # Trusted apps are zero-scored and whitelisted directly.
                for apps_name in add_apps:
                    db_session.query(Application).filter(
                        and_(Application.domain_id == domain_id,
                             Application.display_text == apps_name)) \
                        .update({Application.score: 0, Application.is_whitelisted: True})
            # Lists are persisted as comma-separated strings.
            trusted_domain_string = ",".join(str(x) for x in new_domains)
            trusted_app_string = ",".join(str(x) for x in new_apps)
            if all_trusted_entities_for_domain:
                db_session.query(TrustedEntities).filter(
                    TrustedEntities.domain_id == domain_id).update({
                        TrustedEntities.trusted_domains: trusted_domain_string,
                        TrustedEntities.trusted_apps: trusted_app_string
                    })
                db_connection().commit()
                return payload
            else:
                # new entry
                db_entry = TrustedEntities()
                db_entry.domain_id = domain_id
                db_entry.trusted_domains = trusted_domain_string
                db_entry.trusted_apps = trusted_app_string
                try:
                    db_session.add(db_entry)
                except Exception as ex:
                    Logger().exception(
                        "error while inserting trusted entities in db - {}".format(ex))
                    db_session.rollback()
                    raise Exception(ex)
                db_connection().commit()
                return db_entry
        else:
            # Continuation of a previous batched add/remove.
            datasource_ids = payload['datasource_ids']
            if 'add_domain' in payload:
                more_to_execute, add_trusted_domain = update_data_for_trusted_domains(
                    auth_token, db_session, datasource_ids, payload['add_domain'])
                if more_to_execute:
                    entity_payload = {"more_to_execute": more_to_execute,
                                      "datasource_ids": datasource_ids,
                                      "add_domain": add_trusted_domain}
                    messaging.trigger_post_event(urls.TRUSTED_ENTITIES,
                                                 auth_token, None,
                                                 entity_payload)
            elif 'remove_domain' in payload:
                more_to_execute, domain_name = delete_trusted_entities_for_domain(
                    auth_token, None, datasource_ids, payload['remove_domain'], None)
                if more_to_execute:
                    entity_payload = {"more_to_execute": more_to_execute,
                                      "datasource_ids": datasource_ids,
                                      "remove_domain": domain_name}
                    messaging.trigger_post_event(urls.TRUSTED_ENTITIES,
                                                 auth_token, None,
                                                 entity_payload)
def process_token_activity(datasource_id, incoming_activity):
    """Process GSuite token (OAuth) activity events for a datasource.

    For each event in the activity: on "authorize", upsert the Application
    (scoring non-trusted apps), link it to the acting user and trigger
    APP_INSTALL policy validation when the score is non-zero; on "revoke",
    delete the application and its user associations. An activity event
    (OAUTH_GRANT / OAUTH_REVOKE) is logged per processed event.

    Args:
        datasource_id: GSuite datasource id.
        incoming_activity: Activity dict with 'actor.email' and 'events',
            each event carrying 'name' and 'parameters'
            (app_name / client_id / scope).
    """
    Logger().info("Processing token activity - {}".format(incoming_activity))
    actor_email = incoming_activity['actor']['email']
    db_session = db_connection().get_session()
    for event in incoming_activity['events']:
        # TODO(review): this lookup is loop-invariant; could be hoisted.
        domain_id = db_session.query(DataSource).filter(
            DataSource.datasource_id == datasource_id).first().domain_id
        event_name = event['name']
        event_parameters = event['parameters']
        scopes = None
        client_id = None
        app_name = None
        for param in event_parameters:
            param_name = param["name"]
            if param_name == "app_name":
                app_name = param["value"]
            elif param_name == "client_id":
                client_id = param["value"]
            elif param_name == "scope":
                scopes = param["multiValue"]
        # Fall back to the OAuth client id when no display name is given.
        if not app_name:
            app_name = client_id
        application = db_session.query(Application).filter(
            Application.display_text == app_name,
            Application.domain_id == domain_id).first()
        tags = {"display_text": app_name}
        if event_name == "authorize":
            event_name = "OAUTH_GRANT"
            # Ignore Adya install
            if "Adya" in app_name:
                continue
            inventory_app = db_session.query(AppInventory).filter(
                AppInventory.name == app_name).first()
            inventory_app_id = inventory_app.id if inventory_app else None
            max_score = 0
            is_app_whitelisted = True
            if not application:
                # First sighting of this app for the domain - build the row.
                application = Application()
                application.anonymous = 1
                application.domain_id = domain_id
                # NOTE(review): `datetime.datetime.utcnow()` here vs
                # `datetime.utcnow()` elsewhere in this file - the two imply
                # different `datetime` imports; confirm which one this module
                # actually imports.
                application.timestamp = datetime.datetime.utcnow()
                if inventory_app_id:
                    application.inventory_app_id = inventory_app_id
                    application.category = inventory_app.category
                    application.image_url = inventory_app.image_url
                # check for trusted apps
                trusted_domain_apps = (get_trusted_entity_for_domain(
                    db_session, domain_id))["trusted_apps"]
                if not app_name in trusted_domain_apps:
                    is_app_whitelisted = False
                    max_score = gutils.get_app_score(scopes) if scopes else max_score
                application.score = max_score
                application.scopes = ','.join(scopes) if scopes else None
                application.is_whitelisted = is_app_whitelisted
                if app_name:
                    application.display_text = app_name
                application.unit_num = 0
            user_association = ApplicationUserAssociation()
            user_association.user_email = actor_email
            user_association.datasource_id = datasource_id
            if client_id:
                user_association.client_id = client_id
            db_session = db_connection().get_session()
            db_session.add(application)
            try:
                # First commit assigns application.id for the association row.
                db_connection().commit()
                user_association.application_id = application.id
                db_session.add(user_association)
                db_connection().commit()
                # Trigger the policy validation now
                payload = {}
                application.user_email = user_association.user_email
                # Zero score means trusted/whitelisted - skip policy check.
                if application.score != 0:
                    payload["application"] = json.dumps(application,
                                                        cls=alchemy_encoder())
                    policy_params = {'dataSourceId': datasource_id,
                                     'policy_trigger': constants.PolicyTriggerType.APP_INSTALL.value}
                    messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                                 constants.INTERNAL_SECRET,
                                                 policy_params, payload, "gsuite")
            except IntegrityError as ie:
                # Duplicate association - the user already granted this app.
                Logger().info(
                    "user app association was already present for the app : {} and user: {}".format(
                        app_name, actor_email))
                db_session.rollback()
            tags["score"] = max_score
        elif event_name == "revoke":
            event_name = "OAUTH_REVOKE"
            if application:
                # NOTE(review): bare except silently swallows all errors here.
                try:
                    app_id = application.id
                    db_session.query(ApplicationUserAssociation).filter(
                        and_(ApplicationUserAssociation.application_id == app_id,
                             ApplicationUserAssociation.datasource_id == datasource_id)).delete()
                    db_session.delete(application)
                    db_connection().commit()
                except:
                    Logger().info(
                        "not able to delete app - {} from the db for user: {}".format(
                            app_name, actor_email))
                    db_session.rollback()
                tags["score"] = application.score
        activity_db().add_event(domain_id=domain_id,
                                connector_type=constants.ConnectorTypes.GSUITE.value,
                                event_type=event_name,
                                actor=actor_email,
                                tags=tags)
def subscribe(auth_token, domain_id, datasource_id, page_num):
    """Subscribe a GSuite datasource for push notifications, paginated.

    On the first page (page_num 0): registers cloudwatch re-subscribe/poll
    events (non-local only), clears old subscriptions, and - for an admin with
    a service account - subscribes for the activity report channels. Drive
    change subscriptions are created for domain users in pages of 25; when a
    full page is returned, the function re-posts itself with page_num + 1.

    Args:
        auth_token: Caller's auth token.
        domain_id: Domain the datasource belongs to.
        datasource_id: GSuite datasource id.
        page_num: Zero-based page of domain users to process (None -> 0).
    """
    page_num = page_num if page_num else 0
    db_session = db_connection().get_session()
    datasource = db_session.query(DataSource).filter(
        DataSource.datasource_id == datasource_id).first()
    if not datasource:
        return
    is_local_deployment = constants.DEPLOYMENT_ENV == "local"
    # One-time setup; skipped on continuation pages.
    if page_num == 0:
        if not is_local_deployment:
            # set up a resubscribe handler that runs every 6 hours cron(0 0/6 ? * * *)
            aws_utils.create_cloudwatch_event(
                "handle_channel_expiration", "cron(0 0/6 ? * * *)",
                aws_utils.get_lambda_name(
                    "get", urls.HANDLE_GDRIVE_CHANNEL_EXPIRATION_PATH, "gsuite"))
            # since we dont always get notification for changes, adding an event which will run every hour and check for drive changes
            aws_utils.create_cloudwatch_event(
                "gdrive_periodic_changes_poll", "cron(0 0/1 * * ? *)",
                aws_utils.get_lambda_name("get", urls.GDRIVE_PERIODIC_CHANGES_POLL,
                                          "gsuite"))
        # Drop any stale subscriptions before re-subscribing.
        db_session.query(PushNotificationsSubscription).filter(
            PushNotificationsSubscription.datasource_id == datasource_id).delete()
        #check for admin user
        existing_user = db_utils.get_user_session(auth_token)
        login_user_email = existing_user.email
        is_admin = existing_user.is_admin
        is_service_account_is_enabled = existing_user.is_serviceaccount_enabled
        if is_service_account_is_enabled and is_admin:
            #Try subscribing for various activity notifications
            activities_to_track = [
                constants.GSuiteNotificationType.DRIVE_ACTIVITY.value,
                constants.GSuiteNotificationType.ADMIN_ACTIVITY.value,
                constants.GSuiteNotificationType.TOKEN_ACTIVITY.value,
                constants.GSuiteNotificationType.LOGIN_ACTIVITY.value
            ]
            for activity in activities_to_track:
                subscription = prepare_new_subscription(
                    datasource, login_user_email)
                subscription.notification_type = activity
                _subscribe_for_activity(db_session, subscription,
                                        is_local_deployment)
                db_session.add(subscription)
    #Try subscribing for drive change notifications
    if datasource.is_serviceaccount_enabled:
        # Page through internal users, 25 at a time.
        domain_users = db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource.datasource_id,
                 DomainUser.type == constants.DirectoryEntityType.USER.value,
                 DomainUser.member_type == 'INT')).offset(page_num * 25).limit(25).all()
        for user in domain_users:
            subscription = prepare_new_subscription(datasource, user.email)
            subscription.drive_root_id = "SVC"
            _subscribe_for_drive_change(db_session, auth_token, subscription,
                                        is_local_deployment)
            db_session.add(subscription)
        db_connection().commit()
        #If there are more users, call the api again with next page number
        if len(domain_users) == 25:
            query_params = {
                'domainId': domain_id,
                'dataSourceId': datasource_id,
                'pageNum': page_num + 1
            }
            messaging.trigger_post_event(
                urls.SUBSCRIBE_GDRIVE_NOTIFICATIONS_PATH, auth_token,
                query_params, {}, "gsuite")
    elif page_num == 0:
        # No service account: single subscription with the logged-in user's
        # credentials (login_user_email was set in the page-0 block above).
        Logger().info(
            "Service account is not enabled, subscribing for push notification using logged in user's creds"
        )
        subscription = prepare_new_subscription(datasource, login_user_email)
        subscription.drive_root_id = ""
        _subscribe_for_drive_change(db_session, auth_token, subscription,
                                    is_local_deployment)
        db_session.add(subscription)
        db_connection().commit()
def execute_action(auth_token, domain_id, datasource_id, action_config,
                   action_payload, log_entry):
    """Dispatch a single admin action to its handler based on the action key.

    Each branch delegates to a dedicated handler (watch report, group
    membership, ownership transfer, permission changes, app revocation, ...).
    Bulk "multiple users" actions fan out: they re-post one single-user action
    per email via messaging.trigger_post_event and return 202-style success
    immediately.

    Args:
        auth_token: caller's auth token, forwarded to every handler.
        domain_id: domain the action applies to (used by permission handlers).
        datasource_id: datasource the action applies to.
        action_config: action definition; only its "key" is read here.
        action_payload: request payload; 'parameters' holds per-action args.
            NOTE(review): several branches mutate action_payload['parameters']
            in place (e.g. CHANGE_OWNER_OF_FILE) before delegating.
        log_entry: audit log row mutated in place (status/message) by some
            branches; its log_id is propagated to fanned-out sub-actions.

    Returns:
        The handler's response, usually a ResponseMessage; '' if no branch
        matched the action key.
    """
    action_parameters = action_payload['parameters']
    # Default when the key matches no branch below.
    response_msg = ''
    action_key = action_config["key"]
    # Watch report action
    if action_key == action_constants.ActionNames.WATCH_ALL_ACTION_FOR_USER.value:
        response_msg = create_watch_report(auth_token, datasource_id,
                                           action_payload, log_entry)
    # Trigger mail for cleaning files
    elif action_key == action_constants.ActionNames.NOTIFY_USER_FOR_CLEANUP.value:
        user_email = action_parameters['user_email']
        full_name = action_parameters['full_name']
        initiated_by = action_payload['initiated_by']
        # Optimistically assume success; downgrade below if the send fails.
        status_message = "Notification sent to {} for cleanUp".format(
            user_email)
        log_entry.status = action_constants.ActionStatus.SUCCESS.value
        status_code = 200
        if not adya_emails.send_clean_files_email(datasource_id, user_email,
                                                  full_name, initiated_by):
            status_message = "Sending Notification failed for {}".format(
                user_email)
            log_entry.status = action_constants.ActionStatus.FAILED.value
            status_code = 400
        log_entry.message = status_message
        response_msg = ResponseMessage(status_code, status_message)
    # Directory change actions
    elif action_key == action_constants.ActionNames.REMOVE_USER_FROM_GROUP.value or action_key == action_constants.ActionNames.ADD_USER_TO_GROUP.value:
        # Same handler resolves add vs remove from action_key.
        response_msg = modify_group_membership(auth_token, datasource_id,
                                               action_key, action_parameters,
                                               log_entry)
    # Transfer ownership
    # part of batch action
    elif action_key == action_constants.ActionNames.TRANSFER_OWNERSHIP.value:
        response_msg = transfer_ownership(auth_token, datasource_id,
                                          action_key, action_parameters,
                                          log_entry)
    # Bulk permission change actions for user
    elif action_key == action_constants.ActionNames.MAKE_ALL_FILES_PRIVATE.value:
        user_email = action_parameters['user_email']
        initiated_by = action_payload['initiated_by']
        # "ALL" -> strip every exposure type from the user's owned files.
        response_msg = update_access_for_owned_files(auth_token, domain_id,
                                                     datasource_id, user_email,
                                                     initiated_by, "ALL",
                                                     log_entry, action_key)
    elif action_key == action_constants.ActionNames.REMOVE_EXTERNAL_ACCESS.value:
        user_email = action_parameters['user_email']
        initiated_by = action_payload['initiated_by']
        # Same handler as above, scoped to EXTERNAL exposure only.
        response_msg = update_access_for_owned_files(
            auth_token, domain_id, datasource_id, user_email, initiated_by,
            constants.EntityExposureType.EXTERNAL.value, log_entry, action_key)
    elif action_key == action_constants.ActionNames.REMOVE_ALL_ACCESS_FOR_USER.value:
        user_email = action_parameters['user_email']
        initiated_by = action_payload['initiated_by']
        response_msg = remove_all_permissions_for_user(auth_token, domain_id,
                                                       datasource_id,
                                                       user_email,
                                                       initiated_by, log_entry,
                                                       action_key)
    # Bulk permission change actions for resource
    elif action_key == action_constants.ActionNames.MAKE_RESOURCE_PRIVATE.value:
        response_msg = update_access_for_resource(auth_token, domain_id,
                                                  datasource_id,
                                                  action_payload, 'ALL',
                                                  log_entry, action_key)
    elif action_key == action_constants.ActionNames.REMOVE_EXTERNAL_ACCESS_TO_RESOURCE.value:
        response_msg = update_access_for_resource(
            auth_token, domain_id, datasource_id, action_payload,
            constants.EntityExposureType.EXTERNAL.value, log_entry, action_key)
    # Single Resource permission change actions
    elif action_key == action_constants.ActionNames.UPDATE_PERMISSION_FOR_USER.value:
        response_msg = update_or_delete_resource_permission(
            auth_token, datasource_id, action_payload, log_entry)
    elif action_key == action_constants.ActionNames.DELETE_PERMISSION_FOR_USER.value:
        # Empty role signals deletion to the shared update/delete handler.
        action_parameters['new_permission_role'] = ''
        response_msg = update_or_delete_resource_permission(
            auth_token, datasource_id, action_payload, log_entry)
    elif action_key == action_constants.ActionNames.ADD_PERMISSION_FOR_A_FILE.value:
        response_msg = add_resource_permission(auth_token, datasource_id,
                                               action_payload, log_entry)
    elif action_key == action_constants.ActionNames.CHANGE_OWNER_OF_FILE.value:
        # Ownership change is expressed as a permission update: promote the
        # new owner to OWNER role on a file currently owned by old_owner_email.
        action_parameters['new_permission_role'] = constants.Role.OWNER.value
        action_parameters['resource_owner_id'] = action_parameters[
            "old_owner_email"]
        action_parameters['user_email'] = action_parameters["new_owner_email"]
        response_msg = update_or_delete_resource_permission(
            auth_token, datasource_id, action_payload, log_entry)
    # Uninstalling an app for a user
    elif action_key == action_constants.ActionNames.REMOVE_USER_FROM_APP.value:
        user_email = action_parameters['user_email']
        app_id = action_parameters['app_id']
        response_msg = revoke_user_app_access(auth_token, datasource_id,
                                              user_email, app_id, log_entry)
    # Uninstalling app for the entire domain
    elif action_key == action_constants.ActionNames.REMOVE_APP_FOR_DOMAIN.value:
        app_id = action_parameters["app_id"]
        response_msg = remove_app_for_domain(auth_token, app_id, log_entry)
    # Deleting a repository
    elif action_key == action_constants.ActionNames.DELETE_REPOSITORY.value:
        response_msg = delete_repository(auth_token, datasource_id, action_key,
                                         action_parameters, log_entry)
    elif action_key == action_constants.ActionNames.NOTIFY_MULTIPLE_USERS_FOR_CLEANUP.value:
        # Fan-out: re-post one NOTIFY_USER_FOR_CLEANUP action per user and
        # report success immediately; individual outcomes are logged under
        # the same log_id by the sub-actions.
        users_email = action_parameters['users_email']
        users_name = action_parameters['users_name']
        initiated_by = action_payload['initiated_by']
        status_message = 'Action submitted successfully'
        log_entry.status = action_constants.ActionStatus.SUCCESS.value
        if len(users_email) > 0:
            # users_name is expected to be parallel to users_email (index i).
            for i, user_email in enumerate(users_email):
                # Shallow copy so each sub-action carries its own parameters.
                modified_action_payload = dict(action_payload)
                modified_action_payload['parameters'] = {
                    'user_email': user_email,
                    'full_name': users_name[i]
                }
                modified_action_payload[
                    'key'] = action_constants.ActionNames.NOTIFY_USER_FOR_CLEANUP.value
                modified_action_payload['log_id'] = log_entry.log_id
                messaging.trigger_post_event(urls.INITIATE_ACTION_PATH,
                                             auth_token, None,
                                             modified_action_payload)
        response_msg = ResponseMessage(200, status_message)
    elif action_key == action_constants.ActionNames.REMOVE_ALL_ACCESS_FOR_MULTIPLE_USERS.value:
        # Fan-out: one REMOVE_ALL_ACCESS_FOR_USER sub-action per email.
        users_email = action_parameters['users_email']
        initiated_by = action_payload['initiated_by']
        status_message = 'Action submitted successfully'
        log_entry.status = action_constants.ActionStatus.SUCCESS.value
        response_msg = ResponseMessage(200, status_message)
        if len(users_email) > 0:
            for user_email in users_email:
                modified_action_payload = dict(action_payload)
                modified_action_payload['parameters'] = {
                    'user_email': user_email
                }
                modified_action_payload[
                    'key'] = action_constants.ActionNames.REMOVE_ALL_ACCESS_FOR_USER.value
                modified_action_payload['log_id'] = log_entry.log_id
                messaging.trigger_post_event(urls.INITIATE_ACTION_PATH,
                                             auth_token, None,
                                             modified_action_payload)
    return response_msg