def process_user(db_session, datasource, payload):
    """Persist a newly added Slack user and raise policy/activity events.

    Inserts the user row with INSERT IGNORE (so replayed events are
    harmless) and, when the member is external to the workspace, triggers
    NEW_USER policy validation and records a USER_ADDED activity event.

    Args:
        db_session: active SQLAlchemy session.
        datasource: DataSource row for this Slack connector.
        payload: raw Slack user payload from the events API.
    """
    datasource_credentials = get_datasource_credentials(
        db_session, datasource.datasource_id)
    if not datasource_credentials:
        # No stored credentials for this datasource -- nothing to do.
        return
    domain_id = datasource_credentials['domain_id']
    user_entity = entities.SlackUser(domain_id, datasource.datasource_id,
                                     payload)
    user_model_obj = user_entity.get_model()
    # INSERT IGNORE keeps duplicate/replayed events idempotent.
    db_session.execute(
        DomainUser.__table__.insert().prefix_with("IGNORE").values(
            db_utils.get_model_values(DomainUser, user_model_obj)))
    db_session.commit()
    # Check if new external member is added to a team.
    if user_model_obj.member_type == constants.EntityExposureType.EXTERNAL.value:
        # FIX: use a dedicated dict instead of clobbering the incoming
        # `payload` parameter (previous code reassigned it to {}).
        policy_payload = {
            "user": json.dumps(user_model_obj, cls=alchemy_encoder())
        }
        policy_params = {
            'dataSourceId': datasource.datasource_id,
            'policy_trigger': constants.PolicyTriggerType.NEW_USER.value
        }
        Logger().info("new_user : payload : {}".format(policy_payload))
        messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                     constants.INTERNAL_SECRET, policy_params,
                                     policy_payload, "slack")
        activity_db().add_event(
            domain_id=datasource.domain_id,
            connector_type=constants.ConnectorTypes.SLACK.value,
            event_type='USER_ADDED',
            actor=user_model_obj.email,
            tags={
                "exposure_type": user_model_obj.member_type,
                "user_email": user_model_obj.email
            })
def delegate_admin_settings(datasource_id, actor_email, event):
    """Handle GSuite admin-role delegation activity events.

    ASSIGN_ROLE: marks the user as admin, re-validates admin policies and
    records an activity event. UNASSIGN_ROLE: clears the admin flag via a
    bulk update. Commits through db_connection() at the end either way.

    Args:
        datasource_id: id of the GSuite datasource the event belongs to.
        actor_email: email of the admin who performed the action.
        event: raw activity event dict with 'name' and 'parameters' keys.
    """
    db_session = db_connection().get_session()
    event_name = event['name']
    activity_events_parameters = event['parameters']
    user_email = None
    # Extract the target user's email from the event parameter list.
    for param in activity_events_parameters:
        name = param['name']
        if name == 'USER_EMAIL':
            user_email = param['value']
    if event_name == 'ASSIGN_ROLE':
        user_obj = db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email)).first()
        if user_obj:
            user_obj.is_admin = True
            # New admin -> run admin-related policy validation.
            call_validate_policies_for_admin_user(user_obj, datasource_id)
            datasource_obj = get_datasource(datasource_id)
            tags = {"user_email": user_email, "is_admin": user_obj.is_admin}
            activity_db().add_event(
                domain_id=datasource_obj.domain_id,
                connector_type=constants.ConnectorTypes.GSUITE.value,
                event_type=event_name,
                actor=actor_email,
                tags=tags)
    elif event_name == 'UNASSIGN_ROLE':
        # Bulk update; only flips rows that are currently admins.
        db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email,
                 DomainUser.is_admin == True)).update(
                     {DomainUser.is_admin: False})
    # Commit covers both the attribute change and the bulk update.
    db_connection().commit()
def process_group_related_activities(datasource_id, actor_email, event):
    """Handle GSuite group activity events; currently only ADD_GROUP_MEMBER.

    Records the group membership in DirectoryStructure (INSERT IGNORE for
    idempotence). If the added member is external to the domain, ensures a
    DomainUser row exists for them and triggers NEW_USER policy validation.
    Always records an ADD_GROUP_MEMBER activity event and commits.
    """
    event_name = event['name']
    if event_name == 'ADD_GROUP_MEMBER':
        activity_events_parameters = event['parameters']
        group_email = None
        user_email = None
        # Pull group and member emails out of the event parameter list.
        for param in activity_events_parameters:
            name = param['name']
            if name == 'GROUP_EMAIL':
                group_email = param['value']
            elif name == 'USER_EMAIL':
                user_email = param['value']
        user_directory_struct = DirectoryStructure()
        user_directory_struct.datasource_id = datasource_id
        user_directory_struct.member_email = user_email
        user_directory_struct.parent_email = group_email
        user_directory_struct.member_role = 'MEMBER'
        # TODO : check whether type is group or user
        user_directory_struct.member_type = 'USER'
        db_session = db_connection().get_session()
        # INSERT IGNORE keeps replayed events idempotent.
        db_session.execute(
            DirectoryStructure.__table__.insert().prefix_with("IGNORE").
            values(db_utils.get_model_values(DirectoryStructure,
                                             user_directory_struct)))
        if user_email:
            datasource_obj = get_datasource(datasource_id)
            domain_id = datasource_obj.domain_id
            exposure_type = utils.check_if_external_user(
                db_session, domain_id, user_email)
            if exposure_type == constants.EntityExposureType.EXTERNAL.value:
                # check if external user present in domain user table
                existing_user = db_session.query(DomainUser).filter(
                    and_(DomainUser.datasource_id == datasource_id,
                         DomainUser.email == user_email)).first()
                external_user = None
                if not existing_user:
                    # First time we see this external user: create a stub row.
                    external_user = DomainUser()
                    external_user.datasource_id = datasource_id
                    external_user.email = user_email
                    external_user.member_type = \
                        constants.EntityExposureType.EXTERNAL.value
                    external_user.type = 'USER'
                    # TODO: find the first name and last name of external user
                    external_user.first_name = ""
                    external_user.last_name = ""
                    db_session.add(external_user)
                user_obj = existing_user if existing_user else external_user
                payload = {}
                payload["user"] = json.dumps(user_obj, cls=alchemy_encoder())
                policy_params = {
                    'dataSourceId': datasource_id,
                    'policy_trigger': constants.PolicyTriggerType.NEW_USER.value
                }
                Logger().info("new_user : payload : {}".format(payload))
                messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                             constants.INTERNAL_SECRET,
                                             policy_params, payload, "gsuite")
        # Record the activity event regardless of exposure type.
        datasource_obj = get_datasource(datasource_id)
        tags = {"group_email": group_email, "user_email": user_email}
        activity_db().add_event(
            domain_id=datasource_obj.domain_id,
            connector_type=constants.ConnectorTypes.GSUITE.value,
            event_type='ADD_GROUP_MEMBER',
            actor=actor_email,
            tags=tags)
        db_connection().commit()
def process_channel_unarchive(db_session, datasource_id, payload):
    """Mark a Slack channel as active again after an unarchive event.

    NOTE(review): Slack channels appear to be stored as DomainUser rows,
    with the channel id in DomainUser.user_id and archival tracked via
    is_suspended — confirm against the Slack channel scanner.

    Args:
        db_session: active SQLAlchemy session.
        datasource_id: id of the Slack datasource.
        payload: Slack event payload carrying the channel id.
    """
    channel_id = payload['channel']
    # Clear the suspended/archived flag for this channel row.
    db_session.query(DomainUser).filter(
        and_(DomainUser.datasource_id == datasource_id,
             DomainUser.user_id == channel_id)).update(
                 {DomainUser.is_suspended: False})
    datasource_obj = get_datasource(datasource_id)
    if datasource_obj:
        tags = {"channel_id": channel_id}
        activity_db().add_event(
            domain_id=datasource_obj.domain_id,
            connector_type=constants.ConnectorTypes.SLACK.value,
            event_type='CHANNEL_UNARCHIVE',
            actor=None,
            tags=tags)
def process_user_related_activities(datasource_id, actor_email, event):
    """Handle GSuite user lifecycle activity events.

    Supported events:
      * CREATE_USER          - fetch user from the Directory API, insert row,
                               and validate admin policies for it.
      * GRANT_ADMIN_PRIVILEGE- flag the user as admin and validate policies.
      * SUSPEND_USER         - bulk-set is_suspended.
      * DELETE_USER          - remove the user's stored info.

    Always records an activity event and commits at the end.
    """
    event_name = event['name']
    activity_events_parameters = event['parameters']
    user_email = None
    user_obj = None
    db_session = db_connection().get_session()
    # Extract the target user's email from the event parameter list.
    for param in activity_events_parameters:
        name = param['name']
        if name == 'USER_EMAIL':
            user_email = param['value']
    datasource_obj = get_datasource(datasource_id)
    tags = {"user_email": user_email}
    if event_name == 'CREATE_USER':
        if user_email:
            directory_service = gutils.get_directory_service(None, actor_email)
            results = None
            try:
                results = directory_service.users().get(
                    userKey=user_email).execute()
            except RefreshError as ex:
                Logger().info("User query : Not able to refresh credentials")
            except HttpError as ex:
                Logger().info("User query : Domain not found error")
            if results:
                gsuite_user = user.GsuiteUser(datasource_id, results)
                user_obj = gsuite_user.get_model()
                # INSERT IGNORE keeps replayed events idempotent.
                db_session.execute(
                    DomainUser.__table__.insert().prefix_with("IGNORE").
                    values(db_utils.get_model_values(DomainUser, user_obj)))
                call_validate_policies_for_admin_user(user_obj, datasource_id)
                # FIX: dropped the unused `additional_payload` local that was
                # built here and never read.
                tags["is_admin"] = user_obj.is_admin
    elif event_name == 'GRANT_ADMIN_PRIVILEGE':
        user_obj = db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email)).first()
        if user_obj:
            user_obj.is_admin = True
            call_validate_policies_for_admin_user(user_obj, datasource_id)
            tags["is_admin"] = user_obj.is_admin
    elif event_name == "SUSPEND_USER":
        db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email)).update(
                     {DomainUser.is_suspended: True})
    elif event_name == "DELETE_USER":
        delete_user_info(db_session, user_email, datasource_id)
    activity_db().add_event(
        domain_id=datasource_obj.domain_id,
        connector_type=constants.ConnectorTypes.GSUITE.value,
        event_type=event_name,
        actor=actor_email,
        tags=tags)
    db_connection().commit()
def process_activity(auth_token, payload, event_type):
    """Record GitHub team lifecycle webhooks as activity events.

    Only TEAM events are handled; 'created' and 'deleted' actions map to
    TEAM_ADDED / TEAM_REMOVED activity entries. Other actions are ignored.
    """
    session = db_connection().get_session()
    datasource = session.query(DataSource).filter(
        DataSource.datasource_type ==
        constants.ConnectorTypes.GITHUB.value).first()
    domain_id = datasource.domain_id
    if event_type != github_constants.GithubNativeEventTypes.TEAM.value:
        return
    # Dispatch table keeps the action -> event mapping in one place.
    event_for_action = {
        "created": 'TEAM_ADDED',
        "deleted": 'TEAM_REMOVED',
    }
    event_name = event_for_action.get(payload["action"])
    if event_name is not None:
        activity_db().add_event(domain_id,
                                constants.ConnectorTypes.GITHUB.value,
                                event_name, None, {})
def new_channel_created(db_session, datasource_id, payload):
    """Persist a newly created (public) Slack channel and record the event.

    Args:
        db_session: active SQLAlchemy session.
        datasource_id: id of the Slack datasource.
        payload: Slack channel_created event payload.
    """
    channel_info = payload['channel']
    # channel_created fires for public channels only, so tag accordingly.
    channel_info['channel_type'] = slack_constants.ChannelTypes.PUBLIC.value
    channel_obj = entities.SlackChannel(datasource_id, channel_info)
    channel_obj_model = channel_obj.get_model()
    db_session.add(channel_obj_model)
    datasource_obj = get_datasource(datasource_id)
    if datasource_obj:
        tags = {"channel_email": channel_obj_model.email}
        # BUG FIX: `tags` was built but an empty dict was passed before,
        # so the recorded event carried no channel information.
        activity_db().add_event(
            domain_id=datasource_obj.domain_id,
            connector_type=constants.ConnectorTypes.SLACK.value,
            event_type='CHANNEL_CREATED',
            actor=None,
            tags=tags)
def process_drive_activity(datasource_id, incoming_activity):
    """Process one GSuite Drive activity record.

    Only 'download' events are currently recorded as activity entries; the
    'acl_change' handling is disabled (original implementation kept below
    as commented-out code for reference).

    Args:
        datasource_id: id of the GSuite datasource.
        incoming_activity: raw Reports API activity record.
    """
    actor = incoming_activity['actor']
    # Sometimes email does not come, when the event is triggered by a
    # service (Ex- Google Support for DLP).
    actor_email = actor['email'] if 'email' in actor else ""
    resource = {}
    resource_permission = {}
    last_modifying_user_email = actor_email
    # Strip the trailing 'Z' from the RFC3339 timestamp.
    last_modified_time = incoming_activity['id']['time'][:-1]
    for event in incoming_activity['events']:
        event_name = event['name']
        event_type = event['type']
        activity_events_parameters = event['parameters']
        primary_name = activity_events_parameters[0]['name']
        if event_type == 'acl_change':
            # acl_change processing is intentionally disabled.
            pass
            # if primary_name == 'primary_event':
            #     boolValue = activity_events_parameters[0]['boolValue']
            #     if boolValue:
            #         max_perm_string = None
            #         for parameter in activity_events_parameters:
            #             if parameter['name'] == 'doc_id':
            #                 resource["resource_id"] = parameter['value']
            #                 resource_permission["resource_id"] = parameter['value']
            #             elif parameter['name'] == 'owner':
            #                 resource['resource_owner_id'] = parameter['value']
            #             # permission change activity
            #             elif parameter['name'] == 'target_user':
            #                 resource_permission['email'] = parameter['value']
            #             elif parameter['name'] == 'new_value':
            #                 perm_values = parameter['multiValue']  # ['can_edit','can_view']
            #                 for perm in perm_values:
            #                     curr_perm_value = constants.permission_priority[perm] if perm in constants.permission_priority else 0
            #                     max_perm_Value = constants.permission_priority[max_perm_string] if max_perm_string in constants.permission_priority else 0
            #                     max_perm_string = perm if curr_perm_value > max_perm_Value else max_perm_string
            #                 resource_permission['permission_type'] = constants.Permission_Role_mapping[max_perm_string]
            #             elif parameter['name'] == 'visibility':
            #                 resource_permission['exposure_type'] = parameter['value']
        elif event_type == 'download':
            resource_id = None
            if primary_name == 'primary_event':
                boolValue = activity_events_parameters[0]['boolValue']
                if boolValue:
                    # Find the downloaded document's id among the parameters.
                    for parameter in activity_events_parameters:
                        if parameter['name'] == 'doc_id':
                            resource_id = parameter['value']
                    datasource_obj = get_datasource(datasource_id)
                    activity_db().add_event(
                        domain_id=datasource_obj.domain_id,
                        connector_type=constants.ConnectorTypes.GSUITE.value,
                        event_type="DOWNLOAD",
                        actor=actor_email,
                        tags={'resource_id': resource_id})
def process_member_left_channel(db_session, datasource_id, payload):
    """Drop a user's channel membership row when they leave a Slack channel.

    Deletes the matching DirectoryStructure row and records a
    MEMBER_LEFT_CHANNEL activity event when the datasource is known.
    """
    left_user_id = payload['user']
    channel_id = payload['channel']
    info = get_user_and_channel_info_based_on_ids(
        db_session, datasource_id, left_user_id, channel_id)
    user_record = info['user']
    channel_record = info['channel']
    # Remove the (channel, user) membership link.
    db_session.query(DirectoryStructure).filter(
        and_(DirectoryStructure.datasource_id == datasource_id,
             DirectoryStructure.parent_email == channel_record.email,
             DirectoryStructure.member_email == user_record.email)).delete()
    datasource_obj = get_datasource(datasource_id)
    if not datasource_obj:
        return
    event_tags = {
        "channel_id": channel_id,
        "user_email": user_record.email,
        "channel_email": channel_record.email,
    }
    activity_db().add_event(
        domain_id=datasource_obj.domain_id,
        connector_type=constants.ConnectorTypes.SLACK.value,
        event_type='MEMBER_LEFT_CHANNEL',
        actor=None,
        tags=event_tags)
def process_activity(auth_token, payload, event_type):
    """Record GitHub organization membership webhooks as activity events.

    Handles ORGANIZATION events only: member_added / member_removed become
    ORG_MEMBER_ADDED / ORG_MEMBER_REMOVED entries; member_invited is
    deliberately ignored for now.
    """
    session = db_connection().get_session()
    datasource = session.query(DataSource).filter(
        DataSource.datasource_type ==
        constants.ConnectorTypes.GITHUB.value).first()
    domain_id = datasource.domain_id
    if event_type != github_constants.GithubNativeEventTypes.ORGANIZATION.value:
        return
    # Map webhook action -> stored activity event name.
    action_to_event = {
        "member_added": 'ORG_MEMBER_ADDED',
        "member_removed": 'ORG_MEMBER_REMOVED',
    }
    event_name = action_to_event.get(payload["action"])
    if event_name is not None:
        activity_db().add_event(domain_id,
                                constants.ConnectorTypes.GITHUB.value,
                                event_name, None, {})
def process_member_joined_channel(db_session, datasource_id, payload):
    """Insert a membership row when a user joins a Slack channel.

    Resolves user/channel rows from their Slack ids, inserts the
    DirectoryStructure link (INSERT IGNORE for idempotence), and records a
    MEMBER_JOINED_CHANNEL activity event.
    """
    joined_user_id = payload['user'] if 'user' in payload else None
    channel_id = payload['channel']
    if joined_user_id:
        user_channel_info = get_user_and_channel_info_based_on_ids(
            db_session, datasource_id, joined_user_id, channel_id)
        user_info = user_channel_info['user']
        channel_info = user_channel_info['channel']
        # SlackDirectoryMember reads the channel email from payload['name'].
        payload['name'] = channel_info.email
        directory_member_obj = entities.SlackDirectoryMember(
            db_session, datasource_id, user_info, joined_user_id, None,
            payload)
        # INSERT IGNORE keeps duplicate join events idempotent.
        db_session.execute(
            DirectoryStructure.__table__.insert().prefix_with("IGNORE").
            values(db_utils.get_model_values(
                DirectoryStructure, directory_member_obj.get_model())))
        datasource_obj = get_datasource(datasource_id)
        if datasource_obj:
            activity_db().add_event(
                domain_id=datasource_obj.domain_id,
                connector_type=constants.ConnectorTypes.SLACK.value,
                event_type='MEMBER_JOINED_CHANNEL',
                actor=None,
                tags={"channel_id": channel_id,
                      "channel_email": channel_info.email,
                      "user_email": user_info.email})
def update_user(db_session, domain_id, datasource_id, user_info):
    """Refresh a stored Slack user from fresh profile info.

    Overwrites the existing DomainUser row with the new values, and if the
    update promoted the user to admin (was not admin before, is now),
    triggers NEW_USER policy validation and records a ROLE_CHANGED event.
    The compare-before-update ordering is load-bearing here.
    """
    datasource_credentials = get_datasource_credentials(
        db_session, datasource_id)
    if datasource_credentials:
        domain_id = datasource_credentials['domain_id']
        slack_user = entities.SlackUser(domain_id, datasource_id, user_info)
        user_obj = slack_user.get_model()
        # Snapshot the current row BEFORE the bulk update so the admin
        # promotion check below compares old vs. new state.
        existing_user_info = db_session.query(DomainUser).\
            filter(and_(DomainUser.datasource_id == datasource_id,
                        DomainUser.email == user_obj.email)).first()
        Logger().info("Existing user info - {} ".format(existing_user_info))
        Logger().info("updated user info - {}".format(user_obj))
        # update the existing user info
        db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_obj.email)).update(
                     db_utils.get_model_values(DomainUser, user_obj))
        # check for new admin creation
        if existing_user_info and (
                not existing_user_info.is_admin) and user_obj.is_admin:
            payload = {}
            payload["user"] = json.dumps(user_obj, cls=alchemy_encoder())
            policy_params = {
                'dataSourceId': datasource_id,
                'policy_trigger': constants.PolicyTriggerType.NEW_USER.value
            }
            Logger().info("new_user : payload : {}".format(payload))
            messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                         constants.INTERNAL_SECRET,
                                         policy_params, payload, "slack")
            activity_db().add_event(
                domain_id=domain_id,
                connector_type=constants.ConnectorTypes.SLACK.value,
                event_type='ROLE_CHANGED',
                actor=user_obj.email,
                tags={"is_admin": user_obj.is_admin})
def get_activites_for_domain(filters):
    """Fetch activity events matching *filters*, with paging and sorting.

    Args:
        filters: optional dict; recognized keys are 'pageNumber',
            'pageSize', 'sortColumn' and 'sortType'. May be None.

    Returns:
        List of activity dicts with 'timestamp' and '_id' stringified so
        the result is JSON-serializable.
    """
    # BUG FIX: the None-guard must run before any .get() access;
    # previously filters=None crashed on the first lookup.
    filters = filters if filters else {}
    page_number = int(
        filters.get('pageNumber')) if filters.get('pageNumber') else 0
    sort_column = filters.get('sortColumn') if filters.get(
        'sortColumn') else "timestamp"
    page_limit = filters.get('pageSize') if filters.get('pageSize') else 20
    sort_type = filters.get('sortType') if filters.get('sortType') else None
    cursor = activity_db.activity_db().get_activites_with_filter(
        filters, sort_column, sort_type, page_number, page_limit)
    activities = []
    for activity in cursor:
        # ObjectId / datetime are not JSON-serializable; stringify them.
        activity['timestamp'] = str(activity['timestamp'])
        activity['_id'] = str(activity['_id'])
        activities.append(activity)
    return activities
def process_app(db_session, domain_id, datasource_id, payload):
    """Handle install/uninstall of a Slack app for a user.

    Looks up the app in the team integration logs to resolve its name and
    installing user. On delete: removes the app row and its user
    associations and records OAUTH_REVOKE. On install (no existing row):
    runs the apps scanner, scores the app's scopes, validates policy when
    the app is not trusted, and records OAUTH_GRANT.
    """
    slack_client = slack_utils.get_slack_client(datasource_id)
    app_id = payload["profile"]["api_app_id"]
    apps_logs = slack_client.api_call("team.integrationLogs", limit=150,
                                      app_id=app_id)
    app_name = None
    logs = None
    user_id = ''
    if apps_logs:
        logs = apps_logs['logs']
        # First log entry with an app_type gives us the name and installer.
        for log_data in logs:
            app_name = log_data['app_type']
            if app_name:
                user_id = log_data['user_id']
                break
    if payload['deleted']:
        # app is deleted
        app_info = db_session.query(Application).filter(
            and_(Application.domain_id == domain_id,
                 Application.display_text == app_name)).first()
        if app_info:
            # Remove user associations first, then the app itself.
            db_session.query(ApplicationUserAssociation).filter(
                and_(ApplicationUserAssociation.datasource_id == datasource_id,
                     ApplicationUserAssociation.application_id ==
                     app_info.id)).delete()
            db_connection().commit()
            db_session.delete(app_info)
            activity_db().add_event(
                domain_id=domain_id,
                connector_type=constants.ConnectorTypes.SLACK.value,
                event_type='OAUTH_REVOKE',
                actor=user_id,
                tags={"display_text": app_info.display_text})
    else:
        app_info = db_session.query(Application).filter(
            and_(Application.domain_id == domain_id,
                 Application.display_text == app_name)).first()
        if not app_info:
            # reinstallation
            app_added_log_info = None
            for log_data in logs:
                if log_data[
                        'change_type'] == slack_constants.AppChangedTypes.ADDED.value:
                    app_added_log_info = log_data
                    break
            if app_added_log_info:
                query_params = {"dataSourceId": datasource_id}
                scanner_data = {"entities": [app_added_log_info]}
                # add in application and userappassociation table
                apps_scanner.process(db_session, None, query_params,
                                     scanner_data)
                user_id = app_added_log_info['user_id']
                display_text = app_added_log_info[
                    "app_type"] if "app_type" in app_added_log_info else app_added_log_info[
                        "service_type"]
                max_score = 0
                if 'scope' in app_added_log_info:
                    scopes = app_added_log_info["scope"]
                    max_score = slack_utils.get_app_score(scopes)
                # check for trusted apps
                check_app_is_trusted = False
                trusted_apps_list = (get_trusted_entity_for_domain(
                    db_session, domain_id))['trusted_apps']
                if display_text in trusted_apps_list:
                    check_app_is_trusted = True
                # validate policy if apps are not trusted
                if not check_app_is_trusted:
                    policy_params = {
                        'dataSourceId': datasource_id,
                        'policy_trigger':
                        constants.PolicyTriggerType.APP_INSTALL.value
                    }
                    app_payload = {}
                    app_payload['display_text'] = display_text
                    app_payload['score'] = max_score
                    user_info = db_session.query(DomainUser).filter(
                        and_(DomainUser.datasource_id == datasource_id,
                             DomainUser.user_id == user_id)).first()
                    app_payload['user_email'] = user_info.email
                    policy_payload = {}
                    policy_payload['application'] = json.dumps(
                        app_payload, cls=alchemy_encoder())
                    Logger().info(
                        "added_app : payload : {}".format(app_payload))
                    messaging.trigger_post_event(
                        urls.SLACK_POLICIES_VALIDATE_PATH,
                        constants.INTERNAL_SECRET, policy_params,
                        policy_payload, "slack")
                # NOTE(review): event recorded for every new install,
                # trusted or not — confirm this matches intended behavior.
                activity_db().add_event(
                    domain_id=domain_id,
                    connector_type=constants.ConnectorTypes.SLACK.value,
                    event_type='OAUTH_GRANT',
                    actor=user_id,
                    tags={
                        "score": max_score,
                        "display_text": display_text
                    })
def get_widget_data(auth_token, widget_id, datasource_id=None,
                    user_email=None, event_filters=None):
    """Return the data for one dashboard widget, identified by *widget_id*.

    Scope resolution: with an auth_token, the logged-in user's domain (and
    admin status) determines which datasources are visible; with only a
    datasource_id, that single datasource is used. Non-admin users on
    service-account-enabled domains see only data tied to resources they
    own.

    Returns a widget-specific payload (int, dict with rows/totalCount, ...)
    or None when neither auth_token nor datasource_id is given.

    NOTE(review): Python 2 code (`print` statements, `iteritems`).
    """
    if not (auth_token or datasource_id):
        return None
    db_session = db_connection().get_session()
    is_admin = False
    login_user_email = user_email
    is_service_account_is_enabled = True
    domain_datasource_ids = []
    user_domain_id = None
    if auth_token:
        # Resolve scope from the logged-in user's session.
        existing_user = db_utils.get_user_session(auth_token)
        user_domain_id = existing_user.domain_id
        login_user_email = existing_user.email
        is_admin = existing_user.is_admin
        is_service_account_is_enabled = existing_user.is_serviceaccount_enabled
        datasource_ids = db_session.query(DataSource.datasource_id).filter(
            DataSource.domain_id == user_domain_id).all()
        # Unpack single-column result tuples.
        domain_datasource_ids = [r for r, in datasource_ids]
    elif datasource_id:
        # Resolve scope from the single datasource.
        datasource = db_session.query(DataSource).filter(
            DataSource.datasource_id == datasource_id).first()
        user_domain_id = datasource.domain_id
        is_service_account_is_enabled = datasource.is_serviceaccount_enabled
        domain_datasource_ids = [datasource.datasource_id]
    data = None
    if widget_id == 'usersCount':
        if is_service_account_is_enabled and not is_admin:
            # Non-admin: count only external users on resources they own.
            user_count_query = db_session.query(
                ResourcePermission.email).filter(
                    and_(
                        ResourcePermission.datasource_id.in_(
                            domain_datasource_ids),
                        Resource.datasource_id ==
                        ResourcePermission.datasource_id,
                        Resource.resource_owner_id == login_user_email,
                        ResourcePermission.resource_id ==
                        Resource.resource_id,
                        DomainUser.email == ResourcePermission.email,
                        DomainUser.datasource_id.in_(domain_datasource_ids),
                        DomainUser.member_type == constants.EntityExposureType.
                        EXTERNAL.value)).distinct().count()
            # add 1 for loggin user
            user_count_query += 1
        else:
            user_count_query = db_session.query(DomainUser.email).filter(
                DomainUser.datasource_id.in_(
                    domain_datasource_ids)).distinct().count()
        data = user_count_query
    elif widget_id == 'groupsCount':
        # Groups are DomainUser rows whose type is not USER.
        group_count_query = db_session.query(DomainUser).filter(
            DomainUser.datasource_id.in_(domain_datasource_ids)).filter(
                DomainUser.type != constants.DirectoryEntityType.USER.value)
        if is_service_account_is_enabled and not is_admin:
            # Restrict to groups the user is a member of.
            group_count_query = group_count_query.filter(
                DirectoryStructure.datasource_id == DomainUser.datasource_id,
                DirectoryStructure.member_email == login_user_email,
                DirectoryStructure.parent_email == DomainUser.email)
        data = group_count_query.count()
    elif widget_id == 'filesCount':
        file_count_query = db_session.query(Resource).filter(
            and_(Resource.datasource_id.in_(domain_datasource_ids),
                 Resource.resource_type != 'folder'))
        if is_service_account_is_enabled and not is_admin:
            file_count_query = file_count_query.filter(
                Resource.resource_owner_id == login_user_email)
        data = file_count_query.count()
    elif widget_id == 'foldersCount':
        folder_count_query = db_session.query(Resource).filter(
            and_(Resource.datasource_id.in_(domain_datasource_ids),
                 Resource.resource_type == 'folder'))
        if is_service_account_is_enabled and not is_admin:
            folder_count_query = folder_count_query.filter(
                Resource.resource_owner_id == login_user_email)
        data = folder_count_query.count()
    elif widget_id == 'sharedDocsByType':
        # Count resources per exposure type, excluding internal/private.
        data = {}
        shared_docsByType_query = db_session.query(
            Resource.exposure_type,
            func.count(Resource.exposure_type)).filter(
                and_(
                    Resource.exposure_type !=
                    constants.EntityExposureType.INTERNAL.value,
                    Resource.datasource_id.in_(domain_datasource_ids),
                    Resource.exposure_type !=
                    constants.EntityExposureType.PRIVATE.value)).group_by(
                        Resource.exposure_type)
        if is_service_account_is_enabled and not is_admin:
            shared_docsByType_query = shared_docsByType_query.filter(
                Resource.resource_owner_id == login_user_email)
        shared_docs_by_type = shared_docsByType_query.all()
        public_count = 0
        external_count = 0
        domain_count = 0
        anyone_with_link_count = 0
        trusted_count = 0
        # Fan the grouped (exposure_type, count) rows out into counters.
        for share_type in shared_docs_by_type:
            if share_type[0] == constants.EntityExposureType.EXTERNAL.value:
                external_count = share_type[1]
            elif share_type[
                    0] == constants.EntityExposureType.ANYONEWITHLINK.value:
                anyone_with_link_count = share_type[1]
            elif share_type[0] == constants.EntityExposureType.PUBLIC.value:
                public_count = share_type[1]
            elif share_type[0] == constants.EntityExposureType.DOMAIN.value:
                domain_count = share_type[1]
            elif share_type[0] == constants.EntityExposureType.TRUSTED.value:
                trusted_count = share_type[1]
        data["rows"] = [
            [constants.DocType.PUBLIC_COUNT.value, public_count],
            [
                constants.DocType.ANYONE_WITH_LINK_COUNT.value,
                anyone_with_link_count
            ],
            [constants.DocType.EXTERNAL_COUNT.value, external_count],
            [constants.DocType.DOMAIN_COUNT.value, domain_count],
            [constants.DocType.TRUSTED.value, trusted_count]
        ]
        data[
            "totalCount"] = public_count + external_count + domain_count + anyone_with_link_count + trusted_count
    elif widget_id == 'sharedDocsList':
        # Top-5 externally exposed docs plus total count.
        data = {}
        shared_docs_list_query = db_session.query(
            Resource.resource_name, Resource.resource_type).filter(
                and_(
                    Resource.datasource_id.in_(domain_datasource_ids),
                    or_(
                        Resource.exposure_type ==
                        constants.EntityExposureType.EXTERNAL.value,
                        Resource.exposure_type ==
                        constants.EntityExposureType.PUBLIC.value,
                        Resource.exposure_type ==
                        constants.EntityExposureType.ANYONEWITHLINK.value)))
        shared_docs_totalcount_query = db_session.query(
            Resource.resource_name, Resource.resource_type).filter(
                and_(
                    Resource.datasource_id.in_(domain_datasource_ids),
                    or_(
                        Resource.exposure_type ==
                        constants.EntityExposureType.EXTERNAL.value,
                        Resource.exposure_type ==
                        constants.EntityExposureType.PUBLIC.value,
                        Resource.exposure_type ==
                        constants.EntityExposureType.ANYONEWITHLINK.value)))
        if is_service_account_is_enabled and not is_admin:
            shared_docs_list_query = shared_docs_list_query.filter(
                Resource.resource_owner_id == login_user_email)
            shared_docs_totalcount_query = shared_docs_totalcount_query.filter(
                Resource.resource_owner_id == login_user_email)
        data["rows"] = shared_docs_list_query.limit(5).all()
        data["totalCount"] = shared_docs_totalcount_query.count()
    elif widget_id == 'externalUsersList':
        data = {}
        # Read from cache
        external_users_from_cache = db_session.query(ExternalExposure).filter(
            ExternalExposure.domain_id == user_domain_id).order_by(
                ExternalExposure.exposure_count.desc()).all()
        # Serve the cache if it is fresher than 10 minutes.
        if len(external_users_from_cache) > 0 and (
                external_users_from_cache[0].updated_at >
                (datetime.datetime.utcnow() - timedelta(seconds=600))):
            rows = []
            total_count = 0
            for external_user in external_users_from_cache:
                rows.append(
                    [external_user.email, external_user.exposure_count])
                total_count += 1
            data["rows"] = rows
            data["totalCount"] = total_count
            return data
        external_user_list = db_session.query(
            ResourcePermission.email,
            func.count(ResourcePermission.email)).filter(
                and_(
                    ResourcePermission.exposure_type ==
                    constants.EntityExposureType.EXTERNAL.value,
                    ResourcePermission.datasource_id.in_(
                        domain_datasource_ids))).group_by(
                            ResourcePermission.email).order_by(
                                func.count(ResourcePermission.email).desc())
        if is_service_account_is_enabled and not is_admin:
            external_user_list = external_user_list.filter(
                and_(
                    Resource.datasource_id ==
                    ResourcePermission.datasource_id,
                    Resource.resource_owner_id == login_user_email,
                    ResourcePermission.resource_id == Resource.resource_id))
        user_group_emails_and_count = external_user_list.limit(5).all()
        external_user_emails_count_map = {}
        # get only external users ; removing channels/groups
        for row in user_group_emails_and_count:
            count_for_particular_email = row[1]
            email = row[0]
            # If the email is a group/channel, attribute its count to its
            # external members instead.
            directory_struct = db_session.query(DirectoryStructure).filter(
                and_(
                    DirectoryStructure.parent_email == email,
                    DirectoryStructure.datasource_id.in_(
                        domain_datasource_ids),
                    DomainUser.datasource_id ==
                    DirectoryStructure.datasource_id,
                    DomainUser.email == DirectoryStructure.member_email,
                    DomainUser.member_type ==
                    constants.EntityExposureType.EXTERNAL.value)).all()
            if directory_struct:
                for memberdetails in directory_struct:
                    user = memberdetails.member_email
                    if user in external_user_emails_count_map:
                        count = external_user_emails_count_map[user]
                        external_user_emails_count_map[
                            user] = count + count_for_particular_email
                    else:
                        external_user_emails_count_map[
                            user] = count_for_particular_email
            else:
                if email in external_user_emails_count_map:
                    count = external_user_emails_count_map[email]
                    external_user_emails_count_map[
                        email] = count + count_for_particular_email
                else:
                    external_user_emails_count_map[
                        email] = count_for_particular_email
        external_user_perms_count = []
        total_count = 0
        for key, value in external_user_emails_count_map.iteritems():
            total_count = total_count + 1
            input_list = [key, value]
            external_user_perms_count.append(input_list)
        sorted_external_user_list = sorted(external_user_perms_count,
                                           key=lambda x: x[1],
                                           reverse=True)
        # Rebuild the cache table with the freshly computed counts.
        cache_for_db = []
        now = datetime.datetime.utcnow()
        for ext_user in sorted_external_user_list:
            cache = {}
            cache["domain_id"] = user_domain_id
            cache["email"] = ext_user[0]
            cache["exposure_count"] = ext_user[1]
            cache["updated_at"] = now
            cache_for_db.append(cache)
        if len(cache_for_db) > 0:
            db_session.query(ExternalExposure).filter(
                ExternalExposure.domain_id == user_domain_id).delete()
            db_session.bulk_insert_mappings(ExternalExposure, cache_for_db)
            db_connection().commit()
        data["rows"] = sorted_external_user_list[:5]
        data["totalCount"] = external_user_list.count()
    elif widget_id == 'userAppAccess':
        # Bucket installed apps into low/medium/high risk by score.
        data = {}
        apps = db_session.query(Application).distinct(Application.id).filter(
            Application.id == ApplicationUserAssociation.application_id,
            ApplicationUserAssociation.datasource_id.in_(
                domain_datasource_ids))
        if is_service_account_is_enabled and not is_admin:
            apps = apps.filter(
                ApplicationUserAssociation.user_email == login_user_email)
        severity = {}
        low = apps.filter(Application.score < 4).count()
        medium = apps.filter(
            and_(Application.score >= 4, Application.score < 7)).count()
        high = apps.filter(
            and_(Application.score >= 7, Application.score <= 10)).count()
        severity['Low Risk'] = low
        severity['Medium Risk'] = medium
        severity['High Risk'] = high
        data["rows"] = severity
        data["totalCount"] = low + medium + high
    elif widget_id == 'filesWithFileType':
        # Top 5 shared file types, with the remainder rolled into 'Others'.
        data = {}
        file_type_query = db_session.query(
            Resource.resource_type,
            func.count(Resource.resource_type)).filter(
                and_(
                    Resource.datasource_id.in_(domain_datasource_ids),
                    Resource.exposure_type !=
                    constants.EntityExposureType.INTERNAL.value,
                    Resource.exposure_type !=
                    constants.EntityExposureType.PRIVATE.value)).group_by(
                        Resource.resource_type).order_by(
                            func.count(Resource.resource_type).desc())
        if is_service_account_is_enabled and not is_admin:
            file_type_query = file_type_query.filter(
                Resource.resource_owner_id == login_user_email)
        all_file_types = file_type_query.all()
        first_five = all_file_types[0:5]
        others = all_file_types[5:]
        totalcount = 0
        for count in first_five:
            totalcount += count[1]
        others_count = 0
        for count in others:
            others_count += count[1]
        if others_count > 0:
            first_five.append(('Others', others_count))
            totalcount += others_count
        data["rows"] = first_five
        data["totalCount"] = totalcount
    elif widget_id == "internalUserList":
        # Internal owners ranked by number of externally exposed resources.
        data = {}
        internal_user_list = db_session.query(
            Resource.resource_owner_id,
            func.count(Resource.resource_id)).filter(
                and_(
                    Resource.datasource_id.in_(domain_datasource_ids),
                    Resource.exposure_type.in_([
                        constants.EntityExposureType.EXTERNAL.value,
                        constants.EntityExposureType.PUBLIC.value
                    ]))).group_by(Resource.resource_owner_id).order_by(
                        func.count(Resource.resource_id).desc())
        if is_service_account_is_enabled and not is_admin:
            internal_user_list = internal_user_list.filter(
                Resource.resource_owner_id == login_user_email)
        data["rows"] = internal_user_list.limit(5).all()
        data["totalCount"] = internal_user_list.count()
    elif widget_id == 'expensesByCategory':
        data = app_controller.get_app_stats(auth_token)
    elif widget_id == 'activitiesByEventType':
        # Group event counts into per-event-type daily series.
        activities = activity_db.activity_db().get_event_stats(
            event_filters, None, None)
        series_map = {}
        for activity in activities:
            print activity
            event_type = activity["event_type"]
            date = str(activity["year"]) + "-" + str(
                activity["month"]) + "-" + str(activity["day"])
            if event_type in series_map:
                series_map[event_type]["data"][date] = activity["count"]
            else:
                series_map[event_type] = {"name": event_type, "data": {}}
        print series_map
        data = series_map.values()
    return data
def process_application(db_session, datasource_id, payload):
    """Process a Slack app-install event.

    Pulls the recent "added" integration logs for the app referenced in the
    event payload, runs them through the apps scanner (which upserts the
    Application / UserAppAssociation rows), and - if the app is not in the
    domain's trusted-apps list - triggers the APP_INSTALL policy validation.
    Finally records an OAUTH_GRANT activity event.

    :param db_session: active SQLAlchemy session.
    :param datasource_id: id of the Slack datasource the event belongs to.
    :param payload: Slack event payload; must contain profile.api_app_id.
    """
    slack_client = slack_utils.get_slack_client(datasource_id)
    app_id = payload['profile']['api_app_id']
    apps = slack_client.api_call(
        "team.integrationLogs",
        limit=150,
        change_type=slack_constants.AppChangedTypes.ADDED.value,
        app_id=app_id)
    apps_list = apps["logs"]
    if apps_list:
        user_id = ''
        display_text = ''
        max_score = 0
        for app in apps_list:
            query_params = {"dataSourceId": datasource_id}
            scanner_data = {"entities": [app]}
            # add in application and userappassociation table
            apps_scanner.process(db_session, None, query_params, scanner_data)
            # NOTE: these keep the values from the *last* log entry only;
            # the subsequent trust check / alert is for that entry.
            user_id = app['user_id']
            display_text = app["app_type"] if "app_type" in app else app[
                "service_type"]
            if 'scope' in app:
                scopes = app["scope"]
                max_score = slack_utils.get_app_score(scopes)

        # check for trusted apps
        datasource_obj = get_datasource(datasource_id)
        domain_id = datasource_obj.domain_id
        check_app_is_trusted = False
        trusted_apps_list = (get_trusted_entity_for_domain(
            db_session, domain_id))['trusted_apps']
        if display_text in trusted_apps_list:
            check_app_is_trusted = True

        # policy check and send alert if apps are not trusted
        if not check_app_is_trusted:
            policy_params = {
                'dataSourceId': datasource_id,
                'policy_trigger': constants.PolicyTriggerType.APP_INSTALL.value
            }
            app_payload = {}
            app_payload['display_text'] = display_text
            app_payload['score'] = max_score
            user_info = db_session.query(DomainUser).filter(
                and_(DomainUser.datasource_id == datasource_id,
                     DomainUser.user_id == user_id)).first()
            # FIX: .first() can return None when the installing user has not
            # been synced yet; previously this raised AttributeError.
            app_payload['user_email'] = user_info.email if user_info else None
            policy_payload = {}
            policy_payload['application'] = json.dumps(app_payload,
                                                       cls=alchemy_encoder())
            Logger().info("added_app : payload : {}".format(app_payload))
            messaging.trigger_post_event(urls.SLACK_POLICIES_VALIDATE_PATH,
                                         constants.INTERNAL_SECRET,
                                         policy_params, policy_payload,
                                         "slack")
        activity_db().add_event(
            domain_id=domain_id,
            connector_type=constants.ConnectorTypes.SLACK.value,
            event_type='OAUTH_GRANT',
            actor=user_id,
            tags={
                "score": max_score,
                "display_text": display_text
            })
def process_token_activity(datasource_id, incoming_activity):
    """Process a GSuite OAuth token activity (authorize / revoke).

    For "authorize" events: creates (or reuses) the Application row, scores
    it against the domain's trusted-apps list, records the user<->app
    association, and triggers APP_INSTALL policy validation for scored apps.
    For "revoke" events: deletes the application and its user associations.
    Every processed event is recorded in the activity log.

    :param datasource_id: id of the GSuite datasource.
    :param incoming_activity: Google admin-reports activity record with
        actor.email and a list of events.
    """
    Logger().info("Processing token activity - {}".format(incoming_activity))
    actor_email = incoming_activity['actor']['email']
    db_session = db_connection().get_session()
    # Loop-invariant: the datasource (and hence domain) is fixed for this
    # call, so look it up once instead of once per event.
    domain_id = db_session.query(DataSource).filter(
        DataSource.datasource_id == datasource_id).first().domain_id
    for event in incoming_activity['events']:
        event_name = event['name']
        event_parameters = event['parameters']
        scopes = None
        client_id = None
        app_name = None
        for param in event_parameters:
            param_name = param["name"]
            if param_name == "app_name":
                app_name = param["value"]
            elif param_name == "client_id":
                client_id = param["value"]
            elif param_name == "scope":
                scopes = param["multiValue"]
        # Some grants come without a display name; fall back to client id.
        if not app_name:
            app_name = client_id
        application = db_session.query(Application).filter(
            Application.display_text == app_name,
            Application.domain_id == domain_id).first()
        tags = {"display_text": app_name}
        if event_name == "authorize":
            event_name = "OAUTH_GRANT"
            # Ignore Adya install
            if "Adya" in app_name:
                continue
            inventory_app = db_session.query(AppInventory).filter(
                AppInventory.name == app_name).first()
            inventory_app_id = inventory_app.id if inventory_app else None
            max_score = 0
            is_app_whitelisted = True
            if not application:
                application = Application()
                application.anonymous = 1
                application.domain_id = domain_id
                application.timestamp = datetime.datetime.utcnow()
                if inventory_app_id:
                    application.inventory_app_id = inventory_app_id
                    application.category = inventory_app.category
                    application.image_url = inventory_app.image_url
                # check for trusted apps - only untrusted apps get a score
                trusted_domain_apps = (get_trusted_entity_for_domain(
                    db_session, domain_id))["trusted_apps"]
                if app_name not in trusted_domain_apps:
                    is_app_whitelisted = False
                    max_score = gutils.get_app_score(scopes) if scopes else max_score
                application.score = max_score
                application.scopes = ','.join(scopes) if scopes else None
                application.is_whitelisted = is_app_whitelisted
                if app_name:
                    application.display_text = app_name
                application.unit_num = 0
            user_association = ApplicationUserAssociation()
            user_association.user_email = actor_email
            user_association.datasource_id = datasource_id
            if client_id:
                user_association.client_id = client_id
            db_session = db_connection().get_session()
            db_session.add(application)
            try:
                # First commit assigns application.id for the association row.
                db_connection().commit()
                user_association.application_id = application.id
                db_session.add(user_association)
                db_connection().commit()
                # Trigger the policy validation now
                payload = {}
                application.user_email = user_association.user_email
                if application.score != 0:
                    payload["application"] = json.dumps(application,
                                                        cls=alchemy_encoder())
                    policy_params = {
                        'dataSourceId': datasource_id,
                        'policy_trigger':
                        constants.PolicyTriggerType.APP_INSTALL.value
                    }
                    messaging.trigger_post_event(
                        urls.GSUITE_POLICIES_VALIDATE_PATH,
                        constants.INTERNAL_SECRET, policy_params, payload,
                        "gsuite")
            except IntegrityError:
                # Duplicate association - the grant was already recorded.
                Logger().info(
                    "user app association was already present for the app : {} and user: {}".format(
                        app_name, actor_email))
                db_session.rollback()
            tags["score"] = max_score
        elif event_name == "revoke":
            event_name = "OAUTH_REVOKE"
            if application:
                try:
                    app_id = application.id
                    db_session.query(ApplicationUserAssociation).filter(
                        and_(ApplicationUserAssociation.application_id == app_id,
                             ApplicationUserAssociation.datasource_id == datasource_id)).delete()
                    db_session.delete(application)
                    db_connection().commit()
                # FIX: was a bare "except:" which also swallows
                # KeyboardInterrupt/SystemExit; narrow to Exception.
                except Exception:
                    Logger().info(
                        "not able to delete app - {} from the db for user: {}".format(
                            app_name, actor_email))
                    db_session.rollback()
                tags["score"] = application.score
        activity_db().add_event(
            domain_id=domain_id,
            connector_type=constants.ConnectorTypes.GSUITE.value,
            event_type=event_name,
            actor=actor_email,
            tags=tags)
def process_activity(auth_token, payload, event_type):
    """Process an incoming GitHub webhook event.

    Handles repository lifecycle events (created / archived / publicized),
    repository vulnerability alerts (currently no-op) and forks, recording a
    corresponding activity-log event for each.

    :param auth_token: caller's auth token (unused here, kept for interface).
    :param payload: GitHub webhook payload dict.
    :param event_type: GithubNativeEventTypes value of the webhook.
    """
    db_session = db_connection().get_session()
    # NOTE(review): picks the first GitHub datasource in the DB, which
    # assumes a single GitHub connector - confirm for multi-tenant setups.
    datasource = db_session.query(DataSource).filter(
        DataSource.datasource_type == constants.ConnectorTypes.GITHUB.value).first()
    domain_id = datasource.domain_id
    if event_type == github_constants.GithubNativeEventTypes.REPOSITORY.value:
        action = payload["action"]
        repository = payload["repository"]
        owner_id = repository["owner"]["id"]
        if action == "created":
            # Update the Resource table with the new repository
            repo = entities.GithubRepository(datasource.datasource_id,
                                             repository)
            repo_model = repo.get_model()
            repo_permission = entities.GithubRepositoryPermission(
                datasource.datasource_id, repository)
            repo_permission_model = repo_permission.get_model()
            db_session.add(repo_model)
            db_session.add(repo_permission_model)
            db_connection().commit()
            activity_db().add_event(domain_id,
                                    constants.ConnectorTypes.GITHUB.value,
                                    'REP_ADDED', owner_id, {})
        elif action == "archived":
            activity_db().add_event(domain_id,
                                    constants.ConnectorTypes.GITHUB.value,
                                    'REP_ARCHIVED', owner_id, {})
        elif action == "unarchived":
            pass
        elif action == "publicized":
            # Update the Repository as public in the Resource table.
            # FIX: was update({Resource.exposure_type == PUBLIC}) - a set
            # containing a boolean comparison, not a column->value mapping -
            # so the row was never updated. Also the repo id lives under
            # payload["repository"], not at the payload top level.
            db_session.query(Resource).filter(
                Resource.datasource_id == datasource.datasource_id,
                Resource.resource_id == repository["id"]).update(
                    {Resource.exposure_type:
                     constants.EntityExposureType.PUBLIC.value})
            db_connection().commit()
            activity_db().add_event(domain_id,
                                    constants.ConnectorTypes.GITHUB.value,
                                    'REP_PUBLIC', owner_id, {})
        elif action == "privatized":
            pass
    elif event_type == github_constants.GithubNativeEventTypes.REPOSITORY_VULNERABILITY_ALERT.value:
        action = payload["action"]
        if action == "create":
            pass
        elif action == "dismiss":
            pass
        elif action == "resolve":
            pass
    elif event_type == github_constants.GithubNativeEventTypes.FORK.value:
        forkee = payload["forkee"]
        repository = payload["repository"]
        owner_id = forkee["owner"]["id"]
        activity_db().add_event(domain_id,
                                constants.ConnectorTypes.GITHUB.value,
                                'REP_FORKED', owner_id, {})
def validate_permission_change_policy(db_session, auth_token, datasource_obj,
                                      policy, resource, new_permissions):
    """Evaluate a permission-change policy against new permissions on a resource.

    Each permission is checked against ALL conditions of the policy (AND
    semantics via bitwise &). Violating non-owner permissions are collected
    and, depending on the policy's actions, reverted and/or reported by
    email; a violation always raises an alert at the end.

    :param db_session: active SQLAlchemy session (unused directly here but
        part of the validator interface).
    :param auth_token: auth token forwarded to action/alert endpoints.
    :param datasource_obj: DataSource row the resource belongs to.
    :param policy: policy row with .conditions and .actions collections.
    :param resource: dict with resource_id / resource_name / resource_owner_id.
    :param new_permissions: list of permission dicts (email, exposure_type,
        permission_type, ...).
    """
    datasource_id = datasource_obj.datasource_id
    Logger().info(
        "validating_policy : resource : {} , new permission : {} ".format(
            resource, new_permissions))
    is_policy_violated = False
    violated_permissions = []       # non-owner permissions that violate the policy
    new_permissions_left = []       # permissions that survive (not reverted)
    highest_exposure_type = constants.EntityExposureType.PRIVATE.value
    for permission in new_permissions:
        # Start at 1 (truthy) and AND in each condition's verdict; a
        # permission violates only if every condition matches.
        is_permission_violated = 1
        for policy_condition in policy.conditions:
            if policy_condition.match_type == constants.PolicyMatchType.DOCUMENT_NAME.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, resource["resource_name"])
            elif policy_condition.match_type == constants.PolicyMatchType.DOCUMENT_OWNER.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, resource["resource_owner_id"])
            elif policy_condition.match_type == constants.PolicyMatchType.DOCUMENT_EXPOSURE.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, permission["exposure_type"])
            elif policy_condition.match_type == constants.PolicyMatchType.PERMISSION_EMAIL.value:
                is_permission_violated = is_permission_violated & check_value_violation(
                    policy_condition, permission["email"])
        if is_permission_violated:
            is_policy_violated = True
            # Owner permissions are never reverted, only non-owner ones.
            if not permission["permission_type"] == constants.Role.OWNER.value:
                violated_permissions.append(permission)
                highest_exposure_type = get_highest_exposure_type(
                    permission["exposure_type"], highest_exposure_type)
            else:
                new_permissions_left.append(permission)
        else:
            new_permissions_left.append(permission)
    send_email_action = []
    check_if_revert_action = False
    if is_policy_violated:
        # Log an exposure activity event if this exposure level maps to one.
        if highest_exposure_type in constants.permission_exposure_to_event_constants:
            tags = {
                "resource_id": resource["resource_id"],
                "resource_name": resource["resource_name"],
                "new_permissions": violated_permissions
            }
            activity_db().add_event(
                domain_id=datasource_obj.domain_id,
                connector_type=datasource_obj.datasource_type,
                event_type=constants.
                permission_exposure_to_event_constants[highest_exposure_type],
                actor=resource["resource_owner_id"],
                tags=tags)
        Logger().info(
            "Policy \"{}\" is violated, so triggering corresponding actions".
            format(policy.name))
        for action in policy.actions:
            if action.action_type == constants.PolicyActionType.SEND_EMAIL.value:
                # Deferred: email is sent after the revert attempt below so
                # the outcome can be reflected in the message.
                send_email_action.append(action)
            elif action.action_type == constants.PolicyActionType.REVERT.value and len(
                    violated_permissions) > 0:
                Logger().info(
                    "violated permissions : {}".format(violated_permissions))
                check_if_revert_action = True
                datasource_type = datasource_obj.datasource_type
                body = json.dumps(violated_permissions, cls=alchemy_encoder())
                payload = {
                    "permissions": body,
                    "datasource_id": datasource_id,
                    "domain_id": datasource_obj.domain_id,
                    "user_email": resource["resource_owner_id"],
                    "action_type": action_constants.ActionNames.
                    REMOVE_EXTERNAL_ACCESS_TO_RESOURCE.value
                }
                # Synchronous call so we know whether the revert succeeded.
                response = messaging.trigger_post_event(
                    datasource_execute_action_map[datasource_type],
                    auth_token, None, payload,
                    connector_servicename_map[datasource_type],
                    constants.TriggerType.SYNC.value)
                # On revert failure, clear the list so the email does not
                # claim permissions were removed.
                if response and not response.response_code == constants.SUCCESS_STATUS_CODE:
                    violated_permissions = []
        if len(send_email_action) > 0:
            to_address = json.loads(send_email_action[0].config)["to"]
            Logger().info("validate_policy : send email")
            # None signals the email template that no revert was attempted.
            if not check_if_revert_action:
                violated_permissions = None
            adya_emails.send_permission_change_policy_violate_email(
                to_address, policy, resource, new_permissions,
                violated_permissions, new_permissions_left)
        # Always raise an alert for the violation.
        payload = {}
        payload["datasource_id"] = datasource_id
        payload["name"] = policy.name
        payload["policy_id"] = policy.policy_id
        payload["severity"] = policy.severity
        payload[
            "description_template"] = "Permission changes on {{resource_owner_id}}'s document \"{{resource_name}}\" has violated policy \"{{policy_name}}\""
        payload["payload"] = resource
        messaging.trigger_post_event(urls.ALERTS_PATH, auth_token, None,
                                     payload)