def create_datasource(auth_token, access_token, scopes, team_id, domain, email_domain_id):
    """Create a Slack DataSource plus its stored OAuth credentials, then
    trigger the initial asynchronous Slack scan.

    Returns the newly created DataSource row.
    """
    now = datetime.datetime.utcnow()
    db_session = db_connection().get_session()
    # Resolve the logged-in user from the auth token to obtain the domain.
    login_user = db_session.query(LoginUser).filter(LoginUser.auth_token == auth_token).first()
    datasource_id = str(uuid.uuid4())
    datasource = DataSource()
    datasource.domain_id = login_user.domain_id
    datasource.datasource_id = datasource_id
    datasource.display_name = domain
    datasource.creation_time = now
    datasource.is_push_notifications_enabled = 0
    datasource.datasource_type = constants.ConnectorTypes.SLACK.value
    # Slack team id is the external source identifier for this datasource.
    datasource.source_id = team_id
    db_session.add(datasource)
    db_connection().commit()
    # Persist the OAuth token and scope alongside the datasource.
    datasource_credentials = DatasourceCredentials()
    datasource_credentials.datasource_id = datasource.datasource_id
    datasource_credentials.credentials = json.dumps(
        {'team_id': team_id, 'domain_id': email_domain_id, 'domain_name': domain,
         'authorize_scope_name': scopes, 'token': access_token})
    datasource_credentials.created_user = login_user.email
    db_session.add(datasource_credentials)
    db_connection().commit()
    query_params = {"domainId": email_domain_id, "dataSourceId": datasource.datasource_id,
                    "userEmail": login_user.email}
    # Kick off the asynchronous scan for the new datasource.
    messaging.trigger_get_event(urls.SCAN_SLACK_UPDATE, auth_token, query_params, "slack")
    return datasource
def execute_slack_actions(event, context):
    """Handler that runs a Slack action and records its outcome in the audit log."""
    req_session = RequestSession(event)
    req_error = req_session.validate_authorized_request(True)
    if req_error:
        return req_error
    body = req_session.get_body()
    response = slack_action_facade.execute_slack_actions(req_session.get_auth_token(), body)
    # Map the facade's result onto an HTTP status and an audit-log status.
    if response['action_status'] == constants.ResponseType.ERROR.value:
        status_code = 404
        log_status = action_constants.ActionStatus.FAILED.value
        status_message = "Action failed"
    else:
        status_code = 200
        log_status = action_constants.ActionStatus.SUCCESS.value
        status_message = "Action completed successfully"
    db_session = db_connection().get_session()
    # Never overwrite an entry that has already been marked FAILED.
    audit_entry = db_session.query(AuditLog).filter(
        and_(AuditLog.log_id == body['log_id'],
             AuditLog.status != action_constants.ActionStatus.FAILED.value)).first()
    if audit_entry:
        audit_entry.status = log_status
        audit_entry.message = status_message
        db_connection().commit()
    return req_session.generate_response(status_code, response)
def process_activity(payload):
    """Route an incoming Slack notification event to its type-specific handler."""
    team_id = payload["team_id"]
    db_session = db_connection().get_session()
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.source_id == team_id,
             DataSource.datasource_type == constants.ConnectorTypes.SLACK.value)).first()
    if not datasource:
        # No Slack datasource registered for this team; nothing to do.
        return
    event = payload['event']
    event_type = event["type"]
    notif = slack_constants.NotificationEvents
    # Dispatch table: archive/unarchive/rename events share a handler for
    # their channel and group variants.
    handlers = {
        notif.CHANNEL_ARCHIVE.value: process_channel_archive,
        notif.GROUP_ARCHIVE.value: process_channel_archive,
        notif.CHANNEL_CREATED.value: new_channel_created,
        notif.CHANNEL_UNARCHIVE.value: process_channel_unarchive,
        notif.GROUP_UNARCHIVE.value: process_channel_unarchive,
        notif.CHANNEL_RENAME.value: process_channel_rename,
        notif.GROUP_RENAME.value: process_channel_rename,
        notif.MEMBER_JOINED_CHANNEL.value: process_member_joined_channel,
        notif.MEMBER_LEFT_CHANNEL.value: process_member_left_channel,
        # CHANNEL_HISTORY_CHANGED: processing logic still pending (TODO).
    }
    handler = handlers.get(event_type)
    if handler:
        handler(db_session, datasource.datasource_id, event)
    db_connection().commit()
def process_login_activity(datasource_id, incoming_activity):
    """Process a GSuite login-audit activity: refresh the user's last login
    time and keep the installed app's inactive-user counter in sync.

    A user is "inactive" when their latest login is more than 90 days old.
    """
    Logger().info('Processing login activity {}'.format(incoming_activity))
    db_session = db_connection().get_session()
    actor_email = incoming_activity['actor']['email']
    events = incoming_activity["events"]
    # Activity timestamps arrive as ISO-8601 with fractional seconds and a 'Z' suffix.
    login_time = datetime.datetime.strptime(incoming_activity["id"]["time"],
                                            '%Y-%m-%dT%H:%M:%S.%fZ')
    ninety_days_ago = datetime.datetime.utcnow() - datetime.timedelta(days=90)
    is_inactive = login_time < ninety_days_ago
    last_is_inactive = None
    for event in events:
        if event["name"] == 'login_success':
            # NOTE(review): .first() may return None if the user row is missing,
            # which would raise AttributeError here — confirm callers guarantee
            # the user exists for this datasource.
            last_login_time = db_session.query(DomainUser).filter(
                DomainUser.datasource_id == datasource_id,
                DomainUser.email == actor_email).first().last_login_time
            if last_login_time:
                last_is_inactive = last_login_time < ninety_days_ago
            # Only move the stored login time forward (or set it the first time).
            if not last_login_time or (login_time > last_login_time):
                db_session.query(DomainUser).filter(
                    DomainUser.datasource_id == datasource_id,
                    DomainUser.email == actor_email).update(
                        {"last_login_time": login_time}, synchronize_session='fetch')
                datasource = db_session.query(DataSource).filter(
                    DataSource.datasource_id == datasource_id).first()
                app_name = constants.datasource_to_installed_app_map[datasource.datasource_type]
                if (last_is_inactive is None) or not last_is_inactive:
                    # Previously active (or unknown) user whose newest login is
                    # itself stale: count them as newly inactive.
                    if is_inactive:
                        db_session.query(Application).filter(
                            and_(Application.domain_id == datasource.domain_id,
                                 Application.display_text == app_name)).update(
                                     {"inactive_users": Application.inactive_users + 1},
                                     synchronize_session='fetch')
                        Logger().info("new inactive user : {}".format(actor_email))
                elif last_is_inactive and not is_inactive:
                    # User came back: decrement, but never below zero.
                    Logger().info("inactive user is now active : {}".format(actor_email))
                    db_session.query(Application).filter(
                        and_(Application.domain_id == datasource.domain_id,
                             Application.display_text == app_name,
                             Application.inactive_users > 0)).update(
                                 {"inactive_users": Application.inactive_users - 1},
                                 synchronize_session='fetch')
    db_connection().commit()
def process_notifications(notification_type, datasource_id, channel_id, body):
    """Process a push-notification payload for a subscribed channel.

    "sync" notifications are ignored; "adya" notifications (manual/scheduled
    sync) re-fetch activities from the reports API since the last sync point.
    All failures are logged and swallowed (best-effort processing).
    """
    if notification_type == "sync":
        return
    db_session = db_connection().get_session()
    subscription = db_session.query(PushNotificationsSubscription).filter(
        PushNotificationsSubscription.channel_id == channel_id).first()
    if not subscription:
        Logger().warn("Subscription does not exist for datasource_id: {} and channel_id: {}, hence ignoring the notification.".format(
            datasource_id, channel_id))
        return
    # Default: the notification body itself is the single activity to process.
    activities = [body]
    try:
        # If notification type is adya, then that means its triggered either manually or scheduled sync
        # So we need to fetch the events from last sync time
        if notification_type == "adya":
            reports_service = gutils.get_gdrive_reports_service(None, subscription.user_email, db_session)
            # NOTE(review): page_token is (re)used here as the startTime
            # watermark, not an API page token — see assignment below.
            results = reports_service.activities().list(
                userKey="all",
                applicationName=subscription.notification_type,
                startTime=subscription.page_token).execute()
            if results and "items" in results:
                activities = results["items"]
        for activity in activities:
            process_incoming_activity(datasource_id, activity)
        # Advance the watermark so the next "adya" sync starts from now.
        db_session.refresh(subscription)
        subscription.last_accessed = datetime.datetime.utcnow()
        subscription.page_token = datetime.datetime.utcnow().isoformat("T") + "Z"
        db_connection().commit()
    except Exception as e:
        Logger().exception("Exception occurred while processing activity notification for datasource_id: {} channel_id: {} - {}".format(datasource_id, channel_id, e))
def post_process(db_session, auth_token, query_params):
    """After the directory scan, start a FILES scanner for every internal user.

    Creates one DatasourceScanners row per internal user and triggers the
    asynchronous Slack entity scan for each of them.
    """
    domain_id = query_params["domainId"]
    datasource_id = query_params["dataSourceId"]
    # Bug fix: this module references the datetime *module* (sibling helpers
    # call datetime.datetime.utcnow()), so the bare datetime.utcnow() here
    # raised AttributeError.
    now = datetime.datetime.utcnow()
    internal_users = db_session.query(DomainUser).filter(
        and_(
            DomainUser.datasource_id == datasource_id,
            DomainUser.type == constants.DirectoryEntityType.USER.value,
            DomainUser.member_type == constants.EntityExposureType.INTERNAL.value)).all()
    for internal_user in internal_users:
        scanner = DatasourceScanners()
        scanner.datasource_id = datasource_id
        scanner.scanner_type = slack_constants.ScannerTypes.FILES.value
        scanner.channel_id = str(uuid.uuid4())
        scanner.user_email = internal_user.email
        scanner.started_at = now
        scanner.in_progress = 1
        db_session.add(scanner)
        # Commit per scanner so the auto-generated scanner.id is available below.
        db_connection().commit()
        file_query_params = {
            'domainId': domain_id,
            'dataSourceId': datasource_id,
            'scannerId': str(scanner.id),
            'userId': internal_user.user_id,
            'userEmail': internal_user.email
        }
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    file_query_params, "slack")
def start_scan(auth_token, datasource_id, domain_id, user_email):
    """Create the APPS/USERS/CHANNELS scanners for a Slack datasource and
    trigger the entity scan for each one."""
    db_session = db_connection().get_session()
    scanner_types = [
        slack_constants.ScannerTypes.APPS.value,
        slack_constants.ScannerTypes.USERS.value,
        slack_constants.ScannerTypes.CHANNELS.value
    ]
    for scanner_type in scanner_types:
        scanner = DatasourceScanners()
        scanner.datasource_id = datasource_id
        scanner.scanner_type = scanner_type
        scanner.channel_id = str(uuid.uuid4())
        scanner.user_email = user_email
        # Bug fix: datetime.utcnow() raised AttributeError under this module's
        # `import datetime` convention (cf. sibling helpers using
        # datetime.datetime.utcnow()).
        scanner.started_at = datetime.datetime.utcnow()
        scanner.in_progress = 1
        db_session.add(scanner)
        # Commit per scanner so scanner.id is populated for the trigger payload.
        db_connection().commit()
        query_params = {
            "dataSourceId": datasource_id,
            "domainId": domain_id,
            "scannerId": scanner.id,
            "change_type": slack_constants.AppChangedTypes.ADDED.value
        }
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    query_params, "slack")
def create_user(email, first_name, last_name, domain_id, refresh_token,
                is_serviceaccount_enabled, scope_name, token, db_session=None):
    """Insert a new LoginUser row.

    Returns the created LoginUser on success, or None when the insert fails
    (the transaction is rolled back). An existing db_session may be supplied;
    otherwise a fresh one is obtained.
    """
    if not db_session:
        db_session = db_connection().get_session()
    creation_time = datetime.datetime.utcnow()
    auth_token = str(uuid.uuid4())
    login_user = LoginUser()
    login_user.email = email
    login_user.first_name = first_name
    login_user.last_name = last_name
    login_user.auth_token = auth_token
    login_user.domain_id = domain_id
    login_user.refresh_token = refresh_token
    login_user.is_serviceaccount_enabled = is_serviceaccount_enabled
    login_user.creation_time = creation_time
    login_user.last_login_time = creation_time
    login_user.authorize_scope_name = scope_name
    login_user.token = token
    try:
        db_session.add(login_user)
        db_connection().commit()
        return login_user
    except Exception:
        # Bug fix: narrowed from a bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt) and the failure is now logged;
        # the best-effort contract (rollback, return None) is unchanged.
        Logger().exception("Failed to create login user {}".format(email))
        db_session.rollback()
        return None
def create_datasource(auth_token, access_token, scope, user_email):
    """Create a GitHub DataSource plus its stored credentials, then trigger
    the initial GitHub scan.

    Returns the newly created DataSource row (consistency fix: the Slack
    counterpart returns its datasource; previously this returned None).
    """
    db_session = db_connection().get_session()
    login_user = db_session.query(LoginUser).filter(LoginUser.auth_token == auth_token).first()
    datasource = DataSource()
    datasource.datasource_id = str(uuid.uuid4())
    datasource.domain_id = login_user.domain_id
    datasource.display_name = login_user.domain_id
    datasource.creation_time = datetime.datetime.utcnow()
    datasource.datasource_type = constants.ConnectorTypes.GITHUB.value
    datasource.is_push_notifications_enabled = 0
    db_session.add(datasource)
    db_connection().commit()
    # Derive the GitHub-side domain from the user's email; fall back to the
    # login user's domain when no email is available.
    github_domain_id = user_email.split('@')[1] if user_email else login_user.domain_id
    datasource_credentials = DatasourceCredentials()
    datasource_credentials.datasource_id = datasource.datasource_id
    datasource_credentials.created_user = user_email
    datasource_credentials.credentials = json.dumps({
        'domain_id': github_domain_id,
        'authorize_scope_name': scope,
        'token': access_token
    })
    db_session.add(datasource_credentials)
    db_connection().commit()
    query_params = {"domainId": github_domain_id,
                    "dataSourceId": datasource.datasource_id,
                    "userEmail": login_user.email}
    messaging.trigger_get_event(urls.GITHUB_SCAN_UPDATE, auth_token, query_params, "github")
    return datasource
def create_dummy_datasource(db_session, domain_id, datasource_id):
    """Seed a demo datasource by bulk-loading rows from the bundled CSV fixtures.

    The domain_id/datasource_id columns are rewritten to the supplied ids so
    the dummy data appears to belong to the caller's domain.
    """
    file_names = ['resource', 'user', 'group', 'directory_structure',
                  'resource_permission', 'application', 'app_user_association']
    for filename in file_names:
        mappings = []
        with open(gutils.dir_path + "/dummy_datasource/" + filename + ".csv") as csvDataFile:
            reader = csv.reader(csvDataFile)
            tablename = get_table(filename)
            columns = tablename.__table__.columns
            rows = iter(reader)
            next(rows, None)  # skip the CSV header row
            for row in rows:
                record = {}
                for cellvalue, column in zip(row, columns):
                    name = column.name
                    if cellvalue == 'NULL' or cellvalue == '':
                        record[name] = None
                    elif isinstance(column.type, Boolean):
                        # Fixture encodes booleans as '0'/'1'.
                        record[name] = cellvalue != '0'
                    elif name == 'domain_id':
                        record[name] = domain_id
                    elif name == 'datasource_id':
                        record[name] = datasource_id
                    else:
                        record[name] = cellvalue
                mappings.append(record)
            db_session.bulk_insert_mappings(tablename, mappings)
    db_connection().commit()
    update_datasource_column_count(db_session, domain_id, datasource_id)
def update_report(auth_token, payload):
    """Update an existing scheduled report from the request payload.

    Returns the payload on success (commit failures are logged and swallowed,
    preserving the original best-effort behavior), or None when the auth
    token or payload is missing.
    """
    if not auth_token:
        return None
    if not payload:
        return None
    db_session = db_connection().get_session()
    report = {}
    report["name"] = payload["name"]
    # Description is optional in the payload.
    if 'description' in payload:
        report["description"] = payload["description"]
    report["frequency"] = payload["frequency"]
    report["receivers"] = payload["receivers"]
    config_input = {
        "report_type": payload["report_type"],
        "selected_entity_type": payload["selected_entity_type"],
        "selected_entity": payload["selected_entity"],
        "datasource_id": payload["datasource_id"],
        "selected_entity_name": payload["selected_entity_name"]
    }
    report["config"] = json.dumps(config_input)
    report["is_active"] = payload["is_active"]
    report_id = payload["report_id"]
    db_session.query(Report).filter(
        Report.report_id == report_id).update(report)
    try:
        db_connection().commit()
    except Exception:
        # Bug fix: Logger().exception() was called with no message, which
        # itself raises TypeError on the standard logging API signature.
        Logger().exception("Failed to commit update for report_id {}".format(report_id))
    return payload
def get_accesslogs(datasource_id, page_num=1):
    """Fetch one page of Slack team access logs and persist last-login times.

    When more pages remain, re-triggers itself via messaging for the next page
    and returns ACCEPTED; returns SUCCESS once the last page is processed (or
    when the Slack API call failed).
    """
    db_session = db_connection().get_session()
    slack_client = get_slack_client(datasource_id)
    login_user_list = slack_client.api_call("team.accessLogs", count=100, page=page_num)
    # Idiom fix: test the API's ok flag directly instead of
    # `True if x == True else False`.
    if login_user_list['ok']:
        current_page = login_user_list['page']
        total_pages = login_user_list['paging']['pages']
        for user in login_user_list['logins']:
            last_login = datetime.datetime.fromtimestamp(user['date_last'])
            db_session.query(DomainUser).filter(
                and_(DomainUser.datasource_id == datasource_id,
                     DomainUser.user_id == user['user_id'])). \
                update({DomainUser.last_login_time: last_login})
        db_connection().commit()
        if current_page != total_pages:
            query_param = {
                'datasource_id': datasource_id,
                'page_num': current_page + 1
            }
            # Chain the next page asynchronously.
            messaging.trigger_get_event(urls.SLACK_ACCESSLOGS, constants.INTERNAL_SECRET,
                                        query_param, constants.ConnectorTypes.SLACK.value)
            return constants.ACCEPTED_STATUS_CODE
    return constants.SUCCESS_STATUS_CODE
def create_report(auth_token, payload):
    """Create report(s) for the caller.

    When the payload carries "is_default", seed every default report for the
    datasource (skipping names that already exist for the domain) and return
    the list of inserted entries; otherwise insert the single report described
    by the payload.
    """
    db_session = db_connection().get_session()
    if "is_default" not in payload:
        return [
            insert_entry_into_report_table(db_session, auth_token, payload)
        ]
    result = []
    datasource_id = payload["datasource_id"]
    login_user = db_utils.get_user_session(auth_token).email
    domain_id = db_session.query(DataSource).filter(
        DataSource.datasource_id == datasource_id).first().domain_id
    # Bug fix: the previous code aliased and extend()-ed the module-level
    # default_reports.default_reports list, mutating shared state on every
    # call (the datasource-specific defaults accumulated across requests).
    # Copy the dicts as well, since receivers/datasource_id are written below.
    reports = [dict(report) for report in default_reports.default_reports]
    datasource_type = db_utils.get_datasource(datasource_id).datasource_type
    default_datasource_reports = datasource_to_default_report_map[datasource_type]
    if default_datasource_reports:
        reports.extend(dict(report) for report in default_datasource_reports)
    for report in reports:
        existing_report = db_session.query(Report).filter(
            Report.domain_id == domain_id,
            Report.name == report["name"]).first()
        if not existing_report:
            report["receivers"] = login_user
            report["datasource_id"] = datasource_id
            result.append(
                insert_entry_into_report_table(db_session, auth_token, report))
    return result
def delegate_admin_settings(datasource_id, actor_email, event):
    """Apply a GSuite admin-role change (ASSIGN_ROLE / UNASSIGN_ROLE) to the
    affected DomainUser and, on assignment, re-validate admin policies and
    record an activity event."""
    db_session = db_connection().get_session()
    event_name = event['name']
    activity_events_parameters = event['parameters']
    # Extract the target user's email from the event parameter list.
    user_email = None
    for param in activity_events_parameters:
        name = param['name']
        if name == 'USER_EMAIL':
            user_email = param['value']
    if event_name == 'ASSIGN_ROLE':
        user_obj = db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email)).first()
        if user_obj:
            user_obj.is_admin = True
            # New admins may violate admin-specific policies; re-check now.
            call_validate_policies_for_admin_user(user_obj, datasource_id)
            datasource_obj = get_datasource(datasource_id)
            tags = {"user_email": user_email, "is_admin": user_obj.is_admin}
            activity_db().add_event(domain_id=datasource_obj.domain_id,
                                    connector_type=constants.ConnectorTypes.GSUITE.value,
                                    event_type=event_name, actor=actor_email, tags=tags)
    elif event_name == 'UNASSIGN_ROLE':
        # Only flip users that are currently marked admin.
        db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email,
                 DomainUser.is_admin == True)).update({DomainUser.is_admin: False})
    db_connection().commit()
def execute_action(auth_token, payload):
    """Dispatch a GSuite action by its type and update the matching audit-log
    entry with success/failure counts.

    Returns the response object from the invoked gsuite_actions helper
    (None when no branch matched the action type).
    """
    action_type = payload['action_type']
    user_email = payload['user_email']
    datasource_id = payload['datasource_id']
    domain_id = payload['domain_id']
    # Optional payload fields: default to "no continuation" / unknown initiator
    # / no permissions.
    more_to_execute = payload['more_to_execute'] if 'more_to_execute' in payload else 0
    initiated_by_email = payload['initiated_by_email'] if 'initiated_by_email' in payload else None
    permissions = json.loads(payload['permissions']) if 'permissions' in payload else []
    response = None
    if action_type == action_constants.ActionNames.ADD_USER_TO_GROUP.value:
        group_email = payload['group_email']
        response = gsuite_actions.add_user_to_group(auth_token, group_email, user_email)
    elif action_type == action_constants.ActionNames.REMOVE_USER_FROM_GROUP.value:
        group_email = payload['group_email']
        response = gsuite_actions.delete_user_from_group(auth_token, group_email, user_email)
    # All permission-removal variants share the same delete helper.
    elif action_type == action_constants.ActionNames.DELETE_PERMISSION_FOR_USER.value or \
            action_type == action_constants.ActionNames.REMOVE_ALL_ACCESS_FOR_USER.value or \
            action_type == action_constants.ActionNames.MAKE_ALL_FILES_PRIVATE.value or \
            action_type == action_constants.ActionNames.MAKE_RESOURCE_PRIVATE.value or \
            action_type == action_constants.ActionNames.REMOVE_EXTERNAL_ACCESS.value or \
            action_type == action_constants.ActionNames.REMOVE_EXTERNAL_ACCESS_TO_RESOURCE.value:
        response = gsuite_actions.delete_permissions(auth_token, permissions, user_email,
                                                     initiated_by_email, datasource_id)
    elif action_type == action_constants.ActionNames.ADD_PERMISSION_FOR_A_FILE.value or \
            action_type == action_constants.ActionNames.CHANGE_OWNER_OF_FILE.value:
        response = gsuite_actions.add_permissions(auth_token, permissions, user_email,
                                                  initiated_by_email, datasource_id, domain_id)
    elif action_type == action_constants.ActionNames.UPDATE_PERMISSION_FOR_USER.value:
        response = gsuite_actions.update_permissions(auth_token, permissions, user_email,
                                                     initiated_by_email, datasource_id)
    elif action_type == action_constants.ActionNames.TRANSFER_OWNERSHIP.value:
        new_owner_email = payload['new_owner_email']
        response = gsuite_actions.transfer_ownership(auth_token, user_email, new_owner_email)
    # Update the audit log (skipped for entries already marked FAILED).
    if 'log_id' in payload:
        log_id = payload['log_id']
        db_session = db_connection().get_session()
        current_log = db_session.query(AuditLog).filter(
            and_(AuditLog.log_id == log_id,
                 AuditLog.status != action_constants.ActionStatus.FAILED.value)).first()
        if current_log:
            response_code = response.get_response_code()
            perm_length = len(permissions)
            if response_code != 200:
                current_log.failed_count += perm_length
                current_log.status = action_constants.ActionStatus.FAILED.value
                current_log.message = "Action failed"
            else:
                current_log.success_count += perm_length
                if current_log.failed_count < 1:
                    # Last batch with no failures: finalize the entry.
                    if not more_to_execute:
                        current_log.total_count = current_log.success_count
                        current_log.status = action_constants.ActionStatus.SUCCESS.value
                    # max() guards the division when total_count is still 0.
                    percentage_successful_till_now = ((current_log.success_count*100)/max(current_log.total_count, current_log.success_count))
                    current_log.message = "Action status - {} pct completed".format(percentage_successful_till_now)
            db_connection().commit()
    return response
def process_group_related_activities(datasource_id, actor_email, event):
    """Handle a GSuite group-membership activity (currently ADD_GROUP_MEMBER):
    record the membership edge, create an external DomainUser row if needed,
    trigger NEW_USER policy validation for external users, and log the event."""
    event_name = event['name']
    if event_name == 'ADD_GROUP_MEMBER':
        activity_events_parameters = event['parameters']
        group_email = None
        user_email = None
        # Pull the group and member emails out of the event parameter list.
        for param in activity_events_parameters:
            name = param['name']
            if name == 'GROUP_EMAIL':
                group_email = param['value']
            elif name == 'USER_EMAIL':
                user_email = param['value']
        user_directory_struct = DirectoryStructure()
        user_directory_struct.datasource_id = datasource_id
        user_directory_struct.member_email = user_email
        user_directory_struct.parent_email = group_email
        user_directory_struct.member_role = 'MEMBER'
        user_directory_struct.member_type = 'USER'  # TODO : check whether type is group or user
        db_session = db_connection().get_session()
        # INSERT IGNORE so a duplicate membership edge is a no-op.
        db_session.execute(DirectoryStructure.__table__.insert().prefix_with("IGNORE").
                           values(db_utils.get_model_values(DirectoryStructure, user_directory_struct)))
        if user_email:
            datasource_obj = get_datasource(datasource_id)
            domain_id = datasource_obj.domain_id
            exposure_type = utils.check_if_external_user(db_session, domain_id, user_email)
            if exposure_type == constants.EntityExposureType.EXTERNAL.value:
                # check if external user present in domain user table
                existing_user = db_session.query(DomainUser).filter(
                    and_(DomainUser.datasource_id == datasource_id,
                         DomainUser.email == user_email)).first()
                external_user = None
                if not existing_user:
                    external_user = DomainUser()
                    external_user.datasource_id = datasource_id
                    external_user.email = user_email
                    external_user.member_type = constants.EntityExposureType.EXTERNAL.value
                    external_user.type = 'USER'
                    # TODO: find the first name and last name of external user
                    external_user.first_name = ""
                    external_user.last_name = ""
                    db_session.add(external_user)
                user_obj = existing_user if existing_user else external_user
                # Fire NEW_USER policy validation for the external member.
                payload = {}
                payload["user"] = json.dumps(user_obj, cls=alchemy_encoder())
                policy_params = {'dataSourceId': datasource_id,
                                 'policy_trigger': constants.PolicyTriggerType.NEW_USER.value}
                Logger().info("new_user : payload : {}".format(payload))
                messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                             constants.INTERNAL_SECRET, policy_params,
                                             payload, "gsuite")
        datasource_obj = get_datasource(datasource_id)
        tags = {"group_email": group_email, "user_email": user_email}
        activity_db().add_event(domain_id=datasource_obj.domain_id,
                                connector_type=constants.ConnectorTypes.GSUITE.value,
                                event_type='ADD_GROUP_MEMBER', actor=actor_email, tags=tags)
        db_connection().commit()
def modify_group_membership(auth_token, datasource_id, action_name, action_parameters, log_entry):
    """Add or remove a user from a group via the connector service and mirror
    the result in the local DirectoryStructure table and the audit log entry.

    Returns a ResponseMessage; on connector failure the log entry is marked
    FAILED and the failure is returned immediately without touching the
    directory table.
    """
    user_email = action_parameters["user_email"]
    group_email = action_parameters["group_email"]
    db_session = db_connection().get_session()
    status_message = "Action completed successfully"
    datasource_obj = get_datasource(datasource_id)
    datasource_type = datasource_obj.datasource_type
    payload = {
        "log_id": str(log_entry.log_id),
        "action_type": action_name,
        "user_email": user_email,
        "group_email": group_email,
        'datasource_id': datasource_id,
        "domain_id": datasource_obj.domain_id
    }
    # Synchronously invoke the datasource-specific action endpoint.
    response = messaging.trigger_post_event(
        datasource_execute_action_map[datasource_type], auth_token, None, payload,
        connector_servicename_map[datasource_type], constants.TriggerType.SYNC.value)
    if response and action_name == action_constants.ActionNames.REMOVE_USER_FROM_GROUP.value:
        if response.response_code != constants.SUCCESS_STATUS_CODE:
            log_entry.status = action_constants.ActionStatus.FAILED.value
            status_message = 'Action failed with error - ' + \
                response.response_body['error']['message']
            log_entry.message = status_message
            return response_messages.ResponseMessage(response.response_code, status_message)
        # Remove the local membership edge to mirror the remote removal.
        db_session.query(DirectoryStructure).filter(
            and_(DirectoryStructure.datasource_id == datasource_id,
                 DirectoryStructure.parent_email == group_email,
                 DirectoryStructure.member_email == user_email)).delete()
    elif response and action_name == action_constants.ActionNames.ADD_USER_TO_GROUP.value:
        if response.response_code != constants.SUCCESS_STATUS_CODE:
            log_entry.status = action_constants.ActionStatus.FAILED.value
            # The error body may or may not carry a nested message field.
            message = response.response_body['error']['message'] if 'message' in \
                response.response_body['error'] else response.response_body['error']
            status_message = 'Action failed with error - ' + message
            log_entry.message = status_message
            return response_messages.ResponseMessage(response.response_code, status_message)
        # Insert the new membership edge using the attributes echoed back by
        # the connector response.
        response_body = response.response_body
        dirstructure = DirectoryStructure()
        dirstructure.datasource_id = datasource_id
        dirstructure.member_email = user_email
        dirstructure.parent_email = group_email
        dirstructure.member_type = response_body['type']
        dirstructure.member_role = response_body['role']
        dirstructure.member_id = response_body['id']
        db_session.add(dirstructure)
    log_entry.status = action_constants.ActionStatus.SUCCESS.value
    log_entry.message = status_message
    db_connection().commit()
    return response_messages.ResponseMessage(200, status_message)
def request_scanner_data(auth_token, query_params):
    """Run one query iteration of a Slack scanner: fetch a page of entities,
    update scanner progress counters, chain the next page (or finalize the
    scan), and fan the fetched entities out for processing in batches of 30."""
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return
    # Delegate the actual fetch to the scanner-type-specific processor.
    response = get_scanner_processor(scanner.scanner_type).query(
        auth_token, query_params, scanner)
    next_page_token = response["nextPageNumber"]
    if next_page_token:
        # More pages remain: persist the token and chain the next fetch.
        scanner.page_token = str(next_page_token)
        query_params["nextPageNumber"] = scanner.page_token
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token, query_params, "slack")
    else:
        scanner.page_token = ""
    entities_list = response["payload"]
    fetched_entities_count = len(entities_list)
    # An empty page means this scanner is done.
    in_progress = 0 if fetched_entities_count < 1 else 1
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.total_count: DatasourceScanners.total_count + fetched_entities_count,
                DatasourceScanners.query_status: DatasourceScanners.query_status + 1})
    if in_progress == 0:
        # Mark the scanner finished and notify the scan-update endpoint.
        db_session.query(DatasourceScanners).filter(
            and_(DatasourceScanners.datasource_id == datasource_id,
                 DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: in_progress})
        db_connection().commit()
        messaging.trigger_post_event(urls.SCAN_SLACK_UPDATE, auth_token, query_params, {}, "slack")
        return
    # Bump the datasource-level metric column for this scanner type, if any.
    datasource_metric_column = get_datasource_column(scanner.scanner_type)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + fetched_entities_count})
    db_connection().commit()
    # Fan the fetched entities out for processing in chunks of 30.
    sent_member_count = 0
    while sent_member_count < fetched_entities_count:
        scanner_data = {}
        scanner_data["entities"] = entities_list[
            sent_member_count:sent_member_count + 30]
        #If this is the last set of users, in the process call, send the next page number as empty
        if fetched_entities_count - sent_member_count <= 30 and not scanner.page_token:
            query_params["nextPageNumber"] = ""
        messaging.trigger_post_event(urls.SCAN_SLACK_ENTITIES, auth_token, query_params,
                                     scanner_data, "slack")
        sent_member_count += 30
def delete_alert_for_a_policy(policy_id):
    """Delete every alert tied to the given policy.

    Returns the number of rows deleted, or None when no policy id is supplied.
    """
    if not policy_id:
        return None
    db_session = db_connection().get_session()
    Logger().info("delete alert for policy id: {}".format(policy_id))
    deleted_count = db_session.query(Alert).filter(
        Alert.policy_id == policy_id).delete()
    db_connection().commit()
    return deleted_count
def scan_complete_processing(db_session, auth_token, datasource_id):
    """Post-scan finalization: backfill permission emails from the user table,
    push a scan-update notification, and register the scanned app license."""
    # Correlated update: copy DomainUser.email onto each ResourcePermission
    # whose permission_id matches that user's id within the same datasource.
    db_session.query(ResourcePermission).filter(
        ResourcePermission.datasource_id == datasource_id,
        DomainUser.datasource_id == ResourcePermission.datasource_id,
        DomainUser.user_id == ResourcePermission.permission_id). \
        update({ResourcePermission.email: DomainUser.email}, synchronize_session='fetch')
    db_connection().commit()
    # Only notify for datasources that are not pending async deletion.
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification("adya-scan-update",
                                     json.dumps(datasource, cls=alchemy_encoder()))
    utils.add_license_for_scanned_app(db_session, datasource)
def process_user_related_activities(datasource_id, actor_email, event):
    """Handle GSuite user lifecycle activities (create / grant admin /
    suspend / delete), mirror them in the DomainUser table, and record an
    activity event."""
    event_name = event['name']
    activity_events_parameters = event['parameters']
    user_email = None
    user_obj = None
    db_session = db_connection().get_session()
    # Extract the affected user's email from the event parameter list.
    for param in activity_events_parameters:
        name = param['name']
        if name == 'USER_EMAIL':
            user_email = param['value']
    datasource_obj = get_datasource(datasource_id)
    tags = {"user_email": user_email}
    if event_name == 'CREATE_USER':
        if user_email:
            # Fetch the full user record from the directory API; failures are
            # logged and treated as "no result".
            directory_service = gutils.get_directory_service(None, actor_email)
            results = None
            try:
                results = directory_service.users().get(userKey=user_email).execute()
            except RefreshError as ex:
                Logger().info("User query : Not able to refresh credentials")
            except HttpError as ex:
                Logger().info("User query : Domain not found error")
            if results:
                gsuite_user = user.GsuiteUser(datasource_id, results)
                user_obj = gsuite_user.get_model()
                # INSERT IGNORE: a duplicate user row is a no-op.
                db_session.execute(DomainUser.__table__.insert().prefix_with("IGNORE").
                                   values(db_utils.get_model_values(DomainUser, user_obj)))
                call_validate_policies_for_admin_user(user_obj, datasource_id)
                additional_payload = {"user_email": user_email, "is_admin": user_obj.is_admin}
                tags["is_admin"] = user_obj.is_admin
    elif event_name == 'GRANT_ADMIN_PRIVILEGE':
        user_obj = db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email)).first()
        if user_obj:
            user_obj.is_admin = True
            # New admins may violate admin-specific policies; re-check now.
            call_validate_policies_for_admin_user(user_obj, datasource_id)
            tags["is_admin"] = user_obj.is_admin
    elif event_name == "SUSPEND_USER":
        db_session.query(DomainUser).filter(
            and_(DomainUser.datasource_id == datasource_id,
                 DomainUser.email == user_email)).update({DomainUser.is_suspended: True})
    elif event_name == "DELETE_USER":
        delete_user_info(db_session, user_email, datasource_id)
    activity_db().add_event(domain_id=datasource_obj.domain_id,
                            connector_type=constants.ConnectorTypes.GSUITE.value,
                            event_type=event_name, actor=actor_email, tags=tags)
    db_connection().commit()
def get_user_session(auth_token):
    """Return the user's session as a sanitized dict, bumping last_login_time."""
    db_session = db_connection().get_session()
    user_session = db_utils.get_user_session(auth_token, db_session)
    db_session.query(LoginUser).filter(
        LoginUser.auth_token == auth_token).update(
            {"last_login_time": datetime.datetime.utcnow()})
    db_connection().commit()
    # Round-trip through JSON to detach from the ORM, then strip the secrets
    # before handing the session back to the caller.
    sanitized = json.loads(json.dumps(user_session, cls=alchemy_encoder()))
    sanitized["refresh_token"] = ""
    sanitized["token"] = ""
    return sanitized
def get_alerts(auth_token):
    """Return the 100 most recently updated alerts for the caller's domain,
    marking all of the domain's alerts as no longer open."""
    db_session = db_connection().get_session()
    existing_user = db_utils.get_user_session(auth_token, db_session=db_session)
    # Join Alert -> DataSource to scope alerts to the user's domain.
    alerts = db_session.query(Alert).filter(
        DataSource.domain_id == existing_user.domain_id,
        Alert.datasource_id == DataSource.datasource_id)
    alerts_obj = alerts.order_by(Alert.last_updated.desc()).limit(100).all()
    # NOTE(review): this clears the open flag on ALL matching alerts, not just
    # the 100 returned — presumably "fetched once means seen"; confirm intent.
    alerts.update({"isOpen": False}, synchronize_session='fetch')
    db_connection().commit()
    return alerts_obj
def _count_internal_users(db_session, datasource_id, extra_filters=()):
    """Count internal directory USER entries of *datasource_id*;
    *extra_filters* are extra SQLAlchemy criteria ANDed onto the query."""
    return db_session.query(DomainUser).filter(
        DomainUser.datasource_id == datasource_id,
        DomainUser.member_type == constants.EntityExposureType.INTERNAL.value,
        DomainUser.type == constants.DirectoryEntityType.USER.value,
        *extra_filters).count()


def add_license_for_scanned_app(db_session, datasource):
    """Create or refresh the Application (license) record matching the
    installed app for *datasource*, then associate the datasource's
    internal users with it.

    Args:
        db_session: active SQLAlchemy session.
        datasource: DataSource row whose type maps to an installed app.
    """
    app_name = constants.datasource_to_installed_app_map[
        datasource.datasource_type]
    application = db_session.query(Application).filter(
        and_(Application.domain_id == datasource.domain_id,
             Application.display_text == app_name)).first()
    now = datetime.datetime.utcnow()

    # Inventory metadata (category/image/price) is optional.
    inventory_app = db_session.query(AppInventory).filter(
        AppInventory.name == app_name).first()
    inventory_app_id = inventory_app.id if inventory_app else None
    unit_price = None
    if inventory_app:
        unit_price = db_session.query(AppLicenseInventory).filter(
            AppLicenseInventory.app_id == inventory_app_id,
            AppLicenseInventory.price > 0).first()

    if not application:
        application = Application()
        application.domain_id = datasource.domain_id
        application.display_text = app_name
        application.timestamp = now
        application.purchased_date = now
        application.unit_num = _count_internal_users(
            db_session, datasource.datasource_id)
        if inventory_app_id:
            application.inventory_app_id = inventory_app_id
            application.category = inventory_app.category
            application.image_url = inventory_app.image_url
        if unit_price:
            application.unit_price = unit_price.price
        # Users with no login within the last 90 days count as inactive.
        ninety_days_ago = now - datetime.timedelta(days=90)
        application.inactive_users = _count_internal_users(
            db_session, datasource.datasource_id,
            (DomainUser.last_login_time < ninety_days_ago,))
        db_session.add(application)
    else:
        # Refresh the existing license record in place.
        application.timestamp = now
        application.purchased_date = now
        application.unit_num = _count_internal_users(
            db_session, datasource.datasource_id)
        if unit_price:
            application.unit_price = unit_price.price
    db_connection().commit()
    populate_users_to_scanned_app(db_session, datasource, application.id)
def initiate_action(auth_token, action_payload):
    """Validate, audit and execute the action described by *action_payload*.

    Args:
        auth_token: token identifying the logged-in user.
        action_payload: dict with 'key', 'initiated_by', 'parameters' and
            optional 'datasource_id' / 'log_id' entries.

    Returns:
        ResponseMessage with the execution status, or a 400/500 message
        on validation failure or unexpected error.
    """
    try:
        action_key = action_payload['key']
        initiated_by = action_payload['initiated_by']
        action_parameters = action_payload['parameters']
        # Ad-hoc actions carry no datasource; tag them as MANUAL.
        datasource_id = action_payload.get('datasource_id', 'MANUAL')

        db_session = db_connection().get_session()
        login_user_info = db_session.query(LoginUser).filter(
            LoginUser.auth_token == auth_token).first()
        domain_id = login_user_info.domain_id

        action_config = get_action(action_key)
        if not action_config or not validate_action_parameters(
                action_config, action_parameters):
            return ResponseMessage(
                400, "Failed to execute action - Validation failed")

        # Re-use the caller-supplied audit entry (retry path) or create one.
        log_id = action_payload.get('log_id')
        if log_id:
            log_entry = db_session.query(AuditLog).filter(
                and_(AuditLog.log_id == log_id)).first()
        else:
            log_entry = audit_action(domain_id, datasource_id, initiated_by,
                                     action_config, action_parameters)

        execution_status = execute_action(auth_token, domain_id,
                                          datasource_id, action_config,
                                          action_payload, log_entry)
        db_connection().commit()
        Logger().info("initiate_action : response body - {}".format(
            execution_status.get_response_body()))
        response_body = json.loads(
            json.dumps(execution_status.get_response_body()))
        response_body['id'] = log_entry.log_id
        if execution_status.response_code == constants.ACCEPTED_STATUS_CODE:
            # Accepted => finish the work asynchronously via messaging.
            action_payload['log_id'] = log_entry.log_id
            messaging.trigger_post_event(urls.INITIATE_ACTION_PATH,
                                         auth_token, None, action_payload)
        return ResponseMessage(execution_status.response_code, None,
                               response_body)
    except Exception as e:
        Logger().exception(
            "Exception occurred while initiating action using payload " +
            str(action_payload))
        return ResponseMessage(500, "Failed to execute action - {}".format(e))
def generate_response(self, http_code, payload=None):
    """Close the DB connection, then shape the response for either a
    local run (tuple) or an API-gateway style run (dict with CORS
    headers)."""
    db_connection().close_connection()
    if not self.isLocal:
        return {
            "statusCode": http_code,
            "body": payload,
            "headers": {
                "Access-Control-Allow-Origin": "*",
                "Access-Control-Allow-Credentials": True,
            },
        }
    return payload, http_code
def delete_report(auth_token, report_id):
    """Delete the report identified by *report_id*.

    Returns:
        The deleted Report row, or None when the caller is
        unauthenticated or no such report exists. Commit failures are
        logged and swallowed (best-effort delete, as before).
    """
    if not auth_token:
        return None
    db_session = db_connection().get_session()
    existing_report = db_session.query(Report).filter(
        Report.report_id == report_id).first()
    # Guard: Session.delete(None) raises; a missing report is not an error.
    if not existing_report:
        return None
    db_session.delete(existing_report)
    try:
        db_connection().commit()
    except Exception:
        # Narrowed from a bare except: keep best-effort semantics but
        # stop masking SystemExit/KeyboardInterrupt.
        Logger().exception("Exception occured while delete a report")
    return existing_report
def delete_policy(policy_id):
    """Remove a policy and everything hanging off it (alerts, actions,
    conditions). No-op when the policy does not exist."""
    session = db_connection().get_session()
    policy = session.query(Policy).filter(
        Policy.policy_id == policy_id).first()
    if not policy:
        return
    # Purge dependent rows before the policy row itself.
    for dependent_model in (Alert, PolicyAction, PolicyCondition):
        session.query(dependent_model).filter(
            dependent_model.policy_id == policy_id).delete()
    session.delete(policy)
    db_connection().commit()
def populate_users_to_scanned_app(db_session, datasource, application_id):
    """Create an ApplicationUserAssociation row linking every internal
    directory user of *datasource* to application *application_id*."""
    internal_users = db_session.query(DomainUser).filter(
        DomainUser.datasource_id == datasource.datasource_id,
        DomainUser.member_type == constants.EntityExposureType.INTERNAL.value,
        DomainUser.type == constants.DirectoryEntityType.USER.value).all()
    for member in internal_users:
        association = ApplicationUserAssociation()
        association.application_id = application_id
        association.datasource_id = member.datasource_id
        association.user_email = member.email
        # NOTE(review): client_id is populated with the datasource id here —
        # confirm that is intentional rather than a copy/paste slip.
        association.client_id = member.datasource_id
        db_session.add(association)
    db_connection().commit()
def process(db_session, auth_token, query_params, scanner_data):
    """Ingest one page of scanned third-party apps for a user.

    Creates or updates Application rows, scores newly seen apps, and
    bulk-inserts the user<->application associations.

    Returns:
        The number of app entities present in *scanner_data*.
    """
    user_email = query_params["userEmail"]
    domain_id = query_params["domainId"]
    datasource_id = query_params["dataSourceId"]
    now = datetime.datetime.utcnow()
    associations = []
    apps_count = 0
    if not scanner_data or "entities" not in scanner_data:
        return apps_count

    trusted_domain_apps = get_trusted_entity_for_domain(
        db_session, domain_id)["trusted_apps"]
    for app in scanner_data["entities"]:
        apps_count += 1
        app_name = app.get("displayText")
        application = db_session.query(Application).filter(
            Application.display_text == app_name,
            Application.domain_id == domain_id).first()
        if application:
            # Seen before in this domain: just bump the seat count.
            application.unit_num += 1
        else:
            inventory_app = db_session.query(AppInventory).filter(
                AppInventory.name == app_name).first()
            application = Application()
            if inventory_app and inventory_app.id:
                application.inventory_app_id = inventory_app.id
                application.category = inventory_app.category
                application.image_url = inventory_app.image_url
            application.domain_id = domain_id
            application.display_text = app_name
            application.anonymous = app.get("anonymous")
            application.timestamp = now
            application.purchased_date = now
            scopes = app["scopes"]
            if app_name in trusted_domain_apps:
                # Whitelisted apps carry no risk score.
                application.score = 0
                application.is_whitelisted = True
            else:
                application.score = gutils.get_app_score(scopes)
            application.scopes = ','.join(scopes)
            application.unit_num = 1
            db_session.add(application)
        # Commit per app so application.id is populated for the association.
        db_connection().commit()
        associations.append({
            "client_id": app.get("clientId"),
            "user_email": user_email,
            "datasource_id": datasource_id,
            "application_id": application.id,
        })

    if associations:
        db_session.bulk_insert_mappings(ApplicationUserAssociation,
                                        associations)
        db_connection().commit()
    return apps_count