def scan_complete_processing(db_session, auth_token, datasource_id):
    """Finalize a completed scan: backfill permission emails, push an update, add a license.

    :param db_session: active SQLAlchemy session
    :param auth_token: caller's auth token (unused here, kept for a uniform signature)
    :param datasource_id: datasource whose scan just finished
    """
    # Backfill ResourcePermission.email from the DomainUser row whose
    # user_id matches the permission_id within the same datasource.
    permission_query = db_session.query(ResourcePermission).filter(
        ResourcePermission.datasource_id == datasource_id,
        DomainUser.datasource_id == ResourcePermission.datasource_id,
        DomainUser.user_id == ResourcePermission.permission_id)
    permission_query.update({ResourcePermission.email: DomainUser.email},
                            synchronize_session='fetch')
    db_connection().commit()

    # Fetch the (not-being-deleted) datasource and notify subscribers.
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))
    utils.add_license_for_scanned_app(db_session, datasource)
def scan_complete_processing(db_session, auth_token, datasource_id):
    """Finalize a completed scan: normalize app-user emails, notify, and trigger defaults.

    :param db_session: active SQLAlchemy session
    :param auth_token: auth token forwarded to the triggered default-policy/report events
    :param datasource_id: datasource whose scan just finished
    """
    Logger().info("Scan completed")

    # Replace ApplicationUserAssociation.user_email (stored as a user id)
    # with the DomainUser's actual email for this datasource.
    assoc_query = db_session.query(ApplicationUserAssociation).filter(
        ApplicationUserAssociation.datasource_id == datasource_id,
        DomainUser.datasource_id == datasource_id,
        DomainUser.user_id == ApplicationUserAssociation.user_email)
    assoc_query.update({ApplicationUserAssociation.user_email: DomainUser.email},
                       synchronize_session='fetch')
    db_connection().commit()

    # Fetch the (not-being-deleted) datasource and notify subscribers.
    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))
    utils.add_license_for_scanned_app(db_session, datasource)

    # Kick off default policies and scheduled reports for this datasource.
    body = {'datasource_id': datasource_id, "is_default": True}
    messaging.trigger_post_event(urls.POLICIES_PATH, auth_token, {}, body)
    messaging.trigger_post_event(urls.GET_SCHEDULED_REPORT_PATH, auth_token, {}, body)
def process_scanner_data(auth_token, query_params, scanner_data):
    """Process one page of scanner data (slack variant) and update scan progress.

    Fixes vs. original:
    - ``query_params.get("nextPageNumber")`` instead of a bare key lookup, which
      raised ``KeyError`` when the last page omitted the token (the github
      variant of this function already guards this).
    - ``scanner_processor.process`` wrapped in try/except so a single bad page
      is logged and skipped instead of crashing the whole scan (again matching
      the github variant).
    - the two ``update()`` calls that shared an identical filter are merged
      into one round-trip.

    :param auth_token: caller's auth token, forwarded to post-processing events
    :param query_params: dict with "dataSourceId", "scannerId" and optionally
        "nextPageNumber" (absent/falsy on the final page)
    :param scanner_data: raw page payload handed to the scanner processor
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    next_page_token = query_params.get("nextPageNumber")
    db_session = db_connection().get_session()

    scanner = db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return

    scanner_processor = get_scanner_processor(scanner.scanner_type)
    processed_results = 0
    try:
        processed_results = scanner_processor.process(db_session, auth_token, query_params, scanner_data)
    except Exception as ex:
        Logger().exception("Exception occurred while processing scan data for - {} - {}".format(query_params, ex))
        return

    # No next page token means this scanner is done.
    in_progress = 1 if next_page_token else 0

    # Single update for both the progress flag and the counters
    # (previously two updates with the same filter).
    db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)). \
        update({DatasourceScanners.in_progress: in_progress,
                DatasourceScanners.process_status: DatasourceScanners.process_status + 1,
                DatasourceScanners.processed_count: DatasourceScanners.processed_count + processed_results,
                DatasourceScanners.updated_at: datetime.utcnow()})

    # Bump the datasource-level metric for this scanner type, if any.
    datasource_metric_column = get_datasource_column(scanner.scanner_type, False)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + processed_results})
    db_connection().commit()

    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))

    if in_progress == 0:
        # Last page: run post-processing and notify the slack connector.
        scanner_processor.post_process(db_session, auth_token, query_params)
        messaging.trigger_post_event(urls.SCAN_SLACK_UPDATE, auth_token, query_params, {}, "slack")
def scan_complete_processing(db_session, auth_token, datasource_id):
    """Finalize a completed GDrive scan: reset exposures, trigger defaults, subscribe, notify.

    :param db_session: active SQLAlchemy session
    :param auth_token: auth token forwarded to triggered events and emails
    :param datasource_id: datasource whose scan just finished
    """
    Logger().info("Scan completed")

    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()

    # Clear stale external-exposure rows for this domain before recomputing.
    db_session.query(ExternalExposure).filter(
        ExternalExposure.domain_id == datasource.domain_id).delete()

    # Kick off default policies and scheduled reports.
    body = {"datasource_id": datasource_id, "is_default": True}
    messaging.trigger_post_event(urls.POLICIES_PATH, auth_token, {}, body)
    messaging.trigger_post_event(urls.GET_SCHEDULED_REPORT_PATH, auth_token, {}, body)

    #Subscribe for push notifications
    query_params = {'domainId': datasource.domain_id, 'dataSourceId': datasource_id}
    Logger().info("Trying for push notification subscription for domain_id: {} datasource_id: {}".format(
        datasource.domain_id, datasource_id))
    messaging.trigger_post_event(
        urls.SUBSCRIBE_GDRIVE_NOTIFICATIONS_PATH, auth_token, query_params, {}, "gsuite")

    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))
    utils.add_license_for_scanned_app(db_session, datasource)

    Logger().info("Send email after scan complete")
    adya_emails.send_gdrive_scan_completed_email(auth_token, datasource)
    update_resource_exposure_type(db_session, datasource.domain_id, datasource_id)
def process_scanner_data(auth_token, query_params, scanner_data):
    """Process one page of scanner data (github variant) and update scan progress.

    :param auth_token: caller's auth token, forwarded to post-processing events
    :param query_params: dict with "dataSourceId", "scannerId" and optionally
        "nextPageNumber" (absent/falsy on the final page)
    :param scanner_data: raw page payload handed to the scanner processor
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    next_page_token = query_params["nextPageNumber"] if "nextPageNumber" in query_params else None
    db_session = db_connection().get_session()

    scanner_filter = and_(DatasourceScanners.datasource_id == datasource_id,
                          DatasourceScanners.id == scanner_id)
    scanner = db_session.query(DatasourceScanners).filter(scanner_filter).first()
    if not scanner:
        return

    scanner_processor = get_scanner_processor(scanner.scanner_type)
    processed_results = 0
    try:
        processed_results = scanner_processor.process(db_session, auth_token, query_params, scanner_data)
    except Exception as ex:
        # A bad page is logged and skipped; the scan itself keeps going.
        Logger().exception("Exception occurred while processing scan data for - {} - {}".format(query_params, ex))
        return

    # No next page token means this scanner is done.
    in_progress = 0 if not next_page_token else 1

    db_session.query(DatasourceScanners).filter(scanner_filter). \
        update({DatasourceScanners.in_progress: in_progress})
    db_session.query(DatasourceScanners).filter(scanner_filter). \
        update({DatasourceScanners.process_status: DatasourceScanners.process_status + 1,
                DatasourceScanners.processed_count: DatasourceScanners.processed_count + processed_results,
                DatasourceScanners.updated_at: datetime.utcnow()})

    # Bump the datasource-level metric for this scanner type, if any.
    datasource_metric_column = get_datasource_column(scanner.scanner_type, False)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({datasource_metric_column: datasource_metric_column + processed_results})
    db_connection().commit()

    datasource = db_session.query(DataSource).filter(
        and_(DataSource.datasource_id == datasource_id,
             DataSource.is_async_delete == False)).first()
    messaging.send_push_notification(
        "adya-scan-update", json.dumps(datasource, cls=alchemy_encoder()))

    if in_progress == 0:
        # Last page: run post-processing and notify the github connector.
        scanner_processor.post_process(db_session, auth_token, query_params)
        messaging.trigger_post_event(urls.GITHUB_SCAN_UPDATE, auth_token, query_params, {}, "github")
def update_resource(db_session, datasource_id, user_email, updated_resource):
    """Upsert a GSuite resource and reconcile its permissions and external users,
    then notify clients and trigger policy validation.

    Fixes vs. original:
    - ``if len(external_users) > 0`` replaced with the truthiness idiom.
    - the per-permission ``event_name`` classification is flagged: it is
      computed but never used anywhere in this function.

    :param db_session: active SQLAlchemy session
    :param datasource_id: datasource the resource belongs to
    :param user_email: email of the user whose change triggered this update
    :param updated_resource: raw updated resource payload from the connector
    """
    is_new_resource = 0
    gsuite_resource = GsuiteResource(datasource_id, updated_resource)
    db_resource = gsuite_resource.get_model()
    external_users = gsuite_resource.get_external_users()

    # Try an UPDATE first; count == 0 means the resource is new.
    count = db_session.query(Resource).filter(
        and_(Resource.datasource_id == datasource_id,
             Resource.resource_id == db_resource.resource_id)).update(
        db_utils.get_model_values(Resource, db_resource))
    if count < 1:
        #Resource does not exist, so insert
        is_new_resource = 1
        db_session.execute(
            Resource.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(Resource, db_resource)))

    new_permissions_map = {}
    for new_permission in db_resource.permissions:
        new_permissions_map[new_permission.permission_id] = new_permission

    #Update resource permissions
    existing_permissions = db_session.query(ResourcePermission).filter(
        and_(ResourcePermission.datasource_id == datasource_id,
             ResourcePermission.resource_id == db_resource.resource_id)).all()
    # Snapshot the old permissions before mutating, for the policy payload below.
    existing_permissions_dump = json.dumps(existing_permissions, cls=alchemy_encoder())
    for existing_permission in existing_permissions:
        if existing_permission.permission_id in new_permissions_map:
            #Update the permission
            db_session.query(ResourcePermission).filter(
                and_(ResourcePermission.datasource_id == datasource_id,
                     ResourcePermission.resource_id == db_resource.resource_id,
                     ResourcePermission.permission_id == existing_permission.permission_id)) \
                .update(db_utils.get_model_values(
                    ResourcePermission, new_permissions_map[existing_permission.permission_id]))
            new_permissions_map.pop(existing_permission.permission_id, None)
        else:
            #Delete the permission
            db_session.delete(existing_permission)

    #Now add all the other new permissions
    for new_permission in new_permissions_map.values():
        event_name = ''
        db_session.execute(
            ResourcePermission.__table__.insert().prefix_with("IGNORE").values(
                db_utils.get_model_values(ResourcePermission, new_permission)))
        # NOTE(review): event_name is computed below but never used in this
        # function — looks like an unfinished audit/notification hook. TODO confirm.
        if new_permission.exposure_type == constants.EntityExposureType.PUBLIC.value:
            event_name = 'FILE_SHARE_PUBLIC'
        elif new_permission.exposure_type == constants.EntityExposureType.ANYONEWITHLINK.value:
            event_name = 'FILE_SHARE_ANYONEWITHLINK'
        elif new_permission.exposure_type == constants.EntityExposureType.EXTERNAL.value:
            event_name = 'FILE_SHARE_EXTERNAL'

    #Update external users
    if external_users:
        external_users_values = []
        for external_user in external_users:
            external_users_values.append(
                db_utils.get_model_values(DomainUser, external_user))
        db_session.execute(DomainUser.__table__.insert().prefix_with(
            "IGNORE").values(external_users_values))
    db_connection().commit()

    if is_new_resource == 1:
        # NOTE(review): this update runs after the commit above and is not
        # explicitly committed here — presumably committed by a later caller. TODO confirm.
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
            update({DataSource.processed_file_count: DataSource.processed_file_count + 1,
                    DataSource.total_file_count: DataSource.total_file_count + 1})

    # Push the incremental change to clients subscribed to this datasource.
    messaging.send_push_notification(
        "adya-" + datasource_id,
        json.dumps({
            "type": "incremental_change",
            "datasource_id": datasource_id,
            "email": user_email,
            "resource": updated_resource
        }))

    #Trigger the policy validation now
    payload = {}
    payload["old_permissions"] = existing_permissions_dump
    payload["resource"] = json.dumps(db_resource, cls=alchemy_encoder())
    payload["new_permissions"] = json.dumps(db_resource.permissions, cls=alchemy_encoder())
    policy_params = {
        'dataSourceId': datasource_id,
        'policy_trigger': constants.PolicyTriggerType.PERMISSION_CHANGE.value
    }
    messaging.trigger_post_event(urls.GSUITE_POLICIES_VALIDATE_PATH,
                                 constants.INTERNAL_SECRET, policy_params, payload, "gsuite")