def store_query_and_generate_link(query_session_req):
    '''Persist a query session and return a unique link token for it.

    The token is the MD5 digest of the encoded request rather than a fresh
    UUID per call: hashing the payload means identical requests map to the
    same token, so an already-generated link is detected and not re-stored.
    '''
    digest = hashlib.md5(str(query_session_req).encode('utf-8')).hexdigest()
    with Transaction() as transaction:
        # Only insert a new row when no session with this digest exists yet.
        existing = transaction.find_all_by_fields(
            UserQuerySession, {'query_uuid': digest}
        ).first()
        if not existing:
            new_session = UserQuerySession(
                query_uuid=digest,
                query_blob=query_session_req['query_blob'],
                user_id=query_session_req['user_id'],
            )
            transaction.add_or_update(new_session, flush=True)
    return digest
def bulk_transfer_alert_def_ownership(source_username, target_username):
    '''Transfer ownership of all alert definitions from one user to another.

    Args:
        source_username: username whose alert definitions are given away.
        target_username: username that receives ownership.
    '''
    logger = g.request_logger if hasattr(g, 'request_logger') else LOG
    source_user_entity = try_get_user(source_username)
    target_user_entity = try_get_user(target_username)
    if not source_user_entity or not target_user_entity:
        logger.error('Cannot transfer alerts from "%s" to "%s"' %
                     (source_username, target_username))
        # BUG FIX: previously execution fell through after logging the error
        # and crashed on `source_user_entity.id` when either lookup failed.
        return
    logger.info(
        'Attempting to transfer ownership of all alerts owned by %s to %s',
        source_username,
        target_username,
    )
    with Transaction() as transaction:
        alert_defs = transaction.find_all_by_fields(
            AlertDefinition, {'user_id': source_user_entity.id})
        for alert_def in alert_defs:
            transfer_alert_def_ownership(transaction, alert_def, target_user_entity)
    logger.info('Transfer of alerts was successful.')
def set_configuration(self, configuration, updated_value):
    '''Overwrite the stored value of a configuration entry.

    Args:
        configuration: the Configuration row being updated.
        updated_value: the new value; validated before being applied.

    Returns:
        The updated configuration entity.

    Raises:
        BadRequest: the new value failed validation.
    '''
    with AuthorizedOperation(
            'edit_resource', 'configuration', configuration.id), \
            Transaction() as transaction:
        key = configuration.key
        try:
            old_value = get_configuration(key)
            assert_valid_configuration(key, updated_value)
            message = ('The configuration for \'%s\' is being updated. '
                       'The existing value is \'%s\'. '
                       'The new value is \'%s\'. ') % (key, old_value, updated_value)
            g.request_logger.info(message)
        except Exception as e:
            # BUG FIX: `e.message` does not exist in Python 3 and raised an
            # AttributeError that masked the real validation error.
            raise BadRequest(description=str(e))
        configuration.overwritten_value = (updated_value,)
        configuration.overwritten = True
        transaction.add_or_update(configuration, flush=True)
        # NOTE(vedant): I have NO idea why the object is stored by default as an array so we
        # always unpack it and reset the value.
        configuration.overwritten_value = configuration.overwritten_value[0]
        transaction.add_or_update(configuration)
        # Restart gunicorn when a new datasource is selected.
        restart_gunicorn_on_datasource_change(key, old_value, updated_value)
    return configuration
def read(self, id):
    '''Return dashboard `id` with its metadata, recording one more view.'''
    with Transaction() as txn:
        dashboard = super(DashboardResource, self).read(id)
        # Count this read both on the dashboard row itself and in the
        # access-tracking metadata.
        dashboard.total_views = dashboard.total_views + 1
        track_dashboard_access(dashboard.id)
        dashboard = txn.add_or_update(dashboard, flush=True)
    return self._get_single_dashboard_with_metadata(id)
def reset_configuration(self, configuration):
    '''Reset a configuration entry back to its default value.'''
    with AuthorizedOperation(
            'edit_resource', 'configuration', configuration.id), \
            Transaction() as txn:
        key = configuration.key
        default_value = _DEFAULT_CONFIGURATION_STORE[key]['value']
        old_value = get_configuration(key)
        g.request_logger.info(
            'The configuration for \'%s\' is being reset to its default value. '
            'The existing value is \'%s\'. '
            'The new (and default) value is \'%s\'. '
            % (key, old_value, default_value)
        )
        # Clearing `overwritten` signifies the default value applies again;
        # the stored override is nulled out for housekeeping reasons.
        configuration.overwritten_value = None
        configuration.overwritten = False
        txn.add_or_update(configuration, flush=True)
        # Restart gunicorn when a datasource is selected.
        restart_gunicorn_on_datasource_change(key, old_value, default_value)
    return configuration
def create(self, properties, commit=True):
    '''Create a new `self.model` instance from `properties` together with its
    backing authorization model row.

    Args:
        properties: mapping of attribute name -> value set on the new item.
        commit: not consulted in this body — presumably kept for interface
            symmetry with `update`; TODO(review) confirm the Transaction
            default commit behavior is intended here.

    Returns:
        The newly created item.
    '''
    try:
        with Transaction() as transaction:
            item = self.model()
            for key, value in list(properties.items()):
                setattr(item, key, value)
            # Create the authorization model associated with this new item and set the
            # appropriate id value of that newly created authorization item on this resource
            authorization_item = self.authorization_model()
            self.create_authorization_model(item, authorization_item)
            # Flush so the authorization row receives its primary key, which
            # is then denormalized onto the item itself.
            authorization_item = transaction.add_or_update(
                authorization_item, flush=True)
            authorization_id = getattr(
                authorization_item, self.authorization_model_id_attribute)
            setattr(item, self.target_model_authorization_attribute,
                    authorization_id)
            # Subclass hook and potion signal fire before the item persists.
            self.before_create(transaction, item, authorization_item)
            potion_signals.before_create.send(self.resource, item=item)
            transaction.add_or_update(item)
    except IntegrityError as exception:
        # Delegate constraint violations (e.g. duplicates) to the shared handler.
        self.integrity_error_handler(exception)
    potion_signals.after_create.send(self.resource, item=item)
    return item
def downvert_data(alembic_operation):
    '''Roll back the data changes introduced by this migration.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        update_site_resource_name(txn, '/')
        # Undo the remaining additions in reverse dependency order.
        for undo_step in (
            delete_query_analyst_public_role,
            delete_role_permissions,
            delete_new_permissions,
            delete_new_roles,
        ):
            undo_step(txn)
def upvert_data(alembic_operation):
    '''Apply the data changes for this migration.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        # Seed roles/permissions first; they are referenced by later steps.
        for apply_step in (
            add_new_roles,
            add_new_permissions,
            add_role_permissions,
            add_query_analyst_public_role,
        ):
            apply_step(txn)
        update_site_resource_name(txn, 'website')
def upvert_data(alembic_operation):
    '''Apply the data changes for this migration.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        # Each step receives the shared migration transaction.
        for apply_step in (
            add_resource_types,
            add_new_roles,
            add_new_permissions,
            add_role_permissions,
            add_user_preferences,
        ):
            apply_step(txn)
def track_dashboard_access(dashboard_id, edited=False):
    '''Record a view (and optionally an edit) of a dashboard in its
    per-user access metadata.
    '''
    timestamp = datetime.now()
    with Transaction() as txn:
        metadata = get_or_create_metadata(txn, dashboard_id)
        metadata.last_viewed = timestamp
        metadata.views_by_user += 1
        if edited:
            metadata.last_edited = timestamp
        txn.add_or_update(metadata)
def set_favorite(self, dashboard, is_favorite):
    '''Mark or unmark `dashboard` as a favorite for the current user.'''
    with AuthorizedOperation(
            'view_resource', 'dashboard', dashboard.id), \
            Transaction() as txn:
        metadata = get_or_create_metadata(txn, dashboard.id)
        metadata.is_favorite = is_favorite
        txn.add_or_update(metadata)
    # Empty body with a 204 status.
    return None, NO_CONTENT
def downvert_data(alembic_operation):
    '''Roll back the data changes introduced by this migration.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        # Guard first, then undo additions in reverse dependency order.
        for undo_step in (
            assert_no_resources_of_new_type,
            delete_role_permissions,
            delete_new_permissions,
            delete_new_roles,
            delete_resource_types,
            delete_user_preferences,
        ):
            undo_step(txn)
def upvert_data(alembic_operation):
    '''Seed the database with its default authorization data.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        LOG.info('Prepopulating database with default information')
        for seed_step in (
            add_new_resource_types,
            add_new_resources,
            add_new_permission_definitions,
            add_new_roles,
            add_new_role_permissions,
        ):
            seed_step(txn)
        LOG.info('Successfully prepopulated data.')
def generate_metadata_after_create(sender, item):
    '''Initialize access metadata for a freshly created dashboard item.'''
    timestamp = datetime.now()
    with Transaction() as txn:
        metadata = get_or_create_metadata(txn, item.id)
        # A newly created dashboard counts as viewed/edited once, now.
        metadata.last_viewed = timestamp
        metadata.last_edited = timestamp
        metadata.views_by_user = 1
        item.total_views += 1
        txn.add_or_update(metadata)
        txn.add_or_update(item)
def get_history(self, dashboard, page, per_page, where, sort):
    '''Return one page of HistoryRecord rows for `dashboard`.

    NOTE(review): `where` and `sort` are accepted but not applied in this
    body — presumably part of a shared pagination interface; confirm.
    '''
    search_fields = {
        'object_id': dashboard.resource_id,
        'object_type': self.meta.name,
    }
    with Transaction() as txn:
        pagination = txn.find_all_by_fields(
            HistoryRecord, search_fields
        ).paginate(page, per_page)
        return pagination.items
def _add_visualization(dashboard, add_query_request, is_advanced_query=False):
    '''Append a query visualization to a custom dashboard's specification
    and return the freshly reloaded dashboard.
    '''
    with Transaction() as txn:
        dashboard.specification = add_query_to_custom_dashboard(
            dashboard.specification,
            add_query_request['activeViewType'],
            add_query_request['querySelections'],
            add_query_request['queryResultSpec'],
            is_advanced_query,
        )
        txn.add_or_update(dashboard, flush=True)
    return find_by_id(Dashboard, dashboard.id)
def api_bulk_transfer_dashboard_ownership(old_author, new_author):
    '''Transfer every dashboard owned by `old_author` to `new_author`.'''
    logger = g.request_logger if hasattr(g, 'request_logger') else LOG
    new_author_string = get_user_string(new_author)
    old_author_string = get_user_string(old_author)
    logger.info(
        'Attempting to transfer ownership of ALL Dashboards owned by %s to %s',
        old_author_string,
        new_author_string,
    )
    with Transaction() as txn:
        bulk_transfer_dashboard_ownership(txn, old_author, new_author)
    logger.info('Transfer was successful.')
def _populate_configuration_table(session=None):
    '''Populates the `configuration` table of the SQL Database with the
    default values from the in-memory configuration store.

    Args:
        session: optional SQLAlchemy session; defaults to the adapter's.

    Raises:
        ValueError: one or more default values failed validation (raised
            after the whole table has been processed).
    '''
    session = session or get_db_adapter().session
    LOG.debug('Populating configuration store with default values. ')
    default_value_errors = []
    with Transaction(get_session=lambda: session) as transaction:
        for key in CONFIGURATION_KEYS:
            entity = transaction.find_one_by_fields(Configuration, True,
                                                    {'key': key})
            value = _DEFAULT_CONFIGURATION_STORE[key]['value']
            try:
                assert_valid_configuration(key, value)
            # We intentionally want to catch all exceptions
            # pylint:disable=W0703
            except Exception as e:
                # Collect the error and keep going so ALL invalid defaults
                # are reported at once (see the raise below).
                default_value_errors.append(e)
                LOG.error(
                    'Encountered an error when attempting to update default value for key \'%s\'. '
                    'Default value was \'%s\'. Error was \'%s\'. ',
                    key,
                    value,
                    e,
                )
            # NOTE(review): a row is still created below even when validation
            # failed for this key's default — confirm that is intended.
            if not entity:
                LOG.debug(
                    'Configuration for \'%s\' did not exist in database, adding it.',
                    key,
                )
                new_entity = Configuration(key=key, overwritten_value=None,
                                           overwritten=False)
                transaction.add_or_update(new_entity, flush=True)
                LOG.debug('Added configuration entry for \'%s\'.', key)
            else:
                LOG.debug(
                    'Configuration for \'%s\' already exists in database, skipping it.',
                    key,
                )
    if default_value_errors:
        raise ValueError(
            'Default configurations were not valid. Details as follows: %s'
            % str(default_value_errors))
def delete(self, item):
    '''Delete `item` together with its backing authorization model row.'''
    try:
        with Transaction() as txn:
            auth_id = getattr(item, self.target_model_authorization_attribute)
            auth_item = txn.find_by_id(self.authorization_model, auth_id)
            auth_item = txn.delete(auth_item)
            # Subclass hook and potion signal fire before the item is removed.
            self.before_delete(txn, item, auth_item)
            potion_signals.before_delete.send(self.resource, item=item)
            txn.delete(item)
    except IntegrityError as exception:
        # Delegate constraint violations to the shared handler.
        self.integrity_error_handler(exception)
    potion_signals.after_delete.send(self.resource, item=item)
def _build_resource_specific_needs(resource_type, permission, resource):
    '''Yields any authorization needs that are specifically related to an
    individual authorization resource or authorization resource type.
    '''
    # $CycloneIdaiHack(vedant)
    if resource_type.name == ResourceTypeEnum.ALERT:
        yield ItemNeed(
            permission.permission,
            resource.id if resource else None,
            'alert_definitions',
        )
    if not resource:
        return
    if resource.resource_type.name == ResourceTypeEnum.QUERY_POLICY:
        with Transaction() as txn:
            policy = txn.find_by_id(QueryPolicy, resource.id, 'resource_id')
            yield construct_query_need_from_policy(policy)
def api_transfer_dashboard_ownership(dashboard, new_author):
    '''Transfer ownership of a single dashboard to `new_author`.'''
    logger = g.request_logger if hasattr(g, 'request_logger') else LOG
    with Transaction() as txn:
        new_author_string = get_user_string(new_author)
        old_author_string = get_user_string(dashboard.author)
        dashboard_string = 'slug: %s, resource_id: %s' % (
            dashboard.slug,
            dashboard.resource_id,
        )
        logger.info(
            'Attempting to transfer ownership of Dashboard \'%s\' from %s to %s',
            dashboard_string,
            old_author_string,
            new_author_string,
        )
        bulk_transfer_dashboard_ownership(txn, dashboard, new_author)
        logger.info('Transfer was successful.')
def send_reset_password(email):
    '''Send a reset-password email to the user registered under `email`.

    Generates a reset token, emails the reset link, persists the token on
    the user row, and fires the flask_user `user_forgot_password` signal.

    Raises:
        ItemNotFound: no user exists with the given email.
        BadGateway: the notification service failed to send the email.
    '''
    logger = g.request_logger if hasattr(g, 'request_logger') else LOG
    with Transaction() as transaction:
        user_manager = current_app.user_manager
        user = transaction.find_one_by_fields(
            User, case_sensitive=True, search_fields={'username': email})
        if not user:
            # BUG FIX: this warning previously formatted `user.username`,
            # which raised AttributeError because `user` is None here.
            logger.warning('User does not exist with email: \'%s\'', email)
            raise ItemNotFound('user', {'username': email})

        # Generate reset password token
        token = user_manager.generate_token(int(user.get_id()))
        reset_password_link = url_for('user.reset_password', token=token,
                                      _external=True)

        # Create reset password email message
        email_message = current_app.email_renderer.create_password_reset_message(
            current_user, user, reset_password_link)
        logger.info('Sending reset-password email to: \'%s\'', user.username)
        try:
            # Send password reset email
            current_app.notification_service.send_email(email_message)
        except NotificationError:
            error = 'Failed to send reset-password email to: \'%s\'' % user.username
            logger.error('Failed to send reset-password email to: \'%s\'',
                         user.username)
            raise BadGateway(error)

        # Store token to db
        user.reset_password_token = token
        user = transaction.add_or_update(user, flush=True)

        # Send forgot_password signal to flask_user to trigger any hooks
        # pylint: disable=W0212
        user_forgot_password.send(current_app._get_current_object(), user=user)
        logger.info(
            'Successfully sent reset password email for user with email: \'%s\'',
            user.username,
        )
def track_dashboard_changes(sender, item, changes):
    '''Write a HistoryRecord capturing the current state of a dashboard.'''
    with Transaction() as txn:
        snapshot = {}
        for key, value in as_dictionary(item).items():
            # datetime is not directly JSON serializable. Thus, we format
            # it as an ISO8601 string.
            snapshot[key] = (
                value.isoformat() if isinstance(value, datetime) else value
            )
        txn.add_or_update(
            HistoryRecord(
                object_type=sender.meta.name,
                object_id=item.resource_id,
                changes=snapshot,
                user_id=current_user.id,
            )
        )
def update(self, item, changes, commit=True):
    '''Apply `changes` to `item` and its backing authorization model.

    Args:
        item: the model instance being updated.
        changes: mapping of attribute name -> new value.
        commit: when False, the surrounding Transaction skips committing.

    Returns:
        The updated item.
    '''
    # BUG FIX: `collections.Mapping` was removed in Python 3.10; the ABC
    # lives in `collections.abc`.
    from collections.abc import Mapping

    try:
        with Transaction(should_commit=lambda *args: commit) as transaction:
            # Restrict the signal payload to attributes that actually change.
            actual_changes = {
                key: value
                for key, value in list(changes.items())
                if self._is_change(get_value(key, item, None), value)
            }
            for key, value in list(changes.items()):
                # Done for the reasons described here
                # https://stackoverflow.com/questions/42559434/updates-to-json-field-dont-persist-to-db
                if isinstance(value, Mapping):
                    flag_modified(item, key)
            authorization_id = getattr(
                item, self.target_model_authorization_attribute)
            authorization_item = transaction.find_by_id(
                self.authorization_model, authorization_id)
            self.update_authorization_model(item, changes, authorization_item)
            authorization_item = transaction.add_or_update(
                authorization_item, flush=True)
            # Subclass hook and potion signal fire before the item persists.
            self.before_update(transaction, item, changes, authorization_item)
            potion_signals.before_update.send(self.resource, item=item,
                                              changes=actual_changes)
            for key, value in list(changes.items()):
                setattr(item, key, value)
            transaction.add_or_update(item)
    except IntegrityError as exception:
        self.integrity_error_handler(exception)
    potion_signals.after_update.send(self.resource, item=item,
                                     changes=actual_changes)
    return item
def before_(sender, item, changes):
    '''Signal handler that records a HistoryRecord snapshot of `item` before
    an update is applied.

    Args:
        sender: the resource emitting the signal; its meta name becomes the
            record's object_type.
        item: the model instance about to change.
        changes: the incoming change dict; only consulted for `formula`.
    '''
    # Keep the raw formula in sync when the formula is being changed.
    if 'formula' in changes and hasattr(item, 'raw_formula'):
        formula = changes.get('formula')
        item.raw_formula = update_formula(formula)
    with Transaction() as transaction:
        if hasattr(item, 'constituents'):
            # Snapshot constituents as plain dicts and blank the relationship
            # on the item — presumably so as_dictionary() below does not try
            # to serialize full ORM objects. TODO(review): confirm clearing
            # `constituents` here has no unwanted persistence side effect.
            constituents = [{
                'id': constituent.id,
                'text_id': constituent.text_id
            } for constituent in item.constituents]
            setattr(item, 'constituents', [])
            changes = as_dictionary(item)
            changes['constituents'] = constituents
        else:
            changes = as_dictionary(item)
        record = HistoryRecord(
            object_type=sender.meta.name,
            object_id=item.id,
            changes=changes,
            user_id=current_user.id,
        )
        transaction.add_or_update(record)
def _attach_metadata_to_query(self, query):
    '''Augment a dashboard `query` with the current user's per-dashboard
    metadata, all dashboard columns, and the author's username.
    '''
    # NOTE(stephen): I don't think a transaction is necessary for this
    # read only query, but it is an easy way to access the session.
    with Transaction() as transaction:
        # Reaching into the private `_session` attribute; the transaction is
        # only used as a session provider here.
        session = transaction._session
        subquery = DashboardUserMetadata.summary_by_dashboard_for_user(
            session, current_user.id
        ).subquery()
        return (
            query
            # Join in the summarized metadata for each dashboard.
            .outerjoin(subquery, Dashboard.id == subquery.c.dashboard_id)
            # Attach user info so we can extract the author username.
            .outerjoin(User, Dashboard.author_id == User.id)
            # Make sure all the metadata columns are included.
            .add_columns(subquery)
            # Also include all dashboard columns since otherwise a new query
            # will be issued EACH TIME we access a dashboard in the query
            # result.
            .add_columns(Dashboard.__table__)
            # Manually set up author_username since hybrid properties weren't
            # transferring.
            .add_columns(User.username.label('author_username'))
        )
def upvert_data(alembic_operation):
    '''Apply the user-status data changes for this migration.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        for apply_step in (
            populate_user_status,
            convert_current_users,
            convert_pending_users,
        ):
            apply_step(txn)
def _initialize_druid_context(app):
    '''Build and attach the Druid query clients, datasource selection, and
    derived lookup caches onto the Flask `app`.
    '''
    zen_configuration = app.zen_config
    # Pulling Data from Zen_Config Module
    druid_host = os.getenv('DRUID_HOST', zen_configuration.druid.DRUID_HOST)
    deployment_name = zen_configuration.general.DEPLOYMENT_NAME
    et_date_selection_enabled = zen_configuration.ui.ENABLE_ET_DATE_SELECTION
    data_status_static_info = zen_configuration.data_status.DATA_STATUS_STATIC_INFO
    geo_field_ordering = zen_configuration.aggregation.GEO_FIELD_ORDERING
    dimension_slices = zen_configuration.aggregation.DIMENSION_SLICES
    filter_dimensions = zen_configuration.filters.FILTER_DIMENSIONS
    authorizable_dimensions = zen_configuration.filters.AUTHORIZABLE_DIMENSIONS
    dimension_categories = zen_configuration.aggregation.DIMENSION_CATEGORIES
    dimension_id_map = zen_configuration.aggregation.DIMENSION_ID_MAP
    druid_configuration = construct_druid_configuration(druid_host)
    system_query_client = DruidQueryClient_(druid_configuration)
    druid_metadata = DruidMetadata_(druid_configuration, system_query_client)
    # TODO(vedant, stephen) - Having an environment variable for this seems
    # like an incredibly limiting choice. This should probably be passed into
    # the configuration of the Flask App.
    # pylint: disable=E0110
    if OFFLINE_MODE:
        # In offline mode, swap in mock clients so no Druid host is needed.
        geo_to_lat_long_field = zen_configuration.aggregation.GEO_TO_LATLNG_FIELD
        map_default_lat_long = zen_configuration.ui.MAP_DEFAULT_LATLNG
        system_query_client = MockDruidQueryClient(geo_to_lat_long_field,
                                                   map_default_lat_long)
        druid_metadata = MockDruidMetadata(deployment_name)
    # If an admin selected 'LATEST_DATASOURCE' in the admin app, app will always
    # select the most recent datasource. Otherwise, we will use the datasource
    # the admin selected and default to most recent datasource if datasource
    # doesn't exist.
    datasource_config = get_configuration(CUR_DATASOURCE_KEY)
    if datasource_config in druid_metadata.get_datasources_for_site(
            deployment_name):
        datasource = SiteDruidDatasource.build(datasource_config)
    else:
        datasource = druid_metadata.get_most_recent_datasource(deployment_name)
        if datasource_config != 'LATEST_DATASOURCE':
            # The configured datasource no longer exists; reset the stored
            # configuration back to 'LATEST_DATASOURCE'.
            LOG.error('Datasource %s does not exist.', datasource_config)
            with Transaction() as transaction:
                config_database_entity = transaction.find_one_by_fields(
                    Configuration,
                    search_fields={'key': CUR_DATASOURCE_KEY},
                    case_sensitive=False,
                )
                config_database_entity.overwritten_value = 'LATEST_DATASOURCE'
                config_database_entity.overwritten = True
                transaction.add_or_update(config_database_entity, flush=True)
    LOG.info('** Using datasource %s **', datasource.name)
    # Eagerly populate the lookup caches used to answer UI queries.
    dimension_values = DimensionValuesLookup(
        system_query_client,
        datasource,
        filter_dimensions,
        dimension_slices,
        authorizable_dimensions,
        geo_field_ordering,
    )
    dimension_values.load_dimensions_from_druid()
    time_boundary = DataTimeBoundary(system_query_client, datasource)
    time_boundary.load_time_boundary_from_druid()
    row_count_lookup = RowCountLookup(system_query_client, datasource)
    status_information = SourceStatus(
        system_query_client,
        datasource,
        data_status_static_info,
        et_date_selection_enabled,
    )
    status_information.load_all_status()
    dimension_metadata = DimensionMetadata(system_query_client, datasource)
    dimension_metadata.load_dimension_metadata(
        dimension_categories, dimension_id_map,
        time_boundary.get_full_time_interval())
    druid_context = DruidApplicationContext(
        druid_metadata,
        druid_configuration,
        dimension_values,
        time_boundary,
        status_information,
        row_count_lookup,
        dimension_metadata,
        datasource,
    )
    # Expose everything on the app for request handlers to use.
    app.query_client = AuthorizedQueryClient(system_query_client)
    app.system_query_client = system_query_client
    app.druid_context = druid_context
def main():
    '''Command-line entry point for creating or updating a user account.

    Returns:
        0 on success; 2 (missing name), 3 (invalid email), or 5 (missing
        username) for argument-validation failures.
    '''
    Flags.PARSER.add_argument(
        '-d',
        '--sql_connection_string',
        type=str,
        required=False,
        # BUG FIX: added the missing space between 'priority' and 'over'.
        help='The SQL Connection String to use to connect to the SQL '
        'Database. Can also be specified via the \'DATABASE_URL\' '
        'environment variable. The inline parameter takes priority '
        'over the environment variable.',
    )
    Flags.PARSER.add_argument(
        '-u',
        '--username',
        type=str,
        required=False,
        help='The username of the user. MUST be a Zenysis e-mail address.',
    )
    Flags.PARSER.add_argument('-f', '--first_name', type=str, required=False,
                              help='The user\'s first name.')
    Flags.PARSER.add_argument('-l', '--last_name', type=str, required=False,
                              help='The user\'s last name. ')
    Flags.PARSER.add_argument(
        '-p',
        '--password',
        type=str,
        required=False,
        help='The user\'s password. If none specified, this will be '
        'auto-generated. ',
    )
    # BUG FIX: the help text previously described an unrelated SSL option
    # ('The type of SSL configuration to use.') and dropped the word 'user'.
    Flags.PARSER.add_argument(
        '-s',
        '--status',
        type=str,
        action='store',
        required=False,
        choices=[e.name for e in UserStatusEnum],
        default=UserStatusEnum.ACTIVE.name,
        help=('The status to assign to the user account. '
              '1. ACTIVE - The user will be able to login immediately. '
              '2. INACTIVE - The user will not be able to login unless an '
              'Administrator logs in and marks the user as active. '
              '3. PENDING - The user will not be able to login unless an '
              'Administrator logs in and sends the user an invite email. '),
    )
    Flags.PARSER.add_argument(
        '-a',
        '--site_admin',
        action='store_true',
        required=False,
        default=False,
        help='If specified, make user an admin.',
    )
    Flags.PARSER.add_argument(
        '-o',
        '--overwrite',
        action='store_true',
        required=False,
        default=False,
        help='Overwrite the user if the specified username already exists.',
    )
    Flags.PARSER.add_argument(
        '-A',
        '--automation_user',
        action='store_true',
        required=False,
        default=False,
        help='Make a new automation user.',
    )
    Flags.InitArgs()

    sql_connection_string = Flags.ARGS.sql_connection_string
    if not sql_connection_string:
        # Fall back to the credential stored in the instance configuration.
        instance_configuration = load_instance_configuration_from_file()
        with CredentialProvider(instance_configuration) as credential_provider:
            sql_connection_string = credential_provider.get(
                'SQLALCHEMY_DATABASE_URI')

    username = Flags.ARGS.username
    first_name = Flags.ARGS.first_name or None
    last_name = Flags.ARGS.last_name or None
    plaintext_password = Flags.ARGS.password
    is_site_admin = Flags.ARGS.site_admin
    # pylint: disable=E1136
    # The types defined in Flags match exactly those defined in the Enum
    # there will not be a key error
    status = UserStatusEnum[Flags.ARGS.status]
    overwrite_user = Flags.ARGS.overwrite
    automation_user = Flags.ARGS.automation_user

    if automation_user:
        # Automation users have a fixed identity and generated credentials.
        username = AUTOMATION_USERNAME
        first_name = AUTOMATION_FIRST_NAME
        last_name = AUTOMATION_LAST_NAME
        _, plaintext_password = get_credentials()
        is_site_admin = True

    if not username:
        LOG.error(
            'You must provide a username if you are not creating a automation user.'
        )
        return 5
    if not overwrite_user and (not first_name or not last_name):
        LOG.error(
            'You must provide a first and last name if you are creating a new user.'
        )
        return 2

    username = username.strip()
    first_name = first_name.strip() if first_name else None
    last_name = last_name.strip() if last_name else None
    if not is_email_address(username):
        LOG.error('Username \'%s\' is not valid. It must be an e-mail address.',
                  username)
        return 3

    Session = sessionmaker()
    engine = create_engine(sql_connection_string)
    Session.configure(bind=engine)
    session = Session()
    with Transaction(should_commit=None,
                     get_session=lambda: session) as transaction:
        (new_user, plaintext_password) = create_user(
            transaction,
            username,
            first_name,
            last_name,
            plaintext_password,
            is_site_admin,
            overwrite_user,
            status,
        )
        LOG.info(
            'Successfully created/updated User \'%s\' with status \'%s\' and password \'%s\'.',
            get_user_string(new_user),
            status.name,
            plaintext_password,
        )
    return 0
def downvert_data(alembic_operation):
    '''Roll back the user-status data changes for this migration.'''
    with Transaction(
            get_session=lambda: get_session(alembic_operation)) as txn:
        for undo_step in (revert_current_users, revert_pending_users):
            undo_step(txn)