Example 1
def dehydrate_user(self, bundle):
    """
        Expose only the username of the nested user resource.
    :param bundle: resource bundle whose data contains a nested 'user' bundle
    :return: the user data restricted to the 'username' key
    """
    return filter_keys(bundle.data['user'].data, ['username'])
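The filter_keys helper is not defined in this listing; it presumably whitelists keys on the nested user bundle's .data dict (the method reads like a Tastypie dehydrate_<field> hook, though that is inferred from the signature). A minimal sketch with hypothetical sample data:

def filter_keys(source_dict, allowed_keys):
    """Return a copy of source_dict containing only the allowed keys (sketch only)."""
    return dict((key, value) for key, value in source_dict.items() if key in allowed_keys)


# Hypothetical data standing in for bundle.data['user'].data
user_data = {'username': 'alice', 'email': 'alice@example.com', 'is_staff': False}
print(filter_keys(user_data, ['username']))  # -> {'username': 'alice'}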
Example 2
def on_db_entity_interest_post_save(sender, **kwargs):
    """
        Called after a DbEntityInterest saves, but not when a config_entity is running its post_save publishers;
        in other words, this is only called after a direct DbEntityInterest save/update.
        This does the same as post_save_config_entity, but starts with the 'post_save_config_entity_db_entities'
        signal so that only DbEntity-dependent presentation runs.
    """
    db_entity_interest = kwargs['instance']
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info("Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
                "and user {username}.".format(
        config_entity=config_entity,
        db_entity=db_entity_interest.db_entity,
        username=user.username
    ))

    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete any previous DbEntities with the same key
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(key=db_entity.key).exclude(id=db_entity.id).delete()

        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor of the config_entity (like the project of a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema()
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = db_entity.feature_class_configuration or FeatureClassConfiguration()
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(GeoJsonProcessor)
            # Indicate that the feature class configuration was generated, not fixture-based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated, not fixture-based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(DefaultImportProcessor)
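        # Temporarily disable post_save publishing while persisting the
        # feature_class_configuration so this intermediate save does not
        # trigger the publishing section guarded below.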
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_db_entity_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.setup_percent_complete == 0,
        **filter_keys(kwargs, ['created'])
    )
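
The handler reads kwargs['instance'] and kwargs.get('created'), which is the signature Django passes to post_save receivers. A minimal registration sketch, assuming the standard django.db.models.signals.post_save signal; the dispatch_uid value is illustrative and the project may register the handler differently (for example via a custom signal):

from django.db.models.signals import post_save

# Register the handler so it fires after every DbEntityInterest save.
# dispatch_uid guards against duplicate registration on repeated imports.
post_save.connect(
    on_db_entity_interest_post_save,
    sender=DbEntityInterest,
    dispatch_uid='on_db_entity_interest_post_save')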