Code Example #1
def dependent_signal_paths(signal_path):
    """
        Gives the hierarchy of publisher signal calling order based on the given signal
        Signals are given as strings instead of paths for serialization ease
        param: signal_path. The signal path for which the dependent signals are returned
        return: An array of signal_paths or an empty array
    """
    if signal_path == resolvable_module_attr_path(__name__, 'post_save_db_entity_initial'):
        # DataImport dependent publishers are run after DbEntity dependent publishers
        return [
            resolvable_module_attr_path(__name__, 'post_save_db_entity_import'),
        ]
    return []
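
dependent_signal_paths only returns the immediate dependents of a signal, so the full calling order has to be assembled by walking the tree. A minimal sketch of that walk, assuming the dependency graph never contains a cycle (collect_signal_paths is a hypothetical helper, not part of the module above):

def collect_signal_paths(signal_path, resolve=dependent_signal_paths):
    """Depth-first walk over the publisher dependency tree (sketch).

    Returns signal_path followed by all of its transitive dependents in the
    order they would be published. Assumes resolve never produces a cycle.
    """
    ordered = [signal_path]
    for dependent_path in resolve(signal_path):
        ordered.extend(collect_signal_paths(dependent_path, resolve))
    return ordered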
Code Example #2
def dependent_signal_paths(signal_path):
    """
        Gives the hierarchy of publisher signal calling order based on the given signal
        Signals are given as strings instead of paths for serialization ease
        param: signal_path. The signal path for which the dependent signals are returned
        return: An array of signal_paths or an empty array
    """
    if signal_path == resolvable_module_attr_path(__name__, 'post_save_db_entity_initial'):
        # DataImport dependent publishers are run after DbEntity dependent publishers
        return [
            resolvable_module_attr_path(__name__, 'post_save_db_entity_import'),
        ]
    return []
Code Example #3
def on_feature_post_save(sender, **kwargs):
    """
        Called after one or more features save. This is called from the Feature post_save with kwargs
    """
    # todo invalidate the tiles at the current map zoom level FIRST, then do the other zoom levels
    features = kwargs['instance']
    user = get_user_model().objects.get(id=kwargs['user_id'])
    config_entity = features[0].config_entity.subclassed
    db_entity_key = features[0].db_entity_key
    logger.info("Handler: post_save_feature for config_entity {config_entity}, db_entity_key {db_entity_key}, and user {username}.".format(
        config_entity=config_entity,
        db_entity_key=db_entity_key,
        username=user.username
    ))

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_feature_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=features,
        # This is a unique attribute to tell the client the DbEntity key of the Feature class
        class_key=db_entity_key,
        # The publisher client communication is only concerned with the base Feature class, not the dynamic subclasses
        instance_class=Feature,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_feature'
    )
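
The handler passes a module-level signal_proportion_lookup to post_save_publishing that isn't shown in these examples. Its real contents are project configuration; purely as an illustration of the shape such a lookup plausibly takes (the second key and both proportions below are invented), it could map each signal in the chain to the share of overall progress its publishers represent:

# Hypothetical example only; the real module defines its own names and values.
signal_proportion_lookup = dict(
    post_save_feature_initial=0.5,       # the starting signal used above
    post_save_feature_tilestache=0.5,    # an assumed dependent stage
)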
Code Example #4
def on_feature_post_save(sender, **kwargs):
    """
        Called after one or more features save. This is called from the Feature post_save with kwargs
    """
    # todo invalidate the tiles at the current map zoom level FIRST, then do the other zoom levels
    features = kwargs['instance']
    user = get_user_model().objects.get(id=kwargs['user_id'])
    config_entity = features[0].config_entity.subclassed
    db_entity_key = features[0].db_entity_key
    logger.info(
        "Handler: post_save_feature for config_entity {config_entity}, db_entity_key {db_entity_key}, and user {username}."
        .format(config_entity=config_entity,
                db_entity_key=db_entity_key,
                username=user.username))

    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_feature_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=features,
        # This is a unique attribute to tell the client the DbEntity key of the Feature class
        class_key=db_entity_key,
        # The publisher client communication is only concerned with the base Feature class, not the dynamic subclasses
        instance_class=Feature,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_feature')
Code Example #5
def _on_built_form_post_save(built_forms):

    logger.info("Handler: post_save_built_form for {built_forms}".format(
        built_forms=', '.join(map_property(built_forms, 'name')))
    )
    user = built_forms[0].updater
    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_built_form_initial')

    return post_save_publishing(
        starting_signal_path,
        None,
        user,
        instance=built_forms,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_built_form')
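
map_property is a project utility not shown in these examples; the call above reads as pulling the named attribute off each built form so the names can be joined into the log message. A minimal stand-in under that assumption:

def map_property(items, attribute_name):
    """Return attribute_name's value for each item (sketch of the assumed utility)."""
    return [getattr(item, attribute_name) for item in items]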
Code Example #6
def _on_built_form_post_save(built_forms):

    logger.info("Handler: post_save_built_form for {built_forms}".format(
        built_forms=', '.join(map_property(built_forms, 'name')))
    )
    user = built_forms[0].updater
    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_built_form_initial')

    return post_save_publishing(
        starting_signal_path,
        None,
        user,
        instance=built_forms,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_built_form')
Code Example #7
def on_user_post_save(sender, **kwargs):
    user = kwargs['instance']
    groups = user.groups.all()
    if not groups:
        raise Exception('User %s is not in any groups' % user)

    config_entities = set()
    for group in groups:
        config_entity = group.group_hierarchy.config_entity
        if not config_entity and group.name in UserGroupKey.GLOBAL:
            # Each of the GLOBAL groups is a special case--both a global Group and a ConfigEntity Group.
            # It purposely doesn't resolve its config_entity, but we need one here.
            config_entity = global_config_singleton()

        if config_entity:
            config_entities.add(config_entity)

    # We should always have at least one ConfigEntity
    if not config_entities:
        raise Exception('No config entity for user {user}'.format(user=user))

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_user_initial')

    scenarios = set()
    for config_entity in config_entities:
        scenarios |= set(config_entity.descendants_by_type(Scenario))

    # Randomize the order because we've had problems with
    # post-save processing of admin-level users causing
    # the machine to run out of available memory, which
    # prevented scenarios later in the list from being processed.
    scenarios = list(scenarios)
    random.shuffle(scenarios)

    for scenario in scenarios:
        post_save_publishing(
            starting_signal_path,
            scenario,
            user,
            instance=user,
            instance_class=User,
            instance_key=user.username,
            signal_proportion_lookup=signal_proportion_lookup,
            dependent_signal_paths=dependent_signal_paths,
            signal_prefix='post_save_user',
            scenario=scenario
        )
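
The group-to-ConfigEntity resolution at the top of this handler is self-contained enough to extract and test on its own. A sketch of that extraction, reusing the group_hierarchy, UserGroupKey.GLOBAL, and global_config_singleton API from above (the function name is hypothetical):

def config_entities_for_groups(groups):
    """Resolve the distinct ConfigEntities implied by a user's groups.

    Mirrors the loop in on_user_post_save: GLOBAL groups deliberately carry
    no config_entity, so they fall back to the GlobalConfig singleton.
    """
    config_entities = set()
    for group in groups:
        config_entity = group.group_hierarchy.config_entity
        if not config_entity and group.name in UserGroupKey.GLOBAL:
            config_entity = global_config_singleton()
        if config_entity:
            config_entities.add(config_entity)
    return config_entities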
Code Example #8
def on_user_post_save(sender, **kwargs):
    user = kwargs['instance']
    groups = user.groups.all()
    if not groups:
        raise Exception('User %s is not in any groups' % user)

    config_entities = set()
    for group in groups:
        config_entity = group.group_hierarchy.config_entity
        if not config_entity and group.name in UserGroupKey.GLOBAL:
            # Each of the GLOBAL groups is a special case--both a global Group and a ConfigEntity Group.
            # It purposely doesn't resolve its config_entity, but we need one here.
            config_entity = global_config_singleton()

        if config_entity:
            config_entities.add(config_entity)

    # We should always have at least one ConfigEntity
    if not config_entities:
        raise Exception('No config entity for user {user}'.format(user=user))

    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_user_initial')

    scenarios = set()
    for config_entity in config_entities:
        scenarios |= set(config_entity.descendants_by_type(Scenario))

    # Randomize the order because we've had problems with
    # post-save processing of admin-level users causing
    # the machine to run out of available memory, which
    # prevented scenarios later in the list from being processed.
    scenarios = list(scenarios)
    random.shuffle(scenarios)

    for scenario in scenarios:
        post_save_publishing(starting_signal_path,
                             scenario,
                             user,
                             instance=user,
                             instance_class=User,
                             instance_key=user.username,
                             signal_proportion_lookup=signal_proportion_lookup,
                             dependent_signal_paths=dependent_signal_paths,
                             signal_prefix='post_save_user',
                             scenario=scenario)
Code Example #9
def on_layer_post_save(sender, **kwargs):
    """
        Called after a Layer saves, but not when a config_entity is running post_save publishers
        In other words, this is only called after a direct Layer save/update.
        This does the same as on_config_entity_post_save_layer, but starts with the 'post_save_layer_initial'
        signal to do only DbEntity dependent publishing.
    """
    layer = kwargs['instance']
    if layer._no_post_save_publishing:
        return
    config_entity = layer.config_entity

    db_entity_interest = \
        config_entity.computed_db_entity_interests(db_entity__key=layer.db_entity_key, with_deleted=True)[0]

    if db_entity_interest.deleted:
        # If the db_entity_interest is deleted, make sure the layer is deleted.
        layer.deleted = True
        layer._no_post_save_publishing = True
        layer.save()
        layer._no_post_save_publishing = False
        return

    db_entity = db_entity_interest.db_entity
    user = layer.updater if layer.updater else get_user_model().objects.get(
        username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity publishing should always be disabled if we are saving a ConfigEntity
    logger.info(
        "Handler: post_save_layer for config_entity {config_entity}, db_entity {db_entity}, and user {username}."
        .format(
            config_entity=config_entity,
            db_entity=db_entity,
            username=user.username,
        ))

    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_layer_initial')

    try:
        # Make sure no transactions are outstanding
        transaction.commit()
    except Exception:
        # Safe to ignore if there is no outstanding transaction to commit
        pass
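
The deleted-layer branch above flips _no_post_save_publishing on, saves, and flips it back by hand, and the same pattern reappears for DbEntityInterest further down. A small context manager would keep the flag from being left set if the save raises; this is a sketch only, assuming the flag is an ordinary attribute as it appears in these handlers:

from contextlib import contextmanager


@contextmanager
def suppressed_post_save_publishing(instance):
    """Temporarily disable post-save publishing while mutating instance.

    Restores the previous flag value even if the block raises, unlike the
    manual set/save/reset sequence used above.
    """
    previous = instance._no_post_save_publishing
    instance._no_post_save_publishing = True
    try:
        yield instance
    finally:
        instance._no_post_save_publishing = previous

# e.g. the deleted-layer branch could become:
#     layer.deleted = True
#     with suppressed_post_save_publishing(layer):
#         layer.save()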
Code Example #10
def on_layer_post_save(sender, **kwargs):
    """
        Called after a Layer saves, but not when a config_entity is running post_save publishers
        In other words, this is only called after a direct Layer save/update.
        This does the same as on_config_entity_post_save_layer, but starts with the 'post_save_layer_initial'
        signal to do only DbEntity dependent publishing.
    """
    layer = kwargs['instance']
    if layer._no_post_save_publishing:
        return
    config_entity = layer.config_entity

    db_entity_interest = \
        config_entity.computed_db_entity_interests(db_entity__key=layer.db_entity_key, with_deleted=True)[0]

    if db_entity_interest.deleted:
        # If the db_entity_interest is deleted, make sure the layer is deleted.
        layer.deleted = True
        layer._no_post_save_publishing = True
        layer.save()
        layer._no_post_save_publishing = False
        return

    db_entity = db_entity_interest.db_entity
    user = layer.updater if layer.updater else get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity publishing should always be disabled if we are saving a ConfigEntity
    logger.info(
        "Handler: post_save_layer for config_entity {config_entity}, db_entity {db_entity}, and user {username}.".format(
            config_entity=config_entity,
            db_entity=db_entity,
            username=user.username,
        ))

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_layer_initial')

    try:
        # Make sure no transactions are outstanding
        transaction.commit()
    except Exception:
        # Safe to ignore if there is no outstanding transaction to commit
        pass
Code Example #11
def dependent_signal_paths(signal_path):
    if signal_path == resolvable_module_attr_path(__name__, 'post_save_built_form_initial'):
        return [resolvable_module_attr_path(__name__, 'post_save_built_form_layer')]
    return []
Code Example #12
def dependent_signal_paths(signal_path):
    if signal_path == resolvable_module_attr_path(__name__, 'post_save_built_form_initial'):
        return [resolvable_module_attr_path(__name__, 'post_save_built_form_layer')]
    return []
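
Every handler in this section serializes its signals with resolvable_module_attr_path so they can be handed to asynchronous workers as strings. Its implementation isn't included in these examples; an assumed minimal shape, together with the inverse lookup a worker would need, might look like the following (both functions are sketches, not the project's actual code):

from importlib import import_module


def resolvable_module_attr_path(module_name, attr_name):
    """Serialize a module-level attribute (here, a Django Signal) as a
    dotted 'package.module.attr' string (assumed shape)."""
    return '{0}.{1}'.format(module_name, attr_name)


def resolve_module_attr(path):
    """Inverse of the above: import the module and return the attribute so a
    worker can send the signal the path names (assumed shape)."""
    module_path, attr_name = path.rsplit('.', 1)
    return getattr(import_module(module_path), attr_name)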
Code Example #13
def on_config_entity_post_save(sender, **kwargs):
    """
        Create the ConfigEntity's database schema on initial save.
        Post save starts a chain of asynchronous publishers that run according to a dependency tree.
        First, publishers wired to the post_save_config_entity_initial signal run,
        followed by publishers wired to signals that are dependent on
        post_save_config_entity_initial (see dependent_signal_paths).
        :param sender:
        :param kwargs:
            instance - the ConfigEntity
            created - True if the instance was just created
            sync - True if the instance should be synced to the configuration
        :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if transaction.is_managed():
        transaction.commit()
    crud_type = CrudKey.resolve_crud(**kwargs)

    # Send a message to publishers to configure after creation or update of the config_entity
    # This is executed through a Celery task so that it can run asynchronously
    if config_entity._no_post_save_publishing:
        return
    if config_entity.deleted:
        # Also do nothing if the config_entity is deleted. At some point this should do some
        # processing, such as rekeying the scenario so it doesn't conflict with new scenario keys
        return

    for child_config_entity in config_entity.children():
        # Do any needed syncing of config_entity_children
        # This currently does nothing
        child_config_entity.parent_config_entity_saved()


    if CrudKey.CLONE == crud_type:
        config_entity.add_categories(*config_entity.origin_instance.categories.all())
    elif CrudKey.CREATE == crud_type:
        # Unless preconfigured, set the basic category based on type
        if config_entity.categories.count() == 0:
            category = Category.objects.update_or_create(
                key='category',
                value='Future' if isinstance(config_entity, FutureScenario) else 'Base')[0]
            config_entity.add_categories(category)

    # TODO The default user here should be the admin, and in fact all config_entity instances
    # should simply be required to have a creator
    user = config_entity.creator if config_entity.creator else get_user_model().objects.get(username=GroupKey.SUPERADMIN)
    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_config_entity_initial')

    logger.info("Handler: post_save_config_entity for config_entity {config_entity} and user {username}".format(
        config_entity=config_entity,
        username=user.username))

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=config_entity,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_config_entity',
        crud_type=crud_type)
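
Taken together, the arguments these handlers pass to post_save_publishing describe a dependency-driven dispatch: send the starting signal, then recurse into its dependents, with signal_proportion_lookup available for progress reporting. The comment above notes that the real implementation runs through a Celery task; the synchronous sketch below only illustrates that contract (resolve_module_attr is the assumed helper sketched earlier) and is not the project's implementation:

def sketch_post_save_publishing(starting_signal_path, config_entity, user,
                                signal_proportion_lookup=None,
                                dependent_signal_paths=lambda path: [],
                                **kwargs):
    """Synchronous illustration of the publishing chain (not the real API).

    The real post_save_publishing dispatches each signal asynchronously and
    uses signal_proportion_lookup to report progress; neither is shown here.
    """
    signal = resolve_module_attr(starting_signal_path)
    signal.send(sender=config_entity.__class__ if config_entity else None,
                config_entity=config_entity, user=user, **kwargs)
    for dependent_path in dependent_signal_paths(starting_signal_path):
        sketch_post_save_publishing(dependent_path, config_entity, user,
                                    signal_proportion_lookup,
                                    dependent_signal_paths, **kwargs)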
Code Example #14
def on_config_entity_post_save(sender, **kwargs):
    """
        Create the ConfigEntity's database schema on initial save.
        Post save starts a chain of asynchronous publishers that run according to a dependency tree.
        First, publishers wired to the post_save_config_entity_initial signal run,
        followed by publishers wired to signals that are dependent on
        post_save_config_entity_initial (see dependent_signal_paths).
        :param sender:
        :param kwargs:
            instance - the ConfigEntity
            created - True if the instance was just created
            sync - True if the instance should be synced to the configuration
        :return:
    """
    config_entity = InstanceBundle.extract_single_instance(**kwargs)
    if transaction.is_managed():
        transaction.commit()
    crud_type = CrudKey.resolve_crud(**kwargs)

    # Send a message to publishers to configure after creation or update of the config_entity
    # This is executed through a Celery task so that it can run asynchronously
    if config_entity._no_post_save_publishing:
        return
    if config_entity.deleted:
        # Also do nothing if the config_entity is deleted. At some point this should do some
        # processing, such as rekeying the scenario so it doesn't conflict with new scenario keys
        return

    for child_config_entity in config_entity.children():
        # Do any needed syncing of config_entity_children
        # This currently does nothing
        child_config_entity.parent_config_entity_saved()

    if CrudKey.CLONE == crud_type:
        config_entity.add_categories(
            *config_entity.origin_instance.categories.all())
    elif CrudKey.CREATE == crud_type:
        # Unless preconfigured, set the basic category based on type
        if config_entity.categories.count() == 0:
            category = Category.objects.update_or_create(
                key='category',
                value='Future' if isinstance(config_entity, FutureScenario) else 'Base')[0]
            config_entity.add_categories(category)

    # TODO The default user here should be the admin, and in fact all config_entity instances
    # should simply be required to have a creator
    user = config_entity.creator if config_entity.creator else get_user_model().objects.get(
        username=GroupKey.SUPERADMIN)
    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_config_entity_initial')

    logger.info(
        "Handler: post_save_config_entity for config_entity {config_entity} and user {username}"
        .format(config_entity=config_entity, username=user.username))

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=config_entity,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_config_entity',
        crud_type=crud_type)
Code Example #15
def on_db_entity_interest_post_save(sender, **kwargs):
    """
        Called after a DbEntityInterest saves, but not when a config_entity is running post_save publishers
        In other words, this is only called after a direct DbEntityInterest save/update.
        This does the same as post_save_config_entity, but starts with the 'post_save_db_entity_initial'
        signal to do only DbEntity dependent presentation.
    """
    db_entity_interest = kwargs['instance']
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model().objects.get(
        username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info(
        "Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
        "and user {username}.".format(config_entity=config_entity,
                                      db_entity=db_entity_interest.db_entity,
                                      username=user.username))

    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete the previous DbEntities with the same key name
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(
            key=db_entity.key).exclude(id=db_entity.id).delete()

        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor of the config_entity (like the project or a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema()
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration
            and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = (
            db_entity.feature_class_configuration or FeatureClassConfiguration())
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(
                OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(
                GeoJsonProcessor)
            # Indicate that the feature class configuration was generated, not fixture-based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(
                ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated, not fixture-based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(
                DefaultImportProcessor)
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_db_entity_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.setup_percent_complete == 0,
        **filter_keys(kwargs, ['created']))
Code Example #16
def on_db_entity_interest_post_save(sender, **kwargs):
    """
        Called after a DbEntityInterest saves, but not when a config_entity is running post_save publishers
        In other words, this is only called after a direct DbEntityInterest save/update.
        This does the same as post_save_config_entity, but starts with the 'post_save_db_entity_initial'
        signal to do only DbEntity dependent presentation.
    """
    db_entity_interest = kwargs['instance']
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info("Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
                "and user {username}.".format(
        config_entity=config_entity,
        db_entity=db_entity_interest.db_entity,
        username=user.username
    ))

    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete the previous DbEntities with the same key name
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(key=db_entity.key).exclude(id=db_entity.id).delete()

        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor of the config_entity (like the project or a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema()
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = db_entity.feature_class_configuration or FeatureClassConfiguration()
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(GeoJsonProcessor)
            # Indicate that the feature class configuration was generated, not fixture-based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated, not fixture-based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(DefaultImportProcessor)
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_db_entity_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.setup_percent_complete == 0,
        **filter_keys(kwargs, ['created'])
    )
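
One detail worth noting in the importer selection above: '.json' in db_entity.url.lower() matches any URL containing '.json' anywhere, not only URLs ending with it (and likewise for '.zip'). If stricter matching were wanted, the decision could be isolated in a helper keyed on the URL path's suffix; a sketch reusing the processor names above (choose_data_importer itself is hypothetical):

try:
    from urllib.parse import urlparse  # Python 3
except ImportError:
    from urlparse import urlparse      # Python 2


def choose_data_importer(db_entity):
    """Pick a data importer module path for db_entity, or None (sketch).

    Mirrors the branch in on_db_entity_interest_post_save, but matches on the
    suffix of the URL's path instead of a substring of the whole URL.
    """
    if db_entity.origin_instance:
        return full_module_path(OriginDbEntityProcessor)
    path = urlparse((db_entity.url or '').lower()).path
    if path.endswith('.json'):
        return full_module_path(GeoJsonProcessor)
    if path.endswith('.zip'):
        return full_module_path(ZippedSqlFileProcessor)
    if not db_entity.no_feature_class_configuration:
        return full_module_path(DefaultImportProcessor)
    return None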