Example 1
 def map_result_path(field_path):
     # Get the field and the optional related model for this path.
     # 'self' and 'return_related_models' are presumably closed over from the
     # enclosing scope, so this reads like a nested helper function.
     field, related_model = resolve_field_of_path(self, field_path, True)
     field_class_path = full_module_path(field.__class__)
     # Return the field path with its field class path, optionally paired with
     # the related model's path when return_related_models is set
     return [field_path,
             field_class_path if not return_related_models else
             (field_class_path, full_module_path(related_model) if related_model else None)]
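Every snippet in this collection relies on full_module_path, which is not shown here. A minimal sketch of the contract these call sites appear to assume (the dotted import path of a class or function); this is an illustration, not the project's actual implementation:

def full_module_path(obj):
    # Assumed behavior: return the dotted import path of a class or function,
    # e.g. full_module_path(SomeModel) -> 'myapp.models.SomeModel'
    return '%s.%s' % (obj.__module__, obj.__name__)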
Example 2
 def __init__(self, **kwargs):
     """
         kwargs are the attributes of the class. None are required
     """
     abstract_class_name = full_module_path(kwargs['abstract_class']) if \
                             kwargs.get('abstract_class') else \
                             kwargs.get('abstract_class_name', full_module_path(Feature))
     updated_kwargs = merge(remove_keys(kwargs, ['abstract_class']),
                            dict(abstract_class_name=abstract_class_name))
     self.__dict__.update(**updated_kwargs)
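A minimal usage sketch for this constructor, assuming it belongs to a configuration class along the lines of the FeatureClassConfiguration named in Example 13, with Feature as the default abstract class:

config = FeatureClassConfiguration(abstract_class=Feature, generated=True)
# The class is stored as an importable dotted path rather than a class object,
# so the configuration stays serializable and the class can be re-resolved later
assert config.abstract_class_name == full_module_path(Feature)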
Example 3
def disable_signal_handler(signal_ref_path, handler, uid, limit_to_classes):
    for cls in filter_classes(limit_to_classes):
        resolve_module_attr(signal_ref_path).disconnect(handler, cls, True, uid)

    disable_signal_handler_for_celery.apply_async(
        args=(signal_ref_path, full_module_path(handler), uid, map(lambda cls: full_module_path(cls), limit_to_classes)),
        soft_time_limit=3600,
        time_limit=3600,
        countdown=1
    )
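The handler and the model classes are passed to the celery task as dotted paths because class and function objects do not serialize cleanly across processes; the worker presumably re-imports them with resolve_module_attr. A sketch of what such an inverse helper typically looks like (an assumption, not the project's code):

from importlib import import_module

def resolve_module_attr(path):
    # Split 'package.module.attr' into module and attribute, import the module,
    # and return the named attribute (a signal, class, handler, etc.)
    module_name, attr_name = path.rsplit('.', 1)
    return getattr(import_module(module_name), attr_name)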
Example 4
 def __init__(self, **kwargs):
     """
         kwargs are the attributes of the class. None are required
     """
     abstract_class_name = (
         full_module_path(kwargs["abstract_class"])
         if kwargs.get("abstract_class")
         else kwargs.get("abstract_class_name", full_module_path(Feature))
     )
     updated_kwargs = merge(remove_keys(kwargs, ["abstract_class"]), dict(abstract_class_name=abstract_class_name))
     self.__dict__.update(**updated_kwargs)
Example 5
    def __init__(self, configuration):

        self.abstract_class_name = full_module_path(configuration['abstract_class']) if \
                                configuration.get('abstract_class') else \
                                configuration.get('abstract_class_name', full_module_path(AnalysisModule))
        self.configuration = remove_keys(configuration, ['key', 'name', 'description'])
        self.name = configuration.get('name')
        self.description = configuration.get('description')
        self.partner_description = configuration.get('partner_description')
        self.key = configuration.get('key')
        self.analysis_tools = configuration.get('analysis_tools', [])
        super(AnalysisModuleConfiguration, self).__init__()
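A hypothetical call, assuming AnalysisModuleConfiguration wraps this constructor; the key and name values below are invented for illustration:

config = AnalysisModuleConfiguration(dict(
    key='example_module',                 # invented key
    name='Example Analysis Module',       # invented name
    abstract_class=AnalysisModule,
    analysis_tools=[]))
# abstract_class is immediately normalized to its dotted import path
assert config.abstract_class_name == full_module_path(AnalysisModule)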
Example 6
    def __init__(self, configuration):

        self.abstract_class_name = full_module_path(configuration['abstract_class']) if \
                                configuration.get('abstract_class') else \
                                configuration.get('abstract_class_name', full_module_path(AnalysisModule))
        self.configuration = remove_keys(configuration,
                                         ['key', 'name', 'description'])
        self.name = configuration.get('name')
        self.description = configuration.get('description')
        self.partner_description = configuration.get('partner_description')
        self.key = configuration.get('key')
        self.analysis_tools = configuration.get('analysis_tools', [])
        super(AnalysisModuleConfiguration, self).__init__()
Example 7
    def create_empty_source_table(self, clazz, source_table_name, source_db_connection, extra_fields={}):
        project = Project(key='sutter_county', id=0)
        db_entity = DbEntity(key=Keys.DB_ABSTRACT_BASE_AGRICULTURE_FEATURE, feature_class_configuration=dict(
            abstract_class=full_module_path(clazz)
        ))
        SourceClass = FeatureClassCreator(project, db_entity, no_ensure=True).dynamic_model_class(base_only=True, schema='public', table=source_table_name)
        # class SourceClass(clazz):
        #     class Meta(clazz.Meta):
        #         db_table = source_table_name
        create_tables_for_dynamic_classes(SourceClass)
        for field in SourceClass._meta.fields[1:]:
            setattr(field, 'null', True)

        drop_table = "DROP TABLE IF EXISTS {final_table} CASCADE;".format(final_table=source_table_name)

        sql, refs = source_db_connection.creation.sql_create_model(SourceClass, color_style())
        add_geometry_fields = '''
            ALTER TABLE {final_table} ADD COLUMN geography_id VARCHAR;
            ALTER TABLE {final_table} ADD COLUMN wkb_geometry GEOMETRY;'''.format(final_table=source_table_name)

        sql = drop_table + sql[0] + add_geometry_fields
        for dbfield, fieldtype in extra_fields.items():
            # Trailing semicolon so that multiple extra fields yield separate,
            # valid ALTER TABLE statements when concatenated
            sql += 'ALTER TABLE {final_table} ADD COLUMN {field} {type};'.format(
                final_table=source_table_name, field=dbfield, type=fieldtype)
        source_db_connection.cursor().execute(sql)
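A hypothetical call site for the method above; the feature class, table name, and connection alias are placeholders, and connections is Django's standard django.db.connections:

from django.db import connections

# 'importer' stands in for whatever object defines create_empty_source_table
importer.create_empty_source_table(
    BaseAgricultureFeature,            # placeholder abstract feature class
    'base_agriculture_source',         # placeholder staging table name
    connections['source_db'],          # placeholder database alias
    extra_fields={'crop_type': 'VARCHAR'})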
Example 8
 def __init__(self, **kwargs):
     """
         Processes the instance or instances given by the kwargs, storing instance_ids, the instance_class, and
         optional instance_keys, which may all be used for celery calls and client messaging
         :param kwargs: Currently only 'instance' is required, which is a single or list of instances.
         Optional arguments are:
             user_id: The user id of the user that instigated the save on the client
             instance_key: The attribute that is the key of the instance
             class_key: For dynamic classes, resolves the key of its scope class (e.g. 'db_entity_key' of Feature)
             class_path: The class path of the instance class. Defaults to the class of the first instance
             model_path: Backup to class_path for dynamic model resolution
             instance_class - Optional. Overrides the class of the instance for use in communicating with the client.
                 This is used when the client only cares about a base class, such as Feature or for DbEntityInterest
                 to be a DbEntity
             client_instance_path - Optional. Property path from the main instance to the instance to show the client
                 (this is only used to convert DbEntityInterest to DbEntity)
     """
     logger.debug("Creating InstanceBundle with kwargs: %s" % kwargs)
     self.user_id = kwargs['user_id']
     self.has_keys = kwargs.get('instance_key') is not None or hasattr(
         kwargs['instance'], 'key')
     # An optional class-scope key, like the DbEntity key of Features
     self.class_key = kwargs.get('class_key')
     self.client_instance_path = kwargs.get('client_instance_path')
     if hasattr(kwargs['instance'], '__iter__'):
         self.ids = map(lambda instance: instance.id, kwargs['instance'])
         self.class_path = full_module_path(kwargs['instance'][0].__class__)
         # Backup for dynamic subclass resolution
         self.model_path = resolvable_model_name(
             kwargs['instance'][0].__class__)
         self.keys = map(lambda instance: kwargs.get('instance_key'),
                         self.instances) if self.has_keys else []
     else:
         instance = kwargs['instance']
         self.ids = [instance.id]
         self.class_path = full_module_path(kwargs['instance'].__class__)
         # Backup for dynamic subclass resolution
         self.model_path = resolvable_model_name(
             kwargs['instance'].__class__)
         self.keys = [
             kwargs['instance_key']
             if kwargs.get('instance_key', None) else self.instances[0].key
         ] if self.has_keys else []
     # Overrides the instance class of the instance for sending to the client
     self.override_class_path = full_module_path(
         kwargs['instance_class']) if kwargs.get('instance_class') else None
Example 9
    def create_result_map(self, related_models=[], map_path_segments={}):
        """
            Given the field_paths of the queryset, returns a ResultMap instance.
            ResultMap.result_fields is a list of field_paths minus specifically omitted ones--
            the parent id and geometry column.
            ResultMap.title_lookup is a lookup from the field_path to a title appropriate for the user.
            The generated title uses '.' in place of '__'
            ResultMap.value_map is a lookup from the field_path to a property path that describes
            how to convert the value to a more human readable form. This is used to convert
            instances to a readable label and dates, etc, to a more readable format.
            :param related_models: pass the related_models represented in the query results so that unneeded
            parent reference fields can be removed from the result fields
            :param map_path_segments: An optional dict that matches segments of the field_paths. The value
            corresponding to the key is the name to convert it to for the title. If the value is None it will
            be eliminated from the path when it is rejoined with '.'
        """

        result_paths = self.model_result_paths(related_models)
        # Get a mapping of each result field_path to its field class path along
        # with the related model of that field, if the field is a ForeignKey or AutoField
        result_field_path_lookup = self.model_result_field_path_field_lookup(
            related_models, True)
        join_models = map(lambda model: full_module_path(model.__base__),
                          related_models)
        return ResultMap(
            # Replace '__' with '_x_'. We can't use __ because it confuses tastypie
            result_fields=map(lambda path: string.replace(path, '__', '_x_'),
                              result_paths),
            # Create a lookup from field name to title
            # The only processing we do to the title is to remove the middle path
            title_lookup=map_to_dict(
                lambda path: [
                    # Replace '__' with '_x_'. We can't use __ because it confuses tastypie
                    string.replace(path, '__', '_x_'),
                    # match each segment to map_path_segments or failing that return the segment
                    # remove segments that map to None
                    '__'.join(
                        compact(
                            map(
                                lambda segment: map_path_segments.get(
                                    segment, segment), path.split('__'))))
                ],
                result_paths),
            field_lookup=map_dict_to_dict(
                lambda field_path, tup: [field_path, tup[0]],
                result_field_path_lookup),
            related_model_lookup=compact_dict(
                map_dict_to_dict(lambda field_path, tup: [field_path, tup[1]],
                                 result_field_path_lookup)),
            join_models=join_models,
        )
Example 10
 def __init__(self, **kwargs):
     """
         Processes the instance or instances given by the kwargs, storing instance_ids, the instance_class, and
         optional instance_keys, which may all be used for celery calls and client messaging
         :param kwargs: Currently only 'instance' is required, which is a single or list of instances.
         Optional arguments are:
             user_id: The user id of the user that instigated the save on the client
             instance_key: The attribute that is the key of the instance
             class_key: For dynamic classes, resolves the key of its scope class (e.g. 'db_entity_key' of Feature)
             class_path: The class path of the instance class. Defaults to the class of the first instance
             model_path: Backup to class_path for dynamic model resolution
             instance_class - Optional. Overrides the class of the instance for use in communicating with the client.
                 This is used when the client only cares about a base class, such as Feature or for DbEntityInterest
                 to be a DbEntity
             client_instance_path - Optional. Property path from the main instance to the instance to show the client
                 (this is only used to convert DbEntityInterest to DbEntity)
     """
     logger.debug("Creating InstanceBundle with kwargs: %s" % kwargs)
     self.user_id = kwargs['user_id']
     self.has_keys = kwargs.get('instance_key') is not None or hasattr(kwargs['instance'], 'key')
     # An optional class-scope key, like the DbEntity key of Features
     self.class_key = kwargs.get('class_key')
     self.client_instance_path = kwargs.get('client_instance_path')
     if hasattr(kwargs['instance'], '__iter__'):
         self.ids = map(lambda instance: instance.id, kwargs['instance'])
         self.class_path = full_module_path(kwargs['instance'][0].__class__)
         # Backup for dynamic subclass resolution
         self.model_path = resolvable_model_name(kwargs['instance'][0].__class__)
         self.keys = map(lambda instance: kwargs.get('instance_key'), self.instances) if self.has_keys else []
     else:
         instance = kwargs['instance']
         self.ids = [instance.id]
         self.class_path = full_module_path(kwargs['instance'].__class__)
         # Backup for dynamic subclass resolution
         self.model_path = resolvable_model_name(kwargs['instance'].__class__)
         self.keys = [kwargs['instance_key'] if kwargs.get('instance_key', None) else self.instances[0].key] if self.has_keys else []
     # Overrides the instance class of the instance for sending to the client
     self.override_class_path = full_module_path(kwargs['instance_class']) if kwargs.get('instance_class') else None
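A usage sketch that mirrors how Example 13 calls post_save_publishing, assuming this constructor belongs to the InstanceBundle class named in the debug log above:

bundle = InstanceBundle(
    instance=db_entity_interest,         # a saved DbEntityInterest
    user_id=user.id,
    instance_class=DbEntity,             # report only the base class to the client
    client_instance_path='db_entity',    # client sees the DbEntity, not the interest
    instance_key=db_entity_interest.db_entity.key)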
Example 11
    def create_result_map(self, related_models=[], map_path_segments={}):
        """
            Given the field_paths of the queryset, returns a ResultMap instance.
            ResultMap.result_fields is a list of field_paths minus specifically omitted ones--
            the parent id and geometry column.
            ResultMap.title_lookup is a lookup from the field_path to a title appropriate for the user.
            The generated title uses '.' in place of '__'
            ResultMap.value_map is a lookup from the field_path to a property path that describes
            how to convert the value to a more human readable form. This is used to convert
            instances to a readable label and dates, etc, to a more readable format.
            :param related_models: pass the related_models represented in the query results so that unneeded
            parent reference fields can be removed from the result fields
            :param map_path_segments: An optional dict that matches segments of the field_paths. The value
            corresponding to the key is the name to convert it to for the title. If the value is None it will
            be eliminated from the path when it is rejoined with '.'
        """

        result_paths = self.model_result_paths(related_models)
        # Get a mapping of each result field_path to its field class path along
        # with the related model of that field, if the field is a ForeignKey or AutoField
        result_field_path_lookup = self.model_result_field_path_field_lookup(related_models, True)
        join_models = map(lambda model: full_module_path(model.__base__), related_models)
        return ResultMap(
            # Replace '__' with '_x_'. We can't use __ because it confuses tastypie
            result_fields=map(lambda path: string.replace(path, '__', '_x_'), result_paths),
            # Create a lookup from field name to title
            # The only processing we do to the title is to remove the middle path
            title_lookup=map_to_dict(
                lambda path: [
                    # Replace '__' with '_x_'. We can't use __ because it confuses tastypie
                    string.replace(path, '__', '_x_'),
                    # match each segment to map_path_segments or failing that return the segment
                    # remove segments that map to None
                    '__'.join(compact(
                        map(
                            lambda segment: map_path_segments.get(segment, segment),
                            path.split('__')
                        )
                    ))
                ],
                result_paths
            ),
            field_lookup=map_dict_to_dict(lambda field_path, tup: [field_path, tup[0]], result_field_path_lookup),
            related_model_lookup=compact_dict(map_dict_to_dict(lambda field_path, tup: [field_path, tup[1]], result_field_path_lookup)),
            join_models=join_models,

        )
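A small illustration of the path handling above; string.replace(path, '__', '_x_') is the Python 2 string-module spelling of path.replace('__', '_x_'):

path = 'feature_behavior__behavior__key'   # hypothetical field path
path.replace('__', '_x_')                  # -> 'feature_behavior_x_behavior_x_key'
# With map_path_segments={'feature_behavior': None}, the title for this path drops
# that segment and rejoins the rest: 'behavior__key'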
Example 12
    def create_empty_source_table(self,
                                  clazz,
                                  source_table_name,
                                  source_db_connection,
                                  extra_fields={}):
        project = Project(key='sutter_county', id=0)
        db_entity = DbEntity(key=Keys.DB_ABSTRACT_BASE_AGRICULTURE_FEATURE,
                             feature_class_configuration=dict(
                                 abstract_class=full_module_path(clazz)))
        SourceClass = FeatureClassCreator(project, db_entity,
                                          no_ensure=True).dynamic_model_class(
                                              base_only=True,
                                              schema='public',
                                              table=source_table_name)
        # class SourceClass(clazz):
        #     class Meta(clazz.Meta):
        #         db_table = source_table_name
        create_tables_for_dynamic_classes(SourceClass)
        for field in SourceClass._meta.fields[1:]:
            setattr(field, 'null', True)

        drop_table = "DROP TABLE IF EXISTS {final_table} CASCADE;".format(
            final_table=source_table_name)

        sql, refs = source_db_connection.creation.sql_create_model(
            SourceClass, color_style())
        add_geometry_fields = '''
            ALTER TABLE {final_table} ADD COLUMN geography_id VARCHAR;
            ALTER TABLE {final_table} ADD COLUMN wkb_geometry GEOMETRY;'''.format(
            final_table=source_table_name)

        sql = drop_table + sql[0] + add_geometry_fields
        for dbfield, fieldtype in extra_fields.items():
            # Trailing semicolon so that multiple extra fields yield separate,
            # valid ALTER TABLE statements when concatenated
            sql += 'ALTER TABLE {final_table} ADD COLUMN {field} {type};'.format(
                final_table=source_table_name, field=dbfield, type=fieldtype)
        source_db_connection.cursor().execute(sql)
Example 13
def on_db_entity_interest_post_save(sender, **kwargs):
    """
        Called after a DbEntityInterest saves, but not when a config_entity is running post_save publishers
        In other words, this is only called after a direct DbEntityInterest save/update.
        This does the same as post_save_config_entity, but starts with the 'post_save_config_entity_db_entities'
        signal to do only DbEntity dependent presentation.
    """
    db_entity_interest = kwargs['instance']
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model(
    ).objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info(
        "Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
        "and user {username}.".format(config_entity=config_entity,
                                      db_entity=db_entity_interest.db_entity,
                                      username=user.username))

    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete the previous DbEntitys with the same key name
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(
            key=db_entity.key).exclude(id=db_entity.id).delete()

        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor or the config_entity (like the project or a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema(
            )
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration
            and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = db_entity.feature_class_configuration or FeatureClassConfiguration(
        )
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(
                OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(
                GeoJsonProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(
                ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(
                DefaultImportProcessor)
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(
        __name__, 'post_save_db_entity_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.
        setup_percent_complete == 0,
        **filter_keys(kwargs, ['created']))
Example 14
def on_db_entity_interest_post_save(sender, **kwargs):
    """
        Called after a DbEntityInterest saves, but not when a config_entity is running post_save publishers
        In other words, this is only called after a direct DbEntityInterest save/update.
        This does the same as post_save_config_entity, but starts with the 'post_save_config_entity_db_entities'
        signal to do only DbEntity dependent presentation.
    """
    db_entity_interest = kwargs['instance']
    config_entity = ConfigEntity._subclassed(db_entity_interest.config_entity)
    db_entity = db_entity_interest.db_entity
    # TODO The default user should be the admin
    user = db_entity.updater if db_entity.updater else get_user_model().objects.get(username=UserGroupKey.SUPERADMIN)
    # post_save_db_entity presentation should always be disabled if we are saving a ConfigEntity
    logger.info("Handler: post_save_db_entity_interest for config_entity {config_entity}, db_entity {db_entity}, "
                "and user {username}.".format(
        config_entity=config_entity,
        db_entity=db_entity_interest.db_entity,
        username=user.username
    ))

    if kwargs.get('created', None):
        db_entity = db_entity_interest.db_entity
        # TODO
        # While we test upload, just delete the previous DbEntitys with the same key name
        # in the ConfigEntity.
        db_entity_interest.config_entity.db_entities.filter(key=db_entity.key).exclude(id=db_entity.id).delete()

        # Make sure the db_entity's schema matches the config_entity's if not set
        # TODO we assume that the schema should match the config_entity, rather than
        # an ancestor or the config_entity (like the project or a scenario). There
        # are many cases where the schema should not be that of the config_entity, so
        # we might want to remove this default and force the saver to set it
        if not db_entity.schema or not db_entity.table:
            db_entity.schema = db_entity.schema or db_entity_interest.config_entity.schema()
            # Always base the table name on the key
            db_entity.table = db_entity.key
            db_entity_interest.db_entity.save()

    if db_entity_interest.config_entity.deleted:
        # Do nothing for deleted config_entities
        return

    # Define the data_importer if not already set
    if not (db_entity.feature_class_configuration and db_entity.feature_class_configuration.data_importer):
        feature_class_configuration = db_entity.feature_class_configuration = db_entity.feature_class_configuration or FeatureClassConfiguration()
        # Choose the correct importer, if any, to set up the feature_class_configuration and features
        if db_entity.origin_instance:
            # Import from the origin_instance. This could be a full copy or from the current layer selection features
            feature_class_configuration.data_importer = full_module_path(OriginDbEntityProcessor)
        elif '.json' in db_entity.url.lower():
            # Import it using the geojson importer
            feature_class_configuration.data_importer = full_module_path(GeoJsonProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif '.zip' in db_entity.url.lower():
            feature_class_configuration.data_importer = full_module_path(ZippedSqlFileProcessor)
            # Indicate that the feature class configuration was generated not fixture based
            feature_class_configuration.generated = True
        elif not db_entity.no_feature_class_configuration:
            feature_class_configuration.data_importer = full_module_path(DefaultImportProcessor)
        previous = DbEntityInterest._no_post_save_publishing
        DbEntityInterest._no_post_save_publishing = True
        db_entity.feature_class_configuration = feature_class_configuration
        db_entity.save()
        DbEntityInterest._no_post_save_publishing = previous

    # Post save presentation section
    # Quit if the publishers were turned off outside this method
    if DbEntityInterest._no_post_save_publishing or db_entity_interest._no_post_save_publishing:
        return

    # Use this to initialize the FeatureBehavior and other stuff that might not be set
    update_or_create_db_entity(config_entity, db_entity)

    starting_signal_path = resolvable_module_attr_path(__name__, 'post_save_db_entity_initial')

    return post_save_publishing(
        starting_signal_path,
        config_entity,
        user,
        instance=db_entity_interest,
        instance_class=DbEntity,
        client_instance_path='db_entity',
        instance_key=db_entity_interest.db_entity.key,
        signal_proportion_lookup=signal_proportion_lookup,
        dependent_signal_paths=dependent_signal_paths,
        signal_prefix='post_save_db_entity',
        # Update the setup_percent_complete instance attribute for new instances
        # of classes with this attribute (currently only DbEntity)
        update_setup_percent_complete=db_entity_interest.db_entity.setup_percent_complete == 0,
        **filter_keys(kwargs, ['created'])
    )
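How this handler gets registered is not shown here; with standard Django signals the wiring would look roughly like the following (the dispatch_uid string is invented):

from django.db.models.signals import post_save

post_save.connect(
    on_db_entity_interest_post_save,
    sender=DbEntityInterest,
    dispatch_uid='on_db_entity_interest_post_save')   # invented uid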