Example #1
    def test_02_style_formatter(self):
        """Style lat/lng SLD on a numeric attribute"""
        msgt(self.test_02_style_formatter.__doc__)

        extra_kwargs = dict(\
                is_point_layer=True,
                current_sld=self.get_input_file('t2_01_current_sld.xml'),
                predefined_id='abc1234')

        msg('Load current SLD')
        sld_formatter = StyleRulesFormatter('starbucks_u_gl_abc1234',
                                            **extra_kwargs)

        # Format rule data
        msg('Apply rule data')
        sld_rule_data = self.get_input_file('t2_02_sld_rules.xml')
        sld_formatter.format_sld_xml(sld_rule_data)

        # Should be no errors
        self.assertEqual(sld_formatter.err_found, False)

        new_sld_xml = sld_formatter.formatted_sld_xml
        self.assertTrue(new_sld_xml is not None)

        expected_sld = self.get_input_file('t2_03_full_sld.xml')
        self.assertEqual(new_sld_xml.strip(), expected_sld.strip())
Example #2
    def get_input_file(self, fname):
        """Convenience method for opening a test input file"""
        full_fname = join(self.test_file_dir, fname)

        msg('open input file: %s' % full_fname)
        assert isfile(full_fname),\
            "Test input file not found: %s" % full_fname

        fh = open(full_fname, 'r')
        content = fh.read()
        fh.close()
        return content
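
For comparison, the same helper can be written with a context manager so the file handle is closed even if the read fails. This is only a sketch, not part of the original test suite.

    def get_input_file(self, fname):
        """Sketch of the same helper using a context manager"""
        full_fname = join(self.test_file_dir, fname)

        msg('open input file: %s' % full_fname)
        assert isfile(full_fname),\
            "Test input file not found: %s" % full_fname

        # 'with' closes the file handle even if read() raises
        with open(full_fname, 'r') as fh:
            return fh.read()
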
def set_default_style_for_latlng_layer(geoserver_catalog, feature_type):
    """
    Create an SLD for a new Lat/Lng Layer

    success returns (True, None)
    fail returns (False, err_msg)
    """
    assert isinstance(geoserver_catalog, Catalog)
    assert isinstance(feature_type, FeatureType)
    LOGGER.info('set_default_style_for_latlng_layer')

    # ----------------------------------------------------
    # Retrieve the layer from the catalog
    # ----------------------------------------------------
    new_layer = geoserver_catalog.get_layer(feature_type.name)
    if not new_layer:
        err_msg = "New layer not found in catalog for name: %s" % feature_type.name
        LOGGER.error(err_msg)
        return (False, err_msg)

    # ----------------------------------------------------
    # Retrieve the SLD for this layer
    # ----------------------------------------------------
    sld = get_sld_for(new_layer)
    if sld is None:
        err_msg = "SLD not found in catalog for new_layer: %s" % new_layer
        LOGGER.error(err_msg)
        return (False, err_msg)

    # ----------------------------------------------------
    # Create a new style name
    # ----------------------------------------------------
    random_ext = get_random_chars(4)
    new_layer_stylename = '%s_%s' % (feature_type.name, random_ext)

    LOGGER.info('new_layer_stylename: %s' % new_layer_stylename)

    # ----------------------------------------------------
    # Add this new style to the catalog
    # ----------------------------------------------------
    try:
        geoserver_catalog.create_style(new_layer_stylename, sld)
        msg('created!')
    except geoserver.catalog.ConflictingDataError as e:
        err_msg = (_('There is already a style in GeoServer named ') + '"%s"' %
                   (new_layer_stylename))
        LOGGER.error(err_msg)
        return False, err_msg
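
A hedged sketch of how this helper might be invoked. The Catalog connection settings mirror those used elsewhere in these examples; the feature_type value (e.g. from an earlier publish_featuretype call) is assumed for illustration.

# Illustrative call only -- 'feature_type' is assumed to come from an
# earlier publish_featuretype() call
from django.conf import settings
from geoserver.catalog import Catalog

catalog = Catalog(settings.GEOSERVER_BASE_URL + "rest",
                  settings.GEOSERVER_CREDENTIALS[0],
                  settings.GEOSERVER_CREDENTIALS[1])

success, err_msg = set_default_style_for_latlng_layer(catalog, feature_type)
if not success:
    LOGGER.error('Failed to set default style: %s', err_msg)
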
Example #4
    def link_layer(self):
        """
        - Retrieve DataversePermissionLink objects related to the dv_username
        - Give the associated WorldMap users editing permissions on this layer
        """
        if self.has_error:
            return False

        # (1) Retrieve the PermissionLink(s)
        #
        filter_params = dict(dataverse_username=self.dv_username,
                             is_active=True)
        perm_links = DataversePermissionLink.objects.filter(**filter_params)
        if len(perm_links) == 0:
            # Usually there are no links; this is OK
            return True

        # (2) Retrieve the Map Layer
        #
        try:
            map_obj = Layer.objects.get(typename=self.layer_name)
        except Layer.DoesNotExist:
            self.add_error("The layer was not found: %s" % self.layer_name)
            return False

        # (3) Give edit permissions to each associated WorldMap user
        #
        for perm_link in perm_links:
            msg('perm_link: %s' % perm_link)
            #  Set to admin level.
            #  Note: The "set_user_level" method clears perms
            #        before setting them--i.e. it doesn't allow dupes
            #
            map_obj.set_user_level(perm_link.worldmap_user, Layer.LEVEL_ADMIN)

        self.was_layer_linked = True
        return True
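
A sketch of how a caller might drive link_layer. The class name DataversePermissionLinker and its constructor are hypothetical, inferred only from the attributes the method uses (dv_username, layer_name, has_error, was_layer_linked).

# Hypothetical driver -- the class name and constructor arguments are
# assumptions made for illustration
linker = DataversePermissionLinker(dv_username='dv_user@harvard.edu',
                                   layer_name='geonode:income_by_tract')
if linker.link_layer():
    msg('WorldMap users linked: %s' % linker.was_layer_linked)
else:
    msg('Linking failed')
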
def set_style_for_new_join_layer(geoserver_catalog, feature_type,
                                 original_layer):
    """
    Make a new SLD for a TableJoin.
    (1) Get the SLD from the original_layer layer
    (2) Add the name of the new layer

    success returns (True, None)
    fail returns (False, err_msg)

    """
    assert isinstance(geoserver_catalog, Catalog)
    assert isinstance(feature_type, FeatureType)
    assert isinstance(original_layer, Layer)

    # Ah...was the original good enough?
    #return set_default_style_for_latlng_layer(geoserver_catalog, feature_type)

    # ----------------------------------------------------
    # Get the SLD from the original layer
    # ----------------------------------------------------
    if original_layer.default_style is None:
        err_msg = ('Failed to retrieve the default_style '
                   'for the original_layer "%s" (id: %s)' %
                   (original_layer.name, original_layer.id))

        LOGGER.error(err_msg)
        return False, err_msg

    #msg('orig layer style name: %s' % original_layer.default_style.name)
    original_sld = original_layer.default_style.sld_body
    if original_sld is None:
        err_msg = 'Failed to retrieve the SLD for the original_layer (id: %s)' % original_layer.id
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Retrieve the new layer from the catalog
    # ----------------------------------------------------
    #msg('feature_type.name: %s' % feature_type.name)

    new_layer = geoserver_catalog.get_layer(feature_type.name)
    if new_layer is None:
        err_msg = ('Failed to retrieve the Layer'
                   ' based on the feature_type'
                   ' name ("%s")' % feature_type.name)
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Create a new style name and
    # use it in the original_sld string
    # ----------------------------------------------------

    new_sld = update_sld_name(original_sld, new_layer.name)

    # ----------------------------------------------------
    # Add this new style to the catalog
    # ----------------------------------------------------
    random_ext = get_random_chars(4)
    new_layer_stylename = '%s_%s' % (feature_type.name, random_ext)

    try:
        geoserver_catalog.create_style(new_layer_stylename, new_sld)
        msg('created!')
    except ConflictingDataError as e:
        err_msg = (_('There is already a style in GeoServer named ') + '"%s"' %
                   (new_layer_stylename))
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Use the new SLD as the layer's default style
    # ----------------------------------------------------
    try:
        new_layer.default_style = geoserver_catalog.get_style(
            new_layer_stylename)
        geoserver_catalog.save(new_layer)
    except Exception as e:
        traceback.print_exc(sys.exc_info())
        err_msg = "Error setting new default style for layer. %s" % (str(e))
        LOGGER.error(err_msg)
        return False, err_msg

    msg('default saved')
    msg('sname: %s' % new_layer.default_style)
    return True, None
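
The update_sld_name helper itself is not shown in these examples; below is a hedged sketch of the idea it implements, i.e. dropping the new layer name into the first <Name> element of the original SLD string.

import re

def update_sld_name(sld_xml, new_name):
    """Sketch only: swap the first <Name>/<sld:Name> value in an SLD string.

    Assumes the first Name element is the one naming the layer/style;
    the real helper may use proper XML parsing instead.
    """
    pattern = r'(<(?:sld:)?Name>)[^<]*(</(?:sld:)?Name>)'
    return re.sub(pattern, r'\g<1>%s\g<2>' % new_name, sld_xml, count=1)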


def create_layer_attributes_from_datatable(datatable, layer):
    """
    When a new Layer has been created from a DataTable,
    Create LayerAttribute objects from the DataTable's DataTableAttribute objects
    """
    if not isinstance(datatable, DataTable):
        return (False, "datatable must be a DataTable object")
    if not isinstance(layer, Layer):
        return (False, "layer must be a Layer object")

    names_of_attrs = ('attribute', 'attribute_label', 'attribute_type',
Example #7
def process_csv_file(data_table,
                     is_dataverse_db,
                     delimiter=",",
                     no_header_row=False,
                     force_char_column=None):
    """
    Transform csv file and add it to the postgres DataStore

    :param data_table: DataTable object to ingest
    :param is_dataverse_db: if True, use the Dataverse datastore
    :param delimiter: csv delimiter
    :param no_header_row: if True, the csv file has no header row
    :param force_char_column: name of a column to force to a character type
    :return:
        success:  (datatable, None)
        err:    (None, error message)
    """
    assert isinstance(data_table, DataTable), \
        "data_table must be a DataTable object"

    # full path to csv file
    #
    csv_filename = data_table.uploaded_file.path

    # Standardize table_name for the DataTable
    #
    if data_table.id is not None:
        # This DataTable already has a unique name
        table_name = data_table.table_name
    else:
        # Get a unique name for the data table
        table_name = os.path.splitext(os.path.basename(csv_filename))[0]
        table_name = get_unique_tablename(table_name)

        data_table.table_name = table_name
        data_table.save()

    # -----------------------------------------------------
    # Transform csv file to csvkit Table
    # -----------------------------------------------------
    csv_file_handle = open(csv_filename, 'rb')

    try:
        csv_table = table.Table.from_csv(\
                                csv_file_handle,
                                name=table_name,
                                no_header_row=no_header_row,
                                delimiter=delimiter)
    except:
        data_table.delete()
        err_msg = str(sys.exc_info()[0])
        LOGGER.error('Failed to convert csv file to table.  Error: %s',
                     err_msg)
        csv_file_handle.close()
        return None, err_msg

    csv_file_handle.close()

    # -----------------------------------------------------
    # If needed, force a column to be character
    # -----------------------------------------------------
    #for col in csv_table:
    #    print 'PRE col: %s, %s' % (col.name, col.type)
    csv_table = force_csv_column_tochar(csv_table,\
                    force_char_column)

    #for col in csv_table:
    #    print 'POST col: %s, %s' % (col.name, col.type)

    # -----------------------------------------------------
    # Create DataTableAttribute objects
    # -----------------------------------------------------
    try:
        # Iterate through header row
        #
        for column in csv_table:
            # Standardize column name
            #
            column.name = standardize_column_name(column.name)

            # Create DataTableAttribute object
            #
            is_visible = True
            if column.name == '_unnamed':
                is_visible = False

            attribute, created = DataTableAttribute.objects.get_or_create(\
                    datatable=data_table,
                    attribute=column.name,
                    attribute_label=column.name,
                    attribute_type=column.type.__name__,
                    display_order=column.order,
                    visible=is_visible)
    except:
        # Deleting the DataTable also deletes related DataTableAttribute objects
        data_table.delete()
        err_msg = 'Failed to create DataTableAttribute objects.  Error: %s' % str(
            sys.exc_info()[0])
        LOGGER.error(err_msg)
        return None, err_msg

    msg('process_csv_file 3')
    # -----------------------------------------------------
    # Generate SQL to create table from csv file
    # -----------------------------------------------------
    try:
        sql_table = sql.make_table(csv_table, table_name)
        create_table_sql = sql.make_create_table_statement(
            sql_table, dialect="postgresql")
        data_table.create_table_sql = create_table_sql
        data_table.save()
    except:
        data_table.delete()
        err_msg = 'Failed to generate SQL to create table from csv file.  Error: %s' % str(
            sys.exc_info()[0])
        LOGGER.error(err_msg)
        return None, err_msg

    msg('process_csv_file 4')

    # -----------------------------------------------------
    # Execute the SQL and Create the Table (No data is loaded)
    # -----------------------------------------------------
    conn = psycopg2.connect(
        get_datastore_connection_string(is_dataverse_db=is_dataverse_db))

    try:
        cur = conn.cursor()
        cur.execute('drop table if exists %s CASCADE;' % table_name)
        cur.execute(create_table_sql)
        conn.commit()
        cur.close()
    except Exception as e:
        traceback.print_exc(sys.exc_info())
        err_msg = "Error Creating table %s: %s" % (table_name, str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    finally:
        conn.close()

    # -----------------------------------------------------
    # Copy csv data to Postgres
    # -----------------------------------------------------
    connection_string = get_datastore_connection_string(\
                                        url_format=True,
                                        is_dataverse_db=is_dataverse_db)
    try:
        engine, metadata = sql.get_connection(connection_string)
    except ImportError:
        err_msg = ("Failed to get SQL connection"
                   " for copying csv data to the database."
                   "\n{0}".format(str(sys.exc_info()[0])))
        LOGGER.error(err_msg)
        return None, err_msg

    # -----------------------------------------------------
    # Iterate through rows and add data
    # -----------------------------------------------------
    conn = engine.connect()
    trans = conn.begin()

    if csv_table.count_rows() > 0:
        insert = sql_table.insert()  # Generate insert statement
        headers = csv_table.headers()  # Pull table headers
        try:
            # create rows of { column : value } dict's
            #
            rows_to_add = [
                dict(zip(headers, row)) for row in csv_table.to_rows()
            ]

            # Add rows
            conn.execute(insert, rows_to_add)
        except:
            # Clean up after ourselves
            conn.close()
            csv_file_handle.close()
            data_table.delete()
            err_msg = "Failed to add csv DATA to table %s.\n%s" %\
                        (table_name, (sys.exc_info()[0]))
            LOGGER.error(err_msg)
            return None, err_msg

    # Commit new rows and close connection
    #
    trans.commit()
    conn.close()
    csv_file_handle.close()

    return data_table, ""
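
A hedged sketch of calling process_csv_file once a DataTable row exists for an uploaded file; the table_name used in the lookup is a placeholder.

# Placeholder lookup -- 'ca_income_2020_ab12' is a hypothetical table_name
data_table = DataTable.objects.get(table_name='ca_income_2020_ab12')

data_table, err_msg = process_csv_file(data_table,
                                       is_dataverse_db=False,
                                       delimiter=',',
                                       no_header_row=False)
if data_table is None:
    LOGGER.error('csv ingest failed: %s', err_msg)
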
Example #8
def setup_join(new_table_owner, table_name, layer_typename,
               table_attribute_name, layer_attribute_name):
    """
    Set up the Table Join in GeoNode
    """
    LOGGER.info('setup_join')

    assert isinstance(new_table_owner,
                      User), "new_table_owner must be a User object"
    assert table_name is not None, "table_name cannot be None"
    assert layer_typename is not None, "layer_typename cannot be None"
    assert table_attribute_name is not None, "table_attribute_name cannot be None"
    assert layer_attribute_name is not None, "layer_attribute_name cannot be None"

    LOGGER.info('setup_join. Step (1): Retrieve the DataTable object')
    try:
        dt = DataTable.objects.get(table_name=table_name)
    except DataTable.DoesNotExist:
        err_msg = 'No DataTable object found for table_name "%s"' % table_name
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (2): Retrieve the Layer object')

    try:
        layer = Layer.objects.get(typename=layer_typename)
    except Layer.DoesNotExist:
        err_msg = 'No Layer object found for layer_typename "%s"' % layer_typename
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (3): Retrieve the DataTableAttribute object')
    try:
        table_attribute = DataTableAttribute.objects.get(\
                                        datatable=dt,
                                        attribute=table_attribute_name)
    except DataTableAttribute.DoesNotExist:
        err_msg = 'No DataTableAttribute object found for table/attribute (%s/%s)' \
                  % (dt,
                     table_attribute_name)
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (4): Retrieve the LayerAttribute object')
    try:
        layer_attribute = LayerAttribute.objects.get(\
                                layer=layer,
                                attribute=layer_attribute_name)
    except LayerAttribute.DoesNotExist:
        err_msg = 'No LayerAttribute object found for layer/attribute (%s/%s)'\
                  % (layer, layer_attribute_name)
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (5): Build SQL statement to create view')

    layer_name = layer.typename.split(':')[1]

    # ------------------------------------------------------------------
    # (5) Check if the layer and the table are in the same store (database)
    # ------------------------------------------------------------------
    if layer.store != dt.tablespace:
        err_msg = 'layer (%s) and tablespace (%s) must be in the same database.'\
            % (layer.store, dt.tablespace)
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------------------
    # (5a) Check if the join columns are compatible
    # ------------------------------------------------------------------
    column_checker = ColumnChecker(\
                                layer_name,
                                layer_attribute.attribute,
                                dt.table_name,
                                table_attribute.attribute)
    (are_cols_compatible,
     err_msg) = column_checker.are_join_columns_compatible()
    if not are_cols_compatible:  # Doesn't look good, return an error message
        return None, err_msg

    # ------------------------------------------------------------------
    # (5b) Create SQL statement for the tablejoin
    # ------------------------------------------------------------------
    view_name = get_unique_viewname(dt.table_name, layer_name)

    # SQL to create the view
    view_sql = ('CREATE VIEW {0}'
                ' AS SELECT {1}.{2}, {3}.*'
                ' FROM {1} INNER JOIN {3}'
                ' ON {1}."{4}" = {3}."{5}";'.format(\
                    view_name,  # 0
                    layer_name, # 1
                    THE_GEOM_LAYER_COLUMN, # 2
                    dt.table_name,  # 3
                    layer_attribute.attribute, # 4
                    table_attribute.attribute)) # 5

    # Materialized view for next version of Postgres
    """view_sql = ('create materialized view {0} as'
                   ' select {1}.the_geom, {2}.*'
                   ' from {1} inner join {2}'
                   ' on {1}."{3}" = {2}."{4}";').format(\
                       view_name,
                       layer_name,
                       dt.table_name,
                       layer_attribute.attribute,
                       table_attribute.attribute)
    """
    LOGGER.info('setup_join. Step (6): Retrieve stats')

    # ------------------------------------------------------------------
    # Retrieve stats
    # ------------------------------------------------------------------
    matched_count_sql = ('select count({0}) from {1} where {1}.{0}'
                         ' in (select "{2}" from {3});').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    unmatched_count_sql = ('select count({0}) from {1} where {1}.{0}'
                           ' not in (select "{2}" from {3});').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    unmatched_list_sql = ('select {0} from {1} where {1}.{0}'
                          ' not in (select "{2}" from {3}) limit 500;').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    # ------------------------------------------------------------------
    # Create a TableJoin object
    # ------------------------------------------------------------------
    LOGGER.info('setup_join. Step (7): Create a TableJoin object')
    tj, created = TableJoin.objects.get_or_create(\
                                source_layer=layer,
                                datatable=dt,
                                table_attribute=table_attribute,
                                layer_attribute=layer_attribute,
                                view_name=view_name,
                                view_sql=view_sql)
    tj.save()
    msgt('table join created! :) %s' % tj.id)

    # ------------------------------------------------------------------
    # Create the View (and double view)
    # ------------------------------------------------------------------
    LOGGER.info('setup_join. Step (8): Create the View (and double view)')

    # Convenience method to drop a view
    #
    drop_view_by_name(view_name)

    try:
        conn = psycopg2.connect(get_datastore_connection_string())
        cur = conn.cursor()

        # Create the new view
        #
        msg('view_sql: %s' % view_sql)
        cur.execute(view_sql)
        #cur.execute(double_view_sql)   # For later version of postgres

        # Record the counts for matched records and
        # add unmatched records to the TableJoin object

        # Matched count
        cur.execute(matched_count_sql)
        tj.matched_records_count = cur.fetchone()[0]

        # Unmatched count
        cur.execute(unmatched_count_sql)
        tj.unmatched_records_count = int(cur.fetchone()[0])

        # Unmatched records list
        if tj.unmatched_records_count > 0:
            cur.execute(unmatched_list_sql)
            tj.unmatched_records_list = ",".join(
                ['%s' % r[0] for r in cur.fetchall()])

        conn.commit()
        cur.close()

        # If no records match, then delete the TableJoin
        #
        if tj.matched_records_count == 0:
            # Delete the table join
            tj.delete()

            # Create an error message, log it, and send it back
            err_msg = ('No records matched.  Make sure that you chose'
                       ' the correct column and that the chosen layer'
                       ' is in the same geographic area.')
            LOGGER.error(err_msg)
            return None, err_msg

    except Exception as ex_obj:
        tj.delete()  # For debugging, comment this out to keep the TableJoin

        traceback.print_exc(sys.exc_info())
        err_msg = "Error Joining table %s to layer %s: %s" % (\
                        table_name,
                        layer_typename,
                        str(ex_obj))
        LOGGER.error(err_msg)

        if err_msg.find('You might need to add explicit type casts.') > -1:
            user_msg = "The chosen column is a different data type than the one expected."
        else:
            user_msg = err_msg

        return None, user_msg

    finally:
        conn.close()

    #--------------------------------------------------
    # Create the Layer in GeoServer from the view
    #--------------------------------------------------
    LOGGER.info(
        'setup_join. Step (9): Create the Layer in GeoServer from the view')
    try:
        LOGGER.info('setup_join. Step (9a): Find the datastore')
        #----------------------------
        # Find the datastore
        #----------------------------
        cat = Catalog(
            settings.GEOSERVER_BASE_URL + "rest",
            settings.GEOSERVER_CREDENTIALS[0],
            settings.GEOSERVER_CREDENTIALS[1])  # "admin", "geoserver")

        workspace = cat.get_workspace('geonode')
        ds_list = cat.get_xml(workspace.datastore_url)
        datastores = [
            datastore_from_index(cat, workspace, n)
            for n in ds_list.findall("dataStore")
        ]
        ds = None

        # Iterate through datastores
        #
        for datastore in datastores:
            #msg ('datastore name:', datastore.name)
            if datastore.name == layer.store:
                ds = datastore

        if ds is None:
            tj.delete()
            err_msg = "Datastore name not found: '%s' (join)" % layer.store
            LOGGER.error(err_msg)
            return None, err_msg

        # Publish the feature
        #
        LOGGER.info('setup_join. Step (9b): Publish the feature type')
        ft = cat.publish_featuretype(view_name, ds, layer.srs, srs=layer.srs)
        #ft = cat.publish_featuretype(double_view_name, ds, layer.srs, srs=layer.srs)

        LOGGER.info('setup_join. Step (9c): Save the feature type')
        cat.save(ft)

    except Exception as e:
        tj.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoServer layer for %s: %s" % (view_name,
                                                                 str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------
    # Set the Layer's default Style
    # ------------------------------------------------------
    sld_success, err_msg = set_style_for_new_join_layer(cat, ft, layer)
    if not sld_success:
        return None, err_msg

    # ------------------------------------------------------------------
    # Create the Layer in GeoNode from the GeoServer Layer
    # ------------------------------------------------------------------
    LOGGER.info(
        'setup_join. Step (10): Create the Layer in GeoNode from the GeoServer Layer'
    )
    try:
        layer_params = {
            "workspace": workspace.name,
            "store": ds.name,
            "storeType": ds.resource_type,
            "typename": "%s:%s" % (workspace.name.encode('utf-8'),
                                   ft.name.encode('utf-8')),
            "title": dt.title or 'No title provided',
            "abstract": dt.abstract or 'No abstract provided',
            "uuid": str(uuid.uuid4()),
            "owner": new_table_owner,
            #"bbox_x0": Decimal(ft.latlon_bbox[0]),
            #"bbox_x1": Decimal(ft.latlon_bbox[1]),
            #"bbox_y0": Decimal(ft.latlon_bbox[2]),
            #"bbox_y1": Decimal(ft.latlon_bbox[3])
        }

        layer, created = Layer.objects.get_or_create(name=view_name,
                                                     defaults=layer_params)

        # Set default permissions (public)
        layer.set_default_permissions()
        #set_attributes(layer, overwrite=True)

        tj.join_layer = layer
        tj.save()
    except Exception as e:
        tj.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoNode layer for %s: %s" % (view_name,
                                                               str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------------------
    # Create LayerAttributes for the new Layer (not done in GeoNode 2.x)
    # ------------------------------------------------------------------
    LOGGER.info(
        'setup_join. Step (11): Create Layer Attributes from the Datatable')
    (attributes_created,
     err_msg) = create_layer_attributes_from_datatable(dt, layer)
    if not attributes_created:
        LOGGER.error(err_msg)
        tj.delete()  # Delete the table join object
        return None, "Sorry there was an error creating the Datatable (s11)"

    return tj, ""
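
A sketch of the join call and of the shape of the view SQL built in step (5b); the owner, table, layer, and column names are placeholders.

# Placeholder arguments for illustration only
new_owner = User.objects.get(username='wm_admin')  # hypothetical user
tj, err_msg = setup_join(new_owner,
                         table_name='ca_income_2020_ab12',
                         layer_typename='geonode:ca_tracts',
                         table_attribute_name='tract_id',
                         layer_attribute_name='GEOID')
if tj is None:
    LOGGER.error('Join failed: %s', err_msg)

# With those placeholders (and assuming THE_GEOM_LAYER_COLUMN == 'the_geom'),
# step (5b) would produce view SQL shaped like:
#
#   CREATE VIEW <unique_view_name>
#     AS SELECT ca_tracts.the_geom, ca_income_2020_ab12.*
#     FROM ca_tracts INNER JOIN ca_income_2020_ab12
#     ON ca_tracts."GEOID" = ca_income_2020_ab12."tract_id";
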
Example #9
def view_upload_table_and_join_layer(request):
    """
    Upload a tabular file originating from Dataverse/Geoconnect and join it to a layer.

    - Check if the Dataverse Metadata is valid
        - No, error
    - Does a layer already exist for this file?
        - If yes, return it
    - Check if the Table Join POST data is valid
        - No, error
    - Attempt to Join the Table
        - Fail, error
    - Create a DataverseInfo object and attach it to the layer
    """
    if request.method != 'POST':
        return HttpResponse("Invalid Request", mimetype="text/plain", status=500)

    post_data_dict = request.POST.dict()

    msg('step 1')
    LOGGER.info(('Upload tabular file from DV/Geoconnect and join to a layer.'
                 '\nStep 1:  Is the Dataverse Layer Metadata valid?'))
    # -------------------------------------------
    # Is the Dataverse Layer Metadata valid?
    # -------------------------------------------
    form_dv_metadata = DataverseLayerMetadataValidationForm(post_data_dict)
    if not form_dv_metadata.is_valid():
        LOGGER.error(('check_for_existing_layer. failed validation'
                      '\nErrors: %s'), form_dv_metadata.errors)
        #raise forms.ValidationError('Failed to validate dataverse_info data')
        json_msg = MessageHelperJSON.get_json_fail_msg(\
                            'Failed to validate dataverse_info data',
                            data_dict=form_dv_metadata.errors)

        return HttpResponse(json_msg, mimetype="application/json", status=400)


    # -------------------------------------------
    # Does a layer already exist for this DataverseInfo?
    # -------------------------------------------
    msg('step 2')
    LOGGER.info('Step 2:  Does a layer already exist for this DataverseInfo?')

    existing_dv_layer_metadata = \
        retrieve_dataverse_layer_metadata_by_kwargs_installation_and_file_id(**post_data_dict)

    #-----------------------------------------------------------
    #   A layer was found!
    #   Update the DataverseLayerMetadata and return the layer.
    #-----------------------------------------------------------
    if existing_dv_layer_metadata:
        msg("Found existing layer!")
        LOGGER.info("Found existing layer!")


        metadata_dict = get_layer_and_join_metadata(existing_dv_layer_metadata.map_layer)
        json_msg = MessageHelperJSON.get_json_msg(success=True,\
                        msg='A layer already exists for the join.',\
                         data_dict=metadata_dict)
        return HttpResponse(status=200, content=json_msg, content_type="application/json")


    # -------------------------------------------
    # Is the Upload and join info valid?
    # -------------------------------------------
    msg('step 3')
    LOGGER.info("Step 3: Is the Upload and join info valid?")

    form_upload_and_join = TableUploadAndJoinRequestForm(post_data_dict, request.FILES)
    if not form_upload_and_join.is_valid():

        json_msg = MessageHelperJSON.get_json_fail_msg(\
                        "Invalid Data for Upload and Join: %s" % form_upload_and_join.errors)

        return HttpResponse(status=400, content=json_msg, content_type="application/json")

    # ----------------------------------------------------
    # Does the DataTable join column need to be char?
    #  - Check if the existing target join column is char?
    # ----------------------------------------------------
    (check_worked, force_char_convert) = ColumnHelper.is_char_column_conversion_recommended(\
                form_upload_and_join.cleaned_data['layer_name'],\
                form_upload_and_join.cleaned_data['layer_attribute'])

    if not check_worked:
        err_msg = 'Could not check the target column type'
        LOGGER.error(err_msg)
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg)
        return HttpResponse(json_msg, mimetype="application/json", status=400)

    if force_char_convert:  # It is, make sure the Datatable col will be char
        force_char_column = post_data_dict['table_attribute']
    else:                   # Nope, let the datatable col stand, good or bad
        force_char_column = None

    # ----------------------------------------------------
    # Attempt to upload the table
    # ----------------------------------------------------
    LOGGER.info("Step 4: Attempt to UPLOAD the table")
    (success, data_table_or_error) = attempt_datatable_upload_from_request_params(request,\
                                        request.user,\
                                        is_dataverse_db=True,\
                                        force_char_column=force_char_column)
    if not success:
        json_msg = MessageHelperJSON.get_json_fail_msg(data_table_or_error)
        return HttpResponse(json_msg, mimetype="application/json", status=400)

    # ----------------------------------------------------
    # Attempt to join the table
    # ----------------------------------------------------
    LOGGER.info("Step 5: Prepare to JOIN the table")

    new_datatable = data_table_or_error
    join_props = request.POST.copy()

    # Update attributes for the join, including the name of the new DataTable
    #
    join_props['table_name'] = data_table_or_error.table_name
    original_table_attribute = join_props['table_attribute']
    sanitized_table_attribute = standardize_column_name(original_table_attribute)
    join_props['table_attribute'] = sanitized_table_attribute

    # ---------------------------------
    # Make the join!
    # ---------------------------------
    LOGGER.info("Step 6: Make the JOIN to the table")

    (success, tablejoin_obj_or_err_msg) = \
        attempt_tablejoin_from_request_params(join_props, request.user)

    if not success: # FAILED!
        new_datatable.delete()  # remove the datatable

        LOGGER.error('Failed join!: %s', tablejoin_obj_or_err_msg)
        json_msg = MessageHelperJSON.get_json_fail_msg(tablejoin_obj_or_err_msg)
        return HttpResponse(json_msg, mimetype="application/json", status=400)

    # SUCCESS!
    #
    new_tablejoin = tablejoin_obj_or_err_msg
    new_layer = new_tablejoin.join_layer

    # ----------------------------------------------------
    #  Make a new DataverseInfo object and attach it to the Layer
    # ----------------------------------------------------
    LOGGER.info('Step 7: Make a new DataverseInfo object and attach it to the Layer')

    (object_created, err_msg_or_dv_metadata) = create_dataverse_metadata(\
                                                new_layer, post_data_dict)
    if object_created is False:
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg_or_dv_metadata)
        return HttpResponse(status=400, content=json_msg, content_type="application/json")


    # ------------------------------
    # We made it! Send back a JSON response!
    # ------------------------------
    LOGGER.info('Step 8: We made it! Send back a JSON response!')

    layer_metadata_obj = LayerMetadata(new_layer)

    response_params = layer_metadata_obj.get_metadata_dict()
    response_params.update(TableJoinResultForm.get_cleaned_data_from_table_join(new_tablejoin))


    # Return the response!
    json_msg = MessageHelperJSON.get_json_msg(success=True, msg='worked', data_dict=response_params)
    msg('step 8b')
    LOGGER.info('Step 8a: json_msg: %s', json_msg)

    msg('json_msg: %s' % json_msg)
    return HttpResponse(status=200, content=json_msg, content_type="application/json")
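
A hedged sketch of the multipart POST this view expects. The endpoint URL, the file field name, and the Dataverse metadata fields are assumptions; layer_name, layer_attribute, and table_attribute are the names the view reads above.

# Hypothetical client call -- URL and file field name are assumptions
import requests

payload = {
    'layer_name': 'geonode:ca_tracts',   # join target layer (placeholder)
    'layer_attribute': 'GEOID',          # join column on the layer
    'table_attribute': 'tract_id',       # join column in the uploaded table
    # ...plus the dataverse_info fields required by
    # DataverseLayerMetadataValidationForm
}
files = {'uploaded_file': open('ca_income_2020.csv', 'rb')}

resp = requests.post('https://worldmap.example.edu/datatables/api/upload_join',
                     data=payload, files=files)
print(resp.status_code, resp.json())
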
Example #10
def view_upload_lat_lng_table(request):
    """Upload a tabular file with lat/lng columns"""
    msg('view_upload_lat_lng_table 0')
    # -------------------------------------------
    # Is it a POST?
    # -------------------------------------------
    if not request.method == 'POST':
        json_msg = MessageHelperJSON.get_json_fail_msg("Unsupported Method")
        return HttpResponse(json_msg, mimetype="application/json", status=500)

    post_data_dict = request.POST.dict()

    LOGGER.info('Upload lat/lng file from DV/Geoconnect and join to a layer.')
    LOGGER.info('Step 1:  Is the Dataverse Layer Metadata valid?')

    msg('view_upload_lat_lng_table 1')
    # -------------------------------------------
    # (1) Is the Dataverse Layer Metadata valid?
    # -------------------------------------------
    f = DataverseLayerMetadataValidationForm(post_data_dict)
    if not f.is_valid():
        LOGGER.error('check_for_existing_layer. failed validation')
        LOGGER.error('Errors: %s', f.errors)
        json_msg = MessageHelperJSON.get_json_fail_msg('Failed to validate Dataverse metadata',
                                                       data_dict=f.errors)

        return HttpResponse(json_msg, mimetype="application/json", status=400)

    # -------------------------------------------
    # (1b) Does a layer already exist for this DataverseInfo?
    # -------------------------------------------
    msg('step 2')
    LOGGER.info('Step 2:  Does a layer already exist for this DataverseInfo?')

    existing_dv_layer_metadata =\
     retrieve_dataverse_layer_metadata_by_kwargs_installation_and_file_id(**post_data_dict)

    #-----------------------------------------------------------
    #   A layer was found!
    #   Update the DataverseLayerMetadata and return the layer.
    #-----------------------------------------------------------
    if existing_dv_layer_metadata:
        msg("Found existing layer!")
        LOGGER.info("Found existing layer!")

        #update_the_layer_metadata(existing_dv_layer_metadata, post_data_dict)

        metadata_dict = get_layer_and_join_metadata(existing_dv_layer_metadata.map_layer)
        json_msg = MessageHelperJSON.get_json_msg(success=True,\
                        msg='A layer already exists for the join.',\
                         data_dict=metadata_dict)
        return HttpResponse(status=200, content=json_msg, content_type="application/json")


    # -------------------------------------------
    # (2) Is the Lat/Lng request data valid? Check with the DataTableUploadFormLatLng
    # -------------------------------------------
    LOGGER.info('Step 2:  Is Lat/Lng data valid? Check via DataTableUploadFormLatLng')
    f = DataTableUploadFormLatLng(request.POST, request.FILES)
    if not f.is_valid():
        err_msg = "Invalid data in request: %s" % format_errors_as_text(f)
        LOGGER.error("datatable_upload_lat_lon_api. %s", err_msg)
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg, data_dict=f.errors)
        return HttpResponse(json_msg, mimetype="application/json", status=400)

    #   Set the new table/layer owner
    #
    new_table_owner = request.user


    # --------------------------------------
    # (3) Datatable Upload
    # --------------------------------------
    LOGGER.info('Step 3:  Datatable Upload')
    try:
        # Upload Lat/Lng Datatables to the Monthly table--not the Dataverse table
        #
        resp = datatable_upload_api(request, is_dataverse_db=False)
        upload_return_dict = json.loads(resp.content)
        if upload_return_dict.get('success', None) is not True:
            return HttpResponse(json.dumps(upload_return_dict),
                                mimetype='application/json',
                                status=400)
        else:
            pass # keep going
    except:
        err_msg = 'Uncaught error ingesting Data Table'
        LOGGER.error(err_msg)
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg)
        traceback.print_exc(sys.exc_info())
        return HttpResponse(json_msg, mimetype='application/json', status=400)

    # --------------------------------------
    # (4) Create layer using the Lat/Lng columns
    # --------------------------------------
    LOGGER.info('Step 4: Create layer using the Lat/Lng columns')
    try:
        success, latlng_record_or_err_msg = create_point_col_from_lat_lon(\
                        new_table_owner,
                        upload_return_dict['data']['datatable_name'],
                        f.cleaned_data['lat_attribute'],
                        f.cleaned_data['lng_attribute'])


        if not success:
            LOGGER.error('Failed to create layer for map lat/lng table: %s',\
                         latlng_record_or_err_msg)

            # FAILED
            #
            json_msg = MessageHelperJSON.get_json_fail_msg(latlng_record_or_err_msg)
            return HttpResponse(json_msg, mimetype="application/json", status=400)
        else:
            # -------------------------------------------
            # Add DataverseLayerMetadata object
            # -------------------------------------------
            (object_created, err_msg_or_dv_metadata) = create_dataverse_metadata(\
                                        latlng_record_or_err_msg.layer,\
                                        post_data_dict)

            # -------------------------------------------
            # Failed to create DataverseLayerMetadata
            # -------------------------------------------
            if object_created is False:
                # delete LatLngTableMappingRecord
                latlng_record_or_err_msg.delete()

                json_msg = MessageHelperJSON.get_json_fail_msg(err_msg_or_dv_metadata)
                return HttpResponse(status=400, content=json_msg, content_type="application/json")

            # -------------------------------------------
            # Success!  Send user response
            # -------------------------------------------

            # Add DV info
            response_params = get_layer_metadata_dict(latlng_record_or_err_msg.layer,\
                                latlng_record_or_err_msg.as_json())

            json_msg = MessageHelperJSON.get_json_success_msg(\
                            msg='New layer created',\
                            data_dict=response_params)
            return HttpResponse(json_msg, mimetype="application/json", status=200)
    except:
        traceback.print_exc(sys.exc_info())
        err_msg = 'Uncaught error ingesting Data Table'
        json_msg = MessageHelperJSON.get_json_fail_msg(err_msg)
        LOGGER.error(err_msg)

        return HttpResponse(json_msg, mimetype="application/json", status=400)
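
A sketch of the lat/lng mapping call at the core of step (4); the arguments are placeholders, and the signature mirrors the call shown above.

# Placeholder arguments; the signature mirrors the call in step (4) above
success, latlng_record_or_err = create_point_col_from_lat_lon(
    request.user,              # new table/layer owner
    'coffee_shops_tbl_x9',     # datatable_name (hypothetical)
    'latitude',                # lat_attribute column
    'longitude')               # lng_attribute column

if success:
    new_layer = latlng_record_or_err.layer   # LatLngTableMappingRecord
else:
    LOGGER.error('lat/lng mapping failed: %s', latlng_record_or_err)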