Example #1
0
def import_and_extract_shapefile(feature_type,
                                 file_path,
                                 qgis_version=2,
                                 output_prefix='',
                                 inasafe_version=None,
                                 lang='en'):
    """Convert the OSM xml file to a shapefile.

    This is a multi-step process:
        * Create a temporary PostGIS database
        * Load the OSM dataset into PostGIS with osm2pgsql and our custom
          style file.
        * Save the data out again to a shapefile
        * Zip the shapefile ready for the user to download

    :param feature_type: The feature to extract.
    :type feature_type: str

    :param file_path: Path to the OSM file.
    :type file_path: str

    :param qgis_version: The QGIS version. Currently 1 and 2 are accepted;
        defaults to 2. A different qml style file will be returned depending
        on the version.
    :type qgis_version: int

    :param output_prefix: Base name for the shapefile. Defaults to ''
        which will result in an output file of feature_type + '.shp'. Adding a
        prefix of e.g. 'test-' would result in a downloaded file name of
        'test-buildings.shp'. Allowed characters are [a-zA-Z-_0-9].
    :type output_prefix: str

    :param inasafe_version: The InaSAFE version, to get correct metadata.
    :type inasafe_version: str

    :param lang: The desired language for the labels in the legend.
        Example: 'en', 'fr', etc. Default is 'en'.
    :type lang: str

    :returns: Path to zipfile that was created.
    :rtype: str

    """
    if not check_string(output_prefix):
        error = 'Invalid output prefix: %s' % output_prefix
        LOGGER.exception(error)
        raise Exception(error)

    output_prefix += feature_type

    work_dir = temp_dir(sub_dir=feature_type)
    directory_name = unique_filename(dir=work_dir)
    db_name = os.path.basename(directory_name)

    import_osm_file(db_name, feature_type, file_path)
    zip_file = extract_shapefile(feature_type, db_name, directory_name,
                                 qgis_version, output_prefix, inasafe_version,
                                 lang)
    drop_database(db_name)
    return zip_file
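
A minimal usage sketch for the function above. The import path and file paths are hypothetical, and the call assumes PostGIS, osm2pgsql and the project's helper functions are available as the docstring describes.

from reporter.osm import import_and_extract_shapefile  # hypothetical import path

# Extract buildings from a local OSM XML file; the prefix yields
# 'test-buildings.shp' inside the returned zip, per the docstring.
zip_path = import_and_extract_shapefile(
    feature_type='buildings',
    file_path='/tmp/jakarta.osm',
    qgis_version=2,
    output_prefix='test-',
    lang='en')
print(zip_path)
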
Example #2
0
 def test_home(self):
     """Test the home page works."""
     try:
         return self.app.post('/', data=dict(), follow_redirects=True)
     except Exception as e:
         LOGGER.exception('Basic front page load failed.')
         raise e
Example #3
0
 def test_home(self):
     """Test the home page works."""
     try:
         result = self.app.get('/', data=dict(), follow_redirects=True)
         code = result.status_code
         self.assertEqual(code, 200)
     except Exception as e:
         LOGGER.exception('Basic front page load failed.')
         raise e
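
For context, a sketch of the kind of test fixture that would provide self.app in these tests, assuming the application under test is a Flask app (the import path is hypothetical):

import unittest

from reporter import app  # hypothetical import of the Flask application


class FrontPageTestCase(unittest.TestCase):

    def setUp(self):
        """Create a Flask test client for the views under test."""
        app.config['TESTING'] = True
        self.app = app.test_client()

    def test_home(self):
        """Test the home page works."""
        result = self.app.get('/', follow_redirects=True)
        self.assertEqual(result.status_code, 200)
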
Example #4
0
 def request_data(self, url_request):
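     """Fetch the content at url_request and return it as text.

     :param url_request: The URL to request.
     :type url_request: str

     :returns: The decoded response body, or the HTTP error message if the
         request fails with an HTTPError.
     :rtype: str
     """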
     web_request = Request(url_request, None, self.headers)
     try:
         url_handle = urlopen(web_request, timeout=60)
         data = url_handle.read().decode('utf-8')
         return data
     except HTTPError as e:
         LOGGER.exception('Error with request')
         return e.msg
Example #5
0
 def test_home(self):
     """Test the home page works."""
     try:
         result = self.app.get(
             '/', data=dict(), follow_redirects=True)
         code = result.status_code
         self.assertEqual(code, 200)
     except Exception as e:
         LOGGER.exception('Basic front page load failed.')
         raise e
Example #6
0
def fetch_osm_with_post(file_path, url_path, post_data, returns_format='json'):
    """Fetch an osm map and store locally.

    :param url_path: The URL to which the Overpass query is posted.
    :type url_path: str

    :param file_path: The path on the filesystem to which the file should
        be saved.
    :type file_path: str

    :param post_data: Overpass data
    :type post_data: str

    :param returns_format: Format of the response; either 'json' or 'xml'.
    :type returns_format: str

    :returns: The path to the downloaded file.

    """
    headers = {'User-Agent': 'HotOSM'}
    try:
        data = requests.post(url=url_path,
                             data={'data': post_data},
                             headers=headers)

        if returns_format != 'xml':
            regex = '<remark> runtime error:'
            if re.search(regex, data.text):
                raise OverpassTimeoutException

            regex = '(elements|meta)'
            if not re.search(regex, data.text):
                raise OverpassDoesNotReturnData

        if os.path.exists(file_path):
            os.remove(file_path)

        file_handle = open(file_path, 'wb')
        file_handle.write(data.text.encode('utf-8'))
        file_handle.close()
    except HTTPError as e:
        if e.code == 400:
            LOGGER.exception('Bad request to Overpass')
            raise OverpassBadRequestException
        elif e.code == 419:
            raise OverpassConcurrentRequestException

        LOGGER.exception('Error with Overpass')
        raise e
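
A hedged usage sketch for fetch_osm_with_post. The Overpass endpoint and query below are illustrative values, not ones taken from the project's configuration:

# Hypothetical endpoint and Overpass QL query.
overpass_url = 'https://overpass-api.de/api/interpreter'
query = '[out:json];node["amenity"="school"](-6.3,106.7,-6.1,106.9);out meta;'

# Writes the JSON response to /tmp/schools.json, or raises one of the
# Overpass exceptions defined by the project.
fetch_osm_with_post(
    file_path='/tmp/schools.json',
    url_path=overpass_url,
    post_data=query,
    returns_format='json')
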
Example #7
0
def fetch_osm(file_path, url_path):
    """Fetch an osm map and store locally.


    :param url_path: The path (relative to the ftp root) from which the
        file should be retrieved.
    :type url_path: str

    :param file_path: The path on the filesystem to which the file should
        be saved.
    :type file_path: str

    :returns: The path to the downloaded file.

    """
    LOGGER.debug('Getting URL: %s', url_path)
    headers = {'User-Agent': 'InaSAFE'}
    web_request = Request(url_path, None, headers)
    try:
        url_handle = urlopen(web_request, timeout=60)
        data = url_handle.read().decode('utf-8')
        regex = '<remark> runtime error:'
        if re.search(regex, data):
            raise OverpassTimeoutException

        regex = '(elements|meta)'
        if not re.search(regex, data):
            raise OverpassDoesNotReturnData

        if os.path.exists(file_path):
            os.remove(file_path)

        file_handle = open(file_path, 'wb')
        file_handle.write(data.encode('utf-8'))
        file_handle.close()
    except HTTPError as e:
        if e.code == 400:
            LOGGER.exception('Bad request to Overpass')
            raise OverpassBadRequestException
        elif e.code == 419:
            raise OverpassConcurrentRequestException

        LOGGER.exception('Error with Overpass')
        raise e
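
The two regular-expression checks in fetch_osm are the core of its Overpass error handling; a small standalone sketch of the same pattern (the exception classes here are placeholders standing in for the project's own):

import re


class OverpassTimeoutException(Exception):
    """Placeholder for the project's timeout exception."""


class OverpassDoesNotReturnData(Exception):
    """Placeholder for the project's empty-response exception."""


def check_overpass_response(data):
    """Mirror the response checks used in fetch_osm above."""
    if re.search('<remark> runtime error:', data):
        raise OverpassTimeoutException
    if not re.search('(elements|meta)', data):
        raise OverpassDoesNotReturnData
    return data
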
Example #8
0
def user_status():
    """Get nodes for user as a json doc.

        .. note:: User from reporter.js

        To use e.g.:

        http://localhost:5000/user?bbox=20.431909561157227,
        -34.02849543118406,20.45207977294922,-34.02227106658948&
        obj=building&username=timlinux
    """
    username = request.args.get('username')
    bbox = request.args.get('bbox')

    try:
        coordinates = split_bbox(bbox)
    except ValueError:
        error = "Invalid bbox"
        coordinates = split_bbox(config.BBOX)
        LOGGER.exception(error + str(coordinates))
    else:
        try:
            file_handle = get_osm_file(coordinates)
        except OverpassTimeoutException:
            error = "Bad request. Maybe the bbox is too big!"
            LOGGER.exception(error + str(coordinates))
        except OverpassConcurrentRequestException:
            error = 'Please try again later, another query is running.'
            LOGGER.exception(error + str(coordinates))
        except OverpassBadRequestException:
            error = "Bad request."
            LOGGER.exception(error + str(coordinates))
        except URLError:
            error = "Bad request."
            LOGGER.exception(error + str(coordinates))
        else:
            node_data = osm_nodes_by_user(file_handle, username)
            return jsonify(d=node_data)
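
The bbox query parameter is the comma-separated string shown in the docstring; a purely illustrative sketch of what a parser such as split_bbox might do (this is not the project's actual implementation):

def split_bbox_example(bbox):
    """Parse 'SW_lng,SW_lat,NE_lng,NE_lat' into floats, raising ValueError
    on malformed input, which is the error user_status catches above."""
    values = [float(token) for token in bbox.split(',')]
    if len(values) != 4:
        raise ValueError('bbox must contain exactly four values')
    return {
        'SW_lng': values[0],
        'SW_lat': values[1],
        'NE_lng': values[2],
        'NE_lat': values[3],
    }
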
Example #9
0
def user_status():
    """Get nodes for user as a json doc.

        .. note:: User from reporter.js

        To use e.g.:

        http://localhost:5000/user?bbox=20.431909561157227,
        -34.02849543118406,20.45207977294922,-34.02227106658948&
        obj=building&username=timlinux
    """
    username = request.args.get('username')
    bbox = request.args.get('bbox')

    try:
        coordinates = split_bbox(bbox)
    except ValueError:
        error = "Invalid bbox"
        coordinates = split_bbox(config.BBOX)
        LOGGER.exception(error + str(coordinates))
    else:
        try:
            file_handle = get_osm_file(coordinates)
        except OverpassTimeoutException:
            error = "Bad request. Maybe the bbox is too big!"
            LOGGER.exception(error + str(coordinates))
        except OverpassConcurrentRequestException:
            error = 'Please try again later, another query is running.'
            LOGGER.exception(error + str(coordinates))
        except OverpassBadRequestException:
            error = "Bad request."
            LOGGER.exception(error + str(coordinates))
        except URLError:
            error = "Bad request."
            LOGGER.exception(error + str(coordinates))
        else:
            node_data = osm_nodes_by_user(file_handle, username)
            return jsonify(d=node_data)
Example #10
0
def fetch_osm(file_path, url_path):
    """Fetch an osm map and store locally.


    :param url_path: The path (relative to the ftp root) from which the
        file should be retrieved.
    :type url_path: str

    :param file_path: The path on the filesystem to which the file should
        be saved.
    :type file_path: str

    :returns: The path to the downloaded file.

    """
    LOGGER.debug('Getting URL: %s', url_path)
    headers = {'User-Agent': 'InaSAFE'}
    web_request = Request(url_path, None, headers)
    try:
        url_handle = urlopen(web_request, timeout=60)
        data = url_handle.read().decode('utf-8')
        regex = '<remark> runtime error:'
        if re.search(regex, data):
            raise OverpassTimeoutException

        file_handle = open(file_path, 'wb')
        file_handle.write(data.encode('utf-8'))
        file_handle.close()
    except HTTPError as e:
        if e.code == 400:
            LOGGER.exception('Bad request to Overpass')
            raise OverpassBadRequestException
        elif e.code == 419:
            raise OverpassConcurrentRequestException

        LOGGER.exception('Error with Overpass')
        raise e
Example #11
0
def osm_object_contributions(osm_file, tag_name):
    """Compile a summary of user contributions for the selected osm data type.

    :param osm_file: A file object reading from a .osm file.
    :type osm_file: file, FileIO

    :param tag_name: The tag name we want to filter on.
    :type tag_name: str

    :returns: A list of dicts where items in the list are sorted from highest
        contributor (based on number of ways) down to lowest. Each element
        in the list is a dict in the form: {
        'name': <user name>,
        'ways': <way count>,
        'nodes': <node count>,
        'timeline': <timelinedict>,
        'best': <most ways in a single day>,
        'worst': <least ways in single day>,
        'average': <average ways across active days>,
        'crew': <bool> }
        where crew is used to designate users who are part of an active
        data gathering campaign.
        The timeline dict will contain a collection of dates and
        the total number of ways created on that date e.g.
        {
            u'2010-12-09': 10,
            u'2012-07-10': 14
        }
    :rtype: list
    """
    parser = OsmParser(tag_name=tag_name)
    try:
        xml.sax.parse(osm_file, parser)
    except xml.sax.SAXParseException:
        LOGGER.exception('Failed to parse OSM xml.')
        raise

    way_count_dict = parser.wayCountDict
    node_count_dict = parser.nodeCountDict
    timelines = parser.userDayCountDict

    # Convert to a list of dicts so we can sort it.
    crew_list = config.CREW
    user_list = []

    for key, value in way_count_dict.items():
        crew_flag = False
        if key in crew_list:
            crew_flag = True
        start_date, end_date = date_range(timelines[key])
        start_date = time.strftime('%d-%m-%Y', start_date.timetuple())
        end_date = time.strftime('%d-%m-%Y', end_date.timetuple())
        user_timeline = timelines[key]
        record = {
            'name': key,
            'ways': value,
            'nodes': node_count_dict[key],
            'timeline': interpolated_timeline(user_timeline),
            'start': start_date,
            'end': end_date,
            'activeDays': len(user_timeline),
            'best': best_active_day(user_timeline),
            'worst': worst_active_day(user_timeline),
            'average': average_for_active_days(user_timeline),
            'crew': crew_flag
        }
        user_list.append(record)

    # Sort it
    sorted_user_list = sorted(
        user_list, key=lambda d: (
            -d['ways'],
            d['nodes'],
            d['name'],
            d['timeline'],
            d['start'],
            d['end'],
            d['activeDays'],
            d['best'],
            d['worst'],
            d['average'],
            d['crew']))
    return sorted_user_list
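
A self-contained illustration of the sort above: contributors are ranked by descending way count, with the remaining fields acting as tie-breakers (the sample records are made up and reduced to three keys):

records = [
    {'name': 'bob', 'ways': 5, 'nodes': 3},
    {'name': 'alice', 'ways': 12, 'nodes': 1},
    {'name': 'carol', 'ways': 5, 'nodes': 1},
]
# Negating 'ways' puts the highest contributor first; ties fall back to
# node count and then name, as in sorted_user_list above.
ranked = sorted(records, key=lambda d: (-d['ways'], d['nodes'], d['name']))
print([r['name'] for r in ranked])  # ['alice', 'carol', 'bob']
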
Example #12
0
def osm_object_contributions(osm_file,
                             tag_name,
                             date_start=None,
                             date_end=None):
    """Compile a summary of user contributions for the selected osm data type.

    :param osm_file: A file object reading from a .osm file.
    :type osm_file: file, FileIO

    :param tag_name: The tag name we want to filter on.
    :type tag_name: str

    :param date_start: The start date we want to filter by.
    :type date_start: float

    :param date_end: The end date we want to filter by.
    :type date_end: float

    :returns: A list of dicts where items in the list are sorted from highest
        contributor (based on number of ways) down to lowest. Each element
        in the list is a dict in the form: {
        'name': <user name>,
        'ways': <way count>,
        'nodes': <node count>,
        'timeline': <timelinedict>,
        'best': <most ways in a single day>,
        'worst': <least ways in single day>,
        'average': <average ways across active days>,
        'crew': <bool> }
        where crew is used to designate users who are part of an active
        data gathering campaign.
        The timeline dict will contain a collection of dates and
        the total number of ways created on that date e.g.
        {
            u'2010-12-09': 10,
            u'2012-07-10': 14
        }
    :rtype: list
    """
    parser = OsmParser(start_date=date_start, end_date=date_end)
    try:
        xml.sax.parse(osm_file, parser)
    except xml.sax.SAXParseException:
        LOGGER.exception('Failed to parse OSM xml.')
        raise

    way_count_dict = parser.wayCountDict
    node_count_dict = parser.nodeCountDict
    timelines = parser.userDayCountDict

    # Convert to a list of dicts so we can sort it.
    crew_list = config.CREW
    user_list = []

    for key, value in way_count_dict.items():
        start_date, end_date = date_range(timelines[key])
        start_date = time.strftime('%d-%m-%Y', start_date.timetuple())
        end_date = time.strftime('%d-%m-%Y', end_date.timetuple())
        user_timeline = timelines[key]
        node_count = 0
        if key in node_count_dict:
            node_count = node_count_dict[key]
        record = {
            'name': key,
            'ways': value,
            'nodes': node_count,
            'timeline': interpolated_timeline(user_timeline),
            'start': start_date,
            'end': end_date,
            'activeDays': len(user_timeline),
            'best': best_active_day(user_timeline),
            'worst': worst_active_day(user_timeline),
            'average': average_for_active_days(user_timeline)
        }
        user_list.append(record)

    for key, value in node_count_dict.items():
        start_date, end_date = date_range(timelines[key])
        start_date = time.strftime('%d-%m-%Y', start_date.timetuple())
        end_date = time.strftime('%d-%m-%Y', end_date.timetuple())
        user_timeline = timelines[key]
        record = {
            'name': key,
            'ways': 0,
            'nodes': value,
            'timeline': interpolated_timeline(user_timeline),
            'start': start_date,
            'end': end_date,
            'activeDays': len(user_timeline),
            'best': best_active_day(user_timeline),
            'worst': worst_active_day(user_timeline),
            'average': average_for_active_days(user_timeline)
        }
        user_list.append(record)

    # Sort it
    sorted_user_list = sorted(
        user_list, key=lambda d: (
            -d['ways'],
            d['nodes'],
            d['name'],
            d['timeline'],
            d['start'],
            d['end'],
            d['activeDays'],
            d['best'],
            d['worst'],
            d['average']))
    return sorted_user_list
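
A hedged sketch of building the date_start/date_end floats for the filtered variant above. It assumes they are seconds-since-epoch timestamps; the exact convention expected by OsmParser should be confirmed against the project:

import time
from datetime import datetime

# Assumption: the filter bounds are Unix timestamps in seconds.
date_start = time.mktime(datetime(2012, 1, 1).timetuple())
date_end = time.mktime(datetime(2012, 12, 31).timetuple())

with open('jakarta.osm') as osm_file:  # hypothetical OSM extract
    contributions = osm_object_contributions(
        osm_file,
        tag_name='building',
        date_start=date_start,
        date_end=date_end)
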
Example #13
0
def import_and_extract_shapefile(
        feature_type,
        file_path,
        qgis_version=2,
        output_prefix='',
        inasafe_version=None,
        lang='en'):
    """Convert the OSM xml file to a shapefile.

    This is a multi-step process:
        * Create a temporary PostGIS database
        * Load the OSM dataset into PostGIS with osm2pgsql and our custom
          style file.
        * Save the data out again to a shapefile
        * Zip the shapefile ready for the user to download

    :param feature_type: The feature to extract.
    :type feature_type: str

    :param file_path: Path to the OSM file.
    :type file_path: str

    :param qgis_version: The QGIS version. Currently 1 and 2 are accepted;
        defaults to 2. A different qml style file will be returned depending
        on the version.
    :type qgis_version: int

    :param output_prefix: Base name for the shapefile. Defaults to ''
        which will result in an output file of feature_type + '.shp'. Adding a
        prefix of e.g. 'test-' would result in a downloaded file name of
        'test-buildings.shp'. Allowed characters are [a-zA-Z-_0-9].
    :type output_prefix: str

    :param inasafe_version: The InaSAFE version, to get correct metadata.
    :type inasafe_version: str

    :param lang: The desired language for the labels in the legend.
        Example: 'en', 'fr', etc. Default is 'en'.
    :type lang: str

    :returns: Path to zipfile that was created.
    :rtype: str

    """
    if not check_string(output_prefix):
        error = 'Invalid output prefix: %s' % output_prefix
        LOGGER.exception(error)
        raise Exception(error)

    output_prefix += feature_type

    work_dir = temp_dir(sub_dir=feature_type)
    directory_name = unique_filename(dir=work_dir)
    db_name = os.path.basename(directory_name)

    import_osm_file(db_name, feature_type, file_path)
    zip_file = extract_shapefile(
        feature_type,
        db_name,
        directory_name,
        qgis_version,
        output_prefix,
        inasafe_version,
        lang)
    drop_database(db_name)
    return zip_file
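
The docstring above documents the allowed output_prefix characters as [a-zA-Z-_0-9]; a minimal sketch of a validator enforcing that documented contract (an illustration, not the project's check_string implementation):

import re


def is_valid_prefix(text):
    """Return True if text only contains the documented characters
    [a-zA-Z-_0-9]; an empty prefix is allowed."""
    return re.match(r'^[a-zA-Z0-9_-]*$', text) is not None


assert is_valid_prefix('test-')
assert not is_valid_prefix('bad prefix!')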