Example #1
def date_range(timeline):
    """Given a timeline, determine the start and end dates.

    The timeline may be sparse (containing fewer entries than all the dates
    between the min and max dates) and since it is a dict,
    the dates may be in any order.

    :param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
        keys and values (representing ways collected on that day).
    :type timeline: dict

    :returns: A tuple containing two dates:
        * start_date - a date object representing the earliest date in the
            timeline.
        * end_date - a date object representing the latest date in the
            timeline.
    :rtype: (date, date)

    """
    start_date = None
    end_date = None
    for next_date in timeline.keys():
        year, month, day = next_date.split('-')
        message = 'Date: %s' % next_date
        LOGGER.info(message)
        timeline_date = date(int(year), int(month), int(day))
        if start_date is None:
            start_date = timeline_date
        if end_date is None:
            end_date = timeline_date
        if timeline_date < start_date:
            start_date = timeline_date
        if timeline_date > end_date:
            end_date = timeline_date
    return start_date, end_date
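
A minimal usage sketch for the function above (the sample timeline and its values are illustrative; date comes from the standard datetime module and LOGGER is assumed to be a module-level logger):

from datetime import date
import logging

LOGGER = logging.getLogger(__name__)

sample_timeline = {
    '2015-03-02': 5,   # illustrative counts of ways collected per day
    '2015-01-15': 3,
    '2015-02-20': 7,
}
start, end = date_range(sample_timeline)
assert start == date(2015, 1, 15)
assert end == date(2015, 3, 2)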
Example #2
def load_osm_document(file_path, url_path):
    """Load an osm document, refreshing it if the cached copy is stale.

    To save bandwidth the file is not downloaded if it is less than 1 hour old.

    :type file_path: basestring
    :param file_path: The path on the filesystem to which the file should
        be saved.

    :param url_path: Path (relative to the ftp root) from which the file
        should be retrieved.
    :type url_path: str

    :returns: A file object for the downloaded file.
    :rtype: file

    Raises:
        None
    """
    elapsed_seconds = 0
    if os.path.exists(file_path):
        current_time = time.time()  # in unix epoch
        file_time = os.path.getmtime(file_path)  # in unix epoch
        elapsed_seconds = current_time - file_time
        if elapsed_seconds > 3600:
            os.remove(file_path)
    if elapsed_seconds > 3600 or not os.path.exists(file_path):
        fetch_osm(file_path, url_path)
        message = ('fetched %s' % file_path)
        LOGGER.info(message)
    file_handle = open(file_path, 'rb')
    return file_handle
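
A hedged usage sketch of the function above (fetch_osm and LOGGER are assumed to be defined in the same module; the cache path and the path relative to the ftp root below are hypothetical placeholders):

osm_file = load_osm_document(
    '/tmp/osm-cache/buildings.osm',   # hypothetical local cache path
    'api/interpreter?data=...')       # hypothetical path relative to the ftp root
print(osm_file.name)
osm_file.close()

If the cached copy is less than one hour old, the download is skipped and the existing file is simply reopened.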
Example #3
def load_osm_document(file_path, url_path):
    """Load an osm document, refreshing it if the cached copy is stale.

    To save bandwidth the file is not downloaded if it is less than 1 hour old.

    :type file_path: basestring
    :param file_path: The path on the filesystem to which the file should
        be saved.

    :param url_path: Path (relative to the ftp root) from which the file
        should be retrieved.
    :type url_path: str

    :returns: A file object for the downloaded file.
    :rtype: file

    Raises:
        None
    """
    elapsed_seconds = 0
    if os.path.exists(file_path):
        current_time = time.time()  # in unix epoch
        file_time = os.path.getmtime(file_path)  # in unix epoch
        elapsed_seconds = current_time - file_time
        if elapsed_seconds > 3600:
            os.remove(file_path)
    if elapsed_seconds > 3600 or not os.path.exists(file_path):
        fetch_osm(file_path, url_path)
        message = ('fetched %s' % file_path)
        LOGGER.info(message)
    file_handle = open(file_path, 'rb')
    return file_handle
Example #4
def date_range(timeline):
    """Given a timeline, determine the start and end dates.

    The timeline may be sparse (containing fewer entries than all the dates
    between the min and max dates) and since it is a dict,
    the dates may be in any order.

    :param timeline: A dictionary of non-sequential dates (in YYYY-MM-DD) as
        keys and values (representing ways collected on that day).
    :type timeline: dict

    :returns: A tuple containing two dates:
        * start_date - a date object representing the earliest date in the
            timeline.
        * end_date - a date object representing the latest date in the
            timeline.
    :rtype: (date, date)

    """
    start_date = None
    end_date = None
    for next_date in timeline.keys():
        year, month, day = next_date.split('-')
        message = 'Date: %s' % next_date
        LOGGER.info(message)
        timeline_date = date(int(year), int(month), int(day))
        if start_date is None:
            start_date = timeline_date
        if end_date is None:
            end_date = timeline_date
        if timeline_date < start_date:
            start_date = timeline_date
        if timeline_date > end_date:
            end_date = timeline_date
    return start_date, end_date
Example #5
def drop_database(db_name):
    """Remove a database.

    :param db_name: The name of the database to drop.
    :type db_name: str
    """
    dropdb_executable = which('dropdb')[0]
    dropdb_command = '%s %s' % (dropdb_executable, db_name)
    LOGGER.info(dropdb_command)
    call(dropdb_command, shell=True)
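
As a side note on the shell=True call above, a minimal alternative sketch that passes the arguments as a list so the database name is never interpreted by a shell (this assumes call is subprocess.call and which is the same helper used above; the function name is hypothetical):

from subprocess import call

def drop_database_args(db_name):
    """Remove a database without going through a shell (illustrative variant)."""
    dropdb_executable = which('dropdb')[0]
    call([dropdb_executable, db_name])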
Example #6
def drop_database(db_name):
    """Remove a database.

    :param db_name: The name of the database to drop.
    :type db_name: str
    """
    dropdb_executable = which('dropdb')[0]
    dropdb_command = '%s %s' % (dropdb_executable, db_name)
    LOGGER.info(dropdb_command)
    call(dropdb_command, shell=True)
Example #7
def import_osm_file(db_name, feature_type, file_path):
    """Import the OSM xml file into a postgis database.

    :param db_name: The database to use.
    :type db_name: str

    :param feature_type: The feature to import.
    :type feature_type: str

    :param file_path: Path to the OSM file.
    :type file_path: str
    """
    overpass_resource_path = overpass_resource_base_path(feature_type)
    style_file = '%s.style' % overpass_resource_path

    # Used to standardise types while the data is still in PostgreSQL
    transform_path = '%s.sql' % overpass_resource_path
    createdb_executable = which('createdb')[0]
    createdb_command = '%s -T template_postgis %s' % (createdb_executable,
                                                      db_name)
    osm2pgsql_executable = which('osm2pgsql')[0]
    osm2pgsql_options = config.OSM2PGSQL_OPTIONS
    osm2pgsql_command = '%s -S %s -d %s %s %s' % (osm2pgsql_executable,
                                                  style_file, db_name,
                                                  osm2pgsql_options, file_path)
    psql_executable = which('psql')[0]
    transform_command = '%s %s -f %s' % (psql_executable, db_name,
                                         transform_path)

    LOGGER.info(createdb_command)
    call(createdb_command, shell=True)
    LOGGER.info(osm2pgsql_command)
    call(osm2pgsql_command, shell=True)
    LOGGER.info(transform_command)
    call(transform_command, shell=True)
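
A hedged usage sketch of import_osm_file (database name, feature type and file path are hypothetical; createdb, osm2pgsql and psql must be available on the PATH, and a template_postgis template database must exist):

import_osm_file(
    'osm_extract_tmp',       # hypothetical scratch database name
    'buildings',             # feature type with matching .style and .sql resources
    '/tmp/buildings.osm')    # hypothetical OSM XML file to import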
Example #8
    def setUp(self):
        """Setup the selenium driver."""
        self.app = self.create_app()
        # First just do a basic test to see that the test server works
        # independently of selenium tests.
        # result = self.app.get(
        #    '/', data=dict(), follow_redirects=True)
        # code = result.status_code
        # self.assertEquals(code, 200)

        # now setup selenium driver
        # TODO add platform check and look for common browsers
        # We can check firefox and chrome on all platforms...
        try:
            self.driver = webdriver.Chrome()
        except Exception as e:
            self.fail(
                'Error setting up selenium driver for Chrome\n%s' % str(e))
        try:
            self.driver.get(self.get_server_url())
        except Exception as e:
            self.fail(
                'Error getting server url for selenium tests\n%s' % str(e))
        LOGGER.info('Preparing to run front end tests')
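
The snippet does not show a tearDown counterpart; a minimal hedged sketch of what one would typically look like for this fixture (assuming the same self.driver attribute):

    def tearDown(self):
        """Quit the selenium driver after each test (illustrative sketch)."""
        if getattr(self, 'driver', None) is not None:
            self.driver.quit()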
Example #9
    def setUp(self):
        """Setup the selenium driver."""
        self.app = self.create_app()
        # First just do a basic test to see that the test server works
        # independently of selenium tests.
        # result = self.app.get(
        #    '/', data=dict(), follow_redirects=True)
        # code = result.status_code
        # self.assertEquals(code, 200)

        # now setup selenium driver
        # TODO add platform check and look for common browsers
        # We can check firefox and chrome on all platforms...
        try:
            self.driver = webdriver.Chrome()
        except Exception as e:
            self.fail('Error setting up selenium driver for Chrome\n%s' %
                      str(e))
        try:
            self.driver.get(self.get_server_url())
        except Exception as e:
            self.fail('Error getting server url for selenium tests\n%s' %
                      str(e))
        LOGGER.info('Preparing to run front end tests')
Example #10
def import_osm_file(db_name, feature_type, file_path):
    """Import the OSM xml file into a postgis database.

    :param db_name: The database to use.
    :type db_name: str

    :param feature_type: The feature to import.
    :type feature_type: str

    :param file_path: Path to the OSM file.
    :type file_path: str
    """
    overpass_resource_path = overpass_resource_base_path(feature_type)
    style_file = '%s.style' % overpass_resource_path

    # Used to standardise types while the data is still in PostgreSQL
    transform_path = '%s.sql' % overpass_resource_path
    createdb_executable = which('createdb')[0]
    createdb_command = '%s -T template_postgis %s' % (
        createdb_executable, db_name)
    osm2pgsql_executable = which('osm2pgsql')[0]
    osm2pgsql_options = config.OSM2PGSQL_OPTIONS
    osm2pgsql_command = '%s -S %s -d %s %s %s' % (
        osm2pgsql_executable,
        style_file,
        db_name,
        osm2pgsql_options,
        file_path)
    psql_executable = which('psql')[0]
    transform_command = '%s %s -f %s' % (
        psql_executable, db_name, transform_path)

    LOGGER.info(createdb_command)
    call(createdb_command, shell=True)
    LOGGER.info(osm2pgsql_command)
    call(osm2pgsql_command, shell=True)
    LOGGER.info(transform_command)
    call(transform_command, shell=True)
Example #11
def extract_shapefile(
        feature_type,
        db_name,
        directory_name,
        qgis_version=2,
        output_prefix='',
        inasafe_version=None,
        lang='en'):
    """Extract a database to a shapefile.

    This is a multi-step process:
        * Create a temporary postgis database
        * Load the osm dataset into POSTGIS with osm2pgsql and our custom
             style file.
        * Save the data out again to a shapefile
        * Zip the shapefile ready for user to download

    :param feature_type: The feature to extract.
    :type feature_type: str

    :param db_name: The database to extract.
    :type db_name: str

    :param directory_name: The directory to use for the extract.
    :type directory_name: str

    :param qgis_version: The QGIS version. Currently 1 and 2 are accepted;
        defaults to 2. A different qml style file will be returned depending
        on the version.
    :type qgis_version: int

    :param output_prefix: Base name for the shape file. Defaults to ''
        which will result in an output file of feature_type + '.shp'. Adding a
        prefix of e.g. 'test-' would result in a downloaded file name of
        'test-buildings.shp'. Allowed characters are [a-zA-Z-_0-9].
    :type output_prefix: str

    :param inasafe_version: The InaSAFE version, to get correct metadata.
    :type inasafe_version: str

    :param lang: The language desired for the labels in the legend.
        Example: 'en', 'fr', etc. Default is 'en'.
    :type lang: str

    :returns: Path to zipfile that was created.
    :rtype: str
    """
    # Extract
    os.makedirs(directory_name)
    shapefile_resource_path = shapefile_resource_base_path(feature_type)

    shape_path = os.path.join(directory_name, '%s.shp' % output_prefix)

    if qgis_version > 1:
        qml_source_path = '%s-%s.qml' % (shapefile_resource_path, lang)
        if not os.path.isfile(qml_source_path):
            qml_source_path = '%s-en.qml' % shapefile_resource_path
    else:
        qml_source_path = '%s-qgis1.qml' % shapefile_resource_path

    qml_dest_path = os.path.join(directory_name, '%s.qml' % output_prefix)

    license_source_path = '%s.license' % generic_shapefile_base_path()
    license_dest_path = os.path.join(
        directory_name, '%s.license' % output_prefix)
    prj_source_path = '%s.prj' % generic_shapefile_base_path()
    prj_dest_path = os.path.join(
        directory_name, '%s.prj' % output_prefix)

    pgsql2shp_executable = which('pgsql2shp')[0]
    pgsql2shp_command = '%s -f %s %s %s' % (
        pgsql2shp_executable, shape_path, db_name, SQL_QUERY_MAP[feature_type])

    # Now run the commands in sequence:
    LOGGER.info(pgsql2shp_command)
    call(pgsql2shp_command, shell=True)
    copyfile(qml_source_path, qml_dest_path)

    metadata = metadata_files(
        inasafe_version, lang, feature_type, output_prefix)

    for destination, source in metadata.items():
        source_path = '%s%s' % (shapefile_resource_path, source)
        destination_path = os.path.join(directory_name, destination)
        copyfile(source_path, destination_path)
        add_metadata_timestamp(destination_path)

    # Generic files
    copyfile(prj_source_path, prj_dest_path)
    copyfile(license_source_path, license_dest_path)

    # Now zip it up and return the path to the zip, removing the original shp
    zipfile = zip_shp(
        shape_path,
        extra_ext=['.qml', '.keywords', '.license', '.xml'],
        remove_file=True)
    LOGGER.info('Shape written to {path}'.format(path=shape_path))

    return zipfile
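
A hedged usage sketch of the call (all names are illustrative; the output directory must not exist yet because os.makedirs is called on it, and pgsql2shp must be on the PATH):

zip_path = extract_shapefile(
    'buildings',                 # feature type
    'osm_extract_tmp',           # hypothetical database populated beforehand
    '/tmp/extract-buildings',    # output directory, created by the function
    qgis_version=2,
    output_prefix='test-',
    inasafe_version=None,
    lang='en')
LOGGER.info('Zip archive available at %s', zip_path)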
Example #12
    context['campaigns'] = Campaign.all()
    context['categories'] = AbstractInsightsFunction.CATEGORIES
    context['functions'] = get_selected_functions()
    context['title'] = 'Edit Campaign'
    return render_template(
        'create_campaign.html', form=form, **context)


@campaign_manager.route('/land')
def landing_auth():
    """OSM auth landing page.
    """
    return render_template('land.html')


@campaign_manager.route('/not-logged-in.html')
def not_logged_in():
    """Not logged in page.
    """
    return render_template('not_authenticated.html')


if __name__ == '__main__':
    if Config.DEBUG:
        campaign_manager.debug = True
        # set up flask to serve static content
        campaign_manager.add_url_rule('/<path:path>', 'static_file', static_file)
    else:
        LOGGER.info('Running in production mode')
    campaign_manager.run()
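
A small hedged sketch of exercising one of these routes with Flask's test client (this assumes campaign_manager is the Flask application object, as suggested by the campaign_manager.run() call above):

with campaign_manager.test_client() as client:
    response = client.get('/land')
    assert response.status_code == 200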
Example #13
def extract_shapefile(feature_type,
                      db_name,
                      directory_name,
                      qgis_version=2,
                      output_prefix='',
                      inasafe_version=None,
                      lang='en'):
    """Extract a database to a shapefile.

    This is a multi-step process:
        * Create a temporary postgis database
        * Load the osm dataset into POSTGIS with osm2pgsql and our custom
             style file.
        * Save the data out again to a shapefile
        * Zip the shapefile ready for user to download

    :param feature_type: The feature to extract.
    :type feature_type: str

    :param db_name: The database to extract.
    :type db_name: str

    :param directory_name: The directory to use for the extract.
    :type directory_name: str

    :param qgis_version: The QGIS version. Currently 1 and 2 are accepted;
        defaults to 2. A different qml style file will be returned depending
        on the version.
    :type qgis_version: int

    :param output_prefix: Base name for the shape file. Defaults to ''
        which will result in an output file of feature_type + '.shp'. Adding a
        prefix of e.g. 'test-' would result in a downloaded file name of
        'test-buildings.shp'. Allowed characters are [a-zA-Z-_0-9].
    :type output_prefix: str

    :param inasafe_version: The InaSAFE version, to get correct metadata.
    :type inasafe_version: str

    :param lang: The language desired for the labels in the legend.
        Example: 'en', 'fr', etc. Default is 'en'.
    :type lang: str

    :returns: Path to zipfile that was created.
    :rtype: str
    """
    # Extract
    os.makedirs(directory_name)
    shapefile_resource_path = shapefile_resource_base_path(feature_type)

    shape_path = os.path.join(directory_name, '%s.shp' % output_prefix)

    if qgis_version > 1:
        qml_source_path = '%s-%s.qml' % (shapefile_resource_path, lang)
        if not os.path.isfile(qml_source_path):
            qml_source_path = '%s-en.qml' % shapefile_resource_path
    else:
        qml_source_path = '%s-qgis1.qml' % shapefile_resource_path

    qml_dest_path = os.path.join(directory_name, '%s.qml' % output_prefix)

    license_source_path = '%s.license' % generic_shapefile_base_path()
    license_dest_path = os.path.join(directory_name,
                                     '%s.license' % output_prefix)
    prj_source_path = '%s.prj' % generic_shapefile_base_path()
    prj_dest_path = os.path.join(directory_name, '%s.prj' % output_prefix)

    pgsql2shp_executable = which('pgsql2shp')[0]
    pgsql2shp_command = '%s -f %s %s %s' % (
        pgsql2shp_executable, shape_path, db_name, SQL_QUERY_MAP[feature_type])

    # Now run the commands in sequence:
    LOGGER.info(pgsql2shp_command)
    call(pgsql2shp_command, shell=True)
    copyfile(qml_source_path, qml_dest_path)

    metadata = metadata_files(inasafe_version, lang, feature_type,
                              output_prefix)

    for destination, source in metadata.items():
        source_path = '%s%s' % (shapefile_resource_path, source)
        destination_path = os.path.join(directory_name, destination)
        copyfile(source_path, destination_path)
        add_metadata_timestamp(destination_path)

    # Generic files
    copyfile(prj_source_path, prj_dest_path)
    copyfile(license_source_path, license_dest_path)

    # Now zip it up and return the path to the zip, removing the original shp
    zipfile = zip_shp(shape_path,
                      extra_ext=['.qml', '.keywords', '.license', '.xml'],
                      remove_file=True)
    LOGGER.info('Shape written to {path}'.format(path=shape_path))

    return zipfile
Example #14
def download_feature(feature_type):
    """Generic request to download OSM data.

    :param feature_type: The feature to extract.
    :type feature_type: str

    :returns: A zip file.
    """
    if feature_type not in FEATURES:
        abort(404)

    bbox = request.args.get('bbox', config.BBOX)
    # Get the QGIS version
    # Currently 1, 2 are accepted, default to 2
    # A different qml style file will be returned depending on the version
    qgis_version = int(request.args.get('qgis_version', '2'))
    # Optional parameter that allows the user to specify the filename.
    output_prefix = request.args.get('output_prefix', feature_type)
    # A different keywords file will be returned depending on the version.
    inasafe_version = request.args.get('inasafe_version', None)
    # Optional parameter that allows the user to specify the language for
    # the legend in QGIS.
    lang = request.args.get('lang', 'en')

    # error = None
    try:
        coordinates = split_bbox(bbox)
    except ValueError:
        # error = "Invalid bbox"
        # coordinates = split_bbox(config.BBOX)
        abort(500)
    else:
        local_osm_file = abspath(
            join(dirname(__file__), 'resources', 'pbf', 'data.pbf'))
        if not exists(local_osm_file):
            LOGGER.info('Going to download data from overpass.')
            try:
                file_handle = get_osm_file(coordinates, feature_type, 'body')
            except OverpassTimeoutException:
                abort(408)
            except OverpassBadRequestException:
                abort(500)
            except OverpassConcurrentRequestException:
                abort(509)
            except URLError:
                abort(500)
        else:
            LOGGER.info(
                'Local PBF file detected. We will not use the Overpass API.')
            file_handle = open(local_osm_file, 'rb')

    # This is for logging requests so we can see what queries we received
    date_time = datetime.datetime.now()

    log_data = {
        'feature_type': feature_type,
        'qgis_version': qgis_version,
        'inasafe_version': inasafe_version,
        'year': date_time.year,
        'month': date_time.month,
        'day': date_time.day,
        'hour': date_time.hour,
        'minute': date_time.minute,
        'second': date_time.second
    }
    # add keys for SW_lng, SW_lat, NE_lng, etc.
    # to our log and write the log file out...
    log_data.update(coordinates)
    log_file_name = (
        '{year}{month}{day}-{hour}{minute}{second}.geojson').format(**log_data)
    log_path = os.path.join(config.LOG_DIR, log_file_name)
    # Note that all the double {{ braces below will be rendered as single
    # braces; they need to be doubled so that str.format does not treat
    # them as replacement fields.
    log_message = """
    {{
        "type": "FeatureCollection",
        "features": [
            {{
                "type": "Feature",
                "properties": {{
                    "feature_type": "{feature_type}",
                    "qgis_version": "{qgis_version}",
                    "inasafe_version": "{inasafe_version}",
                    "year": {year},
                    "month": {month},
                    "day": {day},
                    "hour": {hour}
                }},
                "geometry": {{
                    "type": "Polygon",
                    "coordinates": [
                        [
                            [
                                {SW_lng},
                                {NE_lat}
                            ],
                            [
                                {NE_lng},
                                {NE_lat}
                            ],
                            [
                                {NE_lng},
                                {SW_lat}
                            ],
                            [
                                {SW_lng},
                                {SW_lat}
                            ],
                            [
                                {SW_lng},
                                {NE_lat}
                            ]
                        ]
                    ]
                }}
            }}
        ]
    }}""".format(**log_data)
    with open(log_path, "w") as log_file:
        log_file.write(log_message)

    try:
        # noinspection PyUnboundLocalVariable
        zip_file = import_and_extract_shapefile(
            feature_type,
            file_handle.name,
            qgis_version,
            output_prefix,
            inasafe_version,
            lang)

        f = open(zip_file, 'rb')
    except IOError:
        abort(404)
        return
    return Response(f.read(), mimetype='application/zip')
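
A hedged sketch of how a client might call this view over HTTP (the URL rule and host are assumptions not shown in the snippet; the bbox value is illustrative and must parse with split_bbox):

import requests

response = requests.get(
    'http://localhost:5000/buildings',        # assumed URL rule for feature_type
    params={
        'bbox': '20.389938,-34.10782,20.712257,-34.008618',  # illustrative bbox
        'qgis_version': 2,
        'output_prefix': 'test-',
        'lang': 'en',
    })
with open('test-buildings.zip', 'wb') as zip_out:
    zip_out.write(response.content)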