Code example #1
import csv

import gdaltools


def main(csv_file):
    #open the csv file
    with open(csv_file, 'r') as csv_input:
        #establish the data as a header-accessible dictionary
        reader = csv.DictReader(csv_input)
        #for each row in the data, read in a filename,
        #grab the file and append it to a table in the evl
        #postgres database for later use
        for row in reader:
            print(row['filename'])
            #initialize ogr2ogr
            ogr = gdaltools.ogr2ogr()
            ogr.set_encoding("UTF-8")
            #establish connection settings
            conn = gdaltools.PgConnectionString(host='localhost',
                                                port=5432,
                                                dbname='evl',
                                                user='******')
            #set up the file input from the isochrones directory
            ogr.set_input('data/isochrones/' + row['filename'])
            #specify the table and postgres format for PostGIS
            ogr.set_output(conn, table_name='isochrones')
            #make sure we append the data
            ogr.set_output_mode(layer_mode=ogr.MODE_LAYER_APPEND)
            #run ogr2ogr
            ogr.execute()
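A minimal command-line entry point for the function above, assuming the CSV path is passed as the first argument, could look like this sketch:

import sys

if __name__ == "__main__":
    # hypothetical invocation: python load_isochrones.py isochrones.csv
    main(sys.argv[1])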
Code example #2
File: views.py Project: nyaconsulting/gvsig-online
def sync_download(request):
    locks = []
    prepared_tables = []
    try:
        request_params = json.loads(request.body)
        layers = request_params["layers"]
        # we will ignore bbox for the moment
        bbox = request_params.get("bbox", None)
        for layer in layers:
            #FIXME: maybe we need to specify if we want the layer for reading or writing!!!! Assume we always want to write for the moment
            lock = add_layer_lock(layer,
                                  request.user,
                                  lock_type=LayerLock.SYNC_LOCK)
            locks.append(lock)
            conn = _get_layer_conn(lock.layer)
            if not conn:
                raise HttpResponseBadRequest("Bad request")
            prepared_tables.append({"layer": lock.layer, "connection": conn})

        (fd, file_path) = tempfile.mkstemp(suffix=".sqlite",
                                           prefix="syncdwld_")
        os.close(fd)
        os.remove(file_path)
        if len(prepared_tables) > 0:
            ogr = gdaltools.ogr2ogr()
            ogr.set_output_mode(layer_mode=ogr.MODE_LAYER_CREATE,
                                data_source_mode=ogr.MODE_DS_CREATE_OR_UPDATE)
            for table in prepared_tables:
                if table["connection"].schema:
                    in_tbl_name = (table["connection"].schema + "." +
                                   table["layer"].name)
                else:
                    in_tbl_name = table["layer"].name
                ogr.set_input(table["connection"], table_name=in_tbl_name)
                ogr.set_output(file_path,
                               table_name=table["layer"].get_qualified_name())
                ogr.execute()

            gdaltools.ogrinfo(file_path, sql="SELECT UpdateLayerStatistics()")
            locked_layers = [lock.layer for lock in locks]
            _copy_images(locked_layers, file_path)
            file = TemporaryFileWrapper(file_path)
            response = FileResponse(file,
                                    content_type='application/spatialite')
            #response['Content-Disposition'] = 'attachment; filename=db.sqlite'
            #response['Content-Length'] = os.path.getsize(path)
            return response
        else:
            return HttpResponseBadRequest("Bad request")

    except Exception as exc:
        for lock in locks:
            remove_layer_lock(lock.layer, request.user)
        logger.exception("sync_download error")
        return HttpResponseBadRequest("Bad request")
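This view streams the generated SpatiaLite database back to the caller. A minimal client sketch, assuming the endpoint is exposed at a hypothetical /sync/download/ URL, that authentication is already handled, and with made-up layer names, could look like this:

import requests

# hypothetical URL and layer names, for illustration only
resp = requests.post(
    "https://example.org/gvsigonline/sync/download/",
    json={"layers": ["workspace:roads"]},
)
resp.raise_for_status()
with open("sync_download.sqlite", "wb") as out:
    out.write(resp.content)  # the response body is a SpatiaLite database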
Code example #3
def import_zsj(connection, schema, zipfile, kraje=False, vusc=False,
        okresy=False, orp=False, pou=False):

    global logger
    zsj_file_name = get_data_zsj(ZSJ_URL, "zsj", zipfile)
    connection_params = _get_connection(connection)
    if schema:
        connection_params["schema"] = schema

    ogr = gdaltools.ogr2ogr()
    ogr.set_output_mode(
            data_source_mode=ogr.MODE_DS_CREATE_OR_UPDATE,
            layer_mode=ogr.MODE_LAYER_OVERWRITE
    )
    conn = gdaltools.PgConnectionString(**connection_params)

    if kraje:
        logger.info('Importing kraje')
        ogr.set_input(zsj_file_name, table_name="Kraje")
        ogr.set_output(conn, table_name="kraje", srs="EPSG:5514")
        ogr.execute()

    if vusc:
        logger.info('Importing vusc')
        ogr.set_input(zsj_file_name, table_name="Vusc")
        ogr.set_output(conn, table_name="vusc", srs="EPSG:5514")
        ogr.execute()

    if okresy:
        logger.info('Importing okresy')
        ogr.set_input(zsj_file_name, table_name="Okresy")
        ogr.set_output(conn, table_name="okresy", srs="EPSG:5514")
        ogr.execute()

    if orp:
        logger.info('Importing orp')
        ogr.set_input(zsj_file_name, table_name="Orp")
        ogr.set_output(conn, table_name="orp", srs="EPSG:5514")
        ogr.execute()

    if pou:
        logger.info('Importing pou')
        ogr.set_input(zsj_file_name, table_name="Pou")
        ogr.set_output(conn, table_name="pou", srs="EPSG:5514")
        ogr.execute()
Code example #4
def ogr():
    import gdaltools
    ogr = gdaltools.ogr2ogr()
    yield ogr
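This reads like a pytest yield fixture whose @pytest.fixture decorator did not make it into the excerpt. A minimal test consuming it, assuming pytest and that the fixture is registered in the project's conftest, might be:

def test_ogr_fixture_exposes_mode_constants(ogr):
    # the fixture injects a gdaltools ogr2ogr wrapper; check two of the
    # output-mode constants used throughout the examples on this page
    assert hasattr(ogr, "MODE_LAYER_APPEND")
    assert hasattr(ogr, "MODE_DS_CREATE_OR_UPDATE")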
Code example #5
File: views.py Project: nyaconsulting/gvsig-online
def sync_upload(request, release_locks=True):
    tmpfile = None

    if 'fileupload' in request.FILES:
        tmpfile = handle_uploaded_file(request.FILES.get('fileupload'))
    elif 'fileupload' in request.POST:
        try:
            zipcontents = request.POST.get('fileupload')
            tmpfile = handle_uploaded_file_base64(zipcontents)
        except:
            logger.exception(SYNCERROR_FILEPARAM_MISSING)
            return HttpResponseBadRequest(SYNCERROR_FILEPARAM_MISSING)
    elif request.method == 'POST':
        tmpfile = handle_uploaded_file_raw(request)
    else:
        logger.error(SYNCERROR_FILE_MISSING)
        return HttpResponseBadRequest(SYNCERROR_FILE_MISSING)
    if tmpfile:
        # 1 - check if the file is a spatialite database
        # 2 - check if the included tables are locked and writable by the user
        # 3 - overwrite the tables in DB using the uploaded tables
        # 4 - remove the table locks
        # 5 - handle images
        # 6 - remove the temporal file
        try:
            db = sq_introspect.Introspect(tmpfile)
            try:
                tables = db.get_geometry_tables()
                locks = []
                for t in tables:
                    # first check all the layers are properly locked and writable
                    lock = get_layer_lock(t,
                                          request.user,
                                          check_writable=True,
                                          lock_type=LayerLock.SYNC_LOCK)
                    locks.append(lock)
                for lock in locks:
                    ogr = gdaltools.ogr2ogr()
                    qualified_layer_name = lock.layer.get_qualified_name()
                    geom_info = db.get_geometry_columns_info(
                        qualified_layer_name)
                    if len(geom_info) > 0 and len(geom_info[0]) == 7:
                        srs = "EPSG:" + str(geom_info[0][3])
                        ogr.set_input(tmpfile,
                                      table_name=qualified_layer_name,
                                      srs=srs)
                        conn = _get_layer_conn(lock.layer)
                        if not conn:
                            raise HttpResponseBadRequest(
                                SYNCERROR_INCONSISTENT_LAYER_STATUS)
                        if conn.schema:
                            tbl_name = conn.schema + "." + lock.layer.name
                        else:
                            tbl_name = lock.layer.name
                        ogr.set_output(conn, table_name=tbl_name)
                        ogr.set_output_mode(layer_mode=ogr.MODE_LAYER_OVERWRITE,
                                            data_source_mode=ogr.MODE_DS_UPDATE)
                        ogr.execute()

                        # workaround ogr2ogr behaviour which does not update the associated pk serial sequence
                        pgdb = pg_introspect.Introspect(
                            conn.dbname, conn.host, conn.port, conn.user,
                            conn.password)
                        schema = conn.schema if conn.schema else 'public'
                        pgdb.update_pk_sequences(lock.layer.name, schema)
                        pgdb.close()

                        mapservice_backend.updateBoundingBoxFromData(
                            lock.layer)
                    else:
                        raise HttpResponseBadRequest(
                            SYNCERROR_UNREADABLE_LAYER.format(
                                lock.layer.get_qualified_name()))
            finally:
                db.close()

            #import time
            # approach 1
            #t1 = time.clock()
            layers = [lock.layer for lock in locks]
            replacer = ResourceReplacer(tmpfile, layers)
            replacer.process()
            #t2 = time.clock()

            # approach 2
            #_remove_existing_images(layers)
            #_extract_images(tmpfile)

            #t3 = time.clock()
            #print "Time approach 1: " + str(t2-t1)
            #print "Time approach 2: " + str(t3-t2)

            if release_locks:
                for lock in locks:
                    # everything was fine, release the locks now
                    lock.delete()
        except sq_introspect.InvalidSqlite3Database:
            logger.exception(SYNCERROR_INVALID_DB)
            return HttpResponseBadRequest(SYNCERROR_INVALID_DB)
        except LayerNotLocked as e:
            logger.exception(SYNCERROR_LAYER_NOT_LOCKED.format(e.layer))
            return HttpResponseBadRequest(
                SYNCERROR_LAYER_NOT_LOCKED.format(e.layer))
        except:
            logger.exception(SYNCERROR_UPLOAD)
            return HttpResponseBadRequest(SYNCERROR_UPLOAD)
        finally:
            os.remove(tmpfile)
    return JsonResponse({'response': 'OK'})
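The pg_introspect.update_pk_sequences call above works around ogr2ogr inserting rows with explicit primary-key values without advancing the associated serial sequence. A minimal sketch of the same idea with plain psycopg2 (connection settings, table and column names are hypothetical):

import psycopg2

# hypothetical connection settings and identifiers, for illustration only
pg_conn = psycopg2.connect(dbname="gis", user="gis", host="localhost")
try:
    with pg_conn, pg_conn.cursor() as cur:
        # realign the serial sequence with the current maximum id so the next
        # INSERT does not collide with rows written by ogr2ogr
        cur.execute(
            "SELECT setval(pg_get_serial_sequence('public.roads', 'gid'), "
            "COALESCE((SELECT MAX(gid) FROM public.roads), 1))"
        )
finally:
    pg_conn.close()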
Code example #6
def import_obce(connection, schema, obce=False, ku=False, casti_obci=False,
        ulice=False, parcely=False, stav_objekty=False, adresy=False):

    global logger

    connection_params = _get_connection(connection)
    if schema:
        connection_params["schema"] = schema

    ogr = gdaltools.ogr2ogr()
    ogr.set_output_mode(
        data_source_mode=ogr.MODE_DS_CREATE_OR_UPDATE,
        layer_mode=ogr.MODE_LAYER_OVERWRITE
    )
    conn = gdaltools.PgConnectionString(**connection_params)

    obce_cislenik = get_obce_cislenik()
    for obec in obce_cislenik:
        if not obec["platne_do"]:
            kod = obec["kod"]
            obec_file = get_obec_file(kod)

            logger.info(f"Importing obec {obec['kod']} - {obec['nazev']}")

            if obce:
                ogr.set_input(obec_file, table_name="Obce")
                ogr.set_output(conn, table_name="obce", srs="EPSG:5514")
                ogr.execute()

            if ku:
                ogr.set_input(obec_file, table_name="KatastralniUzemi")
                ogr.set_output(conn, table_name="katastralni_uzemi", srs="EPSG:5514")
                ogr.execute()

            if casti_obci:
                ogr.set_input(obec_file, table_name="Ulice")
                ogr.set_output(conn, table_name="CastiObci", srs="EPSG:5514")
                ogr.execute()

            if ulice:
                ogr.set_input(obec_file, table_name="Ulice")
                ogr.set_output(conn, table_name="ulice", srs="EPSG:5514")
                ogr.execute()

            if parcely:
                ogr.set_input(obec_file, table_name="Parcely")
                ogr.set_output(conn, table_name="parcely", srs="EPSG:5514")
                ogr.execute()

            if stav_objekty:
                ogr.set_input(obec_file, table_name="StavebniObjekty")
                ogr.set_output(conn, table_name="stavebni_objekty", srs="EPSG:5514")
                ogr.execute()

            if adresy:
                ogr.set_input(obec_file, table_name="AdresniMista")
                ogr.set_output(conn, table_name="adresni_mista", srs="EPSG:5514")
                ogr.execute()

            # from now on, just append
            ogr.set_output_mode(
                data_source_mode=ogr.MODE_DS_CREATE_OR_UPDATE,
                layer_mode=ogr.MODE_LAYER_APPEND
            )
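The loop above recreates each target table for the first municipality and then switches the layer mode so later municipalities are appended. A stripped-down sketch of that overwrite-then-append pattern with gdaltools (connection settings and input files are hypothetical):

import gdaltools

# hypothetical connection settings and GML files, for illustration only
conn = gdaltools.PgConnectionString(host="localhost", port=5432,
                                    dbname="ruian", user="ruian")
ogr = gdaltools.ogr2ogr()
first = True
for gml_path in ["obec_500011.xml", "obec_500020.xml"]:
    # the first pass overwrites the table, later passes append to it
    ogr.set_output_mode(
        data_source_mode=ogr.MODE_DS_CREATE_OR_UPDATE,
        layer_mode=ogr.MODE_LAYER_OVERWRITE if first else ogr.MODE_LAYER_APPEND,
    )
    ogr.set_input(gml_path, table_name="Obce")
    ogr.set_output(conn, table_name="obce", srs="EPSG:5514")
    ogr.execute()
    first = False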