Example #1
def cartodb_make_valid_geom_local(src_fc):

    if os.path.splitext(src_fc)[1] == '.shp':
        source_dir = os.path.dirname(src_fc)

    # Need to write the output to shp -- ogr2ogr can't handle true curves stored in geodatabases
    # and will represent them as a point, which spatialite will then choke on
    else:
        source_dir = os.path.dirname(os.path.dirname(src_fc))
        file_name = 'source.shp'
        arcpy.FeatureClassToFeatureClass_conversion(src_fc, source_dir,
                                                    file_name)

        src_fc = os.path.join(source_dir, file_name)

    sqlite_dir = os.path.join(source_dir, 'sqlite')
    os.mkdir(sqlite_dir)

    out_sqlite_path = os.path.join(sqlite_dir, 'out.sqlite')

    cmd = ['ogr2ogr', '-f', 'SQLite', out_sqlite_path]
    cmd = add_fc_to_ogr2ogr_cmd(src_fc, cmd)
    cmd += ["-dsco", "SPATIALITE=yes", '-dim', '2']

    logging.debug('Creating sqlite database')
    util.run_subprocess(cmd)

    table_name = util.gen_paths_shp(src_fc)[2]
    sql = 'UPDATE {0} SET GEOMETRY = ST_MakeValid(GEOMETRY) WHERE ST_IsValid(GEOMETRY) <> 1;'.format(
        table_name)
    cmd = ['spatialite', out_sqlite_path, sql]

    util.run_subprocess(cmd)

    return out_sqlite_path
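util.run_subprocess and add_fc_to_ogr2ogr_cmd are project helpers not shown on this page. As a rough sketch, the two commands built above amount to something like the following for a hypothetical source.shp, using subprocess directly; the exact arguments contributed by add_fc_to_ogr2ogr_cmd and the derived table name are assumptions.

import subprocess

# Hypothetical paths; the real function derives these from src_fc.
src_shp = r'C:\data\source.shp'
out_sqlite = r'C:\data\sqlite\out.sqlite'

# First call: load the shapefile into a new SpatiaLite database, flattening geometries to 2D.
subprocess.check_call(['ogr2ogr', '-f', 'SQLite', out_sqlite, src_shp,
                       '-dsco', 'SPATIALITE=yes', '-dim', '2'])

# Second call: repair invalid geometries in place with SpatiaLite's ST_MakeValid.
# Table name assumed to match the shapefile's base name.
sql = 'UPDATE source SET GEOMETRY = ST_MakeValid(GEOMETRY) WHERE ST_IsValid(GEOMETRY) <> 1;'
subprocess.check_call(['spatialite', out_sqlite, sql])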
Example #2
def cartodb_append(sqlite_db_path,
                   out_cartodb_name,
                   gfw_env,
                   where_clause=None):
    """
    Append a local FC to a cartoDB dataset
    :param sqlite_db_path: path to the local sqlite database
    :param out_cartodb_name: name of the target cartoDB table
    :param gfw_env: the gfw environment, used to look up the cartoDB API key and account name
    :param where_clause: optional SQL where clause to apply to the source dataset
    :return:
    """
    key = util.get_token(settings.get_settings(gfw_env)['cartodb']['token'])
    account_name = get_account_name(gfw_env)

    # Help: http://www.gdal.org/ogr2ogr.html
    # The -dim 2 option ensures that only two dimensional data is created; no Z or M values
    cmd = [
        'ogr2ogr', '--config', 'CARTODB_API_KEY', key, '-append',
        '-skipfailures', '-t_srs', 'EPSG:4326', '-f', 'CartoDB', '-nln',
        out_cartodb_name, '-dim', '2', 'CartoDB:{0}'.format(account_name)
    ]

    cmd = add_fc_to_ogr2ogr_cmd(sqlite_db_path, cmd)
    cmd = add_where_clause_to_ogr2ogr_cmd(where_clause, cmd)

    util.run_subprocess(cmd)
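add_fc_to_ogr2ogr_cmd and add_where_clause_to_ogr2ogr_cmd are not shown in these examples. The sketch below is a guess at what they plausibly do, based only on how they are called here: append the source dataset path after the destination, and pass an optional filter through ogr2ogr's -where option. It is not the project's actual code.

def add_fc_to_ogr2ogr_cmd(src_path, cmd):
    # Assumption: the source dataset path simply follows the destination
    # already present in the ogr2ogr argument list.
    return cmd + [src_path]

def add_where_clause_to_ogr2ogr_cmd(where_clause, cmd):
    # Assumption: an optional filter is passed via ogr2ogr's -where option.
    if where_clause:
        cmd += ['-where', where_clause]
    return cmd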
Example #3
def read_pcl_file(path: Path):
    # Convert the pcd to ply
    new_path = path.parent / Path(path.stem + ".ply")
    run_subprocess("pcl_pcd2ply {} {}".format(path, new_path))

    ply = plyfile.PlyData.read(str(new_path))
    n_points = len(ply.elements[0].data)

    np_data = np.empty((4, n_points))
    for i in range(n_points):
        np_data[0, i] = ply.elements[0].data[i][0]
        np_data[1, i] = ply.elements[0].data[i][1]
        np_data[2, i] = ply.elements[0].data[i][2]
        np_data[3, i] = 1.0

    return np_data
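plyfile exposes the vertex element as a NumPy structured array, so the per-point copy loop can be replaced by stacking the named coordinate fields directly. A short sketch of that alternative; the 'x'/'y'/'z' property names are the usual PLY convention and an assumption about the files involved.

import numpy as np
import plyfile

def read_ply_homogeneous(ply_path):
    # Vertex data is a structured array; stack its coordinate fields plus a
    # row of ones to build the same 4 x n_points homogeneous array as above.
    vertices = plyfile.PlyData.read(str(ply_path)).elements[0].data
    return np.vstack([vertices['x'], vertices['y'], vertices['z'],
                      np.ones(len(vertices))])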
Example #4
def sqlite_row_count(sqlite_db):

    ogrinfo = util.run_subprocess(['ogrinfo', '-q', sqlite_db])

    # Grab the first line from the ogrinfo command, split it, and take the second value
    # Example ogrinfo output: "1: tiger_conservation_landscapes (Multi Polygon)"
    table_name = ogrinfo[0].split()[1]

    ogr_row_count_text = util.run_subprocess([
        'ogrinfo', sqlite_db, '-q', '-sql',
        'SELECT count(*) FROM {0}'.format(table_name)
    ])

    # Response looks like this ['', 'Layer  name: SELECT', 'OGRFeature(SELECT):0', 'count(*) (Integer) = 76', '']
    row_count = int(ogr_row_count_text[-2].split(' = ')[1])

    return row_count
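util.run_subprocess is another project helper not shown here. The list indexing in this and the other GFW examples only works if it returns the command's stdout split into lines, trailing blank line included. A minimal stand-in under that assumption:

import subprocess

def run_subprocess(cmd, log=True):
    # Assumed behaviour: run the command and return stdout split on newlines,
    # keeping the trailing empty string that the [-2] indexing above relies on.
    # The log flag is accepted but ignored in this sketch.
    output = subprocess.check_output(cmd, universal_newlines=True)
    return output.split('\n')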
Example #5
def qc_peru_download(input_vrt):

    gdalinfo_list = util.run_subprocess(['gdalinfo', input_vrt])
    print('\n'.join(gdalinfo_list))

    size_line = gdalinfo_list[2]
    size_results = size_line.replace(',', '').split()[2:]

    size_tuple = [int(x) for x in size_results]
    print('Checking size of the VRT that we downloaded from GEE')
    print(size_tuple)
    if size_tuple != [56005, 80005]:
        raise ValueError('Size tuple does not match expected peru boundaries')
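The indexing above assumes gdalinfo reports the raster dimensions on its third output line as "Size is <cols>, <rows>". A small illustration of the same parsing against a sample line; the sample text is an assumption about the exact gdalinfo formatting.

# Hypothetical third line of gdalinfo output for the Peru VRT.
sample_size_line = 'Size is 56005, 80005'
size_results = sample_size_line.replace(',', '').split()[2:]   # ['56005', '80005']
assert [int(x) for x in size_results] == [56005, 80005]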
Example #6
    def compute(self):
        cmd_string = ('./trail -algorithm {} -reading {} -reference {} '
                      '-estimate \"{}\" -filter_center {} -algo_config {}').format(
                          self.algo.name,
                          self.dataset.path_of_cloud(self.reading),
                          self.dataset.path_of_cloud(self.reference),
                          json.dumps(self.dataset.odometry_estimate(self.reading, self.reference).tolist()),
                          self.dataset.center_filter_size,
                          self.algo.config)

        print(cmd_string)
        response = run_subprocess(cmd_string)
        print(response)

        self.trail_data = json.loads(response)

        with open(CACHE_FILE_NAME, 'wb') as pickle_file:
            pickle.dump(self.trail_data, pickle_file)
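The trail output is cached to CACHE_FILE_NAME with pickle. A minimal sketch of reading it back later, assuming the same module-level constant:

import pickle

# Reload the cached trail data written by compute() above.
with open(CACHE_FILE_NAME, 'rb') as pickle_file:
    trail_data = pickle.load(pickle_file)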
Example #7
def ogrinfo_min_max(input_fc, oid_fieldname):
    input_table_name = os.path.basename(os.path.splitext(input_fc)[0])

    # ogrinfo doesn't recognize OBJECTID in these cases, apparently
    # Also need to only have the GDB as an input, doesn't want entire FC path
    if '.gdb' in input_fc:
        oid_fieldname = 'FID'
        input_fc = os.path.dirname(input_fc)

    sql_statement = 'SELECT min({0}), max({0}) FROM "{1}"'.format(
        oid_fieldname, input_table_name)
    ogrinfo = util.run_subprocess(['ogrinfo', '-sql', sql_statement, input_fc])

    # Grab the last two lines with data (the final line is blank)
    result_lines = ogrinfo[-3:-1]

    # Result lines look like `MIN_FID (Integer) = 0`
    # Split them at the ' = ' and grab the result then convert to int()
    result_vals = [int(l.split(' = ')[1]) for l in result_lines]

    return result_vals[0], result_vals[1]
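The parsing above hinges on each result line having the form "NAME (Type) = value". A small illustration using the sample quoted in the comment plus a hypothetical maximum:

# 'MIN_FID (Integer) = 0' comes from the comment above; the MAX line is hypothetical.
sample_lines = ['  MIN_FID (Integer) = 0', '  MAX_FID (Integer) = 41688']
result_vals = [int(l.split(' = ')[1]) for l in sample_lines]   # [0, 41688]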
Example #8
def get_layer_type(in_fc):
    """
    Get the layer type -- important for ogr2ogr; if we're working with a line string, we need to explicitly set the output
    to line string, not polyline/whatever it is natively in Arc
    :param in_fc:
    :return:
    """

    if os.path.splitext(in_fc)[1] == '.sqlite':
        ogrinfo = util.run_subprocess(['ogrinfo', '-q', in_fc], log=False)
        shapetype = ogrinfo[0].split('(')[1].lower()

    else:
        shapetype = arcpy.Describe(in_fc).shapeType.lower()

    if 'string' in shapetype or 'line' in shapetype:
        layer_type = 'LINE'
    elif 'polygon' in shapetype:
        layer_type = 'POLYGON'
    else:
        logging.error("Unknown layer type: {0}".format(shapetype))
        sys.exit(1)

    return layer_type
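One plausible way the returned LINE/POLYGON value might be used downstream is to set ogr2ogr's -nlt (new layer type) option explicitly; this is a hypothetical sketch, not the project's actual call.

# Hypothetical: map the layer type onto an explicit ogr2ogr -nlt value.
layer_type = get_layer_type(r'C:\data\roads.sqlite')
nlt = 'MULTILINESTRING' if layer_type == 'LINE' else 'MULTIPOLYGON'
cmd = ['ogr2ogr', '-f', 'SQLite', r'C:\data\out.sqlite',
       r'C:\data\roads.sqlite', '-nlt', nlt]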
Example #9
    numpy_points = np.vstack([points_of_side, numpy_points])

    # Add noise to points
    numpy_points += np.random.normal(scale=args.noise, size=numpy_points.shape)
    print(numpy_points)

    # Make a ply file from the points
    pointcloud = np.zeros((n_points, ),
                          dtype=[('x', np.float32), ('y', np.float32),
                                 ('z', np.float32)])
    for i, point in enumerate(numpy_points):
        pointcloud[i] = tuple(point)

    print(pointcloud)

    el = plyfile.PlyElement.describe(
        pointcloud,
        'vertex',
        val_types={
            'x': 'f8',
            'y': 'f8',
            'z': 'f8'
        },
    )
    print(el)
    plyfile.PlyData([el]).write(TEMP_FILENAME)

    # Convert the ply to pcd
    run_subprocess('pcl_ply2pcd {} {}'.format(TEMP_FILENAME, args.output))
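Because pointcloud is a NumPy structured array, the per-point tuple assignment above can also be done with three vectorized field assignments. A standalone illustration with a tiny hypothetical cloud:

import numpy as np

# Three hypothetical points in place of the generated cloud above.
numpy_points = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
pointcloud = np.zeros((len(numpy_points), ),
                      dtype=[('x', np.float32), ('y', np.float32), ('z', np.float32)])
pointcloud['x'] = numpy_points[:, 0]
pointcloud['y'] = numpy_points[:, 1]
pointcloud['z'] = numpy_points[:, 2]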
Example #10
def visualize_transform(dataset, reading, reference, transform):
    cmd_string = './visualize_transform -reading {} -reference {} -transform \"{}\"'.format(
        dataset.path_of_cloud(reading), dataset.path_of_cloud(reference),
        json.dumps(transform.tolist()))

    run_subprocess(cmd_string)
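A hedged usage sketch: dataset is assumed to be the same object used by compute() above (exposing path_of_cloud), and an identity matrix stands in for a real 4x4 registration estimate.

import numpy as np

# Hypothetical call with clouds 0 and 1 and an identity transform.
visualize_transform(dataset, reading=0, reference=1, transform=np.eye(4))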