def test_call_module():
    """
    Run a command to see if call_module works.
    """
    data_fname = os.path.join(TEST_DATA_DIR, "points.txt")
    with clib.Session() as lib:
        with GMTTempFile() as out_fname:
            lib.call_module("info", f"{data_fname} -C ->{out_fname.name}")
            assert os.path.exists(out_fname.name)
            output = out_fname.read().strip()
            assert output == "11.5309 61.7074 -2.9289 7.8648 0.1412 0.9338"


def test_create_data_grid_range():
    """
    Create a grid specifying range and inc instead of dim.
    """
    with clib.Session() as lib:
        # Grids from matrices using range and inc
        lib.create_data(
            family="GMT_IS_GRID|GMT_VIA_MATRIX",
            geometry="GMT_IS_SURFACE",
            mode="GMT_CONTAINER_ONLY",
            ranges=[150.0, 250.0, -20.0, 20.0],
            inc=[0.1, 0.2],
        )


def test_parse_constant_composite():
    """
    Parsing a composite constant argument (separated by |) correctly.
    """
    lib = clib.Session()
    test_cases = ((family, via) for family in FAMILIES for via in VIAS)
    for family, via in test_cases:
        composite = "|".join([family, via])
        expected = lib[family] + lib[via]
        parsed = lib._parse_constant(composite, valid=FAMILIES, valid_modifiers=VIAS)
        assert parsed == expected


def test_create_destroy_session():
    """
    Test that create and destroy session are called without errors.
    """
    # Create two sessions and make sure they are not pointing to the same memory
    session1 = clib.Session()
    session1.create(name="test_session1")
    assert session1.session_pointer is not None
    session2 = clib.Session()
    session2.create(name="test_session2")
    assert session2.session_pointer is not None
    assert session2.session_pointer != session1.session_pointer
    session1.destroy()
    session2.destroy()
    # Create and destroy a session twice
    ses = clib.Session()
    for __ in range(2):
        with pytest.raises(GMTCLibNoSessionError):
            ses.session_pointer  # pylint: disable=pointless-statement
        ses.create("session1")
        assert ses.session_pointer is not None
        ses.destroy()
    with pytest.raises(GMTCLibNoSessionError):
        ses.session_pointer  # pylint: disable=pointless-statement


def test_put_matrix_grid():
    """
    Check that assigning a numpy 2d array to an ASCII and NetCDF grid works.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    wesn = [10, 15, 30, 40, 0, 0]
    inc = [1, 1]
    shape = ((wesn[3] - wesn[2]) // inc[1] + 1, (wesn[1] - wesn[0]) // inc[0] + 1)
    for dtype in dtypes:
        with clib.Session() as lib:
            grid = lib.create_data(
                family="GMT_IS_GRID|GMT_VIA_MATRIX",
                geometry="GMT_IS_SURFACE",
                mode="GMT_CONTAINER_ONLY",
                ranges=wesn[:4],
                inc=inc,
                registration="GMT_GRID_NODE_REG",
            )
            data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
            lib.put_matrix(grid, matrix=data)
            # Save the data to a file to see if it's being accessed correctly
            with GMTTempFile() as tmp_file:
                lib.write_data(
                    "GMT_IS_MATRIX",
                    "GMT_IS_POINT",
                    "GMT_CONTAINER_AND_DATA",
                    wesn,
                    tmp_file.name,
                    grid,
                )
                # Load the data and check that it's correct
                newdata = tmp_file.loadtxt(dtype=dtype)
                npt.assert_allclose(newdata, data)
            # Save the data to a netCDF grid and check that xarray can load it
            with GMTTempFile() as tmp_grid:
                lib.write_data(
                    "GMT_IS_MATRIX",
                    "GMT_IS_SURFACE",
                    "GMT_CONTAINER_AND_DATA",
                    wesn,
                    tmp_grid.name,
                    grid,
                )
                with xr.open_dataarray(tmp_grid.name) as dataarray:
                    assert dataarray.shape == shape
                    npt.assert_allclose(dataarray.data, np.flipud(data))
                    npt.assert_allclose(
                        dataarray.coords["x"].actual_range, np.array(wesn[0:2])
                    )
                    npt.assert_allclose(
                        dataarray.coords["y"].actual_range, np.array(wesn[2:4])
                    )


def test_put_vector_2d_fails():
    """
    Check that it fails with an exception for multidimensional arrays.
    """
    with clib.Session() as lib:
        dataset = lib.create_data(
            family="GMT_IS_DATASET|GMT_VIA_VECTOR",
            geometry="GMT_IS_POINT",
            mode="GMT_CONTAINER_ONLY",
            dim=[1, 6, 1, 0],  # columns, rows, layers, dtype
        )
        data = np.array([[37, 12, 556], [37, 12, 556]], dtype="int32")
        with pytest.raises(GMTInvalidInput):
            lib.put_vector(dataset, column=0, vector=data)


def test_put_vector_wrong_column():
    """
    Check that it fails with an exception when giving an invalid column.
    """
    with clib.Session() as lib:
        dataset = lib.create_data(
            family="GMT_IS_DATASET|GMT_VIA_VECTOR",
            geometry="GMT_IS_POINT",
            mode="GMT_CONTAINER_ONLY",
            dim=[1, 3, 1, 0],  # columns, rows, layers, dtype
        )
        data = np.array([37, 12, 556], dtype="float32")
        with pytest.raises(GMTCLibError):
            lib.put_vector(dataset, column=1, vector=data)


def test_virtual_file_bad_direction():
    """
    Test passing an invalid direction argument.
    """
    with clib.Session() as lib:
        vfargs = (
            "GMT_IS_DATASET|GMT_VIA_MATRIX",
            "GMT_IS_POINT",
            "GMT_IS_GRID",  # The invalid direction argument
            0,
        )
        with pytest.raises(GMTInvalidInput):
            with lib.open_virtual_file(*vfargs):
                print("This should have failed")


def test_put_vector_invalid_dtype():
    """
    Check that it fails with an exception for invalid data types.
    """
    with clib.Session() as lib:
        dataset = lib.create_data(
            family="GMT_IS_DATASET|GMT_VIA_VECTOR",
            geometry="GMT_IS_POINT",
            mode="GMT_CONTAINER_ONLY",
            dim=[2, 3, 1, 0],  # columns, rows, layers, dtype
        )
        data = np.array([37, 12, 556], dtype="object")
        with pytest.raises(GMTInvalidInput):
            lib.put_vector(dataset, column=1, vector=data)


def test_create_data_fails():
    """
    Check that create_data raises exceptions for invalid input and output.
    """
    # Passing in invalid mode
    with pytest.raises(GMTInvalidInput):
        with clib.Session() as lib:
            lib.create_data(
                family="GMT_IS_DATASET",
                geometry="GMT_IS_SURFACE",
                mode="Not_a_valid_mode",
                dim=[0, 0, 1, 0],
                ranges=[150.0, 250.0, -20.0, 20.0],
                inc=[0.1, 0.2],
            )
    # Passing in invalid geometry
    with pytest.raises(GMTInvalidInput):
        with clib.Session() as lib:
            lib.create_data(
                family="GMT_IS_GRID",
                geometry="Not_a_valid_geometry",
                mode="GMT_CONTAINER_ONLY",
                dim=[0, 0, 1, 0],
                ranges=[150.0, 250.0, -20.0, 20.0],
                inc=[0.1, 0.2],
            )
    # If the data pointer returned is None (NULL pointer)
    with pytest.raises(GMTCLibError):
        with clib.Session() as lib:
            with mock(lib, "GMT_Create_Data", returns=None):
                lib.create_data(
                    family="GMT_IS_DATASET",
                    geometry="GMT_IS_SURFACE",
                    mode="GMT_CONTAINER_ONLY",
                    dim=[11, 10, 2, 0],
                )


def test_virtualfile_from_vectors_arraylike():
    """
    Pass array-like vectors to a dataset.
    """
    size = 13
    x = list(range(0, size, 1))
    y = tuple(range(size, size * 2, 1))
    z = range(size * 2, size * 3, 1)
    with clib.Session() as lib:
        with lib.virtualfile_from_vectors(x, y, z) as vfile:
            with GMTTempFile() as outfile:
                lib.call_module("info", f"{vfile} ->{outfile.name}")
                output = outfile.read(keep_tabs=True)
    bounds = "\t".join([f"<{min(i):.0f}/{max(i):.0f}>" for i in (x, y, z)])
    expected = f"<vector memory>: N = {size}\t{bounds}\n"
    assert output == expected


def test_write_data_fails():
    """
    Check that write_data raises an exception for non-zero return codes.
    """
    # It's hard to make the C API function fail without causing a Segmentation
    # Fault. Can't test this by giving a bad file name because if output=='',
    # GMT will just write to stdout and spaces are valid file names. Use a mock
    # instead just to exercise this part of the code.
    with clib.Session() as lib:
        with mock(lib, "GMT_Write_Data", returns=1):
            with pytest.raises(GMTCLibError):
                lib.write_data(
                    "GMT_IS_VECTOR",
                    "GMT_IS_POINT",
                    "GMT_WRITE_SET",
                    [1] * 6,
                    "some-file-name",
                    None,
                )


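# Assumed parametrization: the next test takes a ``dtype`` argument, so it needs a
# parametrize decorator to be collected and run on its own. The two values below are
# an assumption based on the docstring ("string or object dtype").
@pytest.mark.parametrize("dtype", [str, object])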
def test_virtualfile_from_vectors_one_string_or_object_column(dtype):
    """
    Test passing in one column with string or object dtype into virtual file
    dataset.
    """
    size = 5
    x = np.arange(size, dtype=np.int32)
    y = np.arange(size, size * 2, 1, dtype=np.int32)
    strings = np.array(["a", "bc", "defg", "hijklmn", "opqrst"], dtype=dtype)
    with clib.Session() as lib:
        with lib.virtualfile_from_vectors(x, y, strings) as vfile:
            with GMTTempFile() as outfile:
                lib.call_module("convert", f"{vfile} ->{outfile.name}")
                output = outfile.read(keep_tabs=True)
    expected = "".join(f"{i}\t{j}\t{k}\n" for i, j, k in zip(x, y, strings))
    assert output == expected


def test_virtualfile_from_matrix():
    """
    Test transforming a matrix to virtual file dataset.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    shape = (7, 5)
    for dtype in dtypes:
        data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
        with clib.Session() as lib:
            with lib.virtualfile_from_matrix(data) as vfile:
                with GMTTempFile() as outfile:
                    lib.call_module("info", f"{vfile} ->{outfile.name}")
                    output = outfile.read(keep_tabs=True)
        bounds = "\t".join([f"<{col.min():.0f}/{col.max():.0f}>" for col in data.T])
        expected = f"<matrix memory>: N = {shape[0]}\t{bounds}\n"
        assert output == expected


def test_virtualfile_from_vectors_transpose():
    """
    Test transforming matrix columns to virtual file dataset.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    shape = (7, 5)
    for dtype in dtypes:
        data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
        with clib.Session() as lib:
            with lib.virtualfile_from_vectors(*data.T) as vfile:
                with GMTTempFile() as outfile:
                    lib.call_module("info", f"{vfile} -C ->{outfile.name}")
                    output = outfile.read(keep_tabs=True)
        bounds = "\t".join([f"{col.min():.0f}\t{col.max():.0f}" for col in data.T])
        expected = f"{bounds}\n"
        assert output == expected


def test_create_data_dataset():
    """
    Run the function to make sure it doesn't fail badly.
    """
    with clib.Session() as lib:
        # Dataset from vectors
        data_vector = lib.create_data(
            family="GMT_IS_DATASET|GMT_VIA_VECTOR",
            geometry="GMT_IS_POINT",
            mode="GMT_CONTAINER_ONLY",
            dim=[10, 20, 1, 0],  # columns, rows, layers, dtype
        )
        # Dataset from matrices
        data_matrix = lib.create_data(
            family="GMT_IS_DATASET|GMT_VIA_MATRIX",
            geometry="GMT_IS_POINT",
            mode="GMT_CONTAINER_ONLY",
            dim=[10, 20, 1, 0],
        )
        assert data_vector != data_matrix


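# Assumed parametrization: like the one-column string test above, the next test takes
# a ``dtype`` argument and needs a parametrize decorator to run standalone.
@pytest.mark.parametrize("dtype", [str, object])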
def test_virtualfile_from_vectors_two_string_or_object_columns(dtype):
    """
    Test passing in two columns of string or object dtype into virtual file
    dataset.
    """
    size = 5
    x = np.arange(size, dtype=np.int32)
    y = np.arange(size, size * 2, 1, dtype=np.int32)
    strings1 = np.array(["a", "bc", "def", "ghij", "klmno"], dtype=dtype)
    strings2 = np.array(["pqrst", "uvwx", "yz!", "@#", "$"], dtype=dtype)
    with clib.Session() as lib:
        with lib.virtualfile_from_vectors(x, y, strings1, strings2) as vfile:
            with GMTTempFile() as outfile:
                lib.call_module("convert", f"{vfile} ->{outfile.name}")
                output = outfile.read(keep_tabs=True)
    expected = "".join(
        f"{h}\t{i}\t{j} {k}\n" for h, i, j, k in zip(x, y, strings1, strings2)
    )
    assert output == expected


def test_virtualfile_from_vectors():
    """
    Test the automation for transforming vectors to virtual file dataset.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    size = 10
    for dtype in dtypes:
        x = np.arange(size, dtype=dtype)
        y = np.arange(size, size * 2, 1, dtype=dtype)
        z = np.arange(size * 2, size * 3, 1, dtype=dtype)
        with clib.Session() as lib:
            with lib.virtualfile_from_vectors(x, y, z) as vfile:
                with GMTTempFile() as outfile:
                    lib.call_module("info", f"{vfile} ->{outfile.name}")
                    output = outfile.read(keep_tabs=True)
        bounds = "\t".join([f"<{i.min():.0f}/{i.max():.0f}>" for i in (x, y, z)])
        expected = f"<vector memory>: N = {size}\t{bounds}\n"
        assert output == expected


def test_fails_for_wrong_version():
    """
    Make sure the clib.Session raises an exception if GMT is too old.
    """

    # Mock GMT_Get_Default to return an old version
    def mock_defaults(api, name, value):  # pylint: disable=unused-argument
        """
        Return an old version.
        """
        if name == b"API_VERSION":
            value.value = b"5.4.3"
        else:
            value.value = b"bla"
        return 0

    lib = clib.Session()
    with mock(lib, "GMT_Get_Default", mock_func=mock_defaults):
        with pytest.raises(GMTVersionError):
            with lib:
                assert lib.info["version"] != "5.4.3"
    # Make sure the session is closed when the exception is raised.
    with pytest.raises(GMTCLibNoSessionError):
        assert lib.session_pointer


def test_virtualfile_from_matrix_slice():
    """
    Test transforming a slice of a larger array to virtual file dataset.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    shape = (10, 6)
    for dtype in dtypes:
        full_data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
        rows = 5
        cols = 3
        data = full_data[:rows, :cols]
        with clib.Session() as lib:
            with lib.virtualfile_from_matrix(data) as vfile:
                with GMTTempFile() as outfile:
                    lib.call_module("info", f"{vfile} ->{outfile.name}")
                    output = outfile.read(keep_tabs=True)
        bounds = "\t".join([f"<{col.min():.0f}/{col.max():.0f}>" for col in data.T])
        expected = f"<matrix memory>: N = {rows}\t{bounds}\n"
        assert output == expected


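# Assumed parametrization: the next test takes ``array_func`` and ``kind`` arguments
# and needs a parametrize decorator to run. The pairings below are an assumption
# consistent with the expected output string: a plain numpy array is passed through
# the matrix interface, while pandas and xarray inputs go through the vector one.
@pytest.mark.parametrize(
    ("array_func", "kind"),
    [(np.array, "matrix"), (pd.DataFrame, "vector"), (xr.Dataset, "vector")],
)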
def test_virtualfile_from_data_required_z_matrix(array_func, kind):
    """
    Test that function works when third z column in a matrix is needed and
    provided.
    """
    shape = (5, 3)
    dataframe = pd.DataFrame(
        data=np.arange(shape[0] * shape[1]).reshape(shape), columns=["x", "y", "z"]
    )
    data = array_func(dataframe)
    with clib.Session() as lib:
        with lib.virtualfile_from_data(data=data, required_z=True) as vfile:
            with GMTTempFile() as outfile:
                lib.call_module("info", f"{vfile} ->{outfile.name}")
                output = outfile.read(keep_tabs=True)
    bounds = "\t".join(
        [
            f"<{i.min():.0f}/{i.max():.0f}>"
            for i in (dataframe.x, dataframe.y, dataframe.z)
        ]
    )
    expected = f"<{kind} memory>: N = {shape[0]}\t{bounds}\n"
    assert output == expected


def test_put_strings():
    """
    Check that assigning a numpy array of dtype str to a dataset works.
    """
    with clib.Session() as lib:
        dataset = lib.create_data(
            family="GMT_IS_DATASET|GMT_VIA_VECTOR",
            geometry="GMT_IS_POINT",
            mode="GMT_CONTAINER_ONLY",
            dim=[2, 5, 1, 0],  # columns, rows, layers, dtype
        )
        x = np.array([1, 2, 3, 4, 5], dtype=np.int32)
        y = np.array([6, 7, 8, 9, 10], dtype=np.int32)
        # Use the builtin str dtype (np.str is deprecated/removed in recent numpy)
        strings = np.array(["a", "bc", "defg", "hijklmn", "opqrst"], dtype=str)
        lib.put_vector(dataset, column=lib["GMT_X"], vector=x)
        lib.put_vector(dataset, column=lib["GMT_Y"], vector=y)
        lib.put_strings(
            dataset, family="GMT_IS_VECTOR|GMT_IS_DUPLICATE", strings=strings
        )
        # Turns out wesn doesn't matter for Datasets
        wesn = [0] * 6
        # Save the data to a file to see if it's being accessed correctly
        with GMTTempFile() as tmp_file:
            lib.write_data(
                "GMT_IS_VECTOR",
                "GMT_IS_POINT",
                "GMT_WRITE_SET",
                wesn,
                tmp_file.name,
                dataset,
            )
            # Load the data and check that it's correct
            newx, newy, newstrings = tmp_file.loadtxt(
                unpack=True, dtype=[("x", np.int32), ("y", np.int32), ("text", "<U7")]
            )
            npt.assert_array_equal(newx, x)
            npt.assert_array_equal(newy, y)
            npt.assert_array_equal(newstrings, strings)


def test_put_vector_mixed_dtypes():
    """
    Passing a numpy array of mixed dtypes to a dataset.

    See https://github.com/GenericMappingTools/pygmt/issues/255
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    for dtypex, dtypey in itertools.permutations(dtypes, r=2):
        with clib.Session() as lib:
            dataset = lib.create_data(
                family="GMT_IS_DATASET|GMT_VIA_VECTOR",
                geometry="GMT_IS_POINT",
                mode="GMT_CONTAINER_ONLY",
                dim=[2, 5, 1, 0],  # columns, rows, layers, dtype
            )
            x = np.array([1, 2, 3, 4, 5], dtype=dtypex)
            y = np.array([6, 7, 8, 9, 10], dtype=dtypey)
            lib.put_vector(dataset, column=lib["GMT_X"], vector=x)
            lib.put_vector(dataset, column=lib["GMT_Y"], vector=y)
            # Turns out wesn doesn't matter for Datasets
            wesn = [0] * 6
            # Save the data to a file to see if it's being accessed correctly
            with GMTTempFile() as tmp_file:
                lib.write_data(
                    "GMT_IS_VECTOR",
                    "GMT_IS_POINT",
                    "GMT_WRITE_SET",
                    wesn,
                    tmp_file.name,
                    dataset,
                )
                # Load the data and check that it's correct
                newx, newy = tmp_file.loadtxt(
                    unpack=True, dtype=[("x", dtypex), ("y", dtypey)]
                )
                assert x.dtype == newx.dtype
                assert y.dtype == newy.dtype
                npt.assert_allclose(newx, x)
                npt.assert_allclose(newy, y)


def test_virtualfile_from_vectors_pandas():
    """
    Pass vectors to a dataset using pandas Series.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    size = 13
    for dtype in dtypes:
        data = pd.DataFrame(
            data=dict(
                x=np.arange(size, dtype=dtype),
                y=np.arange(size, size * 2, 1, dtype=dtype),
                z=np.arange(size * 2, size * 3, 1, dtype=dtype),
            )
        )
        with clib.Session() as lib:
            with lib.virtualfile_from_vectors(data.x, data.y, data.z) as vfile:
                with GMTTempFile() as outfile:
                    lib.call_module("info", f"{vfile} ->{outfile.name}")
                    output = outfile.read(keep_tabs=True)
        bounds = "\t".join(
            [f"<{i.min():.0f}/{i.max():.0f}>" for i in (data.x, data.y, data.z)]
        )
        expected = f"<vector memory>: N = {size}\t{bounds}\n"
        assert output == expected


def test_put_vector():
    """
    Check that assigning a numpy array to a dataset works.
    """
    dtypes = "float32 float64 int32 int64 uint32 uint64".split()
    for dtype in dtypes:
        with clib.Session() as lib:
            dataset = lib.create_data(
                family="GMT_IS_DATASET|GMT_VIA_VECTOR",
                geometry="GMT_IS_POINT",
                mode="GMT_CONTAINER_ONLY",
                dim=[3, 5, 1, 0],  # columns, rows, layers, dtype
            )
            x = np.array([1, 2, 3, 4, 5], dtype=dtype)
            y = np.array([6, 7, 8, 9, 10], dtype=dtype)
            z = np.array([11, 12, 13, 14, 15], dtype=dtype)
            lib.put_vector(dataset, column=lib["GMT_X"], vector=x)
            lib.put_vector(dataset, column=lib["GMT_Y"], vector=y)
            lib.put_vector(dataset, column=lib["GMT_Z"], vector=z)
            # Turns out wesn doesn't matter for Datasets
            wesn = [0] * 6
            # Save the data to a file to see if it's being accessed correctly
            with GMTTempFile() as tmp_file:
                lib.write_data(
                    "GMT_IS_VECTOR",
                    "GMT_IS_POINT",
                    "GMT_WRITE_SET",
                    wesn,
                    tmp_file.name,
                    dataset,
                )
                # Load the data and check that it's correct
                newx, newy, newz = tmp_file.loadtxt(unpack=True, dtype=dtype)
                npt.assert_allclose(newx, x)
                npt.assert_allclose(newy, y)
                npt.assert_allclose(newz, z)


def test_get_default_fails():
    """
    Make sure get_default raises an exception for invalid names.
    """
    with clib.Session() as lib:
        with pytest.raises(GMTCLibError):
            lib.get_default("NOT_A_VALID_NAME")


def test_put_vector_string_dtype():
    """
    Passing string type vectors to a dataset.
    """
    # Input string vectors: numbers, longitudes, latitudes, and datetimes
    vectors = np.array(
        [
            ["10", "20.0", "-30.0", "3.5e1"],
            ["10W", "30.50E", "30:30W", "40:30:30.500E"],
            ["10N", "30.50S", "30:30N", "40:30:30.500S"],
            [
                "2021-02-03",
                "2021-02-03T04",
                "2021-02-03T04:05:06.700",
                "T04:50:06.700",
            ],
        ]
    )
    # Expected output vectors in double or string type
    # Notes:
    # 1. Longitudes and latitudes are stored as doubles in GMT
    # 2. The default output format for datetime is YYYY-mm-ddTHH:MM:SS
    expected_vectors = [
        [10.0, 20.0, -30.0, 35],
        [-10, 30.5, -30.5, 40.508472],
        [10, -30.50, 30.5, -40.508472],
        [
            "2021-02-03T00:00:00",
            "2021-02-03T04:00:00",
            "2021-02-03T04:05:06",
            f"{datetime.utcnow().strftime('%Y-%m-%d')}T04:50:06",
        ],
    ]
    # Loop over all possible combinations of input types
    for i, j in itertools.combinations_with_replacement(range(4), r=2):
        with clib.Session() as lib:
            dataset = lib.create_data(
                family="GMT_IS_DATASET|GMT_VIA_VECTOR",
                geometry="GMT_IS_POINT",
                mode="GMT_CONTAINER_ONLY",
                dim=[2, 4, 1, 0],  # columns, rows, layers, dtype
            )
            lib.put_vector(dataset, column=lib["GMT_X"], vector=vectors[i])
            lib.put_vector(dataset, column=lib["GMT_Y"], vector=vectors[j])
            # Turns out wesn doesn't matter for Datasets
            wesn = [0] * 6
            # Save the data to a file to see if it's being accessed correctly
            with GMTTempFile() as tmp_file:
                lib.write_data(
                    "GMT_IS_VECTOR",
                    "GMT_IS_POINT",
                    "GMT_WRITE_SET",
                    wesn,
                    tmp_file.name,
                    dataset,
                )
                # Load the data
                output = np.genfromtxt(
                    tmp_file.name, dtype=None, names=("x", "y"), encoding=None
                )
                # Check that the output is correct. Use npt.assert_allclose for
                # numeric columns and npt.assert_array_equal for string columns.
                if i != 3:
                    npt.assert_allclose(output["x"], expected_vectors[i])
                else:
                    npt.assert_array_equal(output["x"], expected_vectors[i])
                if j != 3:
                    npt.assert_allclose(output["y"], expected_vectors[j])
                else:
                    npt.assert_array_equal(output["y"], expected_vectors[j])


import xarray as xr
from packaging.version import Version

from pygmt import Figure, clib
from pygmt.clib.conversion import dataarray_to_matrix
from pygmt.clib.session import FAMILIES, VIAS
from pygmt.exceptions import (
    GMTCLibError,
    GMTCLibNoSessionError,
    GMTInvalidInput,
    GMTVersionError,
)
from pygmt.helpers import GMTTempFile

TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data")

with clib.Session() as _lib:
    gmt_version = Version(_lib.info["version"])


@contextmanager
def mock(session, func, returns=None, mock_func=None):
    """
    Mock a GMT C API function to make it always return a given value.

    Used to test that exceptions are raised when API functions fail by
    producing a NULL pointer as output or non-zero status codes.

    Needed because it's not easy to get some API functions to fail without
    inducing a Segmentation Fault (which is a good thing because libgmt
    usually only fails with errors).
    """