Code example #1
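This excerpt from the CloudComPy test scripts assumes that `cc`, `math`, `os` and `dataDir` are in scope and that `cloud` has already been loaded with two scalar fields, the first one set as the current "in" field. A minimal setup sketch under those assumptions, not necessarily the test's original setup; the sample-cloud parameter and the numeric values checked in the excerpt depend on the particular sample:

import os
import math
import cloudComPy as cc
from gendata import getSampleCloud, dataDir   # test helpers, as in example #3

cc.initCC()                                     # once, before anything else
cloud = cc.loadPointCloud(getSampleCloud(5.0))  # sample parameter is an assumption
cloud.exportCoordToSF(False, True, True)        # scalar fields 0 ('Coord. Y') and 1 ('Coord. Z')
cloud.setCurrentInScalarField(0)                # assumed setter, mirroring setCurrentOutScalarField

# --- excerpt from the test script follows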
cloud.setCurrentOutScalarField(1)

sfi = cloud.getCurrentInScalarField()
if sfi.getName() != cloud.getScalarFieldName(0):
    raise RuntimeError

sfo = cloud.getCurrentOutScalarField()
if sfo.getName() != cloud.getScalarFieldName(1):
    raise RuntimeError

radius = cc.GetPointCloudRadius([cloud])
print("radius: %s" % radius)
if not math.isclose(radius, 0.0293, rel_tol=1e-02):
    raise RuntimeError
radius = 0.03

res = cc.computeCurvature(cc.CurvatureType.GAUSSIAN_CURV, radius, [cloud])
nsf = cloud.getNumberOfScalarFields()
sfc = cloud.getScalarField(nsf - 1)
if sfc.getName() != "Gaussian curvature (0.03)":
    raise RuntimeError

cloud.setCurrentOutScalarField(nsf - 1)
fcloud = cc.filterBySFValue(0.01, sfc.getMax(), cloud)
filteredSize = fcloud.size()
print("filtered cloud size: %s" % filteredSize)
if not math.isclose(filteredSize, 113325, rel_tol=1e-03):
    raise RuntimeError

res = cc.SavePointCloud(fcloud, os.path.join(dataDir, "res3.xyz"))
Code example #2
import os
import numpy as np

from gendata import getSampleCloud, dataDir  # test helpers, as in example #3
import cloudComPy as cc

cc.initCC()  # to do once before using plugins or dealing with numpy

cloud = cc.loadPointCloud(getSampleCloud(2.0))
ok = cloud.exportCoordToSF(True, True, True)

# --- access to ScalarField by name

dic = cloud.getScalarFieldDic()
print(dic)  # {'Coord. X': 0, 'Coord. Y': 1, 'Coord. Z': 2}
sf0 = cloud.getScalarField(dic['Coord. X'])
if sf0.getName() != 'Coord. X':
    raise RuntimeError
sf1 = cloud.getScalarField('Coord. Y')
if sf1.getName() != 'Coord. Y':
    raise RuntimeError

# --- check write and read ply format

cc.SavePointCloud(cloud, os.path.join(dataDir, "cloud.ply"))

cloud2 = cc.loadPointCloud(os.path.join(dataDir, "cloud.ply"))
if cloud2.size() != cloud.size():
    raise RuntimeError

dic2 = cloud2.getScalarFieldDic()
if dic2 != dic:
    raise RuntimeError
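A natural extension of this round-trip check is to compare the field values themselves rather than only the sizes; toNpArray() and np.allclose() are used the same way in example #5. A small sketch along those lines:

# extra check (not in the original snippet): compare 'Coord. Z' before and after the PLY round trip
a1 = cloud.getScalarField(dic['Coord. Z']).toNpArray()
a2 = cloud2.getScalarField(dic2['Coord. Z']).toNpArray()
if not np.allclose(a1, a2, rtol=1.e-6):
    raise RuntimeError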
Code example #3
import os
import sys
import math
import numpy as np

from gendata import getSampleCloud, getSampleCloud2, dataDir, isCoordEqual, createSymbolicLinks
import cloudComPy as cc
createSymbolicLinks()  # required for tests on build, before cc.initCC

cc.initCC()  # to do once before using plugins or dealing with numpy

# --- generate a set of coords and a scalar field

npts = 10000000
phi = 2 * np.pi * np.random.random(npts)
theta = 2 * np.pi * np.random.random(npts)
r = 5 + 0.3 * np.sin(2 * 2 * np.pi * phi + 3 * 2 * np.pi * theta)
x = np.float32(r * np.sin(phi) * np.cos(theta))
y = np.float32(r * np.sin(phi) * np.sin(theta))
z = np.float32(r * np.cos(phi))
coords = np.column_stack((x, y, z))
dr = np.float32(np.sqrt(x * x + y * y + z * z) - 5)

# --- create the pointCloud, add the scalar field, save

cl = cc.ccPointCloud("boule")
cl.coordsFromNPArray_copy(coords)
cl.addScalarField("delta")
sf = cl.getScalarField(0)
sf.fromNpArrayCopy(dr)
res = cc.SavePointCloud(cl, os.path.join(dataDir, "boule.bin"))
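A short follow-up sketch, reloading the file just written to confirm that the point count and the scalar-field name survive the BIN round trip (same calls as in the other examples):

cl2 = cc.loadPointCloud(os.path.join(dataDir, "boule.bin"))
if cl2.size() != npts:
    raise RuntimeError
if cl2.getScalarField(0).getName() != "delta":
    raise RuntimeError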
Code example #4
File: test007.py, Project: prascle/CloudComPy
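The excerpt assumes a 7-vertex polyline poly and a point cloud cloud built earlier in test007.py. One plausible way to build such a polyline from a small vertex cloud, assuming ccPolyline and addPointIndex are exposed in the Python bindings as in the CloudCompare API (a sketch, not the test's actual code):

import numpy as np

# hypothetical construction of a 7-vertex polyline in the XY plane (coordinates are illustrative)
vertices = np.array([[-2., -2., 0.], [2., -2., 0.], [3., 0., 0.], [2., 2., 0.],
                     [0., 3., 0.], [-2., 2., 0.], [-3., 0., 0.]], dtype=np.float32)
vertCloud = cc.ccPointCloud("vertices")
vertCloud.coordsFromNPArray_copy(vertices)
poly = cc.ccPolyline(vertCloud)           # polyline backed by the vertex cloud (assumed binding)
poly.addChild(vertCloud)                  # keep the vertices attached to the polyline (assumed binding)
poly.addPointIndex(0, vertCloud.size())   # reference all 7 vertices (assumed binding)

# --- excerpt from the test script follows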
poly.setClosed(True)
if not poly.isClosed():
    raise RuntimeError

if poly.segmentCount() != 7:
    raise RuntimeError

if poly.size() != 7:
    raise RuntimeError

poly.setName("myPoly")
if poly.getName() != "myPoly":
    raise RuntimeError

cloudCropZ = cloud.crop2D(poly, 2, True)  # keep the points inside the polyline, Z (dimension 2) being the orthogonal direction
cc.SavePointCloud(cloudCropZ, os.path.join(dataDir, "cloudCropZ.xyz"))
npts = cloudCropZ.size()
print("cloudCropZ size: %s" % npts)
if npts != 189981:
    raise RuntimeError

cloudCropX = cloud.crop2D(poly, 1, True)
cc.SavePointCloud(cloudCropX, os.path.join(dataDir, "cloudCropX.xyz"))
npts = cloudCropX.size()
print("cloud.size %s" % npts)
if npts != 458593:
    raise RuntimeError

cloudCropY = cloud.crop2D(poly, 0, True)
cc.SavePointCloud(cloudCropY, os.path.join(dataDir, "cloudCropY.xyz"))
npts = cloudCropY.size()
Code example #5
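This excerpt assumes a first cloud whose scalar field 0 is sf1, with sfmin and sfmax holding its bounds; the exact values checked here depend on that particular sample. A minimal setup sketch, with getMin() assumed to mirror the getMax() call seen in example #1:

cloud1 = cc.loadPointCloud(getSampleCloud(5.0))   # sample parameter is an assumption
cloud1.exportCoordToSF(False, False, True)        # 'Coord. Z' becomes scalar field 0
sf1 = cloud1.getScalarField(0)
sfmin = sf1.getMin()                              # assumed getter
sfmax = sf1.getMax()

# --- excerpt from the test script follows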
print("min: %14.7e" % sfmin)
print("max: %14.7e" % sfmax)
if not math.isclose(sfmin, -4.3446699e-01, rel_tol=1e-06):
    raise RuntimeError
if not math.isclose(sfmax, 2.0000000e+00, rel_tol=1e-06):
    raise RuntimeError

asf1 = sf1.toNpArray()
print(asf1.size)
if asf1.size != 1000000:
    raise RuntimeError

cloud2 = cc.loadPointCloud(getSampleCloud(1.9))
res = cloud2.exportCoordToSF(False, False, True)
sf2 = cloud2.getScalarField(0)
asf2 = sf2.toNpArray()

sf2.fromNpArrayCopy(asf1)

res = cc.SavePointCloud(cloud2, os.path.join(dataDir, "res2.xyz"))
cloud2 = cc.loadPointCloud(os.path.join(dataDir, "res2.xyz"))
sf2 = cloud2.getScalarField(0)
sfname = sf2.getName()
print("scalar field name: %s" % sfname)
if sfname != "Scalar field":
    raise RuntimeError
asf2 = sf2.toNpArray()
ok = np.allclose(asf1, asf2, rtol=1.e-6)
if not ok:
    raise RuntimeError
Code example #6
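This excerpt assumes an octree computed from cloud, a query point, and a ReferenceCloud rc already filled by a first neighbourhood search without a distance limit. A setup sketch under those assumptions (computeOctree() and the 0 "no limit" value for the last argument are assumptions):

octree = cloud.computeOctree()                               # assumed octree accessor
point = (0., 0., 0.)                                         # illustrative query point
rc = cc.ReferenceCloud(cloud)                                # empty ReferenceCloud
res = octree.findPointNeighbourhood(point, rc, 1000, 7, 0.)  # 1000 neighbours at octree level 7
if rc.size() != 1000:
    raise RuntimeError

# --- excerpt from the test script follows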
rc.placeIteratorAtBeginning()
rc.enableScalarField()
if not rc.isScalarFieldEnabled():
    raise RuntimeError
dmax1 = 0
for i in range(rc.size()):
    p = rc.getPoint(i)
    d = math.sqrt((p[0] - point[0])**2 + (p[1] - point[1])**2 +
                  (p[2] - point[2])**2)
    if d > dmax1:
        dmax1 = d
    #print(i, d)
    rc.setPointScalarValue(i, d)
print("dmax1", dmax1)
res = cc.SavePointCloud(cloud, os.path.join(dataDir, "resoctree.bin"))

#--- the same with a maximum distance

rc = cc.ReferenceCloud(cloud)  # start with an empty ReferenceCloud
res = octree.findPointNeighbourhood(point, rc, 1000, 7, dmax1 / 2.)
if rc.size() >= 1000:
    raise RuntimeError
if rc.size() != res[0]:
    raise RuntimeError
maxdist2 = res[2]
print("maxdist2", maxdist2)

rc.placeIteratorAtBeginning()
rc.enableScalarField()
dmax2 = 0
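A plausible continuation, repeating the distance loop above on the restricted neighbourhood; since the search was capped at dmax1 / 2, every distance should stay below that bound:

for i in range(rc.size()):
    p = rc.getPoint(i)
    d = math.sqrt((p[0] - point[0])**2 + (p[1] - point[1])**2 +
                  (p[2] - point[2])**2)
    if d > dmax2:
        dmax2 = d
    rc.setPointScalarValue(i, d)
print("dmax2", dmax2)
if dmax2 > dmax1 / 2.:
    raise RuntimeError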
Code example #7
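This excerpt assumes a training set (X_resampled, y_resampled), the full feature matrix DataMatrix, the original labels labeledLabels, and the cloudComPy cloud subsampledCloud, all prepared earlier in the script. The imports it relies on, not shown in the excerpt, would be along these lines:

import numpy as np
import cloudComPy as cc
from sklearn.ensemble import RandomForestClassifier
from sklearn.impute import SimpleImputer
from sklearn.neural_network import MLPClassifier   # only needed for the commented-out variant

# --- excerpt from the script follows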
# train the classifier: a random forest here (a shallow neural network variant is left commented out)
# nn = MLPClassifier(hidden_layer_sizes=(20,5),max_iter=300,early_stopping=True).fit(X_resampled,y_resampled)
nn = RandomForestClassifier().fit(X_resampled, y_resampled)

# fill nans in main cloud data
imp2 = SimpleImputer(missing_values=np.nan, strategy='mean').fit(DataMatrix)
outFeatures = imp2.transform(DataMatrix)

fullCloudLabel = nn.predict(outFeatures)
LabelProbs = nn.predict_proba(outFeatures)

# %% convert results to scalar fields and write output cloud


nClasses = np.size(np.unique(labeledLabels))

for iClass in range(nClasses):
    # one scalar field per class, holding the predicted probability for that class
    classInd = subsampledCloud.addScalarField('class ' + str(iClass) + ' probability')
    sf = subsampledCloud.getScalarField(classInd)
    sf.fromNpArrayCopy(np.float32(LabelProbs[:, iClass]))

# predicted label as an extra scalar field (cast to float32, since scalar fields hold floats)
classInd = subsampledCloud.addScalarField('Predicted Label')
sf = subsampledCloud.getScalarField(classInd)
sf.fromNpArrayCopy(np.float32(fullCloudLabel))

res = cc.SavePointCloud(subsampledCloud, R'C:\Users\lweidner\OneDrive - BGC Engineering Inc\Code\nnPred.bin')
print('saved cloud with result ', res)