Commit 775e431f authored by Paul Brust

clean up and results

parent c19403e1
import os
from pathlib import Path
import numpy as np
import pandas as pd
import open3d as o3d
import matplotlib.pyplot as plt
import utils
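# Wall extraction pipeline (summary of the code below): per-floor point cloud CSVs are
# read, reduced via voxel downsampling and radius outlier removal, and planar wall
# segments are peeled off with RANSAC plane fitting; results are written as .xyz files.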
def get_floors(data):
    # Histogram the Z coordinates; the two densest bins give the floor and ceiling heights.
    (n, bins, patches) = plt.hist(data["Z"], bins=100)
    max_bin = np.argmax(n)
    max_density_height = bins[max_bin]
    # Zero out the densest bin (rather than deleting it) so indices stay aligned with `bins`.
    n[max_bin] = 0
    second_max_bin = np.argmax(n)
    second_max_density_height = bins[second_max_bin]
    floors = [max_density_height, second_max_density_height]
    floors.sort()
    plt.show()
    return floors

def filter_floor(input_data, floor_height, floor_range, error_percentage, offset=0):
    # Keep only points whose Z lies within +/- (floor_range * error_percentage) of the target height.
    floor_data = input_data.loc[input_data['Z'] < (floor_height + offset + floor_range * error_percentage)]
    return floor_data.loc[floor_data['Z'] > (floor_height + offset - floor_range * error_percentage)]

def do_open3d_clustering(input_data, eps, min_points):
    # Cluster the points with DBSCAN and visualize the clusters in distinct colors.
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(input_data[["X", "Y", "Z"]].to_numpy())
    with o3d.utility.VerbosityContextManager(o3d.utility.VerbosityLevel.Debug) as cm:
        labels = np.array(pcd.cluster_dbscan(eps=eps, min_points=min_points, print_progress=True))
    max_label = labels.max()
    print(f"point cloud has {max_label + 1} clusters")
    colors = plt.get_cmap("tab20")(labels / (max_label if max_label > 0 else 1))
    colors[labels < 0] = 0  # noise points are painted black
    pcd.colors = o3d.utility.Vector3dVector(colors[:, :3])
    o3d.visualization.draw_geometries([pcd])

def do_open3d_plane_segmentation(input_data, show_bounding_boxes=True, show_bounding_hulls=False):
    rest = o3d.geometry.PointCloud()
    rest.points = o3d.utility.Vector3dVector(input_data[["X", "Y", "Z"]].to_numpy())
    plane_model, inliers = rest.segment_plane(distance_threshold=0.001,
                                              ransac_n=3,
                                              num_iterations=1000)
    cmap = plt.get_cmap("tab20")
    walls = []
    while len(inliers) > 1000:
        print(f"found wall with {len(inliers)} points")
        inlier_cloud = rest.select_by_index(inliers)
        walls.append(inlier_cloud)
        rest = rest.select_by_index(inliers, invert=True)
        plane_model, inliers = rest.segment_plane(distance_threshold=0.001,
                                                  ransac_n=3,
                                                  num_iterations=1000)
    hulls = []
    bounding_boxes = []
    for i in range(len(walls)):
        walls[i].paint_uniform_color(list(cmap(i / len(walls)))[:3])
        if show_bounding_boxes:
            bounding_box = walls[i].get_oriented_bounding_box()
            bounding_box.color = (list(cmap(i / len(walls)))[:3])
            bounding_boxes.append(bounding_box)
        if show_bounding_hulls:
            hull, _ = walls[i].compute_convex_hull()
            hull_ls = o3d.geometry.LineSet.create_from_triangle_mesh(hull)
            hull_ls.paint_uniform_color(list(cmap(i / len(walls)))[:3])
            hulls.append(hull_ls)
    print(f"number of elements found: {len(walls)}")
    if show_bounding_hulls:
        o3d.visualization.draw_geometries([*walls, *hulls])
    if show_bounding_boxes:
        o3d.visualization.draw_geometries([*walls, *bounding_boxes])

def get_plane_segmentation_and_bounding_boxes(input_data, distance_threshold=0.001, ransac_n=3, num_iterations=1000):
    # Reduce the segment first (voxel downsampling + radius outlier removal),
    # then peel off planes with RANSAC until fewer than 1000 inliers remain.
    # Note: the first coordinate column in the input CSVs is named "//X".
    voxel_size = 0.05
    filter_radius = 3 * voxel_size
    point_cloud = o3d.geometry.PointCloud()
    point_cloud.points = o3d.utility.Vector3dVector(input_data[["//X", "Y", "Z"]].to_numpy())
    print(f"---starting point cloud reduction---")
    print(f"{len(point_cloud.points)} points at start")
    point_cloud = point_cloud.voxel_down_sample(voxel_size=voxel_size)
    print(f"{len(point_cloud.points)} points after downsampling")
    point_cloud, ind = point_cloud.remove_radius_outlier(nb_points=12, radius=filter_radius)
    print(f"{len(point_cloud.points)} points after filtering")
    print(f"---finished point cloud reduction---")
    plane_model, inliers = point_cloud.segment_plane(distance_threshold=distance_threshold,
                                                     ransac_n=ransac_n,
                                                     num_iterations=num_iterations)
    cmap = plt.get_cmap("tab20")
    walls = []
    hulls = []
    while len(inliers) > 1000:
        print(f"found wall with {len(inliers)} points")
        inlier_cloud = point_cloud.select_by_index(inliers)
        walls.append(inlier_cloud)
        point_cloud = point_cloud.select_by_index(inliers, invert=True)
        plane_model, inliers = point_cloud.segment_plane(distance_threshold=distance_threshold,
                                                         ransac_n=ransac_n,
                                                         num_iterations=num_iterations)
    for i in range(len(walls)):
        walls[i].paint_uniform_color(list(cmap(i / len(walls)))[:3])
    # ... (remainder of this hunk is collapsed in the diff) ...
    return walls, hulls

def do_plane_segmentation(data_directory: str, floor_name: str, results_directory: str):
    # Segment all CSV tiles of one floor into planar wall clouds and write them as .xyz files.
    error_percentage = 0.2
    full_data = []
    floor_directory = str(Path(data_directory) / Path(floor_name))
    for filename in os.listdir(floor_directory):
        input_data = pd.read_csv(Path(floor_directory) / Path(filename))
        full_data.append(input_data)
    print(f"{len(full_data)} data sets read in from {floor_directory}")
    floor_heights = utils.get_floors(pd.concat(full_data))
    walls = []
    hulls = []
    for i, data_set in enumerate(full_data):
        print(f"processing dataset no {i}")
        wall_data = utils.filter_for_walls(data_set, floor_heights, error_percentage)
        print(f"data filtered for walls")
        segment_wall, segment_hulls = get_plane_segmentation_and_bounding_boxes(wall_data)
        walls = walls + segment_wall
        hulls = hulls + segment_hulls
    o3d.visualization.draw_geometries([*walls])
    # o3d.visualization.draw_geometries([*walls, *hulls])
    print(f"writing results")
    for i, wall in enumerate(walls):
        o3d.io.write_point_cloud(f"{results_directory}{floor_name}/result_wall_{i}.xyz", wall)

def main_DBSCAN_clustering():
    input_data = pd.read_csv("datasets/OneFloor_Full.csv")
    utils.do_open3d_clustering(input_data, 0.15, 200)

if __name__ == '__main__':
    floors_directory = "./datasets/floors/"
    results_directory = "./results/walls/"
    # floor_name = "Floor_1"
    # os.makedirs(f"{results_directory}{floor_name}")
    # do_plane_segmentation(floors_directory, floor_name, results_directory)
    for floor_name in os.listdir(floors_directory):
        os.makedirs(f"{results_directory}{floor_name}")
        do_plane_segmentation(floors_directory, floor_name, results_directory)
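    # Assumed directory layout, inferred from the paths above:
    #   ./datasets/floors/<floor_name>/*.csv   -- per-segment point clouds for one floor
    #   ./results/walls/<floor_name>/          -- created per floor; receives result_wall_<i>.xyz files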