- Projection and extrusion fixed again

This commit is contained in:
bklronin 2024-07-16 20:11:24 +02:00
parent c6f48a6e78
commit 048ace83ce
3 changed files with 99 additions and 45 deletions

View File

@ -57,6 +57,7 @@ class SketchWidget(QWidget):
for point in lines: for point in lines:
x, y = point x, y = point
self.proj_snap_lines = lines self.proj_snap_lines = lines
# NOTE(review): the X coordinate appears to come back inverted from the projection step — investigate there before compensating here.
coord = QPoint(x, y) coord = QPoint(x, y)
self.proj_snap_points.append(coord) self.proj_snap_points.append(coord)

View File

@ -205,7 +205,7 @@ class VTKWidget(QtWidgets.QWidget):
mirror_transform.SetMatrix(matrix) mirror_transform.SetMatrix(matrix)
mirror_transform.Scale(-1, -1, 1) # Inverting the original mirror look down mirror_transform.Scale(-1, -1, 1) # Inverting the original mirror look down
else:
mirror_transform.Scale(1, 1, 1) # This mirrors across the y-axis""" mirror_transform.Scale(1, 1, 1) # This mirrors across the y-axis"""
# Apply the transform to the polydata # Apply the transform to the polydata
@ -348,48 +348,87 @@ class VTKWidget(QtWidgets.QWidget):
return projected_mesh return projected_mesh
def compute_2d_coordinates(self, projected_mesh, normal):
    """Flatten a plane-projected mesh into 2D sketch coordinates.

    Rotates the mesh so that *normal* aligns with the +Z axis, stores the
    4x4 rotation matrix (row-major, flattened) in ``self.local_matrix`` for
    the inverse mapping, and returns the rotated points' (x, y) pairs.

    Parameters:
        projected_mesh: vtkPolyData whose points already lie on the plane.
        normal: 3-component plane normal (need not be unit length).

    Returns:
        list[tuple[float, float]]: one (x, y) pair per input point.
    """
    # Normalize the normal vector.
    normal = np.array(normal, dtype=float)
    normal = normal / np.linalg.norm(normal)

    # Build the rotation that maps `normal` onto +Z.
    transform = vtk.vtkTransform()
    transform.PostMultiply()  # apply transforms in the order specified

    rotation_axis = np.cross(normal, [0, 0, 1])
    # Clip the dot product: rounding can push it past +/-1 and make
    # arccos return NaN when the normal is (anti)parallel to Z.
    cos_angle = np.clip(np.dot(normal, [0, 0, 1]), -1.0, 1.0)
    angle = np.degrees(np.arccos(cos_angle))
    if np.linalg.norm(rotation_axis) > 1e-6:  # rotation needed?
        transform.RotateWXYZ(angle, rotation_axis[0], rotation_axis[1], rotation_axis[2])

    # Keep the flattened 4x4 matrix so project_2d_to_3d can invert it later.
    matrix = transform.GetMatrix()
    self.local_matrix = [matrix.GetElement(i, j) for i in range(4) for j in range(4)]

    # Apply the transform to the polydata.
    transform_filter = vtk.vtkTransformPolyDataFilter()
    transform_filter.SetInputData(projected_mesh)
    transform_filter.SetTransform(transform)
    transform_filter.Update()

    # After rotation the plane lies in XY, so Z can be dropped.
    points = transform_filter.GetOutput().GetPoints()
    xy_coordinates = []
    for i in range(points.GetNumberOfPoints()):
        point = points.GetPoint(i)
        xy_coordinates.append((point[0], point[1]))

    return xy_coordinates
def project_2d_to_3d(self, xy_coordinates, normal):
    """Inverse of ``compute_2d_coordinates``: lift 2D sketch points back to 3D.

    Applies the reverse of the rotation that aligned *normal* with +Z,
    so points drawn on the flattened sketch plane return to the original
    3D plane orientation.

    Parameters:
        xy_coordinates: iterable of (x, y) pairs in sketch-plane space.
        normal: 3-component plane normal (need not be unit length).

    Returns:
        list[tuple[float, float, float]]: the corresponding 3D points.
    """
    # Normalize the normal vector.
    normal = np.array(normal, dtype=float)
    normal = normal / np.linalg.norm(normal)

    # Build the reverse transform (negated angle of the forward rotation).
    reverse_transform = vtk.vtkTransform()
    reverse_transform.PostMultiply()  # apply transforms in the order specified

    # Same axis/angle computation as compute_2d_coordinates; clip the dot
    # product so rounding error cannot make arccos return NaN.
    rotation_axis = np.cross(normal, [0, 0, 1])
    cos_angle = np.clip(np.dot(normal, [0, 0, 1]), -1.0, 1.0)
    angle = np.degrees(np.arccos(cos_angle))
    if np.linalg.norm(rotation_axis) > 1e-6:  # rotation needed?
        reverse_transform.RotateWXYZ(-angle, rotation_axis[0], rotation_axis[1], rotation_axis[2])

    # Embed the 2D points at z=0 on the canonical sketch plane.
    points_2d = vtk.vtkPoints()
    for x, y in xy_coordinates:
        points_2d.InsertNextPoint(x, y, 0)

    polydata_2d = vtk.vtkPolyData()
    polydata_2d.SetPoints(points_2d)

    # Rotate the plane back to its original 3D orientation.
    transform_filter = vtk.vtkTransformPolyDataFilter()
    transform_filter.SetInputData(polydata_2d)
    transform_filter.SetTransform(reverse_transform)
    transform_filter.Update()

    transformed_points = transform_filter.GetOutput().GetPoints()
    xyz_coordinates = []
    for i in range(transformed_points.GetNumberOfPoints()):
        point = transformed_points.GetPoint(i)
        xyz_coordinates.append((point[0], point[1], point[2]))

    return xyz_coordinates
def add_normal_line(self, origin, normal, length=10.0, color=(1, 0, 0)): def add_normal_line(self, origin, normal, length=10.0, color=(1, 0, 0)):
# Normalize the normal vector # Normalize the normal vector
normal = np.array(normal) normal = np.array(normal)
@ -549,7 +588,6 @@ class VTKWidget(QtWidgets.QWidget):
# Extract 2D coordinates # Extract 2D coordinates
self.project_tosketch_edge = self.compute_2d_coordinates(projected_polydata, self.selected_normal) self.project_tosketch_edge = self.compute_2d_coordinates(projected_polydata, self.selected_normal)
#print("3d_points_proj", self.project_tosketch_edge)
# Create a mapper and actor for the projected data # Create a mapper and actor for the projected data
mapper = vtk.vtkPolyDataMapper() mapper = vtk.vtkPolyDataMapper()

35
main.py
View File

@ -501,22 +501,37 @@ class Geometry:
return math.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2) return math.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
def extrude_shape(self, points, length: float, angle, normal, centroid, symet: bool = True, invert: bool = False):
    """Extrude a 2D polygon into a 3D SDF body oriented along *normal*.

    The 2D outline is extruded along Z, centered on its extrusion axis,
    oriented so the axis matches *normal*, and finally translated along the
    normal so it sits at the sketch plane's distance from the origin.

    Parameters:
        points: 2D polygon vertices for the sketch outline.
        length: extrusion depth.
        angle: unused here (kept for interface compatibility).
        normal: sketch-plane normal (need not be unit length).
        centroid: a 3D point on the sketch plane (its projection onto the
            normal gives the plane's offset from the origin).
        symet: unused here (kept for interface compatibility).
        invert: unused here (kept for interface compatibility).

    Returns:
        The positioned 3D SDF object.
    """
    # Normalize the normal vector.
    normal = np.array(normal, dtype=float)
    normal = normal / np.linalg.norm(normal)

    # Build the 2D shape and extrude it along the Z-axis.
    f = polygon(points)
    f = f.extrude(length)

    # Center the body on its extrusion axis, then orient along the normal.
    f = f.translate((0, 0, length / 2))
    f = f.orient(normal)

    # The oriented body is centered at the origin, so the vector from its
    # center to the sketch centroid is simply `centroid`. Project that onto
    # the normal to slide the body out to the sketch plane.
    translation_along_normal = np.dot(np.asarray(centroid, dtype=float), normal) * normal
    f = f.translate(translation_along_normal)

    return f
def mirror_body(self, sdf_object3d): def mirror_body(self, sdf_object3d):