diff --git a/drawing_modules/draw_widget2d.py b/drawing_modules/draw_widget2d.py
index ce6fe34..cf2122b 100644
--- a/drawing_modules/draw_widget2d.py
+++ b/drawing_modules/draw_widget2d.py
@@ -57,6 +57,7 @@ class SketchWidget(QWidget):
         for point in lines:
             x, y = point
             self.proj_snap_lines = lines
+            # Careful: X might be getting inverted somewhere in the projection for some reason
            coord = QPoint(x, y)
             self.proj_snap_points.append(coord)
diff --git a/drawing_modules/vtk_widget.py b/drawing_modules/vtk_widget.py
index 4482439..772188c 100644
--- a/drawing_modules/vtk_widget.py
+++ b/drawing_modules/vtk_widget.py
@@ -205,7 +205,7 @@ class VTKWidget(QtWidgets.QWidget):
             mirror_transform.SetMatrix(matrix)
             mirror_transform.Scale(-1, -1, 1) # Inverting the original mirror look down
-        else:
+            mirror_transform.Scale(1, 1, 1) # This mirrors across the y-axis"""
 
         # Apply the transform to the polydata
@@ -348,48 +348,87 @@ class VTKWidget(QtWidgets.QWidget):
         return projected_mesh
 
     def compute_2d_coordinates(self, projected_mesh, normal):
-        # Compute centroid of projected points
-        center_of_mass = vtk.vtkCenterOfMass()
-        center_of_mass.SetInputData(projected_mesh)
-        center_of_mass.SetUseScalarsAsWeights(False)
-        center_of_mass.Update()
-        centroid = np.array(center_of_mass.GetCenter())
+        # Normalize the normal vector
+        normal = np.array(normal)
+        normal = normal / np.linalg.norm(normal)
 
-        # Create a coordinate system on the plane
-        z_axis = np.array(normal)
-        x_axis = np.cross(z_axis, [0, 0, 1])
-        if np.allclose(x_axis, 0):
-            x_axis = np.cross(z_axis, [0, 1, 0])
-        x_axis = x_axis / np.linalg.norm(x_axis)
-        y_axis = np.cross(z_axis, x_axis)
+        # Create a vtkTransform
+        transform = vtk.vtkTransform()
+        transform.PostMultiply() # This ensures transforms are applied in the order we specify
 
-        # Create rotation matrix (3x3)
-        rotation_matrix = np.column_stack((x_axis, y_axis, z_axis))
+        # Rotate so that the normal aligns with the Z-axis
+        rotation_axis = np.cross(normal, [0, 0, 1])
+        angle = np.arccos(np.dot(normal, [0, 0, 1])) * 180 / np.pi # Convert to degrees
 
-        # Store the full transformation for later use if needed
-        full_transform = np.eye(4)
-        full_transform[:3, :3] = rotation_matrix
-        full_transform[:3, 3] = centroid
-        self.local_matrix = full_transform
+        if np.linalg.norm(rotation_axis) > 1e-6: # Check if rotation is needed
+            transform.RotateWXYZ(angle, rotation_axis[0], rotation_axis[1], rotation_axis[2])
 
-        # Transform points to 2D coordinates
-        points = projected_mesh.GetPoints()
+        # Get the transformation matrix
+        matrix = transform.GetMatrix()
+        self.local_matrix = [matrix.GetElement(i, j) for i in range(4) for j in range(4)]
+
+        # Apply the transform to the polydata
+        transformFilter = vtk.vtkTransformPolyDataFilter()
+        transformFilter.SetInputData(projected_mesh)
+        transformFilter.SetTransform(transform)
+        transformFilter.Update()
+
+        # Get the transformed points
+        transformed_polydata = transformFilter.GetOutput()
+        points = transformed_polydata.GetPoints()
+
+        # Extract 2D coordinates
         xy_coordinates = []
         for i in range(points.GetNumberOfPoints()):
-            point = np.array(points.GetPoint(i))
-
-            # Translate point to origin
-            point_centered = point - centroid
-
-            # Rotate point
-            rotated_point = np.dot(rotation_matrix.T, point_centered)
-
-            # Store only x and y coordinates
-            xy_coordinates.append((rotated_point[0], rotated_point[1]))
+            point = points.GetPoint(i)
+            xy_coordinates.append((point[0], point[1]))
 
         return xy_coordinates
 
+    def project_2d_to_3d(self, xy_coordinates, normal):
+        # Normalize the normal vector
+        normal = np.array(normal)
+        normal = normal / np.linalg.norm(normal)
+
+        # Create a vtkTransform for the reverse transformation
+        reverse_transform = vtk.vtkTransform()
+        reverse_transform.PostMultiply() # This ensures transforms are applied in the order we specify
+
+        # Compute the rotation axis and angle (same as in compute_2d_coordinates)
+        rotation_axis = np.cross(normal, [0, 0, 1])
+        angle = np.arccos(np.dot(normal, [0, 0, 1])) * 180 / np.pi # Convert to degrees
+
+        if np.linalg.norm(rotation_axis) > 1e-6: # Check if rotation is needed
+            # Apply the inverse rotation
+            reverse_transform.RotateWXYZ(-angle, rotation_axis[0], rotation_axis[1], rotation_axis[2])
+
+        # Create vtkPoints to store the 2D points
+        points_2d = vtk.vtkPoints()
+        for x, y in xy_coordinates:
+            points_2d.InsertNextPoint(x, y, 0) # Z-coordinate is 0 for 2D points
+
+        # Create a polydata with the 2D points
+        polydata_2d = vtk.vtkPolyData()
+        polydata_2d.SetPoints(points_2d)
+
+        # Apply the reverse transform to the polydata
+        transform_filter = vtk.vtkTransformPolyDataFilter()
+        transform_filter.SetInputData(polydata_2d)
+        transform_filter.SetTransform(reverse_transform)
+        transform_filter.Update()
+
+        # Get the transformed points (now in 3D)
+        transformed_polydata = transform_filter.GetOutput()
+        transformed_points = transformed_polydata.GetPoints()
+
+        # Extract 3D coordinates
+        xyz_coordinates = []
+        for i in range(transformed_points.GetNumberOfPoints()):
+            point = transformed_points.GetPoint(i)
+            xyz_coordinates.append((point[0], point[1], point[2]))
+
+        return xyz_coordinates
+
     def add_normal_line(self, origin, normal, length=10.0, color=(1, 0, 0)):
         # Normalize the normal vector
         normal = np.array(normal)
@@ -549,7 +588,6 @@ class VTKWidget(QtWidgets.QWidget):
 
         # Extract 2D coordinates
         self.project_tosketch_edge = self.compute_2d_coordinates(projected_polydata, self.selected_normal)
-        #print("3d_points_proj", self.project_tosketch_edge)
 
         # Create a mapper and actor for the projected data
         mapper = vtk.vtkPolyDataMapper()
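For reference, compute_2d_coordinates and project_2d_to_3d together amount to one rotation: spin the sketch plane so its normal lands on +Z, drop the Z component to get 2D coordinates, then apply the inverse rotation (with z = 0) to go back. The sketch below reproduces that round trip in plain NumPy rather than vtkTransform; the helper name rotation_to_z and the sample points are illustrative only, and exact recovery assumes the points lie on the plane through the origin, which is also what project_2d_to_3d assumes by re-inserting them at z = 0.

    import numpy as np

    def rotation_to_z(normal):
        # Rodrigues' formula: rotation matrix that maps `normal` onto +Z,
        # mirroring the RotateWXYZ(angle, normal x Z) call in compute_2d_coordinates.
        n = np.asarray(normal, dtype=float)
        n = n / np.linalg.norm(n)
        axis = np.cross(n, [0.0, 0.0, 1.0])
        s = np.linalg.norm(axis)                # sin(angle)
        c = float(np.dot(n, [0.0, 0.0, 1.0]))   # cos(angle)
        if s < 1e-6:                            # normal already (anti)parallel to Z
            return np.eye(3) if c > 0 else np.diag([1.0, -1.0, -1.0])
        k = axis / s
        K = np.array([[0, -k[2], k[1]],
                      [k[2], 0, -k[0]],
                      [-k[1], k[0], 0]])
        return np.eye(3) + s * K + (1.0 - c) * (K @ K)

    # Illustrative points lying on the plane through the origin with this normal
    normal = [0.0, 1.0, 1.0]
    pts_3d = np.array([[1.0, 2.0, -2.0], [0.5, -1.0, 1.0]])

    R = rotation_to_z(normal)
    pts_2d = (pts_3d @ R.T)[:, :2]                        # rotate, drop Z (2D sketch coords)
    pts_back = np.c_[pts_2d, np.zeros(len(pts_2d))] @ R   # re-add z=0, apply inverse rotation
    assert np.allclose(pts_back, pts_3d)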
+ """ + # Normalize the normal vector + normal = np.array(normal) + normal = normal / np.linalg.norm(normal) + + # Create the 2D shape f = polygon(points) + + # Extrude the shape along the Z-axis f = f.extrude(length) - # Calculate the offset vector (half the length in the direction of the normal) - offset = [n * (length / 2) for n in normal] + # Center the shape along its extrusion axis + f = f.translate((0, 0, length / 2)) - # Apply the offset in the direction of the normal - f = f.translate(offset) - - # Apply the centroid translation - #f = f.translate(centroid) - - # Apply the orientation + # Orient the shape along the normal vector f = f.orient(normal) + # Calculate the current center of the shape + shape_center = [0,0,0] + + # Calculate the vector from the shape's center to the centroid + center_to_centroid = np.array(centroid) - np.array(shape_center) + + # Project this vector onto the normal to get the required translation along the normal + translation_along_normal = np.dot(center_to_centroid, normal) * normal + + # Translate the shape along the normal + f = f.translate(translation_along_normal) + return f def mirror_body(self, sdf_object3d):