SeekTransformModule.py

import os
import numpy as np
import scipy
from scipy.spatial.distance import cdist
from scipy.spatial.transform import Rotation as R
import slicer
from DICOMLib import DICOMUtils
from collections import deque
import vtk
from slicer.ScriptedLoadableModule import *
import qt
import matplotlib.pyplot as plt
import csv

#exec(open("C:/Users/lkomar/Documents/Prostata/FirstTryRegister.py").read())
class SeekTransformModule(ScriptedLoadableModule):
    """
    Module description shown in the module panel.
    """
    def __init__(self, parent):
        ScriptedLoadableModule.__init__(self, parent)
        self.parent.title = "Seek Transform module"
        self.parent.categories = ["Image Processing"]
        self.parent.contributors = ["Luka Komar (Onkološki Inštitut Ljubljana, Fakulteta za Matematiko in Fiziko Ljubljana)"]
        self.parent.helpText = "This module applies rigid transformations to CBCT volumes based on reference CT volumes."
        self.parent.acknowledgementText = "Supported by doc. Primož Peterlin & prof. Andrej Studen"
class SeekTransformModuleWidget(ScriptedLoadableModuleWidget):
    """
    GUI of the module.
    """
    def setup(self):
        ScriptedLoadableModuleWidget.setup(self)

        # Dropdown menu for selecting the rotation method
        self.rotationMethodComboBox = qt.QComboBox()
        self.rotationMethodComboBox.addItems(["Kabsch", "Horn", "Iterative Closest Point (Kabsch)"])
        self.layout.addWidget(self.rotationMethodComboBox)

        # Checkboxes for the individual transform components
        self.rotationCheckBox = qt.QCheckBox("Rotation")
        self.rotationCheckBox.setChecked(True)
        self.layout.addWidget(self.rotationCheckBox)

        self.translationCheckBox = qt.QCheckBox("Translation")
        self.translationCheckBox.setChecked(True)
        self.layout.addWidget(self.translationCheckBox)

        self.scalingCheckBox = qt.QCheckBox("Scaling")
        self.scalingCheckBox.setChecked(True)
        self.layout.addWidget(self.scalingCheckBox)

        self.writefileCheckBox = qt.QCheckBox("Write distances to csv file")
        self.writefileCheckBox.setChecked(True)
        self.layout.addWidget(self.writefileCheckBox)

        # Apply button
        self.applyButton = qt.QPushButton("Find markers and transform")
        self.applyButton.toolTip = "Finds markers, computes optimal rigid transform and applies it to CBCT volumes."
        self.applyButton.enabled = True
        self.layout.addWidget(self.applyButton)

        # Connect the button to the logic
        self.applyButton.connect('clicked(bool)', self.onApplyButton)
        self.layout.addStretch(1)

    def onApplyButton(self):
        logic = MyTransformModuleLogic()
        selectedMethod = self.rotationMethodComboBox.currentText  # selected rotation-computation method

        # Read the checkbox states
        applyRotation = self.rotationCheckBox.isChecked()
        applyTranslation = self.translationCheckBox.isChecked()
        applyScaling = self.scalingCheckBox.isChecked()
        writefilecheck = self.writefileCheckBox.isChecked()

        # Call the logic with the selected settings
        logic.run(selectedMethod, applyRotation, applyTranslation, applyScaling, writefilecheck)
class MyTransformModuleLogic(ScriptedLoadableModuleLogic):
    """
    Core logic of the module.
    """
    def run(self, selectedMethod, applyRotation, applyTranslation, applyScaling, writefilecheck):
        print("Calculating...")
        def group_points(points, threshold):
            # Group points that are close to each other
            grouped_points = []
            while points:
                point = points.pop()  # Take one point from the list
                group = [point]  # Start a new group
                if points:  # guard: cdist needs a non-empty second point set
                    # Find all points close to this one
                    distances = cdist([point], points)  # Distances from this point to the remaining ones
                    close_points = [i for i, dist in enumerate(distances[0]) if dist < threshold]
                    # Add the close points to the group
                    group.extend([points[i] for i in close_points])
                    # Remove the grouped points from the list
                    points = [p for i, p in enumerate(points) if i not in close_points]
                # Add the group to the result
                grouped_points.append(group)
            return grouped_points
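        # Hedged usage sketch (illustrative only, kept as a comment so module behaviour is
        # unchanged; the coordinates are made up):
        #   group_points([(0, 0, 0), (0.5, 0, 0), (10, 0, 0)], threshold=2)
        #   # → [[(10, 0, 0)], [(0.5, 0, 0), (0, 0, 0)]]  (grouping order follows pop() order)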
        def region_growing(image_data, seed, intensity_threshold, max_distance):
            dimensions = image_data.GetDimensions()
            visited = set()
            region = []
            queue = deque([seed])
            while queue:
                x, y, z = queue.popleft()
                if (x, y, z) in visited:
                    continue
                visited.add((x, y, z))
                voxel_value = image_data.GetScalarComponentAsDouble(x, y, z, 0)
                if voxel_value >= intensity_threshold:
                    region.append((x, y, z))
                    # Add neighbors within bounds
                    for dx, dy, dz in [(1, 0, 0), (-1, 0, 0), (0, 1, 0), (0, -1, 0), (0, 0, 1), (0, 0, -1)]:
                        nx, ny, nz = x + dx, y + dy, z + dz
                        if 0 <= nx < dimensions[0] and 0 <= ny < dimensions[1] and 0 <= nz < dimensions[2]:
                            if (nx, ny, nz) not in visited:
                                queue.append((nx, ny, nz))
            return region
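        # Hedged usage sketch (illustrative only; `image_data` stands for the vtkImageData of a
        # loaded volume and the seed indices are made up). Note that `max_distance` is accepted
        # by this implementation but not used to limit the growth:
        #   bright_voxels = region_growing(image_data, (100, 200, 50), intensity_threshold=3000, max_distance=9)
        #   # → list of connected (x, y, z) voxel indices with value >= intensity_threshold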
        def compute_optimal_scaling_per_axis(moving_points, fixed_points):
            """Computes optimal scaling factors for each axis (X, Y, Z) to align moving points (CBCT) to fixed points (CT).

            Args:
                moving_points (list of lists): List of (x, y, z) moving points (CBCT).
                fixed_points (list of lists): List of (x, y, z) fixed points (CT).

            Returns:
                tuple: Scaling factors (sx, sy, sz).
            """
            moving_points_np = np.array(moving_points)
            fixed_points_np = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving_points_np, axis=0)
            centroid_fixed = np.mean(fixed_points_np, axis=0)

            # Compute absolute distances of each point from its centroid along each axis
            distances_moving = np.abs(moving_points_np - centroid_moving)
            distances_fixed = np.abs(fixed_points_np - centroid_fixed)

            # Compute scaling factors as the ratio of mean absolute distances per axis
            scale_factors = np.mean(distances_fixed, axis=0) / np.mean(distances_moving, axis=0)

            return tuple(scale_factors)
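        # Hedged worked example (illustrative only; the point sets are made up). The CT set below
        # is the CBCT set stretched 2x along X, so the per-axis ratios come out as (2, 1, 1):
        #   cbct = [[0, 0, 0], [2, 0, 0], [0, 2, 0], [0, 0, 2]]
        #   ct = [[0, 0, 0], [4, 0, 0], [0, 2, 0], [0, 0, 2]]
        #   compute_optimal_scaling_per_axis(cbct, ct)  # ≈ (2.0, 1.0, 1.0)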
        def compute_scaling(cbct_points, scaling_factors):
            """Applies non-uniform scaling to CBCT points.

            Args:
                cbct_points (list of lists): List of (x, y, z) points.
                scaling_factors (tuple): Scaling factors (sx, sy, sz) for each axis.

            Returns:
                list: Scaled CBCT points.
            """
            sx, sy, sz = scaling_factors  # Extract scaling factors
            scaling_matrix = np.diag([sx, sy, sz])  # Create diagonal scaling matrix
            cbct_points_np = np.array(cbct_points)  # Convert to numpy array
            scaled_points = cbct_points_np @ scaling_matrix.T  # Apply scaling
            return scaled_points.tolist()  # Convert back to list
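        # Hedged usage sketch (illustrative only; values are made up):
        #   compute_scaling([[1, 2, 3]], (2.0, 1.0, 0.5))  # → [[2.0, 2.0, 1.5]]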
        def compute_Kabsch_rotation(moving_points, fixed_points):
            """
            Computes the optimal rotation matrix to align moving_points to fixed_points.

            Parameters:
                moving_points (list or ndarray): List of points to be rotated (CBCT).
                fixed_points (list or ndarray): List of reference points (CT).

            Returns:
                ndarray: Optimal rotation matrix.
            """
            assert len(moving_points) == len(fixed_points), "Point lists must be the same length."

            # Convert to numpy arrays
            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving, axis=0)
            centroid_fixed = np.mean(fixed, axis=0)

            # Center the points
            moving_centered = moving - centroid_moving
            fixed_centered = fixed - centroid_fixed

            # Compute covariance matrix
            H = np.dot(moving_centered.T, fixed_centered)

            # SVD decomposition
            U, _, Vt = np.linalg.svd(H)
            Rotate_optimal = np.dot(Vt.T, U.T)

            # Correct improper rotation (reflection)
            if np.linalg.det(Rotate_optimal) < 0:
                Vt[-1, :] *= -1
                Rotate_optimal = np.dot(Vt.T, U.T)

            return Rotate_optimal
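        # Hedged self-check (illustrative only; points and rotation are made up). Rotating a
        # noise-free point set by a known rotation and running Kabsch should recover its inverse:
        #   fixed = [[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]]
        #   Rz = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]])      # 90° about Z
        #   moving = (np.array(fixed) @ Rz.T).tolist()             # moving_i = Rz @ fixed_i
        #   np.allclose(compute_Kabsch_rotation(moving, fixed), Rz.T)  # True: maps moving back onto fixed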
        def compute_Horn_rotation(moving_points, fixed_points):
            """
            Computes the optimal rotation matrix using quaternions.

            Parameters:
                moving_points (list or ndarray): List of points to be rotated.
                fixed_points (list or ndarray): List of reference points.

            Returns:
                ndarray: Optimal rotation matrix.
            """
            assert len(moving_points) == len(fixed_points), "Point lists must be the same length."

            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving, axis=0)
            centroid_fixed = np.mean(fixed, axis=0)

            # Center the points
            moving_centered = moving - centroid_moving
            fixed_centered = fixed - centroid_fixed

            # Construct the cross-dispersion matrix
            M = np.dot(moving_centered.T, fixed_centered)

            # Construct the N matrix for quaternion solution
            A = M - M.T
            delta = np.array([A[1, 2], A[2, 0], A[0, 1]])
            trace = np.trace(M)

            N = np.zeros((4, 4))
            N[0, 0] = trace
            N[1:, 0] = delta
            N[0, 1:] = delta
            N[1:, 1:] = M + M.T - np.eye(3) * trace

            # Compute the eigenvector corresponding to the maximum eigenvalue
            eigvals, eigvecs = np.linalg.eigh(N)
            q_optimal = eigvecs[:, np.argmax(eigvals)]  # Optimal quaternion

            # Convert quaternion to rotation matrix
            w, x, y, z = q_optimal
            R = np.array([
                [1 - 2*(y**2 + z**2), 2*(x*y - z*w), 2*(x*z + y*w)],
                [2*(x*y + z*w), 1 - 2*(x**2 + z**2), 2*(y*z - x*w)],
                [2*(x*z - y*w), 2*(y*z + x*w), 1 - 2*(x**2 + y**2)]
            ])
            return R
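        # Hedged self-check (illustrative only): for clean, noise-free marker sets the quaternion
        # solution should match the Kabsch (SVD) solution up to numerical precision, e.g. with the
        # `moving`/`fixed` arrays from the Kabsch sketch above:
        #   np.allclose(compute_Horn_rotation(moving, fixed), compute_Kabsch_rotation(moving, fixed))  # True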
        def icp_algorithm(moving_points, fixed_points, max_iterations=100, tolerance=1e-5):
            """
            Iterative Closest Point (ICP) algorithm to align moving_points to fixed_points.

            Parameters:
                moving_points (list or ndarray): List of points to be aligned.
                fixed_points (list or ndarray): List of reference points.
                max_iterations (int): Maximum number of iterations.
                tolerance (float): Convergence tolerance.

            Returns:
                ndarray: Transformed moving points.
                ndarray: Optimal rotation matrix.
                ndarray: Optimal translation vector.
            """
            # Convert to numpy arrays
            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Initialize transformation
            R = np.eye(3)  # Identity matrix for rotation
            t = np.zeros(3)  # Zero vector for translation
            prev_error = np.inf  # Initialize previous error to a large value

            for iteration in range(max_iterations):
                # Step 1: Find the nearest neighbors (correspondences)
                distances = np.linalg.norm(moving[:, np.newaxis] - fixed, axis=2)
                nearest_indices = np.argmin(distances, axis=1)
                nearest_points = fixed[nearest_indices]

                # Step 2: Compute the optimal rotation and translation
                R_new = compute_Kabsch_rotation(moving, nearest_points)
                centroid_moving = np.mean(moving, axis=0)
                centroid_fixed = np.mean(nearest_points, axis=0)
                t_new = centroid_fixed - np.dot(R_new, centroid_moving)

                # Step 3: Apply the transformation
                moving = np.dot(moving, R_new.T) + t_new

                # Update the cumulative transformation
                R = np.dot(R_new, R)
                t = np.dot(R_new, t) + t_new

                # Step 4: Check for convergence
                mean_error = np.mean(np.linalg.norm(moving - nearest_points, axis=1))
                if np.abs(prev_error - mean_error) < tolerance:
                    print(f"ICP converged after {iteration + 1} iterations.")
                    break
                prev_error = mean_error
            else:
                print(f"ICP reached maximum iterations ({max_iterations}).")

            return moving, R, t
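        # Hedged usage sketch (illustrative only; the point sets are made up). For a cloud that is
        # a pure translation of the reference, ICP should return an identity rotation and the
        # opposite shift:
        #   fixed = np.array([[0., 0., 0.], [10., 0., 0.], [0., 10., 0.]])
        #   moving = fixed + np.array([2., -1., 3.])
        #   aligned, R_icp, t_icp = icp_algorithm(moving, fixed)
        #   # R_icp ≈ np.eye(3), t_icp ≈ [-2, 1, -3], aligned ≈ fixed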
        def compute_translation(moving_points, fixed_points, rotation_matrix):
            """
            Computes the translation vector to align moving_points to fixed_points given a rotation matrix.

            Parameters:
                moving_points (list or ndarray): List of points to be translated.
                fixed_points (list or ndarray): List of reference points.
                rotation_matrix (ndarray): Rotation matrix.

            Returns:
                ndarray: Translation vector.
            """
            # Convert to numpy arrays
            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving, axis=0)
            centroid_fixed = np.mean(fixed, axis=0)

            # Compute translation (column-vector convention, consistent with icp_algorithm and create_vtk_transform)
            translation = centroid_fixed - np.dot(rotation_matrix, centroid_moving)

            return translation
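        # Hedged usage sketch (illustrative only; values are made up). With an identity rotation the
        # translation is simply the difference of centroids:
        #   compute_translation([[0, 0, 0]], [[5, 0, -2]], np.eye(3))  # → array([ 5.,  0., -2.])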
        def create_vtk_transform(rotation_matrix, translation_vector):
            """
            Creates a vtkTransform from a rotation matrix and a translation vector.
            """
            # Create a 4x4 transformation matrix
            transform_matrix = np.eye(4)  # Start with an identity matrix
            transform_matrix[:3, :3] = rotation_matrix  # Set rotation part
            transform_matrix[:3, 3] = translation_vector  # Set translation part

            # Convert to vtkMatrix4x4
            vtk_matrix = vtk.vtkMatrix4x4()
            for i in range(4):
                for j in range(4):
                    vtk_matrix.SetElement(i, j, transform_matrix[i, j])
            #print("Transform matrix:")
            #for i in range(4):
            #    print(" ".join(f"{vtk_matrix.GetElement(i, j):.6f}" for j in range(4)))

            # Create vtkTransform and set the matrix
            transform = vtk.vtkTransform()
            transform.SetMatrix(vtk_matrix)
            return transform
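        # Hedged usage sketch (illustrative only; values are made up). A pure +10 mm shift along Y:
        #   t = create_vtk_transform(np.eye(3), np.array([0.0, 10.0, 0.0]))
        #   t.TransformPoint([1.0, 2.0, 3.0])  # → (1.0, 12.0, 3.0)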
        def detect_points_region_growing(volume_name, yesCbct, intensity_threshold=3000, x_min=90, x_max=380, y_min=190, y_max=380, z_min=80, z_max=140, max_distance=9, centroid_merge_threshold=5):
            volume_node = slicer.util.getNode(volume_name)
            if not volume_node:
                raise RuntimeError(f"Volume {volume_name} not found.")

            image_data = volume_node.GetImageData()
            matrix = vtk.vtkMatrix4x4()
            volume_node.GetIJKToRASMatrix(matrix)
            dimensions = image_data.GetDimensions()
            #detected_regions = []

            if yesCbct:  # CBCT or CT?
                valid_x_min, valid_x_max = 0, dimensions[0] - 1
                valid_y_min, valid_y_max = 0, dimensions[1] - 1
                valid_z_min, valid_z_max = 0, dimensions[2] - 1
            else:
                valid_x_min, valid_x_max = max(x_min, 0), min(x_max, dimensions[0] - 1)
                valid_y_min, valid_y_max = max(y_min, 0), min(y_max, dimensions[1] - 1)
                valid_z_min, valid_z_max = max(z_min, 0), min(z_max, dimensions[2] - 1)

            visited = set()

            def grow_region(x, y, z):
                if (x, y, z) in visited:
                    return None
                voxel_value = image_data.GetScalarComponentAsDouble(x, y, z, 0)
                if voxel_value < intensity_threshold:
                    return None
                region = region_growing(image_data, (x, y, z), intensity_threshold, max_distance=max_distance)
                if region:
                    for point in region:
                        visited.add(tuple(point))
                    return region
                return None

            regions = []
            for z in range(valid_z_min, valid_z_max + 1):
                for y in range(valid_y_min, valid_y_max + 1):
                    for x in range(valid_x_min, valid_x_max + 1):
                        region = grow_region(x, y, z)
                        if region:
                            regions.append(region)

            # Collect centroids using intensity-weighted average
            centroids = []
            for region in regions:
                points = np.array([matrix.MultiplyPoint([*point, 1])[:3] for point in region])
                intensities = np.array([image_data.GetScalarComponentAsDouble(*point, 0) for point in region])
                if intensities.sum() > 0:
                    weighted_centroid = np.average(points, axis=0, weights=intensities)
                    max_intensity = intensities.max()
                    centroids.append((np.round(weighted_centroid, 2), max_intensity))

            unique_centroids = []
            for centroid, intensity in centroids:
                if not any(np.linalg.norm(centroid - existing_centroid) < centroid_merge_threshold for existing_centroid, _ in unique_centroids):
                    unique_centroids.append((centroid, intensity))

            markups_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"Markers_{volume_name}")
            for centroid, intensity in unique_centroids:
                markups_node.AddControlPoint(*centroid)
                markups_node.SetDisplayVisibility(False)
                #print(f"Detected Centroid (RAS): {centroid}, Max Intensity: {intensity}")

            return unique_centroids
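        # Hedged usage sketch (illustrative only; "CT_1" is a placeholder for a volume that is
        # already loaded in the Slicer scene):
        #   centroids = detect_points_region_growing("CT_1", yesCbct=False, intensity_threshold=3000)
        #   # Each entry is (weighted centroid in RAS as np.array, max intensity of the region).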
        def find_table_top_z(ct_volume_name, writefilecheck, yesCbct):
            """
            Finds the height of the table's upper edge in a CT/CBCT volume and adds markers.
            :param ct_volume_name: Name of the volume in Slicer
            :param writefilecheck: If True, the result is written to a CSV file
            :param yesCbct: If True, CBCT thresholds are used
            :return: (mm_offset, pixel_offset) of the table top relative to the image centre, or None if not found
            """
            # Get the volume
            ct_volume_node = slicer.util.getNode(ct_volume_name)
            image_data = ct_volume_node.GetImageData()
            spacing = ct_volume_node.GetSpacing()
            dims = image_data.GetDimensions()
            #origin = ct_volume_node.GetOrigin()

            # Convert the volume to a numpy array
            np_array = slicer.util.arrayFromVolume(ct_volume_node)

            # Compute the middle IJK coordinates
            mid_ijk = [dims[0] // 2, dims[1] // 2, dims[2] // 2]
            # Make sure the IJK indices stay within the volume bounds
            mid_ijk = [max(0, min(dims[i] - 1, mid_ijk[i])) for i in range(3)]

            # IJK → RAS conversion
            ijkToRasMatrix = vtk.vtkMatrix4x4()
            ct_volume_node.GetIJKToRASMatrix(ijkToRasMatrix)
            #mid_ras = np.array(ijkToRasMatrix.MultiplyPoint([*mid_ijk, 1]))[:3]

            # Middle Z slice
            mid_z_voxel = mid_ijk[2]
            slice_data = np_array[mid_z_voxel, :, :]  # (Y, X)

            # Middle column
            mid_x_voxel = mid_ijk[0] - 15  # 15 pixels left of the centre so we do not measure at the spine
            column_values = slice_data[:, mid_x_voxel]  # Y direction

            # Add a marker in RAS coordinates
            #mid_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"Sredina_{ct_volume_name}")
            #mid_node.AddControlPoint(mid_ras)

            # Choose the threshold depending on CBCT or CT
            threshold = -300 if yesCbct else -100  # for CBCT look for -300, for CT use -100

            # Find the table edge (second rise)
            previous_value = -1000
            edge_count = 0
            table_top_y = None
            min_jump = 100 if yesCbct else 50  # Minimum intensity jump for CBCT and CT

            for y in range(len(column_values) - 1, -1, -1):  # From the bottom upwards
                intensity = column_values[y]
                #if column_values[y] > threshold and previous_value <= threshold:
                if (intensity - previous_value) > min_jump and intensity > threshold:  # jump detection instead of a plain threshold test
                    if yesCbct:
                        table_top_y = y + 1  # +1 so we are not already inside the torso
                        #print(f"Table top found at Y = {table_top_y}")  # CBCT
                        break
                    if edge_count == 0 or (edge_count == 1 and previous_value < -200):  # only count a new edge after the intensity dropped back below -200
                        edge_count += 1
                        #print(f"Table edge detected at X, Y, Z = {mid_x_voxel}, {y}, {mid_z_voxel}")
                        if edge_count == 2:  # Second rise = table top
                            table_top_y = y + 1
                            #print(f"Table top found at Y = {table_top_y}")
                            break
                previous_value = column_values[y]

            if table_top_y is None:
                print("❌ Table top edge was not found!")
                return None

            # Y IJK → RAS conversion
            table_ijk = [mid_x_voxel, table_top_y, mid_z_voxel]
            table_ras = np.array(ijkToRasMatrix.MultiplyPoint([*table_ijk, 1]))[:3]

            # Add a marker for the table height
            table_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"VišinaMize_{ct_volume_name}")
            table_node.AddControlPoint(table_ras)
            table_node.SetDisplayVisibility(False)

            # Height in mm
            image_center_y = dims[1] // 2
            pixel_offset = table_top_y - image_center_y
            mm_offset = pixel_offset * spacing[1]
            #print(f"📏 The table is {abs(mm_offset):.2f} mm {'below' if mm_offset > 0 else 'above'} the centre.")
            #print(f"📏 The table is {abs(pixel_offset)} pixels {'below' if pixel_offset > 0 else 'above'} the centre.")

            # Save to CSV
            if writefilecheck:
                file_path = os.path.join(os.path.dirname(__file__), "heightdata.csv")
                with open(file_path, mode='a', newline='') as file:
                    writer = csv.writer(file)
                    modality = "CBCT " if yesCbct else "CT "
                    writer.writerow([modality, ct_volume_name, f" Upper part of table detected at Z = {mm_offset:.2f} mm, {pixel_offset} pixels"])

            return mm_offset, pixel_offset
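        # Hedged usage sketch (illustrative only; "CT_1" is a placeholder volume name). Note the
        # function returns None when no table edge is found, otherwise (mm_offset, pixel_offset):
        #   result = find_table_top_z("CT_1", writefilecheck=False, yesCbct=False)
        #   if result is not None:
        #       mm_offset, pixel_offset = result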
        # Global lists for the final statistics
        prostate_size_est = []
        ctcbct_distance = []

        # Get the SubjectHierarchyNode
        shNode = slicer.vtkMRMLSubjectHierarchyNode.GetSubjectHierarchyNode(slicer.mrmlScene)
        studyItems = vtk.vtkIdList()
        shNode.GetItemChildren(shNode.GetSceneItemID(), studyItems)

        for i in range(studyItems.GetNumberOfIds()):
            studyItem = studyItems.GetId(i)

            # **Local** lists, reset for every study
            cbct_list = []
            ct_list = []
            volume_points_dict = {}
            CT_offset = 0

            # Get child items of the study item
            volumeItems = vtk.vtkIdList()
            shNode.GetItemChildren(studyItem, volumeItems)

            # Iterate over all volumes in the study
            for j in range(volumeItems.GetNumberOfIds()):
                intermediateItem = volumeItems.GetId(j)

                finalVolumeItems = vtk.vtkIdList()
                shNode.GetItemChildren(intermediateItem, finalVolumeItems)  # Search one level deeper!

                for k in range(finalVolumeItems.GetNumberOfIds()):
                    volumeItem = finalVolumeItems.GetId(k)
                    volumeNode = shNode.GetItemDataNode(volumeItem)

                    dicomUIDs = volumeNode.GetAttribute("DICOM.instanceUIDs")
                    if not dicomUIDs:
                        print("❌ This is an NRRD volume!")
                        continue  # Skip if this is not a DICOM volume

                    volumeName = volumeNode.GetName()
                    imageItem = shNode.GetItemByDataNode(volumeNode)
                    modality = shNode.GetItemAttribute(imageItem, "DICOM.Modality")  # works!
                    #dimensions = volumeNode.GetImageData().GetDimensions()
                    #spacing = volumeNode.GetSpacing()
                    #print(f"Volume {volumeNode.GetName()} - Dimensions: {dimensions}, Spacing: {spacing}")

                    if modality != "CT":
                        print("Not a CT")
                        continue  # Skip if this is not a CT

                    # Check that the volume is present in the scene
                    if not slicer.mrmlScene.IsNodePresent(volumeNode):
                        print(f"Volume {volumeName} not present in the scene.")
                        continue

                    # Check the manufacturer (DICOM metadata)
                    manufacturer = shNode.GetItemAttribute(imageItem, 'DICOM.Manufacturer')
                    #manufacturer = volumeNode.GetAttribute("DICOM.Manufacturer")
                    #manufacturer = slicer.dicomDatabase.fileValue(uid, "0008,0070")
                    #print(manufacturer)

                    # Decide whether this is a CBCT or a CT
                    if "varian" in manufacturer.lower() or "elekta" in manufacturer.lower():
                        cbct_list.append(volumeName)
                        scan_type = "CBCT"
                        yesCbct = True
                    else:  # Siemens or Philips
                        ct_list.append(volumeName)
                        scan_type = "CT"
                        yesCbct = False

                    if volumeNode and volumeNode.IsA("vtkMRMLScalarVolumeNode"):
                        print(f"✔️ {scan_type} {volumeNode.GetName()} (ID: {volumeItem})")
                    if not volumeNode or not volumeNode.IsA("vtkMRMLScalarVolumeNode"):
                        print("Can't find volumeNode")
                        #continue  # Skip if this is not a valid volume

                    mm_offset, pixel_offset = find_table_top_z(volumeName, writefilecheck, yesCbct)
                    if scan_type == "CT":
                        CT_offset = pixel_offset  # Height of the CT table top relative to the image centre
                    else:
                        # Align the CBCT with the CT height
                        CBCT_offset = pixel_offset
                        alignment_offset = CT_offset - CBCT_offset
                        print(f"Aligning CBCT with CT. Offset: {alignment_offset}")

                        # Use alignment_offset to shift the CBCT
                        transform = vtk.vtkTransform()
                        transform.Translate(0, alignment_offset, 0)  # Shift along the y axis only (height)

                        # Apply the transform to the volume, i.e. align it by height
                        transformNode = slicer.vtkMRMLTransformNode()
                        slicer.mrmlScene.AddNode(transformNode)
                        transformNode.SetAndObserveTransformToParent(transform)
                        volumeNode.SetAndObserveTransformNodeID(transformNode.GetID())  # Attach the transform to the CBCT
                        slicer.mrmlScene.RemoveNode(transformNode)  # Remove the transform node from the scene

                    # Detect points in the volume
                    grouped_points = detect_points_region_growing(volumeName, yesCbct, intensity_threshold=3000)
                    #print(f"Populating volume_points_dict with key ('{scan_type}', '{volumeName}')")
                    volume_points_dict[(scan_type, volumeName)] = grouped_points
                    #print(volume_points_dict)  # Check if the key is correctly added
            # If the study contains both volume types (CBCT and CT)
            if cbct_list and ct_list:
                ct_volume_name = ct_list[0]  # Use the first CT as the reference
                print(f"\nProcessing CT: {ct_volume_name}")
                #yesCbct = False
                ct_points = [centroid for centroid, _ in volume_points_dict[("CT", ct_volume_name)]]

                if len(ct_points) < 3:
                    print(f"CT volume {ct_volume_name} doesn't have enough points for registration.")
                else:
                    for cbct_volume_name in cbct_list:
                        print(f"\nProcessing CBCT Volume: {cbct_volume_name}")
                        #yesCbct = True
                        cbct_points = [centroid for centroid, _ in volume_points_dict[("CBCT", cbct_volume_name)]]
                        #find_table_top_z(cbct_volume_name, writefilecheck, yesCbct)

                        if len(cbct_points) < 3:
                            print(f"CBCT Volume '{cbct_volume_name}' doesn't have enough points for registration.")
                            continue

                        # Storage for the distances
                        distances_ct_cbct = []
                        distances_internal = {"A-B": [], "B-C": [], "C-A": []}

                        cbct_points_array = np.array(cbct_points)  # Convert to a numpy array

                        ct_volume_node = slicer.util.getNode(ct_volume_name)
                        cbct_volume_node = slicer.util.getNode(cbct_volume_name)
                        #ct_spacing = ct_volume_node.GetSpacing()  # (x_spacing, y_spacing, z_spacing)
                        #cbct_spacing = cbct_volume_node.GetSpacing()  # (x_spacing, y_spacing, z_spacing)
                        #ct_scale_factor = np.array(ct_spacing)  # CT spacing (x, y, z)
                        #cbct_scale_factor = np.array(cbct_spacing)  # CBCT spacing (x, y, z)
                        #print(ct_scale_factor, cbct_scale_factor)

                        # Sort the points by the Z coordinate (or X/Y if another axis is preferred)
                        cbct_points_sorted = cbct_points_array[np.argsort(cbct_points_array[:, 2])]

                        # Distances between CT and CBCT (SORTED points!)
                        d_ct_cbct = np.linalg.norm(cbct_points_sorted - ct_points, axis=1)
                        distances_ct_cbct.append(d_ct_cbct)

                        # Distances between points within the SORTED cbct_points
                        d_ab = np.linalg.norm(cbct_points_sorted[0] - cbct_points_sorted[1])
                        d_bc = np.linalg.norm(cbct_points_sorted[1] - cbct_points_sorted[2])
                        d_ca = np.linalg.norm(cbct_points_sorted[2] - cbct_points_sorted[0])

                        # Sort the distances so they always come in the same order
                        sorted_distances = sorted([d_ab, d_bc, d_ca])
                        distances_internal["A-B"].append(sorted_distances[0])
                        distances_internal["B-C"].append(sorted_distances[1])
                        distances_internal["C-A"].append(sorted_distances[2])

                        # Study name for the statistics
                        studyName = shNode.GetItemName(studyItem)

                        # **Store the distances in the global lists**
                        prostate_size_est.append({"Study": studyName, "Distances": sorted_distances})
                        ctcbct_distance.append({"Study": studyName, "Distances": list(distances_ct_cbct[-1])})  # Convert to a list

                        # Choose the method according to the user's selection
                        chosen_rotation_matrix = np.eye(3)
                        chosen_translation_vector = np.zeros(3)

                        if applyScaling:
                            scaling_factors = compute_optimal_scaling_per_axis(cbct_points, ct_points)
                            print("Scaling factors: ", scaling_factors)
                            cbct_points = compute_scaling(cbct_points, scaling_factors)

                        if applyRotation:
                            if selectedMethod == "Kabsch":
                                chosen_rotation_matrix = compute_Kabsch_rotation(cbct_points, ct_points)
                            elif selectedMethod == "Horn":
                                chosen_rotation_matrix = compute_Horn_rotation(cbct_points, ct_points)
                            elif selectedMethod == "Iterative Closest Point (Kabsch)":
                                _, chosen_rotation_matrix, _ = icp_algorithm(cbct_points, ct_points)
                            print("Rotation Matrix:\n", chosen_rotation_matrix)

                        if applyTranslation:
                            chosen_translation_vector = compute_translation(cbct_points, ct_points, chosen_rotation_matrix)
                            print("Translation Vector:\n", chosen_translation_vector)

                        # Create a vtkTransformNode and attach it to the CBCT volume
                        imeTransformNoda = cbct_volume_name + " Transform"
                        transform_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLTransformNode", imeTransformNoda)

                        # Create the transform and apply it
                        vtk_transform = create_vtk_transform(chosen_rotation_matrix, chosen_translation_vector)
                        transform_node.SetAndObserveTransformToParent(vtk_transform)

                        # Get the CBCT volume and apply the transform to it
                        cbct_volume_node = slicer.util.getNode(cbct_volume_name)
                        cbct_volume_node.SetAndObserveTransformNodeID(transform_node.GetID())

                        # Harden the transform on the volume (physically apply it)
                        slicer.vtkSlicerTransformLogic().hardenTransform(cbct_volume_node)
                        slicer.mrmlScene.RemoveNode(transform_node)  # Remove the transform node from the scene
                        print("Transform successful on ", cbct_volume_name)
            else:
                print(f"Study {studyItem} doesn't have any appropriate CT or CBCT volumes.")
        # Print the global statistics
        if writefilecheck:
            print("Distances between CT & CBCT markers: ", ctcbct_distance)
            print("Distances between pairs of markers for each volume: ", prostate_size_est)

            # Define the file path for the CSV file
            file_path = os.path.join(os.path.dirname(__file__), "study_data.csv")

            # Write the lists to the CSV file
            with open(file_path, mode='w', newline='') as file:  # 'w' to overwrite, 'a' to append
                writer = csv.writer(file)
                # Write headers
                writer.writerow(["Prostate Size", "CT-CBCT Distance"])
                # Write data rows
                for i in range(len(prostate_size_est)):
                    writer.writerow([prostate_size_est[i], ctcbct_distance[i]])
            print("File written at ", file_path)