# SeekTransformModule.py

import os
import numpy as np
import scipy
from scipy.spatial.distance import cdist
from scipy.spatial.transform import Rotation as R
import slicer
import itertools
from DICOMLib import DICOMUtils
from collections import deque
import vtk
from slicer.ScriptedLoadableModule import *
import qt
import matplotlib.pyplot as plt
import csv
import time

#exec(open("C:/Users/lkomar/Documents/Prostata/FirstTryRegister.py").read())


class SeekTransformModule(ScriptedLoadableModule):
    """
    Module description shown in the module panel.
    """
    def __init__(self, parent):
        ScriptedLoadableModule.__init__(self, parent)
        self.parent.title = "Seek Transform module"
        self.parent.categories = ["Image Processing"]
        self.parent.contributors = ["Luka Komar (Onkološki Inštitut Ljubljana, Fakulteta za Matematiko in Fiziko Ljubljana)"]
        self.parent.helpText = "This module applies rigid transformations to CBCT volumes based on reference CT volumes."
        self.parent.acknowledgementText = "Supported by doc. Primož Peterlin & prof. Andrej Studen"


class SeekTransformModuleWidget(ScriptedLoadableModuleWidget):
    """
    GUI of the module.
    """
    def setup(self):
        ScriptedLoadableModuleWidget.setup(self)

        # Dropdown menu for selecting the rotation method
        self.rotationMethodComboBox = qt.QComboBox()
        self.rotationMethodComboBox.addItems(["Kabsch", "Horn", "Iterative Closest Point (Kabsch)"])
        self.layout.addWidget(self.rotationMethodComboBox)

        # Checkboxes for the individual transformations
        self.rotationCheckBox = qt.QCheckBox("Rotation")
        self.rotationCheckBox.setChecked(True)
        self.layout.addWidget(self.rotationCheckBox)

        self.translationCheckBox = qt.QCheckBox("Translation")
        self.translationCheckBox.setChecked(True)
        self.layout.addWidget(self.translationCheckBox)

        self.scalingCheckBox = qt.QCheckBox("Scaling")
        self.scalingCheckBox.setChecked(True)
        self.layout.addWidget(self.scalingCheckBox)

        self.writefileCheckBox = qt.QCheckBox("Write distances to csv file")
        self.writefileCheckBox.setChecked(True)
        self.layout.addWidget(self.writefileCheckBox)

        # Apply button
        self.applyButton = qt.QPushButton("Find markers and transform")
        self.applyButton.toolTip = "Finds markers, computes optimal rigid transform and applies it to CBCT volumes."
        self.applyButton.enabled = True
        self.layout.addWidget(self.applyButton)

        # Connect the button to the logic
        self.applyButton.connect('clicked(bool)', self.onApplyButton)

        self.layout.addStretch(1)

    def onApplyButton(self):
        logic = MyTransformModuleLogic()
        selectedMethod = self.rotationMethodComboBox.currentText  # selected rotation method

        # Read the checkbox states
        applyRotation = self.rotationCheckBox.isChecked()
        applyTranslation = self.translationCheckBox.isChecked()
        applyScaling = self.scalingCheckBox.isChecked()
        writefilecheck = self.writefileCheckBox.isChecked()

        # Call the logic with the selected settings
        logic.run(selectedMethod, applyRotation, applyTranslation, applyScaling, writefilecheck)


class MyTransformModuleLogic(ScriptedLoadableModuleLogic):
    """
    Core logic of the module.
    """
    def run(self, selectedMethod, applyRotation, applyTranslation, applyScaling, writefilecheck):
        start_time = time.time()
        print("Calculating...")

        def group_points(points, threshold):
            # Function to group points that are close to each other
            grouped_points = []
            while points:
                point = points.pop()  # Take one point from the list
                group = [point]  # Start a new group
                # Find all points close to this one
                distances = cdist([point], points)  # Calculate distances from this point to the others
                close_points = [i for i, dist in enumerate(distances[0]) if dist < threshold]
                # Add the close points to the group
                group.extend([points[i] for i in close_points])
                # Remove the grouped points from the list
                points = [point for i, point in enumerate(points) if i not in close_points]
                # Add the group to the result
                grouped_points.append(group)
            return grouped_points

        def region_growing(image_data, seed, intensity_threshold, max_distance):
            dimensions = image_data.GetDimensions()
            visited = set()
            region = []
            queue = deque([seed])
            while queue:
                x, y, z = queue.popleft()
                if (x, y, z) in visited:
                    continue
                visited.add((x, y, z))
                voxel_value = image_data.GetScalarComponentAsDouble(x, y, z, 0)
                if voxel_value >= intensity_threshold:
                    region.append((x, y, z))
                    # Add neighbors within bounds
                    for dx, dy, dz in [(1, 0, 0), (-1, 0, 0), (0, 1, 0), (0, -1, 0), (0, 0, 1), (0, 0, -1)]:
                        nx, ny, nz = x + dx, y + dy, z + dz
                        if 0 <= nx < dimensions[0] and 0 <= ny < dimensions[1] and 0 <= nz < dimensions[2]:
                            if (nx, ny, nz) not in visited:
                                queue.append((nx, ny, nz))
            return region

        def compute_optimal_scaling_per_axis(moving_points, fixed_points):
            """Computes optimal scaling factors for each axis (X, Y, Z) to align moving points (CBCT) to fixed points (CT).

            Args:
                moving_points (list of lists): List of (x, y, z) moving points (CBCT).
                fixed_points (list of lists): List of (x, y, z) fixed points (CT).

            Returns:
                tuple: Scaling factors (sx, sy, sz).
            """
            moving_points_np = np.array(moving_points)
            fixed_points_np = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving_points_np, axis=0)
            centroid_fixed = np.mean(fixed_points_np, axis=0)

            # Compute absolute distances of each point from its centroid along each axis
            distances_moving = np.abs(moving_points_np - centroid_moving)
            distances_fixed = np.abs(fixed_points_np - centroid_fixed)

            # Compute scaling factors as the ratio of mean absolute distances per axis
            scale_factors = np.mean(distances_fixed, axis=0) / np.mean(distances_moving, axis=0)
            return tuple(scale_factors)
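
        # Illustrative sketch (assumed marker coordinates; kept as comments so the
        # module's runtime behaviour is unchanged): markers spread twice as far along X
        # in the CT as in the CBCT should yield scaling factors of roughly (2, 1, 1).
        #
        #   cbct_demo = [[0, 0, 0], [2, 0, 0], [0, 2, 2]]
        #   ct_demo   = [[0, 0, 0], [4, 0, 0], [0, 2, 2]]
        #   sx, sy, sz = compute_optimal_scaling_per_axis(cbct_demo, ct_demo)
        #   # -> sx ≈ 2.0, sy ≈ 1.0, sz ≈ 1.0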

        def compute_scaling(cbct_points, scaling_factors):
            """Applies non-uniform scaling to CBCT points.

            Args:
                cbct_points (list of lists): List of (x, y, z) points.
                scaling_factors (tuple): Scaling factors (sx, sy, sz) for each axis.

            Returns:
                list: Scaled CBCT points.
            """
            sx, sy, sz = scaling_factors  # Extract scaling factors
            scaling_matrix = np.diag([sx, sy, sz])  # Create diagonal scaling matrix
            cbct_points_np = np.array(cbct_points)  # Convert to numpy array
            scaled_points = cbct_points_np @ scaling_matrix.T  # Apply scaling
            return scaled_points.tolist()  # Convert back to list

        def compute_Kabsch_rotation(moving_points, fixed_points):
            """
            Computes the optimal rotation matrix to align moving_points to fixed_points.

            Parameters:
            moving_points (list or ndarray): List of points to be rotated (CBCT).
            fixed_points (list or ndarray): List of reference points (CT).

            Returns:
            ndarray: Optimal rotation matrix.
            """
            assert len(moving_points) == len(fixed_points), "Point lists must be the same length."

            # Convert to numpy arrays
            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving, axis=0)
            centroid_fixed = np.mean(fixed, axis=0)

            # Center the points
            moving_centered = moving - centroid_moving
            fixed_centered = fixed - centroid_fixed

            # Compute covariance matrix
            H = np.dot(moving_centered.T, fixed_centered)

            # SVD decomposition
            U, _, Vt = np.linalg.svd(H)
            Rotate_optimal = np.dot(Vt.T, U.T)

            # Correct improper rotation (reflection)
            if np.linalg.det(Rotate_optimal) < 0:
                Vt[-1, :] *= -1
                Rotate_optimal = np.dot(Vt.T, U.T)

            return Rotate_optimal
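
        # Illustrative sketch (assumed points; left as comments so nothing extra runs):
        # for markers related by a pure 90° rotation about the Z axis, the recovered
        # matrix maps the moving set onto the fixed set, i.e. fixed ≈ R @ moving.
        #
        #   moving_demo = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
        #   fixed_demo  = [[0, 1, 0], [-1, 0, 0], [0, 0, 1]]
        #   R_demo = compute_Kabsch_rotation(moving_demo, fixed_demo)
        #   # -> approximately [[0, -1, 0], [1, 0, 0], [0, 0, 1]]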

        def compute_Horn_rotation(moving_points, fixed_points):
            """
            Computes the optimal rotation matrix using quaternions.

            Parameters:
            moving_points (list or ndarray): List of points to be rotated.
            fixed_points (list or ndarray): List of reference points.

            Returns:
            ndarray: Optimal rotation matrix.
            """
            assert len(moving_points) == len(fixed_points), "Point lists must be the same length."

            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving, axis=0)
            centroid_fixed = np.mean(fixed, axis=0)

            # Center the points
            moving_centered = moving - centroid_moving
            fixed_centered = fixed - centroid_fixed

            # Construct the cross-dispersion matrix
            M = np.dot(moving_centered.T, fixed_centered)

            # Construct the N matrix for the quaternion solution
            A = M - M.T
            delta = np.array([A[1, 2], A[2, 0], A[0, 1]])
            trace = np.trace(M)

            N = np.zeros((4, 4))
            N[0, 0] = trace
            N[1:, 0] = delta
            N[0, 1:] = delta
            N[1:, 1:] = M + M.T - np.eye(3) * trace

            # Compute the eigenvector corresponding to the maximum eigenvalue
            eigvals, eigvecs = np.linalg.eigh(N)
            q_optimal = eigvecs[:, np.argmax(eigvals)]  # Optimal quaternion

            # Convert the quaternion to a rotation matrix
            w, x, y, z = q_optimal
            R = np.array([
                [1 - 2*(y**2 + z**2), 2*(x*y - z*w), 2*(x*z + y*w)],
                [2*(x*y + z*w), 1 - 2*(x**2 + z**2), 2*(y*z - x*w)],
                [2*(x*z - y*w), 2*(y*z + x*w), 1 - 2*(x**2 + y**2)]
            ])
            return R
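
        # Illustrative sketch (assumed points): on the same noise-free example used for
        # the Kabsch sketch above, the quaternion (Horn) solution should agree with it.
        #
        #   R_horn_demo = compute_Horn_rotation([[1, 0, 0], [0, 1, 0], [0, 0, 1]],
        #                                       [[0, 1, 0], [-1, 0, 0], [0, 0, 1]])
        #   # -> approximately [[0, -1, 0], [1, 0, 0], [0, 0, 1]]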

        def icp_algorithm(moving_points, fixed_points, max_iterations=100, tolerance=1e-5):
            """
            Iterative Closest Point (ICP) algorithm to align moving_points to fixed_points.

            Parameters:
            moving_points (list or ndarray): List of points to be aligned.
            fixed_points (list or ndarray): List of reference points.
            max_iterations (int): Maximum number of iterations.
            tolerance (float): Convergence tolerance.

            Returns:
            ndarray: Transformed moving points.
            ndarray: Optimal rotation matrix.
            ndarray: Optimal translation vector.
            """
            # Convert to numpy arrays
            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Initialize the transformation
            R = np.eye(3)  # Identity matrix for rotation
            t = np.zeros(3)  # Zero vector for translation
            prev_error = np.inf  # Initialize previous error to a large value

            for iteration in range(max_iterations):
                # Step 1: Find the nearest neighbors (correspondences)
                distances = np.linalg.norm(moving[:, np.newaxis] - fixed, axis=2)
                nearest_indices = np.argmin(distances, axis=1)
                nearest_points = fixed[nearest_indices]

                # Step 2: Compute the optimal rotation and translation
                R_new = compute_Kabsch_rotation(moving, nearest_points)
                centroid_moving = np.mean(moving, axis=0)
                centroid_fixed = np.mean(nearest_points, axis=0)
                t_new = centroid_fixed - np.dot(R_new, centroid_moving)

                # Step 3: Apply the transformation
                moving = np.dot(moving, R_new.T) + t_new

                # Update the cumulative transformation
                R = np.dot(R_new, R)
                t = np.dot(R_new, t) + t_new

                # Step 4: Check for convergence
                mean_error = np.mean(np.linalg.norm(moving - nearest_points, axis=1))
                if np.abs(prev_error - mean_error) < tolerance:
                    print(f"ICP converged after {iteration + 1} iterations.")
                    break
                prev_error = mean_error
            else:
                print(f"ICP reached maximum iterations ({max_iterations}).")

            return moving, R, t
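
        # Illustrative sketch (assumed points): for a small offset the nearest-neighbour
        # correspondences are correct from the first iteration, so ICP recovers the pure
        # translation and converges quickly.
        #
        #   moving_demo = [[1.1, 0, 0], [0.1, 1, 0], [0.1, 0, 1]]   # fixed markers shifted by +0.1 in X
        #   fixed_demo  = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
        #   aligned_demo, R_demo, t_demo = icp_algorithm(moving_demo, fixed_demo)
        #   # -> R_demo ≈ identity, t_demo ≈ [-0.1, 0, 0], aligned_demo ≈ fixed_demo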

        def match_points(cbct_points, ct_points, auto_weights=True, fallback_if_worse=True, normalize_lengths=True):
            def side_lengths(points):
                lengths = [
                    np.linalg.norm(points[0] - points[1]),
                    np.linalg.norm(points[1] - points[2]),
                    np.linalg.norm(points[2] - points[0])
                ]
                if normalize_lengths:
                    total = sum(lengths) + 1e-6  # avoid division by zero
                    lengths = [l / total for l in lengths]
                return lengths

            def triangle_angles(points):
                a = np.linalg.norm(points[1] - points[2])
                b = np.linalg.norm(points[0] - points[2])
                c = np.linalg.norm(points[0] - points[1])
                angle_A = np.arccos(np.clip((b**2 + c**2 - a**2) / (2 * b * c), -1.0, 1.0))
                angle_B = np.arccos(np.clip((a**2 + c**2 - b**2) / (2 * a * c), -1.0, 1.0))
                angle_C = np.pi - angle_A - angle_B
                return [angle_A, angle_B, angle_C]

            def permutation_score(perm, ct_lengths, ct_angles, w_len, w_ang):
                perm_lengths = side_lengths(perm)
                perm_angles = triangle_angles(perm)

                # Normalize
                def normalize(vec):
                    norm = np.linalg.norm(vec)
                    return vec / norm if norm > 0 else vec

                perm_lengths = normalize(perm_lengths)
                perm_angles = normalize(perm_angles)
                ct_lengths_n = normalize(ct_lengths)
                ct_angles_n = normalize(ct_angles)

                score_len = sum(abs(a - b) for a, b in zip(perm_lengths, ct_lengths_n))
                score_ang = sum(abs(a - b) for a, b in zip(perm_angles, ct_angles_n))
                return w_len * score_len + w_ang * score_ang

            cbct_points = list(cbct_points)
            ct_lengths = side_lengths(np.array(ct_points))
            ct_angles = triangle_angles(np.array(ct_points))

            if auto_weights:
                var_len = np.var(ct_lengths)
                var_ang = np.var(ct_angles)
                total_var = var_len + var_ang + 1e-6
                weight_length = (1 - var_len / total_var)
                weight_angle = (1 - var_ang / total_var)
            else:
                weight_length = 0.5
                weight_angle = 0.5

            best_perm = None
            best_score = float('inf')
            for perm in itertools.permutations(cbct_points):
                perm = np.array(perm)
                score = permutation_score(perm, ct_lengths, ct_angles, weight_length, weight_angle)
                if score < best_score:
                    best_score = score
                    best_perm = perm

            if fallback_if_worse:
                original_score = permutation_score(np.array(cbct_points), ct_lengths, ct_angles, weight_length, weight_angle)
                if original_score <= best_score or np.allclose(cbct_points, best_perm):
                    print("Fallback to original points due to worse score of the permutation.")
                    return list(cbct_points)

            return list(best_perm)
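
        # Illustrative sketch (assumed points): match_points only reorders the CBCT
        # markers, it does not move them. For a scalene triangle, a cyclically shifted
        # copy of the CT markers should come back in the CT vertex order.
        #
        #   ct_demo   = [[0, 0, 0], [30, 0, 0], [0, 40, 0]]
        #   cbct_demo = [[30, 0, 0], [0, 40, 0], [0, 0, 0]]   # same triangle, shifted order
        #   matched_demo = match_points(cbct_demo, ct_demo)
        #   # -> reordered to match ct_demo: [[0, 0, 0], [30, 0, 0], [0, 40, 0]]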

        def compute_translation(moving_points, fixed_points, rotation_matrix):
            """
            Computes the translation vector to align moving_points to fixed_points given a rotation matrix.

            Parameters:
            moving_points (list or ndarray): List of points to be translated.
            fixed_points (list or ndarray): List of reference points.
            rotation_matrix (ndarray): Rotation matrix.

            Returns:
            ndarray: Translation vector.
            """
            # Convert to numpy arrays
            moving = np.array(moving_points)
            fixed = np.array(fixed_points)

            # Compute centroids
            centroid_moving = np.mean(moving, axis=0)
            centroid_fixed = np.mean(fixed, axis=0)

            # Compute translation, consistent with the fixed ≈ R @ moving + t convention
            # used by compute_Kabsch_rotation and icp_algorithm
            translation = centroid_fixed - np.dot(rotation_matrix, centroid_moving)

            return translation

        def create_vtk_transform(rotation_matrix, translation_vector):
            """
            Creates a vtkTransform from a rotation matrix and a translation vector.
            """
            # Create a 4x4 transformation matrix
            transform_matrix = np.eye(4)  # Start with an identity matrix
            transform_matrix[:3, :3] = rotation_matrix  # Set rotation part
            transform_matrix[:3, 3] = translation_vector  # Set translation part

            # Convert to vtkMatrix4x4
            vtk_matrix = vtk.vtkMatrix4x4()
            for i in range(4):
                for j in range(4):
                    vtk_matrix.SetElement(i, j, transform_matrix[i, j])
            print("Transform matrix:")
            for i in range(4):
                print(" ".join(f"{vtk_matrix.GetElement(i, j):.6f}" for j in range(4)))

            # Create a vtkTransform and set the matrix
            transform = vtk.vtkTransform()
            transform.SetMatrix(vtk_matrix)
            return transform
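
        # Illustrative sketch (assumed values): the helper just packs R and t into a
        # homogeneous 4x4 matrix, so a pure translation produces an identity rotation
        # block with the offset in the last column.
        #
        #   demo_transform = create_vtk_transform(np.eye(3), np.array([5.0, 0.0, -2.0]))
        #   # printed matrix rows: [1 0 0 5], [0 1 0 0], [0 0 1 -2], [0 0 0 1]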

        def detect_points_region_growing(volume_name, yesCbct, create_marker, intensity_threshold=3000, x_min=90, x_max=380, y_min=190, y_max=380, z_min=80, z_max=140, max_distance=9, centroid_merge_threshold=5):
            volume_node = slicer.util.getNode(volume_name)
            if not volume_node:
                raise RuntimeError(f"Volume {volume_name} not found.")

            image_data = volume_node.GetImageData()
            matrix = vtk.vtkMatrix4x4()
            volume_node.GetIJKToRASMatrix(matrix)

            dimensions = image_data.GetDimensions()
            #detected_regions = []

            if yesCbct:  # is this a CBCT or a CT?
                valid_x_min, valid_x_max = 0, dimensions[0] - 1
                valid_y_min, valid_y_max = 0, dimensions[1] - 1
                valid_z_min, valid_z_max = 0, dimensions[2] - 1
            else:
                valid_x_min, valid_x_max = max(x_min, 0), min(x_max, dimensions[0] - 1)
                valid_y_min, valid_y_max = max(y_min, 0), min(y_max, dimensions[1] - 1)
                valid_z_min, valid_z_max = max(z_min, 0), min(z_max, dimensions[2] - 1)

            visited = set()

            def grow_region(x, y, z):
                if (x, y, z) in visited:
                    return None

                voxel_value = image_data.GetScalarComponentAsDouble(x, y, z, 0)
                if voxel_value < intensity_threshold:
                    return None

                region = region_growing(image_data, (x, y, z), intensity_threshold, max_distance=max_distance)
                if region:
                    for point in region:
                        visited.add(tuple(point))
                    return region
                return None

            regions = []
            for z in range(valid_z_min, valid_z_max + 1):
                for y in range(valid_y_min, valid_y_max + 1):
                    for x in range(valid_x_min, valid_x_max + 1):
                        region = grow_region(x, y, z)
                        if region:
                            regions.append(region)

            # Collect centroids using an intensity-weighted average
            centroids = []
            for region in regions:
                points = np.array([matrix.MultiplyPoint([*point, 1])[:3] for point in region])
                intensities = np.array([image_data.GetScalarComponentAsDouble(*point, 0) for point in region])
                if intensities.sum() > 0:
                    weighted_centroid = np.average(points, axis=0, weights=intensities)
                    max_intensity = intensities.max()
                    centroids.append((np.round(weighted_centroid, 2), max_intensity))

            unique_centroids = []
            for centroid, intensity in centroids:
                if not any(np.linalg.norm(centroid - existing_centroid) < centroid_merge_threshold for existing_centroid, _ in unique_centroids):
                    unique_centroids.append((centroid, intensity))

            if create_marker:
                markups_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"Markers_{volume_name}")
                for centroid, intensity in unique_centroids:
                    markups_node.AddControlPoint(*centroid)
                    markups_node.SetDisplayVisibility(False)
                    #print(f"Detected Centroid (RAS): {centroid}, Max Intensity: {intensity}")

            return unique_centroids

        def find_table_top_z(ct_volume_name, writefilecheck, yesCbct):
            """
            Finds the height of the table top in a CT/CBCT volume and adds markers.

            :param ct_volume_name: Name of the volume in Slicer
            :param writefilecheck: If True, write the result to a CSV file
            :param yesCbct: If True, use the CBCT thresholds
            :return: Tuple (table-top height in RAS mm, pixel offset), or None if the table top is not found
            """
            # Get the volume
            ct_volume_node = slicer.util.getNode(ct_volume_name)
            image_data = ct_volume_node.GetImageData()
            spacing = ct_volume_node.GetSpacing()
            dims = image_data.GetDimensions()
            #origin = ct_volume_node.GetOrigin()

            # Convert the volume to a numpy array
            np_array = slicer.util.arrayFromVolume(ct_volume_node)

            # Compute the central IJK coordinates
            mid_ijk = [dims[0] // 2, dims[1] // 2, dims[2] // 2]

            # Make sure the IJK indices stay inside the volume
            mid_ijk = [max(0, min(dims[i] - 1, mid_ijk[i])) for i in range(3)]

            # IJK → RAS conversion
            ijkToRasMatrix = vtk.vtkMatrix4x4()
            ct_volume_node.GetIJKToRASMatrix(ijkToRasMatrix)

            # Central Z slice
            mid_z_voxel = mid_ijk[2]
            slice_data = np_array[mid_z_voxel, :, :]  # (Y, X)

            # Central column
            mid_x_voxel = mid_ijk[0] - 15  # 15 pixels left of the centre so we do not measure at the spine
            column_values = slice_data[:, mid_x_voxel]  # Y direction

            # Add a marker in RAS coordinates
            #mid_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"Sredina_{ct_volume_name}")
            #mid_ras = np.array(ijkToRasMatrix.MultiplyPoint([*mid_ijk, 1]))[:3]
            #mid_node.AddControlPoint(mid_ras)

            # Choose the threshold depending on CBCT or CT
            threshold = -300 if yesCbct else -100  # -300 for CBCT, -100 for CT

            # Find the table edge (second rise)
            previous_value = -1000
            edge_count = 0
            table_top_y = None
            min_jump = 100 if yesCbct else 50  # minimum intensity jump for CBCT and CT

            for y in range(len(column_values) - 1, -1, -1):  # from the bottom upwards
                intensity = column_values[y]
                #if column_values[y] > threshold and previous_value <= threshold:
                if (intensity - previous_value) > min_jump and intensity > threshold:  # jump detection instead of a plain threshold comparison
                    if yesCbct:
                        table_top_y = y + 1  # so we are not already inside the body
                        #print(f"Table top found at Y = {table_top_y}")  # CBCT
                        break
                    if edge_count == 0 or (edge_count == 1 and previous_value < -200):  # check that the intensity dropped back below -200
                        edge_count += 1
                        #print(f"Table edge detected at X, Y, Z = {mid_x_voxel}, {y}, {mid_z_voxel}")
                        if edge_count == 2:  # second rise = table top
                            table_top_y = y + 1
                            #print(f"Table top found at Y = {table_top_y}")
                            break
                previous_value = column_values[y]

            if table_top_y is None:
                print("❌ Table top was not found!")
                return None

            # Y IJK → RAS conversion
            table_ijk = [mid_x_voxel, table_top_y, mid_z_voxel]
            table_ras = np.array(ijkToRasMatrix.MultiplyPoint([*table_ijk, 1]))[:3]

            # Add a marker for the table height
            table_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"VišinaMize_{ct_volume_name}")
            table_node.AddControlPoint(table_ras)
            table_node.SetDisplayVisibility(False)

            # Height offset of the table top (the mm conversion is kept commented out)
            #image_center_y = dims[1] // 2
            pixel_offset = table_top_y
            #mm_offset = pixel_offset * spacing[1]
            #print(f"📏 The table is {abs(mm_offset):.2f} mm {'lower' if mm_offset > 0 else 'higher'} than the centre.")
            #print(f"📏 The table is {abs(pixel_offset)} pixels {'lower' if pixel_offset > 0 else 'higher'} than the centre.")
            #print(f"📏 CBCT table_top_y: {table_top_y}, image_center_y: {image_center_y}, pixel_offset: {pixel_offset}, mm_offset: {mm_offset}")
            #print(f"🛏 Absolute table height (RAS Y): {table_ras[1]} mm")

            # Save to CSV
            if writefilecheck:
                file_path = os.path.join(os.path.dirname(__file__), "heightdata.csv")
                with open(file_path, mode='a', newline='') as file:
                    writer = csv.writer(file)
                    modality = "CBCT " if yesCbct else "CT "
                    writer.writerow([modality, ct_volume_name, f" Upper part of table detected at Z = {table_ras[1]:.2f} mm"])

            return table_ras[1], pixel_offset

        def align_cbct_to_ct(volumeNode, scan_type, offset, CT_offset=None, CT_spacing=None):
            """
            Aligns a CBCT volume to the CT volume based on the height offset.

            Args:
                volumeNode (vtkMRMLScalarVolumeNode): The volume node to be aligned.
                scan_type (str): The type of scan ("CT" or "CBCT").
                offset (float): The height offset of the current volume from the center in mm.
                CT_offset (float, optional): The height offset of the CT volume from the center. Required for CBCT alignment.
                CT_spacing (float, optional): The voxel spacing of the CT volume in mm (for scaling the offset).

            Returns:
                tuple or float: For a CT volume, (CT_offset, CT_spacing); for a CBCT volume, the alignment offset applied in mm.
            """
            if scan_type == "CT":
                CT_offset = offset
                CT_spacing = volumeNode.GetSpacing()[1]
                #print(f"CT offset set to: {CT_offset}, CT spacing: {CT_spacing} mm/voxel")
                return CT_offset, CT_spacing
            else:
                if CT_offset is None or CT_spacing is None:
                    raise ValueError("CT_offset and CT_spacing must be provided to align CBCT to CT.")

                CBCT_offset = offset

                # Difference in mm, without rescaling by the CBCT spacing
                alignment_offset_mm = CT_offset - CBCT_offset
                #print(f"CT offset: {CT_offset}, CBCT offset: {CBCT_offset}")
                #print(f"CT spacing: {CT_spacing} mm/voxel, CBCT spacing: {volumeNode.GetSpacing()[1]} mm/voxel")
                #print(f"Aligning CBCT with CT. Offset in mm: {alignment_offset_mm}")

                # Apply the transformation
                transform = vtk.vtkTransform()
                transform.Translate(0, alignment_offset_mm, 0)

                transformNode = slicer.vtkMRMLTransformNode()
                slicer.mrmlScene.AddNode(transformNode)
                transformNode.SetAndObserveTransformToParent(transform)

                volumeNode.SetAndObserveTransformNodeID(transformNode.GetID())
                slicer.vtkSlicerTransformLogic().hardenTransform(volumeNode)
                slicer.mrmlScene.RemoveNode(transformNode)

                return alignment_offset_mm

        def print_orientation(volume_name):
            node = slicer.util.getNode(volume_name)
            matrix = vtk.vtkMatrix4x4()
            node.GetIJKToRASMatrix(matrix)
            print(f"{volume_name} IJK→RAS:")
            for i in range(3):
                print([matrix.GetElement(i, j) for j in range(3)])

        def prealign_by_centroid(cbct_points, ct_points):
            """
            Pre-aligns the CBCT markers to the CT markers by matching their centroids.

            Args:
                cbct_points: List or ndarray of points from the CBCT.
                ct_points: List or ndarray of points from the CT.

            Returns:
                ndarray: CBCT points shifted so that the two centroids coincide.
            """
            cbct_points = np.array(cbct_points)
            ct_points = np.array(ct_points)
            cbct_centroid = np.mean(cbct_points, axis=0)
            ct_centroid = np.mean(ct_points, axis=0)
            translation_vector = ct_centroid - cbct_centroid
            aligned_cbct = cbct_points + translation_vector
            return aligned_cbct
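
        # Illustrative sketch (assumed points): prealignment only removes the centroid
        # offset, so a CBCT set that is merely shifted lands exactly on the CT set.
        #
        #   shifted_demo = prealign_by_centroid([[50, 0, 0], [51, 0, 0]], [[0, 0, 0], [1, 0, 0]])
        #   # -> [[0, 0, 0], [1, 0, 0]]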

        def choose_best_translation(cbct_points, ct_points, rotation_matrix):
            """
            Chooses the better translation: centroid-based or the mean residual after rotation (retranslation).

            Args:
                cbct_points (array-like): Points from the CBCT (not yet rotated).
                ct_points (array-like): Target CT points.
                rotation_matrix (ndarray): Rotation matrix.

            Returns:
                tuple: (best_translation_vector, transformed_cbct_points, used_method)
            """
            cbct_points = np.array(cbct_points)
            ct_points = np.array(ct_points)

            # 1. Rotate the CBCT points
            rotated_cbct = np.dot(cbct_points, rotation_matrix.T)

            # 2. Centroid-based translation
            centroid_moving = np.mean(cbct_points, axis=0)
            centroid_fixed = np.mean(ct_points, axis=0)
            translation_centroid = centroid_fixed - np.dot(centroid_moving, rotation_matrix)
            transformed_centroid = rotated_cbct + translation_centroid
            error_centroid = np.mean(np.linalg.norm(transformed_centroid - ct_points, axis=1))

            # 3. Retranslation (mean residual)
            translation_recomputed = np.mean(ct_points - rotated_cbct, axis=0)
            transformed_recomputed = rotated_cbct + translation_recomputed
            error_recomputed = np.mean(np.linalg.norm(transformed_recomputed - ct_points, axis=1))

            # 4. Pick the better one
            if error_recomputed < error_centroid:
                print(f"✅ Using retranslation (error: {error_recomputed:.2f} mm)")
                return translation_recomputed, transformed_recomputed, "retranslation"
            else:
                print(f"✅ Using centroid-based translation (error: {error_centroid:.2f} mm)")
                return translation_centroid, transformed_centroid, "centroid"
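
        # Illustrative sketch (assumed points): with an identity rotation and a pure
        # shift, both candidate translations land the CBCT markers on the CT markers,
        # and the helper simply reports the one with the (marginally) smaller residual.
        #
        #   cbct_demo = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
        #   ct_demo   = [[11, 0, 0], [10, 1, 0], [10, 0, 1]]   # cbct_demo shifted by +10 in X
        #   t_demo, moved_demo, method_demo = choose_best_translation(cbct_demo, ct_demo, np.eye(3))
        #   # -> t_demo ≈ [10, 0, 0], mean residual ≈ 0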

        def visualize_point_matches_in_slicer(cbct_points, ct_points, study_name="MatchVisualization"):
            assert len(cbct_points) == len(ct_points), "The number of points must match!"

            # Create markups for the CBCT points
            cbct_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"{study_name}_CBCT")
            cbct_node.GetDisplayNode().SetSelectedColor(0, 0, 1)  # blue

            # Create markups for the CT points
            ct_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode", f"{study_name}_CT")
            ct_node.GetDisplayNode().SetSelectedColor(1, 0, 0)  # red

            # Add the points
            for i, (cbct, ct) in enumerate(zip(cbct_points, ct_points)):
                cbct_node.AddControlPoint(*cbct, f"CBCT_{i}")
                ct_node.AddControlPoint(*ct, f"CT_{i}")

            # Create a model with lines between the pairs
            points = vtk.vtkPoints()
            lines = vtk.vtkCellArray()
            for i, (p1, p2) in enumerate(zip(cbct_points, ct_points)):
                id1 = points.InsertNextPoint(p1)
                id2 = points.InsertNextPoint(p2)
                line = vtk.vtkLine()
                line.GetPointIds().SetId(0, id1)
                line.GetPointIds().SetId(1, id2)
                lines.InsertNextCell(line)

            polyData = vtk.vtkPolyData()
            polyData.SetPoints(points)
            polyData.SetLines(lines)

            # Model node
            modelNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLModelNode", f"{study_name}_Connections")
            modelNode.SetAndObservePolyData(polyData)

            modelDisplay = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLModelDisplayNode")
            modelDisplay.SetColor(0, 0, 0)  # black
            modelDisplay.SetLineWidth(2)
            modelDisplay.SetVisibility(True)
            modelNode.SetAndObserveDisplayNodeID(modelDisplay.GetID())
            modelNode.SetAndObservePolyData(polyData)

            print(f"✅ Visualization added for {study_name} (points + connections)")

        # Global lists for the final statistics
        prostate_size_est = []
        ctcbct_distance = []

        # Get the SubjectHierarchyNode
        shNode = slicer.vtkMRMLSubjectHierarchyNode.GetSubjectHierarchyNode(slicer.mrmlScene)
        studyItems = vtk.vtkIdList()
        shNode.GetItemChildren(shNode.GetSceneItemID(), studyItems)

        for i in range(studyItems.GetNumberOfIds()):
            studyItem = studyItems.GetId(i)
            studyName = shNode.GetItemName(studyItem)
            print(f"\nProcessing study: {studyName}")

            # **LOCAL** lists, reset for every study
            cbct_list = []
            ct_list = []
            volume_points_dict = {}
            CT_offset = 0

            # Get the child items of the study item
            volumeItems = vtk.vtkIdList()
            shNode.GetItemChildren(studyItem, volumeItems)

            # Iterate over all volumes in this study
            for j in range(volumeItems.GetNumberOfIds()):
                intermediateItem = volumeItems.GetId(j)
                finalVolumeItems = vtk.vtkIdList()
                shNode.GetItemChildren(intermediateItem, finalVolumeItems)  # search one level deeper!

                for k in range(finalVolumeItems.GetNumberOfIds()):
                    volumeItem = finalVolumeItems.GetId(k)
                    volumeNode = shNode.GetItemDataNode(volumeItem)

                    dicomUIDs = volumeNode.GetAttribute("DICOM.instanceUIDs")
                    if not dicomUIDs:
                        print("❌ This is an NRRD volume!")
                        continue  # skip if this is not a DICOM volume

                    volumeName = volumeNode.GetName()
                    imageItem = shNode.GetItemByDataNode(volumeNode)
                    modality = shNode.GetItemAttribute(imageItem, "DICOM.Modality")  # works!
                    #dimensions = volumeNode.GetImageData().GetDimensions()
                    #spacing = volumeNode.GetSpacing()
                    #print(f"Volume {volumeNode.GetName()} - Dimensions: {dimensions}, Spacing: {spacing}")

                    if modality != "CT":
                        print("Not a CT")
                        continue  # skip if this is not a CT

                    # Check that the volume is present in the scene
                    if not slicer.mrmlScene.IsNodePresent(volumeNode):
                        print(f"Volume {volumeName} not present in the scene.")
                        continue

                    # Check the manufacturer (DICOM metadata)
                    manufacturer = shNode.GetItemAttribute(imageItem, 'DICOM.Manufacturer')
                    #manufacturer = volumeNode.GetAttribute("DICOM.Manufacturer")
                    #manufacturer = slicer.dicomDatabase.fileValue(uid, "0008,0070")
                    #print(manufacturer)

                    # Decide whether this is a CBCT or a CT
                    if "varian" in manufacturer.lower() or "elekta" in manufacturer.lower():
                        cbct_list.append(volumeName)
                        scan_type = "CBCT"
                        yesCbct = True
                    else:  # Siemens or Philips
                        ct_list.append(volumeName)
                        scan_type = "CT"
                        yesCbct = False

                    if volumeNode and volumeNode.IsA("vtkMRMLScalarVolumeNode"):
                        print(f"✔️ {scan_type} {volumeNode.GetName()} (ID: {volumeItem})")
                    if not volumeNode or not volumeNode.IsA("vtkMRMLScalarVolumeNode"):
                        print("Can't find volumeNode")
                        #continue  # skip if this is not a valid volume

                    # Detect points in the volume
                    ustvari_marker = not yesCbct  # create markers before the table alignment
                    grouped_points = detect_points_region_growing(volumeName, yesCbct, ustvari_marker, intensity_threshold=3000)
                    #print(f"Populating volume_points_dict with key ('{scan_type}', '{volumeName}')")
                    volume_points_dict[(scan_type, volumeName)] = grouped_points
                    #print(volume_points_dict)  # check that the key was added correctly

            # If we have both volume types (CBCT and CT) **within the same study**
            if cbct_list and ct_list:
                ct_volume_name = ct_list[0]  # use the first CT as the reference
                print(f"\nProcessing CT: {ct_volume_name}")
                yesCbct = False
                mm_offset, pixel_offset = find_table_top_z(ct_volume_name, writefilecheck, yesCbct)
                CT_offset, CT_spacing = align_cbct_to_ct(slicer.util.getNode(ct_volume_name), "CT", mm_offset)

                ct_points = [centroid for centroid, _ in volume_points_dict[("CT", ct_volume_name)]]
                print(f"CT points: {ct_points}")
                if len(ct_points) < 3:
                    print(f"CT volume {ct_volume_name} doesn't have enough points for registration. Points: {len(ct_points)}")
                    continue
                else:
                    for cbct_volume_name in cbct_list:
                        print(f"\nProcessing CBCT Volume: {cbct_volume_name}")
                        yesCbct = True
                        scan_type = "CBCT"  # redundant but here we are
                        cbct_volume_node = slicer.util.getNode(cbct_volume_name)

                        mm_offset, pixel_offset = find_table_top_z(cbct_volume_name, writefilecheck, yesCbct)
                        cbct_points = [centroid for centroid, _ in volume_points_dict[("CBCT", cbct_volume_name)]]  # stale data
                        cbct_points_array = np.array(cbct_points)  # convert to a numpy array

                        #print_orientation(ct_volume_name)
                        #print_orientation(cbct_volume_name)
                        #for i, (cb, ct) in enumerate(zip(cbct_points, ct_points)):
                        #    print(f"Pair {i}: CBCT {cb}, CT {ct}, diff: {np.linalg.norm(cb - ct):.2f}")

                        align_cbct_to_ct(cbct_volume_node, scan_type, mm_offset, CT_offset, CT_spacing)

                        #cbct_points = [centroid for centroid, _ in volume_points_dict[("CBCT", cbct_volume_name)]]  # stale data
                        #for i in range(len(cbct_points)):
                        #    x, y, z = cbct_points[i]
                        #    y_new = y + mm_offset  # account for the table-alignment shift
                        #    cbct_points[i] = (x, y_new, z)  # update the marker with the shifted height

                        ustvari_marker = False  # do not create markers here
                        cbct_points = [centroid for centroid, _ in detect_points_region_growing(cbct_volume_name, yesCbct, ustvari_marker, intensity_threshold=3000)]
                        #cbct_points = detect_points_region_growing(cbct_volume_name, yesCbct, intensity_threshold=3000)
                        #cbct_points = [centroid for centroid, _ in volume_points_dict[("CBCT", cbct_volume_name)]]  # stale data

                        if len(cbct_points) < 3:
                            print(f"CBCT Volume '{cbct_volume_name}' doesn't have enough points for registration. Points: {len(cbct_points)}")
                            continue

                        # Sort/match the points (X/Y/Z) so the pairing does not break the alignment
                        cbct_points = match_points(cbct_points, ct_points)
                        #visualize_point_matches_in_slicer(cbct_points, ct_points, studyName)  # connects the marker pairs

                        # Distance bookkeeping
                        distances_ct_cbct = []
                        distances_internal = {"A-B": [], "B-C": [], "C-A": []}
                        cbct_volume_node = slicer.util.getNode(cbct_volume_name)

                        # Sort the points by the Z coordinate (or X/Y if you prefer another axis)
                        cbct_points_sorted = cbct_points_array[np.argsort(cbct_points_array[:, 2])]

                        # Distances between CT and CBCT (SORTED points!)
                        d_ct_cbct = np.linalg.norm(cbct_points_sorted - ct_points, axis=1)
                        distances_ct_cbct.append(d_ct_cbct)

                        # Distances between the points within the SORTED cbct_points
                        d_ab = np.linalg.norm(cbct_points_sorted[0] - cbct_points_sorted[1])
                        d_bc = np.linalg.norm(cbct_points_sorted[1] - cbct_points_sorted[2])
                        d_ca = np.linalg.norm(cbct_points_sorted[2] - cbct_points_sorted[0])

                        # Sort the distances so they are always in the same order
                        sorted_distances = sorted([d_ab, d_bc, d_ca])
                        distances_internal["A-B"].append(sorted_distances[0])
                        distances_internal["B-C"].append(sorted_distances[1])
                        distances_internal["C-A"].append(sorted_distances[2])

                        # Add the study name for the statistics
                        studyName = shNode.GetItemName(studyItem)

                        # **Store the distances in the global lists**
                        prostate_size_est.append({"Study": studyName, "Distances": sorted_distances})
                        ctcbct_distance.append({"Study": studyName, "Distances": list(distances_ct_cbct[-1])})  # convert to a list

                        # Choose the method according to the user's selection
                        chosen_rotation_matrix = np.eye(3)
                        chosen_translation_vector = np.zeros(3)
                        print("Markers before the transform:", cbct_points, ct_points)

                        if applyScaling:
                            scaling_factors = compute_optimal_scaling_per_axis(cbct_points, ct_points)
                            #print("Scaling factors: ", scaling_factors)
                            cbct_points = compute_scaling(cbct_points, scaling_factors)

                        initial_error = np.mean(np.linalg.norm(np.array(cbct_points) - np.array(ct_points), axis=1))
                        if initial_error > 30:
                            print("⚠️ Initial distance too large, applying centroid prealignment.")
                            cbct_points = prealign_by_centroid(cbct_points, ct_points)

                        if applyRotation:
                            if selectedMethod == "Kabsch":
                                chosen_rotation_matrix = compute_Kabsch_rotation(cbct_points, ct_points)
                            elif selectedMethod == "Horn":
                                chosen_rotation_matrix = compute_Horn_rotation(cbct_points, ct_points)
                            elif selectedMethod == "Iterative Closest Point (Kabsch)":
                                _, chosen_rotation_matrix, _ = icp_algorithm(cbct_points, ct_points)
                            #print("Rotation Matrix:\n", chosen_rotation_matrix)

                        if applyTranslation:
                            chosen_translation_vector, cbct_points_transformed, method_used = choose_best_translation(cbct_points, ct_points, chosen_rotation_matrix)  # picks the better translation
                            per_axis_error = np.abs(np.array(cbct_points_transformed) - np.array(ct_points))
                            dz_mean = np.mean(per_axis_error[:, 2])
                            print(f"per-axis error: {per_axis_error}, dz_mean err: {dz_mean:.2f} mm")
                            #chosen_translation_vector = compute_translation(cbct_points, ct_points, chosen_rotation_matrix)
                            #print("Translation Vector:\n", chosen_translation_vector)

                        # Create a vtkMRMLTransformNode and attach it to the CBCT volume
                        imeTransformNoda = cbct_volume_name + " Transform"
                        transform_node = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLTransformNode", imeTransformNoda)

                        # Build the transform and apply it
                        vtk_transform = create_vtk_transform(chosen_rotation_matrix, chosen_translation_vector)
                        transform_node.SetAndObserveTransformToParent(vtk_transform)

                        # Get the CBCT volume and attach the transform to it
                        cbct_volume_node = slicer.util.getNode(cbct_volume_name)
                        cbct_volume_node.SetAndObserveTransformNodeID(transform_node.GetID())

                        # Harden the transform on the volume (apply it physically)
                        slicer.vtkSlicerTransformLogic().hardenTransform(cbct_volume_node)  # applies the transform to the volume
                        slicer.mrmlScene.RemoveNode(transform_node)  # remove the transform node from the scene
                        #print("Transform successful on ", cbct_volume_name)

                        ustvari_marker = True  # create markers
                        cbct_points = [centroid for centroid, _ in detect_points_region_growing(cbct_volume_name, yesCbct, ustvari_marker, intensity_threshold=3000)]
                        print("Markers after the transform:\n", cbct_points, ct_points)

                        # Compute the residual distances
                        errors = [np.linalg.norm(cbct - ct) for cbct, ct in zip(cbct_points, ct_points)]
                        mean_error = np.mean(errors)
                        print("Individual errors:", errors)
                        print("📏 Mean registration error: {:.2f} mm".format(mean_error))
            else:
                print(f"Study {studyItem} doesn't have any appropriate CT or CBCT volumes.")

        # Print the global statistics
        if writefilecheck:
            print("Distances between CT & CBCT markers: ", ctcbct_distance)
            print("Distances between pairs of markers for each volume: ", prostate_size_est)

            # Define file paths
            prostate_size_file = os.path.join(os.path.dirname(__file__), "prostate_size.csv")
            ctcbct_distance_file = os.path.join(os.path.dirname(__file__), "ct_cbct_distance.csv")

            # Write prostate size data
            with open(prostate_size_file, mode='w', newline='') as file:
                writer = csv.writer(file)
                writer.writerow(["Prostate Size"])
                for size in prostate_size_est:
                    writer.writerow([size])
            print("Prostate size file written at ", prostate_size_file)

            # Write CT-CBCT distance data
            with open(ctcbct_distance_file, mode='w', newline='') as file:
                writer = csv.writer(file)
                writer.writerow(["CT-CBCT Distance"])
                for distance in ctcbct_distance:
                    writer.writerow([distance])
            print("CT-CBCT distance file written at ", ctcbct_distance_file)

        end_time = time.time()
        # Calculate and print the elapsed time
        elapsed_time = end_time - start_time
        # Convert to minutes and seconds
        minutes = int(elapsed_time // 60)
        seconds = elapsed_time % 60
        print(f"Execution time: {minutes} minutes and {seconds:.6f} seconds")