curve_fit_parametri.py

import ivp
import solveMatrix
import os
import cModel_posodabljanje as cModel
import sys
import json
from contextlib import contextmanager
import numpy as np
import time
import scipy.interpolate
import shutil
import importlib
from scipy.optimize import least_squares
importlib.reload(solveMatrix)
import runSolver
importlib.reload(runSolver)
def updateSetup(job):
    setupFileSrc = job['setupFile']
    setupFile = os.path.join(job['jobDir'], 'setupInput.json')
    with open(setupFileSrc, 'r') as f:
        setup = json.load(f)
    try:
        setup.update(job['setupUpdates'])
    except KeyError:
        pass
    with open(setupFile, 'w+') as f:
        f.write(json.dumps(setup))
    return setupFile
def generate_noisy_data(full_sol, noise_std=20.5):
    full_sol = np.asarray(full_sol)  # Ensure input is a NumPy array
    random_state = np.random.RandomState(seed=None)
    # Create highly volatile noise
    noise = random_state.normal(loc=0, scale=noise_std, size=full_sol.shape) ** 2  # Squared to amplify large deviations
    noise2 = np.random.laplace(loc=0, scale=noise_std * 10, size=full_sol.shape)  # Larger scale for more extreme spikes
    # Introduce highly chaotic scaling factors
    random_scaling = random_state.uniform(-200, 400, size=full_sol.shape)  # Extended range for more variation
    random_exponent = random_state.uniform(4, 12, size=full_sol.shape)  # Larger exponents for more instability
    # Apply extreme, chaotic transformations
    noisy_data = np.zeros_like(full_sol)
    for i in range(len(full_sol)):
        perturbation = random_scaling[i] * (abs(noise[i]) ** random_exponent[i]) - noise2[i] * np.sin(full_sol[i])
        noisy_data[i] = (1 + np.tanh(perturbation)) * full_sol[i] * np.cosh(noise[i])
        # Introduce frequent large perturbations to increase instability
        if random_state.rand() < 0.75:  # 75% chance of a drastic shift
            noisy_data[i] *= random_state.uniform(2, 5)
    return noisy_data
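# A minimal usage sketch (assumption: the clean signal is a 1-D array of model outputs); kept
# commented out so it does not run when the script is executed:
# demo_clean = np.linspace(1.0, 10.0, 50)
# demo_noisy = generate_noisy_data(demo_clean, noise_std=0.5)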
def calculate_residuals(full_sol, noisy_data):
    """
    Calculates residuals based on the solution and noisy data.
    """
    residuals = np.ravel(noisy_data - full_sol)
    #print(residuals)
    return residuals

# Updated cost function
def cost_function(params, parnames, noisy_data, model, startPoint, setup, jobDir):
    """
    Cost function that calculates residuals based on the model.
    """
    #print(params)
    model.setValues(parnames, params)
    clean_dir(jobDir)
    t, sol, se, qt, sOut, full_t, full_sol = runSolver.solve(model, setup, startPoint, jobDir)
    residuals = calculate_residuals(np.array(full_sol), noisy_data)
    return residuals
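# Sketch only: cost_function returns a flat residual vector, which is the shape expected by
# scipy.optimize.least_squares (imported above). A bounded fit against noisy_data would look
# roughly like the call below (noisy_data, lower_bounds and upper_bounds assumed to be defined):
# result = least_squares(
#     cost_function, initial_guesses,
#     args=(param_names, noisy_data, model1, start_point, setup1, jobDir),
#     bounds=(lower_bounds, upper_bounds))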
def create_name_value_dict(original_dict):
    return {
        val['name']: val['value']
        for key, val in original_dict.items()
        if isinstance(val, dict) and 'name' in val and 'value' in val
    }
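# Illustrative mapping only: an entry such as {'k1': {'name': 'k1', 'value': 0.1}} in
# model1.parSetup['parameters'] becomes {'k1': 0.1} in the returned dictionary; entries
# without both 'name' and 'value' are skipped.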
# Create parameter dictionary
def clean_dir(jobDir):
    shutil.rmtree(jobDir)
    os.mkdir(jobDir)
# LOAD THE INITIAL DATA
##############################################################################################################################################
fh = os.path.expanduser("~")
jobDir = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', "rezultati", "curve_fit")
srcDir = "NONE"
setup_file = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'setup', 'setupMinute.json')
model_file = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'models', 'cDiazepam.json')
parameter_file1 = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'models', 'cDiazepam_parameters.json')
# First model with the first parameter file
model1 = cModel.model()
model1.parse(model_file, parameter_file1)
setup1 = runSolver.parseSetup(setup_file)
name_value_dict1 = create_name_value_dict(model1.parSetup['parameters'])
# name_value_dict1 and name_value_dict2 can now be used separately
# Excluded parameters
vse_parametri = {
    'commentsInput', 'venousInput', '_venousInput', 'inputA', 'inputTau', 'commentPC', 'adiposePC', 'brainPC',
    'heartPC', 'kidneyPC', 'liverPC', 'lungPC', 'musclePC', 'remainderPC', 'skinPC', 'splanchnicPC',
    'stomachPC', 'testesPC', 'commentFlows', 'flowNotes', 'adiposeFlow', 'brainFlow', 'heartFlow',
    'kidneyFlow', 'liverInFlow', 'liverOutFlow', 'gutOutFlow', 'totalFlow', 'muscleFlow', 'skinFlow',
    'splanchnicFlow', 'stomachFlow', 'testesFlow', 'remainderFlow', 'adiposeVolume', 'brainVolume',
    'heartVolume', 'kidneyVolume', 'liverVolume', 'lungVolume', 'muscleVolume', 'skinVolume',
    'splanchnicVolume', 'stomachVolume', 'testesVolume', 'remainderVolume', 'venousVolume',
    'arterialVolume', 'liverToExcrementNotes', 'k1', 'one', 'zero', 'two'
}
excluded_params1 = {'venousInput', "heartPC", "kidneyPC", "remainderPC", "skinPC", "splanchnicPC", "stomachPC",
                    "testesPC", "one", "venousVolume", "heartVolume", "brainVolume", "adiposeVolume"}
#excluded_params1 = {}
# Create initial guesses without excluded parameters
initial_guesses = np.array(
    [val for par, val in name_value_dict1.items() if par not in excluded_params1],
    dtype=np.float64
)
# Create the list of parameter names without excluded parameters
param_names = [par for par in name_value_dict1.keys() if par not in excluded_params1]
initial_pars = np.array([x for x in initial_guesses])
#initial_pars[3] *= 1.1
#print("kolicnik")
#print(initial_guesses/initial_pars)
#print(param_names)
# COMPUTE THE SOLUTION WITH THE INITIAL PARAMETERS
start_point = runSolver.findStartPoint("NONE", setup1)
#model1.setValues(param_names, initial_pars)
clean_dir(jobDir)
t, sol, se, qt, sOut, full_t, full_sol = runSolver.solve(model1, setup1, start_point, jobDir)
clean_dir(jobDir)
# Step 4: Create a new dictionary for the optimized parameters
# Map the optimized parameters back to the correct order in the original parameter dictionary
optimized_params_dict1 = {par: initial_guesses[i] for i, par in enumerate(param_names)}
# Step 5: Include the excluded parameters with their original values
for par in excluded_params1:
    optimized_params_dict1[par] = name_value_dict1[par]
# Step 6: Ensure that everything stays in order in the final dictionary
# This will maintain the order of parameters as they appear in the original `name_value_dict1`
final_dict1 = {par: optimized_params_dict1.get(par, name_value_dict1[par]) for par in name_value_dict1}
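# Optional sanity-check sketch (not required by the flow below): final_dict1 should keep the
# key order of name_value_dict1 while carrying the values assembled above.
# assert list(final_dict1) == list(name_value_dict1)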
#################################################################################################################################
# WITH THE SECOND PARAMETER SET
##########################################################################################################################
fh = os.path.expanduser("~")
# Second model with the second parameter file
parameter_file2 = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'models', 'cDiazepam_parameters1.json')
model2 = cModel.model()
model2.parse(model_file, parameter_file2)
setup2 = runSolver.parseSetup(setup_file)
name_value_dict2 = create_name_value_dict(model2.parSetup['parameters'])
# Excluded parameters
excluded_params2 = {'venousInput', "heartPC", "kidneyPC", "remainderPC", "skinPC", "splanchnicPC", "stomachPC",
                    "testesPC", "one", "venousVolume", "heartVolume", "brainVolume", "adiposeVolume"}
#excluded_params2 = {}
# Step 1: Filter out excluded parameters for initial guesses
initial_guesses2 = np.array(
    [val for par, val in name_value_dict2.items() if par not in excluded_params2],
    dtype=np.float64
)
param_names2 = [par for par in name_value_dict2.keys() if par not in excluded_params2]
initial_pars2 = np.array([x for x in initial_guesses2])
# Step 4: Create a new dictionary for the optimized parameters
# Map the optimized parameters back to the correct order in the original parameter dictionary
optimized_params_dict2 = {par: initial_guesses2[i] for i, par in enumerate(param_names2)}
# Step 5: Include the excluded parameters with their original values
for par in excluded_params2:
    optimized_params_dict2[par] = name_value_dict2[par]
# Step 6: Ensure that everything stays in order in the final dictionary
# This will maintain the order of parameters as they appear in the original `name_value_dict2`
final_dict2 = {par: optimized_params_dict2.get(par, name_value_dict2[par]) for par in name_value_dict2}
# Step 2: Run the optimization
clean_dir(jobDir)
lower_bound_value = 0     # Example: all parameters must be ≥ 0
upper_bound_value = 1000  # Example: all parameters must be ≤ 1000
# Create bounds arrays of the same length as initial_guesses2
lower_bounds = np.full_like(initial_guesses2, lower_bound_value)
upper_bounds = np.full_like(initial_guesses2, upper_bound_value)
start_point2 = runSolver.findStartPoint("NONE", setup2)
#t, sol, se, qt, sOut, full_t, full_sol2 = runSolver.solve(model2, setup2, start_point2, jobDir)
from scipy.optimize import curve_fit

# Define the wrapper function for curve_fit
def wrapped_cost_function(xdata, *param_values_and_args):
    """
    Curve-fitting wrapper that adapts the cost function to work with curve_fit.
    """
    # Separate parameter values from extra arguments
    param_values = param_values_and_args[:len(param_names2)]  # Extract parameter values
    model, startPoint, setup, jobDir = param_values_and_args[len(param_names2):]  # Extract extra arguments
    # Set parameter values in the model
    model.setValues(param_names2, param_values)
    clean_dir(jobDir)  # Clean the job directory before solving
    # Solve the model
    t, sol, se, qt, sOut, full_t, full_sol = runSolver.solve(model, setup, startPoint, jobDir)
    return np.ravel(full_sol)  # Flatten the output for curve_fit

# Prepare extra arguments (pack the model and related arguments in a tuple)
extra_args = (model2, start_point2, setup2, jobDir)
# Use `curve_fit`, passing the extra arguments through a lambda function
optimized_params3, covariance, infodict, errmsg, success = curve_fit(
    lambda xdata, *param_values: wrapped_cost_function(xdata, *param_values, *extra_args),
    xdata=np.arange(len(full_sol)),       # Explicit x-data (indices of the time points)
    ydata=np.ravel(full_sol),             # Explicit y-data (flattened reference output from the model1 run)
    p0=initial_guesses2,                  # Initial parameter guesses
    bounds=(lower_bounds, upper_bounds),  # Parameter bounds
    full_output=True                      # Get additional output
)
# Extract the number of function evaluations from the info dictionary
num_iterations = infodict.get('nfev', None)  # 'nfev' is the number of function evaluations (correlates with iterations)
print(f"Number of iterations: {num_iterations}")
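# Optional sketch (not part of the original flow): 1-sigma parameter uncertainties can be
# estimated from the covariance matrix returned by curve_fit; with bounded fits this is only
# approximate, and it assumes `covariance` is finite:
# perr = np.sqrt(np.diag(covariance))
# for nm, err in zip(param_names2, perr):
#     print(f"{nm}: +/- {err:.3g}")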
# Step 3: Retrieve the optimized parameters
#optimized_params3 = result.x
#print(result.x)
# Step 4: Create a new dictionary for the optimized parameters
# Map the optimized parameters back to the correct order in the original parameter dictionary
optimized_params_dict3 = {par: optimized_params3[i] for i, par in enumerate(param_names2)}
# Step 5: Include the excluded parameters with their original values
for par in excluded_params2:
    optimized_params_dict3[par] = name_value_dict2[par]
# Step 6: Ensure that everything stays in order in the final dictionary
# This will maintain the order of parameters as they appear in the original `name_value_dict2`
final_optimized_dict3 = {par: optimized_params_dict3.get(par, name_value_dict2[par]) for par in name_value_dict2}
# Now final_optimized_dict3 contains the optimized values in the same order as `name_value_dict2`
print(name_value_dict2)
print(final_optimized_dict3)
vrednosti3 = np.array(
    [val for par, val in final_optimized_dict3.items()],
    dtype=np.float64
)
param_names3 = [par for par in final_optimized_dict3.keys()]
#model2.setValues(param_names3, vrednosti3)
#################################################################################################################################
# CHECK WHETHER THE 3 SOLUTIONS REALLY DIFFER
#clean_dir(jobDir)
#t, sol, se, qt, sOut, full_t, full_sol1 = runSolver.solve(model1, setup1, start_point, jobDir)
#clean_dir(jobDir)
#t, sol, se, qt, sOut, full_t, full_sol2 = runSolver.solve(model2, setup2, start_point2, jobDir)
#model2.setValues(param_names3, vrednosti3)
#clean_dir(jobDir)
#t, sol, se, qt, sOut, full_t, full_sol3 = runSolver.solve(model2, setup2, start_point, jobDir)
#print(np.array(full_sol2)/np.array(full_sol1))
#print(np.array(full_sol3)/np.array(full_sol1))
#print(np.array(full_sol3)/np.array(full_sol2))
#print(initial_guesses2/initial_guesses)
#print(optimized_params3/initial_guesses2)
#print(optimized_params3/initial_guesses)
##################################################################################################################################
# SAVE THE PARAMETER SOLUTIONS
def create_or_clean_dir(directory):
    """Create the directory if it doesn't exist, otherwise clean it."""
    if os.path.exists(directory):
        shutil.rmtree(directory)  # Remove the existing directory and its contents
    os.makedirs(directory)  # Create a fresh directory

# Example structure to match the requested output format
def save_parameters(file_path, param_dict):
    """
    Overwrites the original file with the updated parameters.

    Parameters:
        file_path (str): The path where the updated parameters will be saved.
        param_dict (dict): The updated parameters to save.
    """
    # Ensure the directory exists
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    # Open the original file and load its content to preserve its structure
    with open(file_path, 'r') as f:
        data = json.load(f)
    # Update the 'parameters' field with the new parameters
    data['parameters'] = param_dict
    # Save the updated structure back to the same file
    with open(file_path, 'w') as f:
        json.dump(data, f, indent=4)
    print(f"Updated parameters have been written to {file_path}")
    return file_path  # Make sure the file path is returned
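# A minimal sketch of the JSON layout save_parameters expects at file_path (illustrative keys
# only; it assumes the file written beforehand by model.updateParameters already carries a
# top-level 'parameters' field that gets overwritten here):
# {
#     "parameters": {"k1": {"name": "k1", "value": 0.1}, "...": "..."},
#     "...": "..."
# }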
# Define unique subdirectories
jobDir1 = os.path.join(jobDir, "run1")
jobDir2 = os.path.join(jobDir, "run2")
jobDir3 = os.path.join(jobDir, "run3")
# Ensure the directories exist and are clean
create_or_clean_dir(jobDir1)
create_or_clean_dir(jobDir2)
create_or_clean_dir(jobDir3)
# FOR THE ORIGINAL MODEL
# Solve and save results in the respective directories
t, sol, se, qt, sOut, full_t, full_sol1 = runSolver.solve(model1, setup1, start_point, jobDir1)
# Define the file path for the new parameter file
new_param_file1 = os.path.join(jobDir1, "parameters.json")
# Update parameters and save them to the new file
model1.updateParameters(final_dict1, parameter_file1, new_param_file1)
# Save the updated parameters to the respective directory
save_parameters(os.path.join(jobDir1, "parameters.json"), model1.parSetup['parameters'])
# FOR THE SECOND ORIGINAL MODEL
new_param_file2 = os.path.join(jobDir2, "parameters.json")
t, sol, se, qt, sOut, full_t, full_sol2 = runSolver.solve(model2, setup2, start_point, jobDir2)
model2.updateParameters(final_dict2, parameter_file2, new_param_file2)
save_parameters(os.path.join(jobDir2, "parameters.json"), model2.parSetup['parameters'])
# Updated parameters and solution
new_param_file3 = os.path.join(jobDir3, "updated_parameters.json")
model2.setValues(param_names3, vrednosti3)
t, sol, se, qt, sOut, full_t, full_sol3 = runSolver.solve(model2, setup2, start_point, jobDir3)
model2.updateParameters(final_optimized_dict3, parameter_file2, new_param_file3)
# Save the parameters and immediately store the path for further use
param_file3 = save_parameters(os.path.join(jobDir3, "updated_parameters.json"), model2.parSetup['parameters'])
# param_file3 can now be used directly in further processing
print(f"Parameters for run3 saved at: {param_file3}")
########################################################################################################################
# BUILD THE THIRD MODEL (to make later plotting easier)
##############################################################################################################################################
# param_file3 already contains the full path to "updated_parameters.json"
import os
import glob
directory = os.path.dirname(param_file3)
jobDir3 = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', "rezultati", "curve_fit", "run3")
# Make sure parameter_file3 contains the correct file path, including the file name.
# If param_file3 only has the directory, append the file name
parameter_file3 = os.path.join(directory, "updated_parameters.json")  # Specify the file name
# Check that it is a valid file
if os.path.isfile(parameter_file3):
    with open(parameter_file3, 'r') as f:
        # Code to process the file goes here
        pass
else:
    print(f"Error: {parameter_file3} is not a valid file.")
def clean_files(directory):
    """Delete all .txt and .csv files in the given directory."""
    # Use glob to find all .txt and .csv files in the directory
    txt_files = glob.glob(os.path.join(directory, "*.txt"))
    csv_files = glob.glob(os.path.join(directory, "*.csv"))
    # Combine both lists of files
    all_files = txt_files + csv_files
    # Loop through the list of files and delete each one
    for file in all_files:
        try:
            os.remove(file)
            print(f"Deleted: {file}")
        except Exception as e:
            print(f"Error deleting {file}: {e}")

clean_files(jobDir3)
clean_files(jobDir)
clean_files(jobDir1)
clean_files(jobDir2)
# Save all the information needed for plotting into the right files
runSolver.main([setup_file, model_file, parameter_file2, srcDir], jobDir2)
runSolver.main([setup_file, model_file, parameter_file1, srcDir], jobDir1)
runSolver.main([setup_file, model_file, parameter_file3, srcDir], jobDir3)
########################################################################################################################
# PLOTTING
########################################################################################################################
import matplotlib.pyplot as plt
job_configs = {
    'Osnovni': {
        'jobDir': os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'rezultati', 'curve_fit', "run1")
    },
    'Osnovni2': {
        'jobDir': os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'rezultati', 'curve_fit', 'run2')
    },
    'Posodobljeni_parametri': {
        'jobDir': os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', 'rezultati', 'curve_fit', 'run3')
    }
}
def getModel(solution, modelName):
    """
    Load and parse the model from the given solution dictionary using modelName.
    """
    Q = solution[modelName]  # Retrieve the solution dictionary for the specified model
    model = cModel.model()  # Create an instance of the cModel class
    setupFile = Q['setup']  # Get the setup file path from the solution
    modelFile = Q['model']  # Get the model file path from the solution
    parameterFile = Q['parameters']  # Get the parameter file path from the solution
    model.parse(modelFile, parameterFile)  # Parse the model file with the given parameters
    return model  # Return the loaded model

def mergeSolutions(seq):
    """
    Merge multiple solution dictionaries into a single dictionary.
    """
    out = {}
    for v in ['t', 'sol', 'se', 'qt', 'sOut']:  # Merge arrays from each solution
        out[v] = np.concatenate([x[v] for x in seq])
    for v in ['lut', 'lutSE', 'setup', 'model', 'parameters', 'qt', 'sOut']:  # Use data from the last solution for these keys
        out[v] = seq[-1][v]
    return out  # Return the merged dictionary
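# A usage sketch (hypothetical directories jobDirA and jobDirB holding consecutive segments of
# one run, each loadable by runSolver.loadSolutionFromDir): the time/solution arrays are simply
# concatenated, while lookup tables and setup come from the last segment.
# seq = [runSolver.loadSolutionFromDir(jobDirA, True), runSolver.loadSolutionFromDir(jobDirB, True)]
# merged = mergeSolutions(seq)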
# Load and merge solutions
solution = {}
for modelName, job in job_configs.items():
    seq = [runSolver.loadSolutionFromDir(job['jobDir'], True)]
    solution[modelName] = mergeSolutions(seq)
def analyze_model(model, setup):
    """
    Perform matrix operations and print results.
    """
    tscale = runSolver.getScale(setup)  # Get the time scale from the setup
    model.inspect()  # Inspect the model
    nt = setup['nt']  # Number of time points
    qtmax = 1  # Maximum time (minutes)
    qt = np.linspace(0, qtmax, nt)  # Generate time points
    par = 'venousInput'  # Parameter to plot
    # Plot the parameter
    try:
        hw = model.get(par)  # Get the parameter from the model
        ht = [10 * hw['value'](x) for x in qt]  # Calculate parameter values over time
        plt.plot(qt / tscale, ht)  # Plot the parameter values
    except (KeyError, TypeError):  # Handle errors if the parameter is not found
        pass
    # Measure performance of matrix operations
    start_time = time.time()  # Record start time
    for i in range(100000):  # Perform the matrix operation 100,000 times
        model.M(1e7)  # Call the matrix operation
    end_time = time.time()  # Record end time
    print(f"100,000 evaluations of model.M took {end_time - start_time:.2f} s")
    fM = model.M(0)  # Get the matrix from the model
    np.set_printoptions(suppress=True, precision=2, linewidth=150)  # Set NumPy print options
    np.set_printoptions(suppress=False)  # Reset NumPy print options
    v, Q = np.linalg.eig(fM)  # Perform eigenvalue decomposition
    Q1 = np.linalg.inv(Q)  # Calculate the inverse of the eigenvector matrix
    D = np.diag(v)  # Create a diagonal matrix of eigenvalues
    t = np.linspace(0, 100, 101)  # Generate time points for plotting
    fy = [model.u(x)[14] for x in t]  # Calculate model output over time
    plt.figure()  # Create a new figure
    plt.plot(fy)  # Plot the model output
    fu = model.u(0)  # Get the model output at time 0
    plt.show()  # Display the plot
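# Sketch of what the eigendecomposition in analyze_model enables (not executed here): for a
# constant matrix fM = Q @ D @ Q1, the homogeneous part of the linear system y' = fM y evolves
# as y(t) = Q @ np.diag(np.exp(v * t)) @ Q1 @ y0, where y0 is a hypothetical initial state.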
def plot_results(solution, modelName):
    """
    Plot results for the specified model from the solution.
    """
    model = getModel(solution, modelName)  # Load the model
    setup = solution[modelName]['setup']  # Get the setup from the solution
    tscale = runSolver.getScale(setup)  # Get the time scale
    name = ['venous', 'adipose', 'brain', 'heart', 'kidney', 'liver', 'lung', 'muscle', 'skin', 'stomach',
            'splanchnic', 'arterial', 'remainder', 'excrement']  # Compartment names
    tmax = solution[modelName]['t'][-1]  # Get the maximum time from the solution
    alias = {}  # Optional compartment-name aliases; empty, so unknown compartments are simply skipped
    # Define colors and shading for the models
    models = {
        'Osnovni': {'color': 'orange', 'shadeColor': 'red'},
        'Osnovni2': {'color': 'blue', 'shadeColor': 'green'},
        'Posodobljeni_parametri': {'color': 'black', 'shadeColor': 'yellow'}
    }
    fig, axs = plt.subplots(5, 3, figsize=(15, 20))  # Create a grid of subplots
    for i in range(len(name)):  # Loop over each compartment
        row = i // 3  # Determine row index
        col = i % 3  # Determine column index
        ax = axs[row, col]  # Get the subplot
        for m in models:  # Loop over each model
            fM = models[m]  # Get model configuration
            Q = solution[m]  # Get solution for the model
            v = name[i]  # Get the compartment name
            try:
                j = Q['lut'][v]  # Get the index for the compartment
            except KeyError:
                try:
                    v1 = alias[v]  # Fall back to an alias if one is defined
                    j = Q['lut'][v1]  # Get the index for the alias
                except KeyError:
                    continue
            fy = Q['sol'][:, j]  # Get the solution for the compartment
            fe = Q['se'][:, j]  # Get the standard error for the compartment
            t = Q['t']  # Get the time points
            # Plot the mean values and confidence intervals
            ax.plot(t / tscale, fy, color=fM['color'], label=f'{m} sol')
            ax.fill_between(t / tscale, fy - fe, fy + fe, color=fM['shadeColor'], alpha=0.1)  # Shaded confidence band
            ax.plot(t / tscale, fy - fe, color=fM['shadeColor'], linewidth=1, alpha=0.2)  # Lower bound
            ax.plot(t / tscale, fy + fe, color=fM['shadeColor'], linewidth=1, alpha=0.2)  # Upper bound
            # Find the maximum y-value including confidence intervals
            max_y_value = max(np.max(fy + fe), np.max(fy - fe))
            ax.set_ylim([0, min(max_y_value, 10000)])  # Limit the y-axis to the max value or 10000, whichever is smaller
        ax.set_xlim([0, 1.1 * tmax / tscale])  # Set x-axis limits
        ax.set_xlabel(setup['tUnit'])  # Set x-axis label
        ax.set_title(name[i])  # Set plot title
        ax.legend()  # Add legend
    # output_file = os.path.join(fh, 'Documents', 'Sola', 'IJS', 'PBPK_ociscen', f'{modelName}_results.pdf')
    plt.tight_layout()
    # plt.savefig(output_file)  # Save the figure
    plt.show()  # Display the plot
# Analyze and plot results for the specified models
for modelName in job_configs.keys():
    if modelName == 'Osnovni':  # Analyze only the specified model
        analyze_model(getModel(solution, modelName), solution[modelName]['setup'])
    plot_results(solution, modelName)