Browse source code

Merged to head. Currently a concrete path is specified in segmentation/test/testConfig.cfg.template; it should be replaced by generic paths.

Andrej Studen/Merlin 3 years ago
parent
commit
bf94d54099
25 changed files with 1964 additions and 47 deletions
  1. pythonScripts/fillClinicalDataInternational.ipynb (+584 -0)
  2. pythonScripts/findCandidatesForSchedule.py (+137 -0)
  3. pythonScripts/generateFigures.py (+229 -0)
  4. pythonScripts/iraemmUploadTestPatients.ipynb (+22 -0)
  5. pythonScripts/modifyDataset.py (+61 -0)
  6. pythonScripts/organ_percentile.py (+153 -0)
  7. pythonScripts/populateImagingFromTransferList.py (+83 -18)
  8. pythonScripts/preprocess.py (+15 -8)
  9. pythonScripts/runPython.sh (+1 -2)
  10. pythonScripts/runPythonnnUNet.sh (+5 -0)
  11. pythonScripts/runSegmentationDM.py (+47 -7)
  12. pythonScripts/runSegmentationnnUNet.py (+259 -0)
  13. pythonScripts/test.py (+41 -0)
  14. segmentation/saved_models/DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt.data-00000-of-00001 (BIN)
  15. segmentation/saved_models/DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt.index (BIN)
  16. segmentation/saved_models/DM_defaults.DM_train_qtii_LABELMASKS4.final.2020-10-31.05.59.36.425298.model.ckpt.data-00000-of-00001 (BIN)
  17. segmentation/saved_models/DM_defaults.DM_train_qtii_LABELMASKS4.final.2020-10-31.05.59.36.425298.model.ckpt.index (BIN)
  18. segmentation/saved_models/INFO_ABOUT_MODELS.txt (+0 -6)
  19. segmentation/test/testConfig.cfg.template (+1 -1)
  20. slicerModules/iraemmBrowser.py (+3 -3)
  21. templates/segmentation.json.sample (+2 -2)
  22. templates/segmentationIRAEMM.json (+84 -0)
  23. templates/segmentationIRAEMM_ONKO.json (+87 -0)
  24. templates/segmentationPreproces.json (+66 -0)
  25. templates/segmentationTCIA.json (+84 -0)
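The commit message asks for the concrete path in segmentation/test/testConfig.cfg.template to be replaced by a generic one. The template itself is not shown in this view, so the lines below are only a hypothetical illustration, reusing the __home__ placeholder that the scripts in this commit substitute at run time:

# hypothetical testConfig.cfg.template entry; the real key names are not shown in this view
# concrete: model = /home/user/segmentation/saved_models/DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt
# generic:  model = __home__/segmentation/saved_models/DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt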

+ 584 - 0
pythonScripts/fillClinicalDataInternational.ipynb

@@ -0,0 +1,584 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Using: /home/studen/.labkey/astuden/astuden.crt\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/lib/python3/dist-packages/urllib3/connection.py:391: SubjectAltNameWarning: Certificate for merlin.fmf.uni-lj.si has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 for details.)\n",
+      "  warnings.warn(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "User: andrej studen CSRF: 52cd68383b7a832e93e7d9b5801002e5\n"
+     ]
+    }
+   ],
+   "source": [
+    "import sys\n",
+    "import os\n",
+    "import SimpleITK\n",
+    "import numpy\n",
+    "import matplotlib.pyplot\n",
+    "import chardet\n",
+    "import json\n",
+    "sys.path.append(os.path.join(os.path.expanduser('~'),'software','src','nixSuite','wrapper'))\n",
+    "import nixWrapper\n",
+    "nixWrapper.loadLibrary('labkeyInterface')\n",
+    "import labkeyInterface\n",
+    "net=labkeyInterface.labkeyInterface()\n",
+    "fconfig=os.path.join(os.path.expanduser('~'),'.labkey','network.json')\n",
+    "net.init(fconfig)\n",
+    "print('Using: {}'.format(net.connectionConfig['SSL']['user']))\n",
+    "net.getCSRF()\n",
+    "import labkeyFileBrowser\n",
+    "sys.path.append(os.getcwd())\n",
+    "fb=labkeyFileBrowser.labkeyFileBrowser(net)\n",
+    "project='iPNUMMretro/Study'\n",
+    "import labkeyDatabaseBrowser\n",
+    "db=labkeyDatabaseBrowser.labkeyDB(net)\n",
+    "ds=db.selectRows(project,'study','ClinicalData',[])\n",
+    "patients=[row['PatientId'] for row in ds['rows']]\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 61,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/lib/python3/dist-packages/urllib3/connection.py:391: SubjectAltNameWarning: Certificate for merlin.fmf.uni-lj.si has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 for details.)\n",
+      "  warnings.warn(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "User: andrej studen CSRF: da3766ebe339095e3a6098173e1021be\n",
+      "User: andrej studen CSRF: f6a2f89d2f85bab544539a2a6369623f\n",
+      "User: andrej studen CSRF: 9f326c3dcb4b44d6a7e5ab4f8a7ad8e5\n",
+      "NIX-LJU-D2002-IRAE-A002: Row 1 Could not convert 'Brez simptomov' for field pneumonitisDate, should be of type Timestamp\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/lib/python3/dist-packages/urllib3/connection.py:391: SubjectAltNameWarning: Certificate for merlin.fmf.uni-lj.si has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 for details.)\n",
+      "  warnings.warn(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "User: andrej studen CSRF: 23dc8fd59e280d53b02bbe070154ddfd\n",
+      "User: andrej studen CSRF: a5262b1412a1eee7c4b89b1125cae433\n",
+      "User: andrej studen CSRF: 43860ad987c603df25731d108a6f8001\n",
+      "User: andrej studen CSRF: af866c8085460ff91530b735e3c736dd\n",
+      "User: andrej studen CSRF: 24f34a00e4376366740efe2e187fa4e2\n",
+      "User: andrej studen CSRF: 933ce18a272115a2183d751a91dba4e3\n",
+      "User: andrej studen CSRF: cf62df2f5e93ae8f4585e1180edd81d2\n",
+      "User: andrej studen CSRF: 35fbfe403566416581defd2efb709fba\n",
+      "User: andrej studen CSRF: f73fbbe1e541cd5ede1ff09b7f5807be\n",
+      "User: andrej studen CSRF: 882aac6ea404860faab770ae320df827\n",
+      "User: andrej studen CSRF: bfdb52f52cae66c27d88d93dc2dbe2bf\n",
+      "User: andrej studen CSRF: 3c48c8bec64e886deb27ee07d7f100c7\n",
+      "NIX-LJU-D2002-IRAE-A014: Row 1 Could not convert '2 (bolj gradus 1)' for field colitisGrade, should be of type Integer\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/lib/python3/dist-packages/urllib3/connection.py:391: SubjectAltNameWarning: Certificate for merlin.fmf.uni-lj.si has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 for details.)\n",
+      "  warnings.warn(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "User: andrej studen CSRF: 753d6094324a2cf7b0c2183a58efc7f5\n",
+      "User: andrej studen CSRF: f062199d4b6f0ae27c79ae8419d14d78\n",
+      "User: andrej studen CSRF: 008a3f6d93aa90095b3583f755820351\n",
+      "User: andrej studen CSRF: afdb1f8aaf40d60beaba5b3bdb7a1e5e\n",
+      "User: andrej studen CSRF: eaf924935c314c9e900ee44e4cfbc01f\n",
+      "User: andrej studen CSRF: a4bbb057b0b857b171f1d64df73f4ece\n",
+      "User: andrej studen CSRF: 762f91b1c9d5660a47e9db009c5ef3dd\n",
+      "User: andrej studen CSRF: b4bf09d3c6253cf1778cf469ae44e835\n",
+      "User: andrej studen CSRF: efb18145b40fc1f1bd11eb1453d29eef\n",
+      "User: andrej studen CSRF: 850ae1edf1e364273f7a61a115e3a6b1\n",
+      "User: andrej studen CSRF: b776dca81c5e8a4229c1cfaf9271e14a\n",
+      "User: andrej studen CSRF: 8beff4a7833d3ab9038e39b03e10a6ce\n",
+      "User: andrej studen CSRF: 9ff9519885f76f6915d9882745416f3c\n",
+      "User: andrej studen CSRF: 6d04cf5aa9301790c5f88f358f016954\n",
+      "User: andrej studen CSRF: e6340e8bded2d34150a4c897288477af\n",
+      "User: andrej studen CSRF: 351d0f95a8414060f042fd76a6668417\n"
+     ]
+    }
+   ],
+   "source": [
+    "rows=ds['rows']\n",
+    "for row in rows:\n",
+    "    idFilter={'variable':'PatientId','value':row['PatientId'],'oper':'eq'}\n",
+    "    dsQ=db.selectRows(project,'study','ClinicalDataInternational',[idFilter])\n",
+    "    \n",
+    "    if len(dsQ['rows'])==0:\n",
+    "        fields=['PatientId','SequenceNum']\n",
+    "        qrow={f:row[f] for f in fields}\n",
+    "        mode='insert' \n",
+    "    else:\n",
+    "        qrow=dsQ['rows'][0]\n",
+    "        mode='update'\n",
+    "    \n",
+    "    copyFields=['itStartAge']\n",
+    "    for f in copyFields:\n",
+    "        qrow[f]=row[f]\n",
+    "    \n",
+    "    if row['sex']==0:\n",
+    "        qrow['gender']=1\n",
+    "    if row['sex']==1:\n",
+    "        qrow['gender']=2\n",
+    "    qrow['aeStartDate']=row['aeIdentificationDate']\n",
+    "    \n",
+    "    if row['origoCode']==0:\n",
+    "        qrow['melanomaPrimaryAnatomicSite']=3\n",
+    "    if row['origoCode']==1:\n",
+    "        qrow['melanomaPrimaryAnatomicSite']=1\n",
+    "    if row['origoCode']==2:\n",
+    "        qrow['melanomaPrimaryAnatomicSite']=2\n",
+    "    if row['origoCode']==3:\n",
+    "        qrow['melanomaPrimaryAnatomicSite']=4\n",
+    "    \n",
+    "    qrow['psECOGStartIT']=row['psAtITIntroduction']\n",
+    "    if row['mutations']==0:\n",
+    "        qrow ['actionableMutation']=1\n",
+    "    if row['mutations']==1:\n",
+    "        qrow ['actionableMutation']=2\n",
+    "    if row['mutations']==2:\n",
+    "        qrow ['actionableMutation']=3\n",
+    "    if row['mutations']==3:\n",
+    "        qrow ['actionableMutation']=4\n",
+    "    if row['mutations']==9:\n",
+    "        qrow ['actionableMutation']=5\n",
+    "        \n",
+    "    qrow['itTreatmentSetting']=2\n",
+    "    qrow['itStartDate']=row['itStart']\n",
+    "    qrow['firstITStartDate']=row['itStart']\n",
+    "    \n",
+    "    if row['bora']==0:#CR\n",
+    "        qrow ['bestResponse']=4\n",
+    "    if row['bora']==1:#PR\n",
+    "        qrow ['bestResponse']=2 #radiographic response\n",
+    "    if row['bora']==2:#SD\n",
+    "        qrow ['bestResponse']=3 #mixed radiographic response\n",
+    "    if row['bora']==9:#PD\n",
+    "        qrow ['bestResponse']=1\n",
+    "    \n",
+    "    if row['pneumonitis']==0:\n",
+    "        qrow['pneumonitisGrade']=7\n",
+    "    else:\n",
+    "        qrow['pneumonitisGrade']=row['pneumonitis']\n",
+    "        qrow['pneumonitisDate']=row['aeIdentificationDate']\n",
+    "        \n",
+    "    \n",
+    "    if row['diarrhea']==\"0\":\n",
+    "        qrow['colitisGrade']=7\n",
+    "    else:\n",
+    "        qrow['colitisGrade']=row['diarrhea']\n",
+    "        qrow['colitisDate']=row['aeIdentificationDate']\n",
+    "        \n",
+    "        \n",
+    "    if row['hypotirosis']==0:\n",
+    "        qrow['thyroiditisGrade']=7\n",
+    "    else:\n",
+    "        qrow['thyroiditisGrade']=row['hypotirosis']\n",
+    "        qrow['thyroiditisDate']=row['aeIdentificationDate']\n",
+    "        \n",
+    "    \n",
+    "    if row['sistemicKS']==0:\n",
+    "        qrow['immunosupressionFlag']=2\n",
+    "    if row['sistemicKS']==1:\n",
+    "        qrow['immunosupressionFlag']=1\n",
+    "    \n",
+    "    qrow['firstITInfusionDate']=row['itStart']\n",
+    "    qrow['itLine']=2\n",
+    "    #qrow['durationOfCurrentIT']=\n",
+    "    \n",
+    "    statusResponse=db.modifyRows(mode,project,'study','ClinicalDataInternational',[qrow])\n",
+    "    encoding=chardet.detect(statusResponse)['encoding']\n",
+    "    status=json.loads(statusResponse.decode(encoding))\n",
+    "    try:\n",
+    "        print('{}: {}'.format(row['PatientId'],status['exception']))\n",
+    "    except KeyError:\n",
+    "        pass\n",
+    "    #qrow['itType']=\n",
+    "\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{}:{} NIX-LJU-D2002-IRAE-A014 True\n",
+      "User: andrej studen CSRF: 14eef5230ef9055ca662e626545b47a3\n"
+     ]
+    }
+   ],
+   "source": [
+    "dset='ClinicalDataInternational'\n",
+    "#dset='Imaging1'\n",
+    "idFilter={'variable':'PatientId','value':'NIX-LJU-D2002-IRAE-A014','oper':'eq'}\n",
+    "ds=db.selectRows(project,'study',dset,[idFilter])\n",
+    "\n",
+    "rows=ds['rows']\n",
+    "for row in rows:\n",
+    "   print('{}:{}',row['PatientId'],row['valid'])\n",
+    "   row['valid']=0\n",
+    "   db.modifyRows('update',project,'study',dset,[row])    \n",
+    "        "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/lib/python3/dist-packages/urllib3/connection.py:391: SubjectAltNameWarning: Certificate for merlin.fmf.uni-lj.si has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 for details.)\n",
+      "  warnings.warn(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A000\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: e6ddccd420b2c45a98f4448be951ccef\n",
+      "DA\n",
+      "NE\n",
+      "N/A\n",
+      "ID: NIX-LJU-D2002-IRAE-A001\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 9e7dbdb1cdac31d0864826f400101393\n",
+      "DA\n",
+      "DA\n",
+      "DA\n",
+      "ID: NIX-LJU-D2002-IRAE-A002\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 1\n",
+      "User: andrej studen CSRF: c0bbc36da5070408e01f64bab3080f1b\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A003\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: e7ca4b3cd6c8d27d543fff197b90a032\n",
+      "DA\n",
+      "DA\n",
+      "DA\n",
+      "ID: NIX-LJU-D2002-IRAE-A004\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 1\n",
+      "User: andrej studen CSRF: fa458a4c0e96331402bdd87f8a7f710a\n",
+      "NE?\n",
+      "?\n",
+      "N/A\n",
+      "ID: NIX-LJU-D2002-IRAE-A005\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 59fb9bedb5feef9f13b413f6c8c5df38\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A006\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 58159ae265b106dd58482a3712cc4d7a\n",
+      "DA\n",
+      "NE\n",
+      "N/A\n",
+      "ID: NIX-LJU-D2002-IRAE-A007\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: d183cf6807cc174b9938c50af7e02cf4\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A008\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: f0c31176678928d65da70372149b6526\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A009\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 88dac5d71b7c07b8d81a24a63ce42988\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A010\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 5681d301d8ae4f91211f0a809d161dc4\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A011\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: d55c8dc31a662a73ecf05f8e5db480d9\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A012\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: dc01e50e6657c46e98953eefca1ebf45\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A013\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: c7436d74e009397cbde3617e504dbbb4\n",
+      "DA\n",
+      "NE\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A014\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 8dad9e03f6e363ca3203d1e6fa331a6a\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A015\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 13d60528591a5ee6bf769e5377a75a40\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A016\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 08f475ab641d385e7d41627fd0009aba\n",
+      "DA\n",
+      "DA\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A017\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: b97cdd281b69c27093080fe96fe5dfb3\n",
+      "DA\n",
+      "NE\n",
+      "N/A\n",
+      "ID: NIX-LJU-D2002-IRAE-A018\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 681751b836021ac4b9823468b0d567b4\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A019\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 46bdbb07c59514955ef84ccd34fc9349\n",
+      "DA\n",
+      "DA\n",
+      "DA\n",
+      "ID: NIX-LJU-D2002-IRAE-A020\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 1\n",
+      "User: andrej studen CSRF: 1bb520ede2c623366a00e51d0c716cbd\n",
+      "DA\n",
+      "DA\n",
+      "DA\n",
+      "ID: NIX-LJU-D2002-IRAE-A021\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 1\n",
+      "User: andrej studen CSRF: b218a6de9bbf877d662dc97a0e8aad7c\n",
+      "NE?\n",
+      "NE\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A021\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 6057ac8cf59a41bb4363ec935884a95b\n",
+      "DA\n",
+      "DA\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A022\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: c388fa864554c83d0fd3774f14a7a76d\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A023\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 0dd3c771579e964cc9f30d36e2c76be9\n",
+      "NE?\n",
+      "?\n",
+      "N/A\n",
+      "ID: NIX-LJU-D2002-IRAE-A024\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 675b2897a02eb1e4aa400d4cfd362605\n",
+      "?\n",
+      "NE\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A025\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: eccd25fec46ab30e9c663469cfdc5dae\n",
+      "DA\n",
+      "DA\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A026\n",
+      "petDetectedAE: 1\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: cc46e5322f1a0118a140756345ca26f8\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A027\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: afdb95dcf8e9a7ebf37890d082bf18d7\n",
+      "DA- hepatalna fleksura?\n",
+      "DA\n",
+      "NE\n",
+      "ID: NIX-LJU-D2002-IRAE-A028\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 1\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 10fc4b1aa9f13aea658b41faede0b857\n",
+      "None\n",
+      "None\n",
+      "None\n",
+      "ID: NIX-LJU-D2002-IRAE-A029\n",
+      "petDetectedAE: 2\n",
+      "earlyPETAEDetection: 2\n",
+      "petRelatedAction: 2\n",
+      "User: andrej studen CSRF: 1b70124a2a0ca3e3f36bd084b4da0033\n"
+     ]
+    }
+   ],
+   "source": [
+    "rows=ds['rows']\n",
+    "for row in rows:\n",
+    "    idFilter={'variable':'PatientId','value':row['PatientId'],'oper':'eq'}\n",
+    "    dsQ=db.selectRows(project,'study','ClinicalDataInternational',[idFilter])\n",
+    "    \n",
+    "    vals={'petDetectedAE':2,'earlyPETAEDetection':2,'petRelatedAction':2}#No\n",
+    "    for v in vals:\n",
+    "        print(row[v])\n",
+    "        if row[v]=='DA':\n",
+    "            vals[v]=1 #Yes\n",
+    "    for qrow in dsQ['rows']:\n",
+    "        print('ID: {}'.format(qrow['PatientId']))\n",
+    "        for v in vals:\n",
+    "            qrow[v]=vals[v]\n",
+    "            print('{}: {}'.format(v,qrow[v]))\n",
+    "    db.modifyRows('update',project,'study','ClinicalDataInternational',[qrow])    \n",
+    "        "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
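The second cell above maps the local coding of clinical variables (sex, origoCode, mutations, bora) onto the international dataset through chains of if statements. Purely as a sketch, and assuming exactly the code values used above, the same translations could be written as lookup tables applied inside the row loop:

# sketch only: the same code translations as the if-chains above
codeMaps=[('sex','gender',{0:1,1:2}),
          ('origoCode','melanomaPrimaryAnatomicSite',{0:3,1:1,2:2,3:4}),
          ('mutations','actionableMutation',{0:1,1:2,2:3,3:4,9:5}),
          ('bora','bestResponse',{0:4,1:2,2:3,9:1})]
for src,dst,cmap in codeMaps:
    if row[src] in cmap:
        qrow[dst]=cmap[row[src]]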

+ 137 - 0
pythonScripts/findCandidatesForSchedule.py

@@ -0,0 +1,137 @@
+#checks schedule entries still marked as scheduled (before a cut-off date) against PET series available on orthanc
+
+#series acquired within the configured tolerance of the scheduled date are recorded in the missingImages query as 'MISSING FORM'
+
+
+import os
+import json
+import re
+import sys
+import datetime
+import re
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+    fsetup=os.path.join(fhome,'.labkey','setup.json')
+    with open(fsetup,'r') as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup['paths']['nixWrapper'])
+
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+    with open(parameterFile,'r') as f:
+        pars=json.load(f)
+
+
+
+    i=0
+    #from orthancDatabase/Imaging dataset
+    projectOrthanc=pars['Orthanc']['project']
+    inputQuery=pars['Orthanc']['queryName']
+    inputSchema=pars['Orthanc']['schemaName']
+    inputParticipantField=pars['Orthanc']['participantField']
+
+    #to target project dataset
+    projectStudy=pars['Database']['project']
+    #'iPNUMMretro/Study'
+    #for prospective, set
+    #projectStudy='IPNUMMprospektiva/Study'
+    outputQuery=pars['Database']['queryName']
+    outputSchema=pars['Database']['schemaName']
+    #test for schedule entries which are scheduled
+    scheduleCutOffDate=pars['Database']['scheduleCutOffDate']#format YYYY-MM-DD
+    scheduleQuery=pars['Database']['scheduleQuery']
+    scheduleToleranceDays=int(pars['Database']['scheduleToleranceDays'])
+    dbParticipantField=pars['Database']['participantField']
+
+    missingSchema=pars['Database']['missingImagesSchema']
+    missingQuery=pars['Database']['missingImagesQuery']
+
+    #make a list of images from transferQuery
+    cutOffFilter={'variable':'actionDate','value':scheduleCutOffDate,'oper':'datelt'}
+    #status of 1 is scheduled
+    statusFilter={'variable':'actionStatus','value':'1','oper':'eq'}
+    dsImage=db.selectRows(projectStudy,outputSchema,scheduleQuery,[cutOffFilter,statusFilter])
+
+    #clear entries from missing list
+    failureFilter={'variable':'failureDescription','value':'MISSING FORM','oper':'eq'}
+    dsDelete=db.selectRows(projectStudy,missingSchema,missingQuery,[failureFilter])
+    db.modifyRows('delete',projectStudy,missingSchema,missingQuery,dsDelete['rows'])
+
+
+    for im in dsImage['rows']:
+
+        print('{}: {} {}'.format(im[dbParticipantField],im['actionDate'],im['actionStatus']))       
+        im['imagingVisitId']=im['visitId']
+        actionDate=datetime.datetime.strptime(im['actionDate'],'%Y/%m/%d %H:%M:%S') 
+        actionDateYMD=actionDate.strftime('%Y%m%d')
+        im['imageDate']=actionDateYMD
+        #this is still to be configured
+        inputIdFilter={'variable':inputParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+
+        idFilter={'variable':dbParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+        #filter on series descripton - for PET, we should have 'PET WB'
+        sdFilter={'variable':'seriesDescription','value':'PET WB','oper':'eq'}
+        
+        
+        #have to convert from datetime to %Y%m%d format
+        #dateFilter={'variable':'imageDate','value':im['imageDate'],'oper':'eq'}
+
+        dsOrthanc=db.selectRows(projectOrthanc,inputSchema,inputQuery,[inputIdFilter,sdFilter])
+        #what if dsOrthanc['rows'] is empty?
+        #this is part of QA and is reported in missingImages Schema/Query
+
+        uploadStatus="FAILED"
+        imageDateMismatch=[]
+        for im1 in dsOrthanc['rows']:
+
+            date=datetime.datetime.strptime(im1['studyDate'],'%Y/%m/%d %H:%M:%S') 
+            dt=date-actionDate
+            if (abs(dt.days)>scheduleToleranceDays): 
+                continue
+
+            #convert date to %Y%m%d notation
+            dateYMD=date.strftime('%Y%m%d')
+            
+            #enter this time difference to a list
+            imFilter={'variable':'imagingVisitId',
+                    'value':'{}'.format(im['imagingVisitId']),
+                    'oper':'eq'}
+            dsMissing=db.selectRows(projectStudy,missingSchema,\
+                    missingQuery,[idFilter,imFilter])
+
+            #already recorded
+            vals=[dbParticipantField,'imagingVisitId','imageDate']
+            mode='insert'
+            if len(dsMissing['rows'])>0:
+                mode='update'
+                orow=dsMissing['rows'][0]
+            else:
+                orow={v:im[v] for v in vals}
+            orow['imageDateMismatch']=dateYMD
+            orow['failureDescription']='MISSING FORM'
+            db.modifyRows(mode,projectStudy,missingSchema,\
+                    missingQuery,[orow])
+        
+    print("Done: {}".format(len(dsImage['rows'])))
+
+if __name__=='__main__':
+    main(sys.argv[1])
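findCandidatesForSchedule.py is driven by a JSON parameter file; the keys below are the ones the script reads, while the values are placeholders rather than settings taken from this commit:

{
 "Orthanc":{"project":"Orthanc/Database","schemaName":"study",
            "queryName":"Imaging","participantField":"PatientId"},
 "Database":{"project":"iPNUMMretro/Study","schemaName":"study",
             "queryName":"Imaging1","participantField":"PatientId",
             "scheduleQuery":"Schedule","scheduleCutOffDate":"2021-12-31",
             "scheduleToleranceDays":"7",
             "missingImagesSchema":"lists","missingImagesQuery":"missingImages"}
}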

+ 229 - 0
pythonScripts/generateFigures.py

@@ -0,0 +1,229 @@
+import nibabel
+import os
+import json
+import sys
+import numpy
+import matplotlib.pyplot
+#import chardet
+
+def buildPath(server,project,imageDir,patientCode,visitCode,imageType):
+    path='/'.join([server,'labkey/_webdav',project,'@files',imageDir,patientCode,visitCode])
+    tail='_notCropped_2mmVoxel'
+    if imageType=='Segm':
+        tail='_v5'
+    path+='/'+patientCode+'-'+visitCode+'_'+imageType+tail+'.nii.gz'
+    return path
+
+
+def getCOWAxis(seg,val,axis):
+#returns center of weight for segmentation image where val is selected
+    if axis==0:
+        #2,1 or 1,1
+        i1=2
+        i2=1
+    if axis==1:
+        #0,1 or 2,0
+        i1=2
+        i2=0
+    if axis==2:
+        #0,0 or 1,0
+        i1=1
+        i2=0
+    
+    s=numpy.sum(numpy.sum(seg==val,i1),i2)
+    x=numpy.arange(len(s))
+    s0=0
+    for i in x:
+        if s[i]==0:
+            continue
+        s0=i
+        break
+    s1=len(s)
+    for i in numpy.arange(s0,len(s)):
+        if s[i]>0:
+            continue
+        s1=i
+        break
+    return [s0,numpy.average(x,weights=s),s1]
+
+def getGeometry(seg,val):
+#return center of weight of segmentation seg for segment val as a 3D vector
+    return [getCOWAxis(seg,val,x) for x in [0,1,2]]
+
+def getCOW(geom):
+    return [x[1] for x in geom]
+
+def getRange(geom):
+    return [[x[0],x[2]] for x in geom]
+
+
+def plot(imgs,t,val,tempBase):
+
+
+    segColors=[[0,0,0],[0.1,0.1,0.1],[0,0.2,0],[1,0,0],[0,0,1],[1,0,1]]
+
+#3-lungs 4-thyroid 5-bowel
+    delta=20
+    if val==4:
+        delta=40
+    window=350
+    level=40
+    geo=getGeometry(imgs['Segm'],val)
+    cowF=getCOW(geo)
+    rng=getRange(geo)
+    #print(rng)
+    cowI=[int(x) for x in cowF]
+    segment=imgs['Segm']==val
+
+    i0=rng[0][0]-delta
+    if i0<0:
+        i0=0
+    i1=rng[0][1]+delta
+    if i1>imgs['CT'].shape[0]:
+        i1=imgs['CT'].shape[0]
+    k0=rng[2][0]-delta
+    if k0<0:
+        k0=0
+    k1=rng[2][1]+delta
+    if k1>imgs['CT'].shape[2]:
+        k1=imgs['CT'].shape[2]
+
+    if t=='CT':
+        v0=level-0.5*window
+        v1=v0+window
+        matplotlib.pyplot.imshow(imgs['CT'][i0:i1,cowI[1],k0:k1].transpose(),cmap='gray',vmin=v0,vmax=v1)
+    if t=='PET':
+        matplotlib.pyplot.imshow(imgs['PET'][i0:i1,cowI[1],k0:k1].transpose(),cmap='inferno')
+        #blueish
+
+    if t=='CT':
+        rgb=segColors[val]
+    if t=='PET':
+        rgb=[1,1,1]
+
+    colors = [rgb+[c] for c in numpy.linspace(0,1,100)]
+                                                                                    
+    cmap = matplotlib.colors.LinearSegmentedColormap.from_list('mycmap', colors, N=5)
+
+    matplotlib.pyplot.imshow(segment[i0:i1,cowI[1],k0:k1].transpose(), cmap=cmap, alpha=0.2)
+    matplotlib.pyplot.gca().invert_yaxis()
+    outfile=os.path.join(tempBase,'slice{}_{}.png'.format(t,val))
+    matplotlib.pyplot.savefig(outfile)
+    return outfile
+
+
+
+def main(parameterFile):
+#mask for segmentations
+
+    setupJSON=os.path.join(os.path.expanduser('~'),'.labkey','setup.json')
+    with open(setupJSON) as f:
+        setup=json.load(f)
+            
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper 
+             
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    fconfig=os.path.join(os.path.expanduser('~'),'.labkey','network.json')
+                 
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+    tempBase=os.path.join(os.path.expanduser('~'),'temp')
+    
+    with open(parameterFile) as f:
+        pars=json.load(f)
+
+    project=pars['project']
+    dataset=pars['targetQuery']
+    schema=pars['targetSchema']
+
+    reportSchema=pars['reportSchema']
+    reportQuery=pars['reportQuery']
+    participantField=pars['participantField']
+
+    #all images from database
+    ds=db.selectRows(project,schema,dataset,[])
+    
+    #input
+    imageResampledField={"CT":"ctResampled","PET":"petResampled","patientmask":"ROImask"}
+
+    rows=ds['rows']
+    rows=[ds['rows'][0]]
+
+    for r in rows:
+        print(r)
+        iTypes=['CT','PET','Segm']
+        needToCalculate=False
+        for t in ['CT','PET']:
+            idFilter={'variable':participantField,'value':r[participantField],'oper':'eq'}
+            visitFilter={'variable':'visitCode','value':r['visitCode'],'oper':'eq'}
+            verFilter={'variable':'version','value':pars['version'],'oper':'eq'}
+            typeFilter={'variable':'type','value':t,'oper':'eq'}
+            
+            ds2=db.selectRows(project,reportSchema,reportQuery,[idFilter,visitFilter,verFilter,typeFilter])
+            
+            if len(ds2['rows'])==0:
+                #skip if row is present
+                #there are in fact multiple rows for multiple organs...
+                needToCalculate=True
+                break
+
+        if not needToCalculate:
+            continue
+        imgs={} 
+        for t in iTypes:
+            
+            try:
+                imagePath=r['_labkeyurl_'+imageResampledField[t]]
+            except KeyError:
+                ds1=db.selectRows(project,pars['segmentationSchema'],pars['segmentationQuery'],\
+                        [idFilter,visitFilter,verFilter])
+                imagePath=ds1['rows'][0]['_labkeyurl_segmentation']
+
+            localPath=os.path.join(tempBase,'image'+t+'.nii.gz')
+            if os.path.isfile(localPath):
+                os.remove(localPath)
+            fb.readFileToFile(imagePath,localPath)
+            img=nibabel.load(localPath)
+            imgs[t]=img.get_fdata()
+        print('Loading completed')
+        for t in ['CT','PET']:
+            for val in [3,4,5]:
+                outfile=plot(imgs,t,val,tempBase)
+                remoteDir=fb.buildPathURL(project,[pars['imageDir'],r['patientCode'],r['visitCode']])
+                imageFile=r['patientCode']+'-'+r['visitCode']+'_'+t+'_{}'.format(val)+'_'+pars['version']+'.png'
+                remoteFile='/'.join([remoteDir,imageFile])
+                fb.writeFileToFile(outfile,remoteFile)
+                print('Uploaded {}'.format(remoteFile))
+                os.remove(outfile)  
+                organFilter={'variable':'organ','value':'{}'.format(val),'oper':'eq'}
+                typeFilter['value']=t
+                ds3=db.selectRows(project,reportSchema,reportQuery,\
+                        [idFilter,visitFilter,verFilter,organFilter,typeFilter])
+                if len(ds3['rows'])>0:
+                    mode='update'
+                    frow=ds3['rows'][0]
+                else:
+                    mode='insert'
+                    frow={}
+                    for f in [participantField,'patientCode','visitCode']:
+                        frow[f]=r[f]
+                frow['organ']='{}'.format(val)
+                frow['type']=t
+                frow['version']=pars['version']
+                frow['file']=imageFile
+                db.modifyRows(mode,project,reportSchema,reportQuery,[frow])
+        print('Images uploaded')
+
+
+if __name__ == '__main__':
+    main(sys.argv[1])
+

The file diff is not shown because it is too large.
+ 22 - 0
pythonScripts/iraemmUploadTestPatients.ipynb


+ 61 - 0
pythonScripts/modifyDataset.py

@@ -0,0 +1,61 @@
+#a script to modify a dataset row by row. The current implementation marks
+#every row of the selected query by setting its View column to '[VIEW]'
+
+#basic python
+import os
+import subprocess
+import re
+import datetime
+import sys
+import json
+
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+
+    net=labkeyInterface.labkeyInterface()
+    net.init(os.path.join(fhome,'.labkey','network.json'))
+
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+
+    #by default uses .labkey/Remote.json configuration
+    with open(parameterFile) as f:
+        pars=json.load(f)
+
+
+    project=pars['project']
+    schema=pars['schema']
+    query=pars['query']
+    
+
+    #study section ################
+
+
+
+#select patients enroled under regulatory number
+    
+    ds=db.selectRows(project,schema,query,[])
+
+    for r in ds['rows']:
+        r['View']='[VIEW]'
+        db.modifyRows('update',project,schema,query,[r])
+
+    print("Done")
+
+if __name__ == '__main__':
+    main(sys.argv[1])
+
+

+ 153 - 0
pythonScripts/organ_percentile.py

@@ -0,0 +1,153 @@
+import numpy
+import sys
+import os
+import json
+import nibabel
+
+def organ_percentile(img, mask, level, p):
+#ORGAN_PERCENTILE - computes suv_x the pth percentile of the distribution of
+#   image intensity values in img from within some ROI mask
+#
+#   Inputs:
+#       img - image. ndarray
+#       mask - ROI label mask. Must be same dimension as img (ndarray)
+#       level - label value in mask that defines the ROI
+#       p - percentile. Between 0-100 (scalar or array-like accepted by numpy.percentile)
+#
+#   Outputs:
+#       suv_x - percentile of distribution. Defined by:
+#
+#           p/100 = ∫_0^suv_x H(x) dx
+#
+#       where H(x) is the normalized distribution of image values within mask
+    #img = np.array(img)
+    #mask = np.array(mask)
+
+    h = img[mask == level]
+    suv_x = numpy.percentile(h, p)
+
+    return suv_x
+
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+
+
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+
+    #segmentation layout
+    project=pars['project']
+    dataset=pars['targetQuery']
+    schema=pars['targetSchema']
+    segSchema=pars['segmentationSchema']
+    segQuery=pars['segmentationQuery']
+    qQuery=pars['percentileQuery']
+    segVersion=pars['version']
+    segVersionI=int(pars['versionNumber'])
+
+    tempBase=pars['tempBase']
+    if not os.path.isdir(tempBase):
+        os.makedirs(tempBase)
+    
+    participantField=pars['participantField']
+
+    #all images from database
+    ds=db.selectRows(project,schema,dataset,[])
+    
+    petField=pars['images']['PET']['queryField']
+    
+    rows=ds['rows']
+    #rows=[ds['rows'][0]]
+
+    pv=numpy.concatenate((numpy.linspace(10,50,5),
+            numpy.linspace(55,80,6),  
+            numpy.linspace(82,90,5),
+            numpy.linspace(91,100,10)))
+    for r in rows:
+        localPET=os.path.join(tempBase,'PET.nii.gz')
+        localSeg=os.path.join(tempBase,'Seg.nii.gz')
+        for f in [localPET,localSeg]:
+            if os.path.isfile(f):
+                os.remove(f)
+
+        #build image path
+        remoteDir=fb.buildPathURL(project,[pars['imageDir'],\
+            r['patientCode'],r['visitCode']])
+        print('{}: {}'.format(petField,r[petField]))
+        remotePET=remoteDir+'/'+r[petField]
+        print('{}:{}'.format(remotePET,fb.entryExists(remotePET)))
+
+        vFilter={'variable':'version','value':segVersion,'oper':'eq'}
+        idFilter={'variable':'patientCode','value':r['patientCode'], 'oper':'eq'}
+        visitFilter={'variable':'visitCode','value':r['visitCode'], 'oper':'eq'}
+
+        dsSeg=db.selectRows(project,segSchema,segQuery,[idFilter,visitFilter,vFilter])
+        if len(dsSeg['rows'])!=1:
+            print('Failed to get segmentation for {}/{}'.format(r[participantField],segVersion))
+
+        remoteSeg=remoteDir+'/'+dsSeg['rows'][0]['segmentation']
+
+        print('{}:{}'.format(remoteSeg,fb.entryExists(remoteSeg)))
+
+        fb.readFileToFile(remotePET,localPET)
+        fb.readFileToFile(remoteSeg,localSeg)
+        
+        niPET=nibabel.load(localPET)
+        niSeg=nibabel.load(localSeg)
+        #3 lungs
+        #4 thyroid
+        #5 bowel
+        for level in [3,4,5]:
+            v=organ_percentile(niPET.get_fdata(),niSeg.get_fdata(),level,pv)
+            for (x,y) in zip(pv,v):
+                #get existing entry
+                seqNum=r['SequenceNum']+0.0001*x+0.01*segVersionI
+                print('[{:.8f}] {}/{}: {}/{}'.format(seqNum,r['patientCode'],r['visitCode'],x,y))
+
+                sFilter={'variable':'SequenceNum','value':'{}'.format(seqNum),'oper':'eq'}
+                oFilter={'variable':'organ','value':'{}'.format(level),'oper':'eq'}
+                dsP=db.selectRows(project,schema,qQuery,[idFilter,sFilter,oFilter])
+                mode='update'
+                if len(dsP['rows'])==0:
+                    mode='insert'
+                    rowDSP={x:r[x] for x in [participantField,'patientCode','visitCode']}
+                    rowDSP['SequenceNum']=seqNum
+                    rowDSP['segmentationVersion']=segVersion
+                else:
+                    rowDSP=dsP['rows'][0]
+                rowDSP['percentile']=x
+                rowDSP['value']=y
+                rowDSP['organ']=level
+                db.modifyRows(mode,project,schema,qQuery,[rowDSP])
+
+    
+
+    print('Done')
+
+
+if __name__ == '__main__':
+    main(sys.argv[1])
+
+
+
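A minimal, self-contained check of organ_percentile on synthetic data (not study data):

import numpy
img=numpy.arange(100,dtype=float).reshape(10,10)   # synthetic intensities 0..99
mask=numpy.full((10,10),3)                         # label 3 everywhere
print(organ_percentile(img,mask,3,50))             # 49.5, the median of 0..99
print(organ_percentile(img,mask,3,[10,90]))        # numpy.percentile also accepts a list of percentiles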

+ 83 - 18
pythonScripts/populateImagingFromTransferList.py

@@ -1,5 +1,8 @@
 #date sorts studies from orthanc dataset into target study dataset
 
 
+#takes transferQuery as the list of images that should be available on orthanc
+
+
 import os
 import json
 import re
@@ -13,7 +16,12 @@ def main(parameterFile):
     with open(fsetup,'r') as f:
         setup=json.load(f)
 
 
-    sys.path.insert(0,setup['paths']['labkeyInterface'])
+    sys.path.insert(0,setup['paths']['nixWrapper'])
+
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
     import labkeyInterface
     import labkeyDatabaseBrowser
     import labkeyFileBrowser
@@ -48,17 +56,55 @@ def main(parameterFile):
     transferQuery=pars['Database']['transferQuery']
     dbParticipantField=pars['Database']['participantField']
 
 
-    #make a list of images
+    missingSchema=pars['Database']['missingImagesSchema']
+    missingQuery=pars['Database']['missingImagesQuery']
+
+    #make a list of images from transferQuery
     dsImage=db.selectRows(projectStudy,outputSchema,transferQuery,[])
 
 
     for im in dsImage['rows']:
-        idFilter={'variable':inputParticipantField,'value':im[dbParticipantField],\
-            'oper':'eq'}
+
+
+        #for orthanc
+        inputIdFilter={'variable':inputParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+
+        #for database
+        idFilter={'variable':dbParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+
+
+        seqNum=im['imagingVisitId']
+        seqFilter={'variable':'SequenceNum','value':str(seqNum),'oper':'eq'}
+
+        
+        #for speedup one should check if a match was already done in Database/queryName
+        #ds1 are the matching outputs in target dataset
+        ds1=db.selectRows(projectStudy,outputSchema,outputQuery,\
+                [idFilter,seqFilter])
+
+        if len(ds1['rows'])>1:
+            #never happens (idFilter and seqFilter)
+            print('ERROR: too many matches in {} for {}/{}'.\
+                    format(outputQuery,im[dbParticipantField],seqNum))
+            continue
+        if len(ds1['rows'])>0:
+            #just the one match, fine
+            print('Entry for {}/{} already resolved'.\
+                    format(im[dbParticipantField],seqNum))
+            continue
+
         #have to convert from datetime to %Y%m%d format
         #dateFilter={'variable':'imageDate','value':im['imageDate'],'oper':'eq'}
 
 
-        dsOrthanc=db.selectRows(projectOrthanc,inputSchema,inputQuery,[idFilter])
+        dsOrthanc=db.selectRows(projectOrthanc,inputSchema,inputQuery,[inputIdFilter])
+        #what if dsOrthanc['rows'] is empty?
+        #this is part of QA and is reported in missingImages Schema/Query
 
 
+        uploadStatus="FAILED"
+        imageDateMismatch=[]
         for im1 in dsOrthanc['rows']:
 
 
             date=datetime.datetime.strptime(im1['studyDate'],'%Y/%m/%d %H:%M:%S') 
@@ -66,6 +112,7 @@ def main(parameterFile):
             dateYMD=date.strftime('%Y%m%d')
             if dateYMD!=im['imageDate']:
                 print('Rejecting mismatch: {}/{}'.format(dateYMD,im['imageDate']))
+                imageDateMismatch.append(dateYMD)
                 continue
         
         
             outvar='NONE'
@@ -84,18 +131,9 @@ def main(parameterFile):
 
 
             #figure out which row in output study to update
             filters=[]
-            idFilter={'variable':dbParticipantField,'value':im[dbParticipantField],'oper':'eq'}
-            seqNum=im['imagingVisitId']
-            seqFilter={'variable':'SequenceNum','value':str(seqNum),'oper':'eq'}
-            print('Participant {} Sequence number {}'.format(im[dbParticipantField],str(seqNum)))
-            #ds1 are the matching outputs in target dataset
-            ds1=db.selectRows(projectStudy,outputSchema,outputQuery,\
-                [idFilter,seqFilter])
+            print('Participant {} Sequence number {}'.\
+                    format(im[dbParticipantField],str(seqNum)))
     
     
-            if len(ds1['rows'])>1:
-                print('ERROR: too many matches for {}/{}'.\
-                    format(im[dbParticipantField],seqNum))
-                continue
 
 
             mode='update'
             outRow={}
@@ -107,16 +145,43 @@ def main(parameterFile):
                 outRow['dicomStudy']=im1['dicomStudy']
         
         
             else:
+                #never happens if we check ds1 before matches are found
                 outRow=ds1['rows'][0]
         
         
+            
             outRow[outvar]=im1['orthancSeries']
             outRow['studyDate']=im1['studyDate']
             outRow['imagingVisitId']=im['imagingVisitId']
             outRow['visitCode']='VISIT_'+str(im['imagingVisitId'])
 
 
-            status=db.modifyRows(mode,projectStudy,outputSchema,outputQuery,[outRow])
-            print('{}'.format(status))
+            modifyStatus=db.modifyRows(mode,projectStudy,outputSchema,\
+                    outputQuery,[outRow])
+            print('{}'.format(modifyStatus))
+            uploadStatus="LOADED"
         
         
+        if uploadStatus=="FAILED":
+            #standard spiel - find if already present; if so, skip, if not, add
+            imFilter={'variable':'imagingVisitId',
+                    'value':'{}'.format(im['imagingVisitId']),
+                    'oper':'eq'}
+            dsMissing=db.selectRows(projectStudy,missingSchema,\
+                    missingQuery,[idFilter,imFilter])
+
+            #already recorded
+            imageDateMismatch=list(set(imageDateMismatch))
+            vals=[dbParticipantField,'imagingVisitId','imageDate']
+            mode='insert'
+            if len(dsMissing['rows'])>0:
+                mode='update'
+                orow=dsMissing['rows'][0]
+            else:
+                orow={v:im[v] for v in vals}
+            orow['imageDateMismatch']=','.join(imageDateMismatch)
+            orow['failureDescription']='MISSING DICOM'
+            db.modifyRows(mode,projectStudy,missingSchema,\
+                    missingQuery,[orow])
+            
+       
     print("Done")
 
 
 if __name__=='__main__':
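The two keys added to the parameter file, missingImagesSchema and missingImagesQuery, name the list that receives the 'MISSING DICOM' rows; a hypothetical fragment of the Database block (all other keys unchanged):

"Database":{
  "missingImagesSchema":"lists",
  "missingImagesQuery":"missingImages"
}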

+ 15 - 8
pythonScripts/preprocess.py

@@ -18,7 +18,7 @@ def getStudyLabel(row,participantField='PatientId'):
     return getPatientLabel(row,participantField)+'-'+getVisitLabel(row)
 
 
 def runPreprocess_DM(matlab,generalCodes,niftiTools,studyDir):
-
+    print("Running matlab")
     #run after all directories have been assembled
     script="addpath('"+generalCodes+"');"
     script+="addpath('"+niftiTools+"');"
@@ -46,7 +46,7 @@ def getDicom(ofb,row,zipDir,rawDir,im,imageSelector,\
     if seriesId=="0":
         return False
 
 
-    print("{}: {}".format(im,seriesId))
+    print("getDicom: {}: {}".format(im,seriesId))
     fname=os.path.join(zipDir,\
             getStudyLabel(row,participantField)+'_'+im+".zip");
 
 
@@ -54,7 +54,7 @@ def getDicom(ofb,row,zipDir,rawDir,im,imageSelector,\
     if os.path.isfile(fname):
         print("Data already loaded. Skipping")
     else:
-        print("Loading data from orthanc")
+        print("getDicom: Loading data from orthanc")
         ofb.getZip('series',seriesId,fname)
 
 
     #unzip the zipped dicom series
@@ -88,17 +88,21 @@ def updateRow(db,project,dataset,row,imageResampledField,gzFileNames,\
  
  
 
 
 def main(parameterFile):
-    shome=os.path.expanduser('~nixUser')
     fhome=os.path.expanduser('~')
     with open(os.path.join(fhome,".labkey","setup.json")) as f:
         setup=json.load(f)
 
 
-    sys.path.insert(0,setup["paths"]["labkeyInterface"])
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
     import labkeyInterface
     import labkeyDatabaseBrowser
     import labkeyFileBrowser
 
 
-    sys.path.insert(0,setup["paths"]["orthancInterface"])
+    nixWrapper.loadLibrary("orthancInterface")
+    
     import orthancInterface
     import orthancFileBrowser
 
 
@@ -107,6 +111,7 @@ def main(parameterFile):
     matlab=setup["paths"]["matlab"]
     generalCodes=setup["paths"]["generalCodes"]
     niftiTools=setup["paths"]["niftiTools"]
+    gzip=setup['paths']['gzip']
 
 
     net=labkeyInterface.labkeyInterface()
     net.init(fconfig)
@@ -144,7 +149,7 @@ def main(parameterFile):
 
 
     i=0
     for row in ds["rows"]:
-
+        print("Starting row id:{} seq:{}".format(row[participantField],row['SequenceNum']))
         #interesting files are processedDir/studyName_CT_notCropped_2mmVoxel.nii
         #asn processedDir/studyName_PET_notCropped_2mmVoxel.nii
         volumeFileNames={im:\
@@ -177,6 +182,8 @@ def main(parameterFile):
     
     
         #setup the directory structure for preprocess_DM
         studyDir=os.path.join(tempBase,getStudyLabel(row,participantField))
+        print("Making local directories in {}".format(studyDir))
+
         if not os.path.isdir(studyDir):
             os.mkdir(studyDir)
 
 
@@ -220,7 +227,7 @@ def main(parameterFile):
 
 
             for f in volumeFiles.values():
                 print("Running gzip {}".format(f))
-                outText=subprocess.check_output(["/bin/gzip",f])
+                outText=subprocess.check_output([gzip,f])
                 print(outText.decode('utf-8'))
 
 
         #upload local files to remote
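preprocess.py now resolves both the interface libraries and the gzip binary through ~/.labkey/setup.json; a hypothetical minimal file with the keys the script reads:

{
 "paths":{
  "nixWrapper":"/home/user/software/src/nixSuite/wrapper",
  "matlab":"/usr/local/bin/matlab",
  "generalCodes":"/home/user/software/src/generalCodes",
  "niftiTools":"/home/user/software/src/niftiTools",
  "gzip":"/bin/gzip"
 }
}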

+ 1 - 2
pythonScripts/runPython.sh

@@ -1,4 +1,3 @@
 #!/bin/bash
 
 
-
-nohup python3 $1  0<&- &> $HOME/logs/runPython.log &
+nohup python3 -u $@  0<&- &> $HOME/logs/runPython.log &

+ 5 - 0
pythonScripts/runPythonnnUNet.sh

@@ -0,0 +1,5 @@
+#!/bin/bash
+LOG=$HOME/logs/runPython.log
+rm $LOG;
+. ~/venv/nnUNet/bin/activate
+nohup python -u $@  0<&- &> $LOG &
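Both launchers now forward all of their arguments ($@) to python, so the parameter file is passed along with the script name; an illustrative call (paths are examples only):

./runPythonnnUNet.sh runSegmentationnnUNet.py ~/templates/segmentationIRAEMM.json
tail -f ~/logs/runPython.log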

+ 47 - 7
pythonScripts/runSegmentation.py → pythonScripts/runSegmentationDM.py

@@ -86,11 +86,18 @@ def runDeepMedic(setup,pars):
     print(args) 
     print(subprocess.run(args,check=True,stdout=subprocess.PIPE).stdout)
 
 
+def runDeepMedicDocker(setup,pars):
+    args=[]
+    args.extend(['docker-compose','-f',pars['deepmedic']['segmentationdmYAML'],'up'])
+    print(args) 
+    print(subprocess.run(args,check=True,stdout=subprocess.PIPE).stdout)
+
+
 def getSegmentationFile(pars):
     #this is how deep medic stores files
     return getSegmImagePath(\
             os.path.join(pars['tempBase'],'output','predictions','currentSession','predictions',\
-            pars['images']['images']['segmentations']['tempFile'])
+            pars['images']['segmentations']['tempFile'])
             )
 
 
 def runSegmentation(fb,row,pars,setup):
@@ -98,28 +105,30 @@ def runSegmentation(fb,row,pars,setup):
      
      
     #download to temp file (could be a fixed name)
     project=pars['project']
-    images=pars['images']['images']
+    images=pars['images']
     participantField=pars['participantField']
     baseDir=fb.formatPathURL(project,pars['imageDir']+'/'+\
         getPatientLabel(row,participantField)+'/'+\
         getVisitLabel(row))
     
     
     #download 
+    fullFile={key:os.path.join(pars['tempBase'],images[key]['tempFile']) for key in images}
     for im in images:
     for im in images:
-        tmpFile=images[im]['tempFile']
         if 'queryField' in images[im]:
         if 'queryField' in images[im]:
-            fb.readFileToFile(baseDir+'/'+row[images[im]['queryField']],tmpFile)
+            fb.readFileToFile(baseDir+'/'+row[images[im]['queryField']],fullFile[im])
    
    
     #normalize 
     #normalize 
-    normalizeCT(images['CT']['tempFile'],images['patientmask']['tempFile'])
+
+    normalizeCT(fullFile['CT'],fullFile['patientmask'])
 
 
     #update templates to know which files to process
     #update templates to know which files to process
 
 
 
 
     #run deep medic
     #run deep medic
-    #runDeepMedic(setup,pars)
+    runDeepMedicDocker(setup,pars)
     
     
-    #segFile=os.path.join(pars['tempBase'],images['segmentations']['tempFile'])
+    #processed file is
+    segFile=getSegmentationFile(pars)
     #SimpleITK.WriteImage(outImg,segFile)
     #SimpleITK.WriteImage(outImg,segFile)
     return segFile
     return segFile
 
 
@@ -127,6 +136,37 @@ def runSegmentation(fb,row,pars,setup):
 def main(parameterFile):
     
     fhome=os.path.expanduser('~')
+
+
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["labkeyInterface"])
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    sys.path.append(setup['paths']['parseConfig'])
+    import parseConfig
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+    
+    pars=parseConfig.convert(pars)
+    pars=parseConfig.convertValues(pars)
+    print(pars)
+    #images=pars['images']
+    #ctFile=os.path.join(pars['tempBase'],images['CT']['tempFile'])
+    #maskFile=os.path.join(pars['tempBase'],images['patientmask']['tempFile'])
+    #normalizeCT(ctFile,maskFile)
+
+
+    
+
+def doSegmentation(parameterFile):
+    fhome=os.path.expanduser('~')
+
+
     with open(os.path.join(fhome,".labkey","setup.json")) as f:
         setup=json.load(f)
 

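Note: runDeepMedicDocker replaces the direct deepMedic call with docker-compose, pointing it at the compose file referenced by pars['deepmedic']['segmentationdmYAML'] (that key is defined outside the templates shown here). The subprocess call is roughly equivalent to the following shell command, with an illustrative path for the compose file:

    docker-compose -f ~/software/src/iraemm/segmentation/segmentationdm.yaml up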
+ 259 - 0
pythonScripts/runSegmentationnnUNet.py

@@ -0,0 +1,259 @@
+import os
+import json
+import re
+import subprocess
+import nibabel
+import shutil
+import sys
+import pathlib
+import numpy
+
+#importing this module has no side effects; call doSegmentation() (or test()) explicitly
+
+def getPatientLabel(row,participantField='PatientId'):
+    return row[participantField].replace('/','_') 
+
+def getVisitLabel(row):
+    return 'VISIT_'+str(int(row['SequenceNum']))
+
+def getStudyLabel(row,participantField='PatientId'):
+    return getPatientLabel(row,participantField)+'-'+getVisitLabel(row)
+
+
+def updateRow(project,dataset,row,imageResampledField,gzFileNames,\
+        participantField='PatientId'):
+    row['patientCode']=getPatientLabel(row,participantField)
+    row['visitCode']=getVisitLabel(row)
+    for im in imageResampledField:
+        row[imageResampledField[im]]=gzFileNames[im]
+    db.modifyRows('update',project,'study',dataset,[row])
+ 
+def replacePatterns(infile,outfile,replacePatterns):
+    of=open(outfile,'w')
+    with open(infile,'r') as f:
+        data=f.read()
+        for p in replacePatterns:
+            val=replacePatterns[p]
+            data=re.sub(p,val,data)
+    of.write(data)
+    of.close()
+    
+def valueSubstitution(pars,val):
+    if val.find('__home__')>-1:
+        val=re.sub(r'__home__',os.path.expanduser('~'),val)
+
+    return val
+
+def getSuffix(tempFile):
+    p=pathlib.Path(tempFile)
+    return ''.join(p.suffixes)
+
+def getSegmImagePath(tempFile):
+    sfx=getSuffix(tempFile)
+    return re.sub(sfx,'_Segm'+sfx,tempFile)
+
+def addVersion(tempFile,version):
+    sfx=getSuffix(tempFile)
+    return re.sub(sfx,'_'+version+sfx,tempFile)
+
+def addnnUNetCode(tempFile,fileNumber=0):
+    sfx=getSuffix(tempFile)
+    return re.sub(sfx,'_'+'{:04d}'.format(fileNumber)+sfx,tempFile)
+
+def runnnUNet(setup,pars):
+    args=[]
+    #set the environment
+    args.append(setup['paths']['nnUNetRunInference'])
+    #location of input images
+    args.extend(['-i',os.path.join(pars['tempBase'],'CT')])
+    #output path is segmentations
+    args.extend(['-o',os.path.join(pars['tempBase'],'segmentations')])
+    #modelid, nnUNet internal rules.
+    args.extend(['-t',pars['nnUNet']['ModelId']])
+    #specify configuration (3d_fullres)
+    args.extend(['-m',pars['nnUNet']['configuration']])
+    print(args) 
+    my_env = os.environ
+    for key in pars['nnUNet']['env']:
+        my_env[key]=pars['nnUNet']['env'][key]
+      
+    print(subprocess.run(args,env=my_env,check=True,stdout=subprocess.PIPE).stdout)
+
+def getSegmentationFile(pars):
+    #this is where nnUNet writes its predictions
+    return os.path.join(pars['tempBase'],'segmentations',\
+            pars['images']['CT']['tempFile'])
+            
+
+def runSegmentation(fb,row,pars,setup):
+    
+     
+    #download to temp file (could be a fixed name)
+    project=pars['project']
+    images=pars['images']
+    participantField=pars['participantField']
+    baseDir=fb.formatPathURL(project,pars['imageDir']+'/'+\
+        getPatientLabel(row,participantField)+'/'+\
+        getVisitLabel(row))
+    
+    #download CT
+    ctDir=os.path.join(pars['tempBase'],'CT')
+    if not os.path.isdir(ctDir):
+        os.mkdir(ctDir)
+    fullFile=os.path.join(ctDir,images['CT']['tempFile']) 
+
+    fullFile=addnnUNetCode(fullFile)
+    fb.readFileToFile(baseDir+'/'+row[images['CT']['queryField']],fullFile)
+    
+    #debug
+
+    #run nnUNet inference
+    runnnUNet(setup,pars)
+
+    #processed file is
+    segFile=getSegmentationFile(pars)
+    #SimpleITK.WriteImage(outImg,segFile)
+    return segFile
+
+        
+def test(parameterFile):
+    
+    fhome=os.path.expanduser('~')
+
+    
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    
+    import nixWrapper
+    
+    nixWrapper.loadLibrary("labkeyInterface")#force reload
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    nixWrapper.loadLibrary("parseConfig")
+    import parseConfig
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+    
+    pars=parseConfig.convert(pars)
+    pars=parseConfig.convertValues(pars)
+    #print(pars)
+
+
+    
+
+def doSegmentation(parameterFile):
+    fhome=os.path.expanduser('~')
+
+    
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    
+    import nixWrapper
+    
+    nixWrapper.loadLibrary("labkeyInterface")#force reload
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    nixWrapper.loadLibrary("parseConfig")
+    import parseConfig
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+    
+    pars=parseConfig.convert(pars)
+    pars=parseConfig.convertValues(pars)
+    
+    project=pars['project']
+    dataset=pars['targetQuery']
+    schema=pars['targetSchema']
+    view=pars['viewName']
+
+
+    tempBase=pars['tempBase']
+    if not os.path.isdir(tempBase):
+        os.makedirs(tempBase)
+
+    #start the database interface
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+
+    #all images from database
+    ds=db.selectRows(project,schema,dataset,[],view)
+    
+    #input
+    #use webdav to transfer file (even though it is localhost)
+
+ 
+    i=0
+    #for debugging
+    rows=[ds['rows'][0]]
+    #production mode
+    rows=ds['rows']
+    for row in rows:
+       
+
+        #build file name 
+        sfx=pars['images']['segmentation']['suffix']
+        outpath=fb.buildPathURL(pars['project'],[pars['imageDir'],row['patientCode'],row['visitCode']])
+        outName=addVersion(\
+                getSegmImagePath(\
+                    getStudyLabel(row,pars['participantField'])+sfx),\
+                pars['version'])
+
+        outFile=outpath+'/'+outName
+
+        #check if file is there
+        if not fb.entryExists(outFile):
+
+            
+            segFile=getSegmentationFile(pars)
+            #remove existing file
+            if os.path.isfile(segFile):
+                os.remove(segFile)
+            
+            segFile=runSegmentation(fb,row,pars,setup)
+            #copy file to file
+            #normally I would update the targetQuery, but it contains previously set images
+            #copy to labkey
+            fb.writeFileToFile(segFile,outFile)
+            print(segFile)
+        #debug 
+
+        #update database
+        copyFields=[pars['participantField'],'SequenceNum','patientCode','visitCode']
+        row['SequenceNum']+=0.001*float(pars['versionNumber'])
+        filters=[{'variable':v,'value':str(row[v]),'oper':'eq'} for v in copyFields]
+        filters.append({'variable':'Version','value':pars['version'],'oper':'eq'})
+
+        ds1=db.selectRows(pars['project'],pars['segmentationSchema'],pars['segmentationQuery'],filters)
+
+        if len(ds1['rows'])>0:
+            mode='update'
+            outRow=ds1['rows'][0]
+        else:
+            mode='insert'
+            outRow={v:row[v] for v in copyFields}
+        outRow['Version']= pars['version']
+        outRow['Segmentation']= outName
+        print(db.modifyRows(mode,pars['project'],pars['segmentationSchema'],pars['segmentationQuery'],[outRow]))
+        #push results back to LabKey
+    print("Done")
+
+
+if __name__ == '__main__':
+    #test(sys.argv[1])
+    doSegmentation(sys.argv[1])
+    #sys.exit()
+
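Note: for orientation, runnnUNet assembles an nnU-Net (v1-style) inference call and injects the environment from pars['nnUNet']['env']. With the values in templates/segmentationIRAEMM.json it corresponds roughly to the sketch below; setup['paths']['nnUNetRunInference'] is assumed to resolve to the nnUNet_predict entry point, and the temp directory follows the __tempBase__ setting in that template:

    export nnUNet_raw_data_base=$HOME/temp/segmentationdm
    export nnUNet_preprocessed=$HOME/temp/segmentationdm
    export RESULTS_FOLDER=/home/studen/software/src/iraemmsegmentationmodels
    nnUNet_predict -i $HOME/temp/segmentationdm/CT -o $HOME/temp/segmentationdm/segmentations -t 501 -m 3d_fullres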

+ 41 - 0
pythonScripts/test.py

@@ -0,0 +1,41 @@
+import sys
+import os
+import json
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+    fsetup=os.path.join(fhome,'.labkey','setup.json')
+    with open(fsetup) as f:
+        setup=json.load(f)
+
+    sys.path.append(setup['paths']['labkeyInterface'])
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+
+    net=labkeyInterface.labkeyInterface()
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+    net.init(fconfig)
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+    
+    id=net.getUserId()  
+    print('User id: {}'.format(id))
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+
+    fv={'PatientId':'NIX-LJU-D2002-IRAE-A011',
+            'visitCode':'VISIT_0',
+            'ModifiedBy':str(id)}
+
+    fv={'ModifiedBy':str(id)}
+    filters=[{'variable':v,'value':fv[v],'oper':'eq'} for v in fv]
+    ds=db.selectRows(pars['project'],pars['schemaName'],pars['queryName'],filters)
+    
+    for r in ds['rows']:
+        print(r)
+    print('here')
+    return 0
+
+
+if __name__=="__main__":
+    main(sys.argv[1])
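Note: test.py is a minimal connectivity check; it resolves the LabKey user id and lists the rows that user modified in the configured query. Example call with a hypothetical parameter file that must define project, schemaName and queryName:

    # connectivityTest.json is an assumed example file name
    python3 test.py ~/templates/connectivityTest.json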

BIN
segmentation/saved_models/DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt.data-00000-of-00001


BIN
segmentation/saved_models/DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt.index


BIN
segmentation/saved_models/DM_defaults.DM_train_qtii_LABELMASKS4.final.2020-10-31.05.59.36.425298.model.ckpt.data-00000-of-00001


BIN
segmentation/saved_models/DM_defaults.DM_train_qtii_LABELMASKS4.final.2020-10-31.05.59.36.425298.model.ckpt.index


+ 0 - 6
segmentation/saved_models/INFO_ABOUT_MODELS.txt

@@ -1,6 +0,0 @@
-Retrained on more patients, improved thyroid performance, only for 4 organs, labels are changed to 1-4!
-DM_defaults.DM_train_qtii_LABELMASKS4.final.2020-10-31.05.59.36.425298.model.ckpt
-
-Old model on which all results are obtained, labels 1-16, poor thyroid performance.
-DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt
-

+ 1 - 1
segmentation/test/testConfig.cfg.template

@@ -9,7 +9,7 @@ sessionName = "currentSession"
 folderForOutput = "__workDir__/output"
 
 #  [Optional] Path to a saved model, to load parameters from in the beginning of the session. If one is also specified using the command line, the latter will be used.
-cnnModelFilePath = "__segmentBase__/saved_models/__model__"
+cnnModelFilePath = "/home/nixUser/software/src/irAEMMSegmentationModels/deepmedic/__model__"
 
 #  +++++++++++ Input +++++++++++
 #  [Required] A list that should contain as many entries as the channels of the input image (eg multi-modal MRI). The entries should be paths to files. Those files should be listing the paths to the corresponding channels for each test-case. (see example files).

+ 3 - 3
slicerModules/iraemmBrowser.py

@@ -83,7 +83,7 @@ class iraemmBrowserWidget(ScriptedLoadableModuleWidget):
 
     
 
-    ds=self.db.selectRows(self.project,self.schema,self.dataset,[])
+    ds=self.db.selectRows(self.project,self.schema,self.dataset,[],"segmentationReview")
     ids=[row['PatientId'] for row in ds['rows']]
     ids=list(set(ids))
 
@@ -225,7 +225,7 @@ class iraemmBrowserWidget(ScriptedLoadableModuleWidget):
 
   def onPatientListChanged(self,i):
       idFilter={'variable':'PatientId','value':self.patientList.currentText,'oper':'eq'}
-      ds=self.db.selectRows(self.project,self.schema,self.dataset, [idFilter])
+      ds=self.db.selectRows(self.project,self.schema,self.dataset, [idFilter],"segmentationReview")
       seq=[int(row['SequenceNum']) for row in ds['rows']]
       self.visitList.clear()  
             
@@ -242,7 +242,7 @@ class iraemmBrowserWidget(ScriptedLoadableModuleWidget):
       idFilter={'variable':'PatientId',\
               'value':self.patientList.currentText,'oper':'eq'}
       sFilter={'variable':'SequenceNum','value':s,'oper':'eq'}
-      ds=self.db.selectRows(self.project,self.schema,self.dataset,[idFilter,sFilter])
+      ds=self.db.selectRows(self.project,self.schema,self.dataset,[idFilter,sFilter],"segmentationReview")
       if not len(ds['rows'])==1:
           print("Found incorrect number {} of matches for [{}]/[{}]".\
                   format(len(ds['rows']),\

+ 2 - 2
templates/segmentation.json.sample

@@ -1,8 +1,8 @@
 {
  "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
  "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
- "__tempBase__":"__home__/temp/segmentation",
- "__segBase__":"/home/nixUser/software/src/irAEMM/segmentation",
+ "__tempBase__":"__home__/temp/segmentationdm",
+ "__segBase__":"/home/studen/software/src/irAEMM/segmentation",
  "__roiFile__":"testMask.nii.gz",
  "__ctFile__":"testCT.nii.gz",
  "__petFile__":"testPET.nii.gz",

+ 84 - 0
templates/segmentationIRAEMM.json

@@ -0,0 +1,84 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentationdm",
+ "__segBase__":"/home/studen/software/src/iraemm/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"/iPNUMMretro/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "viewName":"segmentationReview",
+ "participantField":"PatientId",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "imageDir":"preprocessedImages",
+ "version":"v5",
+ "versionNumber":"5",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"},
+	"segmentation":{
+		"suffix":".nii.gz"
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "nnUNet":{
+	 "ModelId":"501",
+	 "configuration":"3d_fullres",
+	 "env":{
+		"nnUNet_raw_data_base":"__tempBase__",
+		"nnUNet_preprocessed":"__tempBase__",
+		"RESULTS_FOLDER":"/home/studen/software/src/iraemmsegmentationmodels"
+	 }
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+
+}
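Note: the replacePattern block in this template feeds the substitution performed in Python (replacePatterns() applies re.sub per key when writing the deepmedic configs). For the test config it behaves roughly like the sed sketch below; the expanded paths assume __tempBase__ resolved to /home/studen/temp/segmentationdm and are illustrative only:

    sed -e 's|__workDir__|/home/studen/temp/segmentationdm|g' \
        -e 's|__roi__|/home/studen/temp/segmentationdm/testMask.nii.gz|g' \
        -e 's|__ct__|/home/studen/temp/segmentationdm/testCT.nii.gz|g' \
        -e 's|__model__|DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt|g' \
        segmentation/test/testConfig.cfg.template > /home/studen/temp/segmentationdm/testConfig.cfg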

+ 87 - 0
templates/segmentationIRAEMM_ONKO.json

@@ -0,0 +1,87 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/iraemm",
+ "__segBase__":"/home/studen/software/src/iraemm/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"IPNUMMprospektiva/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "viewName":"segmentationReview",
+ "participantField":"ParticipantId",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "reportQuery":"reportImages",
+ "reportSchema":"lists",
+ "percentileQuery":"SUVQuantiles",
+ "imageDir":"preprocessedImages",
+ "version":"v5",
+ "versionNumber":"5",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"},
+	"segmentation":{
+		"suffix":".nii.gz"
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "nnUNet":{
+	 "ModelId":"501",
+	 "configuration":"3d_fullres",
+	 "env":{
+		"nnUNet_raw_data_base":"__tempBase__",
+		"nnUNet_preprocessed":"__tempBase__",
+		"RESULTS_FOLDER":"/home/studen/software/src/iraemmsegmentationmodels"
+	 }
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+
+}

+ 66 - 0
templates/segmentationPreproces.json

@@ -0,0 +1,66 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentation",
+ "__segBase__":"/home/nixUser/software/src/irAEMM/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"IPNUMMprospektiva/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "participantField":"ParticipantId",
+ "imageDir":"preprocessedImages",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+}

+ 84 - 0
templates/segmentationTCIA.json

@@ -0,0 +1,84 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentationdm",
+ "__segBase__":"/home/studen/software/src/iraemm/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"/Test/segmentationTest",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "viewName":"segmentationReview",
+ "participantField":"ParticipantId",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "imageDir":"preprocessedImages",
+ "version":"v5",
+ "versionNumber":"5",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"},
+	"segmentation":{
+		"suffix":".nii.gz"
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "nnUNet":{
+	 "ModelId":"501",
+	 "configuration":"3d_fullres",
+	 "env":{
+		"nnUNet_raw_data_base":"__tempBase__",
+		"nnUNet_preprocessed":"__tempBase__",
+		"RESULTS_FOLDER":"/home/studen/software/src/iraemmsegmentationmodels"
+	 }
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+
+}

Some files were not shown because too many files changed in this diff