123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348 |
- import DICOMLib
- import sys
- import json
- import numpy
- import zipfile
- import shutil
- import os
- import signal
- from contextlib import contextmanager
def main(configFile=None):
    """Entry point: load a JSON config, connect to LabKey/Orthanc and
    compute mean heart doses for every row of the configured dataset.

    configFile -- path to the JSON configuration file (required).
    Raises ValueError when no configuration file is supplied.
    """
    print('Imported!')
    if configFile is None:
        # fail early with a clear message instead of open(None) -> TypeError
        raise ValueError('main() requires the path to a JSON config file')
    with open(configFile) as f:
        config = json.load(f)
    # merge the live database/file-browser handles into the config dict
    config.update(connectDB(config))
    parseData(config, getMeanHeartDose)
class TimeoutException(Exception):
    """Raised by time_limit() when the guarded code exceeds its deadline."""
    pass
@contextmanager
def time_limit(seconds):
    """Limit the wrapped block to `seconds` seconds of wall-clock time.

    Arms SIGALRM (Unix only; main thread only) and raises TimeoutException
    from inside the block when the alarm fires. The alarm is always
    cancelled on exit, whether the block finished or raised.
    """
    def _on_alarm(signum, frame):
        raise TimeoutException("Timed out!")
    signal.signal(signal.SIGALRM, _on_alarm)
    signal.alarm(seconds)
    try:
        yield
    finally:
        # disarm the pending alarm even if the block raised
        signal.alarm(0)
def connectDB(setup):
    """Open LabKey and Orthanc connections for the server named in setup.

    Loads the nixsuite wrapper libraries from ~/software/src/nixsuite and
    reads credentials from ~/.labkey/<server>.json, where <server> comes
    from setup['server'].

    Returns a dict of ready-to-use browser objects:
      db  - LabKey database browser
      fb  - LabKey file browser
      odb - Orthanc database browser
      ofb - Orthanc file browser
    """
    # FIX: importlib was used below but never imported anywhere in the
    # file (NameError at runtime); import it locally like the other
    # function-scope imports in this routine.
    import importlib
    nixSuite = os.path.join(os.path.expanduser('~'), 'software', 'src', 'nixsuite')
    sys.path.append(os.path.join(nixSuite, 'wrapper'))
    import nixWrapper
    nixWrapper.loadLibrary('labkeyInterface')
    import labkeyInterface
    import labkeyDatabaseBrowser
    import labkeyFileBrowser
    nixWrapper.loadLibrary('orthancInterface')
    import orthancInterface
    import orthancDatabaseBrowser
    import orthancFileBrowser
    # pick up code changes if the module was already loaded in this session
    importlib.reload(orthancFileBrowser)
    net = labkeyInterface.labkeyInterface()
    qfile = '{}.json'.format(setup['server'])
    fconfig = os.path.join(os.path.expanduser('~'), '.labkey', qfile)
    net.init(fconfig)
    net.getCSRF()
    onet = orthancInterface.orthancInterface()
    onet.init(fconfig)
    return {"db": labkeyDatabaseBrowser.labkeyDB(net),
            "fb": labkeyFileBrowser.labkeyFileBrowser(net),
            "odb": orthancDatabaseBrowser.orthancDB(onet),
            "ofb": orthancFileBrowser.orthancFileBrowser(onet)}
- #explicit template
- def _updateRow(config,r):
- print(r)
- return False
- def parseData(config,updateRow=_updateRow):
- #iterates over data appliying updateRow function to every row
- #updateRow is an implementation of a generic function
- #with arguments
- # def updateRow(config,r)
- #returning True if row needs to be updated on the server
- #and False otherwise
- #update values are stored in the r dict
- db=config['db']
-
- qFilter=config.get('qFilter',[])
- debug=config.get('debug',False)
- #get dataset
- ds=db.selectRows(config['project'],config['schema'],config['query'],qFilter)
- rows=ds['rows']
- #shorten list in debug mode
- if debug:
- rows=rows[0:3]
- for r in rows:
- #this could be a generic function either as parameter of config or an argument to parseData
- update=updateRow(config,r)
- #print(r)
- if not update:
- continue
- db.modifyRows('update',config['project'],config['schema'],config['query'],[r])
def getMeanHeartDose(config, r):
    """Compute the mean heart dose for row r, storing it in r['doseHeart'].

    Returns True when r was changed and must be written back to the
    server, False when the row is unchanged (no study, a previous
    timeout, or a dose value already present).

    Side effects: downloads the study's DICOM instances to a temporary
    directory, loads them into the Slicer scene, and cleans both up again.
    """
    pid = r['ParticipantId']
    sid = r['orthancStudyId']
    if not sid:
        print(f'No study for {pid}')
        return False
    if r['comments'] == 'TIMEOUT':
        # a previous run already timed out on this study; don't retry
        print(f'Skipping {pid} - timeout')
        return False
    timeout = config.get('timeout', 1200)
    doseHeart = r['doseHeart']
    if doseHeart:
        # no need to update
        return False
    # outDir=getDicomZip(config,sid)
    outDir = getDicomInstances(config, sid)
    nodes = loadVolumes(outDir)
    msg = checkNodes(config, nodes)
    if len(msg) > 0:
        # ambiguous study (multiple dose/segmentation nodes): record the
        # problem in comments and push the row back
        r['comments'] = msg
        clearDir(outDir)
        removeNodes(nodes)
        return True
    print(f'Running with timeout={timeout} s')
    try:
        # FIX: timeout was read and printed but never enforced; run the
        # expensive dose extraction under time_limit so a stuck study is
        # marked TIMEOUT (and skipped on the next pass) instead of
        # hanging the whole batch.
        with time_limit(timeout):
            r['doseHeart'] = getMeanDoseRibbon(nodes, 'Heart')
    except TimeoutException:
        r['comments'] = 'TIMEOUT'
    clearDir(outDir)
    # remove all created nodes from scene; row needs updating either way
    removeNodes(nodes)
    return True
def loadVolumes(dataDir):
    """Import all DICOM files under dataDir and load them into Slicer.

    Uses a temporary DICOM database for the import, then loads every
    patient found into the MRML scene.

    Returns the created nodes grouped by role:
      all  - every node produced by the import
      vol  - scalar volume nodes
      dose - scalar volumes whose name contains 'RTDOSE'
      seg  - segmentation nodes
    """
    loadedNames = []
    with DICOMLib.DICOMUtils.TemporaryDICOMDatabase() as db:
        DICOMLib.DICOMUtils.importDicom(dataDir, db)
        for patientUID in db.patients():
            print(patientUID)
            loadedNames.extend(DICOMLib.DICOMUtils.loadPatientByUID(patientUID))
    allNodes = [slicer.util.getNode(pattern=name) for name in loadedNames]
    volumeNodes = [n for n in allNodes
                   if n.GetClassName() == 'vtkMRMLScalarVolumeNode']
    doseNodes = [n for n in volumeNodes if n.GetName().find('RTDOSE') > -1]
    segmentationNodes = [n for n in allNodes
                         if n.GetClassName() == 'vtkMRMLSegmentationNode']
    nv = len(volumeNodes)
    ns = len(segmentationNodes)
    nd = len(doseNodes)
    print(f'vol:{nv} seg:{ns} dose: {nd}')
    return {'all': allNodes, 'vol': volumeNodes,
            'dose': doseNodes, 'seg': segmentationNodes}
def checkNodes(config, nodes):
    """Return a short problem report for ambiguous studies.

    Produces 'DOSE[n]' when more than one dose volume was loaded and/or
    'SEG[m]' when more than one segmentation was loaded, joined by '/'.
    An empty string means the study is unambiguous.
    """
    problems = []
    doseCount = len(nodes['dose'])
    if doseCount > 1:
        problems.append(f'DOSE[{doseCount}]')
    segCount = len(nodes['seg'])
    if segCount > 1:
        problems.append(f'SEG[{segCount}]')
    return '/'.join(problems)
def removeNodes(nodes):
    """Remove every node created by loadVolumes from the Slicer scene."""
    scene = slicer.mrmlScene
    for n in nodes['all']:
        scene.RemoveNode(n)
def getMeanDose(nodes, target):
    """Mean dose over the segment named `target`.

    Uses the first segmentation node and the first dose volume from
    `nodes`; the segment's binary labelmap is resampled onto the dose
    grid before masking. Returns a plain Python float (not numpy.float32)
    so the value serializes to JSON without complaint.
    """
    segNode = nodes['seg'][0]
    segmentation = segNode.GetSegmentation()
    matching = [sid for sid in segmentation.GetSegmentIDs()
                if segmentation.GetSegment(sid).GetName() == target]
    doseNode = nodes['dose'][0]
    doseArray = slicer.util.arrayFromVolume(doseNode)
    print('Dose array shape: {}'.format(doseArray.shape))
    segmentArray = slicer.util.arrayFromSegmentBinaryLabelmap(
        segNode, matching[0], doseNode)
    print('Segment array shape: {}'.format(segmentArray.shape))
    meanDose = float(numpy.mean(doseArray[segmentArray != 0]))
    print(f'Dose {meanDose}')
    return meanDose
def getMeanDoseRibbon(nodes, target):
    """Mean dose over segment `target` using the 'Ribbon model'
    representation of the segmentation.

    Switches the segmentation's source representation to 'Ribbon model'
    before export — presumably because the default representation was
    prohibitively slow here (see the notes in getMeanDoseSlicerRT).
    Returns a plain float.
    """
    seg = nodes['seg'][0].GetSegmentation()
    seg.CreateRepresentation('Ribbon model')
    seg.SetSourceRepresentationName('Ribbon model')
    # FIX: the remainder of this function duplicated getMeanDose
    # line-for-line; delegate so the extraction logic lives in one place.
    return getMeanDose(nodes, target)
def getMeanDoseSlicerRT(nodes, target):
    """Mean dose over segment `target`, SlicerRT-export variant.

    Follows the SlicerRT BatchProcessing example: exports the segment to
    a temporary labelmap node aligned with the dose grid, then masks the
    dose array. Per the original notes this was tried as an alternative
    to getMeanDose but was not faster; kept for reference.
    Returns a plain float.
    """
    segNode = nodes['seg'][0]
    seg = segNode.GetSegmentation()
    segmentIds = seg.GetSegmentIDs()
    targetSegmentIds = [s for s in segmentIds
                        if seg.GetSegment(s).GetName() == target]
    doseNode = nodes['dose'][0]
    labelMapNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLLabelMapVolumeNode")
    export = slicer.vtkSlicerSegmentationsModuleLogic.ExportSegmentsToLabelmapNode
    export(segNode, targetSegmentIds, labelMapNode, doseNode)
    doseArray = slicer.util.arrayFromVolume(doseNode)
    print('SlicerRT: Dose array shape: {}'.format(doseArray.shape))
    segmentArray = slicer.util.arrayFromVolume(labelMapNode)
    print('SlicerRT: Segment array shape: {}'.format(segmentArray.shape))
    doseVoxels = doseArray[segmentArray != 0]
    # float() so JSON serialization doesn't choke on numpy.float32
    meanDose = float(numpy.mean(doseVoxels))
    print(f'SlicerRT: Dose {meanDose} Gy')
    # FIX: was slicer.mrmlScene.removeNode (AttributeError at runtime,
    # leaking the labelmap node); the MRML scene method is RemoveNode,
    # as used in removeNodes() above.
    slicer.mrmlScene.RemoveNode(labelMapNode)
    return meanDose
def getDicomInstances(config, sid):
    """Download every RTSTRUCT/RTDOSE instance of Orthanc study sid.

    Instances are written as <oid>.dcm into <baseDir>/<sid> (baseDir
    defaults to ~/temp); any previous content of that directory is
    removed first. Returns the output directory path.
    """
    odb = config['odb']
    ofb = config['ofb']
    wantedModalities = ('RTSTRUCT', 'RTDOSE')
    instances = []
    for seriesId in odb.getStudyData(sid)['Series']:
        seriesData = odb.getSeriesData(seriesId)
        if seriesData['MainDicomTags']['Modality'] in wantedModalities:
            instances.extend(seriesData['Instances'])
    # download instances one by one into a freshly-cleared study directory
    baseDir = config.get('baseDir',
                         os.path.join(os.path.expanduser('~'), 'temp'))
    outDir = os.path.join(baseDir, sid)
    clearDir(outDir)
    os.mkdir(outDir)
    for oid in instances:
        ofb.getInstance(oid, os.path.join(outDir, f'{oid}.dcm'))
    return outDir
def getDicomZip(config, sid):
    """Fetch Orthanc study sid as a zip archive, unpack it and return
    the extraction directory.

    Reuses an already-downloaded <baseDir>/<sid>.zip when present; the
    archive is deleted after extraction to save space.
    """
    ofb = config['ofb']
    baseDir = config.get('baseDir',
                         os.path.join(os.path.expanduser('~'), 'temp'))
    path = os.path.join(baseDir, f'{sid}.zip')
    if not os.path.isfile(path):
        ofb.getZip('studies', sid, path, 'archive')
    print(f'Using {path}')
    # unzip path, then drop the archive
    outDir = extractZip(config, path)
    os.remove(path)
    return outDir
def extractZip(config, fname):
    """Flatten zip archive fname into baseDir/<bname>, where bname is
    the archive basename without its .zip suffix.

    Members are extracted under sequential names out000.dcm, out001.dcm,
    ... which avoids *nix/Windows directory-separator confusion inside
    the archive. Returns the output directory path.
    """
    archive = zipfile.ZipFile(fname)
    members = archive.namelist()
    bname = os.path.basename(fname).replace('.zip', '')
    outDir = os.path.join(config['baseDir'], bname)
    # start from a clean directory
    clearDir(outDir)
    os.mkdir(outDir)
    for i, member in enumerate(members):
        target = os.path.join(outDir, f'out{i:03d}.dcm')
        with archive.open(member) as src, open(target, 'wb') as dst:
            shutil.copyfileobj(src, dst)
    return outDir
def clearDir(outDir):
    """Delete directory outDir and all its contents; no-op if absent."""
    if not os.path.isdir(outDir):
        return
    shutil.rmtree(outDir)
if __name__ == '__main__':
    # FIX: the original wrapped main(sys.argv[1]) in `except IndexError`,
    # which also swallowed IndexErrors raised inside main() itself and
    # silently re-ran main() without a config. Test argv length instead.
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        main()
    print('Succesful completion')
    # quit() is kept as-is: this script runs inside the Slicer Python
    # environment, where it terminates the application session.
    quit()
|