
Merge to head

Andrej 3 years ago
commit 572caa77de

+ 137 - 0
pythonScripts/findCandidatesForSchedule.py

@@ -0,0 +1,137 @@
+#for scheduled visits (scheduleQuery entries with status scheduled and actionDate before the cut-off),
+#looks for matching PET WB studies on orthanc within scheduleToleranceDays of the scheduled date
+#and records them in the missingImages list with failureDescription 'MISSING FORM'
+
+
+import os
+import json
+import re
+import sys
+import datetime
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+    fsetup=os.path.join(fhome,'.labkey','setup.json')
+    with open(fsetup,'r') as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup['paths']['nixWrapper'])
+
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+    with open(parameterFile,'r') as f:
+        pars=json.load(f)
+
+
+
+    i=0
+    #from orthancDatabase/Imaging dataset
+    projectOrthanc=pars['Orthanc']['project']
+    inputQuery=pars['Orthanc']['queryName']
+    inputSchema=pars['Orthanc']['schemaName']
+    inputParticipantField=pars['Orthanc']['participantField']
+
+    #to target project dataset
+    projectStudy=pars['Database']['project']
+    #'iPNUMMretro/Study'
+    #for prospective, set
+    #projectStudy='IPNUMMprospektiva/Study'
+    outputQuery=pars['Database']['queryName']
+    outputSchema=pars['Database']['schemaName']
+    #select schedule entries that are still in scheduled status
+    scheduleCutOffDate=pars['Database']['scheduleCutOffDate']#format YYYY-MM-DD
+    scheduleQuery=pars['Database']['scheduleQuery']
+    scheduleToleranceDays=int(pars['Database']['scheduleToleranceDays'])
+    dbParticipantField=pars['Database']['participantField']
+
+    missingSchema=pars['Database']['missingImagesSchema']
+    missingQuery=pars['Database']['missingImagesQuery']
+
+    #make a list of images from transferQuery
+    cutOffFilter={'variable':'actionDate','value':scheduleCutOffDate,'oper':'datelt'}
+    #status of 1 is scheduled
+    statusFilter={'variable':'actionStatus','value':'1','oper':'eq'}
+    dsImage=db.selectRows(projectStudy,outputSchema,scheduleQuery,[cutOffFilter,statusFilter])
+
+    #clear entries from missing list
+    failureFilter={'variable':'failureDescription','value':'MISSING FORM','oper':'eq'}
+    dsDelete=db.selectRows(projectStudy,missingSchema,missingQuery,[failureFilter])
+    db.modifyRows('delete',projectStudy,missingSchema,missingQuery,dsDelete['rows'])
+
+
+    for im in dsImage['rows']:
+
+        print('{}: {} {}'.format(im[dbParticipantField],im['actionDate'],im['actionStatus']))       
+        im['imagingVisitId']=im['visitId']
+        actionDate=datetime.datetime.strptime(im['actionDate'],'%Y/%m/%d %H:%M:%S') 
+        actionDateYMD=actionDate.strftime('%Y%m%d')
+        im['imageDate']=actionDateYMD
+        #this is still to be configured
+        inputIdFilter={'variable':inputParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+
+        idFilter={'variable':dbParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+        #filter on series description - for PET, we should have 'PET WB'
+        sdFilter={'variable':'seriesDescription','value':'PET WB','oper':'eq'}
+        
+        
+        #have to convert from datetime to %Y%m%d format
+        #dateFilter={'variable':'imageDate','value':im['imageDate'],'oper':'eq'}
+
+        dsOrthanc=db.selectRows(projectOrthanc,inputSchema,inputQuery,[inputIdFilter,sdFilter])
+        #what if dsOrthanc['rows'] is empty?
+        #this is part of QA and is reported in missingImages Schema/Query
+
+        uploadStatus="FAILED"
+        imageDateMismatch=[]
+        for im1 in dsOrthanc['rows']:
+
+            date=datetime.datetime.strptime(im1['studyDate'],'%Y/%m/%d %H:%M:%S') 
+            dt=date-actionDate
+            if (abs(dt.days)>scheduleToleranceDays): 
+                continue
+
+            #convert date to %Y%m%d notation
+            dateYMD=date.strftime('%Y%m%d')
+            
+            #enter this time difference to a list
+            imFilter={'variable':'imagingVisitId',
+                    'value':'{}'.format(im['imagingVisitId']),
+                    'oper':'eq'}
+            dsMissing=db.selectRows(projectStudy,missingSchema,\
+                    missingQuery,[idFilter,imFilter])
+
+            #already recorded
+            vals=[dbParticipantField,'imagingVisitId','imageDate']
+            mode='insert'
+            if len(dsMissing['rows'])>0:
+                mode='update'
+                orow=dsMissing['rows'][0]
+            else:
+                orow={v:im[v] for v in vals}
+            orow['imageDateMismatch']=dateYMD
+            orow['failureDescription']='MISSING FORM'
+            db.modifyRows(mode,projectStudy,missingSchema,\
+                    missingQuery,[orow])
+        
+    print("Done: {}".format(len(dsImage['rows'])))
+
+if __name__=='__main__':
+    main(sys.argv[1])
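
A minimal stand-alone sketch of the matching rule used above: LabKey-style filters are plain dicts, and an orthanc study counts as a candidate when its studyDate lies within scheduleToleranceDays of the scheduled actionDate. The dates, cut-off and tolerance below are illustrative, not taken from a real schedule.

import datetime

scheduleToleranceDays=7
#filters passed to selectRows are plain dicts
cutOffFilter={'variable':'actionDate','value':'2022-01-01','oper':'datelt'}
statusFilter={'variable':'actionStatus','value':'1','oper':'eq'}

actionDate=datetime.datetime.strptime('2021/11/05 00:00:00','%Y/%m/%d %H:%M:%S')
studyDate=datetime.datetime.strptime('2021/11/09 08:30:00','%Y/%m/%d %H:%M:%S')
dt=studyDate-actionDate
if abs(dt.days)<=scheduleToleranceDays:
    print('candidate study date:',studyDate.strftime('%Y%m%d'))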

+ 234 - 0
pythonScripts/generateFigures.py

@@ -0,0 +1,234 @@
+import nibabel
+import os
+import json
+import sys
+import numpy
+import matplotlib.pyplot
+import matplotlib.colors
+#import chardet
+
+def buildPath(server,project,imageDir,patientCode,visitCode,imageType):
+    path='/'.join([server,'labkey/_webdav',project,'@files',imageDir,patientCode,visitCode])
+    tail='_notCropped_2mmVoxel'
+    if imageType=='Segm':
+        tail='_v5'
+    path+='/'+patientCode+'-'+visitCode+'_'+imageType+tail+'.nii.gz'
+    return path
+
+
+def getCOWAxis(seg,val,axis):
+#returns center of weight for segmentation image where val is selected
+    if axis==0:
+        #2,1 or 1,1
+        i1=2
+        i2=1
+    if axis==1:
+        #0,1 or 2,0
+        i1=2
+        i2=0
+    if axis==2:
+        #0,0 or 1,0
+        i1=1
+        i2=0
+    
+    s=numpy.sum(numpy.sum(seg==val,i1),i2)
+    x=numpy.arange(len(s))
+    s0=0
+    for i in x:
+        if s[i]==0:
+            continue
+        s0=i
+        break
+    s1=len(s)-1
+    revIdx=numpy.arange(len(s),0,-1)-1
+    for i in revIdx:
+        if s[i]==0:
+            continue
+        s1=i
+        break
+    s1+=1
+    return [s0,numpy.average(x,weights=s),s1]
+
+def getGeometry(seg,val):
+#return center of weight of segmentation seg for segment val as a 3D vector
+    return [getCOWAxis(seg,val,x) for x in [0,1,2]]
+
+def getCOW(geom):
+    return [x[1] for x in geom]
+
+def getRange(geom):
+    return [[x[0],x[2]] for x in geom]
+
+
+def plot(imgs,t,val,tempBase):
+
+
+    segColors=[[0,0,0],[0.1,0.1,0.1],[0,0.2,0],[1,0,0],[0,0,1],[1,0,1]]
+
+#3-lungs 4-thyroid 5-bowel
+    delta=20
+    if val==4:
+        delta=40
+    window=350
+    level=40
+    geo=getGeometry(imgs['Segm'],val)
+    cowF=getCOW(geo)
+    rng=getRange(geo)
+    #print(rng)
+    cowI=[int(x) for x in cowF]
+    segment=imgs['Segm']==val
+
+    i0=rng[0][0]-delta
+    if i0<0:
+        i0=0
+    i1=rng[0][1]+delta
+    if i1>imgs['CT'].shape[0]:
+        i1=imgs['CT'].shape[0]
+    k0=rng[2][0]-delta
+    if k0<0:
+        k0=0
+    k1=rng[2][1]+delta
+    if k1>imgs['CT'].shape[2]:
+        k1=imgs['CT'].shape[2]
+
+    if t=='CT':
+        v0=level-0.5*window
+        v1=v0+window
+        matplotlib.pyplot.imshow(imgs['CT'][i0:i1,cowI[1],k0:k1].transpose(),cmap='gray',
+                vmin=v0,vmax=v1)
+    if t=='PET':
+        matplotlib.pyplot.imshow(imgs['PET'][i0:i1,cowI[1],k0:k1].transpose(),cmap='inferno')
+        #blueish
+
+    if t=='CT':
+        rgb=segColors[val]
+    if t=='PET':
+        rgb=[1,1,1]
+
+    colors = [rgb+[c] for c in numpy.linspace(0,1,100)]
+                                                                                    
+    cmap = matplotlib.colors.LinearSegmentedColormap.from_list('mycmap', colors, N=5)
+
+    matplotlib.pyplot.imshow(segment[i0:i1,cowI[1],k0:k1].transpose(), cmap=cmap, alpha=0.2)
+    matplotlib.pyplot.gca().invert_yaxis()
+    outfile=os.path.join(tempBase,'slice{}_{}.png'.format(t,val))
+    matplotlib.pyplot.savefig(outfile)
+    return outfile
+
+
+
+def main(parameterFile):
+#mask for segmentations
+
+    setupJSON=os.path.join(os.path.expanduser('~'),'.labkey','setup.json')
+    with open(setupJSON) as f:
+        setup=json.load(f)
+            
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper 
+             
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    fconfig=os.path.join(os.path.expanduser('~'),'.labkey','network.json')
+                 
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+    tempBase=os.path.join(os.path.expanduser('~'),'temp')
+    
+    with open(parameterFile) as f:
+        pars=json.load(f)
+
+    project=pars['project']
+    dataset=pars['targetQuery']
+    schema=pars['targetSchema']
+    view=pars['viewName']
+    
+    reportSchema=pars['reportSchema']
+    reportQuery=pars['reportQuery']
+    participantField=pars['participantField']
+
+    #all images from database
+    ds=db.selectRows(project,schema,dataset,[],view)
+    
+    #input
+    imageResampledField={"CT":"ctResampled","PET":"petResampled","patientmask":"ROImask"}
+
+    rows=ds['rows']
+    #rows=[ds['rows'][0]]
+
+    for r in rows:
+        print(r)
+        iTypes=['CT','PET','Segm']
+        needToCalculate=False
+        for t in ['CT','PET']:
+            idFilter={'variable':'patientCode','value':r['patientCode'],'oper':'eq'}
+            visitFilter={'variable':'visitCode','value':r['visitCode'],'oper':'eq'}
+            verFilter={'variable':'version','value':pars['version'],'oper':'eq'}
+            typeFilter={'variable':'type','value':t,'oper':'eq'}
+            
+            ds2=db.selectRows(project,reportSchema,reportQuery,[idFilter,visitFilter,verFilter,typeFilter])
+            
+            if len(ds2['rows'])==0:
+                #no report row for this image type yet - figures need to be generated
+                #(there will be multiple rows per image, one for each organ)
+                needToCalculate=True
+                break
+
+        if not needToCalculate:
+            continue
+        imgs={} 
+        for t in iTypes:
+            
+            try:
+                imagePath=r['_labkeyurl_'+imageResampledField[t]]
+            except KeyError:
+                ds1=db.selectRows(project,pars['segmentationSchema'],pars['segmentationQuery'],\
+                        [idFilter,visitFilter,verFilter])
+                imagePath=ds1['rows'][0]['_labkeyurl_segmentation']
+
+            localPath=os.path.join(tempBase,'image'+t+'.nii.gz')
+            if os.path.isfile(localPath):
+                os.remove(localPath)
+            fb.readFileToFile(imagePath,localPath)
+            img=nibabel.load(localPath)
+            imgs[t]=img.get_fdata()
+        print('Loading completed')
+        for t in ['CT','PET']:
+            for val in [3,4,5]:
+                outfile=plot(imgs,t,val,tempBase)
+                remoteDir=fb.buildPathURL(project,[pars['imageDir'],r['patientCode'],r['visitCode']])
+                imageFile=r['patientCode']+'-'+r['visitCode']+'_'+t+'_{}'.format(val)+'_'+pars['version']+'.png'
+                remoteFile='/'.join([remoteDir,imageFile])
+                fb.writeFileToFile(outfile,remoteFile)
+                print('Uploaded {}'.format(remoteFile))
+                os.remove(outfile)  
+                organFilter={'variable':'organ','value':'{}'.format(val),'oper':'eq'}
+                typeFilter['value']=t
+                ds3=db.selectRows(project,reportSchema,reportQuery,\
+                        [idFilter,visitFilter,verFilter,organFilter,typeFilter])
+                if len(ds3['rows'])>0:
+                    mode='update'
+                    frow=ds3['rows'][0]
+                else:
+                    mode='insert'
+                    frow={}
+                    for f in [participantField,'patientCode','visitCode']:
+                        frow[f]=r[f]
+                frow['organ']='{}'.format(val)
+                frow['type']=t
+                frow['version']=pars['version']
+                frow['file']=imageFile
+                db.modifyRows(mode,project,reportSchema,reportQuery,[frow])
+        print('Images uploaded')
+    print('Done')
+
+
+if __name__ == '__main__':
+    main(sys.argv[1])
+
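
For reference, a small self-contained sketch of the center-of-weight computation performed by getCOWAxis, on a made-up 3D segmentation (the label value, shape and extent are illustrative).

import numpy

#toy segmentation: label 4 occupies slices 2-4 along axis 0
seg=numpy.zeros((8,8,8))
seg[2:5,3:6,3:6]=4

#profile of label 4 along axis 0 (sum over the other two axes, as in getCOWAxis)
s=numpy.sum(numpy.sum(seg==4,2),1)
x=numpy.arange(len(s))
nz=numpy.nonzero(s)[0]
#[first slice, weighted center, last slice+1] - here [2, 3.0, 5]
print([nz[0],numpy.average(x,weights=s),nz[-1]+1])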

+ 61 - 0
pythonScripts/modifyDataset.py

@@ -0,0 +1,61 @@
+#a script to modify rows of a list/dataset. The current implementation sets the
+#View field of every row of the configured query to '[VIEW]'
+
+#basic python
+import os
+import subprocess
+import re
+import datetime
+import sys
+import json
+
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+
+    net=labkeyInterface.labkeyInterface()
+    net.init(os.path.join(fhome,'.labkey','network.json'))
+
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+
+    #read run parameters (project, schema, query)
+    with open(parameterFile) as f:
+        pars=json.load(f)
+
+
+    project=pars['project']
+    schema=pars['schema']
+    query=pars['query']
+    
+
+    #study section ################
+
+
+
+#select all rows of the configured query
+    
+    ds=db.selectRows(project,schema,query,[])
+
+    for r in ds['rows']:
+        r['View']='[VIEW]'
+        db.modifyRows('update',project,schema,query,[r])
+
+    print("Done")
+
+if __name__ == '__main__':
+    main(sys.argv[1])
+
+

+ 168 - 0
pythonScripts/organ_percentile.py

@@ -0,0 +1,168 @@
+import numpy
+import sys
+import os
+import json
+import nibabel
+
+def organ_percentile(img, mask, level, p):
+#ORGAN_PERCENTILE - computes suv_x, the pth percentile of the distribution of
+#   image intensity values in img within the ROI where mask equals level
+#
+#   Inputs:
+#       img - image. ndarray
+#       mask - labeled segmentation. Must have the same shape as img. ndarray
+#       level - label value in mask that selects the ROI
+#       p - percentile (scalar or array of percentiles). Between 0-100
+#
+#   Outputs:
+#       suv_x - percentile of distribution. Defined by:
+#
+#                 suv_x
+#           p/100 = ∫ H(x) dx
+#                   0
+#
+#       where H(x) is the normalized distribution of image values within the ROI
+    #img = np.array(img)
+    #mask = np.array(mask)
+
+    h = img[mask == level]
+    suv_x = numpy.percentile(h, p)
+
+    return suv_x
+
+
+def main(parameterFile):
+    fhome=os.path.expanduser('~')
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+
+
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+
+    #segmentation layout
+    project=pars['project']
+    dataset=pars['targetQuery']
+    schema=pars['targetSchema']
+    view=pars['viewName']
+    segSchema=pars['segmentationSchema']
+    segQuery=pars['segmentationQuery']
+    qQuery=pars['percentileQuery']
+    segVersion=pars['version']
+    segVersionI=int(pars['versionNumber'])
+
+    tempBase=pars['tempBase']
+    if not os.path.isdir(tempBase):
+        os.makedirs(tempBase)
+    
+    participantField=pars['participantField']
+
+    #all images from database
+    ds=db.selectRows(project,schema,dataset,[],view)
+    
+    petField=pars['images']['PET']['queryField']
+    
+    rows=ds['rows']
+    #rows=[ds['rows'][0]]
+
+    pv=numpy.concatenate((numpy.linspace(10,50,5),
+            numpy.linspace(55,80,6),  
+            numpy.linspace(82,90,5),
+            numpy.linspace(91,100,10)))
+
+    #debug, select
+    #rows1=[r for r in rows \
+    #        if r['patientCode']=='NIX-LJU-D2002-IRAE-A010' \
+    #        and r['visitCode']=='VISIT_6']
+    #rows=rows1
+
+    for r in rows:
+        localPET=os.path.join(tempBase,'PET.nii.gz')
+        localSeg=os.path.join(tempBase,'Seg.nii.gz')
+        for f in [localPET,localSeg]:
+            if os.path.isfile(f):
+                os.remove(f)
+
+        #build image path
+        remoteDir=fb.buildPathURL(project,[pars['imageDir'],\
+            r['patientCode'],r['visitCode']])
+        print('{}: {}'.format(petField,r[petField]))
+        remotePET=remoteDir+'/'+r[petField]
+        print('{}:{}'.format(remotePET,fb.entryExists(remotePET)))
+
+        vFilter={'variable':'version','value':segVersion,'oper':'eq'}
+        idFilter={'variable':'patientCode','value':r['patientCode'], 'oper':'eq'}
+        visitFilter={'variable':'visitCode','value':r['visitCode'], 'oper':'eq'}
+
+        dsSeg=db.selectRows(project,segSchema,segQuery,[idFilter,visitFilter,vFilter])
+        if len(dsSeg['rows'])!=1:
+            print('Failed to get segmentation for {}/{}'.format(r[participantField],segVersion))
+        if len(dsSeg['rows'])==0:
+            #skip the visit when no segmentation is available (avoids an IndexError below)
+            continue
+
+        remoteSeg=remoteDir+'/'+dsSeg['rows'][0]['segmentation']
+
+        print('{}:{}'.format(remoteSeg,fb.entryExists(remoteSeg)))
+
+        fb.readFileToFile(remotePET,localPET)
+        fb.readFileToFile(remoteSeg,localSeg)
+        
+        niPET=nibabel.load(localPET)
+        niSeg=nibabel.load(localSeg)
+        #3 lungs
+        #4 thyroid
+        #5 bowel
+        dsP=db.selectRows(project,schema,qQuery,[idFilter,visitFilter,vFilter])
+        db.modifyRows('delete',project,schema,qQuery,dsP['rows'])
+        dspRows=[]
+        for level in [3,4,5]:
+            try:
+                v=organ_percentile(niPET.get_fdata(),niSeg.get_fdata(),level,pv)
+            except IndexError:
+                print('Error for {}/{}: {}'.format(r['patientCode'],r['visitCode'],level))
+                continue
+            for (x,y) in zip(pv,v):
+                #get existing entry
+                seqNum=r['SequenceNum']+0.0001*x+0.01*segVersionI
+                #print('[{:.8f}] {}/{}: {}/{}'.format(seqNum,r['patientCode'],r['visitCode'],x,y))
+
+#                sFilter={'variable':'SequenceNum','value':'{}'.format(seqNum),'oper':'eq'}
+#                oFilter={'variable':'organ','value':'{}'.format(level),'oper':'eq'}
+#                mode='update'
+#                if len(dsP['rows'])==0:
+#                    mode='insert'
+                rowDSP={x:r[x] for x in [participantField,'patientCode','visitCode']}
+                rowDSP['SequenceNum']=seqNum
+                rowDSP['segmentationVersion']=segVersion
+#                else:
+#                    rowDSP=dsP['rows'][0]
+                rowDSP['percentile']=x
+                rowDSP['value']=y
+                rowDSP['organ']=level
+                dspRows.append(rowDSP)
+        db.modifyRows('insert',project,schema,qQuery,dspRows)
+
+    
+
+    print('Done')
+
+
+if __name__ == '__main__':
+    main(sys.argv[1])
+
+
+
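
Two small illustrations of the bookkeeping above, with made-up values: the percentile extraction over a labelled region, and the SequenceNum encoding (visit SequenceNum + 0.01*segmentation version + 0.0001*percentile) used for the stored rows.

import numpy

#percentiles of image values inside the label-3 region (synthetic data)
img=numpy.random.default_rng(0).normal(size=(10,10,10))
mask=numpy.zeros((10,10,10))
mask[2:5,2:5,2:5]=3
pv=numpy.array([50,90,95])
print(numpy.percentile(img[mask==3],pv))

#SequenceNum encoding: base visit SequenceNum 2, segmentation version 5 (illustrative)
segVersionI=5
for x in [50,95]:
    print(x,2+0.0001*x+0.01*segVersionI)   #2.055 and 2.0595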

+ 83 - 18
pythonScripts/populateImagingFromTransferList.py

@@ -1,5 +1,8 @@
 #date sorts studies from orthanc dataset into target study dataset
 
+#takes transferQuery as the list of images that should be available on orthanc
+
+
 import os
 import json
 import re
@@ -13,7 +16,12 @@ def main(parameterFile):
     with open(fsetup,'r') as f:
         setup=json.load(f)
 
-    sys.path.insert(0,setup['paths']['labkeyInterface'])
+    sys.path.insert(0,setup['paths']['nixWrapper'])
+
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
     import labkeyInterface
     import labkeyDatabaseBrowser
     import labkeyFileBrowser
@@ -48,17 +56,55 @@ def main(parameterFile):
     transferQuery=pars['Database']['transferQuery']
     dbParticipantField=pars['Database']['participantField']
 
-    #make a list of images
+    missingSchema=pars['Database']['missingImagesSchema']
+    missingQuery=pars['Database']['missingImagesQuery']
+
+    #make a list of images from transferQuery
     dsImage=db.selectRows(projectStudy,outputSchema,transferQuery,[])
 
     for im in dsImage['rows']:
-        idFilter={'variable':inputParticipantField,'value':im[dbParticipantField],\
-            'oper':'eq'}
+
+
+        #for orthanc
+        inputIdFilter={'variable':inputParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+
+        #for database
+        idFilter={'variable':dbParticipantField,\
+                'value':im[dbParticipantField],\
+                'oper':'eq'}
+
+
+        seqNum=im['imagingVisitId']
+        seqFilter={'variable':'SequenceNum','value':str(seqNum),'oper':'eq'}
+
+        
+        #for speedup one should check if a match was already done in Database/queryName
+        #ds1 are the matching outputs in target dataset
+        ds1=db.selectRows(projectStudy,outputSchema,outputQuery,\
+                [idFilter,seqFilter])
+
+        if len(ds1['rows'])>1:
+            #never happens (idFilter and seqFilter)
+            print('ERROR: too many matches in {} for {}/{}'.\
+                    format(outputQuery,im[dbParticipantField],seqNum))
+            continue
+        if len(ds1['rows'])>0:
+            #just the one match, fine
+            print('Entry for {}/{} already resolved'.\
+                    format(im[dbParticipantField],seqNum))
+            continue
+
         #have to convert from datetime to %Y%m%d format
         #dateFilter={'variable':'imageDate','value':im['imageDate'],'oper':'eq'}
 
-        dsOrthanc=db.selectRows(projectOrthanc,inputSchema,inputQuery,[idFilter])
+        dsOrthanc=db.selectRows(projectOrthanc,inputSchema,inputQuery,[inputIdFilter])
+        #what if dsOrthanc['rows'] is empty?
+        #this is part of QA and is reported in missingImages Schema/Query
 
+        uploadStatus="FAILED"
+        imageDateMismatch=[]
         for im1 in dsOrthanc['rows']:
 
             date=datetime.datetime.strptime(im1['studyDate'],'%Y/%m/%d %H:%M:%S') 
@@ -66,6 +112,7 @@ def main(parameterFile):
             dateYMD=date.strftime('%Y%m%d')
             if dateYMD!=im['imageDate']:
                 print('Rejecting mismatch: {}/{}'.format(dateYMD,im['imageDate']))
+                imageDateMismatch.append(dateYMD)
                 continue
         
             outvar='NONE'
@@ -84,18 +131,9 @@ def main(parameterFile):
 
             #figure out which row in output study to update
             filters=[]
-            idFilter={'variable':dbParticipantField,'value':im[dbParticipantField],'oper':'eq'}
-            seqNum=im['imagingVisitId']
-            seqFilter={'variable':'SequenceNum','value':str(seqNum),'oper':'eq'}
-            print('Participant {} Sequence number {}'.format(im[dbParticipantField],str(seqNum)))
-            #ds1 are the matching outputs in target dataset
-            ds1=db.selectRows(projectStudy,outputSchema,outputQuery,\
-                [idFilter,seqFilter])
+            print('Participant {} Sequence number {}'.\
+                    format(im[dbParticipantField],str(seqNum)))
     
-            if len(ds1['rows'])>1:
-                print('ERROR: too many matches for {}/{}'.\
-                    format(im[dbParticipantField],seqNum))
-                continue
 
             mode='update'
             outRow={}
@@ -107,16 +145,43 @@ def main(parameterFile):
                 outRow['dicomStudy']=im1['dicomStudy']
         
             else:
+                #never happens if we check ds1 before matches are found
                 outRow=ds1['rows'][0]
         
+            
             outRow[outvar]=im1['orthancSeries']
             outRow['studyDate']=im1['studyDate']
             outRow['imagingVisitId']=im['imagingVisitId']
             outRow['visitCode']='VISIT_'+str(im['imagingVisitId'])
 
-            status=db.modifyRows(mode,projectStudy,outputSchema,outputQuery,[outRow])
-            print('{}'.format(status))
+            modifyStatus=db.modifyRows(mode,projectStudy,outputSchema,\
+                    outputQuery,[outRow])
+            print('{}'.format(modifyStatus))
+            uploadStatus="LOADED"
         
+        if uploadStatus=="FAILED":
+            #standard spiel - find if already present; if so, skip, if not, add
+            imFilter={'variable':'imagingVisitId',
+                    'value':'{}'.format(im['imagingVisitId']),
+                    'oper':'eq'}
+            dsMissing=db.selectRows(projectStudy,missingSchema,\
+                    missingQuery,[idFilter,imFilter])
+
+            #already recorded
+            imageDateMismatch=list(set(imageDateMismatch))
+            vals=[dbParticipantField,'imagingVisitId','imageDate']
+            mode='insert'
+            if len(dsMissing['rows'])>0:
+                mode='update'
+                orow=dsMissing['rows'][0]
+            else:
+                orow={v:im[v] for v in vals}
+            orow['imageDateMismatch']=','.join(imageDateMismatch)
+            orow['failureDescription']='MISSING DICOM'
+            db.modifyRows(mode,projectStudy,missingSchema,\
+                    missingQuery,[orow])
+            
+       
     print("Done")
 
 if __name__=='__main__':
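
The missing-image bookkeeping above follows an insert-or-update pattern against the missingImages list; a minimal stand-alone sketch, where dsMissing and im mimic selectRows output and a transfer row (all values are illustrative).

dbParticipantField='ParticipantId'
im={'ParticipantId':'P001','imagingVisitId':2,'imageDate':'20211109'}
dsMissing={'rows':[]}                       #pretend nothing was recorded yet

vals=[dbParticipantField,'imagingVisitId','imageDate']
mode='insert'
if len(dsMissing['rows'])>0:
    #a row already exists for this participant/visit - update it in place
    mode='update'
    orow=dsMissing['rows'][0]
else:
    orow={v:im[v] for v in vals}
orow['imageDateMismatch']='20211108,20211110'   #illustrative rejected study dates
orow['failureDescription']='MISSING DICOM'
print(mode,orow)                            #this row would be passed to db.modifyRows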

+ 15 - 8
pythonScripts/preprocess.py

@@ -18,7 +18,7 @@ def getStudyLabel(row,participantField='PatientId'):
     return getPatientLabel(row,participantField)+'-'+getVisitLabel(row)
 
 def runPreprocess_DM(matlab,generalCodes,niftiTools,studyDir):
-
+    print("Running matlab")
     #run after all directories have been assembled
     script="addpath('"+generalCodes+"');"
     script+="addpath('"+niftiTools+"');"
@@ -46,7 +46,7 @@ def getDicom(ofb,row,zipDir,rawDir,im,imageSelector,\
     if seriesId=="0":
         return False
 
-    print("{}: {}".format(im,seriesId))
+    print("getDicom: {}: {}".format(im,seriesId))
     fname=os.path.join(zipDir,\
             getStudyLabel(row,participantField)+'_'+im+".zip");
 
@@ -54,7 +54,7 @@ def getDicom(ofb,row,zipDir,rawDir,im,imageSelector,\
     if os.path.isfile(fname):
         print("Data already loaded. Skipping")
     else:
-        print("Loading data from orthanc")
+        print("getDicom: Loading data from orthanc")
         ofb.getZip('series',seriesId,fname)
 
     #unzip the zipped dicom series
@@ -88,17 +88,21 @@ def updateRow(db,project,dataset,row,imageResampledField,gzFileNames,\
  
 
 def main(parameterFile):
-    shome=os.path.expanduser('~nixUser')
     fhome=os.path.expanduser('~')
     with open(os.path.join(fhome,".labkey","setup.json")) as f:
         setup=json.load(f)
 
-    sys.path.insert(0,setup["paths"]["labkeyInterface"])
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    import nixWrapper
+
+    nixWrapper.loadLibrary("labkeyInterface")
+
     import labkeyInterface
     import labkeyDatabaseBrowser
     import labkeyFileBrowser
 
-    sys.path.insert(0,setup["paths"]["orthancInterface"])
+    nixWrapper.loadLibrary("orthancInterface")
+    
     import orthancInterface
     import orthancFileBrowser
 
@@ -107,6 +111,7 @@ def main(parameterFile):
     matlab=setup["paths"]["matlab"]
     generalCodes=setup["paths"]["generalCodes"]
     niftiTools=setup["paths"]["niftiTools"]
+    gzip=setup['paths']['gzip']
 
     net=labkeyInterface.labkeyInterface()
     net.init(fconfig)
@@ -144,7 +149,7 @@ def main(parameterFile):
 
     i=0
     for row in ds["rows"]:
-
+        print("Starting row id:{} seq:{}".format(row[participantField],row['SequenceNum']))
         #interesting files are processedDir/studyName_CT_notCropped_2mmVoxel.nii
         #and processedDir/studyName_PET_notCropped_2mmVoxel.nii
         volumeFileNames={im:\
@@ -177,6 +182,8 @@ def main(parameterFile):
     
         #setup the directory structure for preprocess_DM
         studyDir=os.path.join(tempBase,getStudyLabel(row,participantField))
+        print("Making local directories in {}".format(studyDir))
+
         if not os.path.isdir(studyDir):
             os.mkdir(studyDir)
 
@@ -220,7 +227,7 @@ def main(parameterFile):
 
             for f in volumeFiles.values():
                 print("Running gzip {}".format(f))
-                outText=subprocess.check_output(["/bin/gzip",f])
+                outText=subprocess.check_output([gzip,f])
                 print(outText.decode('utf-8'))
 
         #upload local files to remote

+ 1 - 2
pythonScripts/runPython.sh

@@ -1,4 +1,3 @@
 #!/bin/bash
 
-
-nohup python3 $1  0<&- &> $HOME/logs/runPython.log &
+nohup python3 -u $@  0<&- &> $HOME/logs/runPython.log &

+ 5 - 0
pythonScripts/runPythonnnUNet.sh

@@ -0,0 +1,5 @@
+#!/bin/bash
+LOG=$HOME/logs/runPython.log
+rm $LOG;
+. ~/venv/nnUNet/bin/activate
+nohup python -u $@  0<&- &> $LOG &

+ 0 - 0
pythonScripts/runSegmentation.py → pythonScripts/runSegmentationDM.py


+ 259 - 0
pythonScripts/runSegmentationnnUNet.py

@@ -0,0 +1,259 @@
+import os
+import json
+import re
+import subprocess
+import nibabel
+import shutil
+import sys
+import pathlib
+import numpy
+
+#nothing gets executed on import; run as a script (entry point at the bottom)
+
+def getPatientLabel(row,participantField='PatientId'):
+    return row[participantField].replace('/','_') 
+
+def getVisitLabel(row):
+    return 'VISIT_'+str(int(row['SequenceNum']))
+
+def getStudyLabel(row,participantField='PatientId'):
+    return getPatientLabel(row,participantField)+'-'+getVisitLabel(row)
+
+
+def updateRow(db,project,dataset,row,imageResampledField,gzFileNames,\
+        participantField='PatientId'):
+    row['patientCode']=getPatientLabel(row,participantField)
+    row['visitCode']=getVisitLabel(row)
+    for im in imageResampledField:
+        row[imageResampledField[im]]=gzFileNames[im]
+    db.modifyRows('update',project,'study',dataset,[row])
+ 
+def replacePatterns(infile,outfile,replacePatterns):
+    of=open(outfile,'w')
+    with open(infile,'r') as f:
+        data=f.read()
+        for p in replacePatterns:
+            val=replacePatterns[p]
+            data=re.sub(p,val,data)
+    of.write(data)
+    of.close()
+    
+def valueSubstitution(pars,val):
+    if val.find('__home__')>-1:
+        val=re.sub(r'__home__',os.path.expanduser('~'),val)
+
+    return val
+
+def getSuffix(tempFile):
+    p=pathlib.Path(tempFile)
+    return ''.join(p.suffixes)
+
+def getSegmImagePath(tempFile):
+    sfx=getSuffix(tempFile)
+    return re.sub(sfx,'_Segm'+sfx,tempFile)
+
+def addVersion(tempFile,version):
+    sfx=getSuffix(tempFile)
+    return re.sub(sfx,'_'+version+sfx,tempFile)
+
+def addnnUNetCode(tempFile,fileNumber=0):
+    sfx=getSuffix(tempFile)
+    return re.sub(sfx,'_'+'{:04d}'.format(fileNumber)+sfx,tempFile)
+
+def runnnUNet(setup,pars):
+    args=[]
+    #path to the nnUNet inference executable
+    args.append(setup['paths']['nnUNetRunInference'])
+    #location of input images
+    args.extend(['-i',os.path.join(pars['tempBase'],'CT')])
+    #output path is segmentations
+    args.extend(['-o',os.path.join(pars['tempBase'],'segmentations')])
+    #modelid, nnUNet internal rules.
+    args.extend(['-t',pars['nnUNet']['ModelId']])
+    #specify configuration (3d_fullres)
+    args.extend(['-m',pars['nnUNet']['configuration']])
+    print(args) 
+    my_env = os.environ.copy()
+    for key in pars['nnUNet']['env']:
+        my_env[key]=pars['nnUNet']['env'][key]
+      
+    print(subprocess.run(args,env=my_env,check=True,stdout=subprocess.PIPE).stdout)
+
+def getSegmentationFile(pars):
+    #this is how deep medic stores files
+    return os.path.join(pars['tempBase'],'segmentations',\
+            pars['images']['CT']['tempFile'])
+            
+
+def runSegmentation(fb,row,pars,setup):
+    
+     
+    #download to temp file (could be a fixed name)
+    project=pars['project']
+    images=pars['images']
+    participantField=pars['participantField']
+    baseDir=fb.formatPathURL(project,pars['imageDir']+'/'+\
+        getPatientLabel(row,participantField)+'/'+\
+        getVisitLabel(row))
+    
+    #download CT
+    ctDir=os.path.join(pars['tempBase'],'CT')
+    if not os.path.isdir(ctDir):
+        os.mkdir(ctDir)
+    fullFile=os.path.join(ctDir,images['CT']['tempFile']) 
+
+    fullFile=addnnUNetCode(fullFile)
+    fb.readFileToFile(baseDir+'/'+row[images['CT']['queryField']],fullFile)
+    
+    #debug
+
+    #run nnUNet
+    runnnUNet(setup,pars)
+
+    #processed file is
+    segFile=getSegmentationFile(pars)
+    #SimpleITK.WriteImage(outImg,segFile)
+    return segFile
+
+        
+def test(parameterFile):
+    
+    fhome=os.path.expanduser('~')
+
+    
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    
+    import nixWrapper
+    
+    nixWrapper.loadLibrary("labkeyInterface")#force reload
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    nixWrapper.loadLibrary("parseConfig")
+    import parseConfig
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+    
+    pars=parseConfig.convert(pars)
+    pars=parseConfig.convertValues(pars)
+    #print(pars)
+
+
+    
+
+def doSegmentation(parameterFile):
+    fhome=os.path.expanduser('~')
+
+    
+    with open(os.path.join(fhome,".labkey","setup.json")) as f:
+        setup=json.load(f)
+
+    sys.path.insert(0,setup["paths"]["nixWrapper"])
+    
+    import nixWrapper
+    
+    nixWrapper.loadLibrary("labkeyInterface")#force reload
+    import labkeyInterface
+    import labkeyDatabaseBrowser
+    import labkeyFileBrowser
+
+    nixWrapper.loadLibrary("parseConfig")
+    import parseConfig
+
+    with open(parameterFile) as f:
+        pars=json.load(f)
+    
+    pars=parseConfig.convert(pars)
+    pars=parseConfig.convertValues(pars)
+    
+    project=pars['project']
+    dataset=pars['targetQuery']
+    schema=pars['targetSchema']
+    view=pars['viewName']
+
+
+    tempBase=pars['tempBase']
+    if not os.path.isdir(tempBase):
+        os.makedirs(tempBase)
+
+    #start the database interface
+    fconfig=os.path.join(fhome,'.labkey','network.json')
+    net=labkeyInterface.labkeyInterface()
+    net.init(fconfig)
+    db=labkeyDatabaseBrowser.labkeyDB(net)
+    fb=labkeyFileBrowser.labkeyFileBrowser(net)
+
+
+    #all images from database
+    ds=db.selectRows(project,schema,dataset,[],view)
+    
+    #input
+    #use webdav to transfer file (even though it is localhost)
+
+ 
+    i=0
+    #for debugging, process only the first row:
+    #rows=[ds['rows'][0]]
+    #production mode
+    rows=ds['rows']
+    for row in rows:
+       
+
+        #build file name 
+        sfx=pars['images']['segmentation']['suffix']
+        outpath=fb.buildPathURL(pars['project'],[pars['imageDir'],row['patientCode'],row['visitCode']])
+        outName=addVersion(\
+                getSegmImagePath(\
+                    getStudyLabel(row,pars['participantField'])+sfx),\
+                pars['version'])
+
+        outFile=outpath+'/'+outName
+
+        #check if file is there
+        if not fb.entryExists(outFile):
+
+            
+            segFile=getSegmentationFile(pars)
+            #remove existing file
+            if os.path.isfile(segFile):
+                os.remove(segFile)
+            
+            segFile=runSegmentation(fb,row,pars,setup)
+            #copy file to file
+            #normally I would update the targetQuery, but it contains previously set images
+            #copy to labkey
+            fb.writeFileToFile(segFile,outFile)
+            print(segFile)
+        #debug 
+
+        #update database
+        copyFields=[pars['participantField'],'SequenceNum','patientCode','visitCode']
+        row['SequenceNum']+=0.001*float(pars['versionNumber'])
+        filters=[{'variable':v,'value':str(row[v]),'oper':'eq'} for v in copyFields]
+        filters.append({'variable':'Version','value':pars['version'],'oper':'eq'})
+
+        ds1=db.selectRows(pars['project'],pars['segmentationSchema'],pars['segmentationQuery'],filters)
+
+        if len(ds1['rows'])>0:
+            mode='update'
+            outRow=ds1['rows'][0]
+        else:
+            mode='insert'
+            outRow={v:row[v] for v in copyFields}
+        outRow['Version']= pars['version']
+        outRow['Segmentation']= outName
+        print(db.modifyRows(mode,pars['project'],pars['segmentationSchema'],pars['segmentationQuery'],[outRow]))
+        #push results back to LabKey
+    print("Done")
+
+
+if __name__ == '__main__':
+    #test(sys.argv[1])
+    doSegmentation(sys.argv[1])
+    #sys.exit()
+
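
The file-naming convention used when staging nnUNet input and uploading the result follows from the helper functions above; a short stand-alone restatement of those helpers, applied to an example label (patient code taken from the debug comment in organ_percentile.py, visit made up).

import pathlib,re

def getSuffix(f):
    return ''.join(pathlib.Path(f).suffixes)
def getSegmImagePath(f):
    sfx=getSuffix(f)
    return re.sub(sfx,'_Segm'+sfx,f)
def addVersion(f,version):
    sfx=getSuffix(f)
    return re.sub(sfx,'_'+version+sfx,f)
def addnnUNetCode(f,fileNumber=0):
    sfx=getSuffix(f)
    return re.sub(sfx,'_'+'{:04d}'.format(fileNumber)+sfx,f)

name='NIX-LJU-D2002-IRAE-A010-VISIT_6.nii.gz'
print(addnnUNetCode(name))                     #..._0000.nii.gz, the staged CT input name
print(addVersion(getSegmImagePath(name),'v5')) #..._Segm_v5.nii.gz, the uploaded segmentation name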

+ 4 - 0
templates/modifyDataset.json

@@ -0,0 +1,4 @@
+{"project":"IPNUMMprospektiva/Study",
+ "schema":"lists",
+ "query":"crfEntry"
+}

+ 4 - 0
templates/preprocess.json

@@ -0,0 +1,4 @@
+{ "config": 
+	"http://onko-nix.onko-i.si:8080/labkey/_webdav/Analysis/Run/%40files/configuration/prospectiveIRAEMM.json"
+}
+

+ 14 - 0
templates/preprocessRetrospectiveIRAEMM.json

@@ -0,0 +1,14 @@
+{
+	"Orthanc":{
+		"project":"Orthanc/Database",
+		"schemaName":"study",
+		"queryName":"Imaging",
+		"participantField":"PatientId"
+	},
+	"Database":{
+		"project":"IPNUMMretro/Study",
+		"schemaName":"study",
+		"queryName":"Imaging1",
+		"participantField":"PatientId"
+	}
+}

+ 17 - 0
templates/prospectiveIRAEMM.json

@@ -0,0 +1,17 @@
+{ "Orthanc":
+	{"project":"Orthanc/Database",
+	"schemaName":"study",
+	"queryName":"Imaging",
+	"demographicsQuery":"Demographics",
+	"participantField":"PatientId"
+	},
+  "Database":
+  	{"project":"IPNUMMprospektiva/Study",
+	 "schemaName":"study",
+	 "queryName":"Imaging1",
+ 	 "participantField":"ParticipantId",
+	"transferQuery":"imageTransferReport",
+	"missingImagesQuery":"missingImages",
+	"missingImagesSchema":"lists"
+	}
+}

+ 37 - 45
templates/segmentation.json

@@ -1,47 +1,54 @@
 {
- "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__","__suffix__"],
  "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
- "__tempBase__":"__home__/temp/segmentationdm",
- "__segBase__":"/home/studen/software/src/irAEMM/segmentation",
- "__roiFile__":"testMask.nii.gz",
- "__ctFile__":"testCT.nii.gz",
- "__petFile__":"testPET.nii.gz",
- "__segFile__":"segmentation.nii.gz",
+ "__tempBase__":"__home__/temp/segmentation",
+ "__segBase__":"/home/andrej/software/src/irAEMM/segmentation",
+ "__roiFile__":"testMask",
+ "__ctFile__":"testCT",
+ "__petFile__":"testPET",
+ "__segFile__":"segmentation",
  "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "__suffix__":"nii.gz",
  "tempBase":"__tempBase__",
  "model":"__model__",
- "project":"IPNUMMprospektiva/Study",
+ "project":"iPNUMMretro/Study",
  "targetSchema":"study",
  "targetQuery":"Imaging1",
- "participantField":"ParticipantId",
+ "participantField":"PatientId",
  "imageDir":"preprocessedImages",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "version":"v3",
  "images":{
-	"CT":{
-		"queryField":"ctResampled",
-		"tempFile":"__ctFile__"},
-	"PET":{
-		"queryField":"petResampled",
-		"tempFile":"__petFile__"},
-	"patientmask":{
-		"queryField":"ROImask",
-		"tempFile":"__roiFile__"
-	},
-	"segmentation":{
-		"tempFile":"__segFile__",
-		"comment":"no queryField not to download segmentation"
+	"comment":"weight is coded as a sequence of intervals, with [k,n] of a linear function in between",
+	"images":{
+		"CT":{
+			"queryField":"ctResampled",
+			"tempFile":"__tempBase__/__ctFile__.__suffix__",
+			"fileList":"__tempBase__/testChannels_CT.cfg"
+		},
+		"patientmask":{
+			"queryField":"ROImask",
+			"tempFile":"__tempBase__/__roiFile__.__suffix__",
+			"fileList":"__tempBase__/testRoiMasks.cfg"
+		},
+		"segmentations":{
+			"tempFile":"__segFile__.__suffix__",
+			"fileList":"__tempBase__/testNamesOfPredictions.cfg"
+		}
 	}
  },
  "replacePattern":{
 	 "__workDir__":"__tempBase__",
-	 "__roi__":"__tempBase__/__roiFile__",
-	 "__pet__":"__tempBase__/__petFile__",
-	 "__ct__":"__tempBase__/__ctFile__",
-	 "__seg__":"__tempBase__/__segFile__",
-	 "__model__":"__modelName__"
+	 "__roi__":"__tempBase__/__roiFile__.__suffix__",
+	 "__pet__":"__tempBase__/__petFile__.__suffix__",
+	 "__ct__":"__tempBase__/__ctFile__.__suffix__",
+	 "__seg__":"__segFile__.__suffix__",
+	 "__model__":"__modelName__",
+	 "__segmentBase__":"__segBase__",
+	 "__sfx__":"__suffix__"
  },
  "deepmedic": {
-	 "dockerWorkDir": "/home/studen/temp/segmentationdm",
-	 "dockerYAML":"/home/studen/scripts/docker/segmentationdm.yaml",
 	 "config":{
 		 "model":{
 		 	"template":"__segBase__/model/modelConfig.cfg.template",
@@ -50,22 +57,7 @@
 	 	"test":{
 			"template":"__segBase__/test/testConfig.cfg.template",
 		 	"out":"__tempBase__/testConfig.cfg"
-	 	},
-		"predictions":{
-			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
-			"out":"__tempBase__/testNamesOfPredictions.cfg"
-		},
-		"CT":{
-			"template":"__segBase__/test/testChannels_CT.cfg.template",
-			"out":"__tempBase__/testChannels_CT.cfg"
-		},
-		"ROI":{
-			"template":"__segBase__/test/testRoiMasks.cfg.template",
-			"out":"__tempBase__/testRoiMasks.cfg"
-		}
-
-
-
+	 	}
 	 }
  }
 

+ 99 - 0
templates/segmentation3.json

@@ -0,0 +1,99 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__","__suffix__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentation",
+ "__segBase__":"/home/andrej/software/src/irAEMM/segmentation",
+ "__roiFile__":"testMask",
+ "__ctFile__":"testCT",
+ "__petFile__":"testPET",
+ "__segFile__":"segmentation",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "__suffix__":"nii.gz",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"iPNUMMretro/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "participantField":"PatientId",
+ "imageDir":"preprocessedImages",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "version":"v3",
+ "images":{
+	"comment":"weight is coded as a sequence of intervals, with [k,n] of a linear function in between",
+ 	"crop":{
+		"00":{
+			"range":["0","0.5"],
+			"axis":"2",
+			"n":"NONE",
+			"w":[
+				{"range":["0.0","0.3333"],"n":"1"},
+				{"range":["0.3333","0.4166"],"k":"-12","n":"5"},
+				{"range":["0.4166","1"],"n":"0"}
+			]
+		},
+		"01":{
+			"range":["0.25","0.75"],
+			"axis":"2",
+			"n":"NONE",
+			"w":[
+				{"range":["0.0","0.3333"],"n":"0"},
+				{"range":["0.3333","0.4166"],"k":"12","n":"-4"},
+				{"range":["0.4166","0.5833"],"n":"1"},
+				{"range":["0.5833","0.6666"],"k":"-12","n":"8"},
+				{"range":["0.6666","1"],"n":"0"}
+			]
+		},
+		"02":{
+			"range":["0.5","1"],
+			"axis":"2",
+			"n":"NONE",
+			"w":[
+				{"range":["0.0","0.5833"],"n":"0"},
+				{"range":["0.5833","0.6666"],"k":"12","n":"-7"},
+				{"range":["0.6666","1"],"n":"1"}
+			]
+		}
+	},
+	"images":{
+		"CT":{
+			"queryField":"ctResampled",
+			"tempFile":"__tempBase__/__ctFile__.__suffix__",
+			"fileList":"__tempBase__/testChannels_CT.cfg"
+		},
+		"patientmask":{
+			"queryField":"ROImask",
+			"tempFile":"__tempBase__/__roiFile__.__suffix__",
+			"fileList":"__tempBase__/testRoiMasks.cfg"
+		},
+		"segmentations":{
+			"tempFile":"__segFile__.__suffix__",
+			"fileList":"__tempBase__/testNamesOfPredictions.cfg"
+		}
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__.__suffix__",
+	 "__pet__":"__tempBase__/__petFile__.__suffix__",
+	 "__ct__":"__tempBase__/__ctFile__.__suffix__",
+	 "__seg__":"__segFile__.__suffix__",
+	 "__model__":"__modelName__",
+	 "__segmentBase__":"__segBase__",
+	 "__sfx__":"__suffix__"
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	}
+	 }
+ }
+
+
+}
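
The consumer of the "crop"/"w" weights is not part of this commit, so the following is only a plausible reading of the encoding: each interval of relative position x along the axis either fixes the weight to n or applies k*x+n. Shown on the values of crop "01" above.

def weight(w,x):
    #piecewise evaluation: constant n, or k*x+n where k is given
    for seg in w:
        lo,hi=[float(v) for v in seg['range']]
        if lo<=x<=hi:
            return float(seg.get('k','0'))*x+float(seg['n'])
    return 0.0

w01=[{"range":["0.0","0.3333"],"n":"0"},
     {"range":["0.3333","0.4166"],"k":"12","n":"-4"},
     {"range":["0.4166","0.5833"],"n":"1"},
     {"range":["0.5833","0.6666"],"k":"-12","n":"8"},
     {"range":["0.6666","1"],"n":"0"}]
for x in [0.2,0.375,0.5,0.625,0.8]:
    print(x,round(weight(w01,x),2))   #ramps 0 -> 1 -> 0 across the middle half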

+ 87 - 0
templates/segmentationIRAEMM.json

@@ -0,0 +1,87 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentationdm",
+ "__segBase__":"/home/studen/software/src/iraemm/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"/iPNUMMretro/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "viewName":"segmentationReview",
+ "participantField":"PatientId",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "reportQuery":"reportImages",
+ "reportSchema":"lists",
+ "percentileQuery":"SUVQuantiles",
+ "imageDir":"preprocessedImages",
+ "version":"v5",
+ "versionNumber":"5",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"},
+	"segmentation":{
+		"suffix":".nii.gz"
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "nnUNet":{
+	 "ModelId":"501",
+	 "configuration":"3d_fullres",
+	 "env":{
+		"nnUNet_raw_data_base":"__tempBase__",
+		"nnUNet_preprocessed":"__tempBase__",
+		"RESULTS_FOLDER":"/home/studen/software/src/iraemmsegmentationmodels"
+	 }
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+
+}

+ 87 - 0
templates/segmentationIRAEMM_ONKO.json

@@ -0,0 +1,87 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/iraemm",
+ "__segBase__":"/home/studen/software/src/iraemm/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"IPNUMMprospektiva/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "viewName":"segmentationReview",
+ "participantField":"ParticipantId",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "reportQuery":"reportImages",
+ "reportSchema":"lists",
+ "percentileQuery":"SUVQuantiles",
+ "imageDir":"preprocessedImages",
+ "version":"v5",
+ "versionNumber":"5",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"},
+	"segmentation":{
+		"suffix":".nii.gz"
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "nnUNet":{
+	 "ModelId":"501",
+	 "configuration":"3d_fullres",
+	 "env":{
+		"nnUNet_raw_data_base":"__tempBase__",
+		"nnUNet_preprocessed":"__tempBase__",
+		"RESULTS_FOLDER":"/home/studen/software/src/iraemmsegmentationmodels"
+	 }
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+
+}

+ 66 - 0
templates/segmentationPreproces.json

@@ -0,0 +1,66 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentation",
+ "__segBase__":"/home/nixUser/software/src/irAEMM/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"IPNUMMprospektiva/Study",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "participantField":"ParticipantId",
+ "imageDir":"preprocessedImages",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+}

+ 84 - 0
templates/segmentationTCIA.json

@@ -0,0 +1,84 @@
+{
+ "setVariables":["__tempBase__","__segBase__","__roiFile__","__petFile__","__ctFile__","__segFile__","__modelName__"],
+ "setVariablesComment":"this variables will get updated with local values like home and can be used to set variables further on",
+ "__tempBase__":"__home__/temp/segmentationdm",
+ "__segBase__":"/home/studen/software/src/iraemm/segmentation",
+ "__roiFile__":"testMask.nii.gz",
+ "__ctFile__":"testCT.nii.gz",
+ "__petFile__":"testPET.nii.gz",
+ "__segFile__":"segmentation.nii.gz",
+ "__modelName__":"DM_defaults.DM_train_VISCERAL16_Fold1.final.2019-10-01.07.46.19.932616.model.ckpt",
+ "tempBase":"__tempBase__",
+ "model":"__model__",
+ "project":"/Test/segmentationTest",
+ "targetSchema":"study",
+ "targetQuery":"Imaging1",
+ "viewName":"segmentationReview",
+ "participantField":"ParticipantId",
+ "segmentationSchema":"study",
+ "segmentationQuery":"Segmentations",
+ "imageDir":"preprocessedImages",
+ "version":"v5",
+ "versionNumber":"5",
+ "images":{
+	"CT":{
+		"queryField":"ctResampled",
+		"tempFile":"__ctFile__"},
+	"PET":{
+		"queryField":"petResampled",
+		"tempFile":"__petFile__"},
+	"patientmask":{
+		"queryField":"ROImask",
+		"tempFile":"__roiFile__"},
+	"segmentation":{
+		"suffix":".nii.gz"
+	}
+ },
+ "replacePattern":{
+	 "__workDir__":"__tempBase__",
+	 "__roi__":"__tempBase__/__roiFile__",
+	 "__pet__":"__tempBase__/__petFile__",
+	 "__ct__":"__tempBase__/__ctFile__",
+	 "__seg__":"__tempBase__/__segFile__",
+	 "__model__":"__modelName__"
+ },
+ "nnUNet":{
+	 "ModelId":"501",
+	 "configuration":"3d_fullres",
+	 "env":{
+		"nnUNet_raw_data_base":"__tempBase__",
+		"nnUNet_preprocessed":"__tempBase__",
+		"RESULTS_FOLDER":"/home/studen/software/src/iraemmsegmentationmodels"
+	 }
+ },
+ "deepmedic": {
+	 "config":{
+		 "model":{
+		 	"template":"__segBase__/model/modelConfig.cfg.template",
+		 	"out":"__tempBase__/modelConfig.cfg"
+	 	},
+	 	"test":{
+			"template":"__segBase__/test/testConfig.cfg.template",
+		 	"out":"__tempBase__/testConfig.cfg"
+	 	},
+		"predictions":{
+			"template":"__segBase__/test/testNamesOfPredictions.cfg.template",
+			"out":"__tempBase__/testNamesOfPredictions.cfg"
+		},
+		"CT":{
+			"template":"__segBase__/test/testChannels_CT.cfg.template",
+			"out":"__tempBase__/testChannels_CT.cfg"
+		},
+		"ROI":{
+			"template":"__segBase__/test/testRoiMasks.cfg.template",
+			"out":"__tempBase__/testRoiMasks.cfg"
+		}
+
+
+
+	 }
+ }
+
+
+
+}