#Assemble CT/PET DICOM series from Orthanc, run the MATLAB preprocess_DM step to
#produce resampled NIfTI volumes, upload the gzipped results to LabKey and update
#the Imaging1 dataset rows with the generated file names.
import os
import json
import re
import subprocess
import nibabel
import shutil
import sys

shome=os.path.expanduser('~nixUser')
sys.path.insert(1,shome+'/software/src/labkeyInterface')
import labkeyInterface
import labkeyDatabaseBrowser
import labkeyFileBrowser

sys.path.insert(1,shome+'/software/src/orthancInterface')
import orthancInterface
import orthancFileBrowser

#sys.path.insert(1,shome+'/software/src/IPNUMM/dicomUtils')
#import loadDicom

fhome=os.path.expanduser('~')
fconfig=os.path.join(fhome,'.labkey','network.json')

matlab=os.path.join("/","data","software","install","matlab","bin","matlab")
generalCodes=os.path.join(fhome,"software","src","generalCodes")
niftiTools=os.path.join(fhome,"software","src","NifTiScripts")

net=labkeyInterface.labkeyInterface()
net.init(fconfig)
db=labkeyDatabaseBrowser.labkeyDB(net)
fb=labkeyFileBrowser.labkeyFileBrowser(net)

onet=orthancInterface.orthancInterface()
onet.init(fconfig)
ofb=orthancFileBrowser.orthancFileBrowser(onet)

hi=0
project='iPNUMMretro/Study'
dataset='Imaging1'
tempBase=os.path.join(fhome,'temp')

#all images from database
ds=db.selectRows(project,'study','Imaging1',[])
imageSelector={"CT":"CT","PETWB":"PET"}
imageResampledField={"CT":"ctResampled","PETWB":"petResampled"}

niftiBase='/data/nifti'
labkeyBase='/data/labkey'
#projectNIfTIBase=os.path.join(labkeyBase,'files',project,'@files/nifti')

#use webdav to transfer files (even though the server is localhost)

def getPatientLabel(row):
    return row['PatientId'].replace('/','_')

def getVisitLabel(row):
    return 'VISIT_'+str(int(row['SequenceNum']))

def getStudyLabel(row):
    return getPatientLabel(row)+'-'+getVisitLabel(row)

def runPreprocess_DM(matlab,generalCodes,niftiTools,studyDir):
    #run after all directories have been assembled;
    #builds a one-line MATLAB batch script and executes it
    script="addpath('"+generalCodes+"');"
    script+="addpath('"+niftiTools+"');"
    script+="preprocess_DM('"+studyDir+"',0,0);"
    script+="test;"
    script+="quit();"
    #outText=subprocess.check_output(["/bin/echo",script])
    try:
        outText=subprocess.check_output([matlab,"-nojvm","-r",script])
    except subprocess.CalledProcessError as e:
        print("Failed with:\n{}".format(e.output.decode('utf-8')))
        return False
    print(outText.decode('utf-8'))
    return True

def getDicom(ofb,row,zipDir,rawDir,im,imageSelector):
    #Download the DICOM zip file from Orthanc and unzip it. If the zip file is
    #already at the expected path, the download step is skipped.
    #Return True for a valid outcome and False for problems in row formatting
    #or unzip failures.
    seriesId=row[im]
    if seriesId=="0":
        return False
    print("{}: {}".format(im,seriesId))
    fname=os.path.join(zipDir,getStudyLabel(row)+'_'+imageSelector[im]+".zip")
    #copy data from orthanc
    if os.path.isfile(fname):
        print("Data already loaded. Skipping")
    else:
        print("Loading data from orthanc")
        ofb.getZip('series',seriesId,fname)
    #unzip the zipped dicom series
    unzipDir=os.path.join(rawDir,imageSelector[im])
    if os.path.isdir(unzipDir):
        print("Data already unzipped")
        return True
    try:
        os.mkdir(unzipDir)
    except FileExistsError:
        shutil.rmtree(unzipDir)
    try:
        #-j junks the archive paths so the DICOM files land directly in unzipDir
        outTxt=subprocess.check_output(["unzip","-j","-d",unzipDir,fname])
    except subprocess.CalledProcessError:
        print("unzip failed for {}".format(fname))
        return False
    return True

def updateRow(project,dataset,row,imageResampledField,gzFileNames):
    row['patientCode']=getPatientLabel(row)
    row['visitCode']=getVisitLabel(row)
    for im in imageResampledField:
        row[imageResampledField[im]]=gzFileNames[im]
    db.modifyRows('update',project,'study',dataset,[row])

i=0
for row in ds["rows"]:
    #interesting files are processedDir/studyName_CT_notCropped_2mmVoxel.nii
    #and processedDir/studyName_PET_notCropped_2mmVoxel.nii
    volumeFileNames={im:getStudyLabel(row)+'_'+imageSelector[im]+'_notCropped_2mmVoxel.nii'
        for im in imageSelector}
    gzFileNames={im:f+".gz"
        for (im,f) in volumeFileNames.items()}

    #build/check remote directory structure
    remoteDir=fb.buildPathURL(project,
        ['preprocessedImages',getPatientLabel(row),getVisitLabel(row)])
    gzRemoteFiles={im:remoteDir+'/'+f
        for (im,f) in gzFileNames.items()}
    remoteFilePresent=[fb.entryExists(f)
        for f in gzRemoteFiles.values()]
    for f in gzRemoteFiles.values():
        print("[{}]: [{}]".format(f,fb.entryExists(f)))

    if all(remoteFilePresent):
        print("Entry for row done.")
        updateRow(project,dataset,row,imageResampledField,gzFileNames)
        continue

    #set up the directory structure expected by preprocess_DM
    studyDir=os.path.join(tempBase,getStudyLabel(row))
    if not os.path.isdir(studyDir):
        os.mkdir(studyDir)
    rawDir=os.path.join(studyDir,'Raw')
    if not os.path.isdir(rawDir):
        os.mkdir(rawDir)
    zipDir=os.path.join(studyDir,'Zip')
    if not os.path.isdir(zipDir):
        os.mkdir(zipDir)
    processedDir=os.path.join(studyDir,'Processed')
    if not os.path.isdir(processedDir):
        os.mkdir(processedDir)

    #specify local file names with path
    volumeFiles={im:os.path.join(processedDir,f)
        for (im,f) in volumeFileNames.items()}
    gzFiles={im:f+".gz"
        for (im,f) in volumeFiles.items()}

    filesPresent=[os.path.isfile(f) for f in gzFiles.values()]

    if not all(filesPresent):
        for im in imageSelector:
            #getDicom checks internally whether the raw files are already loaded
            getDicom(ofb,row,zipDir,rawDir,im,imageSelector)

        #preprocess and zip
        ok=runPreprocess_DM(matlab,generalCodes,niftiTools,studyDir)
        if not ok:
            shutil.rmtree(studyDir)
            continue

        for f in volumeFiles.values():
            print("Running gzip {}".format(f))
            outText=subprocess.check_output(["/bin/gzip",f])
            print(outText.decode('utf-8'))

    #upload local files to remote
    for im in gzFiles:
        #for local,remote in zip(gzFiles,gzRemoteFiles):
        local=gzFiles[im]
        remote=gzRemoteFiles[im]
        print("Uploading {}".format(local))
        fb.writeFileToFile(local,remote)

    #update row to record where the processed files are
    updateRow(project,dataset,row,imageResampledField,gzFileNames)

    #cleanup
    shutil.rmtree(studyDir)

    #debugging limiter (disabled as written): change the comparison to a row count to stop early
    if i==-1:
        break
    i=i+1

print("Done")