import os
import json
import re
import subprocess
import nibabel
import shutil
import sys

# Importing this module has no side effects; call main() to run the pipeline.

def getPatientLabel(row,participantField='PatientId'):
    """Return the participant id from row, with '/' replaced by '_' so it is filesystem-safe."""
    rawId=row[participantField]
    return rawId.replace('/','_')

def getVisitLabel(row):
    """Return 'VISIT_<n>' where <n> is row['SequenceNum'] truncated to an integer."""
    seq=int(row['SequenceNum'])
    return 'VISIT_{}'.format(seq)

def getStudyLabel(row,participantField='PatientId'):
    """Return the combined study label '<patientLabel>-<visitLabel>' for this row."""
    parts=[getPatientLabel(row,participantField),getVisitLabel(row)]
    return '-'.join(parts)

def runPreprocess_DM(matlab,generalCodes,niftiTools,studyDir):
    """Run the MATLAB preprocess_DM step over studyDir in batch mode.

    matlab is the path to the MATLAB executable; generalCodes and
    niftiTools are directories added to the MATLAB path before the call.
    Run after all study directories have been assembled.
    Returns True on success, False when the process exits non-zero.
    """
    print("Running matlab")
    commands=[
        "addpath('"+generalCodes+"');",
        "addpath('"+niftiTools+"');",
        "preprocess_DM('"+studyDir+"',0,0);",
        # NOTE(review): 'test;' looks like a leftover debug call -- confirm
        "test;",
        "quit();",
    ]
    script=''.join(commands)
    try:
        outText=subprocess.check_output([matlab,"-nojvm","-r",script])
    except subprocess.CalledProcessError as e:
        print("Failed with:\n{}".format(e.output.decode('utf-8')))
        return False
    print(outText.decode('utf-8'))
    return True


def getDicom(ofb,row,zipDir,rawDir,im,imageSelector,participantField='PatientId'):
    """Download and unzip one dicom series for this row.

    Loads the dicom zip from orthanc (via ofb) unless it is already cached
    at the expected path in zipDir, then extracts it into rawDir/im.

    Parameters:
    ofb -- orthancFileBrowser used to fetch the series zip
    row -- dataset row holding the orthanc series ids
    zipDir -- directory caching the downloaded zip files
    rawDir -- directory receiving the unzipped series (subdirectory named im)
    im -- image key, e.g. 'CT' or 'PET'
    imageSelector -- maps im to the row field that stores the series id
    participantField -- row field holding the participant id

    Returns True for a valid outcome; False when the row carries no series
    ("0") or the unzip step fails.
    """
    seriesId=row[imageSelector[im]]
    if seriesId=="0":
        #no series recorded for this image type on this row
        return False

    print("getDicom: {}: {}".format(im,seriesId))
    fname=os.path.join(zipDir,
            getStudyLabel(row,participantField)+'_'+im+".zip")

    #copy data from orthanc unless the zip is already cached locally
    if os.path.isfile(fname):
        print("Data already loaded. Skipping")
    else:
        print("getDicom: Loading data from orthanc")
        ofb.getZip('series',seriesId,fname)

    #unzip the zipped dicom series
    unzipDir=os.path.join(rawDir,im)

    if os.path.isdir(unzipDir):
        print("Data already unzipped")
        return True

    try:
        os.mkdir(unzipDir)
    except FileExistsError:
        #directory appeared in the meantime: replace it with a fresh empty
        #one (the original removed it but never recreated it before unzip)
        shutil.rmtree(unzipDir)
        os.mkdir(unzipDir)

    try:
        #NOTE(review): "-xj" parses as -x (exclude) plus -j (junk paths);
        #the intent was probably just "-j" -- confirm against the data
        outTxt=subprocess.check_output(["unzip","-d",unzipDir,"-xj",fname])
    except subprocess.CalledProcessError:
        print("unzip failed for {}".format(fname))
        return False

    return True

def updateRow(db,project,dataset,row,imageResampledField,gzFileNames,\
        participantField='PatientId'):
    """Record the labels and output file names on row, then push the update.

    Sets patientCode/visitCode plus one gz file name per image type on the
    row and issues a single 'update' against the study schema via db.
    """
    row['patientCode']=getPatientLabel(row,participantField)
    row['visitCode']=getVisitLabel(row)
    for key,field in imageResampledField.items():
        row[field]=gzFileNames[key]
    db.modifyRows('update',project,'study',dataset,[row])
 

def main(parameterFile):
    """Run the preprocessing pipeline for every row of the configured dataset.

    For each study row: download its dicom series from orthanc, run the
    MATLAB preprocess_DM step, gzip the resampled volumes, upload them to
    labkey over webdav and record the output file names back on the row.
    Rows whose outputs are already uploaded are only re-recorded.

    parameterFile -- path to a JSON file with a 'Database' section
    (project, queryName, schemaName, participantField).
    """
    fhome=os.path.expanduser('~')
    #server/tool configuration shared by all studies lives in ~/.labkey
    with open(os.path.join(fhome,".labkey","setup.json")) as f:
        setup=json.load(f)

    #nixWrapper provides the labkey/orthanc client modules at runtime
    sys.path.insert(0,setup["paths"]["nixWrapper"])
    import nixWrapper

    nixWrapper.loadLibrary("labkeyInterface")

    import labkeyInterface
    import labkeyDatabaseBrowser
    import labkeyFileBrowser

    nixWrapper.loadLibrary("orthancInterface")
    
    import orthancInterface
    import orthancFileBrowser

    fconfig=os.path.join(fhome,'.labkey','network.json')

    #external tools configured in setup.json
    matlab=setup["paths"]["matlab"]
    generalCodes=setup["paths"]["generalCodes"]
    niftiTools=setup["paths"]["niftiTools"]
    gzip=setup['paths']['gzip']

    #labkey database and file (webdav) connections
    net=labkeyInterface.labkeyInterface()
    net.init(fconfig)
    db=labkeyDatabaseBrowser.labkeyDB(net)
    fb=labkeyFileBrowser.labkeyFileBrowser(net)

    #orthanc (dicom server) connection
    onet=orthancInterface.orthancInterface()
    onet.init(fconfig)
    ofb=orthancFileBrowser.orthancFileBrowser(onet)


    with open(parameterFile) as f:
        pars=json.load(f)

    hi=0  #NOTE(review): unused variable
    project=pars['Database']['project']
    dataset=pars['Database']['queryName']
    schema=pars['Database']['schemaName']

    #local scratch space for downloads and MATLAB outputs
    tempBase=os.path.join(fhome,'temp')


    participantField=pars['Database']['participantField']

    #all images from database
    ds=db.selectRows(project,schema,dataset,[])
    #input: row fields holding the orthanc series ids per image type
    imageSelector={"CT":"CT_orthancId","PET":"PETWB_orthancId"}
    #output: row fields that will hold the resampled file names
    imageResampledField={"CT":"ctResampled","PET":"petResampled","patientmask":"ROImask"}

    #use webdav to transfer file (even though it is localhost)


 

    i=0
    for row in ds["rows"]:
        print("Starting row id:{} seq:{}".format(row[participantField],row['SequenceNum']))
        #interesting files are processedDir/studyName_CT_notCropped_2mmVoxel.nii
        #and processedDir/studyName_PET_notCropped_2mmVoxel.nii
        volumeFileNames={im:\
            getStudyLabel(row,participantField)+'_'+im+
            '_notCropped_2mmVoxel.nii'\
                for im in imageResampledField}
        gzFileNames={im:f+".gz" \
            for (im,f) in volumeFileNames.items()}
    
        #build/check remote directory structure
        remoteDir=fb.buildPathURL(project,['preprocessedImages',\
            getPatientLabel(row,participantField),getVisitLabel(row)])

        gzRemoteFiles={im:remoteDir+'/'+f\
            for (im,f) in gzFileNames.items()}

        remoteFilePresent=[fb.entryExists(f)\
            for f in gzRemoteFiles.values()]

        for f in gzRemoteFiles.values():
            print("[{}]: [{}]".format(f,fb.entryExists(f)))


        #skip the heavy work when every output is already uploaded,
        #but still refresh the row so the file names are recorded
        if all(remoteFilePresent):
            print("Entry for row done.")
            updateRow(db,project,dataset,row,imageResampledField,\
                gzFileNames,participantField)
            continue

    
        #setup the directory structure for preprocess_DM
        studyDir=os.path.join(tempBase,getStudyLabel(row,participantField))
        print("Making local directories in {}".format(studyDir))

        if not os.path.isdir(studyDir):
            os.mkdir(studyDir)

        rawDir=os.path.join(studyDir,'Raw')
        if not os.path.isdir(rawDir):
            os.mkdir(rawDir)

        zipDir=os.path.join(studyDir,'Zip')
        if not os.path.isdir(zipDir):
            os.mkdir(zipDir)

        processedDir=os.path.join(studyDir,'Processed')
        if not os.path.isdir(processedDir):
            os.mkdir(processedDir)

        #specify local file names with path 
        volumeFiles={im:os.path.join(processedDir,f)\
            for (im,f) in volumeFileNames.items()}
        gzFiles={im:f+".gz"\
            for (im,f) in volumeFiles.items()}

        filesPresent=[os.path.isfile(f) for f in gzFiles.values()]
    
    
        #only (re)generate the volumes that are not already on disk locally
        if not all(filesPresent):

            #use imageSelector -> inputs
            for im in imageSelector:
                #checks if raw files are already loaded
                getDicom(ofb,row,zipDir,rawDir,im,imageSelector,\
                    participantField)


    
            #preprocess and zip
            ok=runPreprocess_DM(matlab,generalCodes,niftiTools,studyDir)
            if not ok:
                #MATLAB failed: drop the scratch dir and move on to next row
                shutil.rmtree(studyDir)
                continue


            for f in volumeFiles.values():
                print("Running gzip {}".format(f))
                outText=subprocess.check_output([gzip,f])
                print(outText.decode('utf-8'))

        #upload local files to remote
        for im in gzFiles:
            local=gzFiles[im]
            remote=gzRemoteFiles[im]
            print("Uploading {}".format(local))
            fb.writeFileToFile(local,remote)


        #update row and let it know where the processed files are
        updateRow(db,project,dataset,row,imageResampledField,gzFileNames,\
            participantField)
   

        #cleanup
        shutil.rmtree(studyDir)
    

        #NOTE(review): i never equals -1, so this break is dead debug code
        if i==-1:
            break
        i=i+1

    print("Done")


if __name__ == '__main__':
    #usage: <script> parameterFile.json
    main(sys.argv[1])