import ivp
import os
import cModel
import sys
import json
import numpy
import time
import scipy.interpolate
import shutil

defaultValues={
    'method':'LSODA',
    'atol':1e-4,
    'rtol':1e-4,
    't0':0,
    'tmax':1,
    'mode':'IVPSimultaneous',
    'nt':201,
    'tUnit':'day',
    'network':'labkey-klimt.json',
    'startFromRef':'None',
    'project':'Analysis/Run',
    'schema':'lists',
    'query':'runs',
    'view':'WithStrings'}

def get(setup,par):
    try:
        return setup[par]
    except KeyError:
        pass
    return defaultValues[par]

def parseSetup(setupFile):
    with open(setupFile,'r') as f:
        setup=json.load(f)
    out={}
    for p in defaultValues:
        out[p]=get(setup,p)
    #q=[x if not x in out for x in setup]
    q=[x for x in setup if x not in out]
    for x in q:
        out[x]=setup[x]
    return out

def getScale(setup):
    tUnit=setup['tUnit']
    return convertUnit(tUnit)

def convertUnit(tUnit):
    #scale factor from tUnit to minutes
    if tUnit=='min':
        return 1
    if tUnit=='hour':
        return 60
    if tUnit=='day':
        return 24*60
    if tUnit=='month':
        return 24*60*30
    if tUnit=='year':
        return 24*60*365
    return 1

def getStrides(setup):
    tmax=float(setup['tmax'])*convertUnit(setup['tUnit'])
    t0=setup['t0']
    print('t0 {} tmax {}'.format(t0,tmax))
    try:
        stride=setup['stride']
    except KeyError:
        return [{'label':'','length':tmax-t0}]
    #cover tmax with steps of length stride['length']
    strides=[]
    strideLength=stride['length']*convertUnit(stride['tUnit'])
    i=0
    #the original loop body is missing in this copy; the reconstruction below
    #follows the comment above (cover [t0,tmax] in chunks of strideLength) and
    #assumes strides are labelled by their index
    while t0<tmax:
        strides.append({'label':'{}'.format(i),
            'length':min(strideLength,tmax-t0)})
        t0+=strideLength
        i+=1
    return strides

def main(parFiles,jobDir):
    #the opening of main() is missing in this copy; judging by the code below
    #it unpacks parFiles into setup, modelFile and parameterFile, builds the
    #cModel instance (model), the time scale (scale=getScale(setup)) and the
    #start point (startPoint=getStartPoint(setup) with t0 and s0), and
    #allocates the reordered start array sIn
    if len(s0)>0:#condition reconstructed from context (s0 is empty without a start reference)
        for x in model.lutSE:
            #this could be modified for different ordering of lut as well
            sIn[:,model.lutSE[x]]=s0[:,startPoint['lutSE'][x]]
    #copy t0 to setup to help get the strides right
    setup['t0']=t0
    strides=getStrides(setup)
    #number of strides is a guess
    lastFiles=None
    for step in strides:
        #check if stored
        readable=['t','sol','se','qt','sOut']
        l=step['label']
        files=[os.path.join(jobDir,'{}{}.txt'.format(x,l)) for x in readable]
        filesPresent=all([os.path.isfile(x) for x in files])
        if not filesPresent:
            break
        print('Completed step {}: {}'.format(l,filesPresent))
        lastFiles={x:'{}{}.txt'.format(x,l) for x in readable}
    print('Completed run, lastFiles={}'.format(lastFiles))
    data=loadSolutionFromFiles(setup,jobDir,[lastFiles])
    endPoint=startPointObject(data)
    tmax=endPoint['t0']
    print('tmax({})={} {}'.format(tmax,tmax/scale,setup['tUnit']))
    setup['tmax']=tmax/scale
    #update setup with new content (particularly t0)
    setupOut=os.path.join(jobDir,'setup.json')
    with open(setupOut,'w+') as f:
        f.write(json.dumps(setup))
    #this is 3D, so new routines
    print('Written {}'.format(setupOut))
    #write model and parameter file
    origFiles=[modelFile,parameterFile]
    outFiles=["model.json","parameters.json"]
    for x in zip(origFiles,outFiles):
        xOut=os.path.join(jobDir,x[1])
        shutil.copyfile(x[0],xOut)
        print('Written {}'.format(xOut))

def interpolate(setup,model,t,s1,t0,tmax):
    #interpolate on s1
    #s1 has shape ft x n x m
    #where ft is LSODE driven number of points
    sOut=numpy.zeros((setup['nt'],model.n,model.m))
    qt=numpy.linspace(t0,tmax,setup['nt'])
    for i in range(model.n):
        for j in range(model.m):
            y=s1[:,i,j]
            f=scipy.interpolate.interp1d(t,y,kind='cubic')
            sOut[:,i,j]=f(qt)
    return qt,sOut

def invertLUT(lut):
    fcode=[None]*len(lut)
    for x in lut:
        fcode[lut[x]]=x
    return fcode

def write3D(fName,a,lut,lutSE):
    with open(fName,'w') as f:
        f.write('#Shape {}\n'.format(a.shape))
        f.write('#Lut {}\n'.format(invertLUT(lut)))
        f.write('#Lut (SE) {}\n'.format(invertLUT(lutSE)))
        i=0
        for data_slice in a:
            f.write('#\t Slice {}\n'.format(i))
            numpy.savetxt(f,data_slice)
            i+=1
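#Example of the on-disk layout produced by write3D and parsed by read3D,
#parseShape and parseLUT below; the numeric rows come from numpy.savetxt, one
#slice along the first axis at a time (compartment and SE names here are
#illustrative only):
#
#   #Shape (3, 2, 4)
#   #Lut ['plasma', 'tissue']
#   #Lut (SE) ['k1', 'k2', 'vd', 'q']
#   #    Slice 0
#   <2 rows of 4 numbers>
#   #    Slice 1
#   ...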
def parseLUT(line,isSE=False):
    SE=''
    if isSE:
        SE='(SE)'
    header='#Lut {}'.format(SE)
    names=line.replace(header,'').\
        replace('[','').\
        replace(']','').\
        replace(' ','').\
        replace('\n','').\
        replace("'",'').\
        split(',')
    lut={names[i]:i for i in range(len(names))}
    return lut

def parseShape(line):
    shape=line.replace('#Shape ','').\
        replace('\n','').\
        replace('(','').\
        replace(')','').\
        split(',')
    return [int(x) for x in shape]

def read3D(fName):
    new_data=numpy.loadtxt(fName)
    #read and parse first three lines to get shape, lut and lutSE
    nLines=3
    with open(fName,'r') as f:
        lines=[f.readline() for i in range(nLines)]
    shape=parseShape(lines[0])
    lut=parseLUT(lines[1])
    lutSE=parseLUT(lines[2],isSE=True)
    return new_data.reshape(shape),lut,lutSE

def loadSolutionFromRef(setup, loadAll=False):
    if setup['startFromRef']=='None':
        return {'t':0,'sol':numpy.array([]),'sOut':numpy.array([]),
            'x':numpy.array([]),'y':numpy.array([])}
    ref=setup['startFromRef']
    return loadSolutionFromRunRef(setup,ref,loadAll)

def connect(setup):
    nixSuite=os.path.join(os.path.expanduser('~'),'software','src','nixSuite')
    sys.path.append(os.path.join(nixSuite,'wrapper'))
    import nixWrapper
    nixWrapper.loadLibrary('labkeyInterface')
    import labkeyInterface
    import labkeyFileBrowser
    import labkeyDatabaseBrowser
    net=labkeyInterface.labkeyInterface()
    fconfig=os.path.join(os.path.expanduser('~'),'.labkey',setup['network'])
    net.init(fconfig)
    #debug
    net.getCSRF()
    db=labkeyDatabaseBrowser.labkeyDB(net)
    fb=labkeyFileBrowser.labkeyFileBrowser(net)
    return db,fb

def getRunRef(setup,key):
    #get solution from key rather than runRef (easier to copy, understand)
    db,fb=connect(setup)
    keyFilter={'variable':'Key','value':'{}'.format(key),'oper':'eq'}
    ds=db.selectRows(setup['project'],setup['schema'],setup['query'],
        [keyFilter],setup['view'])
    try:
        if len(ds['rows'])==0:
            return 'NONE'
    except KeyError:
        print(ds)
        return 'NONE'
    print('Found {} rows for {}'.format(len(ds['rows']),key))
    return ds['rows'][0]['runRef']
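#Hypothetical usage sketch (not part of the original pipeline): resolve a run
#by its list Key and pull its files from LabKey. The key value and setup file
#name are made up, and a configured ~/.labkey network file is assumed.
def demoLoadByKey(setupFile='setup.json',key=42):
    setup=parseSetup(setupFile)
    ref=getRunRef(setup,key)
    if ref=='NONE':
        return None
    return loadSolutionFromRunRef(setup,ref,loadAll=True)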
def loadSolutionFromRunRef(setup,ref,loadAll=False):
    db,fb=connect(setup)
    remoteDir=fb.formatPathURL(setup['project'],'/'.join(['jobs',ref]))
    try:
        localDir=setup['localDir']
    except KeyError:
        localDir=os.path.join(os.path.expanduser('~'),'temp','jobDir')
    if not os.path.isdir(localDir):
        os.mkdir(localDir)
    #download setup
    fs=['setup.json','model.json','parameters.json']
    for f in fs:
        localPath=os.path.join(localDir,f)
        remotePath='/'.join([remoteDir,f])
        fb.readFileToFile(remotePath,localPath)
    setupFile=os.path.join(localDir,fs[0])
    setupOld=parseSetup(setupFile)
    #getFiles returns one {variable:fileName} dict per stride
    inFiles=getFiles(setupOld,loadAll)
    #inFiles.extend(parFiles)
    i=0
    n=len(inFiles)
    for fSet in inFiles:
        for x in fSet:
            f=fSet[x]
            localPath=os.path.join(localDir,f)
            remotePath='/'.join([remoteDir,f])
            fb.readFileToFile(remotePath,localPath)
        print('Loaded {}/{} {}'.format(i+1,n,localDir))
        i+=1
    return loadSolutionFromDir(localDir,loadAll)

def merge(a1,a2):
    try:
        return numpy.concatenate((a1,a2))
    except ValueError:
        return a2

def loadSolutionFromDir(jobDir,loadAll=False):
    setupFile=os.path.join(jobDir,'setup.json')
    setup=parseSetup(setupFile)
    inFiles=getFiles(setup,loadAll)
    return loadSolutionFromFiles(setup,jobDir,inFiles)

def loadSolutionFromFiles(setup,jobDir,inFiles):
    isFirst=True
    operators={x:numpy.loadtxt for x in inFiles[0]}
    operators['sOut']=read3D
    data={}
    i=0
    n=len(inFiles)
    for fSet in inFiles:
        idata={}
        print('Parsing [{}/{}]'.format(i+1,n))
        for f in fSet:
            path=os.path.join(jobDir,fSet[f])
            idata=operators[f](path)
            if f=='sOut':
                lut=idata[1]
                lutSE=idata[2]
                idata=idata[0]
            try:
                data[f]=numpy.concatenate((data[f],idata))
            except KeyError:
                data[f]=idata
        i+=1
    data['lut']=lut
    data['lutSE']=lutSE
    data['setup']=setup
    data['model']=os.path.join(jobDir,'model.json')
    data['parameters']=os.path.join(jobDir,'parameters.json')
    return data

def getStartPointFromDir(jobDir):
    data=loadSolutionFromDir(jobDir)
    return startPointObject(data)

def startPointObject(data):
    vrs={'t':'t0','sol':'y0','sOut':'s0'}
    out={vrs[x]:data[x][-1] for x in vrs}
    addVrs=['lut','lutSE']
    for x in addVrs:
        out[x]=data[x]
    return out

def getStartPoint(setup):
    if setup['startFromRef']=='None':
        return {'t0':0,'y0':numpy.array([]),'s0':numpy.array([]),
            'lut':[],'lutSE':[]}
    data=loadSolutionFromRef(setup)
    return startPointObject(data)

if __name__=="__main__":
    parFiles=sys.argv[1].split(';')
    jobDir=sys.argv[2]
    main(parFiles,jobDir)
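#Hypothetical example (not part of the original pipeline) of a minimal
#setup.json accepted by parseSetup(): keys omitted here fall back to
#defaultValues, extra keys are passed through, and the optional 'stride'
#block is read in getStrides() for its 'length' and 'tUnit'. The file name
#and values are made up.
def demoWriteSetup(fName='setup_example.json'):
    setup={'tmax':30,'tUnit':'day','nt':201,
        'startFromRef':'None',
        'stride':{'length':1,'tUnit':'day'}}
    with open(fName,'w') as f:
        f.write(json.dumps(setup,indent=2))
    return parseSetup(fName)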