#module for storing the files/parameters/... for the tracking code.
#Is it necessary to reload the import to re-call all functions?

import glob
import os, errno
import numpy as np
import datetime as dt
from mpi4py import MPI
import netCDF4
import netcdftime as nct

commWorld = MPI.COMM_WORLD
myRank = commWorld.Get_rank()
nRanks = commWorld.size

def getLimits_startStop(iStartGlobal, iEndGlobal, iWork=myRank, nWork=nRanks):
  #assign contiguous chunks in a sequence to processors. just leave the leftovers to the last processor(s).
  #when the length isn't divisible by the number of workers, this isn't the best solution, but we can optimize for that later.
  #(a small usage demo is sketched under __main__ at the bottom of this file)
  
  #+1 since both endpoints are inclusive; ceil so the chunks cover every element
  szChunk = int(np.ceil( (iEndGlobal-iStartGlobal+1)/float(nWork) ))
  if (szChunk<1):
    print 'Check logic in getLimits_startStop for your strange case w/ more workers than elements'
  
  iStart = iStartGlobal+iWork*szChunk
  iEnd = iStart+szChunk-1
  iEnd = min(iEndGlobal,iEnd)
  
  return (iStart,iEnd)

rEarth = 6370.e3 #radius of spherical Earth (m)
dFilter = 300.e3 #radius for whether local extremum is regional extremum
areaOverlap = .01 #fraction of tpv area overlap for determining correspondence
latThresh = 30.*np.pi/180. #segment N of this latitude
trackMinMaxBoth = 0 #0-min, 1-max (2-both shouldn't be used w/o further development)

info = '30N_erai_ecmwf'

years = ['1979','1980','1981','1982','1983','1984','1985','1986','1987','1988',
         '1989','1990','1991','1992','1993','1994','1995','1996','1997','1998',
         '1999','2000','2001','2002','2003','2004','2005','2006','2007','2008',
         '2009','2010','2011','2012','2013','2014','2015','2016']
nYears = 38

fDirData = '/keyserlab_rit/kbiernat/erai_ecmwf/'
filesData = []
for year in years:
  filesData.append(fDirData+year+'/u_pv.'+year+'.nc')
  filesData.append(fDirData+year+'/v_pv.'+year+'.nc')
  filesData.append(fDirData+year+'/pt_pv.'+year+'.nc')

#time information of input data
timeStart0 = dt.datetime(1979,1,1,0,0)
#timeEnd0 = dt.datetime(1979,12,31,18,0)
timeEnd1 = dt.datetime(2016,3,31,18,0)
deltaT = 6.*60.*60. #timestep between file times (s)
timeDelta = dt.timedelta(seconds=deltaT)

nctime = nct.utime('hours since 1800-01-01 00:00:00')
#tNumStart0 = nctime.date2num(timeStart0)
#tNumEnd0 = nctime.date2num(timeEnd0)
tNumEnd1 = nctime.date2num(timeEnd1)

#data0 = netCDF4.Dataset(filesData[0],'r')
data1 = netCDF4.Dataset(filesData[111],'r') #u_pv file for the final year (2016), used to locate timeEnd1
#time0 = data0.variables['time'][:]
time1 = data1.variables['time'][:]

#start and end time indices to use from each year's files
#(the final year stops at the index of timeEnd1; all earlier years use 0 and -1)
iTimeStart_fData = [0]*nYears
iTimeEnd_fData = [-1]*(nYears-1) + [np.where(time1==tNumEnd1)[0][0]]

fDirSave = '/keyserlab_rit/kbiernat/track/szapiro_python/erai_0.5_tpv_climatology/'
if not os.path.exists(fDirSave):
  os.makedirs(fDirSave)

fMesh = filesData[0]
fMetr = fDirSave+'fields.nc'
#fMetr = '/keyserlab_rit/kbiernat/track/szapiro_python/erai_0.5_tpv_climatology/fields.nc'
fSegFmt = fDirSave+'seg_{0}.nc'
fSeg = fSegFmt.format(myRank) #per-rank segmentation output
fSegFinal = fDirSave+'seg.nc'
#fSeg = fSegFinal #for after running seg in parallel...
fCorr = fDirSave+'correspond_horizPlusVert.nc'
fTrackFmt = fDirSave+'tracks_{0}.nc'
fTrack = fTrackFmt.format(myRank) #per-rank track output
fTrackFinal = fDirSave+'tracks_low.nc'
fMetrics = fDirSave+'metrics.nc'

inputType = 'erai_ecmwf'

#which stages of the tracking code to run
doPreProc = True
doSeg = False
doMetrics = False
doCorr = False
doTracks = False

def silentremove(filename):
  #from http://stackoverflow.com/questions/10840533/most-pythonic-way-to-delete-a-file-which-may-not-exist
  print "Removing file (if it exists): ",filename
  try:
    os.remove(filename)
  except OSError as e: #this would be "except OSError, e:" before Python 2.6
    if e.errno != errno.ENOENT: #errno.ENOENT = no such file or directory
      raise #re-raise the exception if a different error occurred
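
#----------------------------------------------------------------------------
#Illustrative sketch only, not part of the tracking configuration: running
#this module directly (e.g., mpirun -n 4 python <thisModule>.py) prints how
#getLimits_startStop splits a global range of time indices across the MPI
#ranks, and which per-rank output files this rank would write. The 100-step
#range below is a made-up placeholder, not a value read from the input files.
if __name__ == '__main__':
  nTimesDemo = 100 #hypothetical number of global time steps
  iStart, iEnd = getLimits_startStop(0, nTimesDemo-1) #defaults split over myRank/nRanks
  print 'Rank %d/%d owns time indices [%d,%d]; writes %s and %s'%(myRank, nRanks, iStart, iEnd, fSeg, fTrack)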