Surface NMR processing and inversion GUI

gateIntegrate4.py 14KB

from __future__ import division
import matplotlib as mpl
mpl.use('pdf')
#from rasterize import rasterize_and_save
import matplotlib.patches as mpatches
from pwctime import pwcTime
from logbarrier import *
from perlin import perlin
from scipy import stats
import cmocean
import sys
import numpy as np
import seaborn as sns

def bootstrapWindows(N, nboot, isum, adapt=False):
    """ Bootstraps noise as a function of gate width
        N = input noise signal
        nboot = number of bootstrap windows to perform
        isum = window lengths (L_i)
        adapt = reduce nboot as window size increases
    """
    nc = np.shape(N)[0]
    Means = {}
    if adapt:
        Means = -9999*np.ones((len(isum), nboot//isum[0]))  # dummy value
        for ii, nwin in enumerate(isum):
            for iboot in range(nboot//isum[ii]):
                cs = np.random.randint(0, nc-nwin)
                Means[ii, iboot] = np.mean( N[cs:cs+nwin] )
        Means = np.ma.masked_less(Means, -9995)
    else:
        Means = np.zeros((len(isum), nboot))
        for ii, nwin in enumerate(isum):
            for iboot in range(nboot):
                cs = np.random.randint(0, nc-nwin)
                Means[ii, iboot] = np.mean( N[cs:cs+nwin] )
    return Means, np.array(isum)
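
# Illustrative sketch (editor's addition, kept as comments so the module runs unchanged):
# how the bootstrapped window means are typically reduced to a per-gate noise estimate.
# The synthetic noise record and window lengths below are hypothetical; gateTest() shows
# the actual call used in this script.
#
#   noise = np.random.normal(0., 2., 1800)                       # synthetic noise record
#   Means, L = bootstrapWindows(noise, 20000, np.arange(2, 40), adapt=True)
#   sig_std = np.ma.std(Means, axis=1, ddof=1)                   # std of windowed means
#   sig_mad = np.ma.median(np.ma.abs(Means), axis=1) / stats.norm.ppf(0.75)   # robust MAD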

def gateIntegrate(T2D, T2T, gpd, sigma, stackEfficiency=2.):
    """ Gate integrate the signal to gpd, gates per decade
        T2D = the time series to gate integrate, complex
        T2T = the abscissa values
        gpd = gates per decade
        sigma = estimate of standard deviation for theoretical gate noise
        stackEfficiency = exponent in theoretical gate noise, 2 represents ideal stacking
    """
    # use artificial time gates so that early times are fully captured
    T2T0 = T2T[0]
    T2TD = T2T[0] - (T2T[1]-T2T[0])
    T2T -= T2TD

    #####################################
    # calculate total number of decades #
    # window edges are approximate until binning, then adjusted to reflect the data
    # timing; this primarily impacts bins containing only a few samples
    nd = np.log10(T2T[-1]/T2T[0])
    tdd = np.logspace( np.log10(T2T[0]), np.log10(T2T[-1]), (int)(gpd*nd)+1, base=10, endpoint=True)
    tdl = tdd[0:-1]      # approximate window left edges
    tdr = tdd[1::]       # approximate window right edges
    td = (tdl+tdr) / 2.  # approximate window centres

    Vars = np.zeros( len(td) )
    htd = np.zeros( len(td), dtype=complex )
    isum = np.zeros( len(td), dtype=int )
    ii = 0
    for itd in range(len(T2T)):
        if ( T2T[itd] > tdr[ii] ):
            ii += 1
            # correct window edges to centre about data
            tdr[ii-1] = (T2T[itd-1]+T2T[itd])*.5
            tdl[ii  ] = (T2T[itd-1]+T2T[itd])*.5
        isum[ii] += 1
        htd[ii] += T2D[ itd ]
        Vars[ii] += sigma**2

    td = (tdl+tdr) / 2.  # actual window centres
    sigma2 = np.sqrt( Vars * ((1/(isum))**stackEfficiency) )

    # Reset abscissa where isum == 1,
    # i.e. where no windowing is going on
    td[isum==1] = T2T[0:len(td)][isum==1]

    tdd = np.append(tdl, tdr[-1])
    htd /= isum          # average within each gate
    T2T += T2TD          # restore the input abscissa, which was shifted in place above
    return td+T2TD, htd, tdd+T2TD, sigma2, isum  # centre abscissa, data, window edges, error, samples per gate
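
# Note (editor's addition): with Vars[i] = L_i * sigma**2 accumulated above, the
# theoretical gate error returned by gateIntegrate reduces to
#     sigma_i = sigma * L_i**((1 - stackEfficiency) / 2)
# which is sigma / sqrt(L_i) for the default stackEfficiency = 2 (ideal stacking),
# where L_i = isum[i] is the number of samples averaged into gate i.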

PhiD = []
def invert(Time, t, v, sig, lambdastar):
    """ Helper function that simply calls logBarrier; kept here to allow for drop-in replacement
    """
    #model = logBarrier(Time.Genv, 1e-2*v, Time.T2Bins, MAXITER=5000, sigma=1e-2*sig, alpha=1e6, smooth="Both")
    model = logBarrier(Time.Genv, 1e-2*v, Time.T2Bins, lambdastar, MAXITER=750, sigma=1e-2*sig, alpha=1e6, smooth="Smallest")
    PhiD.append(model[2])
    return model

def gateTest(vc, vgc, pperlin, boot, lambdastar):
    """ Performs gate integration and adds random noise
        vc = clean data (dense)
        vgc = clean data at gates
        pperlin = percent Perlin noise; the noise floor is maintained at 2.00 PU
        boot = if "boot" then bootstrap the gate noise
        lambdastar = "lcurve" or "discrepency" criterion for lambda*
    """
    t = np.arange(2e-4, .3601, 2e-4)
    zeta = np.pi / 3.
    v = np.copy(vc)  # important!

    # Scaling factors to keep the noise floor constant with increasing levels of
    # Perlin noise. These were determined using populations of 5,000 and hold to
    # two significant digits (i.e., 2.00 PU)
    PF = { 0.0: 0,
           2.5: .450,
           5.0: .6125,
           7.5: .765,
          10.0: .87375,
          12.5: .9725,
          15.0: 1.05,
          17.5: 1.1275,
          20.0: 1.20,
          22.5: 1.265,
          25.0: 1.325 }

    # random noise
    np.random.seed()  # necessary for the worker pool, otherwise all workers can draw the same numbers
    sigma = 2.*(1.-1e-2*pperlin)
    eps = np.random.normal(0, sigma, len(t)) + \
          1j*np.random.normal(0, sigma, len(t))
    eps += PF[pperlin] * perlin(len(t), L=.3601, sigma_f=.005, sigma_r=0.72)  # 1 PU std
    v += eps

    # noise estimate based on the residual (the imaginary channel carries no signal here)
    sigmahat = np.std( v.imag )
    gt, gd, we, err, isum = gateIntegrate(v.real, t, 20, sigmahat)
    ge = np.copy(err)

    if boot == "boot":
        Means, isum2 = bootstrapWindows(v.imag, 20000, isum[isum!=1], adapt=True)
        # STD
        #err[isum!=1] = np.ma.std(Means, axis=1, ddof=1)[isum!=1]
        # MAD, only for windows > 1
        c = stats.norm.ppf(3./4.)
        err[isum!=1] = np.ma.median(np.ma.abs(Means), axis=1) / c
    if boot == "uniform":
        err = sigmahat

    return gt, gd, gd-vgc, err, v.real, t, isum
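
# Illustrative sketch (editor's addition, kept as comments): a single noise realization
# for the clean dense curve vc and its gated counterpart vgc constructed in __main__
# below. The argument values 2.5 / "boot" / "lcurve" are examples only.
#
#   gt, gd, resid, err, vr, vt, isum = gateTest(vc, vgc, 2.5, "boot", "lcurve")
#   # gt    : gate-centre times          gd  : gated noisy data
#   # resid : gd - vgc (gate error)      err : per-gate noise estimate sigma_i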

if __name__ == "__main__":
    import multiprocessing
    import itertools
    from GJIPlot import *
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.ticker import FormatStrFormatter
    from matplotlib import ticker
    from collections import OrderedDict

    if len(sys.argv) < 4:
        print ( "Python script for generating plots used in GJI publication")
        print ( "usage:")
        print ( "python gateIntegrate4.py PercentPerlin Sigma_i Lambda*" )
        exit()
    if sys.argv[1] not in ['0.0','2.5','5.0','7.5','10.0','12.5','15.0','17.5','20.0','22.5','25.0']:
        print ( "PercentPerlin: [0.0,2.5,5.0...25.0] ", "got", sys.argv[1])
        exit(1)
    if sys.argv[2] != "gauss" and sys.argv[2] != "boot" and sys.argv[2] != "uniform":
        print ( "Sigma_i: gauss | boot | uniform")
        exit(1)
    if sys.argv[3] != "lcurve" and sys.argv[3] != "discrepency":
        print ( "Lambda*: lcurve | discrepency ")
        exit(1)
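
    # Example invocation (editor's addition): 2.5 % Perlin noise, bootstrapped gate
    # errors, and the L-curve criterion for lambda*:
    #
    #   python gateIntegrate4.py 2.5 boot lcurve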

    #offwhite = (.98,.98,.98)
    offwhite = (1., 1., 1.)
    mDarkBrown = '#eb811b'   # alert colour
    mDarkTeal = '#23373b'
    mLightBrown = "#EB811B"
    mLightGreen = "#14B03D"

    # Time series plot
    fig = plt.figure(figsize=(pc2in(18), pc2in(18)), facecolor=offwhite)
    ax2 = fig.add_axes([.195, .175, .750, .75], facecolor=offwhite)  # time

    # Main plot
    fig2 = plt.figure(figsize=(pc2in(20), pc2in(2*.5*20)))
    ax1  = fig2.add_axes([.175, .410*1.5, .6,   .225*1.5])
    ax1c = fig2.add_axes([.800, .410*1.5, .025, .225*1.5])
    ax3  = fig2.add_axes([.175, .100*1.5, .495, .225*1.5], facecolor='None')
    ax3r = fig2.add_axes([.175, .100*1.5, .495, .225*1.5], facecolor='None', rasterized=True, sharex=ax3, sharey=ax3)
    ax3b = fig2.add_axes([.825, .100*1.5, .1,   .225*1.5])

    SIG = []
    ER = []
    GD = []
    GT = []
    V = []
    MOD = []
    CONV = []
    PHID = []
    PHIM = []
    LSTAR = []

    ns = 10000   #10000 #10000   # number of realizations for PDF
    ni = 5000    #5000  #1000    # number of inversions to plot
    t = np.arange(2e-4, .3601, 2e-4)   # CMR sampling

    #CMAP = cmocean.cm.solar
    CMAP = cmocean.cm.gray_r
    #CMAP = cmocean.cm.haline
    #CMAP = cmocean.cm.tempo

    ##############################################
    # set up model
    lowT2 = .001
    hiT2 = 1.0
    nT2 = 30
    spacing = "Log_10"
    Time = pwcTime()
    Time.setT2(lowT2, hiT2, nT2, spacing)
    Time.setSampling( np.arange(2e-4, .3601, 2e-4) )
    Time.generateGenv()

    tmod = np.zeros(nT2)
    tmod[8] = .15    # distribution centres...to be smoothed
    tmod[20] = .1
    for i in range(2):
        tmod = np.convolve(tmod, np.array([.0625,.125,.1875,.25,.1875,.125,.0625]), 'same')

    vc = 100. * np.dot(Time.Genv, tmod) + 0j   # in PU
    gt, gd, we, err, isum = gateIntegrate(vc, t, 20, 3)
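
    # The gate centres gt and samples-per-gate isum from this noise-free pass define the
    # gated sampling on which the reduced forward kernel below is built.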

    ##############################################
    # Set up inversion
    Time = pwcTime()
    Time.setT2(lowT2, hiT2, nT2, spacing)
    Time.setSampling( gt )
    Time.generateGenv()
    vgc = 100.*np.dot(Time.Genv, tmod) + 0j   # in PU

    # make the Pool of workers
    print("pool gate integrate")
    with multiprocessing.Pool() as pool:
        results = pool.starmap(gateTest, zip(np.tile(vc, (ns, 1)), np.tile(vgc, (ns, 1)),
                                             itertools.repeat(eval(sys.argv[1])),
                                             itertools.repeat(sys.argv[2]),
                                             itertools.repeat(sys.argv[3])))
    print("done pool gate integrate")

    # parse out results
    for i in range(ns):
        gt, gd, ge, err, v, vt, isum = results[i]
        V.append(v.real)
        GT.append(gt.real)
        GD.append(gd.real)
        ER.append( ge.real / err.real )
        SIG.append( err.real )

    print("pool inversions")
    with multiprocessing.Pool() as pool:
        invresults = pool.starmap(invert, zip(itertools.repeat(Time), GT[0:ni], GD[0:ni], SIG[0:ni], itertools.repeat(sys.argv[3])))
    #print("done pool inversions",results[:][0])

    # Parse results
    for i in range(ns):
        #print("Sym %", round(100.*i/(float)(1)/(float)(ns)))
        # invert
        if i < ni:
            #mod, conv, phid = invert(Time, gt, gd.real, err)
            if sys.argv[3] == "discrepency":
                mod, conv, phid_final = invresults[i]
            else:
                mod, conv, phid_final, phim, phid, lstar = invresults[i]
            MOD.append(mod)
            CONV.append(conv)
            if sys.argv[3] != "discrepency":
                # phid, phim, and lstar are only returned by the l-curve branch
                PHID.append(phid)
                PHIM.append(phim)
                LSTAR.append(lstar)

    PHIM = np.array(PHIM)
    PHID = np.array(PHID)
    ER = np.array(ER)
    MOD = np.array(MOD)
    GD = np.array(GD)
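
    # Shapes after stacking (editor's note): ER and GD are (ns, n_gates) arrays of
    # normalized gate errors and gated data; MOD holds one inverted T2 distribution
    # of length nT2 per row for the first ni realizations.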

    ####################
    # Time series plot #
    ax2.plot( 1e3*vt, V[0], color=mDarkTeal, label="$V_N$", linewidth=1, zorder=-32)  #, rasterized=True)
    ax2.errorbar( 1e3*gt, GD[0], yerr=SIG[0], fmt='.', markersize=6, color=mLightBrown, label="$V_G$")
    ax2.set_ylim([-10, 30])

    leg1 = ax2.legend( labelspacing=0.2, scatterpoints=1, numpoints=1, frameon=True )
    fixLeg(leg1)

    ax2.set_xscale("log", nonposx='clip')
    ax2.set_ylabel(r"$V_N$ (PU)")
    ax2.get_xaxis().set_major_formatter(FormatStrFormatter('%1.0f'))
    ax2.set_xlabel("time (ms)")
    deSpine(ax2)
    fig.savefig( sys.argv[1] + "-" + sys.argv[2] + "-" + sys.argv[3] + "-ts.pdf", dpi=400, facecolor=offwhite, edgecolor=offwhite)

    # histogram of the gate error statistic
    bins = np.linspace( -3, 3, 40, endpoint=True )
    HIST = []
    for i in range(0, np.shape(ER)[1]):
        hist, edges = np.histogram(ER[:,i], bins=bins, density=False)
        HIST.append(hist)
    HIST = np.array(HIST) / (float)(ns)   # normalize

    im = ax1.pcolor(1e3*we, edges, HIST.T, cmap=CMAP, vmin=0, vmax=.1, rasterized=True)
    im.set_edgecolor('face')
    cb = plt.colorbar(im, ax1c, label=r"probability density", format=FormatStrFormatter('%1.2f'))
    cb.solids.set_rasterized(True)
    tick_locator = ticker.MaxNLocator(nbins=4)
    cb.locator = tick_locator
    cb.update_ticks()

    ax1.set_xscale("log", nonposx='clip')
    ax1.get_xaxis().set_major_formatter(FormatStrFormatter('%1.0f'))
    ax1.set_xlabel("time (ms)")
    ax1.set_ylabel(r"gate error $\left( \left( {V_G - V_T} \right) / {\tilde{\sigma_i}} \right)$")

    LMT2 = []
    THETA = []
    MODERR = []
    # plot a random sample of ns instead?
    for i in range(ni):
        # log-mean T2 and total amplitude of each recovered model
        model = MOD[i]
        theta = np.sum( model )
        LogMeanT2 = np.exp(np.sum( model * np.log( Time.T2Bins ) ) / theta )
        LMT2.append(LogMeanT2)
        THETA.append( np.sum(model) )
        MODERR.append( np.linalg.norm(model - tmod) )
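
    # The log-mean T2 computed above is T2_ML = exp( sum_j f_j ln(T2_j) / sum_j f_j ),
    # i.e. the amplitude-weighted geometric mean of the recovered distribution f.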

    CONV = np.array(CONV)
    THETA = np.array(THETA)
    MOD = np.array(MOD)
    MODERR = np.array(MODERR)

    #############################
    # plot all models, 1 colour #
    ires = ax3r.plot( 1e3*np.tile(Time.T2Bins, (np.sum(np.array(CONV)), 1)).T, 1e2*MOD.T, color=mDarkTeal, alpha=.01, lw=.5, label="$\mathbf{f}_I$", zorder=0, rasterized=True)
    lns2, = ax3r.plot( 1e3*Time.T2Bins, 1e2*tmod, color=mLightBrown, linewidth=2, label="$\mathbf{f}_T$")

    handles, labels = ax3r.get_legend_handles_labels()
    by_label = OrderedDict(zip(labels, handles))
    leg3 = ax3r.legend(by_label.values(), by_label.keys(), labelspacing=0.2, scatterpoints=1, numpoints=1, frameon=True, loc="upper right")
    for line in leg3.get_lines():
        line.set_linewidth(1)
    for lh in leg3.legendHandles:
        lh.set_alpha(1)
    fixLeg(leg3)

    ###########################
    # Error histogram on side #
    ax3b.hist( 1e2*MODERR, bins='auto', orientation="horizontal", color=mDarkTeal, stacked=True, density=True, range=(0, 20))
    ax3b.axhline(1e2*np.mean(MODERR), linewidth=1.25, color=mLightBrown)  #, color=CMAP(0.7), zorder=1)
    deSpine(ax3b)
    ax3b.set_xscale("log", nonposx='clip')
    ax3b.set_ylabel(r"$\Vert \mathbf{f}_I - \mathbf{f}_T \Vert$")   # %(m$^3$/m$^3$)")  #, color="C0")
    ax3b.set_xlabel("log probability\ndensity")   #, color="C0")

    ax3.set_xlim( (1e3*Time.T2Bins[0], 1e3*Time.T2Bins[-1]) )
    ax3.set_ylim( (0, 5) )
    ax3.set_xlabel("$T_2$ (ms)")
    ax3.set_ylabel("partial water content (PU)")   #, color="C0")
    ax3.set_xscale("log", nonposx='clip')
    ax3.get_xaxis().set_major_formatter(FormatStrFormatter('%1.0f'))
    plt.setp(ax3r.get_xticklabels(), visible=False)
    plt.setp(ax3r.get_yticklabels(), visible=False)
    deSpine(ax3)
    deSpine(ax3r)

    np.save("pperlin" + str(round(1e1*eval(sys.argv[1]))) + "-" + sys.argv[2] + "-" + sys.argv[3] + "-err", MODERR)
    plt.savefig("pperlin" + str(round(1e1*eval(sys.argv[1]))) + "-" + sys.argv[2] + "-" + sys.argv[3] + ".pdf", dpi=600, facecolor=offwhite, edgecolor=offwhite)
    plt.show()