Surface NMR processing and inversion GUI — peak picking and quadrature
detection utilities for exponentially decaying sinusoidal (FID) signals.
  1. import numpy, array #,rpy2
  2. from matplotlib import pyplot as plt
  3. import numpy as np
  4. from scipy.optimize import least_squares
  5. #from rpy2.robjects.packages import importr
  6. #import rpy2.robjects as robjects
  7. #import rpy2.robjects.numpy2ri
  8. #import notch
  9. from numpy.fft import fft, fftfreq
  10. # We know/can calculate frequency peak, use this to guess where picks will be.
  11. # maybe have a sliding window that reports peak values.
  12. def peakPicker(data, omega, dt):
  13. # compute window based on omega and dt
  14. # make sure you are not aliased, grab every other peak
  15. window = (2*numpy.pi) / (omega*dt)
  16. data = numpy.array(data)
  17. peaks = []
  18. troughs = []
  19. times = []
  20. times2 = []
  21. indices = []
  22. ws = 0
  23. we = window
  24. ii = 0
  25. for i in range((int)(len(data)/window)):
  26. # initially was just returning this I think avg is better
  27. #times.append( (ws + numpy.abs(data[ws:we]).argmax()) * dt )
  28. peaks.append(numpy.max(data[ws:we]))
  29. times.append( (ws + data[ws:we].argmax()) * dt )
  30. indices.append( ii + data[ws:we].argmax() )
  31. troughs.append(numpy.min(data[ws:we]))
  32. times2.append( (ws + (data[ws:we]).argmin()) * dt )
  33. indices.append( ii + data[ws:we].argmin() )
  34. ws += window
  35. we += window
  36. ii += (int)(we-ws)
  37. #return numpy.array(peaks), numpy.array(times)
  38. # Averaging peaks does a good job of removing bias in noise
  39. return (numpy.array(peaks)-numpy.array(troughs))/2., \
  40. (numpy.array(times)+numpy.array(times2))/2., \
  41. indices
  42. def fun(x, t, y):
  43. """ Cost function for regression, single exponential, no DC term
  44. x[0] = A0
  45. x[1] = zeta
  46. x[2] = df
  47. x[3] = T2
  48. """
  49. # concatenated real and imaginary parts
  50. return y - np.concatenate((-x[0]*np.sin(2.*np.pi*x[2]*t + x[1])*np.exp(-t/x[3]), \
  51. +x[0]*np.cos(2.*np.pi*x[2]*t + x[1])*np.exp(-t/x[3])))
  52. def fun2(x, t, y):
  53. """ Cost function for regression, single exponential, no DC term
  54. x[0] = A0
  55. x[1] = zeta
  56. x[2] = T2
  57. """
  58. # concatenated real and imaginary parts
  59. pre = np.concatenate((x[0]*np.cos(x[1])*np.exp(-t/x[2]), \
  60. -x[0]*np.sin(x[1])*np.exp(-t/x[2])))
  61. return y-pre
  62. def quadratureDetect2(X, Y, tt, method, loss, x0="None"):
  63. """ Pure python quadrature detection using Scipy.
  64. X = real part of NMR signal
  65. Y = imaginary component of NMR signal
  66. tt = time
  67. """
  68. #method = ['trf','dogbox','lm'][method_int]
  69. #loss = ['linear','soft_l1','cauchy','huber'][loss_int]
  70. #print ("method", method, 'loss', loss)
  71. if x0=="None":
  72. if method == 'lm':
  73. x0 = np.array( [50., 0., 0., .200] ) # A0, zeta, df, T2
  74. res_lsq = least_squares(fun, x0, args=(tt, np.concatenate((X, Y))), loss=loss, f_scale=1.0,\
  75. method=method
  76. )
  77. else:
  78. x0 = np.array( [50., 0., 0., .200] ) # A0, zeta, df, T2
  79. res_lsq = least_squares(fun, x0, args=(tt, np.concatenate((X, Y))), loss=loss, f_scale=1.0,\
  80. bounds=( [5, -np.pi, -5, .001] , [5000., np.pi, 5, .800] ),
  81. method=method
  82. )
  83. x = res_lsq.x
  84. #print ("A0={} zeta={} df={} T2={}".format(x[0],x[1],x[2],x[3]))
  85. else:
  86. res_lsq = least_squares(fun, x0, args=(tt, np.concatenate((X, Y))), loss=loss, f_scale=1.0,\
  87. #bounds=( [1., -np.pi, -5, .005] , [1000., np.pi, 5, .800] ),
  88. method=method
  89. )
  90. #bounds=( [0., 0, -20, .0] , [1., np.pi, 20, .6] ))
  91. x = res_lsq.x
  92. return res_lsq.success, x[0], x[2], x[1], x[3]
  93. # no df
  94. #x = np.array( [1., 0., 0.2] )
  95. #res_lsq = least_squares(fun2, x, args=(tt, np.concatenate((X, Y))), loss='soft_l1', f_scale=0.1)
  96. #x = res_lsq.x
  97. #return conv, E0,df,phi,T2
  98. #return res_lsq.success, x[0], 0, x[1], x[2]
  99. ###################################################################
  100. ###################################################################
  101. ###################################################################
  102. if __name__ == "__main__":
  103. dt = .0001
  104. T2 = .1
  105. omega = 2000.*2*numpy.pi
  106. phi = .0
  107. T = 8.*T2
  108. t = numpy.arange(0, T, dt)
  109. # Synthetic data, simple single decaying sinusoid
  110. # with a single decay parameter and gaussian noise added
  111. data = numpy.exp(-t/T2) * numpy.sin(omega * t + phi) + numpy.random.normal(0,.05,len(t)) \
  112. + numpy.random.randint(-1,2,len(t))*numpy.random.exponential(.2,len(t))
  113. cdata = numpy.exp(-t/T2) * numpy.sin(omega * t + phi) #+ numpy.random.normal(0,.25,len(t))
  114. #data = numpy.random.normal(0,.25,len(t))
  115. sigma2 = numpy.std(data[::-len(data)/4])
  116. #sigma2 = numpy.var(data[::-len(data)/4])
  117. print("sigma2", sigma2)
  118. [peaks,times,indices] = peakPicker(data, omega, dt)
  119. [b1,b2,rT2] = regressCurve(peaks,times)
  120. print("rT2 nonweighted", rT2)
  121. [b1,b2,rT2] = regressCurve(peaks,times,sigma2)
  122. print("rT2 weighted", rT2)
  123. envelope = numpy.exp(-t/T2)
  124. renvelope = numpy.exp(-t/rT2)
  125. #outf = file('regress.txt','w')
  126. #for i in range(len(times)):
  127. # outf.write(str(times[i]) + " " + str(peaks[i]) + "\n")
  128. #outf.close()
  129. plt.plot(t,data, 'b')
  130. plt.plot(t,cdata, 'g', linewidth=1)
  131. plt.plot(t,envelope, color='violet', linewidth=4)
  132. plt.plot(t,renvelope, 'r', linewidth=4)
  133. plt.plot(times, numpy.array(peaks), 'bo', markersize=8, alpha=.25)
  134. plt.legend(['noisy data','clean data','real envelope','regressed env','picks'])
  135. plt.savefig("regression.pdf")
  136. # FFT check
  137. fourier = fft(data)
  138. plt.figure()
  139. freq = fftfreq(len(data), d=dt)
  140. plt.plot(freq, (fourier.real))
  141. plt.show()
  142. # TODO do a bunch in batch mode to see if T2 estimate is better with or without
  143. # weighting and which model is best.
  144. # TODO try with real data
  145. # TODO test filters (median, FFT, notch)
  146. # It looks like weighting is good for relatively low sigma, but for noisy data
  147. # it hurts us. Check