#!/usr/bin/env python
#------------------------------------------------------------------
# Filename: freqattributes.py
#   Author: Conny Hammer
#    Email: conny.hammer@geo.uni-potsdam.de
#
# Copyright (C) 2008-2012 Conny Hammer
#------------------------------------------------------------------
"""
Frequency Attributes

:copyright:
    The ObsPy Development Team (devs@obspy.org)
:license:
    GNU Lesser General Public License, Version 3
    (http://www.gnu.org/copyleft/lesser.html)
"""

from operator import itemgetter
from scipy import fftpack, signal, sparse
from obspy.signal.invsim import seisSim, cornFreq2Paz
from obspy.signal import util
import numpy as np


def mper(data, win, Nfft, n1=0, n2=0):
    """
    Spectrum of a signal.

    Computes the spectrum of the given data, which can be windowed or not.
    The spectrum is estimated using the modified periodogram. If n1 and n2
    are not specified, the modified periodogram of the entire sequence is
    returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to make spectrum of.
    :type win: :class:`~numpy.ndarray`
    :param win: Window to multiply with given signal.
    :type Nfft: int
    :param Nfft: Number of points for FFT.
    :type n1: int, optional
    :param n1: Starting index, defaults to ``0``.
    :type n2: int, optional
    :param n2: Ending index, defaults to ``0`` (i.e. the end of the data).
    :return: Modified periodogram (spectrum).
    """
    if n2 == 0:
        n2 = len(data)
    n = n2 - n1
    # normalization factor of the window
    U = pow(np.linalg.norm([win]), 2) / n
    # restrict to the requested segment (the entire sequence by default)
    xw = data[int(n1):int(n2)] * win
    Px = pow(abs(fftpack.fft(xw, Nfft)), 2) / (n * U)
    # replace the DC bin with its neighbour
    Px[0] = Px[1]
    return Px


def welch(data, win, Nfft, L=0, over=0):
    """
    Spectrum of a signal.

    Computes the spectrum of the given data, which can be windowed or not.
    The spectrum is estimated using Welch's method of averaging modified
    periodograms.

    Welch's estimate of the power spectrum is returned on a linear scale.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to make spectrum of.
    :type win: :class:`~numpy.ndarray`
    :param win: Window to multiply with given signal.
    :type Nfft: int
    :param Nfft: Number of points for FFT.
    :type L: int, optional
    :param L: Length of windows to be averaged, defaults to ``0``
        (i.e. the entire sequence).
    :type over: float, optional
    :param over: Overlap of windows to be averaged, 0 <= over < 1,
        defaults to ``0``.
    :return: Spectrum.
    """
    if L == 0:
        L = len(data)
    n1 = 0
    n2 = L
    # step between the starting points of successive segments
    n0 = (1. - float(over)) * L
    # number of segments that fit into the data
    nsect = 1 + int(np.floor((len(data) - L) / n0))
    Px = 0
    for _i in range(nsect):
        Px = Px + mper(data, win, Nfft, n1, n2) / nsect
        n1 = n1 + n0
        n2 = n2 + n0
    return Px
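

# Example (illustrative sketch): sanity-check mper()/welch() on a synthetic
# 5 Hz sinusoid; the 100 Hz sampling rate and signal parameters below are
# assumed purely for demonstration and only need the NumPy/SciPy imports
# above.
def _example_welch():
    fs = 100.0                        # assumed sampling rate in Hz
    t = np.arange(0, 10, 1.0 / fs)    # 10 s of data
    x = np.sin(2 * np.pi * 5.0 * t)   # 5 Hz test signal
    nfft = 1024
    Px = welch(x, np.hamming(len(x)), nfft)
    freqs = np.linspace(0, fs, nfft, endpoint=False)
    # the spectral peak should lie close to 5 Hz
    return freqs[np.argmax(Px[:nfft // 2])]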

 

 

def cfrequency(data, fs, smoothie, fk):
    """
    Central frequency of a signal.

    Computes the central frequency of the given data, which can be windowed
    or not. The central frequency is a measure of the frequency where the
    power is concentrated. It is computed as the square root of the ratio of
    the second to the zeroth moment of the power spectral density.

    The central frequency is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to estimate central frequency from.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **cfreq[, dcfreq]** - Central frequency, time derivative of the
        central frequency (windowed only).
    """
    nfft = util.nextpow2(data.shape[1])
    freq = np.linspace(0, fs, nfft + 1)
    freqaxis = freq[0:nfft // 2]
    cfreq = np.zeros(data.shape[0])
    if np.size(data.shape) > 1:
        i = 0
        for row in data:
            Px_wm = welch(row, np.hamming(len(row)), util.nextpow2(len(row)))
            Px = Px_wm[0:len(Px_wm) // 2]
            cfreq[i] = np.sqrt(np.sum(freqaxis ** 2 * Px) / np.sum(Px))
            i = i + 1
        cfreq = util.smooth(cfreq, smoothie)
        #cfreq_add = \
        #        np.append(np.append([cfreq[0]] * (np.size(fk) // 2), cfreq),
        #        [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2))
        # faster alternative: pad the edges before filtering
        cfreq_add = np.hstack(([cfreq[0]] * (np.size(fk) // 2), cfreq,
                               [cfreq[np.size(cfreq) - 1]] *
                               (np.size(fk) // 2)))
        dcfreq = signal.lfilter(fk, 1, cfreq_add)
        #dcfreq = dcfreq[np.size(fk) // 2:(np.size(dcfreq) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dcfreq = dcfreq[np.size(fk) - 1:np.size(dcfreq)]
        return cfreq, dcfreq
    else:
        Px_wm = welch(data, np.hamming(len(data)), util.nextpow2(len(data)))
        Px = Px_wm[0:len(Px_wm) // 2]
        cfreq = np.sqrt(np.sum(freqaxis ** 2 * Px) / np.sum(Px))
        return cfreq
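

# Example (illustrative sketch): central frequency of windowed (2-D) data.
# The derivative stencil ``fk`` is a hypothetical central-difference choice,
# and the call relies on obspy.signal.util.smooth() exactly as cfrequency()
# itself does; all signal parameters are assumed for demonstration only.
def _example_cfrequency():
    fs = 100.0
    t = np.arange(0, 2, 1.0 / fs)
    # 20 windows of a noisy 10 Hz sinusoid
    data = np.array([np.sin(2 * np.pi * 10.0 * t) +
                     0.1 * np.random.randn(len(t)) for _ in range(20)])
    fk = np.array([1, 1, 1, 1, 1, 0, -1, -1, -1, -1, -1]) / 10.0
    # cfreq should lie near the 10 Hz signal frequency for every window
    cfreq, dcfreq = cfrequency(data, fs, smoothie=3, fk=fk)
    return cfreq, dcfreq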

 

 

def bwith(data, fs, smoothie, fk):
    """
    Bandwidth of a signal.

    Computes the bandwidth of the given data, which can be windowed or not.
    The bandwidth is taken at the level where the power of the spectrum has
    dropped to half its maximum value, i.e. where the smoothed Fourier
    amplitude has fallen to 1/sqrt(2) of its maximum.

    If data are windowed the bandwidth of each window is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to determine bandwidth of.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **bwith[, dbwith]** - Bandwidth, time derivative of the
        bandwidth (windowed only).
    """
    nfft = util.nextpow2(data.shape[1])
    freqaxis = np.linspace(0, fs, nfft + 1)
    bwith = np.zeros(data.shape[0])
    f = fftpack.fft(data, nfft)
    f_sm = util.smooth(abs(f[:, 0:nfft // 2]), 10)
    if np.size(data.shape) > 1:
        i = 0
        for row in f_sm:
            # frequency bin whose amplitude is closest to 1/sqrt(2) of the
            # maximum amplitude
            minfc = abs(row - max(abs(row * (1 / np.sqrt(2)))))
            [mdist_ind, _mindist] = min(enumerate(minfc), key=itemgetter(1))
            bwith[i] = freqaxis[mdist_ind]
            i = i + 1
        #bwith_add = \
        #        np.append(np.append([bwith[0]] * (np.size(fk) // 2), bwith),
        #        [bwith[np.size(bwith) - 1]] * (np.size(fk) // 2))
        # faster alternative: pad the edges before filtering
        bwith_add = np.hstack(([bwith[0]] * (np.size(fk) // 2), bwith,
                               [bwith[np.size(bwith) - 1]] *
                               (np.size(fk) // 2)))
        dbwith = signal.lfilter(fk, 1, bwith_add)
        #dbwith = dbwith[np.size(fk) // 2:(np.size(dbwith) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dbwith = dbwith[np.size(fk) - 1:]
        bwith = util.smooth(bwith, smoothie)
        dbwith = util.smooth(dbwith, smoothie)
        return bwith, dbwith
    else:
        minfc = abs(data - max(abs(data * (1 / np.sqrt(2)))))
        [mdist_ind, _mindist] = min(enumerate(minfc), key=itemgetter(1))
        bwith = freqaxis[mdist_ind]
        return bwith
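

# Example (illustrative sketch): bandwidth of the same kind of windowed (2-D)
# input as in the cfrequency() example above; ``fk`` is again a hypothetical
# central-difference stencil and all signal parameters are assumed for
# demonstration only.
def _example_bwith():
    fs = 100.0
    t = np.arange(0, 2, 1.0 / fs)
    data = np.array([np.sin(2 * np.pi * 10.0 * t) +
                     0.1 * np.random.randn(len(t)) for _ in range(20)])
    fk = np.array([1, 1, 1, 1, 1, 0, -1, -1, -1, -1, -1]) / 10.0
    bw, dbw = bwith(data, fs, smoothie=3, fk=fk)
    return bw, dbw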

 

 

def domperiod(data, fs, smoothie, fk):
    """
    Predominant period of a signal.

    Computes the predominant period of the given data, which can be windowed
    or not. The period is determined from the maximum value of the Fourier
    amplitude spectrum.

    If data are windowed the predominant period of each window is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to determine predominant period of.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **dperiod[, ddperiod]** - Predominant period, time derivative of
        the predominant period (windowed only).
    """
    nfft = 1024
    #nfft = util.nextpow2(data.shape[1])
    freqaxis = np.linspace(0, fs, nfft + 1)
    dperiod = np.zeros(data.shape[0])
    f = fftpack.fft(data, nfft)
    #f_sm = util.smooth(abs(f[:, 0:nfft // 2]), 1)
    f_sm = f[:, 0:nfft // 2]
    if np.size(data.shape) > 1:
        i = 0
        for row in f_sm:
            [mdist_ind, _mindist] = max(enumerate(abs(row)), key=itemgetter(1))
            # freqaxis holds frequencies, so this picks the frequency of the
            # spectral maximum
            dperiod[i] = freqaxis[mdist_ind]
            i = i + 1
        #dperiod_add = np.append(np.append([dperiod[0]] * (np.size(fk) // 2), \
        #    dperiod), [dperiod[np.size(dperiod) - 1]] * (np.size(fk) // 2))
        # faster alternative: pad the edges before filtering
        dperiod_add = np.hstack(([dperiod[0]] * (np.size(fk) // 2), dperiod,
                                 [dperiod[np.size(dperiod) - 1]] *
                                 (np.size(fk) // 2)))
        ddperiod = signal.lfilter(fk, 1, dperiod_add)
        #ddperiod = ddperiod[np.size(fk) // \
        #    2:(np.size(ddperiod) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        ddperiod = ddperiod[np.size(fk) - 1:]
        dperiod = util.smooth(dperiod, smoothie)
        ddperiod = util.smooth(ddperiod, smoothie)
        return dperiod, ddperiod
    else:
        [mdist_ind, _mindist] = max(enumerate(abs(data)), key=itemgetter(1))
        dperiod = freqaxis[mdist_ind]
        return dperiod
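

# Example (illustrative sketch): dominant spectral peak of windowed (2-D)
# data; note that the returned values are the frequencies picked from
# ``freqaxis`` (see the comment in domperiod() above). Parameters are assumed
# for demonstration only.
def _example_domperiod():
    fs = 100.0
    t = np.arange(0, 2, 1.0 / fs)
    data = np.array([np.sin(2 * np.pi * 10.0 * t) +
                     0.1 * np.random.randn(len(t)) for _ in range(20)])
    fk = np.array([1, 1, 1, 1, 1, 0, -1, -1, -1, -1, -1]) / 10.0
    dp, ddp = domperiod(data, fs, smoothie=3, fk=fk)
    return dp, ddp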

 

 

def logbankm(p, n, fs, w):
    """
    Matrix for a log-spaced filterbank.

    Computes a matrix containing the filterbank amplitudes for a log-spaced
    filterbank.

    :param p: Number of filters in filterbank.
    :param n: Length of FFT.
    :param fs: Sampling frequency in Hz.
    :param w: Window function, ``'Hann'`` or ``'Hamming'``.
    :return: **xx, yy, zz** - Matrix containing the filterbank amplitudes,
        lowest FFT bin with a non-zero coefficient, highest FFT bin with a
        non-zero coefficient.
    """
    # low end of the lowest filter corresponds to the maximum frequency
    # resolution fs / n
    fn2 = np.floor(n / 2)
    fl = np.floor(fs) / np.floor(n)
    fh = np.floor(fs / 2)
    lr = np.log(fh / fl) / (p + 1)
    bl = n * (fl * np.exp(np.array([0, 1, p, p + 1]) * float(lr)) / float(fs))
    b2 = np.ceil(bl[1])
    b3 = np.floor(bl[2])
    b1 = np.floor(bl[0]) + 1
    b4 = min(fn2, np.ceil(bl[3])) - 1
    pf = np.log(((np.arange(b1 - 1, b4 + 1) / n) * fs) / fl) / lr
    fp = np.floor(pf)
    pm = pf - fp
    k2 = int(b2 - b1 + 1)
    k3 = int(b3 - b1 + 1)
    k4 = int(b4 - b1 + 1)
    r = np.append(fp[k2:k4 + 2], 1 + fp[1:k3 + 1]) - 1
    c = np.append(np.arange(k2, k4 + 1), np.arange(1, k3 + 1)) - 1
    v = 2 * np.append([1 - pm[k2:k4 + 1]], [pm[1:k3 + 1]])
    mn = int(b1 + 1)
    mx = int(b4 + 1)
    #x = np.array([[c], [r]], dtype=[('x', 'float'), ('y', 'float')])
    #ind = np.argsort(x, order=('x', 'y'))
    # row/column index pairs of the non-zero filterbank coefficients
    ind = np.array([c, r], dtype=int)
    if w == 'Hann':
        v = 1. - np.cos(v * float(np.pi / 2.))
    elif w == 'Hamming':
        v = 1. - 0.92 / 1.08 * np.cos(v * float(np.pi / 2))
    # bugfix for #70 - scipy.sparse.csr_matrix() delivers sometimes a
    # transposed matrix depending on the installed NumPy version - using
    # scipy.sparse.coo_matrix() ensures compatibility with old NumPy versions
    xx = sparse.coo_matrix((v, ind)).transpose().todense()
    return xx, mn - 1, mx - 1
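

# Example (illustrative sketch): build a log-spaced filterbank for a 512-point
# FFT at an assumed sampling rate of 100 Hz; ``xx`` holds the filterbank
# amplitudes and ``mn``/``mx`` delimit the FFT bins with non-zero
# coefficients, as described in the docstring above.
def _example_logbankm():
    xx, mn, mx = logbankm(16, 512, 100.0, 'Hamming')
    return xx, mn, mx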

 

 

def logcep(data, fs, nc, p, n, w):  # @UnusedVariable: n is never used!!!
    """
    Cepstrum of a signal.

    Computes the cepstral coefficients on a logarithmic scale of the given
    data, which can be windowed or not.

    If data are windowed the cepstral coefficients of each window are
    returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to determine cepstral coefficients of.
    :param fs: Sampling frequency in Hz.
    :param nc: Number of cepstral coefficients.
    :param p: Number of filters in filterbank.
    :param n: Number of data windows (unused).
    :param w: Window function for the filterbank, ``'Hann'`` or ``'Hamming'``.
    :return: Cepstral coefficients.
    """
    dataT = np.transpose(data)
    nfft = util.nextpow2(dataT.shape[0])
    fc = fftpack.fft(dataT, nfft, 0)
    f = fc[1:len(fc) // 2 + 1, :]
    m, a, b = logbankm(p, nfft, fs, w)
    pw = np.real(np.multiply(f[a:b, :], np.conj(f[a:b, :])))
    pth = np.max(pw) * 1E-20
    ath = np.sqrt(pth)
    #h1 = np.transpose(np.array([[ath] * int(b + 1 - a)]))
    #h2 = m * abs(f[a - 1:b, :])
    # floor the filterbank outputs at ath before taking the logarithm
    y = np.log(np.maximum(m * abs(f[a - 1:b, :]), ath))
    z = util.rdct(y)
    z = z[1:, :]
    #nc = nc + 1
    nf = np.size(z, 1)
    if p > nc:
        # keep only the first nc coefficients
        z = z[:nc, :]
    elif p < nc:
        # pad with zeros up to nc coefficients
        z = np.vstack([z, np.zeros((nc - p, nf))])
    return z


def pgm(data, delta, freq, damp=0.1):
    """
    Peak ground motion parameters.

    Compute the maximal displacement, velocity, acceleration and the peak
    ground acceleration at a certain frequency (standard frequencies for
    ShakeMaps are 0.3/1.0/3.0 Hz).

    Data must be displacement.

    :type data: :class:`~numpy.ndarray`
    :param data: Data in displacement to convolve with pendulum at freq.
    :type delta: float
    :param delta: Sampling interval in seconds.
    :type freq: float
    :param freq: Frequency in Hz.
    :type damp: float
    :param damp: Damping factor, defaults to ``0.1``.
    :rtype: (float, float, float, float)
    :return: Peak ground acceleration, maximal displacement, velocity,
        acceleration.
    """
    data = data.copy()

    # peak displacement
    m_dis = np.abs(data).max()

    # peak velocity (first time derivative of the displacement trace)
    data = np.gradient(data, delta)
    m_vel = np.abs(data).max()

    # peak acceleration (second time derivative of the displacement trace)
    data = np.gradient(data, delta)
    m_acc = np.abs(data).max()

    # convolve with a damped pendulum (single degree of freedom oscillator)
    # of corner frequency freq to obtain the peak ground acceleration at
    # that frequency
    samp_rate = 1.0 / delta
    omega = (2 * np.pi * freq) ** 2
    paz_sa = cornFreq2Paz(freq, damp=damp)
    paz_sa['sensitivity'] = omega
    paz_sa['zeros'] = []
    data = seisSim(data, samp_rate, paz_remove=None, paz_simulate=paz_sa,
                   taper=True, simulate_sensitivity=True, taper_fraction=0.05)

    pga = np.abs(data).max()

    return (pga, m_dis, m_vel, m_acc)
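

# Example (illustrative sketch): peak ground motion parameters of a synthetic
# 1 Hz displacement trace sampled at an assumed 200 Hz; the 3.0 Hz target
# frequency follows the ShakeMap convention mentioned in the docstring. The
# call uses seisSim()/cornFreq2Paz() exactly as pgm() itself does.
def _example_pgm():
    fs = 200.0
    t = np.arange(0, 5, 1.0 / fs)
    disp = 1e-3 * np.sin(2 * np.pi * 1.0 * t)   # displacement trace
    pga, m_dis, m_vel, m_acc = pgm(disp, 1.0 / fs, 3.0)
    return pga, m_dis, m_vel, m_acc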