# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------

'''
Reader for `DiGOS DATA-CUBE³ <https://digos.eu/the-seismic-data-recorder/>`_
raw data.
'''

import os
import math
import logging

import numpy as num

from pyrocko import trace, util, plot
from pyrocko.guts import Object, Int, List, String, Timestamp

from . import io_common

logger = logging.getLogger(__name__)

N_GPS_TAGS_WANTED = 200  # must match definition in datacube_ext.c
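
# Sub-sample time shift correction: when enabled, a constant shift of
# 0.199 * deltat + 0.3 ms, attributed to the Cube's ADC (see the
# 2021-10-26 note in analyse_gps_tags), is subtracted from the GPS
# control point times.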

APPLY_SUBSAMPLE_SHIFT_CORRECTION = True


def color(c):
    c = plot.color(c)
    return tuple(x/255. for x in c)


class DataCubeError(io_common.FileLoadError):
    pass


class ControlPointError(Exception):
    pass

def make_control_point(ipos_block, t_block, tref, deltat):
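    # For a drift-free quartz clock, the reduced times
    # tred(i) = t(i) - tref - i*deltat would be constant; a linear fit of
    # tred against sample index therefore measures the clock drift within
    # this block. Two rounds of interquartile trimming reject GPS outliers
    # before the final fit.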

    # reduce time (no drift would mean straight line)
    tred = (t_block - tref) - ipos_block*deltat

    # first round, remove outliers
    q25, q75 = num.percentile(tred, (25., 75.))
    iok = num.logical_and(q25 <= tred, tred <= q75)

    # detrend
    slope, offset = num.polyfit(ipos_block[iok], tred[iok], 1)
    tred2 = tred - (offset + slope * ipos_block)

    # second round, remove outliers based on detrended tred, refit
    q25, q75 = num.percentile(tred2, (25., 75.))
    iok = num.logical_and(q25 <= tred2, tred2 <= q75)
    x = ipos_block[iok].copy()
    ipos0 = x[0]
    x -= ipos0
    y = tred[iok].copy()
    if x.size < 2:
        raise ControlPointError(
            'Insufficient number of control points after QC.')

    elif x.size < 5:  # needed for older numpy versions
        (slope, offset) = num.polyfit(x, y, 1)
    else:
        (slope, offset), cov = num.polyfit(x, y, 1, cov=True)

        slope_err, offset_err = num.sqrt(num.diag(cov))

        slope_err_limit = 1.0e-10
        if ipos_block.size < N_GPS_TAGS_WANTED // 2:
            slope_err_limit *= (200. / ipos_block.size)

        offset_err_limit = 5.0e-3

        if slope_err > slope_err_limit:
            raise ControlPointError(
                'Slope error too large: %g (limit: %g)' % (
                    slope_err, slope_err_limit))

        if offset_err > offset_err_limit:
            raise ControlPointError(
                'Offset error too large: %g (limit: %g)' % (
                    offset_err, offset_err_limit))

    ic = ipos_block[ipos_block.size//2]
    tc = offset + slope * (ic - ipos0)

    return ic, tc + ic * deltat + tref


def analyse_gps_tags(header, gps_tags, offset, nsamples):
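    # Derive time control points (sample index, absolute time) from the GPS
    # tags of a recording. Tags deviating by more than 10 s from the median
    # time offset, or implying a local clock rate off by more than 1e-5,
    # are discarded before the blockwise fitting.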

    ipos, t, fix, nsvs = gps_tags
    deltat = 1.0 / int(header['S_RATE'])

    tquartz = offset + ipos * deltat

    toff = t - tquartz
    toff_median = num.median(toff)

    n = t.size

    dtdt = (t[1:n] - t[0:n-1]) / (tquartz[1:n] - tquartz[0:n-1])

    ok = abs(toff_median - toff) < 10.

    xok = num.abs(dtdt - 1.0) < 0.00001

    if ok.size >= 1:
        ok[0] = False
        ok[1:n] &= xok
        ok[0:n-1] &= xok
        ok[n-1] = False

    ipos = ipos[ok]
    t = t[ok]
    fix = fix[ok]
    nsvs = nsvs[ok]

    blocksize = N_GPS_TAGS_WANTED // 2
    if ipos.size < blocksize:
        blocksize = max(10, ipos.size // 4)
        logger.warning(
            'Small number of GPS tags found. '
            'Reducing analysis block size to %i tags. '
            'Time correction may be unreliable.' % blocksize)

    try:
        if ipos.size < blocksize:
            raise ControlPointError(
                'Cannot determine GPS time correction: '
                'Too few GPS tags found: %i' % ipos.size)

        j = 0
        control_points = []
        tref = num.median(t - ipos*deltat)
        while j < ipos.size - blocksize:
            ipos_block = ipos[j:j+blocksize]
            t_block = t[j:j+blocksize]
            try:
                ic, tc = make_control_point(ipos_block, t_block, tref, deltat)
                control_points.append((ic, tc))
            except ControlPointError as e:
                logger.debug(str(e))

            j += blocksize

        ipos_last = ipos[-blocksize:]
        t_last = t[-blocksize:]
        try:
            ic, tc = make_control_point(ipos_last, t_last, tref, deltat)
            control_points.append((ic, tc))
        except ControlPointError as e:
            logger.debug(str(e))

        if len(control_points) < 2:
            raise ControlPointError(
                'Could not safely determine time corrections from GPS: '
                'unable to construct two or more control points')

        i0, t0 = control_points[0]
        i1, t1 = control_points[1]
        i2, t2 = control_points[-2]
        i3, t3 = control_points[-1]
        if len(control_points) == 2:
            tmin = t0 - i0 * deltat - offset * deltat
            tmax = t3 + (nsamples - i3 - 1) * deltat
        else:
            icontrol = num.array(
                [x[0] for x in control_points], dtype=num.int64)
            tcontrol = num.array(
                [x[1] for x in control_points], dtype=float)
            # robust against steps:
            slope = num.median(
                (tcontrol[1:] - tcontrol[:-1])
                / (icontrol[1:] - icontrol[:-1]))

            tmin = t0 + (offset - i0) * slope
            tmax = t2 + (offset + nsamples - 1 - i2) * slope

        if offset < i0:
            control_points[0:0] = [(offset, tmin)]

        if offset + nsamples - 1 > i3:
            control_points.append((offset + nsamples - 1, tmax))

        icontrol = num.array([x[0] for x in control_points], dtype=num.int64)
        tcontrol = num.array([x[1] for x in control_points], dtype=float)

        # corrected 2021-10-26: This sub-sample time shift introduced by the
        # Cube's ADC was previously not recognized.
        if APPLY_SUBSAMPLE_SHIFT_CORRECTION:
            tcontrol -= 0.199 * deltat + 0.0003

        return tmin, tmax, icontrol, tcontrol, ok

    except ControlPointError as e:
        logger.error(str(e))

        tmin = util.str_to_time(
            header['S_DATE'] + header['S_TIME'],
            format='%y/%m/%d%H:%M:%S')

        idat = int(header['DAT_NO'])
        if idat == 0:
            tmin = tmin + util.gps_utc_offset(tmin)
        else:
            tmin = util.day_start(tmin + idat * 24.*3600.) \
                + util.gps_utc_offset(tmin)

        tmax = tmin + (nsamples - 1) * deltat
        icontrol = num.array([offset, offset+nsamples - 1], dtype=num.int64)
        tcontrol = num.array([tmin, tmax])
        return tmin, tmax, icontrol, tcontrol, ok


def plot_gnss_location_timeline(fns):
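    # Note: gps_tags here carries the full 8-tuple (ipos, t, fix, nsvs,
    # lats, lons, elevations, temps) as returned by get_time_infos; the
    # timing analysis functions use only the first four of these arrays.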

    from matplotlib import pyplot as plt
    from pyrocko.orthodrome import latlon_to_ne_numpy
    not_ = num.logical_not
    h = 3600.

    fig = plt.figure()

    axes = []
    for i in range(4):
        axes.append(
            fig.add_subplot(4, 1, i+1, sharex=axes[-1] if axes else None))

    background_colors = [
        color('aluminium1'),
        color('aluminium2')]

    tref = None
    for ifn, fn in enumerate(fns):
        header, gps_tags, nsamples = get_time_infos(fn)
        _, t, fix, nsvs, lats, lons, elevations, _ = gps_tags

        fix = fix.astype(bool)

        if t.size < 2:
            logger.warning('Need at least 2 gps tags for plotting: %s' % fn)

        if tref is None:
            tref = util.day_start(t[0])
            lat, lon, elevation = coordinates_from_gps(gps_tags)

        norths, easts = latlon_to_ne_numpy(lat, lon, lats, lons)

        for ax, data in zip(axes, (norths, easts, elevations, nsvs)):
            tspan = t[num.array([0, -1])]

            ax.axvspan(*((tspan - tref) / h), color=background_colors[ifn % 2])
            med = num.median(data)
            ax.plot(
                (tspan - tref) / h,
                [med, med],
                ls='--',
                c='k',
                lw=3,
                alpha=0.5)

            ax.plot(
                (t[not_(fix)] - tref) / h, data[not_(fix)], 'o',
                ms=1.5,
                mew=0,
                color=color('scarletred2'))

            ax.plot(
                (t[fix] - tref) / h, data[fix], 'o',
                ms=1.5,
                mew=0,
                color=color('aluminium6'))

    for ax in axes:
        ax.grid(alpha=.3)

    ax_lat, ax_lon, ax_elev, ax_nsv = axes

    ax_lat.set_ylabel('Northing [m]')
    ax_lon.set_ylabel('Easting [m]')
    ax_elev.set_ylabel('Elevation [m]')
    ax_nsv.set_ylabel('Number of Satellites')

    ax_lat.get_xaxis().set_tick_params(labelbottom=False)
    ax_lon.get_xaxis().set_tick_params(labelbottom=False)
    ax_nsv.set_xlabel(
        'Hours after %s' % util.time_to_str(tref, format='%Y-%m-%d'))

    fig.suptitle(
        u'Lat: %.5f\u00b0 Lon: %.5f\u00b0 Elevation: %g m' % (
            lat, lon, elevation))

    plt.show()


def coordinates_from_gps(gps_tags):
    ipos, t, fix, nsvs, lats, lons, elevations, temps = gps_tags
    return tuple(num.median(x) for x in (lats, lons, elevations))


def extract_stations(fns):
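    # The station name is derived from the file name extension, which by
    # DATA-CUBE³ naming convention is assumed here to encode the recorder
    # ID (e.g. a hypothetical '0001.ABC' would yield station 'ABC').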

    import io
    import sys
    from pyrocko.model import Station
    from pyrocko.guts import dump_all

    stations = {}

    for fn in fns:
        sta_name = os.path.splitext(fn)[1].lstrip('.')
        if sta_name in stations:
            logger.warning('Cube %s already in list!', sta_name)
            continue

        header, gps_tags, nsamples = get_time_infos(fn)

        lat, lon, elevation = coordinates_from_gps(gps_tags)

        sta = Station(
            network='',
            station=sta_name,
            name=sta_name,
            location='',
            lat=lat,
            lon=lon,
            elevation=elevation)

        stations[sta_name] = sta

    f = io.BytesIO()
    dump_all(stations.values(), stream=f)
    sys.stdout.write(f.getvalue().decode())


def plot_timeline(fns):
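    # Accepts a list of file names, a single file name, or a directory
    # path. A single file is augmented with timing information from its
    # neighbours in the same directory; a directory is read as a whole.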

    from matplotlib import pyplot as plt

    fig = plt.figure()
    axes = fig.gca()

    h = 3600.

    if isinstance(fns, str):
        fn = fns
        if os.path.isdir(fn):
            fns = [
                os.path.join(fn, entry) for entry in sorted(os.listdir(fn))]

            ipos, t, fix, nsvs, header, offset, nsamples = \
                get_timing_context(fns)

        else:
            ipos, t, fix, nsvs, header, offset, nsamples = \
                get_extended_timing_context(fn)

    else:
        ipos, t, fix, nsvs, header, offset, nsamples = \
            get_timing_context(fns)

    deltat = 1.0 / int(header['S_RATE'])

    tref = num.median(t - ipos * deltat)
    tref = round(tref / deltat) * deltat

    if APPLY_SUBSAMPLE_SHIFT_CORRECTION:
        tcorr = 0.199 * deltat + 0.0003
    else:
        tcorr = 0.0

    x = ipos*deltat
    y = (t - tref) - ipos*deltat - tcorr

    bfix = fix != 0
    bnofix = fix == 0

    tmin, tmax, icontrol, tcontrol, ok = analyse_gps_tags(
        header, (ipos, t, fix, nsvs), offset, nsamples)

    la = num.logical_and
    nok = num.logical_not(ok)

    axes.plot(
        x[la(bfix, ok)]/h, y[la(bfix, ok)], '+',
        ms=5, color=color('chameleon3'))
    axes.plot(
        x[la(bfix, nok)]/h, y[la(bfix, nok)], '+',
        ms=5, color=color('aluminium4'))

    axes.plot(
        x[la(bnofix, ok)]/h, y[la(bnofix, ok)], 'x',
        ms=5, color=color('chocolate3'))
    axes.plot(
        x[la(bnofix, nok)]/h, y[la(bnofix, nok)], 'x',
        ms=5, color=color('aluminium4'))

    tred = tcontrol - icontrol*deltat - tref
    axes.plot(icontrol*deltat/h, tred, color=color('aluminium6'))
    axes.plot(icontrol*deltat/h, tred, 'o', ms=5, color=color('aluminium6'))

    ymin = (math.floor(tred.min() / deltat)-1) * deltat
    ymax = (math.ceil(tred.max() / deltat)+1) * deltat
    # ymin = min(ymin, num.min(y))
    # ymax = max(ymax, num.max(y))

    if ymax - ymin < 1000 * deltat:
        ygrid = math.floor(tred.min() / deltat) * deltat
        while ygrid < ymax:
            axes.axhline(ygrid, color=color('aluminium4'), alpha=0.3)
            ygrid += deltat

    xmin = icontrol[0]*deltat/h
    xmax = icontrol[-1]*deltat/h
    xsize = xmax - xmin
    xmin -= xsize * 0.1
    xmax += xsize * 0.1
    axes.set_xlim(xmin, xmax)

    axes.set_ylim(ymin, ymax)

    axes.set_xlabel('Uncorrected (quartz) time [h]')
    axes.set_ylabel('Relative time correction [s]')

    plt.show()


g_dir_contexts = {}


class DirContextEntry(Object):
    path = String.T()
    tstart = Timestamp.T()
    ifile = Int.T()


class DirContext(Object):
    path = String.T()
    mtime = Timestamp.T()
    entries = List.T(DirContextEntry.T())

    def get_entry(self, fn):
        path = os.path.abspath(fn)
        for entry in self.entries:
            if entry.path == path:
                return entry

        raise Exception('entry not found')
    def iter_entries(self, fn, step=1):
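        # Walk to the neighbouring files of the same recording session
        # (identical tstart), in DAT_NO order, until a gap is encountered.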

        current = self.get_entry(fn)
        by_ifile = dict(
            (entry.ifile, entry) for entry in self.entries
            if entry.tstart == current.tstart)

        icurrent = current.ifile
        while True:
            icurrent += step
            try:
                yield by_ifile[icurrent]

            except KeyError:
                break


def context(fn):
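    # Scan the directory containing fn and build a DirContext mapping each
    # DATA-CUBE³ file to its start time and file number. The context is
    # cached per directory and rebuilt whenever the newest mtime among the
    # directory and its files changes.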

    from pyrocko import datacube_ext

    dpath = os.path.dirname(os.path.abspath(fn))
    mtimes = [os.stat(dpath).st_mtime]

    dentries = sorted([os.path.join(dpath, f) for f in os.listdir(dpath)
                       if os.path.isfile(os.path.join(dpath, f))])
    for dentry in dentries:
        mtimes.append(os.stat(dentry).st_mtime)

    mtime = float(max(mtimes))

    if dpath in g_dir_contexts:
        dir_context = g_dir_contexts[dpath]
        if dir_context.mtime == mtime:
            return dir_context

        del g_dir_contexts[dpath]

    entries = []
    for fn2 in dentries:
        if not os.path.isfile(fn2):
            continue

        with open(fn2, 'rb') as f:
            first512 = f.read(512)
            if not detect(first512):
                continue

        with open(fn2, 'rb') as f:
            try:
                header, data_arrays, gps_tags, nsamples, _ = \
                    datacube_ext.load(f.fileno(), 3, 0, -1, None)

            except datacube_ext.DataCubeError as e:
                e = DataCubeError(str(e))
                e.set_context('filename', fn2)
                raise e

        header = dict(header)
        entries.append(DirContextEntry(
            path=os.path.abspath(fn2),
            tstart=util.str_to_time(
                '20' + header['S_DATE'] + ' ' + header['S_TIME'],
                format='%Y/%m/%d %H:%M:%S'),
            ifile=int(header['DAT_NO'])))

    dir_context = DirContext(mtime=mtime, path=dpath, entries=entries)
    g_dir_contexts[dpath] = dir_context

    return dir_context


def get_time_infos(fn):
    from pyrocko import datacube_ext

    with open(fn, 'rb') as f:
        try:
            header, _, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), 1, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    return dict(header), gps_tags, nsamples


def get_timing_context(fns):
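    # Concatenate the GPS tags of several files recorded back-to-back,
    # shifting each file's sample indices by the total number of samples
    # in the preceding files, so that all tags refer to one continuous
    # sample stream.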

    joined = [[], [], [], []]
    ioff = 0
    for fn in fns:
        header, gps_tags, nsamples = get_time_infos(fn)

        ipos = gps_tags[0]
        ipos += ioff

        for i in range(4):
            joined[i].append(gps_tags[i])

        ioff += nsamples

    ipos, t, fix, nsvs = [num.concatenate(x) for x in joined]

    nsamples = ioff
    return ipos, t, fix, nsvs, header, 0, nsamples


def get_extended_timing_context(fn):
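    # Aggregate GPS tags from the file itself and, if it contains no GPS
    # fix at all, from neighbouring files of the same session (forward,
    # then backward) until a file with a fix is found. The returned offset
    # of 0 and nsamples_base keep the sample index frame of fn itself.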

    c = context(fn)

    header, gps_tags, nsamples_base = get_time_infos(fn)

    ioff = 0
    aggregated = [gps_tags]

    nsamples_total = nsamples_base

    if num.sum(gps_tags[2]) == 0:

        ioff = nsamples_base
        for entry in c.iter_entries(fn, 1):

            _, gps_tags, nsamples = get_time_infos(entry.path)

            ipos = gps_tags[0]
            ipos += ioff

            aggregated.append(gps_tags)
            nsamples_total += nsamples

            if num.sum(gps_tags[2]) > 0:
                break

            ioff += nsamples

        ioff = 0
        for entry in c.iter_entries(fn, -1):

            _, gps_tags, nsamples = get_time_infos(entry.path)

            ioff -= nsamples

            ipos = gps_tags[0]
            ipos += ioff

            aggregated[0:0] = [gps_tags]

            nsamples_total += nsamples

            if num.sum(gps_tags[2]) > 0:
                break

    ipos, t, fix, nsvs = [num.concatenate(x) for x in zip(*aggregated)][:4]

    # return ipos, t, fix, nsvs, header, ioff, nsamples_total
    return ipos, t, fix, nsvs, header, 0, nsamples_base


def iload(fn, load_data=True, interpolation='sinc', yield_gps_tags=False):
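    # Read one DATA-CUBE³ file and yield one trace per channel, with GPS
    # drift correction applied by sinc interpolation unless
    # interpolation='off'. Minimal usage sketch ('data.ABC' is a
    # hypothetical raw file name):
    #
    #     from pyrocko.io import datacube
    #
    #     for tr in datacube.iload('data.ABC'):
    #         print(tr)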

    from pyrocko import datacube_ext
    from pyrocko import signal_ext

    if interpolation not in ('sinc', 'off'):
        raise NotImplementedError(
            'no such interpolation method: %s' % interpolation)

    with open(fn, 'rb') as f:
        if load_data:
            loadflag = 2
        else:
            loadflag = 1

        try:
            header, data_arrays, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), loadflag, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    header = dict(header)
    deltat = 1.0 / int(header['S_RATE'])
    nchannels = int(header['CH_NUM'])

    ipos, t, fix, nsvs, header_, offset_, nsamples_ = \
        get_extended_timing_context(fn)

    tmin, tmax, icontrol, tcontrol, _ = analyse_gps_tags(
        header_, (ipos, t, fix, nsvs), offset_, nsamples_)

    if icontrol is None:
        logger.warning(
            'No usable GPS timestamps found. Using datacube header '
            'information to guess time. (file: "%s")' % fn)

    tmin_ip = round(tmin / deltat) * deltat
    if interpolation != 'off':
        tmax_ip = round(tmax / deltat) * deltat
    else:
        tmax_ip = tmin_ip + (nsamples-1) * deltat

    nsamples_ip = int(round((tmax_ip - tmin_ip)/deltat)) + 1
    # to prevent problems with rounding errors:
    tmax_ip = tmin_ip + (nsamples_ip-1) * deltat
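
    # Cube timestamps are on the GPS time scale; collect the leap second
    # epochs (converted to GPS time) so that a trace spanning one can be
    # split at the leap and each part shifted to UTC individually below.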

    leaps = num.array(
        [x[0] + util.gps_utc_offset(x[0]) for x in util.read_leap_seconds2()],
        dtype=float)

    if load_data and icontrol is not None:
        ncontrol_this = num.sum(
            num.logical_and(0 <= icontrol, icontrol < nsamples))

        if ncontrol_this <= 1:
            logger.warning(
                'Extrapolating GPS time information from directory context '
                '(insufficient number of GPS timestamps in file: "%s").' % fn)

    for i in range(nchannels):
        if load_data:
            arr = data_arrays[i]
            assert arr.size == nsamples

            if interpolation == 'sinc' and icontrol is not None:

                ydata = num.empty(nsamples_ip, dtype=float)
                try:
                    signal_ext.antidrift(
                        icontrol, tcontrol,
                        arr.astype(float), tmin_ip, deltat, ydata)

                except signal_ext.Error as e:
                    e = DataCubeError(str(e))
                    e.set_context('filename', fn)
                    e.set_context('n_control_points', icontrol.size)
                    e.set_context('n_samples_raw', arr.size)
                    e.set_context('n_samples_ip', ydata.size)
                    e.set_context('tmin_ip', util.time_to_str(tmin_ip))
                    raise e

                ydata = num.round(ydata).astype(arr.dtype)
            else:
                ydata = arr

            tr_tmin = tmin_ip
            tr_tmax = None
        else:
            ydata = None
            tr_tmin = tmin_ip
            tr_tmax = tmax_ip

        tr = trace.Trace('', header['DEV_NO'], '', 'p%i' % i, deltat=deltat,
                         ydata=ydata, tmin=tr_tmin, tmax=tr_tmax, meta=header)

        bleaps = num.logical_and(tmin_ip <= leaps, leaps < tmax_ip)

        if num.any(bleaps):
            assert num.sum(bleaps) == 1
            tcut = leaps[bleaps][0]

            for tmin_cut, tmax_cut in [
                    (tr.tmin, tcut), (tcut, tr.tmax+tr.deltat)]:

                try:
                    tr_cut = tr.chop(tmin_cut, tmax_cut, inplace=False)
                    tr_cut.shift(
                        util.utc_gps_offset(0.5*(tr_cut.tmin+tr_cut.tmax)))
                    if yield_gps_tags:
                        yield tr_cut, gps_tags
                    else:
                        yield tr_cut

                except trace.NoData:
                    pass

        else:
            tr.shift(util.utc_gps_offset(0.5*(tr.tmin+tr.tmax)))
            if yield_gps_tags:
                yield tr, gps_tags
            else:
                yield tr


header_keys = {
    str: b'GIPP_V DEV_NO E_NAME GPS_PO S_TIME S_DATE DAT_NO'.split(),
    int: b'''P_AMPL CH_NUM S_RATE D_FILT C_MODE A_CHOP F_TIME GPS_TI GPS_OF
             A_FILT A_PHAS GPS_ON ACQ_ON V_TCXO D_VOLT E_VOLT'''.split()}

all_header_keys = header_keys[str] + header_keys[int]


def detect(first512):
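    # Heuristic DATA-CUBE³ format check on the first 512 bytes: the first
    # byte must carry 0xf in its high nibble and the header section
    # (terminated by 0x80) must contain at least one known header key.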

    s = first512

    if len(s) < 512:
        return False

    if ord(s[0:1]) >> 4 != 15:
        return False

    n = s.find(b'\x80')
    if n == -1:
        n = len(s)

    s = s[1:n]
    s = s.replace(b'\xf0', b'')
    s = s.replace(b';', b' ')
    s = s.replace(b'=', b' ')
    kvs = s.split(b' ')

    if len([k for k in all_header_keys if k in kvs]) == 0:
        return False

    return True


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Datacube reader')

    parser.add_argument(
        'action', choices=['timeline', 'gnss', 'stations'],
        help='Action')
    parser.add_argument(
        'files', nargs='+')

    parser.add_argument(
        '--loglevel', '-l',
        choices=['critical', 'error', 'warning', 'info', 'debug'],
        default='info',
        help='Set logger level. Default: %(default)s')

    args = parser.parse_args()

    util.setup_logging('pyrocko.io.datacube', args.loglevel)
    logging.getLogger('matplotlib.font_manager').disabled = True

    if args.action == 'timeline':
        plot_timeline(args.files)

    elif args.action == 'gnss':
        plot_gnss_location_timeline(args.files)

    elif args.action == 'stations':
        extract_stations(args.files)