1# http://pyrocko.org - GPLv3 

2# 

3# The Pyrocko Developers, 21st Century 

4# ---|P------/S----------~Lg---------- 

5from __future__ import absolute_import, division 

6 

7import errno 

8import time 

9import os 

10import struct 

11import weakref 

12import math 

13import shutil 

14try: 

15 import fcntl 

16except ImportError: 

17 fcntl = None 

18import copy 

19import logging 

20import re 

21import hashlib 

22from glob import glob 

23 

24import numpy as num 

25from scipy import signal 

26 

27from . import meta 

28from .error import StoreError 

29try: 

30 from . import store_ext 

31except ImportError: 

32 store_ext = None 

33from pyrocko import util, spit 

34 

35logger = logging.getLogger('pyrocko.gf.store') 

36op = os.path 

37 

38# gf store endianness 

39E = '<' 

40 

41gf_dtype = num.dtype(num.float32) 

42gf_dtype_store = num.dtype(E + 'f4') 

43 

44gf_dtype_nbytes_per_sample = 4 

45 

46gf_store_header_dtype = [ 

47 ('nrecords', E + 'u8'), 

48 ('deltat', E + 'f4'), 

49] 

50 

51gf_store_header_fmt = E + 'Qf' 

52gf_store_header_fmt_size = struct.calcsize(gf_store_header_fmt) 

53 

54gf_record_dtype = num.dtype([ 

55 ('data_offset', E + 'u8'), 

56 ('itmin', E + 'i4'), 

57 ('nsamples', E + 'u4'), 

58 ('begin_value', E + 'f4'), 

59 ('end_value', E + 'f4'), 

60]) 

61 

62available_stored_tables = ['phase', 'takeoff_angle', 'incidence_angle'] 

63 

64km = 1000. 

65 

66 

67class SeismosizerErrorEnum: 

68 SUCCESS = 0 

69 INVALID_RECORD = 1 

70 EMPTY_RECORD = 2 

71 BAD_RECORD = 3 

72 ALLOC_FAILED = 4 

73 BAD_REQUEST = 5 

74 BAD_DATA_OFFSET = 6 

75 READ_DATA_FAILED = 7 

76 SEEK_INDEX_FAILED = 8 

77 READ_INDEX_FAILED = 9 

78 FSTAT_TRACES_FAILED = 10 

79 BAD_STORE = 11 

80 MMAP_INDEX_FAILED = 12 

81 MMAP_TRACES_FAILED = 13 

82 INDEX_OUT_OF_BOUNDS = 14 

83 NTARGETS_OUT_OF_BOUNDS = 15 

84 

85 

86def valid_string_id(s): 

87 return re.match(meta.StringID.pattern, s) 

88 

89 

90def check_string_id(s): 

91 if not valid_string_id(s): 

92 raise ValueError('invalid name %s' % s) 

93 

94# - data_offset 

95# 

96# Data file offset of first sample in bytes (the seek value). 

97# Special values: 

98# 

99# 0x00 - missing trace 

100# 0x01 - zero trace (GF trace is physically zero) 

101# 0x02 - short trace of 1 or 2 samples (no need for data allocation) 

102# 

103# - itmin 

104# 

105# Time of first sample of the trace as a multiple of the sampling interval. All 

106# GF samples must be evaluated at multiples of the sampling interval with 

107# respect to a global reference time zero. Must be set also for zero traces. 

108# 

109# - nsamples 

110# 

111# Number of samples in the GF trace. Should be set to zero for zero traces. 

112# 

113# - begin_value, end_value 

114# 

115# Values of first and last sample. These values are included in data[] 

116# redunantly. 

117 

118 

119class NotMultipleOfSamplingInterval(Exception): 

120 pass 

121 

122 

123sampling_check_eps = 1e-5 

124 

125 

126class GFTrace(object): 

127 ''' 

128 Green's Function trace class for handling traces from the GF store. 

129 ''' 

130 

131 @classmethod 

132 def from_trace(cls, tr): 

133 return cls(data=tr.ydata.copy(), tmin=tr.tmin, deltat=tr.deltat) 

134 

135 def __init__(self, data=None, itmin=None, deltat=1.0, 

136 is_zero=False, begin_value=None, end_value=None, tmin=None): 

137 

138 assert sum((x is None) for x in (tmin, itmin)) == 1, \ 

139 'GFTrace: either tmin or itmin must be given'\ 

140 

141 if tmin is not None: 

142 itmin = int(round(tmin / deltat)) 

143 if abs(itmin*deltat - tmin) > sampling_check_eps*deltat: 

144 raise NotMultipleOfSamplingInterval( 

145 'GFTrace: tmin (%g) is not a multiple of sampling ' 

146 'interval (%g)' % (tmin, deltat)) 

147 

148 if data is not None: 

149 data = num.asarray(data, dtype=gf_dtype) 

150 else: 

151 data = num.array([], dtype=gf_dtype) 

152 begin_value = 0.0 

153 end_value = 0.0 

154 

155 self.data = weakref.ref(data)() 

156 self.itmin = itmin 

157 self.deltat = deltat 

158 self.is_zero = is_zero 

159 self.n_records_stacked = 0. 

160 self.t_stack = 0. 

161 self.t_optimize = 0. 

162 self.err = SeismosizerErrorEnum.SUCCESS 

163 

164 if data is not None and data.size > 0: 

165 if begin_value is None: 

166 begin_value = data[0] 

167 if end_value is None: 

168 end_value = data[-1] 

169 

170 self.begin_value = begin_value 

171 self.end_value = end_value 

172 

173 @property 

174 def t(self): 

175 ''' 

176 Time vector of the GF trace. 

177 

178 :returns: Time vector 

179 :rtype: :py:class:`numpy.ndarray` 

180 ''' 

181 return num.linspace( 

182 self.itmin*self.deltat, 

183 (self.itmin+self.data.size-1)*self.deltat, self.data.size) 

184 

185 def __str__(self, itmin=0): 

186 

187 if self.is_zero: 

188 return 'ZERO' 

189 

190 s = [] 

191 for i in range(itmin, self.itmin + self.data.size): 

192 if i >= self.itmin and i < self.itmin + self.data.size: 

193 s.append('%7.4g' % self.data[i-self.itmin]) 

194 else: 

195 s.append(' '*7) 

196 

197 return '|'.join(s) 

198 

199 def to_trace(self, net, sta, loc, cha): 

200 from pyrocko import trace 

201 return trace.Trace( 

202 net, sta, loc, cha, 

203 ydata=self.data, 

204 deltat=self.deltat, 

205 tmin=self.itmin*self.deltat) 

206 

207 

208class GFValue(object): 

209 

210 def __init__(self, value): 

211 self.value = value 

212 self.n_records_stacked = 0. 

213 self.t_stack = 0. 

214 self.t_optimize = 0. 

215 

216 

217Zero = GFTrace(is_zero=True, itmin=0) 

218 

219 

220def make_same_span(tracesdict): 

221 

222 traces = tracesdict.values() 

223 

224 nonzero = [tr for tr in traces if not tr.is_zero] 

225 if not nonzero: 

226 return {k: Zero for k in tracesdict.keys()} 

227 

228 deltat = nonzero[0].deltat 

229 

230 itmin = min(tr.itmin for tr in nonzero) 

231 itmax = max(tr.itmin+tr.data.size for tr in nonzero) - 1 

232 

233 out = {} 

234 for k, tr in tracesdict.items(): 

235 n = itmax - itmin + 1 

236 if tr.itmin != itmin or tr.data.size != n: 

237 data = num.zeros(n, dtype=gf_dtype) 

238 if not tr.is_zero: 

239 lo = tr.itmin-itmin 

240 hi = lo + tr.data.size 

241 data[:lo] = tr.data[0] 

242 data[lo:hi] = tr.data 

243 data[hi:] = tr.data[-1] 

244 

245 tr = GFTrace(data, itmin, deltat) 

246 

247 out[k] = tr 

248 

249 return out 

250 

251 

252class CannotCreate(StoreError): 

253 pass 

254 

255 

256class CannotOpen(StoreError): 

257 pass 

258 

259 

260class DuplicateInsert(StoreError): 

261 pass 

262 

263 

264class ShortRead(StoreError): 

265 def __str__(self): 

266 return 'unexpected end of data (truncated traces file?)' 

267 

268 

269class NotAllowedToInterpolate(StoreError): 

270 def __str__(self): 

271 return 'not allowed to interpolate' 

272 

273 

274class NoSuchExtra(StoreError): 

275 def __init__(self, s): 

276 StoreError.__init__(self) 

277 self.value = s 

278 

279 def __str__(self): 

280 return 'extra information for key "%s" not found.' % self.value 

281 

282 

283class NoSuchPhase(StoreError): 

284 def __init__(self, s): 

285 StoreError.__init__(self) 

286 self.value = s 

287 

288 def __str__(self): 

289 return 'phase for key "%s" not found. ' \ 

290 'Running "fomosto ttt" may be needed.' % self.value 

291 

292 

293def remove_if_exists(fn, force=False): 

294 if os.path.exists(fn): 

295 if force: 

296 os.remove(fn) 

297 else: 

298 raise CannotCreate('file %s already exists' % fn) 

299 

300 

301def get_extra_path(store_dir, key): 

302 check_string_id(key) 

303 return os.path.join(store_dir, 'extra', key) 

304 

305 

306class BaseStore(object): 

307 

308 @staticmethod 

309 def lock_fn_(store_dir): 

310 return os.path.join(store_dir, 'lock') 

311 

312 @staticmethod 

313 def index_fn_(store_dir): 

314 return os.path.join(store_dir, 'index') 

315 

316 @staticmethod 

317 def data_fn_(store_dir): 

318 return os.path.join(store_dir, 'traces') 

319 

320 @staticmethod 

321 def config_fn_(store_dir): 

322 return os.path.join(store_dir, 'config') 

323 

324 @staticmethod 

325 def create(store_dir, deltat, nrecords, force=False): 

326 

327 try: 

328 util.ensuredir(store_dir) 

329 except Exception: 

330 raise CannotCreate('cannot create directory %s' % store_dir) 

331 

332 index_fn = BaseStore.index_fn_(store_dir) 

333 data_fn = BaseStore.data_fn_(store_dir) 

334 

335 for fn in (index_fn, data_fn): 

336 remove_if_exists(fn, force) 

337 

338 with open(index_fn, 'wb') as f: 

339 f.write(struct.pack(gf_store_header_fmt, nrecords, deltat)) 

340 records = num.zeros(nrecords, dtype=gf_record_dtype) 

341 records.tofile(f) 

342 

343 with open(data_fn, 'wb') as f: 

344 f.write(b'\0' * 32) 

345 

346 def __init__(self, store_dir, mode='r', use_memmap=True): 

347 assert mode in 'rw' 

348 self.store_dir = store_dir 

349 self.mode = mode 

350 self._use_memmap = use_memmap 

351 self._nrecords = None 

352 self._deltat = None 

353 self._f_index = None 

354 self._f_data = None 

355 self._records = None 

356 self.cstore = None 

357 

358 def open(self): 

359 assert self._f_index is None 

360 index_fn = self.index_fn() 

361 data_fn = self.data_fn() 

362 

363 if self.mode == 'r': 

364 fmode = 'rb' 

365 elif self.mode == 'w': 

366 fmode = 'r+b' 

367 else: 

368 assert False, 'invalid mode: %s' % self.mode 

369 

370 try: 

371 self._f_index = open(index_fn, fmode) 

372 self._f_data = open(data_fn, fmode) 

373 except Exception: 

374 self.mode = '' 

375 raise CannotOpen('cannot open gf store: %s' % self.store_dir) 

376 

377 try: 

378 # replace single precision deltat value in store with the double 

379 # precision value from the config, if available 

380 self.cstore = store_ext.store_init( 

381 self._f_index.fileno(), self._f_data.fileno(), 

382 self.get_deltat() or 0.0) 

383 

384 except store_ext.StoreExtError as e: 

385 raise StoreError(str(e)) 

386 

387 while True: 

388 try: 

389 dataheader = self._f_index.read(gf_store_header_fmt_size) 

390 break 

391 

392 except IOError as e: 

393 # occasionally got this one on an NFS volume 

394 if e.errno == errno.EBUSY: 

395 time.sleep(0.01) 

396 else: 

397 raise 

398 

399 nrecords, deltat = struct.unpack(gf_store_header_fmt, dataheader) 

400 self._nrecords = nrecords 

401 self._deltat = deltat 

402 

403 self._load_index() 

404 

405 def __del__(self): 

406 if self.mode != '': 

407 self.close() 

408 

409 def lock(self): 

410 if not fcntl: 

411 lock_fn = self.lock_fn() 

412 util.create_lockfile(lock_fn) 

413 else: 

414 if not self._f_index: 

415 self.open() 

416 

417 while True: 

418 try: 

419 fcntl.lockf(self._f_index, fcntl.LOCK_EX) 

420 break 

421 

422 except IOError as e: 

423 if e.errno == errno.ENOLCK: 

424 time.sleep(0.01) 

425 else: 

426 raise 

427 

428 def unlock(self): 

429 if not fcntl: 

430 lock_fn = self.lock_fn() 

431 util.delete_lockfile(lock_fn) 

432 else: 

433 self._f_data.flush() 

434 self._f_index.flush() 

435 fcntl.lockf(self._f_index, fcntl.LOCK_UN) 

436 

437 def put(self, irecord, trace): 

438 self._put(irecord, trace) 

439 

440 def get_record(self, irecord): 

441 return self._get_record(irecord) 

442 

443 def get_span(self, irecord, decimate=1): 

444 return self._get_span(irecord, decimate=decimate) 

445 

446 def get(self, irecord, itmin=None, nsamples=None, decimate=1, 

447 implementation='c'): 

448 return self._get(irecord, itmin, nsamples, decimate, implementation) 

449 

450 def sum(self, irecords, delays, weights, itmin=None, 

451 nsamples=None, decimate=1, implementation='c', 

452 optimization='enable'): 

453 return self._sum(irecords, delays, weights, itmin, nsamples, decimate, 

454 implementation, optimization) 

455 

456 def irecord_format(self): 

457 return util.zfmt(self._nrecords) 

458 

459 def str_irecord(self, irecord): 

460 return self.irecord_format() % irecord 

461 

462 def close(self): 

463 if self.mode == 'w': 

464 if not self._f_index: 

465 self.open() 

466 self._save_index() 

467 

468 if self._f_data: 

469 self._f_data.close() 

470 self._f_data = None 

471 

472 if self._f_index: 

473 self._f_index.close() 

474 self._f_index = None 

475 

476 del self._records 

477 self._records = None 

478 

479 self.mode = '' 

480 

481 def _get_record(self, irecord): 

482 if not self._f_index: 

483 self.open() 

484 

485 return self._records[irecord] 

486 

487 def _get(self, irecord, itmin, nsamples, decimate, implementation): 

488 ''' 

489 Retrieve complete GF trace from storage. 

490 ''' 

491 

492 if not self._f_index: 

493 self.open() 

494 

495 if not self.mode == 'r': 

496 raise StoreError('store not open in read mode') 

497 

498 if implementation == 'c' and decimate == 1: 

499 

500 if nsamples is None: 

501 nsamples = -1 

502 

503 if itmin is None: 

504 itmin = 0 

505 

506 try: 

507 return GFTrace(*store_ext.store_get( 

508 self.cstore, int(irecord), int(itmin), int(nsamples))) 

509 except store_ext.StoreExtError as e: 

510 raise StoreError(str(e)) 

511 

512 else: 

513 return self._get_impl_reference(irecord, itmin, nsamples, decimate) 

514 

515 def get_deltat(self): 

516 return self._deltat 

517 

518 def _get_impl_reference(self, irecord, itmin, nsamples, decimate): 

519 deltat = self.get_deltat() 

520 

521 if not (0 <= irecord < self._nrecords): 

522 raise StoreError('invalid record number requested ' 

523 '(irecord = %i, nrecords = %i)' % 

524 (irecord, self._nrecords)) 

525 

526 ipos, itmin_data, nsamples_data, begin_value, end_value = \ 

527 self._records[irecord] 

528 

529 if None in (itmin, nsamples): 

530 itmin = itmin_data 

531 itmax = itmin_data + nsamples_data - 1 

532 nsamples = nsamples_data 

533 else: 

534 itmin = itmin * decimate 

535 nsamples = nsamples * decimate 

536 itmax = itmin + nsamples - decimate 

537 

538 if ipos == 0: 

539 return None 

540 

541 elif ipos == 1: 

542 itmin_ext = (max(itmin, itmin_data)//decimate) * decimate 

543 return GFTrace(is_zero=True, itmin=itmin_ext//decimate) 

544 

545 if decimate == 1: 

546 ilo = max(itmin, itmin_data) - itmin_data 

547 ihi = min(itmin+nsamples, itmin_data+nsamples_data) - itmin_data 

548 data = self._get_data(ipos, begin_value, end_value, ilo, ihi) 

549 

550 return GFTrace(data, itmin=itmin_data+ilo, deltat=deltat, 

551 begin_value=begin_value, end_value=end_value) 

552 

553 else: 

554 itmax_data = itmin_data + nsamples_data - 1 

555 

556 # put begin and end to multiples of new sampling rate 

557 itmin_ext = (max(itmin, itmin_data)//decimate) * decimate 

558 itmax_ext = -((-min(itmax, itmax_data))//decimate) * decimate 

559 nsamples_ext = itmax_ext - itmin_ext + 1 

560 

561 # add some padding for the aa filter 

562 order = 30 

563 itmin_ext_pad = itmin_ext - order//2 

564 itmax_ext_pad = itmax_ext + order//2 

565 nsamples_ext_pad = itmax_ext_pad - itmin_ext_pad + 1 

566 

567 itmin_overlap = max(itmin_data, itmin_ext_pad) 

568 itmax_overlap = min(itmax_data, itmax_ext_pad) 

569 

570 ilo = itmin_overlap - itmin_ext_pad 

571 ihi = max(ilo, itmax_overlap - itmin_ext_pad + 1) 

572 ilo_data = itmin_overlap - itmin_data 

573 ihi_data = max(ilo_data, itmax_overlap - itmin_data + 1) 

574 

575 data_ext_pad = num.empty(nsamples_ext_pad, dtype=gf_dtype) 

576 data_ext_pad[ilo:ihi] = self._get_data( 

577 ipos, begin_value, end_value, ilo_data, ihi_data) 

578 

579 data_ext_pad[:ilo] = begin_value 

580 data_ext_pad[ihi:] = end_value 

581 

582 b = signal.firwin(order + 1, 1. / decimate, window='hamming') 

583 a = 1. 

584 data_filt_pad = signal.lfilter(b, a, data_ext_pad) 

585 data_deci = data_filt_pad[order:order+nsamples_ext:decimate] 

586 if data_deci.size >= 1: 

587 if itmin_ext <= itmin_data: 

588 data_deci[0] = begin_value 

589 

590 if itmax_ext >= itmax_data: 

591 data_deci[-1] = end_value 

592 

593 return GFTrace(data_deci, itmin_ext//decimate, 

594 deltat*decimate, 

595 begin_value=begin_value, end_value=end_value) 

596 

597 def _get_span(self, irecord, decimate=1): 

598 ''' 

599 Get temporal extent of GF trace at given index. 

600 ''' 

601 

602 if not self._f_index: 

603 self.open() 

604 

605 assert 0 <= irecord < self._nrecords, \ 

606 'irecord = %i, nrecords = %i' % (irecord, self._nrecords) 

607 

608 (_, itmin, nsamples, _, _) = self._records[irecord] 

609 

610 itmax = itmin + nsamples - 1 

611 

612 if decimate == 1: 

613 return itmin, itmax 

614 else: 

615 if nsamples == 0: 

616 return itmin//decimate, itmin//decimate - 1 

617 else: 

618 return itmin//decimate, -((-itmax)//decimate) 

619 

620 def _put(self, irecord, trace): 

621 ''' 

622 Save GF trace to storage. 

623 ''' 

624 

625 if not self._f_index: 

626 self.open() 

627 

628 deltat = self.get_deltat() 

629 

630 assert self.mode == 'w' 

631 assert trace.is_zero or \ 

632 abs(trace.deltat - deltat) < 1e-7 * deltat 

633 assert 0 <= irecord < self._nrecords, \ 

634 'irecord = %i, nrecords = %i' % (irecord, self._nrecords) 

635 

636 if self._records[irecord][0] != 0: 

637 raise DuplicateInsert('record %i already in store' % irecord) 

638 

639 if trace.is_zero or num.all(trace.data == 0.0): 

640 self._records[irecord] = (1, trace.itmin, 0, 0., 0.) 

641 return 

642 

643 ndata = trace.data.size 

644 

645 if ndata > 2: 

646 self._f_data.seek(0, 2) 

647 ipos = self._f_data.tell() 

648 trace.data.astype(gf_dtype_store).tofile(self._f_data) 

649 else: 

650 ipos = 2 

651 

652 self._records[irecord] = (ipos, trace.itmin, ndata, 

653 trace.data[0], trace.data[-1]) 

654 

655 def _sum_impl_alternative(self, irecords, delays, weights, itmin, nsamples, 

656 decimate): 

657 

658 ''' 

659 Sum delayed and weighted GF traces. 

660 ''' 

661 

662 if not self._f_index: 

663 self.open() 

664 

665 assert self.mode == 'r' 

666 

667 deltat = self.get_deltat() * decimate 

668 

669 if len(irecords) == 0: 

670 if None in (itmin, nsamples): 

671 return Zero 

672 else: 

673 return GFTrace( 

674 num.zeros(nsamples, dtype=gf_dtype), itmin, 

675 deltat, is_zero=True) 

676 

677 assert len(irecords) == len(delays) 

678 assert len(irecords) == len(weights) 

679 

680 if None in (itmin, nsamples): 

681 itmin_delayed, itmax_delayed = [], [] 

682 for irecord, delay in zip(irecords, delays): 

683 itmin, itmax = self._get_span(irecord, decimate=decimate) 

684 itmin_delayed.append(itmin + delay/deltat) 

685 itmax_delayed.append(itmax + delay/deltat) 

686 

687 itmin = int(math.floor(min(itmin_delayed))) 

688 nsamples = int(math.ceil(max(itmax_delayed))) - itmin + 1 

689 

690 out = num.zeros(nsamples, dtype=gf_dtype) 

691 if nsamples == 0: 

692 return GFTrace(out, itmin, deltat) 

693 

694 for ii in range(len(irecords)): 

695 irecord = irecords[ii] 

696 delay = delays[ii] 

697 weight = weights[ii] 

698 

699 if weight == 0.0: 

700 continue 

701 

702 idelay_floor = int(math.floor(delay/deltat)) 

703 idelay_ceil = int(math.ceil(delay/deltat)) 

704 

705 gf_trace = self._get( 

706 irecord, 

707 itmin - idelay_ceil, 

708 nsamples + idelay_ceil - idelay_floor, 

709 decimate, 

710 'reference') 

711 

712 assert gf_trace.itmin >= itmin - idelay_ceil 

713 assert gf_trace.data.size <= nsamples + idelay_ceil - idelay_floor 

714 

715 if gf_trace.is_zero: 

716 continue 

717 

718 ilo = gf_trace.itmin - itmin + idelay_floor 

719 ihi = ilo + gf_trace.data.size + (idelay_ceil-idelay_floor) 

720 

721 data = gf_trace.data 

722 

723 if idelay_floor == idelay_ceil: 

724 out[ilo:ihi] += data * weight 

725 else: 

726 if data.size: 

727 k = 1 

728 if ihi <= nsamples: 

729 out[ihi-1] += gf_trace.end_value * \ 

730 ((idelay_ceil-delay/deltat) * weight) 

731 k = 0 

732 

733 out[ilo+1:ihi-k] += data[:data.size-k] * \ 

734 ((delay/deltat-idelay_floor) * weight) 

735 k = 1 

736 if ilo >= 0: 

737 out[ilo] += gf_trace.begin_value * \ 

738 ((delay/deltat-idelay_floor) * weight) 

739 k = 0 

740 

741 out[ilo+k:ihi-1] += data[k:] * \ 

742 ((idelay_ceil-delay/deltat) * weight) 

743 

744 if ilo > 0 and gf_trace.begin_value != 0.0: 

745 out[:ilo] += gf_trace.begin_value * weight 

746 

747 if ihi < nsamples and gf_trace.end_value != 0.0: 

748 out[ihi:] += gf_trace.end_value * weight 

749 

750 return GFTrace(out, itmin, deltat) 

751 

752 def _sum_impl_reference(self, irecords, delays, weights, itmin, nsamples, 

753 decimate): 

754 

755 if not self._f_index: 

756 self.open() 

757 

758 deltat = self.get_deltat() * decimate 

759 

760 if len(irecords) == 0: 

761 if None in (itmin, nsamples): 

762 return Zero 

763 else: 

764 return GFTrace( 

765 num.zeros(nsamples, dtype=gf_dtype), itmin, 

766 deltat, is_zero=True) 

767 

768 datas = [] 

769 itmins = [] 

770 for i, delay, weight in zip(irecords, delays, weights): 

771 tr = self._get(i, None, None, decimate, 'reference') 

772 

773 idelay_floor = int(math.floor(delay/deltat)) 

774 idelay_ceil = int(math.ceil(delay/deltat)) 

775 

776 if idelay_floor == idelay_ceil: 

777 itmins.append(tr.itmin + idelay_floor) 

778 datas.append(tr.data.copy()*weight) 

779 else: 

780 itmins.append(tr.itmin + idelay_floor) 

781 datas.append(tr.data.copy()*weight*(idelay_ceil-delay/deltat)) 

782 itmins.append(tr.itmin + idelay_ceil) 

783 datas.append(tr.data.copy()*weight*(delay/deltat-idelay_floor)) 

784 

785 itmin_all = min(itmins) 

786 

787 itmax_all = max(itmin_ + data.size for (itmin_, data) in 

788 zip(itmins, datas)) 

789 

790 if itmin is not None: 

791 itmin_all = min(itmin_all, itmin) 

792 if nsamples is not None: 

793 itmax_all = max(itmax_all, itmin+nsamples) 

794 

795 nsamples_all = itmax_all - itmin_all 

796 

797 arr = num.zeros((len(datas), nsamples_all), dtype=gf_dtype) 

798 for i, itmin_, data in zip(num.arange(len(datas)), itmins, datas): 

799 if data.size > 0: 

800 ilo = itmin_-itmin_all 

801 ihi = ilo + data.size 

802 arr[i, :ilo] = data[0] 

803 arr[i, ilo:ihi] = data 

804 arr[i, ihi:] = data[-1] 

805 

806 sum_arr = arr.sum(axis=0) 

807 

808 if itmin is not None and nsamples is not None: 

809 ilo = itmin-itmin_all 

810 ihi = ilo + nsamples 

811 sum_arr = sum_arr[ilo:ihi] 

812 

813 else: 

814 itmin = itmin_all 

815 

816 return GFTrace(sum_arr, itmin, deltat) 

817 

818 def _optimize(self, irecords, delays, weights): 

819 if num.unique(irecords).size == irecords.size: 

820 return irecords, delays, weights 

821 

822 deltat = self.get_deltat() 

823 

824 delays = delays / deltat 

825 irecords2 = num.repeat(irecords, 2) 

826 delays2 = num.empty(irecords2.size, dtype=float) 

827 delays2[0::2] = num.floor(delays) 

828 delays2[1::2] = num.ceil(delays) 

829 weights2 = num.repeat(weights, 2) 

830 weights2[0::2] *= 1.0 - (delays - delays2[0::2]) 

831 weights2[1::2] *= (1.0 - (delays2[1::2] - delays)) * \ 

832 (delays2[1::2] - delays2[0::2]) 

833 

834 delays2 *= deltat 

835 

836 iorder = num.lexsort((delays2, irecords2)) 

837 

838 irecords2 = irecords2[iorder] 

839 delays2 = delays2[iorder] 

840 weights2 = weights2[iorder] 

841 

842 ui = num.empty(irecords2.size, dtype=bool) 

843 ui[1:] = num.logical_or(num.diff(irecords2) != 0, 

844 num.diff(delays2) != 0.) 

845 

846 ui[0] = 0 

847 ind2 = num.cumsum(ui) 

848 ui[0] = 1 

849 ind1 = num.where(ui)[0] 

850 

851 irecords3 = irecords2[ind1] 

852 delays3 = delays2[ind1] 

853 weights3 = num.bincount(ind2, weights2) 

854 

855 return irecords3, delays3, weights3 

856 

857 def _sum(self, irecords, delays, weights, itmin, nsamples, decimate, 

858 implementation, optimization): 

859 

860 if not self._f_index: 

861 self.open() 

862 

863 t0 = time.time() 

864 if optimization == 'enable': 

865 irecords, delays, weights = self._optimize( 

866 irecords, delays, weights) 

867 else: 

868 assert optimization == 'disable' 

869 

870 t1 = time.time() 

871 deltat = self.get_deltat() 

872 

873 if implementation == 'c' and decimate == 1: 

874 if delays.size != 0: 

875 itoffset = int(num.floor(num.min(delays)/deltat)) 

876 else: 

877 itoffset = 0 

878 

879 if nsamples is None: 

880 nsamples = -1 

881 

882 if itmin is None: 

883 itmin = 0 

884 else: 

885 itmin -= itoffset 

886 

887 try: 

888 tr = GFTrace(*store_ext.store_sum( 

889 self.cstore, irecords.astype(num.uint64), 

890 delays - itoffset*deltat, 

891 weights.astype(num.float32), 

892 int(itmin), int(nsamples))) 

893 

894 tr.itmin += itoffset 

895 

896 except store_ext.StoreExtError as e: 

897 raise StoreError(str(e) + ' in store %s' % self.store_dir) 

898 

899 elif implementation == 'alternative': 

900 tr = self._sum_impl_alternative(irecords, delays, weights, 

901 itmin, nsamples, decimate) 

902 

903 else: 

904 tr = self._sum_impl_reference(irecords, delays, weights, 

905 itmin, nsamples, decimate) 

906 

907 t2 = time.time() 

908 

909 tr.n_records_stacked = irecords.size 

910 tr.t_optimize = t1 - t0 

911 tr.t_stack = t2 - t1 

912 

913 return tr 

914 

915 def _sum_statics(self, irecords, delays, weights, it, ntargets, 

916 nthreads): 

917 if not self._f_index: 

918 self.open() 

919 

920 return store_ext.store_sum_static( 

921 self.cstore, irecords, delays, weights, it, ntargets, nthreads) 

922 

923 def _load_index(self): 

924 if self._use_memmap: 

925 records = num.memmap( 

926 self._f_index, dtype=gf_record_dtype, 

927 offset=gf_store_header_fmt_size, 

928 mode=('r', 'r+')[self.mode == 'w']) 

929 

930 else: 

931 self._f_index.seek(gf_store_header_fmt_size) 

932 records = num.fromfile(self._f_index, dtype=gf_record_dtype) 

933 

934 assert len(records) == self._nrecords 

935 

936 self._records = records 

937 

938 def _save_index(self): 

939 self._f_index.seek(0) 

940 self._f_index.write(struct.pack(gf_store_header_fmt, self._nrecords, 

941 self.get_deltat())) 

942 

943 if self._use_memmap: 

944 self._records.flush() 

945 else: 

946 self._f_index.seek(gf_store_header_fmt_size) 

947 self._records.tofile(self._f_index) 

948 self._f_index.flush() 

949 

950 def _get_data(self, ipos, begin_value, end_value, ilo, ihi): 

951 if ihi - ilo > 0: 

952 if ipos == 2: 

953 data_orig = num.empty(2, dtype=gf_dtype) 

954 data_orig[0] = begin_value 

955 data_orig[1] = end_value 

956 return data_orig[ilo:ihi] 

957 else: 

958 self._f_data.seek( 

959 int(ipos + ilo*gf_dtype_nbytes_per_sample)) 

960 arr = num.fromfile( 

961 self._f_data, gf_dtype_store, ihi-ilo).astype(gf_dtype) 

962 

963 if arr.size != ihi-ilo: 

964 raise ShortRead() 

965 return arr 

966 else: 

967 return num.empty((0,), dtype=gf_dtype) 

968 

969 def lock_fn(self): 

970 return BaseStore.lock_fn_(self.store_dir) 

971 

972 def index_fn(self): 

973 return BaseStore.index_fn_(self.store_dir) 

974 

975 def data_fn(self): 

976 return BaseStore.data_fn_(self.store_dir) 

977 

978 def config_fn(self): 

979 return BaseStore.config_fn_(self.store_dir) 

980 

981 def count_special_records(self): 

982 if not self._f_index: 

983 self.open() 

984 

985 return num.histogram( 

986 self._records['data_offset'], 

987 bins=[0, 1, 2, 3, num.array(-1).astype(num.uint64)])[0] 

988 

989 @property 

990 def size_index(self): 

991 return os.stat(self.index_fn()).st_size 

992 

993 @property 

994 def size_data(self): 

995 return os.stat(self.data_fn()).st_size 

996 

997 @property 

998 def size_index_and_data(self): 

999 return self.size_index + self.size_data 

1000 

1001 @property 

1002 def size_index_and_data_human(self): 

1003 return util.human_bytesize(self.size_index_and_data) 

1004 

1005 def stats(self): 

1006 counter = self.count_special_records() 

1007 

1008 stats = dict( 

1009 total=self._nrecords, 

1010 inserted=(counter[1] + counter[2] + counter[3]), 

1011 empty=counter[0], 

1012 short=counter[2], 

1013 zero=counter[1], 

1014 size_data=self.size_data, 

1015 size_index=self.size_index, 

1016 ) 

1017 

1018 return stats 

1019 

1020 stats_keys = 'total inserted empty short zero size_data size_index'.split() 

1021 

1022 

1023def remake_dir(dpath, force): 

1024 if os.path.exists(dpath): 

1025 if force: 

1026 shutil.rmtree(dpath) 

1027 else: 

1028 raise CannotCreate('Directory "%s" already exists.' % dpath) 

1029 

1030 os.mkdir(dpath) 

1031 

1032 

1033class MakeTimingParamsFailed(StoreError): 

1034 pass 

1035 

1036 

1037class Store(BaseStore): 

1038 

1039 ''' 

1040 Green's function disk storage and summation machine. 

1041 

1042 The `Store` can be used to efficiently store, retrieve, and sum Green's 

1043 function traces. A `Store` contains many 1D time traces sampled at even 

1044 multiples of a global sampling rate, where each time trace has an 

1045 individual start and end time. The traces are treated as having repeating 

1046 end points, so the functions they represent can be non-constant only 

1047 between begin and end time. It provides capabilities to retrieve decimated 

1048 traces and to extract parts of the traces. The main purpose of this class 

1049 is to provide a fast, easy to use, and flexible machanism to compute 

1050 weighted delay-and-sum stacks with many Green's function traces involved. 

1051 

1052 Individual Green's functions are accessed through a single integer index at 

1053 low level. On top of that, various indexing schemes can be implemented by 

1054 providing a mapping from physical coordinates to the low level index `i`. 

1055 E.g. for a problem with cylindrical symmetry, one might define a mapping 

1056 from the coordinates (`receiver_depth`, `source_depth`, `distance`) to the 

1057 low level index. Index translation is done in the 

1058 :py:class:`pyrocko.gf.meta.Config` subclass object associated with the 

1059 Store. It is accessible through the store's :py:attr:`config` attribute, 

1060 and contains all meta information about the store, including physical 

1061 extent, geometry, sampling rate, and information about the type of the 

1062 stored Green's functions. Information about the underlying earth model 

1063 can also be found there. 

1064 

1065 A GF store can also contain tabulated phase arrivals. In basic cases, these 

1066 can be created with the :py:meth:`make_travel_time_tables` and evaluated 

1067 with the :py:func:`t` methods. 

1068 

1069 .. attribute:: config 

1070 

1071 The :py:class:`pyrocko.gf.meta.Config` derived object associated with 

1072 the store which contains all meta information about the store and 

1073 provides the high-level to low-level index mapping. 

1074 

1075 .. attribute:: store_dir 

1076 

1077 Path to the store's data directory. 

1078 

1079 .. attribute:: mode 

1080 

1081 The mode in which the store is opened: ``'r'``: read-only, ``'w'``: 

1082 writeable. 

1083 ''' 

1084 

1085 @classmethod 

1086 def create(cls, store_dir, config, force=False, extra=None): 

1087 ''' 

1088 Create new GF store. 

1089 

1090 Creates a new GF store at path ``store_dir``. The layout of the GF is 

1091 defined with the parameters given in ``config``, which should be an 

1092 object of a subclass of :py:class:`~pyrocko.gf.meta.Config`. This 

1093 function will refuse to overwrite an existing GF store, unless 

1094 ``force`` is set to ``True``. If more information, e.g. parameters 

1095 used for the modelling code, earth models or other, should be saved 

1096 along with the GF store, these may be provided though a dict given to 

1097 ``extra``. The keys of this dict must be names and the values must be 

1098 *guts* type objects. 

1099 

1100 :param store_dir: GF store path 

1101 :type store_dir: str 

1102 :param config: GF store Config 

1103 :type config: :py:class:`~pyrocko.gf.meta.Config` 

1104 :param force: Force overwrite, defaults to ``False`` 

1105 :type force: bool 

1106 :param extra: Extra information 

1107 :type extra: dict or None 

1108 ''' 

1109 

1110 cls.create_editables(store_dir, config, force=force, extra=extra) 

1111 cls.create_dependants(store_dir, force=force) 

1112 

1113 return cls(store_dir) 

1114 

1115 @staticmethod 

1116 def create_editables(store_dir, config, force=False, extra=None): 

1117 try: 

1118 util.ensuredir(store_dir) 

1119 except Exception: 

1120 raise CannotCreate('cannot create directory %s' % store_dir) 

1121 

1122 fns = [] 

1123 

1124 config_fn = os.path.join(store_dir, 'config') 

1125 remove_if_exists(config_fn, force) 

1126 meta.dump(config, filename=config_fn) 

1127 

1128 fns.append(config_fn) 

1129 

1130 for sub_dir in ['extra']: 

1131 dpath = os.path.join(store_dir, sub_dir) 

1132 remake_dir(dpath, force) 

1133 

1134 if extra: 

1135 for k, v in extra.items(): 

1136 fn = get_extra_path(store_dir, k) 

1137 remove_if_exists(fn, force) 

1138 meta.dump(v, filename=fn) 

1139 

1140 fns.append(fn) 

1141 

1142 return fns 

1143 

1144 @staticmethod 

1145 def create_dependants(store_dir, force=False): 

1146 config_fn = os.path.join(store_dir, 'config') 

1147 config = meta.load(filename=config_fn) 

1148 

1149 BaseStore.create(store_dir, config.deltat, config.nrecords, 

1150 force=force) 

1151 

1152 for sub_dir in ['decimated']: 

1153 dpath = os.path.join(store_dir, sub_dir) 

1154 remake_dir(dpath, force) 

1155 

1156 def __init__(self, store_dir, mode='r', use_memmap=True): 

1157 BaseStore.__init__(self, store_dir, mode=mode, use_memmap=use_memmap) 

1158 config_fn = self.config_fn() 

1159 if not os.path.isfile(config_fn): 

1160 raise StoreError( 

1161 'directory "%s" does not seem to contain a GF store ' 

1162 '("config" file not found)' % store_dir) 

1163 self.load_config() 

1164 

1165 self._decimated = {} 

1166 self._extra = {} 

1167 self._phases = {} 

1168 for decimate in range(2, 9): 

1169 if os.path.isdir(self._decimated_store_dir(decimate)): 

1170 self._decimated[decimate] = None 

1171 

1172 def open(self): 

1173 if not self._f_index: 

1174 BaseStore.open(self) 

1175 c = self.config 

1176 

1177 mscheme = 'type_' + c.short_type.lower() 

1178 store_ext.store_mapping_init( 

1179 self.cstore, mscheme, 

1180 c.mins, c.maxs, c.deltas, c.ns.astype(num.uint64), 

1181 c.ncomponents) 

1182 

1183 def save_config(self, make_backup=False): 

1184 config_fn = self.config_fn() 

1185 if make_backup: 

1186 os.rename(config_fn, config_fn + '~') 

1187 

1188 meta.dump(self.config, filename=config_fn) 

1189 

1190 def get_deltat(self): 

1191 return self.config.deltat 

1192 

1193 def load_config(self): 

1194 self.config = meta.load(filename=self.config_fn()) 

1195 

1196 def ensure_reference(self, force=False): 

1197 if self.config.reference is not None and not force: 

1198 return 

1199 self.ensure_uuid() 

1200 reference = '%s-%s' % (self.config.id, self.config.uuid[0:6]) 

1201 

1202 if self.config.reference is not None: 

1203 self.config.reference = reference 

1204 self.save_config() 

1205 else: 

1206 with open(self.config_fn(), 'a') as config: 

1207 config.write('reference: %s\n' % reference) 

1208 self.load_config() 

1209 

1210 def ensure_uuid(self, force=False): 

1211 if self.config.uuid is not None and not force: 

1212 return 

1213 uuid = self.create_store_hash() 

1214 

1215 if self.config.uuid is not None: 

1216 self.config.uuid = uuid 

1217 self.save_config() 

1218 else: 

1219 with open(self.config_fn(), 'a') as config: 

1220 config.write('\nuuid: %s\n' % uuid) 

1221 self.load_config() 

1222 

1223 def create_store_hash(self): 

1224 logger.info('creating hash for store ...') 

1225 m = hashlib.sha1() 

1226 

1227 traces_size = op.getsize(self.data_fn()) 

1228 with open(self.data_fn(), 'rb') as traces: 

1229 while traces.tell() < traces_size: 

1230 m.update(traces.read(4096)) 

1231 traces.seek(1024 * 1024 * 10, 1) 

1232 

1233 with open(self.config_fn(), 'rb') as config: 

1234 m.update(config.read()) 

1235 return m.hexdigest() 

1236 

1237 def get_extra_path(self, key): 

1238 return get_extra_path(self.store_dir, key) 

1239 

1240 def get_extra(self, key): 

1241 ''' 

1242 Get extra information stored under given key. 

1243 ''' 

1244 

1245 x = self._extra 

1246 if key not in x: 

1247 fn = self.get_extra_path(key) 

1248 if not os.path.exists(fn): 

1249 raise NoSuchExtra(key) 

1250 

1251 x[key] = meta.load(filename=fn) 

1252 

1253 return x[key] 

1254 

1255 def upgrade(self): 

1256 ''' 

1257 Upgrade store config and files to latest Pyrocko version. 

1258 ''' 

1259 fns = [os.path.join(self.store_dir, 'config')] 

1260 for key in self.extra_keys(): 

1261 fns.append(self.get_extra_path(key)) 

1262 

1263 i = 0 

1264 for fn in fns: 

1265 i += util.pf_upgrade(fn) 

1266 

1267 return i 

1268 

1269 def extra_keys(self): 

1270 return [x for x in os.listdir(os.path.join(self.store_dir, 'extra')) 

1271 if valid_string_id(x)] 

1272 

1273 def put(self, args, trace): 

1274 ''' 

1275 Insert trace into GF store. 

1276 

1277 Store a single GF trace at (high-level) index ``args``. 

1278 

1279 :param args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. 

1280 ``(source_depth, distance, component)`` as in 

1281 :py:class:`~pyrocko.gf.meta.ConfigTypeA`. 

1282 :type args: tuple 

1283 :returns: GF trace at ``args`` 

1284 :rtype: :py:class:`~pyrocko.gf.store.GFTrace` 

1285 ''' 

1286 

1287 irecord = self.config.irecord(*args) 

1288 self._put(irecord, trace) 

1289 

1290 def get_record(self, args): 

1291 irecord = self.config.irecord(*args) 

1292 return self._get_record(irecord) 

1293 

1294 def str_irecord(self, args): 

1295 return BaseStore.str_irecord(self, self.config.irecord(*args)) 

1296 

1297 def get(self, args, itmin=None, nsamples=None, decimate=1, 

1298 interpolation='nearest_neighbor', implementation='c'): 

1299 ''' 

1300 Retrieve GF trace from store. 

1301 

1302 Retrieve a single GF trace from the store at (high-level) index 

1303 ``args``. By default, the full trace is retrieved. Given ``itmin`` and 

1304 ``nsamples``, only the selected portion of the trace is extracted. If 

1305 ``decimate`` is an integer in the range [2,8], the trace is decimated 

1306 on the fly or, if available, the trace is read from a decimated version 

1307 of the GF store. 

1308 

1309 :param args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. 

1310 ``(source_depth, distance, component)`` as in 

1311 :py:class:`~pyrocko.gf.meta.ConfigTypeA`. 

1312 :type args: tuple 

1313 :param itmin: Start time index (start time is ``itmin * dt``), 

1314 defaults to None 

1315 :type itmin: int or None 

1316 :param nsamples: Number of samples, defaults to None 

1317 :type nsamples: int or None 

1318 :param decimate: Decimatation factor, defaults to 1 

1319 :type decimate: int 

1320 :param interpolation: Interpolation method 

1321 ``['nearest_neighbor', 'multilinear', 'off']``, defaults to 

1322 ``'nearest_neighbor'`` 

1323 :type interpolation: str 

1324 :param implementation: Implementation to use ``['c', 'reference']``, 

1325 defaults to ``'c'``. 

1326 :type implementation: str 

1327 

1328 :returns: GF trace at ``args`` 

1329 :rtype: :py:class:`~pyrocko.gf.store.GFTrace` 

1330 ''' 

1331 

1332 store, decimate = self._decimated_store(decimate) 

1333 if interpolation == 'nearest_neighbor': 

1334 irecord = store.config.irecord(*args) 

1335 tr = store._get(irecord, itmin, nsamples, decimate, 

1336 implementation) 

1337 

1338 elif interpolation == 'off': 

1339 irecords, weights = store.config.vicinity(*args) 

1340 if len(irecords) != 1: 

1341 raise NotAllowedToInterpolate() 

1342 else: 

1343 tr = store._get(irecords[0], itmin, nsamples, decimate, 

1344 implementation) 

1345 

1346 elif interpolation == 'multilinear': 

1347 tr = store._sum(irecords, num.zeros(len(irecords)), weights, 

1348 itmin, nsamples, decimate, implementation, 

1349 'disable') 

1350 

1351 # to prevent problems with rounding errors (BaseStore saves deltat 

1352 # as a 4-byte floating point value, value from YAML config is more 

1353 # accurate) 

1354 tr.deltat = self.config.deltat * decimate 

1355 return tr 

1356 

1357 def sum(self, args, delays, weights, itmin=None, nsamples=None, 

1358 decimate=1, interpolation='nearest_neighbor', implementation='c', 

1359 optimization='enable'): 

1360 ''' 

1361 Sum delayed and weighted GF traces. 

1362 

1363 Calculate sum of delayed and weighted GF traces. ``args`` is a tuple of 

1364 arrays forming the (high-level) indices of the GF traces to be 

1365 selected. Delays and weights for the summation are given in the arrays 

1366 ``delays`` and ``weights``. ``itmin`` and ``nsamples`` can be given to 

1367 restrict to computation to a given time interval. If ``decimate`` is 

1368 an integer in the range [2,8], decimated traces are used in the 

1369 summation. 

1370 

1371 :param args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. 

1372 ``(source_depth, distance, component)`` as in 

1373 :py:class:`~pyrocko.gf.meta.ConfigTypeA`. 

1374 :type args: tuple(numpy.ndarray) 

1375 :param delays: Delay times 

1376 :type delays: :py:class:`numpy.ndarray` 

1377 :param weights: Trace weights 

1378 :type weights: :py:class:`numpy.ndarray` 

1379 :param itmin: Start time index (start time is ``itmin * dt``), 

1380 defaults to None 

1381 :type itmin: int or None 

1382 :param nsamples: Number of samples, defaults to None 

1383 :type nsamples: int or None 

1384 :param decimate: Decimatation factor, defaults to 1 

1385 :type decimate: int 

1386 :param interpolation: Interpolation method 

1387 ``['nearest_neighbor', 'multilinear', 'off']``, defaults to 

1388 ``'nearest_neighbor'`` 

1389 :type interpolation: str 

1390 :param implementation: Implementation to use, 

1391 ``['c', 'alternative', 'reference']``, where ``'alternative'`` 

1392 and ``'reference'`` use a Python implementation, defaults to `'c'` 

1393 :type implementation: str 

1394 :param optimization: Optimization mode ``['enable', 'disable']``, 

1395 defaults to ``'enable'`` 

1396 :type optimization: str 

1397 :returns: Stacked GF trace. 

1398 :rtype: :py:class:`~pyrocko.gf.store.GFTrace` 

1399 ''' 

1400 

1401 store, decimate_ = self._decimated_store(decimate) 

1402 

1403 if interpolation == 'nearest_neighbor': 

1404 irecords = store.config.irecords(*args) 

1405 else: 

1406 assert interpolation == 'multilinear' 

1407 irecords, ip_weights = store.config.vicinities(*args) 

1408 neach = irecords.size // args[0].size 

1409 weights = num.repeat(weights, neach) * ip_weights 

1410 delays = num.repeat(delays, neach) 

1411 

1412 tr = store._sum(irecords, delays, weights, itmin, nsamples, decimate_, 

1413 implementation, optimization) 

1414 

1415 # to prevent problems with rounding errors (BaseStore saves deltat 

1416 # as a 4-byte floating point value, value from YAML config is more 

1417 # accurate) 

1418 tr.deltat = self.config.deltat * decimate 

1419 return tr 

1420 

1421 def make_decimated(self, decimate, config=None, force=False, 

1422 show_progress=False): 

1423 ''' 

1424 Create decimated version of GF store. 

1425 

1426 Create a downsampled version of the GF store. Downsampling is done for 

1427 the integer factor ``decimate`` which should be in the range [2,8]. If 

1428 ``config`` is ``None``, all traces of the GF store are decimated and 

1429 held available (i.e. the index mapping of the original store is used), 

1430 otherwise, a different spacial stepping can be specified by giving a 

1431 modified GF store configuration in ``config`` (see :py:meth:`create`). 

1432 Decimated GF sub-stores are created under the ``decimated`` 

1433 subdirectory within the GF store directory. Holding available decimated 

1434 versions of the GF store can save computation time, IO bandwidth, or 

1435 decrease memory footprint at the cost of increased disk space usage, 

1436 when computation are done for lower frequency signals. 

1437 

1438 :param decimate: Decimate factor 

1439 :type decimate: int 

1440 :param config: GF store config object, defaults to None 

1441 :type config: :py:class:`~pyrocko.gf.meta.Config` or None 

1442 :param force: Force overwrite, defaults to ``False`` 

1443 :type force: bool 

1444 :param show_progress: Show progress, defaults to ``False`` 

1445 :type show_progress: bool 

1446 ''' 

1447 

1448 if not self._f_index: 

1449 self.open() 

1450 

1451 if not (2 <= decimate <= 8): 

1452 raise StoreError('decimate argument must be in the range [2,8]') 

1453 

1454 assert self.mode == 'r' 

1455 

1456 if config is None: 

1457 config = self.config 

1458 

1459 config = copy.deepcopy(config) 

1460 config.sample_rate = self.config.sample_rate / decimate 

1461 

1462 if decimate in self._decimated: 

1463 del self._decimated[decimate] 

1464 

1465 store_dir = self._decimated_store_dir(decimate) 

1466 if os.path.exists(store_dir): 

1467 if force: 

1468 shutil.rmtree(store_dir) 

1469 else: 

1470 raise CannotCreate('store already exists at %s' % store_dir) 

1471 

1472 store_dir_incomplete = store_dir + '-incomplete' 

1473 Store.create(store_dir_incomplete, config, force=force) 

1474 

1475 decimated = Store(store_dir_incomplete, 'w') 

1476 if show_progress: 

1477 pbar = util.progressbar('decimating store', self.config.nrecords) 

1478 

1479 for i, args in enumerate(decimated.config.iter_nodes()): 

1480 tr = self.get(args, decimate=decimate) 

1481 decimated.put(args, tr) 

1482 

1483 if show_progress: 

1484 pbar.update(i+1) 

1485 

1486 if show_progress: 

1487 pbar.finish() 

1488 

1489 decimated.close() 

1490 

1491 shutil.move(store_dir_incomplete, store_dir) 

1492 

1493 self._decimated[decimate] = None 

1494 

1495 def stats(self): 

1496 stats = BaseStore.stats(self) 

1497 stats['decimated'] = sorted(self._decimated.keys()) 

1498 return stats 

1499 

1500 stats_keys = BaseStore.stats_keys + ['decimated'] 

1501 

1502 def check(self, show_progress=False): 

1503 have_holes = [] 

1504 for pdef in self.config.tabulated_phases: 

1505 phase_id = pdef.id 

1506 ph = self.get_stored_phase(phase_id) 

1507 if ph.check_holes(): 

1508 have_holes.append(phase_id) 

1509 

1510 if have_holes: 

1511 for phase_id in have_holes: 

1512 logger.warning( 

1513 'Travel time table of phase "{}" contains holes. You can ' 

1514 ' use `fomosto tttlsd` to fix holes.'.format( 

1515 phase_id)) 

1516 else: 

1517 logger.info('No holes in travel time tables') 

1518 

1519 if show_progress: 

1520 pbar = util.progressbar('checking store', self.config.nrecords) 

1521 

1522 problems = 0 

1523 for i, args in enumerate(self.config.iter_nodes()): 

1524 tr = self.get(args) 

1525 if tr and not tr.is_zero: 

1526 if not tr.begin_value == tr.data[0]: 

1527 logger.warning('wrong begin value for trace at %s ' 

1528 '(data corruption?)' % str(args)) 

1529 problems += 1 

1530 if not tr.end_value == tr.data[-1]: 

1531 logger.warning('wrong end value for trace at %s ' 

1532 '(data corruption?)' % str(args)) 

1533 problems += 1 

1534 if not num.all(num.isfinite(tr.data)): 

1535 logger.warning('nans or infs in trace at %s' % str(args)) 

1536 problems += 1 

1537 

1538 if show_progress: 

1539 pbar.update(i+1) 

1540 

1541 if show_progress: 

1542 pbar.finish() 

1543 

1544 return problems 

1545 

1546 def check_earthmodels(self, config): 

1547 if config.earthmodel_receiver_1d.profile('z')[-1] not in\ 

1548 config.earthmodel_1d.profile('z'): 

1549 logger.warning('deepest layer of earthmodel_receiver_1d not ' 

1550 'found in earthmodel_1d') 

1551 

1552 def _decimated_store_dir(self, decimate): 

1553 return os.path.join(self.store_dir, 'decimated', str(decimate)) 

1554 

1555 def _decimated_store(self, decimate): 

1556 if decimate == 1 or decimate not in self._decimated: 

1557 return self, decimate 

1558 else: 

1559 store = self._decimated[decimate] 

1560 if store is None: 

1561 store = Store(self._decimated_store_dir(decimate), 'r') 

1562 self._decimated[decimate] = store 

1563 

1564 return store, 1 

1565 

1566 def phase_filename(self, phase_id, attribute='phase'): 

1567 check_string_id(phase_id) 

1568 return os.path.join( 

1569 self.store_dir, 'phases', phase_id + '.%s' % attribute) 

1570 

1571 def get_phase_identifier(self, phase_id, attribute): 

1572 return '{}.{}'.format(phase_id, attribute) 

1573 

1574 def get_stored_phase(self, phase_def, attribute='phase'): 

1575 ''' 

1576 Get stored phase from GF store. 

1577 

1578 :returns: Phase information 

1579 :rtype: :py:class:`pyrocko.spit.SPTree` 

1580 ''' 

1581 

1582 phase_id = self.get_phase_identifier(phase_def, attribute) 

1583 if phase_id not in self._phases: 

1584 fn = self.phase_filename(phase_def, attribute) 

1585 if not os.path.isfile(fn): 

1586 raise NoSuchPhase(phase_id) 

1587 

1588 spt = spit.SPTree(filename=fn) 

1589 self._phases[phase_id] = spt 

1590 

1591 return self._phases[phase_id] 

1592 

1593 def get_phase(self, phase_def): 

1594 toks = phase_def.split(':', 1) 

1595 if len(toks) == 2: 

1596 provider, phase_def = toks 

1597 else: 

1598 provider, phase_def = 'stored', toks[0] 

1599 

1600 if provider == 'stored': 

1601 return self.get_stored_phase(phase_def) 

1602 

1603 elif provider == 'vel': 

1604 vel = float(phase_def) * 1000. 

1605 

1606 def evaluate(args): 

1607 return self.config.get_distance(args) / vel 

1608 

1609 return evaluate 

1610 

1611 elif provider == 'vel_surface': 

1612 vel = float(phase_def) * 1000. 

1613 

1614 def evaluate(args): 

1615 return self.config.get_surface_distance(args) / vel 

1616 

1617 return evaluate 

1618 

1619 elif provider in ('cake', 'iaspei'): 

1620 from pyrocko import cake 

1621 mod = self.config.earthmodel_1d 

1622 if provider == 'cake': 

1623 phases = [cake.PhaseDef(phase_def)] 

1624 else: 

1625 phases = cake.PhaseDef.classic(phase_def) 

1626 

1627 def evaluate(args): 

1628 if len(args) == 2: 

1629 zr, zs, x = (self.config.receiver_depth,) + args 

1630 elif len(args) == 3: 

1631 zr, zs, x = args 

1632 else: 

1633 assert False 

1634 

1635 t = [] 

1636 if phases: 

1637 rays = mod.arrivals( 

1638 phases=phases, 

1639 distances=[x*cake.m2d], 

1640 zstart=zs, 

1641 zstop=zr) 

1642 

1643 for ray in rays: 

1644 t.append(ray.t) 

1645 

1646 if t: 

1647 return min(t) 

1648 else: 

1649 return None 

1650 

1651 return evaluate 

1652 

1653 raise StoreError('unsupported phase provider: %s' % provider) 

1654 

1655 def t(self, timing, *args): 

1656 ''' 

1657 Compute interpolated phase arrivals. 

1658 

1659 **Examples:** 

1660 

1661 If ``test_store`` is of :py:class:`~pyrocko.gf.meta.ConfigTypeA`:: 

1662 

1663 test_store.t('p', (1000, 10000)) 

1664 test_store.t('last{P|Pdiff}', (1000, 10000)) # The latter arrival 

1665 # of P or diffracted 

1666 # P phase 

1667 

1668 If ``test_store`` is of :py:class:`~pyrocko.gf.meta.ConfigTypeB`:: 

1669 

1670 test_store.t('S', (1000, 1000, 10000)) 

1671 test_store.t('first{P|p|Pdiff|sP}', (1000, 1000, 10000)) # The 

1672 ` # first arrival of 

1673 # the given phases is 

1674 # selected 

1675 

1676 :param timing: Timing string as described above 

1677 :type timing: str or :py:class:`~pyrocko.gf.meta.Timing` 

1678 :param \\*args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. 

1679 ``(source_depth, distance, component)`` as in 

1680 :py:class:`~pyrocko.gf.meta.ConfigTypeA`. 

1681 :type \\*args: tuple 

1682 :returns: Phase arrival according to ``timing`` 

1683 :rtype: float or None 

1684 ''' 

1685 

1686 if len(args) == 1: 

1687 args = args[0] 

1688 else: 

1689 args = self.config.make_indexing_args1(*args) 

1690 

1691 if not isinstance(timing, meta.Timing): 

1692 timing = meta.Timing(timing) 

1693 

1694 return timing.evaluate(self.get_phase, args) 

1695 

1696 def get_available_interpolation_tables(self): 

1697 filenames = glob(op.join(self.store_dir, 'phases', '*')) 

1698 return [op.basename(file) for file in filenames] 

1699 

1700 def get_stored_attribute(self, phase_def, attribute, *args): 

1701 ''' 

1702 Return interpolated store attribute 

1703 

1704 :param attribute: takeoff_angle / incidence_angle [deg] 

1705 :type attribute: str 

1706 :param \\*args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. 

1707 ``(source_depth, distance, component)`` as in 

1708 :py:class:`~pyrocko.gf.meta.ConfigTypeA`. 

1709 :type \\*args: tuple 

1710 ''' 

1711 try: 

1712 return self.get_stored_phase(phase_def, attribute)(*args) 

1713 except NoSuchPhase: 

1714 raise StoreError( 

1715 'Interpolation table for {} of {} does not exist! ' 

1716 'Available tables: {}'.format( 

1717 attribute, phase_def, 

1718 self.get_available_interpolation_tables())) 

1719 

1720 def get_many_stored_attributes(self, phase_def, attribute, coords): 

1721 ''' 

1722 Return interpolated store attribute 

1723 

1724 :param attribute: takeoff_angle / incidence_angle [deg] 

1725 :type attribute: str 

1726 :param \\coords: :py:class:`num.array.Array`, with columns being 

1727 ``(source_depth, distance, component)`` as in 

1728 :py:class:`~pyrocko.gf.meta.ConfigTypeA`. 

1729 :type \\coords: :py:class:`num.array.Array` 

1730 ''' 

1731 try: 

1732 return self.get_stored_phase( 

1733 phase_def, attribute).interpolate_many(coords) 

1734 except NoSuchPhase: 

1735 raise StoreError( 

1736 'Interpolation table for {} of {} does not exist! ' 

1737 'Available tables: {}'.format( 

1738 attribute, phase_def, 

1739 self.get_available_interpolation_tables())) 

1740 

1741 def make_stored_table(self, attribute, force=False): 

1742 ''' 

1743 Compute tables for selected ray attributes. 

1744 

1745 :param attribute: phase / takeoff_angle [deg]/ incidence_angle [deg] 

1746 :type attribute: str 

1747 

1748 Tables are computed using the 1D earth model defined in 

1749 :py:attr:`~pyrocko.gf.meta.Config.earthmodel_1d` for each defined phase 

1750 in :py:attr:`~pyrocko.gf.meta.Config.tabulated_phases`. 

1751 ''' 

1752 

1753 if attribute not in available_stored_tables: 

1754 raise StoreError( 

1755 'Cannot create attribute table for {}! ' 

1756 'Supported attribute tables: {}'.format( 

1757 attribute, available_stored_tables)) 

1758 

1759 from pyrocko import cake 

1760 config = self.config 

1761 

1762 if not config.tabulated_phases: 

1763 return 

1764 

1765 mod = config.earthmodel_1d 

1766 

1767 if not mod: 

1768 raise StoreError('no earth model found') 

1769 

1770 if config.earthmodel_receiver_1d: 

1771 self.check_earthmodels(config) 

1772 

1773 for pdef in config.tabulated_phases: 

1774 

1775 phase_id = pdef.id 

1776 phases = pdef.phases 

1777 

1778 if attribute == 'phase': 

1779 ftol = config.deltat * 0.5 

1780 horvels = pdef.horizontal_velocities 

1781 else: 

1782 ftol = config.deltat * 0.01 

1783 

1784 fn = self.phase_filename(phase_id, attribute) 

1785 

1786 if os.path.exists(fn) and not force: 

1787 logger.info('file already exists: %s' % fn) 

1788 continue 

1789 

1790 def evaluate(args): 

1791 

1792 nargs = len(args) 

1793 if nargs == 2: 

1794 receiver_depth, source_depth, distance = ( 

1795 config.receiver_depth,) + args 

1796 elif nargs == 3: 

1797 receiver_depth, source_depth, distance = args 

1798 else: 

1799 raise ValueError( 

1800 'Number of input arguments %i is not supported!' 

1801 'Supported number of arguments: 2 or 3' % nargs) 

1802 

1803 ray_attribute_values = [] 

1804 arrival_times = [] 

1805 if phases: 

1806 rays = mod.arrivals( 

1807 phases=phases, 

1808 distances=[distance * cake.m2d], 

1809 zstart=source_depth, 

1810 zstop=receiver_depth) 

1811 

1812 for ray in rays: 

1813 arrival_times.append(ray.t) 

1814 if attribute != 'phase': 

1815 ray_attribute_values.append( 

1816 getattr(ray, attribute)()) 

1817 

1818 if attribute == 'phase': 

1819 for v in horvels: 

1820 arrival_times.append(distance / (v * km)) 

1821 

1822 if arrival_times: 

1823 if attribute == 'phase': 

1824 return min(arrival_times) 

1825 else: 

1826 earliest_idx = num.argmin(arrival_times) 

1827 return ray_attribute_values[earliest_idx] 

1828 else: 

1829 return None 

1830 

1831 logger.info( 

1832 'making "%s" table for phasegroup "%s"', attribute, phase_id) 

1833 

1834 ip = spit.SPTree( 

1835 f=evaluate, 

1836 ftol=ftol, 

1837 xbounds=num.transpose((config.mins, config.maxs)), 

1838 xtols=config.deltas) 

1839 

1840 util.ensuredirs(fn) 

1841 ip.dump(fn) 

1842 

1843 def make_timing_params(self, begin, end, snap_vred=True, force=False): 

1844 ''' 

1845 Compute tight parameterized time ranges to include given timings. 

1846 

1847 Calculates appropriate time ranges to cover given begin and end timings 

1848 over all GF points in the store. A dict with the following keys is 

1849 returned: 

1850 

1851 * ``'tmin'``: time [s], minimum of begin timing over all GF points 

1852 * ``'tmax'``: time [s], maximum of end timing over all GF points 

1853 * ``'vred'``, ``'tmin_vred'``: slope [m/s] and offset [s] of reduction 

1854 velocity [m/s] appropriate to catch begin timing over all GF points 

1855 * ``'tlenmax_vred'``: maximum time length needed to cover all end 

1856 timings, when using linear slope given with (``vred``, ``tmin_vred``) 

1857 as start 

1858 ''' 

1859 

1860 data = [] 

1861 warned = set() 

1862 for args in self.config.iter_nodes(level=-1): 

1863 tmin = self.t(begin, args) 

1864 tmax = self.t(end, args) 

1865 if tmin is None: 

1866 warned.add(str(begin)) 

1867 if tmax is None: 

1868 warned.add(str(end)) 

1869 

1870 x = self.config.get_surface_distance(args) 

1871 data.append((x, tmin, tmax)) 

1872 

1873 if len(warned): 

1874 w = ' | '.join(list(warned)) 

1875 msg = '''determination of time window failed using phase 

1876definitions: %s.\n Travel time table contains holes in probed ranges. You can 

1877use `fomosto tttlsd` to fix holes.''' % w 

1878 if force: 

1879 logger.warning(msg) 

1880 else: 

1881 raise MakeTimingParamsFailed(msg) 

1882 

1883 xs, tmins, tmaxs = num.array(data, dtype=float).T 

1884 

1885 tlens = tmaxs - tmins 

1886 

1887 i = num.nanargmin(tmins) 

1888 if not num.isfinite(i): 

1889 raise MakeTimingParamsFailed('determination of time window failed') 

1890 

1891 tminmin = tmins[i] 

1892 x_tminmin = xs[i] 

1893 dx = (xs - x_tminmin) 

1894 dx = num.where(dx != 0.0, dx, num.nan) 

1895 s = (tmins - tminmin) / dx 

1896 sred = num.min(num.abs(s[num.isfinite(s)])) 

1897 

1898 deltax = self.config.distance_delta 

1899 

1900 if snap_vred: 

1901 tdif = sred*deltax 

1902 tdif2 = self.config.deltat * math.floor(tdif / self.config.deltat) 

1903 sred = tdif2/self.config.distance_delta 

1904 

1905 tmin_vred = tminmin - sred*x_tminmin 

1906 if snap_vred: 

1907 xe = x_tminmin - int(x_tminmin / deltax) * deltax 

1908 tmin_vred = float( 

1909 self.config.deltat * 

1910 math.floor(tmin_vred / self.config.deltat) - xe * sred) 

1911 

1912 tlenmax_vred = num.nanmax(tmax - (tmin_vred + sred*x)) 

1913 if sred != 0.0: 

1914 vred = 1.0/sred 

1915 else: 

1916 vred = 0.0 

1917 

1918 return dict( 

1919 tmin=tminmin, 

1920 tmax=num.nanmax(tmaxs), 

1921 tlenmax=num.nanmax(tlens), 

1922 tmin_vred=tmin_vred, 

1923 tlenmax_vred=tlenmax_vred, 

1924 vred=vred) 

1925 

1926 def make_travel_time_tables(self, force=False): 

1927 ''' 

1928 Compute travel time tables. 

1929 

1930 Travel time tables are computed using the 1D earth model defined in 

1931 :py:attr:`~pyrocko.gf.meta.Config.earthmodel_1d` for each defined phase 

1932 in :py:attr:`~pyrocko.gf.meta.Config.tabulated_phases`. The accuracy of 

1933 the tablulated times is adjusted to the sampling rate of the store. 

1934 ''' 

1935 self.make_stored_table(attribute='phase', force=force) 

1936 

1937 def make_ttt(self, force=False): 

1938 self.make_travel_time_tables(force=force) 

1939 

1940 def make_takeoff_angle_tables(self, force=False): 

1941 ''' 

1942 Compute takeoff-angle tables. 

1943 

1944 Takeoff-angle tables [deg] are computed using the 1D earth model 

1945 defined in :py:attr:`~pyrocko.gf.meta.Config.earthmodel_1d` for each 

1946 defined phase in :py:attr:`~pyrocko.gf.meta.Config.tabulated_phases`. 

1947 The accuracy of the tablulated times is adjusted to 0.01 times the 

1948 sampling rate of the store. 

1949 ''' 

1950 self.make_stored_table(attribute='takeoff_angle', force=force) 

1951 

1952 def make_incidence_angle_tables(self, force=False): 

1953 ''' 

1954 Compute incidence-angle tables. 

1955 

1956 Incidence-angle tables [deg] are computed using the 1D earth model 

1957 defined in :py:attr:`~pyrocko.gf.meta.Config.earthmodel_1d` for each 

1958 defined phase in :py:attr:`~pyrocko.gf.meta.Config.tabulated_phases`. 

1959 The accuracy of the tablulated times is adjusted to 0.01 times the 

1960 sampling rate of the store. 

1961 ''' 

1962 self.make_stored_table(attribute='incidence_angle', force=force) 

1963 

1964 def get_provided_components(self): 

1965 

1966 scheme_desc = meta.component_scheme_to_description[ 

1967 self.config.component_scheme] 

1968 

1969 quantity = self.config.stored_quantity \ 

1970 or scheme_desc.default_stored_quantity 

1971 

1972 if not quantity: 

1973 return scheme_desc.provided_components 

1974 else: 

1975 return [ 

1976 quantity + '.' + comp 

1977 for comp in scheme_desc.provided_components] 

1978 

1979 def fix_ttt_holes(self, phase_id): 

1980 

1981 pdef = self.config.get_tabulated_phase(phase_id) 

1982 mode = None 

1983 for phase in pdef.phases: 

1984 for leg in phase.legs(): 

1985 if mode is None: 

1986 mode = leg.mode 

1987 

1988 else: 

1989 if mode != leg.mode: 

1990 raise StoreError( 

1991 'Can only fix holes in pure P or pure S phases.') 

1992 

1993 sptree = self.get_stored_phase(phase_id) 

1994 sptree_lsd = self.config.fix_ttt_holes(sptree, mode) 

1995 

1996 phase_lsd = phase_id + '.lsd' 

1997 fn = self.phase_filename(phase_lsd) 

1998 sptree_lsd.dump(fn) 

1999 

2000 def statics(self, source, multi_location, itsnapshot, components, 

2001 interpolation='nearest_neighbor', nthreads=0): 

2002 if not self._f_index: 

2003 self.open() 

2004 

2005 out = {} 

2006 ntargets = multi_location.ntargets 

2007 source_terms = source.get_source_terms(self.config.component_scheme) 

2008 # TODO: deal with delays for snapshots > 1 sample 

2009 

2010 if itsnapshot is not None: 

2011 delays = source.times 

2012 

2013 # Fringe case where we sample at sample 0 and sample 1 

2014 tsnapshot = itsnapshot * self.config.deltat 

2015 if delays.max() == tsnapshot and delays.min() != tsnapshot: 

2016 delays[delays == delays.max()] -= self.config.deltat 

2017 

2018 else: 

2019 delays = source.times * 0 

2020 itsnapshot = 1 

2021 

2022 if ntargets == 0: 

2023 raise StoreError('MultiLocation.coords5 is empty') 

2024 

2025 res = store_ext.store_calc_static( 

2026 self.cstore, 

2027 source.coords5(), 

2028 source_terms, 

2029 delays, 

2030 multi_location.coords5, 

2031 self.config.component_scheme, 

2032 interpolation, 

2033 itsnapshot, 

2034 nthreads) 

2035 

2036 out = {} 

2037 for icomp, (comp, comp_res) in enumerate( 

2038 zip(self.get_provided_components(), res)): 

2039 if comp not in components: 

2040 continue 

2041 out[comp] = res[icomp] 

2042 

2043 return out 

2044 

2045 def calc_seismograms(self, source, receivers, components, deltat=None, 

2046 itmin=None, nsamples=None, 

2047 interpolation='nearest_neighbor', 

2048 optimization='enable', nthreads=1): 

2049 config = self.config 

2050 

2051 assert interpolation in ['nearest_neighbor', 'multilinear'], \ 

2052 'Unknown interpolation: %s' % interpolation 

2053 

2054 if not isinstance(receivers, list): 

2055 receivers = [receivers] 

2056 

2057 if deltat is None: 

2058 decimate = 1 

2059 else: 

2060 decimate = int(round(deltat/config.deltat)) 

2061 if abs(deltat / (decimate * config.deltat) - 1.0) > 0.001: 

2062 raise StoreError( 

2063 'unavailable decimation ratio target.deltat / store.deltat' 

2064 ' = %g / %g' % (deltat, config.deltat)) 

2065 

2066 store, decimate_ = self._decimated_store(decimate) 

2067 

2068 if not store._f_index: 

2069 store.open() 

2070 

2071 scheme = config.component_scheme 

2072 

2073 source_coords_arr = source.coords5() 

2074 source_terms = source.get_source_terms(scheme) 

2075 delays = source.times.ravel() 

2076 

2077 nreceiver = len(receivers) 

2078 receiver_coords_arr = num.empty((nreceiver, 5)) 

2079 for irec, rec in enumerate(receivers): 

2080 receiver_coords_arr[irec, :] = rec.coords5 

2081 

2082 dt = self.get_deltat() 

2083 

2084 itoffset = int(num.floor(delays.min()/dt)) if delays.size != 0 else 0 

2085 

2086 if itmin is None: 

2087 itmin = num.zeros(nreceiver, dtype=num.int32) 

2088 else: 

2089 itmin = (itmin-itoffset).astype(num.int32) 

2090 

2091 if nsamples is None: 

2092 nsamples = num.zeros(nreceiver, dtype=num.int32) - 1 

2093 else: 

2094 nsamples = nsamples.astype(num.int32) 

2095 

2096 try: 

2097 results = store_ext.store_calc_timeseries( 

2098 store.cstore, 

2099 source_coords_arr, 

2100 source_terms, 

2101 (delays - itoffset*dt), 

2102 receiver_coords_arr, 

2103 scheme, 

2104 interpolation, 

2105 itmin, 

2106 nsamples, 

2107 nthreads) 

2108 except Exception as e: 

2109 raise e 

2110 

2111 provided_components = self.get_provided_components() 

2112 ncomponents = len(provided_components) 

2113 

2114 seismograms = [dict() for _ in range(nreceiver)] 

2115 for ires in range(len(results)): 

2116 res = results.pop(0) 

2117 ireceiver = ires // ncomponents 

2118 

2119 comp = provided_components[res[-2]] 

2120 

2121 if comp not in components: 

2122 continue 

2123 

2124 tr = GFTrace(*res[:-2]) 

2125 tr.deltat = config.deltat * decimate 

2126 tr.itmin += itoffset 

2127 

2128 tr.n_records_stacked = 0 

2129 tr.t_optimize = 0. 

2130 tr.t_stack = 0. 

2131 tr.err = res[-1] 

2132 

2133 seismograms[ireceiver][comp] = tr 

2134 

2135 return seismograms 

2136 

2137 def seismogram(self, source, receiver, components, deltat=None, 

2138 itmin=None, nsamples=None, 

2139 interpolation='nearest_neighbor', 

2140 optimization='enable', nthreads=1): 

2141 

2142 config = self.config 

2143 

2144 if deltat is None: 

2145 decimate = 1 

2146 else: 

2147 decimate = int(round(deltat/config.deltat)) 

2148 if abs(deltat / (decimate * config.deltat) - 1.0) > 0.001: 

2149 raise StoreError( 

2150 'unavailable decimation ratio target.deltat / store.deltat' 

2151 ' = %g / %g' % (deltat, config.deltat)) 

2152 

2153 store, decimate_ = self._decimated_store(decimate) 

2154 

2155 if not store._f_index: 

2156 store.open() 

2157 

2158 scheme = config.component_scheme 

2159 

2160 source_coords_arr = source.coords5() 

2161 source_terms = source.get_source_terms(scheme) 

2162 receiver_coords_arr = receiver.coords5[num.newaxis, :].copy() 

2163 

2164 try: 

2165 params = store_ext.make_sum_params( 

2166 store.cstore, 

2167 source_coords_arr, 

2168 source_terms, 

2169 receiver_coords_arr, 

2170 scheme, 

2171 interpolation, nthreads) 

2172 

2173 except store_ext.StoreExtError: 

2174 raise meta.OutOfBounds() 

2175 

2176 provided_components = self.get_provided_components() 

2177 

2178 out = {} 

2179 for icomp, comp in enumerate(provided_components): 

2180 if comp in components: 

2181 weights, irecords = params[icomp] 

2182 

2183 neach = irecords.size // source.times.size 

2184 delays = num.repeat(source.times, neach) 

2185 

2186 tr = store._sum( 

2187 irecords, delays, weights, itmin, nsamples, decimate_, 

2188 'c', optimization) 

2189 

2190 # to prevent problems with rounding errors (BaseStore saves 

2191 # deltat as a 4-byte floating point value, value from YAML 

2192 # config is more accurate) 

2193 tr.deltat = config.deltat * decimate 

2194 

2195 out[comp] = tr 

2196 

2197 return out 

2198 

2199 

2200__all__ = ''' 

2201gf_dtype 

2202NotMultipleOfSamplingInterval 

2203GFTrace 

2204CannotCreate 

2205CannotOpen 

2206DuplicateInsert 

2207NotAllowedToInterpolate 

2208NoSuchExtra 

2209NoSuchPhase 

2210BaseStore 

2211Store 

2212SeismosizerErrorEnum 

2213'''.split()