# http://pyrocko.org - GPLv3 # # The Pyrocko Developers, 21st Century # ---|P------/S----------~Lg----------
# gf store endianness
('nrecords', E + 'u8'), ('deltat', E + 'f4'), ]
('data_offset', E + 'u8'), ('itmin', E + 'i4'), ('nsamples', E + 'u4'), ('begin_value', E + 'f4'), ('end_value', E + 'f4'), ])
raise ValueError('invalid name %s' % s)
# - data_offset # # Data file offset of first sample in bytes (the seek value). # Special values: # # 0x00 - missing trace # 0x01 - zero trace (GF trace is physically zero) # 0x02 - short trace of 1 or 2 samples (no need for data allocation) # # - itmin # # Time of first sample of the trace as a multiple of the sampling interval. All # GF samples must be evaluated at multiples of the sampling interval with # respect to a global reference time zero. Must be set also for zero traces. # # - nsamples # # Number of samples in the GF trace. Should be set to zero for zero traces. # # - begin_value, end_value # # Values of first and last sample. These values are included in data[] # redundantly.
''' Green's Function trace class for handling traces from the GF store. '''
def from_trace(cls, tr):
is_zero=False, begin_value=None, end_value=None, tmin=None):
'GFTrace: either tmin or itmin must be given'\
raise NotMultipleOfSamplingInterval( 'GFTrace: tmin (%g) is not a multiple of sampling ' 'interval (%g)' % (tmin, deltat))
else:
def t(self):
    ''' Time vector of the GF trace.

    :returns: Time vector
    :rtype: :py:class:`numpy.ndarray`
    '''
    # First and last sample times, derived from the integer sample
    # offset itmin and the sampling interval deltat.
    nsamples = self.data.size
    tbegin = self.itmin * self.deltat
    tend = tbegin + (nsamples - 1) * self.deltat
    return num.linspace(tbegin, tend, nsamples)
def __str__(self, itmin=0):
    ''' Render the trace as '|'-separated, column-aligned sample values.

    Samples before the trace's own start (when ``itmin`` is earlier than
    ``self.itmin``) are shown as blank 7-character cells.
    '''
    if self.is_zero:
        return 'ZERO'

    iend = self.itmin + self.data.size
    cells = [
        '%7.4g' % self.data[i - self.itmin]
        if self.itmin <= i < iend
        else ' '*7
        for i in range(itmin, iend)]

    return '|'.join(cells)
from pyrocko import trace return trace.Trace( net, sta, loc, cha, ydata=self.data, deltat=self.deltat, tmin=self.itmin*self.deltat)
self.value = value self.n_records_stacked = 0. self.t_stack = 0. self.t_optimize = 0.
return {k: Zero for k in tracesdict.keys()}
data = num.zeros(n, dtype=gf_dtype) if not tr.is_zero: lo = tr.itmin-itmin hi = lo + tr.data.size data[:lo] = tr.data[0] data[lo:hi] = tr.data data[hi:] = tr.data[-1]
tr = GFTrace(data, itmin, deltat)
def __str__(self):
    ''' Human-readable description of this error. '''
    message = 'unexpected end of data (truncated traces file?)'
    return message
def __str__(self):
    ''' Human-readable description of this error. '''
    message = 'not allowed to interpolate'
    return message
StoreError.__init__(self) self.value = s
def __str__(self):
    ''' Human-readable description, naming the missing extra-info key. '''
    template = 'extra information for key "%s" not found.'
    return template % self.value
StoreError.__init__(self) self.value = s
def __str__(self):
    ''' Human-readable description, naming the missing phase key. '''
    template = (
        'phase for key "%s" not found. '
        'Running "fomosto ttt" may be needed.')
    return template % self.value
if force: os.remove(fn) else: raise CannotCreate('file %s already exists' % fn)
def index_fn_(store_dir):
def data_fn_(store_dir):
def config_fn_(store_dir):
except Exception: raise CannotCreate('cannot create directory %s' % store_dir)
else: assert False, 'invalid mode: %s' % self.mode
except Exception: self.mode = '' raise CannotOpen('cannot open gf store: %s' % self.store_dir)
# replace single precision deltat value in store with the double # precision value from the config, if available self._f_index.fileno(), self._f_data.fileno(), self.get_deltat() or 0.0)
except store_ext.StoreExtError as e: raise StoreError(str(e))
except IOError as e: # occasionally got this one on an NFS volume if e.errno == errno.EBUSY: time.sleep(0.01) else: raise
except IOError as e: if e.errno == errno.ENOLCK: time.sleep(0.01) else: raise
self._put(irecord, trace)
return self._get_record(irecord)
return self._get_span(irecord, decimate=decimate)
implementation='c'): return self._get(irecord, itmin, nsamples, decimate, implementation)
nsamples=None, decimate=1, implementation='c', optimization='enable'): return self._sum(irecords, delays, weights, itmin, nsamples, decimate, implementation, optimization)
if not self._f_index: self.open()
return self._records[irecord]
'''Retrieve complete GF trace from storage.'''
raise StoreError('store not open in read mode')
self.cstore, int(irecord), int(itmin), int(nsamples))) except store_ext.StoreExtError as e: raise StoreError(str(e))
else:
return self._deltat
raise StoreError('invalid record number requested ' '(irecord = %i, nrecords = %i)' % (irecord, self._nrecords))
self._records[irecord]
else: itmin = itmin * decimate nsamples = nsamples * decimate itmax = itmin + nsamples - decimate
return None
itmin_ext = (max(itmin, itmin_data)//decimate) * decimate return GFTrace(is_zero=True, itmin=itmin_ext//decimate)
ilo = max(itmin, itmin_data) - itmin_data ihi = min(itmin+nsamples, itmin_data+nsamples_data) - itmin_data data = self._get_data(ipos, begin_value, end_value, ilo, ihi)
return GFTrace(data, itmin=itmin_data+ilo, deltat=deltat, begin_value=begin_value, end_value=end_value)
else:
# put begin and end to multiples of new sampling rate
# add some padding for the aa filter
ipos, begin_value, end_value, ilo_data, ihi_data)
deltat*decimate, begin_value=begin_value, end_value=end_value)
''' Get temporal extent of GF trace at given index. '''
if not self._f_index: self.open()
assert 0 <= irecord < self._nrecords, \ 'irecord = %i, nrecords = %i' % (irecord, self._nrecords)
(_, itmin, nsamples, _, _) = self._records[irecord]
itmax = itmin + nsamples - 1
if decimate == 1: return itmin, itmax else: if nsamples == 0: return itmin//decimate, itmin//decimate - 1 else: return itmin//decimate, -((-itmax)//decimate)
''' Save GF trace to storage. '''
abs(trace.deltat - deltat) < 1e-7 * deltat 'irecord = %i, nrecords = %i' % (irecord, self._nrecords)
raise DuplicateInsert('record %i already in store' % irecord)
self._records[irecord] = (1, trace.itmin, 0, 0., 0.) return
else: ipos = 2
trace.data[0], trace.data[-1])
decimate):
''' Sum delayed and weighted GF traces. '''
if not self._f_index: self.open()
assert self.mode == 'r'
deltat = self.get_deltat() * decimate
if len(irecords) == 0: if None in (itmin, nsamples): return Zero else: return GFTrace( num.zeros(nsamples, dtype=gf_dtype), itmin, deltat, is_zero=True)
assert len(irecords) == len(delays) assert len(irecords) == len(weights)
if None in (itmin, nsamples): itmin_delayed, itmax_delayed = [], [] for irecord, delay in zip(irecords, delays): itmin, itmax = self._get_span(irecord, decimate=decimate) itmin_delayed.append(itmin + delay/deltat) itmax_delayed.append(itmax + delay/deltat)
itmin = int(math.floor(min(itmin_delayed))) nsamples = int(math.ceil(max(itmax_delayed))) - itmin + 1
out = num.zeros(nsamples, dtype=gf_dtype) if nsamples == 0: return GFTrace(out, itmin, deltat)
for ii in range(len(irecords)): irecord = irecords[ii] delay = delays[ii] weight = weights[ii]
if weight == 0.0: continue
idelay_floor = int(math.floor(delay/deltat)) idelay_ceil = int(math.ceil(delay/deltat))
gf_trace = self._get( irecord, itmin - idelay_ceil, nsamples + idelay_ceil - idelay_floor, decimate, 'reference')
assert gf_trace.itmin >= itmin - idelay_ceil assert gf_trace.data.size <= nsamples + idelay_ceil - idelay_floor
if gf_trace.is_zero: continue
ilo = gf_trace.itmin - itmin + idelay_floor ihi = ilo + gf_trace.data.size + (idelay_ceil-idelay_floor)
data = gf_trace.data
if idelay_floor == idelay_ceil: out[ilo:ihi] += data * weight else: if data.size: k = 1 if ihi <= nsamples: out[ihi-1] += gf_trace.end_value * \ ((idelay_ceil-delay/deltat) * weight) k = 0
out[ilo+1:ihi-k] += data[:data.size-k] * \ ((delay/deltat-idelay_floor) * weight) k = 1 if ilo >= 0: out[ilo] += gf_trace.begin_value * \ ((delay/deltat-idelay_floor) * weight) k = 0
out[ilo+k:ihi-1] += data[k:] * \ ((idelay_ceil-delay/deltat) * weight)
if ilo > 0 and gf_trace.begin_value != 0.0: out[:ilo] += gf_trace.begin_value * weight
if ihi < nsamples and gf_trace.end_value != 0.0: out[ihi:] += gf_trace.end_value * weight
return GFTrace(out, itmin, deltat)
decimate):
if not self._f_index: self.open()
deltat = self.get_deltat() * decimate
if len(irecords) == 0: if None in (itmin, nsamples): return Zero else: return GFTrace( num.zeros(nsamples, dtype=gf_dtype), itmin, deltat, is_zero=True)
datas = [] itmins = [] for i, delay, weight in zip(irecords, delays, weights): tr = self._get(i, None, None, decimate, 'reference')
idelay_floor = int(math.floor(delay/deltat)) idelay_ceil = int(math.ceil(delay/deltat))
if idelay_floor == idelay_ceil: itmins.append(tr.itmin + idelay_floor) datas.append(tr.data.copy()*weight) else: itmins.append(tr.itmin + idelay_floor) datas.append(tr.data.copy()*weight*(idelay_ceil-delay/deltat)) itmins.append(tr.itmin + idelay_ceil) datas.append(tr.data.copy()*weight*(delay/deltat-idelay_floor))
itmin_all = min(itmins)
itmax_all = max(itmin_ + data.size for (itmin_, data) in zip(itmins, datas))
if itmin is not None: itmin_all = min(itmin_all, itmin) if nsamples is not None: itmax_all = max(itmax_all, itmin+nsamples)
nsamples_all = itmax_all - itmin_all
arr = num.zeros((len(datas), nsamples_all), dtype=gf_dtype) for i, itmin_, data in zip(num.arange(len(datas)), itmins, datas): if data.size > 0: ilo = itmin_-itmin_all ihi = ilo + data.size arr[i, :ilo] = data[0] arr[i, ilo:ihi] = data arr[i, ihi:] = data[-1]
sum_arr = arr.sum(axis=0)
if itmin is not None and nsamples is not None: ilo = itmin-itmin_all ihi = ilo + nsamples sum_arr = sum_arr[ilo:ihi]
else: itmin = itmin_all
return GFTrace(sum_arr, itmin, deltat)
if num.unique(irecords).size == irecords.size: return irecords, delays, weights
deltat = self.get_deltat()
delays = delays / deltat irecords2 = num.repeat(irecords, 2) delays2 = num.empty(irecords2.size, dtype=num.float) delays2[0::2] = num.floor(delays) delays2[1::2] = num.ceil(delays) weights2 = num.repeat(weights, 2) weights2[0::2] *= 1.0 - (delays - delays2[0::2]) weights2[1::2] *= (1.0 - (delays2[1::2] - delays)) * \ (delays2[1::2] - delays2[0::2])
delays2 *= deltat
iorder = num.lexsort((delays2, irecords2))
irecords2 = irecords2[iorder] delays2 = delays2[iorder] weights2 = weights2[iorder]
ui = num.empty(irecords2.size, dtype=num.bool) ui[1:] = num.logical_or(num.diff(irecords2) != 0, num.diff(delays2) != 0.)
ui[0] = 0 ind2 = num.cumsum(ui) ui[0] = 1 ind1 = num.where(ui)[0]
irecords3 = irecords2[ind1] delays3 = delays2[ind1] weights3 = num.bincount(ind2, weights2)
return irecords3, delays3, weights3
implementation, optimization):
if not self._f_index: self.open()
t0 = time.time() if optimization == 'enable': irecords, delays, weights = self._optimize( irecords, delays, weights) else: assert optimization == 'disable'
t1 = time.time() deltat = self.get_deltat()
if implementation == 'c' and decimate == 1: if delays.size != 0: itoffset = int(num.floor(num.min(delays)/deltat)) else: itoffset = 0
if nsamples is None: nsamples = -1
if itmin is None: itmin = 0 else: itmin -= itoffset
try: tr = GFTrace(*store_ext.store_sum( self.cstore, irecords.astype(num.uint64), delays - itoffset*deltat, weights.astype(num.float32), int(itmin), int(nsamples)))
tr.itmin += itoffset
except store_ext.StoreExtError as e: raise StoreError(str(e) + ' in store %s' % self.store_dir)
elif implementation == 'alternative': tr = self._sum_impl_alternative(irecords, delays, weights, itmin, nsamples, decimate)
else: tr = self._sum_impl_reference(irecords, delays, weights, itmin, nsamples, decimate)
t2 = time.time()
tr.n_records_stacked = irecords.size tr.t_optimize = t1 - t0 tr.t_stack = t2 - t1
return tr
nthreads): if not self._f_index: self.open()
return store_ext.store_sum_static( self.cstore, irecords, delays, weights, it, ntargets, nthreads)
self._f_index, dtype=gf_record_dtype, offset=gf_store_header_fmt_size, mode=('r', 'r+')[self.mode == 'w'])
else: self._f_index.seek(gf_store_header_fmt_size) records = num.fromfile(self._f_index, dtype=gf_record_dtype)
self.get_deltat()))
else: self._f_index.seek(gf_store_header_fmt_size) self._records.tofile(self._f_index) self._f_index.flush()
data_orig = num.empty(2, dtype=gf_dtype) data_orig[0] = begin_value data_orig[1] = end_value return data_orig[ilo:ihi] else: int(ipos + ilo*gf_dtype_nbytes_per_sample)) self._f_data, gf_dtype_store, ihi-ilo).astype(gf_dtype)
raise ShortRead() else: return num.empty((0,), dtype=gf_dtype)
bins=[0, 1, 2, 3, num.uint64(-1)])[0]
def size_index(self):
def size_data(self):
def size_index_and_data(self):
    ''' Combined size in bytes of the store's index and data parts. '''
    total = self.size_index
    total += self.size_data
    return total
def size_index_and_data_human(self):
    ''' Combined size of index and data as a human-readable string. '''
    nbytes = self.size_index_and_data
    return util.human_bytesize(nbytes)
total=self._nrecords, inserted=(counter[1] + counter[2] + counter[3]), empty=counter[0], short=counter[2], zero=counter[1], size_data=self.size_data, size_index=self.size_index, )
if force: shutil.rmtree(dpath) else: raise CannotCreate('Directory "%s" already exists.' % dpath)
''' Green's function disk storage and summation machine.
The `Store` can be used to efficiently store, retrieve, and sum Green's function traces. A `Store` contains many 1D time traces sampled at even multiples of a global sampling rate, where each time trace has an individual start and end time. The traces are treated as having repeating end points, so the functions they represent can be non-constant only between begin and end time. It provides capabilities to retrieve decimated traces and to extract parts of the traces. The main purpose of this class is to provide a fast, easy to use, and flexible mechanism to compute weighted delay-and-sum stacks with many Green's function traces involved.
Individual Green's functions are accessed through a single integer index at low level. On top of that, various indexing schemes can be implemented by providing a mapping from physical coordinates to the low level index `i`. E.g. for a problem with cylindrical symmetry, one might define a mapping from the coordinates (`receiver_depth`, `source_depth`, `distance`) to the low level index. Index translation is done in the :py:class:`pyrocko.gf.meta.Config` subclass object associated with the Store. It is accessible through the store's :py:attr:`config` attribute, and contains all meta information about the store, including physical extent, geometry, sampling rate, and information about the type of the stored Green's functions. Information about the underlying earth model can also be found there.
A GF store can also contain tabulated phase arrivals. In basic cases, these can be created with the :py:meth:`make_ttt` and evaluated with the :py:func:`t` methods.
.. attribute:: config
The :py:class:`pyrocko.gf.meta.Config` derived object associated with the store which contains all meta information about the store and provides the high-level to low-level index mapping.
.. attribute:: store_dir
Path to the store's data directory.
.. attribute:: mode
The mode in which the store is opened: ``'r'``: read-only, ``'w'``: writeable. '''
''' Create new GF store.
Creates a new GF store at path ``store_dir``. The layout of the GF is defined with the parameters given in ``config``, which should be an object of a subclass of :py:class:`~pyrocko.gf.meta.Config`. This function will refuse to overwrite an existing GF store, unless ``force`` is set to ``True``. If more information, e.g. parameters used for the modelling code, earth models or other, should be saved along with the GF store, these may be provided though a dict given to ``extra``. The keys of this dict must be names and the values must be *guts* type objects.
:param store_dir: GF store path :type store_dir: str :param config: GF store Config :type config: :py:class:`~pyrocko.gf.meta.Config` :param force: Force overwrite, defaults to ``False`` :type force: bool :param extra: Extra information :type extra: dict or None '''
except Exception: raise CannotCreate('cannot create directory %s' % store_dir)
force=force)
raise StoreError( 'directory "%s" does not seem to contain a GF store ' '("config" file not found)' % store_dir)
self.cstore, mscheme, c.mins, c.maxs, c.deltas, c.ns.astype(num.uint64), c.ncomponents)
if self.config.reference is not None and not force: return self.ensure_uuid() reference = '%s-%s' % (self.config.id, self.config.uuid[0:6])
if self.config.reference is not None: self.config.reference = reference self.save_config() else: with open(self.config_fn(), 'a') as config: config.write('reference: %s\n' % reference) self.load_config()
if self.config.uuid is not None and not force: return uuid = self.create_store_hash()
if self.config.uuid is not None: self.config.uuid = uuid self.save_config() else: with open(self.config_fn(), 'a') as config: config.write('\nuuid: %s\n' % uuid) self.load_config()
logger.info('creating hash for store ...') m = hashlib.sha1()
traces_size = op.getsize(self.data_fn()) with open(self.data_fn(), 'rb') as traces: while traces.tell() < traces_size: m.update(traces.read(4096)) traces.seek(1024 * 1024 * 10, 1)
with open(self.config_fn(), 'rb') as config: m.update(config.read()) return m.hexdigest()
''' Get extra information stored under given key. '''
raise NoSuchExtra(key)
''' Upgrade store config and files to latest Pyrocko version. ''' fns.append(self.get_extra_path(key))
if valid_string_id(x)]
''' Insert trace into GF store.
Store a single GF trace at (high-level) index ``args``.
:param args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. ``(source_depth, distance, component)`` as in :py:class:`~pyrocko.gf.meta.ConfigTypeA`. :type args: tuple :returns: GF trace at ``args`` :rtype: :py:class:`~pyrocko.gf.store.GFTrace` '''
irecord = self.config.irecord(*args) return self._get_record(irecord)
interpolation='nearest_neighbor', implementation='c'): ''' Retrieve GF trace from store.
Retrieve a single GF trace from the store at (high-level) index ``args``. By default, the full trace is retrieved. Given ``itmin`` and ``nsamples``, only the selected portion of the trace is extracted. If ``decimate`` is an integer in the range [2,8], the trace is decimated on the fly or, if available, the trace is read from a decimated version of the GF store.
:param args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. ``(source_depth, distance, component)`` as in :py:class:`~pyrocko.gf.meta.ConfigTypeA`. :type args: tuple :param itmin: Start time index (start time is ``itmin * dt``), defaults to None :type itmin: int or None :param nsamples: Number of samples, defaults to None :type nsamples: int or None :param decimate: Decimatation factor, defaults to 1 :type decimate: int :param interpolation: Interpolation method ``['nearest_neighbor', 'multilinear', 'off']``, defaults to ``'nearest_neighbor'`` :type interpolation: str :param implementation: Implementation to use ``['c', 'reference']``, defaults to ``'c'``. :type implementation: str
:returns: GF trace at ``args`` :rtype: :py:class:`~pyrocko.gf.store.GFTrace` '''
implementation)
raise NotAllowedToInterpolate() else: implementation)
elif interpolation == 'multilinear': tr = store._sum(irecords, num.zeros(len(irecords)), weights, itmin, nsamples, decimate, implementation, 'disable')
# to prevent problems with rounding errors (BaseStore saves deltat # as a 4-byte floating point value, value from YAML config is more # accurate)
decimate=1, interpolation='nearest_neighbor', implementation='c', optimization='enable'): ''' Sum delayed and weighted GF traces.
Calculate sum of delayed and weighted GF traces. ``args`` is a tuple of arrays forming the (high-level) indices of the GF traces to be selected. Delays and weights for the summation are given in the arrays ``delays`` and ``weights``. ``itmin`` and ``nsamples`` can be given to restrict to computation to a given time interval. If ``decimate`` is an integer in the range [2,8], decimated traces are used in the summation.
:param args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. ``(source_depth, distance, component)`` as in :py:class:`~pyrocko.gf.meta.ConfigTypeA`. :type args: tuple(numpy.ndarray) :param delays: Delay times :type delays: :py:class:`numpy.ndarray` :param weights: Trace weights :type weights: :py:class:`numpy.ndarray` :param itmin: Start time index (start time is ``itmin * dt``), defaults to None :type itmin: int or None :param nsamples: Number of samples, defaults to None :type nsamples: int or None :param decimate: Decimatation factor, defaults to 1 :type decimate: int :param interpolation: Interpolation method ``['nearest_neighbor', 'multilinear', 'off']``, defaults to ``'nearest_neighbor'`` :type interpolation: str :param implementation: Implementation to use, ``['c', 'alternative', 'reference']``, where ``'alternative'`` and ``'reference'`` use a Python implementation, defaults to `'c'` :type implementation: str :param optimization: Optimization mode ``['enable', 'disable']``, defaults to ``'enable'`` :type optimization: str :returns: Stacked GF trace. :rtype: :py:class:`~pyrocko.gf.store.GFTrace` '''
store, decimate_ = self._decimated_store(decimate)
if interpolation == 'nearest_neighbor': irecords = store.config.irecords(*args) else: assert interpolation == 'multilinear' irecords, ip_weights = store.config.vicinities(*args) neach = irecords.size // args[0].size weights = num.repeat(weights, neach) * ip_weights delays = num.repeat(delays, neach)
tr = store._sum(irecords, delays, weights, itmin, nsamples, decimate_, implementation, optimization)
# to prevent problems with rounding errors (BaseStore saves deltat # as a 4-byte floating point value, value from YAML config is more # accurate) tr.deltat = self.config.deltat * decimate return tr
show_progress=False): ''' Create decimated version of GF store.
Create a downsampled version of the GF store. Downsampling is done for the integer factor ``decimate`` which should be in the range [2,8]. If ``config`` is ``None``, all traces of the GF store are decimated and held available (i.e. the index mapping of the original store is used), otherwise, a different spatial stepping can be specified by giving a modified GF store configuration in ``config`` (see :py:meth:`create`). Decimated GF sub-stores are created under the ``decimated`` subdirectory within the GF store directory. Holding available decimated versions of the GF store can save computation time, IO bandwidth, or decrease memory footprint at the cost of increased disk space usage, when computations are done for lower frequency signals.
:param decimate: Decimate factor :type decimate: int :param config: GF store config object, defaults to None :type config: :py:class:`~pyrocko.gf.meta.Config` or None :param force: Force overwrite, defaults to ``False`` :type force: bool :param show_progress: Show progress, defaults to ``False`` :type show_progress: bool '''
raise StoreError('decimate argument must be in the range [2,8]')
del self._decimated[decimate]
if force: shutil.rmtree(store_dir) else: raise CannotCreate('store already exists at %s' % store_dir)
have_holes.append(phase_id)
for phase_id in have_holes: logger.warn( 'Travel time table of phase "{}" contains holes. You can ' ' use `fomosto tttlsd` to fix holes.'.format( phase_id)) else:
logger.warn('wrong begin value for trace at %s ' '(data corruption?)' % str(args)) problems += 1 logger.warn('wrong end value for trace at %s ' '(data corruption?)' % str(args)) problems += 1 logger.warn('nans or infs in trace at %s' % str(args)) problems += 1
if config.earthmodel_receiver_1d.profile('z')[-1] not in\ config.earthmodel_1d.profile('z'): logger.warn('deepest layer of earthmodel_receiver_1d not ' 'found in earthmodel_1d')
else: store = self._decimated[decimate] if store is None: store = Store(self._decimated_store_dir(decimate), 'r') self._decimated[decimate] = store
return store, 1
"""Get stored phase from GF store
:returns: Phase information :rtype: :py:class:`pyrocko.spit.SPTree` """ raise NoSuchPhase(phase_id)
else: provider, phase_def = 'stored', toks[0]
elif provider == 'vel': vel = float(phase_def) * 1000.
def evaluate(args):
    # Straight-line travel time: source-receiver distance divided by
    # the constant velocity given in the phase definition.
    distance = self.config.get_distance(args)
    return distance / vel
return evaluate
elif provider == 'vel_surface': vel = float(phase_def) * 1000.
def evaluate(args):
    # Travel time along the surface: surface distance divided by the
    # constant velocity given in the phase definition.
    distance = self.config.get_surface_distance(args)
    return distance / vel
return evaluate
elif provider in ('cake', 'iaspei'): from pyrocko import cake mod = self.config.earthmodel_1d if provider == 'cake': phases = [cake.PhaseDef(phase_def)] else: phases = cake.PhaseDef.classic(phase_def)
def evaluate(args):
    ''' Return the earliest ray-theoretical arrival time for ``args``,
    or ``None`` if no ray arrives. '''
    # Accept either (source_depth, distance) tuples, using the store's
    # fixed receiver depth, or full (receiver_depth, source_depth,
    # distance) tuples.
    if len(args) == 2:
        zr, zs, x = (self.config.receiver_depth,) + args
    elif len(args) == 3:
        zr, zs, x = args
    else:
        assert False

    times = []
    if phases:
        rays = mod.arrivals(
            phases=phases, distances=[x*cake.m2d],
            zstart=zs, zstop=zr)

        times = [ray.t for ray in rays]

    return min(times) if times else None
return evaluate
raise StoreError('unsupported phase provider: %s' % provider)
''' Compute interpolated phase arrivals.
**Examples:**
If ``test_store`` is of :py:class:`~pyrocko.gf.meta.ConfigTypeA`::
test_store.t('p', (1000, 10000)) test_store.t('last{P|Pdiff}', (1000, 10000)) # The latter arrival # of P or diffracted # P phase
If ``test_store`` is of :py:class:`~pyrocko.gf.meta.ConfigTypeB`::
test_store.t('S', (1000, 1000, 10000)) test_store.t('first{P|p|Pdiff|sP}', (1000, 1000, 10000)) # The ` # first arrival of # the given phases is # selected
:param timing: Timing string as described above :type timing: str or :py:class:`~pyrocko.gf.meta.Timing` :param \\*args: :py:class:`~pyrocko.gf.meta.Config` index tuple, e.g. ``(source_depth, distance, component)`` as in :py:class:`~pyrocko.gf.meta.ConfigTypeA`. :type \\*args: tuple :returns: Phase arrival according to ``timing`` :rtype: float or None '''
else:
''' Compute tight parameterized time ranges to include given timings.
Calculates appropriate time ranges to cover given begin and end timings over all GF points in the store. A dict with the following keys is returned:
* ``'tmin'``: time [s], minimum of begin timing over all GF points * ``'tmax'``: time [s], maximum of end timing over all GF points * ``'vred'``, ``'tmin_vred'``: slope [m/s] and offset [s] of reduction velocity [m/s] appropriate to catch begin timing over all GF points * ``'tlenmax_vred'``: maximum time length needed to cover all end timings, when using linear slope given with (``vred``, ``tmin_vred``) as start '''
warned.add(str(begin)) warned.add(str(end))
w = ' | '.join(list(warned)) msg = '''determination of time window failed using phase definitions: %s.\n Travel time table contains holes in probed ranges. You can use `fomosto tttlsd` to fix holes.''' % w if force: logger.warn(msg) else: raise MakeTimingParamsFailed(msg)
raise MakeTimingParamsFailed('determination of time window failed')
self.config.deltat * math.floor(tmin_vred / self.config.deltat) - xe * sred)
vred = 1.0/sred else:
tmin=tminmin, tmax=num.nanmax(tmaxs), tlenmax=num.nanmax(tlens), tmin_vred=tmin_vred, tlenmax_vred=tlenmax_vred, vred=vred)
'''Compute travel time tables.
Travel time tables are computed using the 1D earth model defined in :py:attr:`~pyrocko.gf.meta.Config.earthmodel_1d` for each defined phase in :py:attr:`~pyrocko.gf.meta.Config.tabulated_phases`. The accuracy of the tablulated times is adjusted to the sampling rate of the store. '''
return
self.check_earthmodels(config)
raise StoreError('no earth model found')
logger.info('file already exists: %s' % fn) continue
elif len(args) == 3: zr, zs, x = args else: assert False
phases=phases, distances=[x*cake.m2d], zstart=zs, zstop=zr)
else: return None
phase_id)
f=evaluate, ftol=config.deltat*0.5, xbounds=num.transpose((config.mins, config.maxs)), xtols=config.deltas)
self.config.component_scheme]
or scheme_desc.default_stored_quantity
return scheme_desc.provided_components else: quantity + '.' + comp for comp in scheme_desc.provided_components]
pdef = self.config.get_tabulated_phase(phase_id) mode = None for phase in pdef.phases: for leg in phase.legs(): if mode is None: mode = leg.mode
else: if mode != leg.mode: raise StoreError( 'Can only fix holes in pure P or pure S phases.')
sptree = self.get_stored_phase(phase_id) sptree_lsd = self.config.fix_ttt_holes(sptree, mode)
phase_lsd = phase_id + '.lsd' fn = self.phase_filename(phase_lsd) sptree_lsd.dump(fn)
interpolation='nearest_neighbor', nthreads=0): if not self._f_index: self.open()
out = {} ntargets = multi_location.ntargets source_terms = source.get_source_terms(self.config.component_scheme) # TODO: deal with delays for snapshots > 1 sample
if itsnapshot is not None: delays = source.times
# Fringe case where we sample at sample 0 and sample 1 tsnapshot = itsnapshot * self.config.deltat if delays.max() == tsnapshot and delays.min() != tsnapshot: delays[delays == delays.max()] -= self.config.deltat
else: delays = source.times * 0 itsnapshot = 1
if ntargets == 0: raise StoreError('MultiLocation.coords5 is empty')
res = store_ext.store_calc_static( self.cstore, source.coords5(), source_terms, delays, multi_location.coords5, self.config.component_scheme, interpolation, itsnapshot, nthreads)
out = {} for icomp, (comp, comp_res) in enumerate( zip(self.get_provided_components(), res)): if comp not in components: continue out[comp] = res[icomp]
return out
itmin=None, nsamples=None, interpolation='nearest_neighbor', optimization='enable', nthreads=1):
'Unknown interpolation: %s' % interpolation
receivers = [receivers]
else: decimate = int(round(deltat/config.deltat)) if abs(deltat / (decimate * config.deltat) - 1.0) > 0.001: raise StoreError( 'unavailable decimation ratio target.deltat / store.deltat' ' = %g / %g' % (deltat, config.deltat))
itmin = num.zeros(nreceiver, dtype=num.int32) else:
nsamples = num.zeros(nreceiver, dtype=num.int32) - 1 else:
store.cstore, source_coords_arr, source_terms, (delays - itoffset*dt), receiver_coords_arr, scheme, interpolation, itmin, nsamples, nthreads) except Exception as e: raise e
itmin=None, nsamples=None, interpolation='nearest_neighbor', optimization='enable', nthreads=1):
config = self.config
if deltat is None: decimate = 1 else: decimate = int(round(deltat/config.deltat)) if abs(deltat / (decimate * config.deltat) - 1.0) > 0.001: raise StoreError( 'unavailable decimation ratio target.deltat / store.deltat' ' = %g / %g' % (deltat, config.deltat))
store, decimate_ = self._decimated_store(decimate)
if not store._f_index: store.open()
scheme = config.component_scheme
source_coords_arr = source.coords5() source_terms = source.get_source_terms(scheme) receiver_coords_arr = receiver.coords5[num.newaxis, :].copy()
try: params = store_ext.make_sum_params( store.cstore, source_coords_arr, source_terms, receiver_coords_arr, scheme, interpolation, nthreads)
except store_ext.StoreExtError: raise meta.OutOfBounds()
provided_components = self.get_provided_components()
out = {} for icomp, comp in enumerate(provided_components): if comp in components: weights, irecords = params[icomp]
neach = irecords.size // source.times.size delays = num.repeat(source.times, neach)
tr = store._sum( irecords, delays, weights, itmin, nsamples, decimate_, 'c', optimization)
# to prevent problems with rounding errors (BaseStore saves # deltat as a 4-byte floating point value, value from YAML # config is more accurate) tr.deltat = config.deltat * decimate
out[comp] = tr
return out
gf_dtype NotMultipleOfSamplingInterval GFTrace CannotCreate CannotOpen DuplicateInsert NotAllowedToInterpolate NoSuchExtra NoSuchPhase BaseStore Store SeismosizerErrorEnum '''.split() |