Timestamp, List, Dict)
''' Base class for GF store selectors.
GF store selectors can be implemented to select different stores, based on station location, source location or other characteristics. '''
''' Store ID selector picking CRUST 2.0 model based on event location. '''
help="Template for the GF store ID. For example ``'crust2_${id}'`` " "where ``'${id}'`` will be replaced with the corresponding CRUST " "2.0 profile identifier for the source location.")
s = Template(self.template) return s.substitute(id=( crust2x2.get_profile(event.lat, event.lon)._ident).lower())
''' Store ID selector using a manual station to store ID mapping. '''
String.T(), gf.StringID.T(), help='Dictionary with station to store ID pairs, keys are NET.STA. ' "Add a fallback store ID under the key ``'others'``.")
try: store_id = self.mapping['%s.%s' % (st.network, st.station)] except KeyError: try: store_id = self.mapping['others'] except KeyError: raise StoreIDSelectorError( 'No store ID found for station "%s.%s".' % ( st.network, st.station))
return store_id
''' Store ID selector using a mapping from station depth range to store ID. '''
for r in self.depth_ranges: if r.depth_min <= st.depth < r.depth_max: return r.store_id
raise StoreIDSelectorError( 'No store ID found for station "%s.%s" at %g m depth.' % ( st.network, st.station, st.depth))
'time_domain', 'frequency_domain', 'log_frequency_domain', 'envelope', 'absolute', 'cc_max_norm']
optional=True, help='Start of main time window used for waveform fitting.') optional=True, help='End of main time window used for waveform fitting.') optional=True, help='Decay time of taper prepended and appended to main time window ' 'used for waveform fitting [s].') optional=True, help='Synthetic phase arrival definition for alignment of observed ' 'and synthetic traces.') optional=True, help='Name of picked phase for alignment of observed and synthetic ' 'traces.') default='time_domain', help='Type of data characteristic to be fitted.\n\nAvailable choices ' 'are: %s' % ', '.join("``'%s'``" % s for s in DomainChoice.choices)) default=2, help='Exponent to use in norm (1: L1-norm, 2: L2-norm)') default=0.0, help='If non-zero, allow synthetic and observed traces to be shifted ' 'against each other by up to +/- the given value [s].') default=0.0, help='If non-zero, a penalty misfit is added for non-zero shift ' 'values.\n\nThe penalty value is computed as ' '``autoshift_penalty_max * normalization_factor * tautoshift**2 ' '/ tautoshift_max**2``')
return self.fmin / self.ffactor, self.fmax * self.ffactor
target.string_id(), reason))
'''Handles seismogram targets or other targets of dynamic ground motion. ''' optional=True, help='excludes targets nearer to source, along a great circle') optional=True, help='excludes targets farther from source, along a great circle') optional=True, help='excludes targets nearer from source (direct distance)') optional=True, help='excludes targets farther from source (direct distance)') optional=True, help='excludes targets with smaller depths') optional=True, help='excludes targets with larger depths') String.T(), optional=True, help='If not None, list of stations/components to include according ' 'to their STA, NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.') String.T(), help='Stations/components to be excluded according to their STA, ' 'NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.') String.T(), optional=True, help="set channels to include, e.g. ['Z', 'T']") optional=True, help='select GF store based on event-station geometry.')
logger.warning( 'No stations found to create waveform target group.')
store_id = self.store_id_selector.get_store_id( event, st, cha) else:
quantity='displacement', codes=nslc, lat=st.lat, lon=st.lon, north_shift=st.north_shift, east_shift=st.east_shift, depth=st.depth, interpolation=self.interpolation, store_id=store_id, misfit_config=self.misfit_config, manual_weight=self.weight, normalisation_family=self.normalisation_family, path=self.path or default_path)
log_exclude(target, 'excluded by dataset') continue
nslcs_to_patterns(self.exclude), nslc): log_exclude(target, 'excluded by target group') continue
nslcs_to_patterns(self.include), nslc): log_exclude(target, 'excluded by target group') continue
target.distance_to(origin) < self.distance_min:
target.distance_to(origin) > self.distance_max:
target.distance_3d_to(origin) < self.distance_3d_min: log_exclude(target, 'distance_3d < distance_3d_min') continue
target.distance_3d_to(origin) > self.distance_3d_max: log_exclude(target, 'distance_3d > distance_3d_max') continue
target.depth < self.depth_min: log_exclude(target, 'depth < depth_min') continue
target.depth > self.depth_max: log_exclude(target, 'depth > depth_max') continue
return weed(origin, targets, self.limit)[0] else:
return self.ydata
return self.fmin + num.arange(self.ydata.size) * self.deltaf
def new_piggy_id(cls):
    '''
    Hand out the next unique piggyback subtarget identifier.
    '''
    # The counter is kept on the base class explicitly (not on ``cls``) so
    # that identifiers remain unique across any subclasses sharing it.
    issued = WaveformPiggybackSubtarget._next_piggy_id
    WaveformPiggybackSubtarget._next_piggy_id = issued + 1
    return issued
if piggy_id is None: piggy_id = self.new_piggy_id()
Object.__init__(self, piggy_id=piggy_id, **kwargs)
self, tr_proc_obs, trspec_proc_obs, tr_proc_syn, trspec_proc_syn):
raise NotImplementedError()
'''Carries the observations for a target and corresponding synthetics.
A number of different waveform or phase representations are possible. '''
def get_plot_classes(cls):
else: tfade = 1.0/config.fmax
else: tfade_taper = config.tfade
def backazimuth(self):
    '''
    Back-azimuth of this target: its azimuth turned by half a circle [deg].
    '''
    # NOTE(review): the result is not wrapped into [0., 360.); downstream
    # consumers presumably accept negative angles -- confirm.
    half_circle = 180.
    return self.azimuth - half_circle
config.fmin/config.ffactor, config.fmin, config.fmax, config.fmax*config.ffactor)
store = engine.get_store(self.store_id) tsyn = source.time + store.t( config.pick_synthetic_traveltime, source, self)
marker = ds.get_pick( source.name, self.codes[:3], config.pick_phasename)
if marker: tobs = marker.tmin
else: tinc_obs = 10.0 / self.misfit_config.fmax
(tmin - tfade) / tinc_obs) - 1.0) * tinc_obs (tmax + tfade) / tinc_obs) + 1.0) * tinc_obs
self.get_taper_params(engine, source)
tobs_shift = tobs - tsyn else:
tmin_fit - tfade * 2.0, tmax_fit + tfade * 2.0, fillmethod='repeat')
elif config.quantity == 'velocity': syn_resp = trace.DifferentiationResponse(1) elif config.quantity == 'acceleration': syn_resp = trace.DifferentiationResponse(2) else: GrondError('Unsupported quantity: %s' % config.quantity)
freqlimits=freqlimits, tfade=tfade, transfer_function=syn_resp)
tmin_fit+tobs_shift, tmax_fit+tobs_shift, tfade)
nslc, quantity=config.quantity, tinc_cache=1.0/(config.fmin or 0.1*config.fmax), tmin=tmin_fit+tobs_shift-tfade, tmax=tmax_fit+tobs_shift+tfade, tfade=tfade, freqlimits=freqlimits, deltat=tr_syn.deltat, cache=True, backazimuth=self.get_backazimuth_for_waveform())
tr_obs = tr_obs.copy() tr_obs.shift(-tobs_shift)
tr_obs, tr_syn, taper=trace.CosTaper( tmin_fit - tfade_taper, tmin_fit, tmax_fit, tmax_fit + tfade_taper), domain=config.domain, exponent=config.norm_exponent, flip=self.flip_norm, result_mode=self._result_mode, tautoshift_max=config.tautoshift_max, autoshift_penalty_max=config.autoshift_penalty_max, subtargets=self._piggyback_subtargets)
d = dict( (k, getattr(self, k)) for k in gf.Target.T.propnames) return [gf.Target(**d)]
self._piggyback_subtargets.append(subtarget)
tr_obs, tr_syn, taper, domain, exponent, tautoshift_max, autoshift_penalty_max, flip, result_mode='sparse', subtargets=[]):
''' Calculate misfit between observed and synthetic trace.
:param tr_obs: observed trace as :py:class:`pyrocko.trace.Trace` :param tr_syn: synthetic trace as :py:class:`pyrocko.trace.Trace` :param taper: taper applied in time domain as :py:class:`pyrocko.trace.Taper` :param domain: how to calculate difference, see :py:class:`DomainChoice` :param exponent: exponent of Lx type norms :param tautoshift_max: if non-zero, return lowest misfit when traces are allowed to shift against each other by up to +/- ``tautoshift_max`` :param autoshift_penalty_max: if non-zero, a penalty misfit is added for non-zero shift values. The penalty value is ``autoshift_penalty_max * normalization_factor * \ tautoshift**2 / tautoshift_max**2`` :param flip: ``bool``, if set to ``True``, normalization factor is computed against *tr_syn* rather than *tr_obs* :param result_mode: ``'full'``, include traces and spectra or ``'sparse'``, include only misfit and normalization factor in result
:returns: object of type :py:class:`WaveformMisfitResult` '''
piggyback_results.append( subtarget.evaluate( tr_proc_obs, trspec_proc_obs, tr_proc_syn, trspec_proc_syn))
int(math.floor(tautoshift_max / deltat))))
else:
ctr = trace.correlate( tr_proc_syn, tr_proc_obs, mode='same', normalization='normal')
tshift, cc_max = ctr.max() m = 0.5 - 0.5 * cc_max n = 0.5
elif domain == 'log_frequency_domain': a, b = trspec_proc_syn.ydata, trspec_proc_obs.ydata if flip: b, a = a, b
a = num.abs(a) b = num.abs(b)
eps = (num.mean(a) + num.mean(b)) * 1e-7 if eps == 0.0: eps = 1e-7
a = num.log(a + eps) b = num.log(b + eps)
m, n = trace.Lx_norm(a, b, norm=exponent)
misfits=num.array([[m, n]], dtype=num.float), processed_obs=tr_proc_obs, processed_syn=tr_proc_syn, filtered_obs=tr_obs.copy(), filtered_syn=tr_syn, spectrum_obs=trspec_proc_obs, spectrum_syn=trspec_proc_syn, taper=taper, tshift=tshift, cc=ctr)
misfits=num.array([[m, n]], dtype=num.float)) else: assert False
tr_proc = tr_proc.envelope(inplace=False) tr_proc.set_ydata(num.abs(tr_proc.get_ydata()))
tr_proc.set_ydata(num.abs(tr_proc.get_ydata()))
network=tr_proc.network, station=tr_proc.station, location=tr_proc.location, channel=tr_proc.channel, deltaf=df, fmin=0.0, ydata=spectrum)
else:
else: return float(x)
azimuths = num.zeros(len(targets)) dists = num.zeros(len(targets)) for i, target in enumerate(targets): _, azimuths[i] = target.azibazi_to(origin) dists[i] = target.distance_to(origin)
badnesses = num.ones(len(targets), dtype=float) deleted, meandists_kept = weeding.weed( azimuths, dists, badnesses, nwanted=limit, neighborhood=neighborhood)
targets_weeded = [ target for (delete, target) in zip(deleted, targets) if not delete]
return targets_weeded, meandists_kept, deleted
StoreIDSelectorError StoreIDSelector Crust2StoreIDSelector StationDictStoreIDSelector DepthRangeToStoreID StationDepthStoreIDSelector WaveformTargetGroup WaveformMisfitConfig WaveformMisfitTarget WaveformMisfitResult WaveformPiggybackSubtarget WaveformPiggybackSubresult '''.split() |