marker as pmarker load_all)
util.time_to_str(self.time_range[0]), util.time_to_str(self.time_range[1]))
def load_station_corrections(filename):
    '''
    Read a list of :py:class:`StationCorrection` objects from a YAML file.

    :param filename: path to the YAML file to read.
    :returns: list of station correction objects.
    :raises AssertionError: if any loaded object is not a
        ``StationCorrection``.
    '''
    corrections = load_all(filename=filename)
    # Fail loudly if the file contains anything other than station
    # corrections.
    assert all(
        isinstance(correction, StationCorrection)
        for correction in corrections)

    return corrections
def dump_station_corrections(station_corrections, filename):
    '''
    Write station correction objects to a YAML file.

    :param station_corrections: iterable of station correction objects.
    :param filename: output path.
    :returns: whatever ``dump_all`` returns for the written objects.
    '''
    result = dump_all(station_corrections, filename=filename)
    return result
self._cache = {}
self, stations=None, pyrocko_stations_filename=None, stationxml_filenames=None):
for station in stations: self.stations[station.nsl()] = station
'Loading stations from file "%s"...' % pyrocko_stations_filename)
'Loading stations from StationXML file "%s"...' % stationxml_filename)
logger.warning( 'No stations found for time %s in file "%s".' % ( util.time_to_str(ev.time), stationxml_filename))
logger.warning( 'Station "%s" has vertical component' ' information only, adding mocked channels.' % station.nsl_string()) station.add_channel(model.Channel('N')) station.add_channel(model.Channel('E'))
self.events.extend(events)
'Loading events from %s: %i events found.' % (filename, len(events))) except Exception as e: logger.warning('Could not load events from %s!', filename) raise e
show_progress=False):
show_progress=show_progress) fileformat=fileformat, show_progress=show_progress)
logger.debug( 'Loading SAC PZ responses from "%s"...' % sacpz_dirname) for x in enhanced_sacpz.iload_dirname(sacpz_dirname): self.responses[x.codes].append(x)
'Loading StationXML responses from "%s"...' % stationxml_filename)
fs.load_xml(filename=stationxml_filename))
def add_clippings(self, markers_filename):
    '''
    Register clipping time markers loaded from a marker file.

    Each marker's onset time is recorded under both the station key
    ``(net, sta, loc)`` and the channel key ``(net, sta, loc, cha)`` so
    clipping checks can be performed on either level. Times accumulate
    across repeated calls.

    :param markers_filename: path to a Pyrocko marker file.
    '''
    markers = pmarker.load_markers(markers_filename)
    clippings = {}
    for marker in markers:
        nslc = marker.one_nslc()
        nsl = nslc[:3]
        # setdefault replaces the manual "if key not in dict" dance.
        clippings.setdefault(nsl, []).append(marker.tmin)
        clippings.setdefault(nslc, []).append(marker.tmin)

    for k, times in clippings.items():
        # bugfix: ``num.float`` was removed in NumPy >= 1.24; the plain
        # builtin ``float`` is the documented replacement.
        atimes = num.array(times, dtype=float)
        if k not in self.clippings:
            self.clippings[k] = atimes
        else:
            # bugfix: previously the whole ``self.clippings`` dict was
            # passed as first argument (with ``atimes`` landing in the
            # ``axis`` slot). Concatenate the stored array for this key
            # with the newly loaded times instead.
            self.clippings[k] = num.concatenate(
                (self.clippings[k], atimes))
with open(filename, 'r') as f: blacklist.extend( s.strip() for s in f.read().splitlines()) else:
logger.debug('Loading whitelisted stations...') if filenames: whitelist = list(whitelist) for filename in filenames: with open(filename, 'r') as f: whitelist.extend(s.strip() for s in f.read().splitlines())
if self.whitelist_nslc is None: self.whitelist_nslc = set() self.whitelist = set() self.whitelist_nsl_xx = set()
for x in whitelist: if isinstance(x, str): x = tuple(x.split('.')) if len(x) == 4: self.whitelist_nslc.add(x) self.whitelist_nsl_xx.add(x[:3]) else: self.whitelist.add(x)
self.station_corrections.update( (sc.codes, sc) for sc in load_station_corrections(filename))
self.pick_markers.extend( pmarker.load_markers(filename))
self._picks = None
paths, regex=r'\.yml|\.yaml', show_progress=False)
except ImportError: raise ImportError('Module pyrocko.model.gnss not found,' ' please upgrade pyrocko!')
paths, regex=r'\.npz', show_progress=False)
logger.warning('Could not find any kite scenes at %s.' % quote_paths(self.kite_scene_paths))
except ImportError: raise ImportError('Module kite could not be imported,' ' please install from https://pyrocko.org.')
else: raise AttributeError('Kite scene_id not unique for "%s".' % filename)
return True
nsl in self.blacklist or nsl[1:2] in self.blacklist or nsl[:2] in self.blacklist)
nsl = self.get_nsl(obj)
if ( nsl in self.whitelist or nsl[1:2] in self.whitelist or nsl[:2] in self.whitelist):
return True
try: nslc = self.get_nslc(obj) if nslc in self.whitelist_nslc: return True
except InvalidObject: return nsl in self.whitelist_nsl_xx
atimes = self.clippings[nsl_or_nslc] return num.any(num.logical_and(tmin < atimes, atimes <= tmax))
else: 'Cannot get nsl code from given object of type "%s".' % type(obj))
else: 'Cannot get nslc code from given object "%s"' % type(obj))
else: raise InvalidObject( 'Cannot get tmin and tmax from given object of type "%s"' % type(obj))
raise NotFound('Station is blacklisted:', self.get_nsl(obj))
raise NotFound('Station is not on whitelist:', self.get_nsl(obj))
return obj
raise NotFound('No station information:', keys)
if not self.is_blacklisted(self.stations[k]) and self.is_whitelisted(self.stations[k])]
if len(self.kite_scenes) == 0: raise AttributeError('No kite displacements defined.') return self.kite_scenes[0] else:
raise NotFound('GNSS campaign %s not found!' % name)
and (self.responses_stationxml is None or len(self.responses_stationxml) == 0):
raise NotFound('No response information available.')
'displacement': 'M', 'velocity': 'M/S', 'acceleration': 'M/S**2'}
raise NotFound('Response is blacklisted:', self.get_nslc(obj))
raise NotFound('Response is not on whitelist:', self.get_nslc(obj))
(net, sta, loc, cha), (net, sta, '', cha), ('', sta, '', cha)]
for x in self.responses[k]: if x.tmin < tmin and (x.tmax is None or tmax < x.tmax): if quantity == 'displacement': candidates.append(x.response) elif quantity == 'velocity': candidates.append(trace.MultiplyResponse([ x.response, trace.DifferentiationResponse()])) elif quantity == 'acceleration': candidates.append(trace.MultiplyResponse([ x.response, trace.DifferentiationResponse(2)])) else: assert False
sx.get_pyrocko_response( (net, sta, loc, cha), timespan=(tmin, tmax), fake_input_units=quantity_to_unit[quantity]))
elif len(candidates) == 0: raise NotFound('No response found:', (net, sta, loc, cha)) else: raise NotFound('Multiple responses found:', (net, sta, loc, cha))
self, obj, tmin, tmax, tpad=0., toffset_noise_extract=0., want_incomplete=False, extend_incomplete=False):
raise NotFound( 'Waveform is blacklisted:', (net, sta, loc, cha))
raise NotFound( 'Waveform is not on whitelist:', (net, sta, loc, cha))
raise NotFound( 'Waveform clipped:', (net, sta, loc))
elif self.clip_handling == 'by_nslc': if self.has_clipping((net, sta, loc, cha), tmin, tmax): raise NotFound( 'Waveform clipped:', (net, sta, loc, cha))
tmin=tmin+toffset_noise_extract, tmax=tmax+toffset_noise_extract, tpad=tpad, trace_selector=lambda tr: tr.nslc_id == (net, sta, loc, cha), want_incomplete=want_incomplete or extend_incomplete)
for tr in trs: tr.snap()
for tr in trs: tr.shift(-toffset_noise_extract)
trs[0].extend( tmin + toffset_noise_extract - tpad, tmax + toffset_noise_extract + tpad, fillmethod='repeat')
else: message = 'Waveform has gaps.'
message, codes=(net, sta, loc, cha), time_range=( tmin + toffset_noise_extract - tpad, tmax + toffset_noise_extract + tpad))
self, obj, quantity='displacement', tmin=None, tmax=None, tpad=0., tfade=0., freqlimits=None, deltat=None, toffset_noise_extract=0., want_incomplete=False, extend_incomplete=False):
obj, tmin=tmin, tmax=tmax, tpad=tpad+tfade, toffset_noise_extract=toffset_noise_extract, want_incomplete=want_incomplete, extend_incomplete=extend_incomplete)
tr.transfer( tfade=tfade, freqlimits=freqlimits, transfer_function=resp, invert=True))
except trace.InfiniteResponse: raise NotFound( 'Instrument response deconvolution failed ' '(divide by zero)', tr.nslc_id)
self, station, backazimuth, source, target, tmin, tmax):
# fill in missing channel information (happens when station file # does not contain any channel information) station = copy.deepcopy(station)
nsl = station.nsl() trs = self.pile.all( tmin=tmin, tmax=tmax, trace_selector=lambda tr: tr.nslc_id[:3] == nsl, load_data=False)
channels = list(set(tr.channel for tr in trs)) station.set_channels_by_name(*channels)
out_channels=('E', 'N', 'Z')))
backazimuth = source.azibazi_to(target)[1]
out_channels=('R', 'T', 'Z'), backazimuth=backazimuth))
'Cannot determine projection of data components:', station.nsl())
self, obj, quantity='displacement', tmin=None, tmax=None, tpad=0., tfade=0., freqlimits=None, deltat=None, cache=None, backazimuth=None, source=None, target=None, debug=False):
raise NotFound( 'Waveform is blacklisted:', nslc)
raise NotFound( 'Waveform is not on whitelist:', nslc)
tmin, tmax, tuple(freqlimits), tfade, deltat, tpad, quantity) raise obj else:
if not syn_test.respect_data_availability: if syn_test.real_noise_scale != 0.0: raise DatasetError( 'respect_data_availability=False and ' 'addition of real noise cannot be combined.')
tr = syn_test.get_waveform( nslc, tmin, tmax, tfade=tfade, freqlimits=freqlimits)
if cache is not None: cache[tr.nslc_id + cache_k] = tr
if debug: return [], [], [] else: return tr
if syn_test.real_noise_scale != 0.0: toffset_noise_extract = syn_test.real_noise_toffset
abs_delays.append(abs(sc.delay))
abs_delay_max = max(abs_delays) else:
station, backazimuth, source, target, tmin, tmax)
matrix, in_channels, out_channels)
self.get_waveform_restituted( station.nsl() + (cha,), quantity=quantity, tmin=tmin, tmax=tmax, tpad=tpad+abs_delay_max, toffset_noise_extract=toffset_noise_extract, # noqa tfade=tfade, freqlimits=freqlimits, deltat=deltat, want_incomplete=debug, extend_incomplete=self.extend_incomplete)
trace.project( trs_restituted_group, matrix, in_channels, out_channels))
if self.apply_correction_factors: tr.ydata /= sc.factor
if self.apply_correction_delays: tr.shift(-sc.delay)
trs_projected_synthetic = [] for tr in trs_projected: if tr.channel == channel: tr_syn = syn_test.get_waveform( tr.nslc_id, tmin, tmax, tfade=tfade, freqlimits=freqlimits)
if tr_syn: if syn_test.real_noise_scale != 0.0: tr_syn = tr_syn.copy() tr_noise = tr.copy() tr_noise.set_ydata( tr_noise.get_ydata() * syn_test.real_noise_scale)
tr_syn.add(tr_noise)
trs_projected_synthetic.append(tr_syn)
trs_projected = trs_projected_synthetic
return trs_projected, trs_restituted, trs_raw, tr_return
else: raise NotFound( 'waveform not available', station.nsl() + (channel,))
else: tmin_r = tmin tmax_r = tmax
return self._get_waveform(obj, **kwargs) else:
def get_events(self, magmin=None, event_names=None):
    '''
    Return the dataset's events, optionally filtered.

    :param magmin: if given, keep only events with magnitude >= magmin.
    :param event_names: if given, keep only events whose name is listed.
    :returns: list of matching events.
    '''
    def accepted(ev):
        # Each filter is skipped when its argument is None.
        if magmin is not None and ev.magnitude < magmin:
            return False
        if event_names is not None and ev.name not in event_names:
            return False
        return True

    return [ev for ev in self.events if accepted(ev)]
def get_event_by_time(self, t, magmin=None):
    '''
    Return the event closest in time to *t*.

    :param t: reference time [s].
    :param magmin: optional minimum magnitude filter.
    :returns: the event whose origin time is nearest to ``t``.
    :raises NotFound: if no event matches the criteria.
    '''
    candidates = self.get_events(magmin=magmin)
    if not candidates:
        raise NotFound(
            'No event information matching criteria (t=%s, magmin=%s).'
            % (t, magmin))

    # min() keeps the first element on ties, matching a first-wins scan.
    return min(candidates, key=lambda ev: abs(ev.time - t))
raise NotFound('No main event selected in dataset!')
raise NotFound('No such event: %s' % self._event_name)
def get_picks(self):
    '''
    Return phase pick markers indexed for lookup (built lazily).

    :returns: dict mapping ``(nsl, phasename, eventname)`` to the
        corresponding :py:class:`pmarker.PhaseMarker`.
    :raises DatasetError: on duplicate event names, unassociated picks,
        or duplicate picks.
    '''
    if self._picks is not None:
        return self._picks

    # Map event hashes to event names, from both event markers in the
    # pick files and the dataset's own event list.
    hash_to_name = {}
    names = set()
    for marker in self.pick_markers:
        if isinstance(marker, pmarker.EventMarker):
            name = marker.get_event().name
            if name in names:
                raise DatasetError(
                    'Duplicate event name "%s" in picks.' % name)

            names.add(name)
            hash_to_name[marker.get_event_hash()] = name

    for ev in self.events:
        hash_to_name[ev.get_hash()] = ev.name

    picks = {}
    for marker in self.pick_markers:
        if not isinstance(marker, pmarker.PhaseMarker):
            continue

        ehash = marker.get_event_hash()
        nsl = marker.one_nslc()[:3]
        phasename = marker.get_phasename()

        if ehash is None or ehash not in hash_to_name:
            raise DatasetError(
                'Unassociated pick: %s.%s.%s, %s' % (nsl + (phasename, )))

        eventname = hash_to_name[ehash]
        key = nsl, phasename, eventname
        if key in picks:
            raise DatasetError(
                'Duplicate pick: %s.%s.%s, %s' % (nsl + (phasename, )))

        picks[key] = marker

    self._picks = picks
    return self._picks
def get_pick(self, eventname, obj, phasename):
    '''
    Look up a single phase pick marker.

    :param eventname: name of the associated event.
    :param obj: any object from which an NSL station code can be derived.
    :param phasename: seismic phase name of the pick.
    :returns: the pick marker, or ``None`` if no matching pick exists.
    '''
    key = self.get_nsl(obj), phasename, eventname
    return self.get_picks().get(key)
''' Configuration for a Grond `Dataset` object. '''
optional=True, help='List of files with station coordinates in Pyrocko format.') Path.T(), optional=True, help='List of files with station coordinates in StationXML format.') optional=True, help='File with hypocenter information and possibly' ' reference solution') Path.T(), optional=True, help='List of directories with raw waveform data') optional=True) optional=True, help='List of SACPZ response files for restitution of' ' the raw waveform data.') Path.T(), optional=True, help='List of StationXML response files for restitution of' ' the raw waveform data.') optional=True, help='File containing station correction informations.') optional=True, default=True, help='Apply correction factors from station corrections.') optional=True, default=True, help='Apply correction delays from station corrections.') optional=True, default=False, help='Work around displaced sampling issues.') default=False, help='Extend incomplete seismic traces.') Path.T()) Path.T(), help='List of text files with blacklisted stations.') String.T(), help='Stations/components to be excluded according to their STA, ' 'NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.') Path.T(), help='List of text files with whitelisted stations.') String.T(), optional=True, help='If not None, list of stations/components to include according ' 'to their STA, NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes. ' 'Note: ''when whitelisting on channel level, both, the raw and ' 'the processed channel codes have to be listed.') optional=True)
Path.T(), optional=True, help='List of directories for the InSAR scenes.')
Path.T(), optional=True, help='List of directories for the GNSS campaign data.')
event_name='*'))
logger.warning('Event in %s has no name!', fn) return logger.warning('Event %s has inconsistent coordinates!', ev.name) logger.warning('Event %s has no depth!', ev.name) logger.warning('Event %s has no time!', ev.name)
raise DatasetError('No event files matching "%s".' % events_path)
event_name=event_name))
else: logger.warn('Path %s does not exist.' % p)
pyrocko_stations_filename=fp(self.stations_path), stationxml_filenames=fp(self.stations_stationxml_paths))
ds.add_clippings(markers_filename=fp(self.clippings_path))
ds.add_responses( sacpz_dirname=fp(self.responses_sacpz_path))
stationxml_filenames=fp( self.responses_stationxml_paths))
ds.add_station_corrections( filename=fp(self.station_corrections_path))
self.apply_displaced_sampling_workaround
ds.add_picks( filename=fp(picks_path))
ds.add_whitelist(self.whitelist) ds.add_whitelist(filenames=fp(self.whitelist_paths))
except (FileLoadError, OSError) as e: raise DatasetError(str(e))
Dataset DatasetConfig DatasetError InvalidObject NotFound StationCorrection load_station_corrections dump_station_corrections '''.split() |