# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
Unicode, Int, Float, List, Object, Timestamp, ValidationError, TBase, re_tz)
('M', 'M'): None,
('M/S', 'M'): trace.IntegrationResponse(1),
('M/S**2', 'M'): trace.IntegrationResponse(2),
('M', 'M/S'): trace.DifferentiationResponse(1),
('M/S', 'M/S'): None,
('M/S**2', 'M/S'): trace.IntegrationResponse(1),
('M', 'M/S**2'): trace.DifferentiationResponse(2),
('M/S', 'M/S**2'): trace.DifferentiationResponse(1),
('M/S**2', 'M/S**2'): None}
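# Example lookup (sketch; the dict's name `conversion` is an assumption,
# only the mapping itself is shown above): converting a response reported
# in velocity (M/S) to displacement (M) needs one integration:
#
#   conversion[('M/S', 'M')]   # -> trace.IntegrationResponse(1)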
words = s.split()
lines = []
t = []
n = 0
for w in words:
    if n + len(w) >= width:
        lines.append(' '.join(t))
        n = indent
        t = [' '*(indent-1)]

    t.append(w)
    n += len(w) + 1

lines.append(' '.join(t))
return '\n'.join(lines)
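# Usage sketch for the word-wrapping helper above (its name and exact
# signature are not visible in this excerpt, so `wrap(s, width, indent)` is
# an assumption):
#
#   wrap('lorem ipsum dolor sit amet consectetur adipiscing', width=25, indent=4)
#
# returns the text broken into lines of roughly `width` characters, with
# continuation lines indented by `indent` spaces.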
if any(type(x[0]) != type(r) for r in x):
    return False

if isinstance(x[0], float):
    return all(abs(r-x[0]) <= eps for r in x)
else:
    return all(r == x[0] for r in x)
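# Examples for the equality helper above (sketch; behavior inferred from
# the body): all values must share one type, floats may differ by up to `eps`.
#
#   [1.0, 1.0 + 1e-9, 1.0 - 1e-9] with eps=1e-6  -> True
#   [1, 1.0]                                     -> False  (mixed types)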
resp.evaluate(num.array([frequency], dtype=float)))[0]
raise InconsistentResponseInformation( '%s\n' ' computed response is zero' % prelude)
raise InconsistentResponseInformation( '%s\n' ' reported value: %g\n' ' computed value: %g\n' ' at frequency [Hz]: %g\n' ' difference [dB]: %g\n' ' limit [dB]: %g' % ( prelude, value, value_resp, frequency, diff_db, limit_db))
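# Worked example for the consistency check above (sketch, assuming the
# usual amplitude convention diff_db = 20*log10(value_resp/value)): a
# reported value of 1.0e9 against a computed value of 2.0e9 gives
# 20*log10(2.0) ~= 6.02 dB and would raise InconsistentResponseInformation
# for any limit_db below that.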
tt = val.utctimetuple()
val = time_float(calendar.timegm(tt)) + val.microsecond * 1e-6
tt = val.timetuple()
val = time_float(calendar.timegm(tt))
+ (int(sm)*60 if sm else 0)
except util.TimeStrError:
    year = int(val[:4])
    if sys.maxsize > 2**32:  # if we're on 64bit
        if year > this_year + 100:
            return None  # StationXML contained a dummy date

        if year < 1903:  # for macOS, 1900-01-01 dummy dates
            return None

    else:  # 32bit end of time is in 2038
        if this_year < 2037 and year > 2037 or year < 1903:
            return None  # StationXML contained a dummy date
raise
elif isinstance(val, (int, float)):
    val = time_float(val)

else:
    raise ValidationError(
        '%s: cannot convert "%s" to type %s' % (
            self.xname(), val, time_float))
.rstrip('0').rstrip('.')
.rstrip('0').rstrip('.') + 'Z'
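# Serialization sketch: both variants above strip trailing zeros (and a
# trailing '.') from the fractional seconds; the XML variant appends a 'Z'
# UTC marker, so a time is written e.g. as '2010-05-17T12:30:45.5Z' rather
# than '2010-05-17T12:30:45.500000Z' (the exact format string is not shown
# in this excerpt).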
'NOMINAL', 'CALCULATED']
'open', 'closed', 'partial']
'TRIGGERED', 'CONTINUOUS', 'HEALTH', 'GEOPHYSICAL', 'WEATHER', 'FLAG', 'SYNTHESIZED', 'INPUT', 'EXPERIMENTAL', 'MAINTENANCE', 'BEAM']
logger.warning(
    'channel type: "%s" is not a valid choice out of %s'
    % (val, repr(self.choices)))
'LAPLACE (RADIANS/SECOND)', 'LAPLACE (HERTZ)', 'DIGITAL (Z-TRANSFORM)']
'NONE', 'EVEN', 'ODD']
except ValueError:
    raise ValidationError(
        '%s: cannot convert to string %s' % (self.xname(), repr(val)))
'ANALOG (RADIANS/SECOND)', 'ANALOG (HERTZ)', 'DIGITAL']
'ANALOG (RAD/SEC)': 'ANALOG (RADIANS/SECOND)', 'ANALOG (HZ)': 'ANALOG (HERTZ)', }
'MACLAURIN']
''' Description of a site location using name and optional geopolitical boundaries (country, city, etc.). '''
''' This type contains a URI and description for external data that users may want to reference in StationXML. '''
''' A type to document units. Corresponds to SEED blockette 34. '''
''' Sample rate expressed as number of samples in a number of seconds. '''
''' Complex type for sensitivity and frequency ranges. This complex type can be used to represent both overall sensitivities and individual stage gains. The FrequencyRangeGroup is an optional construct that defines a pass band in Hertz (FrequencyStart and FrequencyEnd) in which the SensitivityValue is valid within the number of decibels specified in FrequencyDBVariation. '''
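# Construction sketch (the fields `value`, `frequency`, `input_units` and
# `output_units` appear elsewhere in this module; the FrequencyRangeGroup
# pass-band fields are omitted here because their attribute names are not
# shown in this excerpt):
#
#   Sensitivity(
#       value=1.5e9,
#       frequency=1.0,
#       input_units=Units(name='M/S'),
#       output_units=Units(name='COUNTS'))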
optional=True, xmltagname='InstallationDate')
optional=True, xmltagname='RemovalDate')
''' The BaseFilter type is the base from which all filters are derived. '''
''' Sensitivity and frequency ranges. The FrequencyRangeGroup is an optional construct that defines a pass band in Hertz (FrequencyStart and FrequencyEnd) in which the SensitivityValue is valid within the number of decibels specified in FrequencyDBVariation. '''
xmltagname='FrequencyDBVariation')
''' Complex numbers used as poles or zeros in channel response. '''
xmlstyle='attribute') # fixed
''' A time value in seconds. '''
# fixed unit
# fixed unit
# fixed unit
''' Instrument azimuth, degrees clockwise from North. '''
# fixed unit
''' Instrument dip in degrees down from horizontal. Together azimuth and dip describe the direction of the sensitive axis of the instrument. '''
# fixed unit
''' Extension of FloatWithUnit for distances, elevations, and depths. '''
# NOT fixed unit!
# fixed unit
''' Sample rate in samples per second. '''
# fixed unit
''' Representation of a person's contact information. A person can belong to multiple agencies and have multiple email addresses and phone numbers. '''
''' Response: FIR filter. Corresponds to SEED blockette 61. FIR filters are also commonly documented using the Coefficients element. '''
NumeratorCoefficient.T(xmltagname='NumeratorCoefficient'))
''' Response: coefficients for FIR filter. Laplace transforms or IIR filters can be expressed using this type as well, but PolesAndZeros should be used instead. Corresponds to SEED blockette 54. '''
xmltagname='CfTransferFunctionType')
''' Type for latitude coordinate. '''
# fixed unit
''' Type for longitude coordinate. '''
# fixed unit
''' Response: complex poles and zeros. Corresponds to SEED blockette 53. '''
xmltagname='PzTransferFunctionType')
xmltagname='NormalizationFactor')
logger.warning(
    'unhandled pole-zero response of type "DIGITAL (Z-TRANSFORM)" '
    '(%s)' % '.'.join(nslc))
return []
'LAPLACE (RADIANS/SECOND)', 'LAPLACE (HERTZ)'):
raise NoResponseInformation( 'cannot convert PoleZero response of type %s (%s)' % (self.pz_transfer_function_type, '.'.join(nslc)))
len(self.pole_list) - len(self.zero_list))
or self.normalization_factor == 0.0:
    logger.warning(
        'no pole-zero normalization factor given. Assuming a value of '
        '1.0 (%s)' % '.'.join(nslc))

    nfactor = 1.0

else:
constant=nfactor*cfactor, zeros=[z.value()*factor for z in self.zero_list], poles=[p.value()*factor for p in self.pole_list])
logger.warning(
    'cannot check pole-zero normalization factor (%s)'
    % '.'.join(nslc))
else:
    resp.evaluate(
        num.array([self.normalization_frequency.value]))[0])
computed_normalization_factor / nfactor)
logger.warning(
    'computed and reported normalization factors differ by '
    '%g dB: computed: %g, reported: %g (%s)' % (
        db, computed_normalization_factor, nfactor, '.'.join(nslc)))
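# Worked example for the check above (sketch, assuming
# db = 20*log10(computed/reported)): a computed factor of 6.3e8 against a
# reported factor of 6.0e8 gives 20*log10(6.3/6.0) ~= 0.42 dB.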
''' Response: expressed as a polynomial (allows non-linear sensors to be described). Corresponds to SEED blockette 62. Can be used to describe a stage of acquisition or a complete system. '''
xmltagname='ApproximationType')
''' Corresponds to SEED blockette 57. '''
''' Container for a comment or log entry. Corresponds to SEED blockettes 31, 51 and 59. '''
optional=True, xmltagname='BeginEffectiveTime')
optional=True, xmltagname='EndEffectiveTime')
''' Response: list of frequency, amplitude and phase values. Corresponds to SEED blockette 55. '''
ResponseListElement.T(xmltagname='ResponseListElement'))
''' Container for log entries. '''
''' This complex type represents channel response and covers SEED blockettes 53 to 56. '''
PolesZeros.T(optional=True, xmltagname='PolesZeros'))
Coefficients.T(optional=True, xmltagname='Coefficients'))
logger.warning(
    'multiple poles and zeros records in single response stage '
    '(%s.%s.%s.%s)' % nslc)
self.response_list or self.fir or self.polynomial):
trace.PoleZeroResponse(constant=self.stage_gain.value))
def input_units(self):
    [self.response_list, self.fir, self.polynomial]):

def output_units(self):
    [self.response_list, self.fir, self.polynomial]):
xmltagname='InstrumentSensitivity')
xmltagname='InstrumentPolynomial')
responses.append( trace.PoleZeroResponse( constant=self.instrument_sensitivity.value))
trial, sval, sfreq, 0.1, prelude='Instrument sensitivity value inconsistent with ' 'sensitivity computed from complete response\n' ' channel: %s' % '.'.join(nslc))
self.instrument_sensitivity.input_units is None:
raise NoResponseInformation('no input units given')
fake_input_units.upper(), input_units.upper()]
except KeyError: raise NoResponseInformation( 'cannot convert between units: %s, %s' % (fake_input_units, input_units))
normalization_frequency=1.0):

'''
Convert Pyrocko pole-zero response to StationXML response.

:param presponse: Pyrocko pole-zero response
:type presponse: :py:class:`~pyrocko.trace.PoleZeroResponse`
:param input_unit: Input unit to be reported in the StationXML response.
:type input_unit: str
:param output_unit: Output unit to be reported in the StationXML response.
:type output_unit: str
:param normalization_frequency: Frequency where the normalization factor
    for the StationXML response should be computed.
:type normalization_frequency: float
'''
presponse.evaluate(num.array([normalization_frequency]))[0] / presponse.constant))
pz_transfer_function_type='LAPLACE (RADIANS/SECOND)',
normalization_factor=norm_factor,
normalization_frequency=Frequency(normalization_frequency),
zero_list=[PoleZero(real=FloatNoUnit(z.real), imaginary=FloatNoUnit(z.imag))
           for z in presponse.zeros],
pole_list=[PoleZero(real=FloatNoUnit(z.real), imaginary=FloatNoUnit(z.imag))
           for z in presponse.poles])
number=1, poles_zeros_list=[pzs], stage_gain=Gain(float(abs(presponse.constant))/norm_factor))
instrument_sensitivity=Sensitivity( value=stage.stage_gain.value, input_units=Units(input_unit), output_units=Units(output_unit)),
stage_list=[stage])
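# Usage sketch for the converter above (its public name is not visible in
# this excerpt, so `pz_to_stationxml_response` is a placeholder):
#
#   from pyrocko import trace
#
#   presponse = trace.PoleZeroResponse(
#       zeros=[0.0j, 0.0j],
#       poles=[-0.037 + 0.037j, -0.037 - 0.037j],
#       constant=60077000.0)
#
#   resp = pz_to_stationxml_response(
#       presponse, input_unit='M/S', output_unit='COUNTS',
#       normalization_frequency=1.0)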
''' A base node type for derivation from: Network, Station and Channel types. '''
xmlstyle='attribute')
xmlstyle='attribute')
self.start_date <= args[0]) and (self.end_date is None or args[0] <= self.end_date))
args[1] >= self.start_date) and (self.end_date is None or self.end_date >= args[0]))
''' Equivalent to SEED blockette 52 and parent element for the related response blockettes. '''
ExternalReference.T(xmltagname='ExternalReference'))
xmltagname='SampleRateRatio')
def position_values(self):
''' This type represents a Station epoch. It is common to only have a single station epoch with the station's creation and termination dates as the epoch start and end dates. '''
optional=True, xmltagname='CreationDate')
optional=True, xmltagname='TerminationDate')
optional=True, xmltagname='TotalNumberChannels')
optional=True, xmltagname='SelectedNumberChannels')
ExternalReference.T(xmltagname='ExternalReference'))
def position_values(self):
    lat = self.latitude.value
    lon = self.longitude.value
    elevation = value_or_none(self.elevation)
    return lat, lon, elevation
''' This type represents the Network layer; all station metadata is contained within this element. The official name of the network or other descriptive information can be included in the Description element. The Network can contain 0 or more Stations. '''
xmltagname='TotalNumberStations')
xmltagname='SelectedNumberStations')
def station_code_list(self):
    return sorted(set(s.code for s in self.station_list))
def sl_code_list(self):
    sls = set()
    for station in self.station_list:
        for channel in station.channel_list:
            sls.add((station.code, channel.location_code))

    return sorted(sls)
sls = self.sl_code_list or [(x,) for x in self.station_code_list]
lines = ['%s (%i):' % (self.code, len(sls))]
if sls:
    ssls = ['.'.join(x for x in c if x) for c in sls]
    w = max(len(x) for x in ssls)
    n = (width - indent) // (w+1)  # integer division, used as a slice index
    while ssls:
        lines.append(
            ' ' * indent + ' '.join(x.ljust(w) for x in ssls[:n]))

        ssls[:n] = []

return '\n'.join(lines)
else: return None
channels[0].position_values
info = '\n'.join( ' %s: %s' % (x.code, x.position_values) for x in channels)
mess = 'encountered inconsistencies in channel ' \ 'lat/lon/elevation/depth ' \ 'for %s.%s.%s: \n%s' % (nsl + (info,))
if inconsistencies == 'raise':
    raise InconsistentChannelLocations(mess)

elif inconsistencies == 'warn':
    logger.warning(mess)
    logger.warning(' -> using mean values')
/ num.sum(num.isfinite(apos), axis=0)
(channel.code, value_or_none(channel.azimuth), value_or_none(channel.dip)) for channel in groups[code]]
data, message='channel orientation values differ:', error=inconsistencies)
pyrocko.model.Channel(code, azimuth=azimuth, dip=dip))
*nsl, lat=mlat, lon=mlon, elevation=mele, depth=mdep, channels=pchannels)
''' Top-level type for Station XML. Required fields are Source (network ID of the institution sending the message) and one or more Network containers or one or more Station containers. '''
time=None, timespan=None, inconsistencies='warn'):
assert inconsistencies in ('raise', 'warn')
nslcs = set(nslcs)
nsls = set(nsls)
tt = timespan
continue
channels = [channel for channel in channels if (network.code, station.code, loc, channel.code) in nslcs]
continue
continue
pyrocko_station_from_channels(
    nsl, channels, inconsistencies=inconsistencies))
else:
    pstations.append(pyrocko.model.Station(
        network.code, station.code, '*',
        lat=station.latitude.value,
        lon=station.longitude.value,
        elevation=value_or_none(station.elevation),
        name=station.description or ''))
cls, pyrocko_stations, add_flat_responses_from=None):
'''
Generate :py:class:`FDSNStationXML` from list of
:py:class:`pyrocko.model.Station` instances.

:param pyrocko_stations: list of :py:class:`pyrocko.model.Station`
    instances.
:param add_flat_responses_from: unit, 'M', 'M/S' or 'M/S**2'
'''
    assert add_flat_responses_from in ('M', 'M/S', 'M/S**2')
    extra = dict(
        response=Response(
            instrument_sensitivity=Sensitivity(
                value=1.0,
                frequency=1.0,
                input_units=Units(name=add_flat_responses_from))))
else:
have_offsets.add(s.nsl())
Channel(
    location_code=location,
    code=c.name,
    latitude=Latitude(value=s.effective_lat),
    longitude=Longitude(value=s.effective_lon),
    elevation=Distance(value=s.elevation),
    depth=Distance(value=s.depth),
    azimuth=Azimuth(value=c.azimuth),
    dip=Dip(value=c.dip),
    **extra))
Station(
    code=station,
    latitude=Latitude(value=s.effective_lat),
    longitude=Longitude(value=s.effective_lon),
    elevation=Distance(value=s.elevation),
    channel_list=channel_list))
logger.warning(
    'StationXML does not support Cartesian offsets in '
    'coordinates. Storing effective lat/lon for stations: %s'
    % ', '.join('.'.join(nsl) for nsl in sorted(have_offsets)))
Network( code=k, station_list=station_list, total_number_stations=len(station_list)))
source='from pyrocko stations list', created=timestamp, network_list=network_list)
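# Usage sketch for the classmethod above (assuming it is exposed as
# `FDSNStationXML.from_pyrocko_stations`; `dump_xml` is the guts XML
# serializer and is an assumption here):
#
#   from pyrocko import model
#
#   stations = model.load_stations('stations.txt')
#   sxml = FDSNStationXML.from_pyrocko_stations(
#       stations, add_flat_responses_from='M')
#   sxml.dump_xml(filename='stations.xml')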
self, net=None, sta=None, time=None, timespan=None):
tt = ()
if time is not None:
    tt = (time,)
elif timespan is not None:
    tt = timespan
for network in self.network_list:
    if not network.spans(*tt) or (
            net is not None and network.code != net):
        continue

    for station in network.station_list:
        if not station.spans(*tt) or (
                sta is not None and station.code != sta):
            continue
yield (network, station)
self, net=None, sta=None, loc=None, cha=None, time=None, timespan=None):
net is not None and network.code != net):
sta is not None and station.code != sta): continue
(cha is not None and channel.code != cha) or (loc is not None and channel.location_code.strip() != loc)): continue
time=None, timespan=None):
groups = {}
for network, station, channel in self.iter_network_station_channels(
        net, sta, loc, cha, time=time, timespan=timespan):

    net = network.code
    sta = station.code
    cha = channel.code
    loc = channel.location_code.strip()
    if len(cha) == 3:
        bic = cha[:2]  # band and instrument code according to SEED
    elif len(cha) == 1:
        bic = ''
    else:
        bic = cha
    if channel.response and \
            channel.response.instrument_sensitivity and \
            channel.response.instrument_sensitivity.input_units:

        unit = channel.response.instrument_sensitivity.input_units.name
    else:
        unit = None

    bic = (bic, unit)

    k = net, sta, loc
    if k not in groups:
        groups[k] = {}

    if bic not in groups[k]:
        groups[k][bic] = []
groups[k][bic].append(channel)
for nsl, bic_to_channels in groups.items():
    bad_bics = []
    for bic, channels in bic_to_channels.items():
        sample_rates = []
        for channel in channels:
            sample_rates.append(channel.sample_rate.value)

        if not same(sample_rates):
            scs = ','.join(channel.code for channel in channels)
            srs = ', '.join('%e' % x for x in sample_rates)
            err = 'ignoring channels with inconsistent sampling ' + \
                'rates (%s.%s.%s.%s: %s)' % (nsl + (scs, srs))

            logger.warning(err)
            bad_bics.append(bic)

    for bic in bad_bics:
        del bic_to_channels[bic]
return groups
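# Sketch of the structure returned above (codes invented for illustration):
# a dict keyed by (network, station, location), mapping
# (band+instrument code, input unit) tuples to lists of Channel objects,
# e.g.
#
#   {('GE', 'APE', ''): {('HH', 'M/S'): [<HHE>, <HHN>, <HHZ>]}}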
self, target_sample_rate=None,
priority_band_code=['H', 'B', 'M', 'L', 'V', 'E', 'S'],
priority_units=['M/S', 'M/S**2'],
priority_instrument_code=['H', 'L'],
time=None, timespan=None):
nslcs = {}
for nsl, bic_to_channels in self.get_channel_groups(
        time=time, timespan=timespan).items():
useful_bics = []
for bic, channels in bic_to_channels.items():
    rate = channels[0].sample_rate.value
if target_sample_rate is not None and \
        rate < target_sample_rate*0.99999:
    continue
if len(bic[0]) == 2:
    if bic[0][0] not in priority_band_code:
        continue

    if bic[0][1] not in priority_instrument_code:
        continue
unit = bic[1]
prio_unit = len(priority_units)
try:
    prio_unit = priority_units.index(unit)
except ValueError:
    pass
prio_inst = len(priority_instrument_code)
prio_band = len(priority_band_code)
if len(channels[0].code) == 3:
    try:
        prio_inst = priority_instrument_code.index(
            channels[0].code[1])
    except ValueError:
        pass

    try:
        prio_band = priority_band_code.index(
            channels[0].code[0])
    except ValueError:
        pass
if target_sample_rate is None: rate = -rate
useful_bics.append((-len(channels), prio_band, rate, prio_unit, prio_inst, bic))
useful_bics.sort()
for _, _, rate, _, _, bic in useful_bics:
    channels = sorted(
        bic_to_channels[bic],
        key=lambda channel: channel.code)
    if channels:
        for channel in channels:
            nslcs[nsl + (channel.code,)] = channel
break
return nslcs
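# Usage sketch (assuming the method above is named `choose_channels`; the
# parameters follow its signature):
#
#   nslc_to_channel = sx.choose_channels(
#       target_sample_rate=20.,
#       priority_units=['M/S'],
#       timespan=(tmin, tmax))
#
# The result maps (net, sta, loc, cha) tuples to Channel objects, with at
# most one channel group selected per station.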
self, nslc, time=None, timespan=None, fake_input_units=None):
net, sta, loc, cha, time=time, timespan=timespan):
    nslc, fake_input_units=fake_input_units))
raise NoResponseInformation('%s.%s.%s.%s' % nslc)

raise MultipleResponseInformation('%s.%s.%s.%s' % nslc)
def n_code_list(self):
    return sorted(set(x.code for x in self.network_list))
def ns_code_list(self):
    nss = set()
    for network in self.network_list:
        for station in network.station_list:
            nss.add((network.code, station.code))

    return sorted(nss)
def nsl_code_list(self):
    nsls = set()
    for network in self.network_list:
        for station in network.station_list:
            for channel in station.channel_list:
                nsls.add(
                    (network.code, station.code, channel.location_code))

    return sorted(nsls)
def nslc_code_list(self):
    (network.code, station.code, channel.location_code,
        channel.code))
lst = [ 'number of n codes: %i' % len(self.n_code_list), 'number of ns codes: %i' % len(self.ns_code_list), 'number of nsl codes: %i' % len(self.nsl_code_list), 'number of nslc codes: %i' % len(self.nslc_code_list) ]
return '\n'.join(lst)
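# Overall usage sketch for the class above (hedged: `load_xml` is assumed
# to be the guts XML loader used by Pyrocko, and the accessor names
# `get_pyrocko_stations` / `get_pyrocko_response` are assumptions based on
# the method bodies shown in this excerpt):
#
#   from pyrocko.io import stationxml
#
#   sx = stationxml.load_xml(filename='stations.xml')
#   stations = sx.get_pyrocko_stations(timespan=(tmin, tmax))
#   resp = sx.get_pyrocko_response(
#       ('GE', 'APE', '', 'BHZ'), timespan=(tmin, tmax),
#       fake_input_units='M')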
Exception.__init__(self)
self._line = line

def __str__(self):
    return 'Invalid record: "%s"' % self._line
logger.warning('Invalid channel record: %s' % line)
continue
scale_freq, scale_units, sample_rate, start_date, end_date) = t
except ValueError: scale = None
except ValueError: scale_freq = None
except ValueError: depth = 0.0
except ValueError:
    azi = None
    dip = None
else:
code=sta, latitude=lat, longitude=lon, elevation=ele)
else:
    instrument_sensitivity=Sensitivity(
        value=scale,
        frequency=scale_freq,
        input_units=scale_units))
else:
    resp = None
code=cha,
location_code=loc.strip(),
latitude=lat,
longitude=lon,
elevation=ele,
depth=depth,
azimuth=azi,
dip=dip,
sensor=Equipment(description=sens),
response=resp,
sample_rate=sample_rate,
start_date=start_date,
end_date=end_date or None)
except ValidationError: raise InvalidRecord(line)
source='created from table input', created=time.time(), network_list=sorted(networks.values(), key=lambda x: x.code))
networks = []
for sx in sxs:
    networks.extend(sx.network_list)
return FDSNStationXML( source='merged from different sources', created=time.time(), network_list=copy.deepcopy( sorted(networks, key=lambda x: x.code))) |