# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
from __future__ import division, absolute_import

import sys
import struct
import re
import numpy as num

from io import BytesIO
from collections import namedtuple

from pyrocko import util, trace
from .io_common import FileLoadError
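
# Reader for Guralp Compressed Format (GCF) waveform files. As handled in
# this module, a GCF file is a sequence of 1024-byte blocks, each consisting
# of a 16-byte header followed by a 1008-byte payload. Block times are
# encoded as days and seconds counted from the GCF epoch,
# 1989-11-17 00:00:00.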

g_guralp_zero = None


def get_guralp_zero():
    global g_guralp_zero
    if g_guralp_zero is None:
        g_guralp_zero = util.str_to_time('1989-11-17 00:00:00')

    return g_guralp_zero


class GCFLoadError(FileLoadError):
    pass


class EOF(Exception):
    pass


Header = namedtuple(
    'Header',
    '''
    block_type
    system_id
    stream_id
    instrument_type
    time
    gain
    ttl
    sample_rate
    compression
    nrecords
    '''.split())
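
# A Header instance holds one parsed 16-byte GCF block header. For non-data
# (status) blocks, sample_rate is set to 0.0.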


def read(f, n, eof_ok=False):
    data = f.read(n)
    if eof_ok and len(data) == 0:
        raise EOF()

    if len(data) != n:
        raise GCFLoadError('Unexpected end of file')

    return data


def read_header(f, endianness='>'):
    e = endianness
    data = read(f, 16, eof_ok=True)

    isystem_id, istream_id = struct.unpack(e+'II', data[:8])
    ex = isystem_id & 0x80000000
    if ex:
        ex2 = isystem_id & (1 << 30)
        if ex2:
            system_id = util.base36encode(isystem_id & (2**21 - 1))
        else:
            system_id = util.base36encode(isystem_id & (2**26 - 1))

        instrument_type = (isystem_id >> 26) & 0b1
        gain = [None, 1, 2, 4, 8, 16, 32, 64][(isystem_id >> 27) & 0b111]
    else:
        system_id = util.base36encode(isystem_id)
        instrument_type = None
        gain = None

    stream_id = util.base36encode(istream_id)

    i_day_second = struct.unpack(e+'I', data[8:12])[0]
    iday = i_day_second >> 17
    isecond = i_day_second & 0x1ffff
    time = (iday*24*60*60) + get_guralp_zero() + isecond
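
    # Remaining header bytes: ttl, sample rate code, compression code and
    # the number of 4-byte records in the block's payload.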
    ittl, israte, compression, nrecords = struct.unpack(e+'BBBB', data[12:])
    if nrecords > 250:
        raise FileLoadError('Header indicates too many records in block.')

    if israte == 0:
        if compression == 4 and stream_id[-2:] == '00':
            block_type = 'status_block'

        elif compression == 4 and stream_id[-2:] == '01':
            block_type = 'unified_status_block'

        elif compression == 4 and stream_id[-2:] == 'SM':
            block_type = 'strong_motion_block'

        elif stream_id[-2:] == 'CD':
            block_type = 'cd_status_block'

        elif compression == 4 and stream_id[-2:] == 'BP':
            block_type = 'byte_pipe_block'

        elif stream_id[-2:] == 'IB':
            block_type = 'information_block'

        else:
            raise FileLoadError('Unexpected block type found.')

        return Header(block_type, system_id, stream_id, instrument_type,
                      time, gain, ittl, 0.0, compression, nrecords)
    else:
        block_type = 'data_block'

        if not re.match(r'^([ZNEXC][0-9A-CG-S]|M[0-9A-F])$', stream_id[-2:]):
            raise FileLoadError('Unexpected data stream ID')

        sample_rate_tab = {
            157: (0.1, None),
            161: (0.125, None),
            162: (0.2, None),
            164: (0.25, None),
            167: (0.5, None),
            171: (400., 8),
            174: (500., 2),
            176: (1000., 4),
            179: (2000., 8),
            181: (4000., 16)}
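
        # Sample rate codes not listed in the table above are interpreted
        # directly as the rate in Hz; listed codes also carry the
        # denominator of the fractional start-time offset (tfod) used below.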
        if israte in sample_rate_tab:
            sample_rate, tfod = sample_rate_tab[israte]
        else:
            sample_rate = float(israte)
            tfod = None

        if tfod is not None:
            # The upper nibble of the compression byte is the numerator of
            # a fractional start-time offset with denominator tfod.
            toff = (compression >> 4) / tfod
            compression = compression & 0b1111
            time += toff

        if compression not in (1, 2, 4):
            raise GCFLoadError(
                'Unsupported compression code: %i' % compression)

        return Header(block_type, system_id, stream_id, instrument_type,
                      time, gain, ittl, sample_rate, compression, nrecords)
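

# Minimal usage sketch (the file name is hypothetical): blocks can be walked
# manually with read_header() and read_data(); for non-data blocks the
# remaining 1008 bytes of payload are skipped, as iload() does below.
#
#     with open('example.gcf', 'rb') as f:
#         try:
#             while True:
#                 h = read_header(f)
#                 if h.block_type == 'data_block':
#                     samples = read_data(f, h)
#                 else:
#                     f.seek(1024 - 16, 1)
#         except EOF:
#             pass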


def read_data(f, h, endianness='>'):
    e = endianness
    data = read(f, 1024 - 16)
    first = struct.unpack(e+'i', data[0:4])[0]
    dtype = {1: e+'i4', 2: e+'i2', 4: e+'i1'}
    if h.compression not in dtype:
        raise GCFLoadError('Unsupported compression code: %i' % h.compression)
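
    # Payload layout: a 4-byte forward integration constant, then h.nrecords
    # 4-byte records holding 1, 2 or 4 first differences each (depending on
    # the compression code), then a 4-byte reverse integration constant
    # which must match the last reconstructed sample.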
    nsamples = h.compression * h.nrecords
    difs = num.frombuffer(data[4:4+h.nrecords*4], dtype=dtype[h.compression],
                          count=nsamples)
    samples = difs.astype(num.int32)
    samples[0] += first
    samples = num.cumsum(samples, dtype=num.int32)
    last = struct.unpack(e+'i', data[4+h.nrecords*4:4+h.nrecords*4+4])[0]
    if last != samples[-1]:
        raise GCFLoadError('Checksum error occurred')

    return samples


def read_status(f, h):
    data = read(f, 1024 - 16)
    return data[:h.nrecords*4]


def iload(filename, load_data=True):
    traces = {}

    with open(filename, 'rb') as f:
        try:
            while True:
                h = read_header(f)
                if h.block_type == 'data_block':
                    deltat = 1.0 / h.sample_rate
                    if load_data:
                        samples = read_data(f, h)
                        tmax = None
                    else:
                        f.seek(1024 - 16, 1)
                        samples = None
                        tmax = h.time + (
                            h.nrecords * h.compression - 1) * deltat

                    nslc = ('', h.system_id, '', h.stream_id)

                    if nslc in traces:
                        tr = traces[nslc]
                        if abs((tr.tmax + tr.deltat) - h.time) < deltat*0.0001:
                            if samples is not None:
                                tr.append(samples)
                            else:
                                tr.tmax = tmax

                        else:
                            del traces[nslc]
                            yield tr

                    if nslc not in traces:
                        traces[nslc] = trace.Trace(
                            *nslc,
                            tmin=h.time,
                            deltat=deltat,
                            ydata=samples,
                            tmax=tmax)

                else:
                    f.seek(1024 - 16, 1)

        except EOF:
            for tr in traces.values():
                yield tr
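

# Usage sketch: iload() yields one trace per contiguous run of data blocks,
# with the trace's station set to the GCF system ID and its channel to the
# stream ID. A trace is emitted when a gap is found or at end of file. The
# file name below is hypothetical.
#
#     for tr in iload('example.gcf'):
#         print(tr)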


def detect(first512):
    # does not work properly, produces too many false positives
    # difficult to improve due to the very compact GCF header
    try:
        if len(first512) < 512:
            return False

        f = BytesIO(first512)
        read_header(f)
        return True

    except Exception:
        return False


if __name__ == '__main__':
    util.setup_logging('warn')

    all_traces = []
    for fn in sys.argv[1:]:
        if detect(open(fn, 'rb').read(512)):
            print(fn)
            all_traces.extend(iload(fn))

    trace.snuffle(all_traces)