# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
# python 2/3
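# Convert a named Pyrocko plot color into a matplotlib-compatible RGB tuple
# with components scaled to the range [0, 1].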
c = plot.color(c)
return tuple(x/255. for x in c)
# reduce time (no drift would mean straight line)
# first round, remove outliers
# detrend
# second round, remove outliers based on detrended tred, refit
raise ControlPointError('slope error too large')
raise ControlPointError('offset error too large')
raise ControlPointError(
    'could not safely determine time corrections from gps')
except ControlPointError:
    pass
except ControlPointError:
    pass
raise ControlPointError(
    'could not safely determine time corrections from gps')
    tmin = t0 - i0 * deltat - offset * deltat
    tmax = t3 + (nsamples - i3 - 1) * deltat

else:
    icontrol = num.array(
        [x[0] for x in control_points], dtype=num.int64)
    tcontrol = num.array(
        [x[1] for x in control_points], dtype=float)

    # robust against steps:
    slope = num.median(
        (tcontrol[1:] - tcontrol[:-1])
        / (icontrol[1:] - icontrol[:-1]))
except ControlPointError:
tmin = util.str_to_time(
    header['S_DATE'] + header['S_TIME'],
    format='%y/%m/%d%H:%M:%S')
idat = int(header['DAT_NO'])
if idat == 0:
    tmin = tmin + util.gps_utc_offset(tmin)
else:
    tmin = util.day_start(tmin + idat * 24.*3600.) \
        + util.gps_utc_offset(tmin)
tmax = tmin + (nsamples - 1) * deltat
icontrol, tcontrol = None, None
return tmin, tmax, icontrol, tcontrol, None
from matplotlib import pyplot as plt
fig = plt.figure()
axes = fig.gca()
h = 3600.  # seconds per hour; the time axis is shown in hours
if isinstance(fns, (str, newstr)):
    fn = fns
    if os.path.isdir(fn):
        fns = [
            os.path.join(fn, entry) for entry in sorted(os.listdir(fn))]

        ipos, t, fix, nsvs, header, offset, nsamples = \
            get_timing_context(fns)

    else:
        ipos, t, fix, nsvs, header, offset, nsamples = \
            get_extended_timing_context(fn)

else:
    ipos, t, fix, nsvs, header, offset, nsamples = \
        get_timing_context(fns)
deltat = 1.0 / int(header['S_RATE'])
tref = num.median(t - ipos * deltat)
tref = round(tref / deltat) * deltat
x = ipos*deltat
y = (t - tref) - ipos*deltat
bfix = fix != 0
bnofix = fix == 0
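# Derive time control points from the GPS tags: icontrol holds sample
# indices, tcontrol the corresponding corrected times, and ok flags the GPS
# tags considered trustworthy.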
tmin, tmax, icontrol, tcontrol, ok = analyse_gps_tags(
    header, (ipos, t, fix, nsvs), offset, nsamples)
la = num.logical_and
nok = num.logical_not(ok)
axes.plot(
    x[la(bfix, ok)]/h, y[la(bfix, ok)], '+',
    ms=5, color=color('chameleon3'))
axes.plot(
    x[la(bfix, nok)]/h, y[la(bfix, nok)], '+',
    ms=5, color=color('aluminium4'))

axes.plot(
    x[la(bnofix, ok)]/h, y[la(bnofix, ok)], 'x',
    ms=5, color=color('chocolate3'))
axes.plot(
    x[la(bnofix, nok)]/h, y[la(bnofix, nok)], 'x',
    ms=5, color=color('aluminium4'))
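# Reduced time correction at the control points (relative to tref), drawn
# as a polyline with markers.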
tred = tcontrol - icontrol*deltat - tref
axes.plot(icontrol*deltat/h, tred, color=color('aluminium6'))
axes.plot(icontrol*deltat/h, tred, 'o', ms=5, color=color('aluminium6'))
ymin = (math.floor(tred.min() / deltat)-1) * deltat
ymax = (math.ceil(tred.max() / deltat)+1) * deltat
# axes.set_ylim(ymin, ymax)
if ymax - ymin < 1000 * deltat:
    ygrid = math.floor(tred.min() / deltat) * deltat
    while ygrid < ymax:
        axes.axhline(ygrid, color=color('aluminium4'), alpha=0.3)
        ygrid += deltat
xmin = icontrol[0]*deltat/h
xmax = icontrol[-1]*deltat/h
xsize = xmax - xmin
xmin -= xsize * 0.1
xmax += xsize * 0.1
axes.set_xlim(xmin, xmax)
axes.set_ylim(ymin, ymax)
axes.set_xlabel('Uncorrected (quartz) time [h]')
axes.set_ylabel('Relative time correction [s]')
plt.show()
path = os.path.abspath(fn)
for entry in self.entries:
    if entry.path == path:
        return entry
raise Exception('entry not found')
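# Iterate over the neighbouring files of the same recording (identical
# tstart), stepping through consecutive file numbers (ifile) in the
# direction given by step until a gap is encountered.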
current = self.get_entry(fn)
by_ifile = dict(
    (entry.ifile, entry) for entry in self.entries
    if entry.tstart == current.tstart)
icurrent = current.ifile
while True:
    icurrent += step
    try:
        yield by_ifile[icurrent]

    except KeyError:
        break
if os.path.isfile(os.path.join(dpath, f))])
dir_context = g_dir_contexts[dpath]
if dir_context.mtime == mtime:
    return dir_context
del g_dir_contexts[dpath]
continue
datacube_ext.load(f.fileno(), 3, 0, -1, None)
except datacube_ext.DataCubeError as e:
    e = DataCubeError(str(e))
    e.set_context('filename', fn)
    raise e
path=os.path.abspath(fn2),
tstart=util.str_to_time(
    '20' + header['S_DATE'] + ' ' + header['S_TIME'],
    format='%Y/%m/%d %H:%M:%S'),
ifile=int(header['DAT_NO'])))
f.fileno(), 1, 0, -1, None)
except datacube_ext.DataCubeError as e:
    e = DataCubeError(str(e))
    e.set_context('filename', fn)
    raise e
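# Concatenate the GPS tags (ipos, t, fix, nsvs) of all given files, shifting
# the sample indices so that they refer to positions in the virtually joined
# recording.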
joined = [[], [], [], []]
ioff = 0
for fn in fns:
    header, gps_tags, nsamples = get_time_infos(fn)

    ipos = gps_tags[0]
    ipos += ioff

    for i in range(4):
        joined[i].append(gps_tags[i])

    ioff += nsamples
ipos, t, fix, nsvs = [num.concatenate(x) for x in joined]
nsamples = ioff
return ipos, t, fix, nsvs, header, 0, nsamples
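# Extend the timing context forward: walk through the files following the
# current one and collect their GPS tags (sample indices shifted into the
# extended context) until a file containing GPS fixes has been included.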
ioff = nsamples_base
for entry in c.iter_entries(fn, 1):
    _, gps_tags, nsamples = get_time_infos(entry.path)

    ipos = gps_tags[0]
    ipos += ioff

    aggregated.append(gps_tags)
    nsamples_total += nsamples

    if num.sum(gps_tags[2]) > 0:
        break

    ioff += nsamples
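# Same procedure walking backward: sample offsets become negative and the
# collected tags are prepended, again stopping once a file with GPS fixes
# has been included.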
ioff = 0
for entry in c.iter_entries(fn, -1):
    _, gps_tags, nsamples = get_time_infos(entry.path)

    ioff -= nsamples

    ipos = gps_tags[0]
    ipos += ioff

    aggregated[0:0] = [gps_tags]

    nsamples_total += nsamples

    if num.sum(gps_tags[2]) > 0:
        break
# return ipos, t, fix, nsvs, header, ioff, nsamples_total
raise NotImplementedError(
    'no such interpolation method: %s' % interpolation)
else:
f.fileno(), loadflag, 0, -1, None)
except datacube_ext.DataCubeError as e:
    e = DataCubeError(str(e))
    e.set_context('filename', fn)
    raise e
get_extended_timing_context(fn)
header_, (ipos, t, fix, nsvs), offset_, nsamples_)
logger.warning(
    'No usable GPS timestamps found. Using datacube header '
    'information to guess time. (file: "%s")' % fn)
else:
# to prevent problems with rounding errors:
[x[0] + util.gps_utc_offset(x[0]) for x in util.read_leap_seconds2()],
dtype=float)
num.logical_and(0 <= icontrol, icontrol < nsamples))
logger.warning(
    'Extrapolating GPS time information from directory context '
    '(insufficient number of GPS timestamps in file: "%s").' % fn)
icontrol, tcontrol, arr.astype(float), tmin_ip, deltat, ydata)
except signal_ext.Error as e:
    e = DataCubeError(str(e))
    e.set_context('filename', fn)
    e.set_context('n_control_points', icontrol.size)
    e.set_context('n_samples_raw', arr.size)
    e.set_context('n_samples_ip', ydata.size)
    e.set_context('tmin_ip', util.time_to_str(tmin_ip))
    raise e
else:
else:
ydata=ydata, tmin=tr_tmin, tmax=tr_tmax, meta=header)
(tr.tmin, tcut), (tcut, tr.tmax+tr.deltat)]:
util.utc_gps_offset(0.5*(tr_cut.tmin+tr_cut.tmax)))
except trace.NoData:
    pass
else:
str: b'GIPP_V DEV_NO E_NAME GPS_PO S_TIME S_DATE DAT_NO'.split(),
int: b'''P_AMPL CH_NUM S_RATE D_FILT C_MODE A_CHOP F_TIME GPS_TI GPS_OF
    A_FILT A_PHAS GPS_ON ACQ_ON V_TCXO D_VOLT E_VOLT'''.split()}
return False
n = len(s)
return False
if __name__ == '__main__':
    import sys

    fns = sys.argv[1:]

    if len(fns) > 1:
        plot_timeline(fns)
    else:
        plot_timeline(fns[0])
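# Usage sketch (module name and file paths below are placeholders): plot the
# GPS timing of a single DATA-CUBE file, of several files, or of a whole
# recording directory:
#
#   python datacube.py /path/to/recording/
#   python datacube.py rec_0001.cube rec_0002.cube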