# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------

'''
Reader for `DiGOS DATA-CUBE³ <https://digos.eu/the-seismic-data-recorder/>`_
raw data.
'''
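
# Minimal usage sketch (the file name 'rec.A01' is hypothetical; any raw
# DATA-CUBE³ recording works). iload() below yields pyrocko.trace.Trace
# objects:
#
#   from pyrocko.io import datacube
#
#   for tr in datacube.iload('rec.A01', load_data=True):
#       print(tr)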
import os
import math
import logging

import numpy as num

from pyrocko import trace, util, plot
from pyrocko.guts import Object, Int, String, Timestamp, List

from . import io_common

logger = logging.getLogger(__name__)

N_GPS_TAGS_WANTED = 200  # must match definition in datacube_ext.c

APPLY_SUBSAMPLE_SHIFT_CORRECTION = True


def color(c):
    c = plot.color(c)
    return tuple(x/255. for x in c)


class DataCubeError(io_common.FileLoadError):
    pass


class ControlPointError(Exception):
    pass


def make_control_point(ipos_block, t_block, tref, deltat):
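    '''
    Fit a clock-drift line to one block of GPS tags and return its midpoint.

    The GPS times are reduced by the nominal sample times (``tref +
    ipos*deltat``). Outliers outside the inner quartile range are discarded
    twice, once on the raw and once on the detrended residuals, before the
    final line fit. A :py:exc:`ControlPointError` is raised if too few tags
    survive the QC or if the fit uncertainties exceed fixed limits.

    :returns: ``(ic, tc)``, the sample index at the center of the block and
        the corrected absolute time at that sample.
    '''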
    # reduce time (no drift would mean straight line)
    tred = (t_block - tref) - ipos_block*deltat

    # first round, remove outliers
    q25, q75 = num.percentile(tred, (25., 75.))
    iok = num.logical_and(q25 <= tred, tred <= q75)

    # detrend
    slope, offset = num.polyfit(ipos_block[iok], tred[iok], 1)
    tred2 = tred - (offset + slope * ipos_block)

    # second round, remove outliers based on detrended tred, refit
    q25, q75 = num.percentile(tred2, (25., 75.))
    iok = num.logical_and(q25 <= tred2, tred2 <= q75)

    x = ipos_block[iok].copy()
    ipos0 = x[0]
    x -= ipos0
    y = tred[iok].copy()
    if x.size < 2:
        raise ControlPointError(
            'Insufficient number of control points after QC.')

    elif x.size < 5:  # needed for older numpy versions
        (slope, offset) = num.polyfit(x, y, 1)
    else:
        (slope, offset), cov = num.polyfit(x, y, 1, cov=True)

        slope_err, offset_err = num.sqrt(num.diag(cov))

        slope_err_limit = 1.0e-10
        if ipos_block.size < N_GPS_TAGS_WANTED // 2:
            slope_err_limit *= (200. / ipos_block.size)

        offset_err_limit = 5.0e-3

        if slope_err > slope_err_limit:
            raise ControlPointError(
                'Slope error too large: %g (limit: %g)' % (
                    slope_err, slope_err_limit))

        if offset_err > offset_err_limit:
            raise ControlPointError(
                'Offset error too large: %g (limit: %g)' % (
                    offset_err, offset_err_limit))

    ic = ipos_block[ipos_block.size//2]
    tc = offset + slope * (ic - ipos0)

    return ic, tc + ic * deltat + tref


def analyse_gps_tags(header, gps_tags, offset, nsamples):
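    '''
    Derive time control points for clock-drift correction from raw GPS tags.

    Tags with implausible time offsets or local clock rates are masked, the
    remaining tags are condensed block-wise with
    :py:func:`make_control_point`, and the resulting control points are
    extrapolated to the first and last sample of the recording. If no safe
    correction can be constructed, timing falls back to the file header date
    and time.

    :returns: ``(tmin, tmax, icontrol, tcontrol, ok)``, where ``icontrol``
        and ``tcontrol`` are arrays of sample indices and corresponding
        corrected times, and ``ok`` is the acceptance mask of the GPS tags.
    '''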
    ipos, t, fix, nsvs = gps_tags
    deltat = 1.0 / int(header['S_RATE'])

    tquartz = offset + ipos * deltat

    toff = t - tquartz
    toff_median = num.median(toff)

    n = t.size

    dtdt = (t[1:n] - t[0:n-1]) / (tquartz[1:n] - tquartz[0:n-1])

    ok = abs(toff_median - toff) < 10.

    xok = num.abs(dtdt - 1.0) < 0.00001

    if ok.size >= 1:
        ok[0] = False
        ok[1:n] &= xok
        ok[0:n-1] &= xok
        ok[n-1] = False

    ipos = ipos[ok]
    t = t[ok]
    fix = fix[ok]
    nsvs = nsvs[ok]

    blocksize = N_GPS_TAGS_WANTED // 2
    if ipos.size < blocksize:
        blocksize = max(10, ipos.size // 4)
        logger.warning(
            'Small number of GPS tags found. '
            'Reducing analysis block size to %i tags. '
            'Time correction may be unreliable.' % blocksize)

    try:
        if ipos.size < blocksize:
            raise ControlPointError(
                'Cannot determine GPS time correction: '
                'Too few GPS tags found: %i' % ipos.size)

        j = 0
        control_points = []
        tref = num.median(t - ipos*deltat)
        while j < ipos.size - blocksize:
            ipos_block = ipos[j:j+blocksize]
            t_block = t[j:j+blocksize]
            try:
                ic, tc = make_control_point(ipos_block, t_block, tref, deltat)
                control_points.append((ic, tc))
            except ControlPointError as e:
                logger.debug(str(e))

            j += blocksize

        ipos_last = ipos[-blocksize:]
        t_last = t[-blocksize:]
        try:
            ic, tc = make_control_point(ipos_last, t_last, tref, deltat)
            control_points.append((ic, tc))
        except ControlPointError as e:
            logger.debug(str(e))

        if len(control_points) < 2:
            raise ControlPointError(
                'Could not safely determine time corrections from GPS: '
                'unable to construct two or more control points')

        i0, t0 = control_points[0]
        i1, t1 = control_points[1]
        i2, t2 = control_points[-2]
        i3, t3 = control_points[-1]
        if len(control_points) == 2:
            tmin = t0 - i0 * deltat - offset * deltat
            tmax = t3 + (nsamples - i3 - 1) * deltat
        else:
            icontrol = num.array(
                [x[0] for x in control_points], dtype=num.int64)
            tcontrol = num.array(
                [x[1] for x in control_points], dtype=float)
            # robust against steps:
            slope = num.median(
                (tcontrol[1:] - tcontrol[:-1])
                / (icontrol[1:] - icontrol[:-1]))

            tmin = t0 + (offset - i0) * slope
            tmax = t2 + (offset + nsamples - 1 - i2) * slope

        if offset < i0:
            control_points[0:0] = [(offset, tmin)]

        if offset + nsamples - 1 > i3:
            control_points.append((offset + nsamples - 1, tmax))

        icontrol = num.array([x[0] for x in control_points], dtype=num.int64)
        tcontrol = num.array([x[1] for x in control_points], dtype=float)

        # corrected 2021-10-26: This sub-sample time shift introduced by the
        # Cube's ADC was previously not recognized.
        if APPLY_SUBSAMPLE_SHIFT_CORRECTION:
            tcontrol -= 0.199 * deltat + 0.0003

        return tmin, tmax, icontrol, tcontrol, ok

    except ControlPointError as e:
        logger.error(str(e))

        tmin = util.str_to_time(
            header['S_DATE'] + header['S_TIME'],
            format='%y/%m/%d%H:%M:%S')

        idat = int(header['DAT_NO'])
        if idat == 0:
            tmin = tmin + util.gps_utc_offset(tmin)
        else:
            tmin = util.day_start(tmin + idat * 24.*3600.) \
                + util.gps_utc_offset(tmin)

        tmax = tmin + (nsamples - 1) * deltat
        icontrol = num.array([offset, offset+nsamples - 1], dtype=num.int64)
        tcontrol = num.array([tmin, tmax])
        return tmin, tmax, icontrol, tcontrol, ok


def plot_gnss_location_timeline(fns):
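    '''
    Plot a GNSS location timeline for the given raw files.

    Shows four stacked panels: northing and easting relative to the median
    position, elevation, and number of satellites, with tags lacking a GPS
    fix drawn in red.
    '''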
    from matplotlib import pyplot as plt
    from pyrocko.orthodrome import latlon_to_ne_numpy
    not_ = num.logical_not
    h = 3600.

    fig = plt.figure()

    axes = []
    for i in range(4):
        axes.append(
            fig.add_subplot(4, 1, i+1, sharex=axes[-1] if axes else None))

    background_colors = [
        color('aluminium1'),
        color('aluminium2')]

    tref = None
    for ifn, fn in enumerate(fns):
        header, gps_tags, nsamples = get_time_infos(fn)
        _, t, fix, nsvs, lats, lons, elevations, _ = gps_tags

        fix = fix.astype(bool)

        if t.size < 2:
            logger.warning('Need at least 2 gps tags for plotting: %s' % fn)

        if tref is None:
            tref = util.day_start(t[0])

        lat, lon, elevation = coordinates_from_gps(gps_tags)

        norths, easts = latlon_to_ne_numpy(lat, lon, lats, lons)

        for ax, data in zip(axes, (norths, easts, elevations, nsvs)):

            tspan = t[num.array([0, -1])]

            ax.axvspan(*((tspan - tref) / h), color=background_colors[ifn % 2])
            med = num.median(data)
            ax.plot(
                (tspan - tref) / h,
                [med, med],
                ls='--',
                c='k',
                lw=3,
                alpha=0.5)

            ax.plot(
                (t[not_(fix)] - tref) / h, data[not_(fix)], 'o',
                ms=1.5,
                mew=0,
                color=color('scarletred2'))

            ax.plot(
                (t[fix] - tref) / h, data[fix], 'o',
                ms=1.5,
                mew=0,
                color=color('aluminium6'))

    for ax in axes:
        ax.grid(alpha=.3)

    ax_lat, ax_lon, ax_elev, ax_nsv = axes

    ax_lat.set_ylabel('Northing [m]')
    ax_lon.set_ylabel('Easting [m]')
    ax_elev.set_ylabel('Elevation [m]')
    ax_nsv.set_ylabel('Number of Satellites')

    ax_lat.get_xaxis().set_tick_params(labelbottom=False)
    ax_lon.get_xaxis().set_tick_params(labelbottom=False)
    ax_nsv.set_xlabel(
        'Hours after %s' % util.time_to_str(tref, format='%Y-%m-%d'))

    fig.suptitle(
        u'Lat: %.5f\u00b0 Lon: %.5f\u00b0 Elevation: %g m' % (
            lat, lon, elevation))

    plt.show()


def coordinates_from_gps(gps_tags):
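    '''
    Return median latitude, longitude, and elevation from GPS tags.
    '''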
    ipos, t, fix, nsvs, lats, lons, elevations, temps = gps_tags
    return tuple(num.median(x) for x in (lats, lons, elevations))


def extract_stations(fns):
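    '''
    Print a station list in YAML format to stdout, one
    :py:class:`pyrocko.model.Station` per given file, using the file
    extension as station name and the median GPS position as coordinates.
    '''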
    import io
    import sys
    from pyrocko.model import Station
    from pyrocko.guts import dump_all

    stations = {}

    for fn in fns:
        sta_name = os.path.splitext(fn)[1].lstrip('.')
        if sta_name in stations:
            logger.warning('Cube %s already in list!', sta_name)
            continue

        header, gps_tags, nsamples = get_time_infos(fn)

        lat, lon, elevation = coordinates_from_gps(gps_tags)

        sta = Station(
            network='',
            station=sta_name,
            name=sta_name,
            location='',
            lat=lat,
            lon=lon,
            elevation=elevation)

        stations[sta_name] = sta

    f = io.BytesIO()
    dump_all(stations.values(), stream=f)
    sys.stdout.write(f.getvalue().decode())


def plot_timeline(fns):
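    '''
    Plot the GPS time corrections determined for the given recordings.

    ``fns`` may be a single file, a directory, or a list of files. Reduced
    GPS tag times are drawn as pluses (GPS fix) and crosses (no fix), with
    rejected tags in gray, together with the derived control points, against
    uncorrected recorder (quartz) time.
    '''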
    from matplotlib import pyplot as plt

    fig = plt.figure()
    axes = fig.gca()

    h = 3600.

    if isinstance(fns, str):
        fn = fns
        if os.path.isdir(fn):
            fns = [
                os.path.join(fn, entry) for entry in sorted(os.listdir(fn))]

            ipos, t, fix, nsvs, header, offset, nsamples = \
                get_timing_context(fns)

        else:
            ipos, t, fix, nsvs, header, offset, nsamples = \
                get_extended_timing_context(fn)

    else:
        ipos, t, fix, nsvs, header, offset, nsamples = \
            get_timing_context(fns)

    deltat = 1.0 / int(header['S_RATE'])

    tref = num.median(t - ipos * deltat)
    tref = round(tref / deltat) * deltat

    if APPLY_SUBSAMPLE_SHIFT_CORRECTION:
        tcorr = 0.199 * deltat + 0.0003
    else:
        tcorr = 0.0

    x = ipos*deltat
    y = (t - tref) - ipos*deltat - tcorr

    bfix = fix != 0
    bnofix = fix == 0

    tmin, tmax, icontrol, tcontrol, ok = analyse_gps_tags(
        header, (ipos, t, fix, nsvs), offset, nsamples)

    la = num.logical_and
    nok = num.logical_not(ok)

    axes.plot(
        x[la(bfix, ok)]/h, y[la(bfix, ok)], '+',
        ms=5, color=color('chameleon3'))
    axes.plot(
        x[la(bfix, nok)]/h, y[la(bfix, nok)], '+',
        ms=5, color=color('aluminium4'))

    axes.plot(
        x[la(bnofix, ok)]/h, y[la(bnofix, ok)], 'x',
        ms=5, color=color('chocolate3'))
    axes.plot(
        x[la(bnofix, nok)]/h, y[la(bnofix, nok)], 'x',
        ms=5, color=color('aluminium4'))

    tred = tcontrol - icontrol*deltat - tref
    axes.plot(icontrol*deltat/h, tred, color=color('aluminium6'))
    axes.plot(icontrol*deltat/h, tred, 'o', ms=5, color=color('aluminium6'))

    ymin = (math.floor(tred.min() / deltat)-1) * deltat
    ymax = (math.ceil(tred.max() / deltat)+1) * deltat
    # ymin = min(ymin, num.min(y))
    # ymax = max(ymax, num.max(y))

    if ymax - ymin < 1000 * deltat:
        ygrid = math.floor(tred.min() / deltat) * deltat
        while ygrid < ymax:
            axes.axhline(ygrid, color=color('aluminium4'), alpha=0.3)
            ygrid += deltat

    xmin = icontrol[0]*deltat/h
    xmax = icontrol[-1]*deltat/h
    xsize = xmax - xmin
    xmin -= xsize * 0.1
    xmax += xsize * 0.1
    axes.set_xlim(xmin, xmax)

    axes.set_ylim(ymin, ymax)

    axes.set_xlabel('Uncorrected (quartz) time [h]')
    axes.set_ylabel('Relative time correction [s]')

    plt.show()


g_dir_contexts = {}


class DirContextEntry(Object):
    path = String.T()
    tstart = Timestamp.T()
    ifile = Int.T()


class DirContext(Object):
    path = String.T()
    mtime = Timestamp.T()
    entries = List.T(DirContextEntry.T())

    def get_entry(self, fn):
        path = os.path.abspath(fn)
        for entry in self.entries:
            if entry.path == path:
                return entry

        raise Exception('entry not found')

    def iter_entries(self, fn, step=1):
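        '''
        Iterate over the entries of successive files of the same recording,
        starting from the entry for ``fn`` and walking in ``ifile`` steps
        (``step=1``: forward, ``step=-1``: backward) until a gap in the file
        numbering is reached.
        '''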
        current = self.get_entry(fn)
        by_ifile = dict(
            (entry.ifile, entry) for entry in self.entries
            if entry.tstart == current.tstart)

        icurrent = current.ifile
        while True:
            icurrent += step
            try:
                yield by_ifile[icurrent]

            except KeyError:
                break


def context(fn):
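    '''
    Get a :py:class:`DirContext` for the directory containing ``fn``,
    listing all DATA-CUBE³ files found there. Contexts are cached and
    rebuilt whenever a modification time within the directory changes.
    '''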
    from pyrocko import datacube_ext

    dpath = os.path.dirname(os.path.abspath(fn))
    mtimes = [os.stat(dpath)[8]]  # st_mtime of the directory

    dentries = sorted([os.path.join(dpath, f) for f in os.listdir(dpath)
                       if os.path.isfile(os.path.join(dpath, f))])
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        mtimes.append(os.stat(fn2)[8])

    mtime = float(max(mtimes))

    if dpath in g_dir_contexts:
        dir_context = g_dir_contexts[dpath]
        if dir_context.mtime == mtime:
            return dir_context

        del g_dir_contexts[dpath]

    entries = []
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        if not os.path.isfile(fn2):
            continue

        with open(fn2, 'rb') as f:
            first512 = f.read(512)
            if not detect(first512):
                continue

        with open(fn2, 'rb') as f:
            try:
                header, data_arrays, gps_tags, nsamples, _ = \
                    datacube_ext.load(f.fileno(), 3, 0, -1, None)

            except datacube_ext.DataCubeError as e:
                e = DataCubeError(str(e))
                e.set_context('filename', fn2)
                raise e

        header = dict(header)
        entries.append(DirContextEntry(
            path=os.path.abspath(fn2),
            tstart=util.str_to_time(
                '20' + header['S_DATE'] + ' ' + header['S_TIME'],
                format='%Y/%m/%d %H:%M:%S'),
            ifile=int(header['DAT_NO'])))

    dir_context = DirContext(mtime=mtime, path=dpath, entries=entries)
    g_dir_contexts[dpath] = dir_context  # cache for reuse

    return dir_context


def get_time_infos(fn):
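    '''
    Read header, GPS tags, and number of samples from a raw file, without
    loading the waveform data.
    '''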
    from pyrocko import datacube_ext

    with open(fn, 'rb') as f:
        try:
            header, _, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), 1, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    return dict(header), gps_tags, nsamples


def get_timing_context(fns):
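    '''
    Aggregate the GPS tags of several raw files into one timeline, shifting
    the sample indices of each file by the total number of samples of the
    preceding files.

    :returns: ``(ipos, t, fix, nsvs, header, offset, nsamples)``
    '''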
    joined = [[], [], [], []]
    ioff = 0
    for fn in fns:
        header, gps_tags, nsamples = get_time_infos(fn)

        ipos = gps_tags[0]
        ipos += ioff

        for i in range(4):
            joined[i].append(gps_tags[i])

        ioff += nsamples

    ipos, t, fix, nsvs = [num.concatenate(x) for x in joined]

    nsamples = ioff
    return ipos, t, fix, nsvs, header, 0, nsamples


def get_extended_timing_context(fn):
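    '''
    Get timing information for ``fn``, pulling in GPS tags from neighbouring
    files of the same recording when the file itself contains no GPS fix.

    Following and preceding files (as given by the directory context) are
    aggregated until a file with at least one GPS fix is found in each
    direction.
    '''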
    c = context(fn)

    header, gps_tags, nsamples_base = get_time_infos(fn)

    ioff = 0
    aggregated = [gps_tags]

    nsamples_total = nsamples_base

    if num.sum(gps_tags[2]) == 0:
        ioff = nsamples_base
        for entry in c.iter_entries(fn, 1):
            _, gps_tags, nsamples = get_time_infos(entry.path)

            ipos = gps_tags[0]
            ipos += ioff

            aggregated.append(gps_tags)
            nsamples_total += nsamples

            if num.sum(gps_tags[2]) > 0:
                break

            ioff += nsamples

        ioff = 0
        for entry in c.iter_entries(fn, -1):
            _, gps_tags, nsamples = get_time_infos(entry.path)

            ioff -= nsamples

            ipos = gps_tags[0]
            ipos += ioff

            aggregated[0:0] = [gps_tags]

            nsamples_total += nsamples

            if num.sum(gps_tags[2]) > 0:
                break

    ipos, t, fix, nsvs = [num.concatenate(x) for x in zip(*aggregated)][:4]

#    return ipos, t, fix, nsvs, header, ioff, nsamples_total
    return ipos, t, fix, nsvs, header, 0, nsamples_base


def iload(fn, load_data=True, interpolation='sinc'):
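    '''
    Read a DATA-CUBE³ raw file and yield one trace per channel.

    With ``interpolation='sinc'``, the samples are mapped onto a regular
    time grid using the GPS-derived control points (drift correction); with
    ``'off'``, the raw samples are kept and only the onset time is set.
    Traces spanning a leap second are split at the leap second, and all
    times are shifted from GPS to UTC.
    '''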
    from pyrocko import datacube_ext
    from pyrocko import signal_ext

    if interpolation not in ('sinc', 'off'):
        raise NotImplementedError(
            'no such interpolation method: %s' % interpolation)

    with open(fn, 'rb') as f:
        if load_data:
            loadflag = 2
        else:
            loadflag = 1

        try:
            header, data_arrays, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), loadflag, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    header = dict(header)
    deltat = 1.0 / int(header['S_RATE'])
    nchannels = int(header['CH_NUM'])

    ipos, t, fix, nsvs, header_, offset_, nsamples_ = \
        get_extended_timing_context(fn)

    tmin, tmax, icontrol, tcontrol, _ = analyse_gps_tags(
        header_, (ipos, t, fix, nsvs), offset_, nsamples_)

    if icontrol is None:
        logger.warning(
            'No usable GPS timestamps found. Using datacube header '
            'information to guess time. (file: "%s")' % fn)

    tmin_ip = round(tmin / deltat) * deltat
    if interpolation != 'off':
        tmax_ip = round(tmax / deltat) * deltat
    else:
        tmax_ip = tmin_ip + (nsamples-1) * deltat

    nsamples_ip = int(round((tmax_ip - tmin_ip)/deltat)) + 1
    # to prevent problems with rounding errors:
    tmax_ip = tmin_ip + (nsamples_ip-1) * deltat

    leaps = num.array(
        [x[0] + util.gps_utc_offset(x[0]) for x in util.read_leap_seconds2()],
        dtype=float)

    if load_data and icontrol is not None:
        ncontrol_this = num.sum(
            num.logical_and(0 <= icontrol, icontrol < nsamples))

        if ncontrol_this <= 1:
            logger.warning(
                'Extrapolating GPS time information from directory context '
                '(insufficient number of GPS timestamps in file: "%s").' % fn)

    for i in range(nchannels):
        if load_data:
            arr = data_arrays[i]
            assert arr.size == nsamples

            if interpolation == 'sinc' and icontrol is not None:

                ydata = num.empty(nsamples_ip, dtype=float)
                try:
                    signal_ext.antidrift(
                        icontrol, tcontrol,
                        arr.astype(float), tmin_ip, deltat, ydata)

                except signal_ext.Error as e:
                    e = DataCubeError(str(e))
                    e.set_context('filename', fn)
                    e.set_context('n_control_points', icontrol.size)
                    e.set_context('n_samples_raw', arr.size)
                    e.set_context('n_samples_ip', ydata.size)
                    e.set_context('tmin_ip', util.time_to_str(tmin_ip))
                    raise e

                ydata = num.round(ydata).astype(arr.dtype)
            else:
                ydata = arr

            tr_tmin = tmin_ip
            tr_tmax = None
        else:
            ydata = None
            tr_tmin = tmin_ip
            tr_tmax = tmax_ip

        tr = trace.Trace('', header['DEV_NO'], '', 'p%i' % i, deltat=deltat,
                         ydata=ydata, tmin=tr_tmin, tmax=tr_tmax, meta=header)

        bleaps = num.logical_and(tmin_ip <= leaps, leaps < tmax_ip)

        if num.any(bleaps):
            assert num.sum(bleaps) == 1
            tcut = leaps[bleaps][0]

            for tmin_cut, tmax_cut in [
                    (tr.tmin, tcut), (tcut, tr.tmax+tr.deltat)]:

                try:
                    tr_cut = tr.chop(tmin_cut, tmax_cut, inplace=False)
                    tr_cut.shift(
                        util.utc_gps_offset(0.5*(tr_cut.tmin+tr_cut.tmax)))
                    yield tr_cut

                except trace.NoData:
                    pass

        else:
            tr.shift(util.utc_gps_offset(0.5*(tr.tmin+tr.tmax)))
            yield tr


header_keys = {
    str: b'GIPP_V DEV_NO E_NAME GPS_PO S_TIME S_DATE DAT_NO'.split(),
    int: b'''P_AMPL CH_NUM S_RATE D_FILT C_MODE A_CHOP F_TIME GPS_TI GPS_OF
            A_FILT A_PHAS GPS_ON ACQ_ON V_TCXO D_VOLT E_VOLT'''.split()}

all_header_keys = header_keys[str] + header_keys[int]


def detect(first512):
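    '''
    Check whether the given bytes look like the start of a DATA-CUBE³ raw
    file: at least 512 bytes, high nibble of the first byte equal to ``0xf``,
    and at least one known header key present.
    '''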
    s = first512

    if len(s) < 512:
        return False

    if ord(s[0:1]) >> 4 != 15:
        return False

    n = s.find(b'\x80')
    if n == -1:
        n = len(s)

    s = s[1:n]
    s = s.replace(b'\xf0', b'')
    s = s.replace(b';', b' ')
    s = s.replace(b'=', b' ')
    kvs = s.split(b' ')

    if len([k for k in all_header_keys if k in kvs]) == 0:
        return False

    return True


if __name__ == '__main__':
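    # Example invocations, as a sketch (data paths are hypothetical):
    #
    #   python -m pyrocko.io.datacube timeline /path/to/cube/recordings
    #   python -m pyrocko.io.datacube stations rec.A01 rec.A02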
    import argparse
    parser = argparse.ArgumentParser(description='Datacube reader')

    parser.add_argument(
        'action', choices=['timeline', 'gnss', 'stations'],
        help='Action')
    parser.add_argument(
        'files', nargs='+')

    parser.add_argument(
        '--loglevel', '-l',
        choices=['critical', 'error', 'warning', 'info', 'debug'],
        default='info',
        help='Set logger level. Default: %(default)s')

    args = parser.parse_args()

    util.setup_logging('pyrocko.io.datacube', args.loglevel)
    logging.getLogger('matplotlib.font_manager').disabled = True

    if args.action == 'timeline':
        plot_timeline(args.files)

    elif args.action == 'gnss':
        plot_gnss_location_timeline(args.files)

    elif args.action == 'stations':
        extract_stations(args.files)