Coverage for /usr/local/lib/python3.11/dist-packages/pyrocko/gui/sparrow/elements/catalog.py: 53%

258 statements  

coverage.py v6.5.0, created at 2023-10-06 15:01 +0000

# https://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------

# import copy
import logging
import operator
import calendar
import numpy as num

from pyrocko.guts import \
    Object, StringChoice, String, List, Float

from pyrocko import table, model  # , automap
from pyrocko.client import fdsn, catalog
from pyrocko.gui.qt_compat import qw
from pyrocko.plot import beachball
# from pyrocko.himesh import HiMesh

# from pyrocko.gui.vtk_util import TrimeshPipe

from .. import common

from .table import TableElement, TableState

logger = logging.getLogger('pyrocko.gui.sparrow.elements.catalog')

guts_prefix = 'sparrow'


attribute_names = [
    'time', 'lat', 'lon', 'northing', 'easting', 'depth', 'magnitude']

attribute_dtypes = [
    'f16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']

name_to_icol = dict(
    (name, icol) for (icol, name) in enumerate(attribute_names))

# event_dtype = num.dtype(list(zip(attribute_names, attribute_dtypes)))

t_time = num.float64


def binned_statistic(values, ibins, function):
    order = num.argsort(ibins)
    values_sorted = values[order]
    ibins_sorted = ibins[order]
    parts = num.concatenate((
        [0],
        num.where(num.diff(ibins_sorted) != 0)[0] + 1,
        [ibins.size]))

    results = []
    ibins_result = []
    for ilow, ihigh in zip(parts[:-1], parts[1:]):
        values_part = values_sorted[ilow:ihigh]
        results.append(function(values_part))
        ibins_result.append(ibins_sorted[ilow])

    return num.array(ibins_result, dtype=num.int64), num.array(results)
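
# Usage sketch (illustrative): binned_statistic() groups `values` by integer
# bin label and applies `function` to each group, returning the occupied bin
# labels and the per-group results, e.g.
#
#     binned_statistic(num.array([2., 4., 1., 3.]),
#                      num.array([0, 0, 1, 1]), num.sum)
#     # -> (array([0, 1]), array([6., 4.]))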


def load_text(
        filepath,
        column_names=('time', 'lat', 'lon', 'depth', 'magnitude'),
        time_format='seconds'):

    with open(filepath, 'r') as f:
        if column_names == 'from_header':
            line = f.readline()
            column_names = line.split()

    name_to_icol_in = dict(
        (name, icol) for (icol, name) in enumerate(column_names)
        if name in attribute_names)

    data_in = num.loadtxt(filepath, skiprows=1)

    nevents = data_in.shape[0]
    c5 = num.zeros((nevents, 5))
    c5[:, 0] = data_in[:, name_to_icol_in['lat']]
    c5[:, 1] = data_in[:, name_to_icol_in['lon']]
    c5[:, 2] = 0.0
    c5[:, 3] = 0.0
    c5[:, 4] = data_in[:, name_to_icol_in['depth']] * 1000.

    tab = table.Table()
    loc_rec = table.LocationRecipe()
    tab.add_recipe(loc_rec)

    tab.add_col(loc_rec.c5_header, c5)
    for k, unit in [
            ('time', 's'),
            ('magnitude', None)]:

        values = data_in[:, name_to_icol_in[k]]

        if k == 'time' and time_format == 'year':
            values = decimal_year_to_time(values)

        tab.add_col(table.Header(k, unit), values)

    return tab
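
# Usage sketch (illustrative, file name hypothetical): load_text() expects a
# whitespace-separated text file with a single header row; depth is read in km
# and converted to m. With column_names='from_header' the header row provides
# the column names, e.g. for a file 'seismicity.dat' containing
#
#     time lat lon depth magnitude
#     2004.50 63.63 -19.62 5.0 4.8
#     2004.75 63.64 -19.60 7.5 3.2
#
# a location table is built with
#
#     tab = load_text('seismicity.dat', column_names='from_header',
#                     time_format='year')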


def decimal_year_to_time(year):
    iyear_start = num.floor(year).astype(num.int64)
    iyear_end = iyear_start + 1

    iyear_min = num.min(iyear_start)
    iyear_max = num.max(iyear_end)

    iyear_to_time = num.zeros(iyear_max - iyear_min + 1, dtype=t_time)
    for iyear in range(iyear_min, iyear_max+1):
        iyear_to_time[iyear-iyear_min] = calendar.timegm(
            (iyear, 1, 1, 0, 0, 0))

    tyear_start = iyear_to_time[iyear_start - iyear_min]
    tyear_end = iyear_to_time[iyear_end - iyear_min]

    t = tyear_start + (year - iyear_start) * (tyear_end - tyear_start)

    return t
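
# Worked example (illustrative): decimal_year_to_time() interpolates linearly
# within each calendar year, so decimal_year_to_time(num.array([2004.5]))
# gives calendar.timegm((2004, 1, 1, 0, 0, 0)) + 0.5 * 366 * 86400
# = 1088726400.0, i.e. 2004-07-02 00:00:00 UTC (2004 has 366 days).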


def oa_to_array(objects, attribute):
    return num.fromiter(
        map(operator.attrgetter(attribute), objects),
        num.float64,
        len(objects))
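
# Note (illustrative): oa_to_array() collects one numeric attribute from a
# sequence of objects into a float64 array, e.g. oa_to_array(events,
# 'magnitude') yields the magnitudes of a list of pyrocko events.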


def eventtags_to_array(events, tab):

    ks = set()
    event_tags = []
    for ev in events:
        tags = ev.tags_as_dict()
        ks.update(tags.keys())
        event_tags.append(tags)

    for k in sorted(ks):
        column = [tags.get(k, None) for tags in event_tags]
        if all(isinstance(v, int) for v in column):
            dtype = int
        elif all(isinstance(v, float) for v in column):
            dtype = float
        else:
            dtype = num.string_
            column = [v or '' for v in column]

        arr = num.array(column, dtype=dtype)

        tab.add_col(table.Header('tag_%s' % k), arr)
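
# Note (illustrative): each distinct tag key found on the events becomes a
# column named 'tag_<key>'; the column dtype is int or float if all values
# are of that type, otherwise strings with missing tags stored as ''.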


def eventextras_to_array(events, tab):
    n_extras = num.array([len(ev.extras) for ev in events])
    evts_all_extras = num.arange(len(events))[n_extras != 0]

    if evts_all_extras.shape[0] == 0:
        return tab

    ev0 = events[evts_all_extras[0]]

    if num.unique(n_extras).shape[0] > 1:
        msg = 'Not all events have equal number of extras.'
        if num.unique(n_extras).shape[0] == 2 and num.min(n_extras) == 0:
            logger.warning(msg + ' Zero length lists are filled with NaNs.')
        else:
            raise IndexError(
                msg + ' Several non-zero shapes detected. Please check.')

    for key, val in ev0.extras.items():
        dtype = num.string_
        values = num.array(['' for x in range(n_extras.shape[0])], dtype=dtype)

        if type(val) is float:
            dtype, values = num.float64, num.ones_like(n_extras) * num.nan

        elif type(val) is int:
            dtype = num.int64
            values = num.ones_like(n_extras, dtype=num.int64) * num.nan

        header = 'extra_%s' % key
        values[evts_all_extras] = num.array([
            events[iev].extras[key] for iev in evts_all_extras], dtype=dtype)

        tab.add_col(table.Header(header), values)
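
# Note (illustrative): extras are exported as columns named 'extra_<key>',
# typed after the first event that carries extras; events without extras get
# NaN (numeric extras) or '' (string extras) in those columns.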


def events_to_table(events):
    c5 = num.zeros((len(events), 5))
    m6 = None

    for i, ev in enumerate(events):
        c5[i, :] = (
            ev.lat, ev.lon, ev.north_shift, ev.east_shift, ev.depth)

        if ev.moment_tensor:
            if m6 is None:
                m6 = num.zeros((len(events), 6))
                m6[:, 0:3] = 1.0

            m6[i, :] = beachball.deco_part(ev.moment_tensor, 'deviatoric').m6()

    tab = table.Table()

    ev_rec = table.EventRecipe()
    tab.add_recipe(ev_rec)
    tab.add_col(ev_rec.get_header('c5'), c5)

    for k in ['time', 'magnitude']:
        tab.add_col(ev_rec.get_header(k), oa_to_array(events, k))

    if events:
        eventtags_to_array(events, tab)
        eventextras_to_array(events, tab)

    if m6 is not None:
        mt_rec = table.MomentTensorRecipe()
        tab.add_recipe(mt_rec)
        tab.add_col(mt_rec.m6_header, m6)

    return tab
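
# Usage sketch (illustrative, file name hypothetical): events_to_table() is
# the common entry point used by the selection classes below, e.g.
#
#     from pyrocko import model
#     events = model.load_events('events.pf')
#     tab = events_to_table(events)
#
# The resulting table carries the 'c5' coordinates (lat, lon, north_shift,
# east_shift, depth), 'time' and 'magnitude' columns, plus 'tag_*'/'extra_*'
# columns and deviatoric 'm6' moment-tensor components where available.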


class LoadingChoice(StringChoice):
    choices = [choice.upper() for choice in [
        'file',
        'fdsn']]


class FDSNSiteChoice(StringChoice):
    choices = [key.upper() for key in fdsn.g_site_abbr.keys()]


class CatalogSelection(Object):
    pass


class MemoryCatalogSelection(CatalogSelection):

    def __init__(self, events=None):
        if events is None:
            events = []

        self._events = events

    def get_table(self):
        return events_to_table(self._events)


class FileCatalogSelection(CatalogSelection):
    paths = List.T(String.T())

    def get_table(self):
        from pyrocko.io import quakeml

        events = []
        for path in self.paths:
            fn_ext = path.split('.')[-1].lower()
            if fn_ext in ['xml', 'qml', 'quakeml']:
                qml = quakeml.QuakeML.load_xml(filename=path)
                events.extend(qml.get_pyrocko_events())

            elif fn_ext in ['dat', 'csv']:
                assert len(self.paths) == 1
                tab = load_text(
                    path, column_names='from_header', time_format='year')

                return tab

            else:
                events.extend(model.load_events(path))

        return events_to_table(events)
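
# Usage sketch (illustrative, file name hypothetical):
#
#     selection = FileCatalogSelection(paths=['catalog.quakeml'])
#     tab = selection.get_table()
#
# QuakeML files ('.xml', '.qml', '.quakeml') are parsed with
# pyrocko.io.quakeml, '.dat'/'.csv' files go through load_text(), and anything
# else is read with model.load_events().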


g_catalogs = {
    'Geofon': catalog.Geofon(),
    'USGS/NEIC US': catalog.USGS('us'),
    'Global-CMT': catalog.GlobalCMT(),
    'Saxony (Uni-Leipzig)': catalog.Saxony()
}

g_fdsn_has_events = ['ISC', 'SCEDC', 'NCEDC', 'IRIS', 'GEONET']

g_sites = sorted(g_catalogs.keys())
g_sites.extend(g_fdsn_has_events)
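
# Note: site names listed in g_catalogs are served by the dedicated
# pyrocko.client.catalog clients above; the remaining names in g_sites are
# FDSN web services queried through fdsn.event() (see
# OnlineCatalogSelection.get_table below).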


class OnlineCatalogSelection(CatalogSelection):
    site = String.T()
    tmin = Float.T()
    tmax = Float.T()
    magnitude_min = Float.T(optional=True)

    def get_table(self):
        logger.info('Getting events from "%s" catalog.' % self.site)

        cat = g_catalogs.get(self.site, None)
        try:
            if cat:
                kwargs = {}
                if self.magnitude_min is not None:
                    kwargs['magmin'] = self.magnitude_min

                events = cat.get_events(
                    time_range=(self.tmin, self.tmax), **kwargs)

            else:
                kwargs = {}
                if self.magnitude_min is not None:
                    kwargs['minmagnitude'] = self.magnitude_min

                request = fdsn.event(
                    starttime=self.tmin,
                    endtime=self.tmax,
                    site=self.site, **kwargs)

                from pyrocko.io import quakeml
                qml = quakeml.QuakeML.load_xml(request)
                events = qml.get_pyrocko_events()

            logger.info('Got %i event%s from "%s" catalog.' % (
                len(events), '' if len(events) == 1 else 's', self.site))

        except Exception as e:
            logger.error(
                'Getting events from "%s" catalog failed: %s' % (
                    self.site, str(e)))

            events = []

        return events_to_table(events)
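
# Usage sketch (illustrative): querying an online catalog for a fixed time
# window given as system timestamps, e.g.
#
#     from pyrocko import util
#     selection = OnlineCatalogSelection(
#         site='Global-CMT',
#         tmin=util.str_to_time('2020-01-01 00:00:00'),
#         tmax=util.str_to_time('2020-02-01 00:00:00'),
#         magnitude_min=6.0)
#     tab = selection.get_table()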


class CatalogState(TableState):
    selection = CatalogSelection.T(optional=True)
    size_parameter = String.T(default='magnitude', optional=True)
    color_parameter = String.T(default='depth', optional=True)

    @classmethod
    def get_name(self):
        return 'Catalog'

    def create(self):
        element = CatalogElement()
        return element


class CatalogElement(TableElement):

    def __init__(self, *args, **kwargs):
        TableElement.__init__(self, *args, **kwargs)
        self._selection_view = None
        # self._himesh = HiMesh(order=6)

        # cpt_data = [
        #     (0.0, 0.0, 0.0, 0.0),
        #     (1.0, 0.9, 0.9, 0.2)]
        #
        # self.cpt_mesh = automap.CPT(
        #     levels=[
        #         automap.CPTLevel(
        #             vmin=a[0],
        #             vmax=b[0],
        #             color_min=[255*x for x in a[1:]],
        #             color_max=[255*x for x in b[1:]])
        #         for (a, b) in zip(cpt_data[:-1], cpt_data[1:])])

    def get_name(self):
        return 'Catalog'

    def get_size_parameter_extra_entries(self):
        return ['magnitude']

    def get_color_parameter_extra_entries(self):
        return ['depth']

    def bind_state(self, state):
        TableElement.bind_state(self, state)
        self.talkie_connect(state, 'selection', self.update)

    def update(self, *args):
        state = self._state
        # ifaces = None
        if self._selection_view is not state.selection:
            self.set_table(state.selection.get_table())
            self._selection_view = state.selection

            # ifaces = self._himesh.points_to_faces(self._table.get_col('xyz'))

        TableElement.update(self, *args)

        # if ifaces is not None:
        #     ifaces_x, sizes = binned_statistic(
        #         ifaces, ifaces, lambda part: part.shape[0])
        #
        #     vertices = self._himesh.get_vertices()
        #     # vertices *= 0.95
        #     faces = self._himesh.get_faces()
        #
        #     values = num.zeros(faces.shape[0])
        #     values[ifaces_x] = num.log(1+sizes)
        #
        #     self._mesh = TrimeshPipe(vertices, faces, values=values)
        #     cpt = copy.deepcopy(self.cpt_mesh)
        #     cpt.scale(num.min(values), num.max(values))
        #     self._mesh.set_cpt(cpt)
        #     self._mesh.set_opacity(0.5)

        #     self._parent.add_actor(self._mesh.actor)

    def open_file_dialog(self):
        caption = 'Select one or more files to open'

        fns, _ = qw.QFileDialog.getOpenFileNames(
            self._parent, caption, options=common.qfiledialog_options)

        if fns:
            self._state.selection = FileCatalogSelection(
                paths=[str(fn) for fn in fns])

    def open_catalog_load_dialog(self):
        dialog = qw.QDialog(self._parent)
        dialog.setWindowTitle('Get events from online catalog.')

        layout = qw.QHBoxLayout(dialog)

        layout.addWidget(qw.QLabel('Site'))

        cb = qw.QComboBox()
        for i, s in enumerate(g_sites):
            cb.insertItem(i, s)

        layout.addWidget(cb)

        pb = qw.QPushButton('Cancel')
        pb.clicked.connect(dialog.reject)
        layout.addWidget(pb)

        pb = qw.QPushButton('Ok')
        pb.clicked.connect(dialog.accept)
        layout.addWidget(pb)

        dialog.exec_()

        site = str(cb.currentText())

        vstate = self._parent.state

        if dialog.result() == qw.QDialog.Accepted:
            self._state.selection = OnlineCatalogSelection(
                site=site,
                tmin=vstate.tmin,
                tmax=vstate.tmax)

    def _get_table_widgets_start(self):
        return 1  # used as y arg in addWidget calls

    def _get_controls(self):
        if not self._controls:
            frame = TableElement._get_controls(self)  # sets self._controls
            layout = frame.layout()

            lab = qw.QLabel('Load from')
            pb_file = qw.QPushButton('File')

            layout.addWidget(lab, 0, 0)
            layout.addWidget(pb_file, 0, 1)

            pb_file.clicked.connect(self.open_file_dialog)

            pb_file = qw.QPushButton('Online Catalog')

            layout.addWidget(lab, 0, 0)
            layout.addWidget(pb_file, 0, 2)

            pb_file.clicked.connect(self.open_catalog_load_dialog)

        return self._controls


__all__ = [
    'CatalogSelection',
    'FileCatalogSelection',
    'MemoryCatalogSelection',
    'CatalogElement',
    'CatalogState',
]