# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------

'''
Squirrel online earthquake catalog client.
'''

import os.path as op
import logging
import time
try:
    import cPickle as pickle
except ImportError:
    import pickle

from pyrocko import util, progress
from pyrocko.guts import String, Dict, Duration, dump_all

from .base import Source
from ..model import ehash
from ..lock import LockDir

guts_prefix = 'squirrel'

logger = logging.getLogger('psq.client.catalog')


class Link(object):
    def __init__(self, tmin, tmax, tmodified, nevents=-1, content_id=None):
        self.tmin = tmin
        self.tmax = tmax
        self.tmodified = tmodified
        self.nevents = nevents
        self.content_id = content_id

    def __str__(self):
        return 'span %s - %s, access %s, nevents %i' % (
            util.tts(self.tmin),
            util.tts(self.tmax),
            util.tts(self.tmodified),
            self.nevents)


class NoSuchCatalog(Exception):
    pass


def get_catalog(name):
    if name == 'geofon':
        from pyrocko.client.geofon import Geofon
        return Geofon()
    elif name == 'gcmt':
        from pyrocko.client.globalcmt import GlobalCMT
        return GlobalCMT()
    elif name == 'isc':
        from pyrocko.client.isc import ISC
        return ISC()
    else:
        raise NoSuchCatalog(name)



class CatalogSource(Source):
    '''
    Squirrel data-source to transparently access online earthquake catalogs.

    The catalog source maintains and synchronizes a partial copy of the
    online catalog, e.g. of all events above a certain magnitude. The time
    span for which the local copy of the catalog should be kept up to date
    is maintained automatically by Squirrel. Data is loaded and updated in
    chunks, as needed, in a just-in-time fashion. Data validity can
    optionally expire after a given period of time and new data can be
    treated as preliminary. In both cases, information is refreshed as
    needed.
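
    A minimal usage sketch (hypothetical values; assumes a ready-made
    :py:class:`~pyrocko.squirrel.base.Squirrel` instance ``sq``)::

        source = CatalogSource(
            catalog='gcmt',
            query_args={'magmin': '6.0'},
            expires=3600.*24.)

        sq.add_source(source)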

    '''

    catalog = String.T(
        help='Catalog name.')

    query_args = Dict.T(
        String.T(), String.T(),
        optional=True,
        help='Common arguments, which are appended to all queries, e.g. to '
             'constrain location, depth or magnitude ranges.')

    expires = Duration.T(
        optional=True,
        help='Expiration time [s]. Information older than this will be '
             'refreshed, i.e. queried again.')

    anxious = Duration.T(
        optional=True,
        help='Anxiety period [s]. Information will be treated as '
             'preliminary if it was younger than this at the time of its '
             'retrieval. Preliminary information is refreshed on each '
             'query relevant to it.')

    cache_path = String.T(
        optional=True,
        help='Directory path where the partial local copy of the catalog is '
             "kept. By default the Squirrel environment's cache directory "
             'is used.')

    def __init__(self, catalog, query_args=None, **kwargs):
        Source.__init__(
            self, catalog=catalog, query_args=query_args, **kwargs)

        self._hash = self.make_hash()
        self._nevents_query_hint = 1000
        self._nevents_chunk_hint = 5000
        self._tquery = 3600.*24.
        self._tquery_limits = (3600., 3600.*24.*365.)

    def describe(self):
        return 'catalog:%s:%s' % (self.catalog, self.get_hash())

    def setup(self, squirrel, check=True):
        self._force_query_age_max = self.anxious
        self._catalog = get_catalog(self.catalog)

        self._cache_path = op.join(
            self.cache_path or squirrel._cache_path,
            'catalog',
            self.get_hash())

        util.ensuredir(self._cache_path)

        with LockDir(self._cache_path):
            self._load_chain()
            self._add_events_to_squirrel(squirrel)

    def make_hash(self):
        s = self.catalog
        if self.query_args is not None:
            s += ','.join(
                '%s:%s' % (k, self.query_args[k])
                for k in sorted(self.query_args.keys()))
        else:
            s += 'noqueryargs'

        return ehash(s)

    def get_hash(self):
        return self._hash

    def update_event_inventory(self, squirrel, constraint=None):

        with LockDir(self._cache_path):
            self._load_chain()

            assert constraint is not None
            if constraint is not None:
                tmin, tmax = constraint.tmin, constraint.tmax

                tmin_sq, tmax_sq = squirrel.get_time_span(dummy_limits=False)

                if tmin is None:
                    tmin = tmin_sq

                if tmax is None:
                    tmax = tmax_sq

                if tmin is None or tmax is None:
                    logger.warning(
                        'Cannot query catalog source "%s" without time '
                        'constraint. Could not determine appropriate time '
                        'constraint from current data holdings (no data?).'
                        % self.catalog)

                    return

                if tmin >= tmax:
                    tmax = tmin

                tnow = time.time()
                modified = False

                if tmax > tnow:
                    tmax = tnow

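                # First pass: extend the chain of links so that it covers
                # the requested time span without gaps. Newly created
                # links are unqueried stubs (nevents == -1), which the
                # second pass below picks up as outdated.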

                chain = []
                this_tmin = tmin
                for link in self._chain:
                    if this_tmin < link.tmin and tmax > this_tmin:
                        chain.append(
                            Link(this_tmin, min(tmax, link.tmin), tnow))
                        modified = True

                    chain.append(link)
                    this_tmin = link.tmax

                if this_tmin < tmax:
                    chain.append(Link(this_tmin, tmax, tnow))
                    modified = True

                if modified:
                    self._chain = chain

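                # Second pass: outdated links overlapping the requested
                # span are re-queried. Each such link is split at the span
                # edges so that only the overlapping part is queried now;
                # files backing replaced links are removed from the
                # squirrel afterwards.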

                chain = []
                remove = []
                for link in self._chain:
                    if tmin < link.tmax and link.tmin < tmax \
                            and self._outdated(link, tnow):

                        if link.content_id:
                            remove.append(
                                self._get_events_file_path(link.content_id))

                        tmin_query = max(link.tmin, tmin)
                        tmax_query = min(link.tmax, tmax)

                        if link.tmin < tmin_query:
                            chain.append(Link(link.tmin, tmin_query, tnow))

                        if tmin_query < tmax_query:
                            # Use a separate name for the new links, so
                            # that the comparison against link.tmax below
                            # still refers to the original link.
                            for link_new in self._iquery(
                                    tmin_query, tmax_query, tnow):
                                chain.append(link_new)

                        if tmax_query < link.tmax:
                            chain.append(Link(tmax_query, link.tmax, tnow))

                        modified = True

                    else:
                        chain.append(link)

                if modified:
                    self._chain = chain
                    self._dump_chain()
                    squirrel.remove(remove)

            self._add_events_to_squirrel(squirrel)


    def _add_events_to_squirrel(self, squirrel):
        add = []
        for link in self._chain:
            if link.content_id:
                add.append(self._get_events_file_path(link.content_id))

        squirrel.add(add, kinds=['event'], format='yaml')

    def _iquery(self, tmin, tmax, tmodified):

        nwant = self._nevents_query_hint
        tlim = self._tquery_limits

        t = tmin
        tpack_min = tmin

        events = []
        with progress.task(
                'Querying %s' % self.catalog, 100, logger=logger) as task:

            while t < tmax:
                tmin_query = t
                tmax_query = min(t + self._tquery, tmax)

                events_new = self._query(tmin_query, tmax_query)
                nevents_new = len(events_new)
                events.extend(events_new)
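                # Pack events into chunk files as soon as enough have
                # accumulated, to keep memory use bounded during long
                # queries.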

                while len(events) > int(self._nevents_chunk_hint * 1.5):
                    tpack_max = events[self._nevents_chunk_hint].time
                    yield self._pack(
                        events[:self._nevents_chunk_hint],
                        tpack_min, tpack_max, tmodified)

                    tpack_min = tpack_max
                    events[:self._nevents_chunk_hint] = []

                t += self._tquery

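                # Adapt the query window to the observed event density:
                # widen it drastically when queries come back nearly
                # empty, rescale it when the count is far from the
                # target, and clamp it to the configured limits.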

                if tmax_query != tmax:
                    if nevents_new < 5:
                        self._tquery *= 10.0

                    elif not (nwant // 2 < nevents_new < nwant * 2):
                        self._tquery /= float(nevents_new) / float(nwant)

                self._tquery = max(tlim[0], min(self._tquery, tlim[1]))
                task.update(int(round(100. * (t-tmin)/(tmax-tmin))))

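        # If an anxiety period is configured, the youngest part of the
        # result is packed into its own link, so that it can be
        # re-queried on its own once it is no longer preliminary.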

        if self._force_query_age_max is not None:
            tsplit = tmodified - self._force_query_age_max
            if tpack_min < tsplit < tmax:
                events_older = []
                events_newer = []
                for ev in events:
                    if ev.time < tsplit:
                        events_older.append(ev)
                    else:
                        events_newer.append(ev)

                yield self._pack(
                    events_older, tpack_min, tsplit, tmodified)
                yield self._pack(
                    events_newer, tsplit, tmax, tmodified)
                return

        yield self._pack(events, tpack_min, tmax, tmodified)

    def _pack(self, events, tmin, tmax, tmodified):
        if events:
            content_id = ehash(
                self.get_hash() + ' %r %r %r' % (tmin, tmax, tmodified))
            path = self._get_events_file_path(content_id)
            dump_all(events, filename=path)
        else:
            content_id = None

        return Link(tmin, tmax, tmodified, len(events), content_id)

    def query_args_typed(self):
        if self.query_args is None:
            return {}
        else:
            type_map = {
                'magmin': float,
                'magmax': float,
                'latmin': float,
                'latmax': float,
                'lonmin': float,
                'lonmax': float,
                'depthmin': float,
                'depthmax': float}

            return dict(
                (k, type_map.get(k, str)(v))
                for (k, v) in self.query_args.items())
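
    # For example (hypothetical values), ``query_args={'magmin': '5.0',
    # 'orderby': 'time'}`` would be converted by ``query_args_typed`` to
    # ``{'magmin': 5.0, 'orderby': 'time'}``: keys listed in ``type_map``
    # are cast to float, all other values stay strings.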


    def _query(self, tmin, tmax):
        logger.info('Querying catalog "%s" for time span %s - %s.' % (
            self.catalog, util.tts(tmin), util.tts(tmax)))

        return self._catalog.get_events(
            (tmin, tmax),
            **self.query_args_typed())

    def _outdated(self, link, tnow):
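        # A link must be re-queried if (1) it has never been successfully
        # queried, (2) it was still preliminary at retrieval time, i.e.
        # younger than the anxiety period, or (3) it is older than the
        # configured expiration age.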

        if link.nevents == -1:
            return True

        if self._force_query_age_max \
                and link.tmax + self._force_query_age_max > link.tmodified:

            return True

        if self.expires is not None \
                and link.tmodified < tnow - self.expires:

            return True

        return False

    def _get_events_file_path(self, fhash):
        return op.join(self._cache_path, fhash + '.pf')

    def _get_chain_file_path(self):
        return op.join(self._cache_path, 'chain.pickle')

    def _load_chain(self):
        path = self._get_chain_file_path()
        if op.exists(path):
            with open(path, 'rb') as f:
                self._chain = pickle.load(f)
        else:
            self._chain = []

    def _dump_chain(self):
        with open(self._get_chain_file_path(), 'wb') as f:
            pickle.dump(self._chain, f, protocol=2)


__all__ = [
    'CatalogSource'
]