Coverage for /usr/local/lib/python3.11/dist-packages/pyrocko/squirrel/client/catalog.py: 80%

# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------

'''
Squirrel online earthquake catalog client.
'''

import os.path as op
import logging
import time
try:
    import cPickle as pickle
except ImportError:
    import pickle

from pyrocko import util, progress
from pyrocko.guts import String, Dict, Duration, dump_all

from .base import Source
from ..model import ehash
from ..lock import LockDir

guts_prefix = 'squirrel'

logger = logging.getLogger('psq.client.catalog')


class Link(object):
    def __init__(self, tmin, tmax, tmodified, nevents=-1, content_id=None):
        self.tmin = tmin
        self.tmax = tmax
        self.tmodified = tmodified
        self.nevents = nevents
        self.content_id = content_id

    def __str__(self):
        return 'span %s - %s, access %s, nevents %i' % (
            util.tts(self.tmin),
            util.tts(self.tmax),
            util.tts(self.tmodified),
            self.nevents)


class NoSuchCatalog(Exception):
    pass


def get_catalog(name):
    if name == 'geofon':
        from pyrocko.client.geofon import Geofon
        return Geofon()
    elif name == 'gcmt':
        from pyrocko.client.globalcmt import GlobalCMT
        return GlobalCMT()
    elif name == 'isc':
        from pyrocko.client.isc import ISC
        return ISC()
    else:
        raise NoSuchCatalog(name)


class CatalogSource(Source):
    '''
    Squirrel data-source to transparently access online earthquake catalogs.

    The catalog source maintains and synchronizes a partial copy of the
    online catalog, e.g. of all events above a certain magnitude. The time
    span for which the local copy of the catalog should be kept up to date
    is managed automatically by Squirrel. Data is loaded and updated in
    chunks, as needed, in a just-in-time fashion. Data validity can
    optionally expire after a given period of time, and new data can be
    treated as preliminary. In both cases the information is refreshed as
    needed.
    '''
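
    # A minimal usage sketch (illustrative, not part of this module). The
    # source is configured with a catalog name and optional query arguments
    # and registered with a Squirrel instance, which then drives the
    # inventory updates. `Squirrel.add_source` is assumed to be the
    # registration entry point here:
    #
    #   from pyrocko.squirrel import Squirrel
    #
    #   sq = Squirrel()
    #   sq.add_source(CatalogSource(
    #       catalog='geofon',
    #       query_args={'magmin': '6.0'}))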

    catalog = String.T(
        help='Catalog name.')

    query_args = Dict.T(
        String.T(), String.T(),
        optional=True,
        help='Common arguments, which are appended to all queries, e.g. to '
             'constrain location, depth or magnitude ranges.')

    expires = Duration.T(
        optional=True,
        help='Expiration time [s]. Information older than this will be '
             'refreshed, i.e. queried again.')

    anxious = Duration.T(
        optional=True,
        help='Anxiety period [s]. Information will be treated as preliminary '
             'if it was younger than this at the time of its retrieval. '
             'Preliminary information is refreshed on each query relevant '
             'to it.')

    cache_path = String.T(
        optional=True,
        help='Directory path where the partial local copy of the catalog is '
             "kept. By default the Squirrel environment's cache directory is "
             'used.')
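
    # Hedged configuration sketch for the expiry settings above (values are
    # illustrative): information older than `expires` is queried again, and
    # information that was younger than `anxious` at retrieval time is
    # treated as preliminary and refreshed on each relevant query.
    #
    #   source = CatalogSource(
    #       catalog='gcmt',
    #       expires=3600.*24.*30.,  # refresh information older than 30 days
    #       anxious=3600.*24.*7.)   # treat the last week of data as
    #                               # preliminary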

    def __init__(self, catalog, query_args=None, **kwargs):
        Source.__init__(self, catalog=catalog, query_args=query_args, **kwargs)

        self._hash = self.make_hash()
        self._nevents_query_hint = 1000
        self._nevents_chunk_hint = 5000
        self._tquery = 3600.*24.
        self._tquery_limits = (3600., 3600.*24.*365.)

    def describe(self):
        return 'catalog:%s:%s' % (self.catalog, self.get_hash())

    def setup(self, squirrel, check=True):
        self._force_query_age_max = self.anxious
        self._catalog = get_catalog(self.catalog)

        self._cache_path = op.join(
            self.cache_path or squirrel._cache_path,
            'catalog',
            self.get_hash())

        util.ensuredir(self._cache_path)

        with LockDir(self._cache_path):
            self._load_chain()
            self._add_events_to_squirrel(squirrel)

    def make_hash(self):
        s = self.catalog
        if self.query_args is not None:
            s += ','.join(
                '%s:%s' % (k, self.query_args[k])
                for k in sorted(self.query_args.keys()))
        else:
            s += 'noqueryargs'

        return ehash(s)

    def get_hash(self):
        return self._hash

    def update_event_inventory(self, squirrel, constraint=None):

        with LockDir(self._cache_path):
            self._load_chain()

            assert constraint is not None
            if constraint is not None:
                tmin, tmax = constraint.tmin, constraint.tmax

            tmin_sq, tmax_sq = squirrel.get_time_span(dummy_limits=False)

            if tmin is None:
                tmin = tmin_sq

            if tmax is None:
                tmax = tmax_sq

            if tmin is None or tmax is None:
                logger.warning(
                    'Cannot query catalog source "%s" without time '
                    'constraint. Could not determine appropriate time '
                    'constraint from current data holdings (no data?).'
                    % self.catalog)

                return

            if tmin >= tmax:
                tmax = tmin

            tnow = time.time()
            modified = False

            if tmax > tnow:
                tmax = tnow

            # First pass: insert fresh links so that the chain covers the
            # requested time span [tmin, tmax] without gaps.
            chain = []
            this_tmin = tmin
            for link in self._chain:
                if this_tmin < link.tmin and tmax > this_tmin:
                    chain.append(Link(this_tmin, min(tmax, link.tmin), tnow))
                    modified = True

                chain.append(link)
                this_tmin = link.tmax

            if this_tmin < tmax:
                chain.append(Link(this_tmin, tmax, tnow))
                modified = True

            if modified:
                self._chain = chain

            # Second pass: re-query links which overlap the requested span
            # and are outdated (never filled, expired, or preliminary).
            chain = []
            remove = []
            for link in self._chain:
                if tmin < link.tmax and link.tmin < tmax \
                        and self._outdated(link, tnow):

                    if link.content_id:
                        remove.append(
                            self._get_events_file_path(link.content_id))

                    tmin_query = max(link.tmin, tmin)
                    tmax_query = min(link.tmax, tmax)

                    if link.tmin < tmin_query:
                        chain.append(Link(link.tmin, tmin_query, tnow))

                    if tmin_query < tmax_query:
                        for link in self._iquery(tmin_query, tmax_query, tnow):
                            chain.append(link)

                    if tmax_query < link.tmax:
                        chain.append(Link(tmax_query, link.tmax, tnow))

                    modified = True

                else:
                    chain.append(link)

            if modified:
                self._chain = chain
                self._dump_chain()
                squirrel.remove(remove)

            self._add_events_to_squirrel(squirrel)

    def _add_events_to_squirrel(self, squirrel):
        add = []
        for link in self._chain:
            if link.content_id:
                add.append(self._get_events_file_path(link.content_id))

        squirrel.add(add, kinds=['event'], format='yaml')

    def _iquery(self, tmin, tmax, tmodified):

        nwant = self._nevents_query_hint
        tlim = self._tquery_limits

        t = tmin
        tpack_min = tmin

        events = []
        with progress.task(
                'Querying %s' % self.catalog, 100, logger=logger) as task:

            while t < tmax:
                tmin_query = t
                tmax_query = min(t + self._tquery, tmax)

                events_new = self._query(tmin_query, tmax_query)
                nevents_new = len(events_new)
                events.extend(events_new)
                while len(events) > int(self._nevents_chunk_hint * 1.5):
                    tpack_max = events[self._nevents_chunk_hint].time
                    yield self._pack(
                        events[:self._nevents_chunk_hint],
                        tpack_min, tpack_max, tmodified)

                    tpack_min = tpack_max
                    events[:self._nevents_query_hint] = []

                t += self._tquery

                # Adapt the query window so that roughly `nwant` events are
                # returned per request, clamped to the configured limits.
                if tmax_query != tmax:
                    if nevents_new < 5:
                        self._tquery *= 10.0

                    elif not (nwant // 2 < nevents_new < nwant * 2):
                        self._tquery /= float(nevents_new) / float(nwant)

                self._tquery = max(tlim[0], min(self._tquery, tlim[1]))
                task.update(int(round(100. * (t-tmin)/(tmax-tmin))))

        if self._force_query_age_max is not None:
            tsplit = tmodified - self._force_query_age_max
            if tpack_min < tsplit < tmax:
                events_older = []
                events_newer = []
                for ev in events:
                    if ev.time < tsplit:
                        events_older.append(ev)
                    else:
                        events_newer.append(ev)

                yield self._pack(
                    events_older, tpack_min, tsplit, tmodified)
                yield self._pack(
                    events_newer, tsplit, tmax, tmodified)
                return

        yield self._pack(events, tpack_min, tmax, tmodified)

    def _pack(self, events, tmin, tmax, tmodified):
        if events:
            content_id = ehash(
                self.get_hash() + ' %r %r %r' % (tmin, tmax, tmodified))
            path = self._get_events_file_path(content_id)
            dump_all(events, filename=path)
        else:
            content_id = None

        return Link(tmin, tmax, tmodified, len(events), content_id)

    def query_args_typed(self):
        if self.query_args is None:
            return {}
        else:
            type_map = {
                'magmin': float,
                'magmax': float,
                'latmin': float,
                'latmax': float,
                'lonmin': float,
                'lonmax': float,
                'depthmin': float,
                'depthmax': float}

            return dict(
                (k, type_map.get(k, str)(v))
                for (k, v) in self.query_args.items())

    def _query(self, tmin, tmax):
        logger.info('Querying catalog "%s" for time span %s - %s.' % (
            self.catalog, util.tts(tmin), util.tts(tmax)))

        return self._catalog.get_events(
            (tmin, tmax),
            **self.query_args_typed())

    def _outdated(self, link, tnow):
        if link.nevents == -1:
            return True

        if self._force_query_age_max \
                and link.tmax + self._force_query_age_max > link.tmodified:
            return True

        if self.expires is not None \
                and link.tmodified < tnow - self.expires:
            return True

        return False

    def _get_events_file_path(self, fhash):
        return op.join(self._cache_path, fhash + '.pf')

    def _get_chain_file_path(self):
        return op.join(self._cache_path, 'chain.pickle')

    def _load_chain(self):
        path = self._get_chain_file_path()
        if op.exists(path):
            with open(path, 'rb') as f:
                self._chain = pickle.load(f)
        else:
            self._chain = []

    def _dump_chain(self):
        with open(self._get_chain_file_path(), 'wb') as f:
            pickle.dump(self._chain, f, protocol=2)


__all__ = [
    'CatalogSource'
]