Coverage for /usr/local/lib/python3.11/dist-packages/grond/problems/dynamic_rupture/problem.py: 31%

354 statements

coverage.py v6.5.0, created at 2023-10-25 10:12 +0000

import numpy as num
import logging

from pyrocko import gf, util
from pyrocko.guts import String, Float, Dict, Int, Bool, StringChoice
from pyrocko.guts_array import Array
from pyrocko.gf.seismosizer import map_anchor

from grond.meta import expand_template, Parameter, has_get_plot_classes, \
    GrondError

from ..base import Problem, ProblemConfig
from .. import CMTProblem

guts_prefix = 'grond'

logger = logging.getLogger('grond.problems.dynamic_rupture.problem')
km = 1e3
d2r = num.pi / 180.
as_km = dict(scale_factor=km, scale_unit='km')
as_gpa = dict(scale_factor=1e9, scale_unit='GPa')

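# Orientation note (comment added, not in the original module): this module
# defines Grond optimisation problems for pseudo dynamic rupture (PDR)
# sources: a single-segment DynamicRuptureProblem and two- and three-segment
# variants (DoublePDRProblem, TriplePDRProblem), each paired with a
# ProblemConfig that builds the problem from a pyrocko event.
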

class DynamicRuptureProblemConfig(ProblemConfig):

    ranges = Dict.T(String.T(), gf.Range.T())

    anchor = StringChoice.T(
        choices=tuple(map_anchor.keys()),
        default='top')

    decimation_factor = Int.T(
        default=1,
        help='Decimation factor of the discretized sub-faults')
    distance_min = Float.T(default=0.)
    nthreads = Int.T(default=1)
    adaptive_resolution = StringChoice.T(
        choices=('off', 'linear', 'uniqueness'),
        default='off')
    adaptive_start = Int.T(
        default=0)

    pure_shear = Bool.T(
        default=True)
    smooth_rupture = Bool.T(
        default=False)

    patch_mask = Array.T(
        optional=True,
        shape=(None,),
        dtype=bool,
        serialize_as='list')

    point_source_target_balancing = Bool.T(
        default=False,
        help='If ``True``, target balancing (if used) is performed on a '
             'moment tensor point source at the event\'s location. This '
             'increases speed, but may lead to not fully optimized target '
             'weights.')

    def get_problem(self, event, target_groups, targets):
        if self.decimation_factor != 1:
            logger.warning(
                'Decimation factor for dynamic rupture source set to %i.'
                ' Results may be inaccurate.' % self.decimation_factor)

        base_source = gf.PseudoDynamicRupture.from_pyrocko_event(
            event,
            anchor=self.anchor,
            nx=int(self.ranges['nx'].start),
            ny=int(self.ranges['ny'].start),
            decimation_factor=self.decimation_factor,
            nthreads=self.nthreads,
            pure_shear=self.pure_shear,
            smooth_rupture=self.smooth_rupture,
            patch_mask=self.patch_mask,
            stf=None)

        subs = dict(
            event_name=event.name,
            event_time=util.time_to_str(event.time))

        cmt_problem = None
        if self.point_source_target_balancing:
            base_source_cmt = gf.MTSource.from_pyrocko_event(event)

            stf = gf.HalfSinusoidSTF()
            stf.duration = event.duration or 0.0

            base_source_cmt.stf = stf

            ranges = dict(
                time=self.ranges['time'],
                north_shift=self.ranges['north_shift'],
                east_shift=self.ranges['east_shift'],
                depth=self.ranges['depth'],
                magnitude=gf.Range(
                    start=event.magnitude - 1.,
                    stop=event.magnitude + 1.),
                duration=gf.Range(start=0., stop=stf.duration * 2.),
                rmnn=gf.Range(start=-1.41421, stop=1.41421),
                rmee=gf.Range(start=-1.41421, stop=1.41421),
                rmdd=gf.Range(start=-1.41421, stop=1.41421),
                rmne=gf.Range(start=-1., stop=1.),
                rmnd=gf.Range(start=-1., stop=1.),
                rmed=gf.Range(start=-1., stop=1.))

            cmt_problem = CMTProblem(
                name=expand_template(self.name_template, subs),
                base_source=base_source_cmt,
                distance_min=self.distance_min,
                target_groups=target_groups,
                targets=targets,
                ranges=ranges,
                mt_type='dc',
                stf_type='HalfSinusoidSTF',
                norm_exponent=self.norm_exponent,
                nthreads=self.nthreads)

        problem = DynamicRuptureProblem(
            name=expand_template(self.name_template, subs),
            base_source=base_source,
            distance_min=self.distance_min,
            target_groups=target_groups,
            targets=targets,
            ranges=self.ranges,
            norm_exponent=self.norm_exponent,
            nthreads=self.nthreads,
            adaptive_resolution=self.adaptive_resolution,
            adaptive_start=self.adaptive_start,
            cmt_problem=cmt_problem)

        return problem

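# Illustrative sketch (not part of the original module): when driven from a
# Grond YAML configuration, the fields of DynamicRuptureProblemConfig above
# map to keys under ``problem_config``. Range values below are placeholders,
# not recommendations; only the key names are taken from this module.
#
#   problem_config: !grond.DynamicRuptureProblemConfig
#     name_template: 'dynamic_rupture_${event_name}'
#     norm_exponent: 2
#     anchor: top
#     nthreads: 4
#     adaptive_resolution: linear
#     adaptive_start: 1000
#     ranges:
#       time: '-10. .. 10. | add'
#       north_shift: '-20e3 .. 20e3'
#       east_shift: '-20e3 .. 20e3'
#       depth: '5e3 .. 30e3'
#       length: '10e3 .. 60e3'
#       width: '5e3 .. 20e3'
#       slip: '0.5 .. 5.'
#       strike: '0. .. 360.'
#       dip: '30. .. 90.'
#       rake: '-180. .. 180.'
#       gamma: '0.6 .. 1.0'
#       nucleation_x: '-1. .. 1.'
#       nucleation_y: '-1. .. 1.'
#       nx: '2 .. 10'
#       ny: '2 .. 10'
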

@has_get_plot_classes
class DynamicRuptureProblem(Problem):

    problem_parameters = [
        Parameter('east_shift', 'm', label='Easting', **as_km),
        Parameter('north_shift', 'm', label='Northing', **as_km),
        Parameter('depth', 'm', label='Depth', **as_km),
        Parameter('length', 'm', label='Length', **as_km),
        Parameter('width', 'm', label='Width', **as_km),
        Parameter('slip', 'm', label='Peak slip', optional=True),
        Parameter('strike', 'deg', label='Strike'),
        Parameter('dip', 'deg', label='Dip'),
        Parameter('rake', 'deg', label='Rake'),
        Parameter('gamma', 'vr/vs', label=r'$\gamma$'),
        Parameter('nx', 'patches', label='nx'),
        Parameter('ny', 'patches', label='ny')
    ]

    problem_waveform_parameters = [
        Parameter('nucleation_x', 'offset', label='Nucleation X'),
        Parameter('nucleation_y', 'offset', label='Nucleation Y'),
        Parameter('time', 's', label='Time'),
    ]

    dependants = [
        Parameter('traction', 'Pa', label='Maximum traction'),
        Parameter('magnitude', label='Magnitude'),
    ]

    distance_min = Float.T(default=0.0)
    adaptive_resolution = String.T()
    adaptive_start = Int.T()

    cmt_problem = Problem.T(optional=True)

    def __init__(self, **kwargs):
        Problem.__init__(self, **kwargs)
        self.deps_cache = {}
        self.minmax_cache = {}

    def set_engine(self, engine):
        self._engine = engine

        if self.cmt_problem is not None:
            self.cmt_problem.set_engine(engine)

    def pack(self, source):
        arr = self.get_parameter_array(source)
        for ip, p in enumerate(self.parameters):
            if p.name == 'time':
                arr[ip] -= self.base_source.time
        return arr

    def get_source(self, x):
        src = self.base_source

        d = self.get_parameter_dict(x)
        p = {k: float(self.ranges[k].make_relative(src[k], d[k]))
             for k in src.keys() if k in d}

        p['nx'] = int(p['nx'])
        p['ny'] = int(p['ny'])

        if p['nx'] != src.nx or p['ny'] != src.ny:
            logger.info('refining patches to %dx%d', p['nx'], p['ny'])

            src.nx = p['nx']
            src.ny = p['ny']

        return src.clone(**p)

    def random_uniform(self, xbounds, rstate, fixed_magnitude=None):
        if fixed_magnitude is not None:
            raise GrondError(
                'Setting fixed magnitude in random model generation not '
                'supported for this type of problem.')

        x = num.zeros(self.nparameters)
        for i in range(self.nparameters):
            x[i] = rstate.uniform(xbounds[i, 0], xbounds[i, 1])

        return x

    def preconstrain(self, x, optimiser=None):
        nx = self.ranges['nx']
        ny = self.ranges['ny']

        if optimiser and self.adaptive_resolution == 'linear' and \
                optimiser.iiter >= self.adaptive_start:

            progress = (optimiser.iiter - self.adaptive_start - 1) / \
                (optimiser.niterations - self.adaptive_start)

            nx = num.floor(
                nx.start + progress * (nx.stop - nx.start + 1))
            ny = num.floor(
                ny.start + progress * (ny.stop - ny.start + 1))

        elif optimiser and self.adaptive_resolution == 'uniqueness' and \
                optimiser.iiter >= self.adaptive_start:
            raise NotImplementedError

        else:
            nx = nx.start
            ny = ny.start

        idx_nx = self.get_parameter_index('nx')
        idx_ny = self.get_parameter_index('ny')
        x[idx_nx] = int(round(nx))
        x[idx_ny] = int(round(ny))

        return x

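    # Worked example (comment added for clarity, not in the original source):
    # with adaptive_resolution == 'linear', preconstrain() above grows the
    # patch counts linearly from ``start`` towards ``stop`` over the run.
    # For adaptive_start=0, niterations=10000 and an nx range of '2 .. 10':
    #
    #   iiter = 5000  ->  progress = (5000 - 0 - 1) / (10000 - 0) = 0.4999
    #   nx = floor(2 + 0.4999 * (10 - 2 + 1)) = floor(6.499) = 6
    #
    # so nx only reaches the upper bound of its range near the final
    # iterations of the optimisation.
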

    def make_dependant(self, xs, pname, store=None):
        cache = self.deps_cache
        if xs.ndim == 1:
            return self.make_dependant(xs[num.newaxis, :], pname)[0]

        if pname not in self.dependant_names:
            raise KeyError(pname)

        if store is None:
            store_ids = self.get_gf_store_ids()
            engine = self.get_engine()
            store = engine.get_store(store_ids[0])

        y = num.zeros(xs.shape[0])
        for i, x in enumerate(xs):
            k = tuple(x.tolist())
            if k not in cache:
                source = self.get_source(x)

                if num.isnan(source.slip):
                    source.slip = 0.

                source.discretize_basesource(store)

                tractions = source.get_scaled_tractions(store)
                traction = max(num.linalg.norm(tractions, axis=1))

                magnitude = source.get_magnitude(store=store)

                cache[k] = dict(
                    traction=traction,
                    magnitude=magnitude)

            res = cache[k]

            y[i] = res[pname]

        return y

    def get_dependant_bounds(self):
        cache = self.minmax_cache

        x_max_traction = self.get_min_model()

        for i, p in enumerate(self.problem_parameters):
            if p.name != 'slip':
                continue

            x_max_traction[i] = self.ranges[p.name].stop

        xs = (self.get_min_model(), self.get_max_model(), x_max_traction)

        if not any(tuple(x.tolist()) in cache for x in xs):
            store_ids = self.get_gf_store_ids()
            engine = self.get_engine()
            store = engine.get_store(store_ids[0])

            for x in xs:
                source = self.get_source(x)
                source.discretize_basesource(store)

                tractions = source.get_scaled_tractions(store)
                traction = max(num.linalg.norm(tractions, axis=1))

                magnitude = source.get_magnitude(store=store)

                cache[tuple(x.tolist())] = dict(
                    traction=traction,
                    magnitude=magnitude)

        traction = [cache[tuple(x.tolist())]['traction'] for x in xs]
        magnitude = [cache[tuple(x.tolist())]['magnitude'] for x in xs]

        out = [
            (min(traction), max(traction)),
            (min(magnitude), max(magnitude))]

        return out

    @classmethod
    def get_plot_classes(cls):
        from . import plot
        plots = super(DynamicRuptureProblem, cls).get_plot_classes()
        plots.extend([
            plot.DynamicRuptureSlipMap,
            plot.DynamicRuptureSTF,
            plot.DynamicRuptureMap])
        return plots


class DoublePDRProblemConfig(ProblemConfig):

    ranges = Dict.T(String.T(), gf.Range.T())

    decimation_factor = Int.T(
        default=1,
        help='Decimation factor of the discretized sub-faults')
    distance_min = Float.T(default=0.)
    nthreads = Int.T(default=1)
    adaptive_resolution = StringChoice.T(
        choices=('off', 'linear', 'uniqueness'),
        default='off')
    adaptive_start = Int.T(
        default=0)

    pure_shear = Bool.T(
        default=True)
    smooth_rupture = Bool.T(
        default=False)

    point_source_target_balancing = Bool.T(
        default=False,
        help='If ``True``, target balancing (if used) is performed on a '
             'moment tensor point source at the event\'s location. This '
             'increases speed, but may lead to not fully optimized target '
             'weights.')

    def get_problem(self, event, target_groups, targets):
        if self.decimation_factor != 1:
            logger.warning(
                'Decimation factor for dynamic rupture source set to %i.'
                ' Results may be inaccurate.' % self.decimation_factor)

        base_source = gf.DoublePDR.from_pyrocko_event(
            event,
            nx1=int(self.ranges['nx1'].start),
            ny1=int(self.ranges['ny1'].start),
            nx2=int(self.ranges['nx2'].start),
            ny2=int(self.ranges['ny2'].start),
            decimation_factor=self.decimation_factor,
            nthreads=self.nthreads,
            pure_shear=self.pure_shear,
            smooth_rupture=self.smooth_rupture,
            stf=None)

        subs = dict(
            event_name=event.name,
            event_time=util.time_to_str(event.time))

        cmt_problem = None
        if self.point_source_target_balancing:
            base_source_cmt = gf.MTSource.from_pyrocko_event(event)

            stf = gf.HalfSinusoidSTF()
            stf.duration = event.duration or 0.0

            base_source_cmt.stf = stf

            ranges = dict(
                time=self.ranges['time'],
                north_shift=self.ranges['north_shift'],
                east_shift=self.ranges['east_shift'],
                depth=self.ranges['depth'],
                magnitude=gf.Range(
                    start=event.magnitude - 1.,
                    stop=event.magnitude + 1.),
                duration=gf.Range(start=0., stop=stf.duration * 2.),
                rmnn=gf.Range(start=-1.41421, stop=1.41421),
                rmee=gf.Range(start=-1.41421, stop=1.41421),
                rmdd=gf.Range(start=-1.41421, stop=1.41421),
                rmne=gf.Range(start=-1., stop=1.),
                rmnd=gf.Range(start=-1., stop=1.),
                rmed=gf.Range(start=-1., stop=1.))

            cmt_problem = CMTProblem(
                name=expand_template(self.name_template, subs),
                base_source=base_source_cmt,
                distance_min=self.distance_min,
                target_groups=target_groups,
                targets=targets,
                ranges=ranges,
                mt_type='dc',
                stf_type='HalfSinusoidSTF',
                norm_exponent=self.norm_exponent,
                nthreads=self.nthreads)

        problem = DoublePDRProblem(
            name=expand_template(self.name_template, subs),
            base_source=base_source,
            distance_min=self.distance_min,
            target_groups=target_groups,
            targets=targets,
            ranges=self.ranges,
            norm_exponent=self.norm_exponent,
            nthreads=self.nthreads,
            adaptive_resolution=self.adaptive_resolution,
            adaptive_start=self.adaptive_start,
            cmt_problem=cmt_problem)

        return problem


@has_get_plot_classes
class DoublePDRProblem(Problem):

    problem_parameters = [
        Parameter('east_shift', 'm', label='Easting', **as_km),
        Parameter('north_shift', 'm', label='Northing', **as_km),
        Parameter('depth', 'm', label='Depth', **as_km),
        Parameter('length1', 'm', label='Length 1', **as_km),
        Parameter('width1', 'm', label='Width 1', **as_km),
        Parameter('slip1', 'm', label='Peak slip 1', optional=True),
        Parameter('strike1', 'deg', label='Strike 1'),
        Parameter('dip1', 'deg', label='Dip 1'),
        Parameter('rake1', 'deg', label='Rake 1'),
        Parameter('gamma1', 'vr/vs', label=r'$\gamma$ 1'),
        Parameter('nx1', 'patches', label='nx 1'),
        Parameter('ny1', 'patches', label='ny 1'),
        Parameter('length2', 'm', label='Length 2', **as_km),
        Parameter('width2', 'm', label='Width 2', **as_km),
        Parameter('slip2', 'm', label='Peak slip 2', optional=True),
        Parameter('strike2', 'deg', label='Strike 2'),
        Parameter('dip2', 'deg', label='Dip 2'),
        Parameter('rake2', 'deg', label='Rake 2'),
        Parameter('gamma2', 'vr/vs', label=r'$\gamma$ 2'),
        Parameter('nx2', 'patches', label='nx 2'),
        Parameter('ny2', 'patches', label='ny 2'),
        Parameter('delta_time', 's', label=r'$\Delta$ Time'),
        Parameter('delta_depth', 'm', label=r'$\Delta$ Depth'),
        Parameter('azimuth', 'deg', label='Azimuth'),
        Parameter('distance', 'm', label='Distance'),
        Parameter('mix', label='Mix')
    ]

    problem_waveform_parameters = [
        Parameter('nucleation_x1', 'offset', label='Nucleation X 1'),
        Parameter('nucleation_y1', 'offset', label='Nucleation Y 1'),
        Parameter('nucleation_x2', 'offset', label='Nucleation X 2'),
        Parameter('nucleation_y2', 'offset', label='Nucleation Y 2'),
        Parameter('time', 's', label='Time'),
    ]

    dependants = []

    distance_min = Float.T(default=0.0)
    adaptive_resolution = String.T()
    adaptive_start = Int.T()

    cmt_problem = Problem.T(optional=True)

    def set_engine(self, engine):
        self._engine = engine

        if self.cmt_problem is not None:
            self.cmt_problem.set_engine(engine)

    def pack(self, source):
        arr = self.get_parameter_array(source)
        for ip, p in enumerate(self.parameters):
            if p.name == 'time':
                arr[ip] -= self.base_source.time
        return arr

    def get_source(self, x):
        src = self.base_source

        d = self.get_parameter_dict(x)
        p = {k: float(self.ranges[k].make_relative(src[k], d[k]))
             for k in src.keys() if k in d}

        p['nx1'] = int(p['nx1'])
        p['ny1'] = int(p['ny1'])

        if p['nx1'] != src.nx1 or p['ny1'] != src.ny1:
            logger.info('refining patches to %dx%d', p['nx1'], p['ny1'])

            src.nx1 = p['nx1']
            src.ny1 = p['ny1']

        p['nx2'] = int(p['nx2'])
        p['ny2'] = int(p['ny2'])

        if p['nx2'] != src.nx2 or p['ny2'] != src.ny2:
            logger.info('refining patches to %dx%d', p['nx2'], p['ny2'])

            src.nx2 = p['nx2']
            src.ny2 = p['ny2']

        return src.clone(**p)

    def random_uniform(self, xbounds, rstate, fixed_magnitude=None):
        if fixed_magnitude is not None:
            raise GrondError(
                'Setting fixed magnitude in random model generation not '
                'supported for this type of problem.')

        x = num.zeros(self.nparameters)
        for i in range(self.nparameters):
            x[i] = rstate.uniform(xbounds[i, 0], xbounds[i, 1])

        return x

    def preconstrain(self, x, optimiser=None):
        nx1 = self.ranges['nx1']
        ny1 = self.ranges['ny1']
        nx2 = self.ranges['nx2']
        ny2 = self.ranges['ny2']

        if optimiser and self.adaptive_resolution == 'linear' and \
                optimiser.iiter >= self.adaptive_start:

            progress = (optimiser.iiter - self.adaptive_start - 1) / \
                (optimiser.niterations - self.adaptive_start)

            nx1 = num.floor(
                nx1.start + progress * (nx1.stop - nx1.start + 1))
            ny1 = num.floor(
                ny1.start + progress * (ny1.stop - ny1.start + 1))
            nx2 = num.floor(
                nx2.start + progress * (nx2.stop - nx2.start + 1))
            ny2 = num.floor(
                ny2.start + progress * (ny2.stop - ny2.start + 1))

        elif optimiser and self.adaptive_resolution == 'uniqueness' and \
                optimiser.iiter >= self.adaptive_start:
            raise NotImplementedError

        else:
            nx1 = nx1.start
            ny1 = ny1.start
            nx2 = nx2.start
            ny2 = ny2.start

        idx_nx1 = self.get_parameter_index('nx1')
        idx_ny1 = self.get_parameter_index('ny1')
        idx_nx2 = self.get_parameter_index('nx2')
        idx_ny2 = self.get_parameter_index('ny2')
        x[idx_nx1] = int(round(nx1))
        x[idx_ny1] = int(round(ny1))
        x[idx_nx2] = int(round(nx2))
        x[idx_ny2] = int(round(ny2))

        return x

    @classmethod
    def get_plot_classes(cls):
        plots = super(DoublePDRProblem, cls).get_plot_classes()

        return plots


class TriplePDRProblemConfig(ProblemConfig):

    ranges = Dict.T(String.T(), gf.Range.T())

    decimation_factor = Int.T(
        default=1,
        help='Decimation factor of the discretized sub-faults')
    distance_min = Float.T(default=0.)
    nthreads = Int.T(default=1)
    adaptive_resolution = StringChoice.T(
        choices=('off', 'linear', 'uniqueness'),
        default='off')
    adaptive_start = Int.T(
        default=0)

    pure_shear = Bool.T(
        default=True)
    smooth_rupture = Bool.T(
        default=False)

    def get_problem(self, event, target_groups, targets):
        if self.decimation_factor != 1:
            logger.warning(
                'Decimation factor for dynamic rupture source set to %i.'
                ' Results may be inaccurate.' % self.decimation_factor)

        base_source = gf.TriplePDR.from_pyrocko_event(
            event,
            nx1=int(self.ranges['nx1'].start),
            ny1=int(self.ranges['ny1'].start),
            nx2=int(self.ranges['nx2'].start),
            ny2=int(self.ranges['ny2'].start),
            nx3=int(self.ranges['nx3'].start),
            ny3=int(self.ranges['ny3'].start),
            decimation_factor=self.decimation_factor,
            nthreads=self.nthreads,
            pure_shear=self.pure_shear,
            smooth_rupture=self.smooth_rupture,
            stf=None)

        subs = dict(
            event_name=event.name,
            event_time=util.time_to_str(event.time))

        cmt_problem = None

        problem = TriplePDRProblem(
            name=expand_template(self.name_template, subs),
            base_source=base_source,
            distance_min=self.distance_min,
            target_groups=target_groups,
            targets=targets,
            ranges=self.ranges,
            norm_exponent=self.norm_exponent,
            nthreads=self.nthreads,
            adaptive_resolution=self.adaptive_resolution,
            adaptive_start=self.adaptive_start,
            cmt_problem=cmt_problem)

        return problem


@has_get_plot_classes
class TriplePDRProblem(Problem):

    problem_parameters = [
        Parameter('delta_time1', 's', label=r'$\Delta$Time 1'),
        Parameter('north_shift1', 'm', label='Northing 1', **as_km),
        Parameter('east_shift1', 'm', label='Easting 1', **as_km),
        Parameter('depth1', 'm', label='Depth 1', **as_km),
        Parameter('length1', 'm', label='Length 1', **as_km),
        Parameter('width1', 'm', label='Width 1', **as_km),
        Parameter('slip1', 'm', label='Peak slip 1', optional=True),
        Parameter('strike1', 'deg', label='Strike 1'),
        Parameter('dip1', 'deg', label='Dip 1'),
        Parameter('rake1', 'deg', label='Rake 1'),
        Parameter('gamma1', 'vr/vs', label=r'$\gamma$ 1'),
        Parameter('nx1', 'patches', label='nx 1'),
        Parameter('ny1', 'patches', label='ny 1'),
        Parameter('delta_time2', 's', label=r'$\Delta$Time 2'),
        Parameter('north_shift2', 'm', label='Northing 2', **as_km),
        Parameter('east_shift2', 'm', label='Easting 2', **as_km),
        Parameter('depth2', 'm', label='Depth 2', **as_km),
        Parameter('length2', 'm', label='Length 2', **as_km),
        Parameter('width2', 'm', label='Width 2', **as_km),
        Parameter('slip2', 'm', label='Peak slip 2', optional=True),
        Parameter('strike2', 'deg', label='Strike 2'),
        Parameter('dip2', 'deg', label='Dip 2'),
        Parameter('rake2', 'deg', label='Rake 2'),
        Parameter('gamma2', 'vr/vs', label=r'$\gamma$ 2'),
        Parameter('nx2', 'patches', label='nx 2'),
        Parameter('ny2', 'patches', label='ny 2'),
        Parameter('delta_time3', 's', label=r'$\Delta$Time 3'),
        Parameter('north_shift3', 'm', label='Northing 3', **as_km),
        Parameter('east_shift3', 'm', label='Easting 3', **as_km),
        Parameter('depth3', 'm', label='Depth 3', **as_km),
        Parameter('length3', 'm', label='Length 3', **as_km),
        Parameter('width3', 'm', label='Width 3', **as_km),
        Parameter('slip3', 'm', label='Peak slip 3', optional=True),
        Parameter('strike3', 'deg', label='Strike 3'),
        Parameter('dip3', 'deg', label='Dip 3'),
        Parameter('rake3', 'deg', label='Rake 3'),
        Parameter('gamma3', 'vr/vs', label=r'$\gamma$ 3'),
        Parameter('nx3', 'patches', label='nx 3'),
        Parameter('ny3', 'patches', label='ny 3')
    ]

    problem_waveform_parameters = [
        Parameter('nucleation_x1', 'offset', label='Nucleation X 1'),
        Parameter('nucleation_y1', 'offset', label='Nucleation Y 1'),
        Parameter('nucleation_x2', 'offset', label='Nucleation X 2'),
        Parameter('nucleation_y2', 'offset', label='Nucleation Y 2'),
        Parameter('nucleation_x3', 'offset', label='Nucleation X 3'),
        Parameter('nucleation_y3', 'offset', label='Nucleation Y 3')
    ]

    dependants = []

    distance_min = Float.T(default=0.0)
    adaptive_resolution = String.T()
    adaptive_start = Int.T()

    cmt_problem = Problem.T(optional=True)

    def set_engine(self, engine):
        self._engine = engine

        if self.cmt_problem is not None:
            self.cmt_problem.set_engine(engine)

    def pack(self, source):
        return self.get_parameter_array(source)

    def get_source(self, x):
        src = self.base_source

        d = self.get_parameter_dict(x)
        p = {k: float(self.ranges[k].make_relative(src[k], d[k]))
             for k in src.keys() if k in d}

        p['nx1'] = int(p['nx1'])
        p['ny1'] = int(p['ny1'])

        if p['nx1'] != src.nx1 or p['ny1'] != src.ny1:
            logger.info('refining patches to %dx%d', p['nx1'], p['ny1'])

            src.nx1 = p['nx1']
            src.ny1 = p['ny1']

        p['nx2'] = int(p['nx2'])
        p['ny2'] = int(p['ny2'])

        if p['nx2'] != src.nx2 or p['ny2'] != src.ny2:
            logger.info('refining patches to %dx%d', p['nx2'], p['ny2'])

            src.nx2 = p['nx2']
            src.ny2 = p['ny2']

        p['nx3'] = int(p['nx3'])
        p['ny3'] = int(p['ny3'])

        if p['nx3'] != src.nx3 or p['ny3'] != src.ny3:
            logger.info('refining patches to %dx%d', p['nx3'], p['ny3'])

            src.nx3 = p['nx3']
            src.ny3 = p['ny3']

        return src.clone(**p)

    def random_uniform(self, xbounds, rstate, fixed_magnitude=None):
        if fixed_magnitude is not None:
            raise GrondError(
                'Setting fixed magnitude in random model generation not '
                'supported for this type of problem.')

        x = num.zeros(self.nparameters)
        for i in range(self.nparameters):
            x[i] = rstate.uniform(xbounds[i, 0], xbounds[i, 1])

        return x

    def preconstrain(self, x, optimiser=None):
        nx1 = self.ranges['nx1']
        ny1 = self.ranges['ny1']
        nx2 = self.ranges['nx2']
        ny2 = self.ranges['ny2']
        nx3 = self.ranges['nx3']
        ny3 = self.ranges['ny3']

        if optimiser and self.adaptive_resolution == 'linear' and \
                optimiser.iiter >= self.adaptive_start:

            progress = (optimiser.iiter - self.adaptive_start - 1) / \
                (optimiser.niterations - self.adaptive_start)

            nx1 = num.floor(
                nx1.start + progress * (nx1.stop - nx1.start + 1))
            ny1 = num.floor(
                ny1.start + progress * (ny1.stop - ny1.start + 1))
            nx2 = num.floor(
                nx2.start + progress * (nx2.stop - nx2.start + 1))
            ny2 = num.floor(
                ny2.start + progress * (ny2.stop - ny2.start + 1))
            nx3 = num.floor(
                nx3.start + progress * (nx3.stop - nx3.start + 1))
            ny3 = num.floor(
                ny3.start + progress * (ny3.stop - ny3.start + 1))

        elif optimiser and self.adaptive_resolution == 'uniqueness' and \
                optimiser.iiter >= self.adaptive_start:
            raise NotImplementedError

        else:
            nx1 = nx1.start
            ny1 = ny1.start
            nx2 = nx2.start
            ny2 = ny2.start
            nx3 = nx3.start
            ny3 = ny3.start

        idx_nx1 = self.get_parameter_index('nx1')
        idx_ny1 = self.get_parameter_index('ny1')
        idx_nx2 = self.get_parameter_index('nx2')
        idx_ny2 = self.get_parameter_index('ny2')
        idx_nx3 = self.get_parameter_index('nx3')
        idx_ny3 = self.get_parameter_index('ny3')
        x[idx_nx1] = int(round(nx1))
        x[idx_ny1] = int(round(ny1))
        x[idx_nx2] = int(round(nx2))
        x[idx_ny2] = int(round(ny2))
        x[idx_nx3] = int(round(nx3))
        x[idx_ny3] = int(round(ny3))

        return x

    @classmethod
    def get_plot_classes(cls):
        plots = super(TriplePDRProblem, cls).get_plot_classes()

        return plots


__all__ = '''
    DynamicRuptureProblem
    DynamicRuptureProblemConfig
    DoublePDRProblem
    DoublePDRProblemConfig
    TriplePDRProblem
    TriplePDRProblemConfig
'''.split()
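
# Usage sketch (added illustration; assumes a prepared Grond setup providing
# an event, target_groups/targets and a gf.LocalEngine, none of which are
# defined in this module):
#
#   config = DynamicRuptureProblemConfig(
#       name_template='dynamic_rupture_${event_name}',
#       norm_exponent=2,
#       ranges={...})  # gf.Range objects keyed by parameter name
#   problem = config.get_problem(event, target_groups, targets)
#   problem.set_engine(engine)
#   x = problem.preconstrain(problem.get_min_model())
#   source = problem.get_source(x)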