Hazard

Helpers for querying Hazard Realizations and related models.

Provides efficient queries for the models: HazardAggregation, OpenquakeRealization & ToshiOpenquakeMeta.

Functions:

  • get_hazard_metadata_v3() - returns iterator of matching ToshiOpenquakeMeta objects.
  • get_rlz_curves_v3() - returns iterator of matching OpenquakeRealization objects.
  • get_hazard_curves() - returns iterator of HazardAggregation objects.
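
Each helper returns a generator, so results are fetched lazily as you iterate. A minimal usage sketch (assuming the helpers are imported from toshi_hazard_store.query.hazard_query, the module shown in the source paths below; locations and model ids are illustrative):

from toshi_hazard_store.query.hazard_query import get_hazard_curves

curves = get_hazard_curves(
    locs=['-46.430~168.360'],           # coded location strings
    vs30s=[250, 350, 500],
    hazard_model_ids=['NSHM_V1.0.4'],
    imts=['PGA', 'SA(0.5)'],
    aggs=['mean', '0.9'],               # optional; defaults to ["mean", "0.1"]
)
for curve in curves:                    # yields HazardAggregation models
    print(curve)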

get_hazard_curves(locs, vs30s, hazard_model_ids, imts, aggs=None)

Query the HazardAggregation table for matching hazard curves.

Examples:

>>> get_hazard_curves(
        locs=['-46.430~168.360'],
        vs30s=[250, 350, 500],
        hazard_model_ids=['NSHM_V1.0.4'],
        imts=['PGA', 'SA(0.5)']
    )
<generator object get_hazard_curves at 0x7f115d67be60>

Parameters:

| Name             | Type                       | Description                                         | Default  |
| ---------------- | -------------------------- | --------------------------------------------------- | -------- |
| locs             | Iterable[str]              | coded location strings e.g. ['-46.430~168.360']     | required |
| vs30s            | Iterable[int]              | vs30 values e.g. [400, 500]                         | required |
| hazard_model_ids | Iterable[str]              | hazard model ids e.g. ['NSHM_V1.0.4']               | required |
| imts             | Iterable[str]              | IntensityMeasureType values e.g. ['PGA', 'SA(0.5)'] | required |
| aggs             | Union[Iterable[str], None] | aggregation values e.g. ['mean', '0.9']             | None     |

Yields:

| Type              | Description                                           |
| ----------------- | ----------------------------------------------------- |
| HazardAggregation | an iterator of the matching HazardAggregation models. |
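
A sketch of consuming the generator and reading some of the queried attributes (the attribute names vs30, imt, agg and hazard_model_id are those used in the filter expression in the source below; other model fields are not shown here):

from toshi_hazard_store.query.hazard_query import get_hazard_curves

for curve in get_hazard_curves(
    locs=['-46.430~168.360'],
    vs30s=[400],
    hazard_model_ids=['NSHM_V1.0.4'],
    imts=['PGA'],
):  # aggs defaults to ["mean", "0.1"]
    print(curve.hazard_model_id, curve.vs30, curve.imt, curve.agg)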

Source code in toshi_hazard_store/query/hazard_query.py
def get_hazard_curves(
    locs: Iterable[str],
    vs30s: Iterable[int],
    hazard_model_ids: Iterable[str],
    imts: Iterable[str],
    aggs: Union[Iterable[str], None] = None,
) -> Iterator[HazardAggregation]:
    """Query the HazardAggregation table for matching hazard curves.

    Examples:
        >>> get_hazard_curves(
                locs=['-46.430~168.360'],
                vs30s=[250, 350, 500],
                hazard_model_ids=['NSHM_V1.0.4'],
                imts=['PGA', 'SA(0.5)']
            )
        <generator object get_hazard_curves at 0x7f115d67be60>

    Parameters:
        locs: coded location strings e.g. ['-46.430~168.360']
        vs30s: vs30 values e.g. [400, 500]
        hazard_model_ids: hazard model ids e.g. ['NSHM_V1.0.4']
        imts: IntensityMeasureType values e.g. ['PGA', 'SA(0.5)']
        aggs: aggregation values e.g. ['mean', '0.9']

    Yields:
        an iterator of the matching HazardAggregation models.
    """
    aggs = aggs or ["mean", "0.1"]

    log.info("get_hazard_curves(%s)" % locs)

    # table classes may be rebased, this makes sure we always get the current class definition.
    mHAG = openquake_models.__dict__['HazardAggregation']
    log.debug(f"mHAG.__bases__ : {mHAG.__bases__}")

    def build_condition_expr(loc, vs30, hid, agg):
        """Build the filter condition expression."""
        grid_res = decimal.Decimal(str(loc.split('~')[0]))
        places = grid_res.as_tuple().exponent

        res = float(decimal.Decimal(10) ** places)
        loc = downsample_code(loc, res)

        expr = None

        if places == -1:
            expr = mHAG.nloc_1 == loc
        elif places == -2:
            expr = mHAG.nloc_01 == loc
        elif places == -3:
            expr = mHAG.nloc_001 == loc
        else:
            assert 0

        return expr & (mHAG.vs30 == vs30) & (mHAG.imt == imt) & (mHAG.agg == agg) & (mHAG.hazard_model_id == hid)

    # TODO: use https://pypi.org/project/InPynamoDB/
    total_hits = 0
    for hash_location_code in get_hashes(locs):
        partition_hits = 0
        log.info('hash_key %s' % hash_location_code)
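        # keep only the locations whose 0.1 degree (partition) code matches this hash key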
        hash_locs = list(filter(lambda loc: downsample_code(loc, 0.1) == hash_location_code, locs))

        for hloc, hid, vs30, imt, agg in itertools.product(hash_locs, hazard_model_ids, vs30s, imts, aggs):

            sort_key_first_val = f"{hloc}:{vs30}:{imt}:{agg}:{hid}"
            condition_expr = build_condition_expr(hloc, vs30, hid, agg)

            log.debug('sort_key_first_val: %s' % sort_key_first_val)
            log.debug('condition_expr: %s' % condition_expr)

            results = mHAG.query(
                hash_key=hash_location_code,
                range_key_condition=mHAG.sort_key == sort_key_first_val,
                filter_condition=condition_expr,
                # limit=10,
                # rate_limit=None,
                # last_evaluated_key=None
            )

            log.debug("get_hazard_rlz_curves_v3: results %s" % results)
            for hit in results:
                partition_hits += 1
                yield (hit)

        total_hits += partition_hits
        log.info('hash_key %s has %s hits' % (hash_location_code, partition_hits))

    log.info('Total %s hits' % total_hits)
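
The filter expression above selects the nloc_1, nloc_01 or nloc_001 attribute according to the number of decimal places in the requested location code. A standalone sketch of that decimal-places logic (plain Python, no library models involved):

import decimal

loc = '-46.430~168.360'
grid_res = decimal.Decimal(loc.split('~')[0])  # Decimal('-46.430')
places = grid_res.as_tuple().exponent          # -3, i.e. three decimal places
res = float(decimal.Decimal(10) ** places)     # 0.001, the grid resolution
# places == -3 means build_condition_expr filters on the nloc_001 attribute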

get_hazard_metadata_v3(haz_sol_ids, vs30_vals)

Query the ToshiOpenquakeMeta table

Parameters:

| Name        | Type          | Description                   | Default  |
| ----------- | ------------- | ----------------------------- | -------- |
| haz_sol_ids | Iterable[str] | list of solution ids to fetch | required |
| vs30_vals   | Iterable[int] | vs30 values e.g. [400, 500]   | required |

Yields:

| Type               | Description                                            |
| ------------------ | ------------------------------------------------------ |
| ToshiOpenquakeMeta | an iterator of the matching ToshiOpenquakeMeta objects |
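
A minimal usage sketch (the solution id 'XXZ' is the placeholder used elsewhere on this page; any valid toshi hazard solution id applies):

from toshi_hazard_store.query.hazard_query import get_hazard_metadata_v3

for meta in get_hazard_metadata_v3(
    haz_sol_ids=['XXZ'],   # toshi hazard solution ids (placeholder)
    vs30_vals=[400, 500],
):
    print(meta)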

Source code in toshi_hazard_store/query/hazard_query.py
def get_hazard_metadata_v3(haz_sol_ids: Iterable[str], vs30_vals: Iterable[int]) -> Iterator[ToshiOpenquakeMeta]:
    """Query the ToshiOpenquakeMeta table

    Parameters:
        haz_sol_ids: list of solution ids to fetch
        vs30_vals: vs30 values e.g. [400, 500]

    Yields:
        an iterator of the matching ToshiOpenquakeMeta objects
    """
    mOQM = openquake_models.__dict__['ToshiOpenquakeMeta']

    total_hits = 0
    for tid, vs30 in itertools.product(haz_sol_ids, vs30_vals):
        sort_key_val = f"{tid}:{vs30}"
        log.info('sort_key_val: %s' % sort_key_val)

        for hit in mOQM.query(
            "ToshiOpenquakeMeta",  # NB the partition key is the table name!
            range_key_condition=(mOQM.hazsol_vs30_rk == sort_key_val),
        ):
            total_hits += 1
            yield (hit)

    log.info('Total %s hits' % total_hits)

get_rlz_curves_v3(locs, vs30s, rlzs, tids, imts)

Query the OpenquakeRealization table.

Parameters:

| Name  | Type          | Description                                               | Default  |
| ----- | ------------- | --------------------------------------------------------- | -------- |
| locs  | Iterable[str] | coded location strings e.g. ['-46.430~168.360']           | required |
| vs30s | Iterable[int] | vs30 values e.g. [400, 500]                               | required |
| rlzs  | Iterable[int] | realizations e.g. [0, 1, 2, 3]                            | required |
| tids  | Iterable[str] | toshi hazard_solution_ids e.g. ['XXZ']                    | required |
| imts  | Iterable[str] | imt (IntensityMeasureType) values e.g. ['PGA', 'SA(0.5)'] | required |

Yields:

| Type                 | Description                               |
| -------------------- | ----------------------------------------- |
| OpenquakeRealization | the matching OpenquakeRealization models  |
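
A minimal usage sketch (solution id and realization numbers are illustrative; note from the source below that imts is applied client-side to filter the values attached to each returned realization):

from toshi_hazard_store.query.hazard_query import get_rlz_curves_v3

for rlz in get_rlz_curves_v3(
    locs=['-46.430~168.360'],
    vs30s=[400],
    rlzs=[0, 1, 2, 3],
    tids=['XXZ'],              # toshi hazard_solution_ids (placeholder)
    imts=['PGA', 'SA(0.5)'],
):
    # rlz.values is filtered down to the requested IMTs (see source below)
    print(rlz.hazard_solution_id, rlz.rlz, len(rlz.values))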

Source code in toshi_hazard_store/query/hazard_query.py
def get_rlz_curves_v3(
    locs: Iterable[str],
    vs30s: Iterable[int],
    rlzs: Iterable[int],
    tids: Iterable[str],
    imts: Iterable[str],
    # model=None,
) -> Iterator[OpenquakeRealization]:
    """Query the OpenquakeRealization table.

    Parameters:
        locs: coded location strings e.g. ['-46.430~168.360']
        vs30s: vs30 values e.g. [400, 500]
        rlzs: realizations e.g. [0, 1, 2, 3]
        tids: toshi hazard_solution_ids e.g. ['XXZ']
        imts: imt (IntensityMeasureType) values e.g. ['PGA', 'SA(0.5)']

    Yields:
        the matching OpenquakeRealization models
    """

    # table classes may be rebased, this makes sure we always get the current class definition.
    mRLZ = openquake_models.__dict__['OpenquakeRealization']

    def build_condition_expr(loc, vs30, rlz, tid):
        """Build the filter condition expression."""
        grid_res = decimal.Decimal(str(loc.split('~')[0]))
        places = grid_res.as_tuple().exponent

        res = float(decimal.Decimal(10) ** places)
        loc = downsample_code(loc, res)

        expr = None

        if places == -1:
            expr = mRLZ.nloc_1 == loc
        elif places == -2:
            expr = mRLZ.nloc_01 == loc
        elif places == -3:
            expr = mRLZ.nloc_001 == loc
        else:
            assert 0

        return expr & (mRLZ.vs30 == vs30) & (mRLZ.rlz == rlz) & (mRLZ.hazard_solution_id == tid)

    total_hits = 0
    for hash_location_code in get_hashes(locs):
        partition_hits = 0
        log.debug('hash_key %s' % hash_location_code)
        hash_locs = list(filter(lambda loc: downsample_code(loc, 0.1) == hash_location_code, locs))

        for hloc, tid, vs30, rlz in itertools.product(hash_locs, tids, vs30s, rlzs):

            sort_key_first_val = f"{hloc}:{vs30}:{str(rlz).zfill(6)}:{tid}"
            condition_expr = build_condition_expr(hloc, vs30, rlz, tid)

            log.debug('sort_key_first_val: %s' % sort_key_first_val)
            log.debug('condition_expr: %s' % condition_expr)

            results = mRLZ.query(
                hash_location_code,
                mRLZ.sort_key == sort_key_first_val,
                filter_condition=condition_expr,
            )

            # print(f"get_hazard_rlz_curves_v3: qry {qry}")
            log.debug("get_hazard_rlz_curves_v3: results %s" % results)
            for hit in results:
                partition_hits += 1
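                # IMT filtering is applied client-side: keep only values whose imt is in imts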
                hit.values = list(filter(lambda x: x.imt in imts, hit.values))
                yield (hit)

        total_hits += partition_hits
        log.debug('hash_key %s has %s hits' % (hash_location_code, partition_hits))

    log.info('Total %s hits' % total_hits)