Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -54,18 +54,21 @@
'SA': [
'Chiou & Youngs (2014)',
'Abrahamson, Silva & Kamai (2014)',
'Abrahamson, Silva & Kamai (2014) Aftershock',
'Boore, Stewart, Seyhan & Atkinson (2014)',
'Campbell & Bozorgnia (2014)',
],
'PGA': [
'Chiou & Youngs (2014)',
'Abrahamson, Silva & Kamai (2014)',
'Abrahamson, Silva & Kamai (2014) Aftershock',
'Boore, Stewart, Seyhan & Atkinson (2014)',
'Campbell & Bozorgnia (2014)',
],
'PGV': [
'Chiou & Youngs (2014)',
'Abrahamson, Silva & Kamai (2014)',
'Abrahamson, Silva & Kamai (2014) Aftershock',
'Boore, Stewart, Seyhan & Atkinson (2014)',
'Campbell & Bozorgnia (2014)',
],
Expand Down Expand Up @@ -136,11 +139,16 @@ def __init__(
gmpe_weights_dict=dict(), # noqa: B006, C408
im_type=None,
site_info=dict(), # noqa: B006, C408
mainshock=None
):
# basic set-ups
self.set_im_gmpe(im_dict, gmpe_dict, gmpe_weights_dict)
self.set_im_type(im_type)
self.set_sites(site_info)
if mainshock is not None:
self.mainshock = mainshock.copy() # single row gdf containing mainshock rupture surface
else:
self.mainshock = None
# self.set_source(source_info)

def set_source(self, source_info): # noqa: D102
Expand All @@ -155,6 +163,7 @@ def set_source(self, source_info): # noqa: D102
or 'Abrahamson, Silva & Kamai (2014)' in gmpe_list
or 'Boore, Stewart, Seyhan & Atkinson (2014)' in gmpe_list
or 'Campbell & Bozorgnia (2014)' in gmpe_list
or 'Abrahamson, Silva & Kamai (2014) Aftershock' in gmpe_list
):
source_index = source_info.get('SourceIndex', None)
rupture_index = source_info.get('RuptureIndex', None)
Expand All @@ -163,6 +172,14 @@ def set_source(self, source_info): # noqa: D102
self.erf, source_index, rupture_index, self.site_info
)
# self.timeGetRuptureInfo += time.process_time_ns() - start
if 'Abrahamson, Silva & Kamai (2014) Aftershock' in gmpe_list:
source_index = source_info.get('SourceIndex', None)
rupture_index = source_info.get('RuptureIndex', None)
crJB = get_rupture_info_ASK2014_aftershock( # noqa: F405
self.erf, source_index, rupture_index, self.mainshock
)
site_rup_dict['crJB'] = crJB

elif source_info['Type'] == 'PointSource':
if (
'Chiou & Youngs (2014)' in gmpe_list
Expand Down Expand Up @@ -456,7 +473,7 @@ def get_im_from_local( # noqa: C901, D102
eq_magnitude, self.site_rup_dict, cur_site, im_info
)
# self.timeGetIM += time.process_time_ns() - start
elif cur_gmpe == 'Abrahamson, Silva & Kamai (2014)':
elif cur_gmpe == 'Abrahamson, Silva & Kamai (2014)' or cur_gmpe == 'Abrahamson, Silva & Kamai (2014) Aftershock':
# start = time.process_time_ns()
tmpResult = self.ASK.get_IM( # noqa: N806
eq_magnitude, self.site_rup_dict, cur_site, im_info
Expand Down Expand Up @@ -793,13 +810,17 @@ def compute_im( # noqa: C901, D103
sys.exit(
'SA is used in hazard downsampling but not defined in the intensity measure tab'
)
elif ho_period in im_info['SA'].get('Periods'):
pass
elif im_info.get('Periods', None) is not None:
if ho_period in im_info['Periods']:
pass
elif im_info.get('SA', None) is not None:
if ho_period in im_info['SA'].get('Periods'):
pass
else:
tmp_periods = im_info['SA']['Periods'] + [ho_period]
tmp_periods.sort()
im_info['SA']['Periods'] = tmp_periods
elif ho_period in im_info['SA'].get('Periods'):
elif ho_period in im_info.get('Periods'):
pass
else:
tmp_periods = im_info['SA']['Periods'] + [ho_period]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@
import sys
import psutil
import GlobalVariable
import shapely
import geopandas as gpd

if 'stampede2' not in socket.gethostname():
import GlobalVariable
Expand Down Expand Up @@ -395,6 +397,25 @@ def get_rupture_distance(erf, source_index, rupture_index, lat, lon): # noqa: D

return distToRupture

def get_rupture_info_ASK2014_aftershock(
    erf, source_index, rupture_indx, mainshock):
    """Compute crJB for the ASK2014 aftershock term (Eq. 20).

    Extracts the perimeter of the aftershock rupture surface from the ERF,
    projects it to a metric CRS, and returns the distance between the
    mainshock rupture geometry and the aftershock surface centroid.

    Parameters
    ----------
    erf : OpenSHA ERF (Java object)
        Earthquake rupture forecast providing source/rupture surfaces.
    source_index, rupture_indx : int
        Indices of the aftershock source and rupture within the ERF.
    mainshock : geopandas.GeoDataFrame
        Single-row GeoDataFrame holding the mainshock rupture geometry.
        NOTE(review): assumed to already be in a CRS consistent with
        EPSG:6417 (meter units) -- confirm against the caller.

    Returns
    -------
    float
        Centroid distance in kilometers (crJB).
    """
    rupSource = erf.getSource(source_index)  # noqa: N806
    rupList = rupSource.getRuptureList()  # noqa: N806
    rupSurface = rupList.get(rupture_indx).getRuptureSurface()
    rupSurface_perimeter = rupSurface.getPerimeter()
    coords = []
    for i in range(rupSurface_perimeter.size()):
        loc = rupSurface_perimeter.get(i)
        coords.append((loc.getLongitude(), loc.getLatitude()))
    # A valid shapely Polygon needs at least 3 vertices; degrade gracefully
    # to a Point (1 vertex) or LineString (2 vertices) instead of raising.
    if len(coords) == 1:
        rup_polygon = shapely.geometry.Point(coords[0])
    elif len(coords) == 2:  # noqa: PLR2004
        rup_polygon = shapely.geometry.LineString(coords)
    else:
        rup_polygon = shapely.geometry.Polygon(coords)
    rup_gdf = gpd.GeoDataFrame(index=[0], crs='EPSG:4326', geometry=[rup_polygon])
    # Project to a metric CRS before taking the centroid, so the centroid
    # and the distance below are computed in meters rather than degrees.
    rup_gdf = rup_gdf.to_crs(epsg=6417)
    centroid = rup_gdf.geometry.centroid.iloc[0]
    # distance() returns meters; ASK2014 Eq. 20 expects crJB in km.
    return mainshock.geometry.iloc[0].distance(centroid) / 1000  # in km


def get_rupture_info_CY2014(erf, source_index, rupture_index, siteList): # noqa: N802, N803, D103
rupSource = erf.getSource(source_index) # noqa: N806
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,14 +87,18 @@ def configure_hazard_occurrence( # noqa: C901, D103
# return periods
if hc_input is None:
return {}
elif hc_input == 'Inferred_NSHMP': # noqa: RET505
elif hc_input == 'NSHM V1' or hc_input == 'NSHM V2': # noqa: RET505
period = hzo_config.get('Period', 0.0)
if im_type == 'SA':
cur_imt = im_type + f'{period:.1f}'.replace('.', 'P')
else:
cur_imt = im_type
# fetching hazard curve from usgs
cur_edition = hzo_config.get('Edition', 'E2014')
version_str = hc_input.split(' ')[-1]
if version_str == 'V1':
cur_edition = 'E2008'
else:
cur_edition = 'E2014'
hazard_curve_collector = []
for site_id in range(len(site_config)):
cur_site = site_config[site_id]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
import numpy as np
from tqdm import tqdm
import geopandas as gpd
import shapely
import sys
import ujson as json

def get_rups_to_run(scenario_info, num_scenarios):  # noqa: D103
    """Resolve which rupture indices to run from the scenario configuration.

    Parameters
    ----------
    scenario_info : dict
        Scenario configuration; ``scenario_info['Generator']`` must declare
        ``method == 'MonteCarlo'`` and may carry a ``RuptureFilter`` string
        such as ``'1-3,7'`` (1-based indices and inclusive ranges).
    num_scenarios : int
        Total number of available ruptures (indices 0..num_scenarios-1).

    Returns
    -------
    list[int]
        Zero-based rupture indices to run, restricted to the available
        range.  (Previously the filtered branch returned a numpy array
        while the unfiltered branch returned a list; both now return a
        plain list.)
    """
    if scenario_info['Generator'].get('method', None) != 'MonteCarlo':
        sys.exit(
            f'The scenario selection method {scenario_info["Generator"].get("method", None)} is not available'
        )
    rup_filter = scenario_info['Generator'].get('RuptureFilter', None)
    # No filter (or an empty one) means "run everything".
    if rup_filter is None or len(rup_filter) == 0:
        return list(range(num_scenarios))
    rups_requested = []
    for rups in rup_filter.split(','):
        if '-' in rups:
            # Inclusive range, e.g. '2-5' -> 2, 3, 4, 5
            asset_low, asset_high = rups.split('-')
            rups_requested += list(range(int(asset_low), int(asset_high) + 1))
        else:
            rups_requested.append(int(rups))
    # The input index starts from 1, not 0
    rups_requested = np.array(rups_requested) - 1
    # Silently drop requested indices that do not exist.
    rups_available = np.arange(num_scenarios)
    return rups_requested[np.isin(rups_requested, rups_available)].tolist()

def load_earthquake_rupFile(scenario_info, rupFilePath):  # noqa: N802, N803, D103
    """Load rupture scenarios from a GeoJSON-style rupture file.

    Reads ``rupFilePath`` (JSON with a top-level ``features`` list), selects
    the ruptures requested by ``scenario_info`` via ``get_rups_to_run`` and
    returns a dict mapping rupture tag -> rupture metadata.  Exits the
    process if the file cannot be read or contains no features.
    """
    # Getting earthquake rupture forecast data
    source_type = scenario_info['EqRupture']['Type']
    try:
        with open(rupFilePath) as f:  # noqa: PTH123
            user_scenarios = json.load(f)
    except:  # noqa: E722
        # NOTE(review): the bare except also reports JSON parse errors as
        # "file not found" -- consider narrowing to (OSError, ValueError).
        sys.exit(f'CreateScenario: source file {rupFilePath} not found.')
    # number of features (i.e., ruptures)
    num_scenarios = len(user_scenarios.get('features', []))
    if num_scenarios < 1:
        sys.exit('CreateScenario: source file is empty.')
    rups_to_run = get_rups_to_run(scenario_info, num_scenarios)
    # get rupture and source ids
    scenario_data = {}
    if source_type == 'ERF':
        # source model
        source_model = scenario_info['EqRupture']['Model']
        for rup_tag in rups_to_run:
            # Each feature's 'properties' carries the rupture metadata;
            # missing optional fields default to None/'' below.
            cur_rup = user_scenarios.get('features')[rup_tag]
            cur_id_source = cur_rup.get('properties').get('Source', None)
            cur_id_rupture = cur_rup.get('properties').get('Rupture', None)
            scenario_data.update(
                {
                    rup_tag: {
                        'Type': source_type,
                        'RuptureForecast': source_model,
                        'Name': cur_rup.get('properties').get('Name', ''),
                        'Magnitude': cur_rup.get('properties').get(
                            'Magnitude', None
                        ),
                        'MeanAnnualRate': cur_rup.get('properties').get(
                            'MeanAnnualRate', None
                        ),
                        'SourceIndex': cur_id_source,
                        'RuptureIndex': cur_id_rupture,
                        'SiteSourceDistance': cur_rup.get('properties').get(
                            'Distance', None
                        ),
                        'SiteRuptureDistance': cur_rup.get('properties').get(
                            'DistanceRup', None
                        ),
                    }
                }
            )
    elif source_type == 'PointSource':
        sourceID = 0  # noqa: N806
        rupID = 0  # noqa: N806
        for rup_tag in rups_to_run:
            try:
                cur_rup = user_scenarios.get('features')[rup_tag]
                magnitude = cur_rup.get('properties')['Magnitude']
                location = cur_rup.get('properties')['Location']
                average_rake = cur_rup.get('properties')['AverageRake']
                average_dip = cur_rup.get('properties')['AverageDip']
                # NOTE(review): the dict key is hard-coded to 0, so when
                # several point-source features are selected only the last
                # one is kept (even though rupID keeps incrementing) --
                # confirm whether this single-entry behavior is intended.
                scenario_data.update(
                    {
                        0: {
                            'Type': source_type,
                            'Magnitude': magnitude,
                            'Location': location,
                            'AverageRake': average_rake,
                            'AverageDip': average_dip,
                            'SourceIndex': sourceID,
                            'RuptureIndex': rupID,
                        }
                    }
                )
                rupID = rupID + 1  # noqa: N806
            except:  # noqa: PERF203, E722
                # Best-effort: a malformed feature is reported and skipped.
                print('Please check point-source inputs.')  # noqa: T201
    # return
    return scenario_data

def load_earthquake_rup_scenario(scenario_info, user_scenarios):  # noqa: N802, N803, D103
    """Build the scenario dictionary from an already-loaded rupture table.

    Parameters
    ----------
    scenario_info : dict
        Scenario configuration; ``scenario_info['EqRupture']`` supplies the
        source ``Type`` (only ``'ERF'`` is handled here) and ``Model``.
    user_scenarios : pandas.DataFrame
        One row per rupture, with at least ``Source`` and ``Rupture``
        columns; optional columns (``Name``, ``Magnitude``, ...) default
        to ''/None below.

    Returns
    -------
    dict
        Mapping of rupture tag -> rupture metadata dict.
    """
    # Getting earthquake rupture forecast data
    source_type = scenario_info['EqRupture']['Type']
    # number of features (i.e., ruptures)
    num_scenarios = len(user_scenarios)
    if num_scenarios < 1:
        sys.exit('CreateScenario: source file is empty.')
    rups_to_run = get_rups_to_run(scenario_info, num_scenarios)
    # get rupture and source ids
    scenario_data = {}
    if source_type == 'ERF':
        # source model
        source_model = scenario_info['EqRupture']['Model']
        for rup_tag in rups_to_run:
            cur_rup = user_scenarios.iloc[rup_tag, :]
            # 'Source'/'Rupture' are required columns; the rest fall back
            # to defaults via Series.get.
            cur_id_source = cur_rup['Source']
            cur_id_rupture = cur_rup['Rupture']
            scenario_data[rup_tag] = {
                'Type': source_type,
                'RuptureForecast': source_model,
                'Name': cur_rup.get('Name', ''),
                'Magnitude': cur_rup.get('Magnitude', None),
                'MeanAnnualRate': cur_rup.get('MeanAnnualRate', None),
                'SourceIndex': cur_id_source,
                'RuptureIndex': cur_id_rupture,
                'SiteSourceDistance': cur_rup.get('Distance', None),
                'SiteRuptureDistance': cur_rup.get('DistanceRup', None),
            }
    return scenario_data
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,7 @@ class abrahamson_silva_kamai_2014: # noqa: D101
timeCalc = 0 # noqa: N815
supportedImt = None # noqa: N815

def __init__(self):
def __init__(self, aftershock=False):
self.coeff = pd.read_csv(
os.path.join(os.path.dirname(__file__), 'data', 'ASK14.csv') # noqa: PTH118, PTH120
)
Expand All @@ -343,6 +343,7 @@ def __init__(self):
self.H2 = 1.5
self.H3 = -0.75
self.PHI_AMP_SQ = 0.16
self.aftershock = aftershock

def setIMT(self, imt): # noqa: N802, D102
if imt not in self.supportedImt:
Expand Down Expand Up @@ -442,6 +443,7 @@ def calcValues( # noqa: C901, N802, D102
vsInferred, # noqa: N803
z1p0,
style,
crJB=None
):
if Mw > 5: # noqa: PLR2004
c4mag = self.C4
Expand Down Expand Up @@ -537,8 +539,16 @@ def calcValues( # noqa: C901, N802, D102
)
else:
f5 = (self.a10 + self.b * self.N) * np.log(vs30s / self.Vlin)
# total model (no aftershock f11) -- Equation 1
mean = f1 + f78 + f5 + f4 + f6 + f10

# Aftershock term -- Equation 20
if self.aftershock:
if crJB is None:
raise ValueError('crJB must be provided for aftershock calculations')
f11 = self.a14 * np.clip(1 - (crJB - 5)/10, 0, 1)
else:
f11 = 0.0
# total model -- Equation 1
mean = f1 + f78 + f5 + f4 + f6 + f10 + f11

# ****** Aleatory uncertainty model ******
# Intra-event term -- Equation 24
Expand Down Expand Up @@ -604,6 +614,7 @@ def get_IM(self, Mw, site_rup_dict, site_info, im_info): # noqa: N802, N803, D1
vsInf,
site_info['z1pt0'] / 1000.0,
style,
site_rup_dict.get('crJB', None)
)
self.timeCalc += time.process_time_ns() - start
meanList.append(mean)
Expand Down
Loading