9 changes: 8 additions & 1 deletion .gitignore
@@ -1,2 +1,9 @@
 **/__pycache__
-**/.*
+**/.*
+
+**/temp/
+
+dist/
+build/
13 changes: 8 additions & 5 deletions Examples/Example 1 Dynamic.py
@@ -1,11 +1,14 @@
-import sys
+import os
 from pathlib import Path
-sys.path.append(str(Path(__file__).resolve().parents[1]))
+import sys
+
+parent_dir = str(Path(__file__).parent.parent)
+if parent_dir not in sys.path:
+    sys.path.insert(0, parent_dir)
 
-import MDOF_LU as mlu
-import MDOF_CN as mcn
-import MDOFOpenSees as mops
+from MDOFModel import MDOF_LU as mlu
+from MDOFModel import MDOF_CN as mcn
+from MDOFModel import MDOFOpenSees as mops
 
 NumofStories = 3
 bld = mcn.MDOF_CN(NumofStories, 1000, 'S2', City='石家庄',longitude=114.52,latitude=38.05)
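
All five example scripts switch to the same import bootstrap: put the repository root on sys.path, then import through the MDOFModel package instead of importing the modules as loose top-level files. A minimal sketch of the pattern, assuming the script sits one directory below the root that contains the MDOFModel/ package:

    # sketch: make the repository root importable, then use package-qualified imports
    import sys
    from pathlib import Path

    parent_dir = str(Path(__file__).parent.parent)   # repository root, one level up
    if parent_dir not in sys.path:                   # avoid duplicate sys.path entries
        sys.path.insert(0, parent_dir)               # prepend so the local checkout wins

    from MDOFModel import MDOF_LU as mlu             # was: import MDOF_LU as mlu

One subtlety of the new form: unlike the old Path(__file__).resolve().parents[1], Path(__file__).parent.parent does not resolve symlinks or normalize a relative __file__, which only matters if the scripts are launched through a symlink or from an unusual working directory.
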
11 changes: 7 additions & 4 deletions Examples/Example 2 pushover.py
@@ -1,10 +1,13 @@
-import sys
+import os
 from pathlib import Path
-sys.path.append(str(Path(__file__).resolve().parents[1]))
+import sys
+
+parent_dir = str(Path(__file__).parent.parent)
+if parent_dir not in sys.path:
+    sys.path.insert(0, parent_dir)
 
-import MDOF_LU as mlu
-import MDOFOpenSees as mops
+from MDOFModel import MDOF_LU as mlu
+from MDOFModel import MDOFOpenSees as mops
 
 NumOfStories = 9
 bld = mlu.MDOF_LU(NumOfStories, 45.75*45.75, 'S1H')
13 changes: 8 additions & 5 deletions Examples/Example 3 LossAssessment.py
@@ -1,11 +1,14 @@
-import sys
+import os
 from pathlib import Path
-sys.path.append(str(Path(__file__).resolve().parents[1]))
+import sys
+
+parent_dir = str(Path(__file__).parent.parent)
+if parent_dir not in sys.path:
+    sys.path.insert(0, parent_dir)
 
-import MDOF_LU as mlu
-import MDOFOpenSees as mops
-import BldLossAssessment as bl
+from MDOFModel import MDOF_LU as mlu
+from MDOFModel import MDOFOpenSees as mops
+from MDOFModel import BldLossAssessment as bl
 
 NumofStories = 3
 bld = mlu.MDOF_LU(NumofStories, 3600, 'S2')
8 changes: 5 additions & 3 deletions Examples/Example 4 EQSpectra.py
@@ -1,12 +1,14 @@
-import sys
 from pathlib import Path
 import os
-sys.path.append(str(Path(__file__).resolve().parents[1]))
-
+import sys
 import numpy as np
 import matplotlib.pyplot as plt
 import eqsig.single
 
+parent_dir = str(Path(__file__).parent.parent)
+if parent_dir not in sys.path:
+    sys.path.insert(0, parent_dir)
+
 bf, sub_fig = plt.subplots()
 with open(os.path.join(os.path.dirname(__file__),'H-E12140.dat'), "r") as f:
     Allstr = f.read()
21 changes: 12 additions & 9 deletions Examples/Example 5 IDA.py
@@ -1,18 +1,21 @@
-import sys
-from pathlib import Path
-sys.path.append(str(Path(__file__).resolve().parents[1]))
-
-import IDA
-import MDOF_LU as mlu
-import MDOFOpenSees as mops
-import time
+import sys
 import pandas as pd
 import numpy as np
+from pathlib import Path
+import time
 
+parent_dir = str(Path(__file__).parent.parent)
+if parent_dir not in sys.path:
+    sys.path.insert(0, parent_dir)
+
+from MDOFModel import IDA
+from MDOFModel import MDOF_LU as mlu
+from MDOFModel import MDOFOpenSees as mops
+import MDOFModel
 
 CFDir = Path(__file__).resolve().parent
 
-FEMAP695Dir = str((CFDir/'../Resources/FEMA_P-695_far-field_ground_motions').resolve())
+FEMAP695Dir = str(Path(MDOFModel.__file__).parent / 'Resources/FEMA_P-695_far-field_ground_motions')
 T:pd.DataFrame = pd.read_table(str(Path(FEMAP695Dir)/'MetaData.txt'),sep=',')
 EQRecordFile_list = [str(Path(FEMAP695Dir)/str.replace(x,'.txt',''))
     for x in T['AccelXfile'].to_list()]
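
The FEMA P-695 record set is now resolved against the installed package rather than a path relative to the example script, so the records are found even when MDOFModel is imported from site-packages instead of the repository checkout. A minimal sketch of the idea, assuming MDOFModel is a regular importable package (so its __file__ attribute is set):

    import MDOFModel
    from pathlib import Path

    # resources shipped inside the package are located relative to the package itself
    FEMAP695Dir = Path(MDOFModel.__file__).parent / 'Resources/FEMA_P-695_far-field_ground_motions'
    print(FEMAP695Dir.is_dir())   # True when the Resources folder ships with the package
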
Binary file modified Examples/IDA.jpg
442 changes: 221 additions & 221 deletions Examples/IDA_results.csv

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions Examples/structural parameters.csv
@@ -1,6 +1,6 @@
-damping ratio,Hysteretic curve type,"Hysteretic parameter, tao",Typical story height (m)
-0.05,Pinching,0.4,3.6576000000000004
+damping ratio,Hysteretic curve type,"Hysteretic parameter, tao",Typical story height (m),T1 (s),Cs
+0.05,Pinching,0.2,3.6576000000000004,0.6000000000000001,0.05
 No. of story,Floor mass (kg),Elastic shear stiffness (N/m),Design shear force (N),Design displacement (m),Yield shear force (N),Yield displacement (m),Ultimate shear force (N),Ultimage displacement (m),Complete damage displacement (m)
-1,1200000,663737405.343388,2769260.912751282,0.004172223669266599,5316980.952482463,0.008010669444991871,10633961.904964926,0.09612803333990247,0.219456
-2,1200000,663737405.343388,2307717.4272927353,0.0034768530577221663,4430817.460402053,0.0066755578708265605,8861634.920804106,0.08010669444991873,0.219456
-3,1200000,663737405.343388,1384630.456375641,0.0020861118346332994,2658490.4762412314,0.004005334722495936,5316980.952482463,0.048064016669951234,0.219456
+1,4320000,2389454659.236196,6350400.000000003,0.002657677548076994,9525600.000000004,0.003986516322115491,19051200.000000007,0.03986516322115491,0.14630400000000002
+2,4320000,2389454659.236196,5292000.000000003,0.002214731290064162,7938000.000000004,0.0033220969350962427,15876000.000000007,0.03322096935096243,0.14630400000000002
+3,4320000,2389454659.236196,3175200.0000000014,0.001328838774038497,4762800.000000002,0.0019932581610577456,9525600.000000004,0.019932581610577454,0.14630400000000002
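
The regenerated rows stay internally consistent: each displacement column equals the matching shear force divided by the elastic shear stiffness. A quick check against the new story-1 row:

    k = 2389454659.236196    # Elastic shear stiffness (N/m)
    Fy = 9525600.000000004   # Yield shear force (N)
    print(Fy / k)            # ≈ 0.0039865, the listed Yield displacement (m)
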
File renamed without changes.
123 changes: 75 additions & 48 deletions BldLossAssessment.py → MDOFModel/BldLossAssessment.py
@@ -66,9 +66,57 @@ class BldLossAssessment:
     RepairTime_DS = [0,0,0,0,0] # corresponding to 5 damage states
     RecoveryTime_DS = [0,0,0,0,0] # corresponding to 5 damage states
     FunctionLossMultipliers = [0,0,0,0,0] # corresponding to 5 damage states
+
+    # preloaded data
+    _HazusInventoryTable4_2 = None
+    _HazusInventoryTable6_2 = None
+    _HazusInventoryTable6_3 = None
+    _HazusInventoryTable6_9 = None
+    _HazusTable5_9 = None
+    _HazusTable5_10 = None
+    _HazusTable5_12 = None
+    _HazusTable15_2 = None
+    _HazusTable15_3 = None
+    _HazusTable15_4 = None
+    _HazusTable15_5 = None
+    _HazusData4_2_Table11_7 = None
+    _HazusData4_2_Table11_8 = None
+    _HazusData4_2_Table11_9 = None
+
+    @classmethod
+    def LoadHazusData(cls):
+        """
+        Preload the Hazus data once so it is not re-read every time an instance is created.
+        """
+
+        current_dir = Path(__file__).resolve().parent
+
+        cls._HazusInventoryTable4_2 = pd.read_csv(current_dir/"./Resources/HazusInventory Table 4-2.csv", index_col=0, header=0)
+        cls._HazusInventoryTable6_2 = pd.read_csv(current_dir/"./Resources/HazusInventory Table 6-2.csv", index_col=0, header=1)
+        cls._HazusInventoryTable6_3 = pd.read_csv(current_dir/"./Resources/HazusInventory Table 6-3.csv", index_col=[0,1], header=1)
+        cls._HazusInventoryTable6_9 = pd.read_csv(current_dir/"./Resources/HazusInventory Table 6-9.csv", index_col=0, header=1)
+
+        cls._HazusTable5_9 = pd.read_csv(current_dir/"./Resources/HazusData Table 5.9.csv",
+            index_col=0, header=[0,1,2,3])
+        cls._HazusTable5_10 = pd.read_csv(current_dir/"./Resources/HazusData Table 5.10.csv",
+            index_col=None, header=[1,2])
+        cls._HazusTable5_12 = pd.read_csv(current_dir/"./Resources/HazusData Table 5.12.csv",
+            index_col=0, header=[1,2])
+        cls._HazusTable15_2 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.2.csv", index_col=1, header=2)
+        cls._HazusTable15_3 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.3.csv", index_col=1, header=2)
+        cls._HazusTable15_4 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.4.csv", index_col=1, header=2)
+        cls._HazusTable15_5 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.5.csv", index_col=1, header=2)
+
+        cls._HazusData4_2_Table11_7 = pd.read_csv(current_dir/"./Resources/HazusData4-2 Table 11-7.csv", index_col=1, header=2)
+        cls._HazusData4_2_Table11_8 = pd.read_csv(current_dir/"./Resources/HazusData4-2 Table 11-8.csv", index_col=1, header=2)
+        cls._HazusData4_2_Table11_9 = pd.read_csv(current_dir/"./Resources/HazusData4-2 Table 11-9.csv", index_col=1, header=2)
+
     def __init__(self, NumOfStories, FloorArea, StructuralType, DesignLevel, OccupancyClass):
 
+        # if the data is not loaded yet, load it
+        if BldLossAssessment._HazusInventoryTable4_2 is None:
+            self.LoadHazusData()
+
         self.NumOfStories = NumOfStories
         self.FloorArea = FloorArea
         self.__Read_StructuralType(StructuralType)
@@ -101,10 +149,7 @@ def LossAssessment(self,MaxDriftRatio,MaxAbsAccel, MaxRIDR = 'none'):
         self.__Estimate_RepairTime()
 
     def __Read_StructuralType(self,StructuralType):
-        current_dir = Path(__file__).resolve().parent
-        HazusInventoryTable4_2 = pd.read_csv(current_dir/"Resources/HazusInventory Table 4-2.csv",
-            index_col=0, header=0)
-        rownames = HazusInventoryTable4_2.index.to_list()
+        rownames = self._HazusInventoryTable4_2.index.to_list()
         rownames_NO_LMH = rownames.copy()
         for i in range(0,len(rownames)):
             if rownames[i][-1] in 'LMH':
@@ -114,7 +159,7 @@ def __Read_StructuralType(self,StructuralType):
             self.StructuralType = StructuralType
         elif StructuralType in rownames_NO_LMH:
             ind = [i for i in range(0,len(rownames_NO_LMH)) if StructuralType==rownames_NO_LMH[i]]
-            storyrange = HazusInventoryTable4_2.iloc[ind]['story range'].values.tolist()
+            storyrange = self._HazusInventoryTable4_2.iloc[ind]['story range'].values.tolist()
             for i in range(0,len(storyrange)):
                 if '~' in storyrange[i]:
                     Story_low = int(storyrange[i].split('~')[0])
@@ -136,82 +181,64 @@ def __Read_StructuralType(self,StructuralType):
             self.StructuralType = StructuralType + ' is UNKNOWN'
 
     def __Read_StructureReplacementCost(self):
-        current_dir = Path(__file__).resolve().parent
-        HazusInventoryTable6_2 = pd.read_csv(
-            current_dir/"./Resources/HazusInventory Table 6-2.csv",
-            index_col=0, header=1)
         if self.OccupancyClass=='RES1':
-            HazusInventoryTable6_3 = pd.read_csv(
-                current_dir/"./Resources/HazusInventory Table 6-3.csv",
-                index_col=[0,1], header=1)
             N_story = self.NumOfStories if self.NumOfStories<=3 else 3
             HeightClass = ['One-story','Two-story','Three-story'][N_story-1]
-            RCPersqft = HazusInventoryTable6_3.loc[('Average',HeightClass),'Average Base cost per sq.ft']
+            RCPersqft = self._HazusInventoryTable6_3.loc[('Average',HeightClass),'Average Base cost per sq.ft']
         else:
-            RCPersqft = HazusInventoryTable6_2.loc[self.OccupancyClass,'Structure Replacement Costl/sq.ft (2018)']
+            RCPersqft = self._HazusInventoryTable6_2.loc[self.OccupancyClass,'Structure Replacement Costl/sq.ft (2018)']
 
         assert RCPersqft[0]=='$'
         RCPersqft = float(RCPersqft[1:])
         self.StructureReplacementCost = RCPersqft*(self.FloorArea*3.28*3.28)
 
     def __Read_ContentsValueFactor(self):
-        current_dir = Path(__file__).resolve().parent
-        HazusInventoryTable6_9 = pd.read_csv(current_dir/"./Resources/HazusInventory Table 6-9.csv",
-            index_col=0, header=1)
-        ContentsValueFactor = HazusInventoryTable6_9.loc[self.OccupancyClass,'Contents Value (%)']
+        ContentsValueFactor = self._HazusInventoryTable6_9.loc[self.OccupancyClass,'Contents Value (%)']
         assert ContentsValueFactor[-1:]=='%'
         self.ContentsValueFactorOfStructureValue = float(ContentsValueFactor[:-1])/100.0
 
     def __Read_RepairCostRatios(self):
-        current_dir = Path(__file__).resolve().parent
-        HazusTable15_2 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.2.csv",
-            index_col=1, header=2)
-        HazusTable15_2 = HazusTable15_2.drop(['No.'], axis=1)
-        HazusTable15_3 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.3.csv",
-            index_col=1, header=2)
-        HazusTable15_3 = HazusTable15_3.drop(['No.'], axis=1)
-        HazusTable15_4 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.4.csv",
-            index_col=1, header=2)
-        HazusTable15_4 = HazusTable15_4.drop(['No.'], axis=1)
-        HazusTable15_5 = pd.read_csv(current_dir/"./Resources/HazusData Table 15.5.csv",
-            index_col=1, header=2)
-        HazusTable15_5 = HazusTable15_5.drop(['No.'], axis=1)
+        HazusTable15_2 = self._HazusTable15_2.drop(['No.'], axis=1)
+        HazusTable15_3 = self._HazusTable15_3.drop(['No.'], axis=1)
+        HazusTable15_4 = self._HazusTable15_4.drop(['No.'], axis=1)
+        HazusTable15_5 = self._HazusTable15_5.drop(['No.'], axis=1)
         self.StructureRCRatio_DS = (HazusTable15_2.loc[self.OccupancyClass].values/100.0).tolist()
         self.AccelSenNonstructRCRatio_DS = (HazusTable15_3.loc[self.OccupancyClass].values/100.0).tolist()
         self.DriftSenNonstructRCRatio_DS = (HazusTable15_4.loc[self.OccupancyClass].values/100.0).tolist()
         self.ContentsRCRatio_DS = (HazusTable15_5.loc[self.OccupancyClass].values/100.0).tolist()
 
     def __Read_RepairTime_DS(self):
-        current_dir = Path(__file__).resolve().parent
-        HazusData4_2_Table11_7 = pd.read_csv(current_dir/"./Resources/HazusData4-2 Table 11-7.csv",
-            index_col=1, header=2)
-        HazusData4_2_Table11_7 = HazusData4_2_Table11_7.drop(['No.'], axis=1)
-        HazusData4_2_Table11_8 = pd.read_csv(current_dir/"./Resources/HazusData4-2 Table 11-8.csv",
-            index_col=1, header=2)
-        HazusData4_2_Table11_8 = HazusData4_2_Table11_8.drop(['No.'], axis=1)
-        HazusData4_2_Table11_9 = pd.read_csv(current_dir/"./Resources/HazusData4-2 Table 11-9.csv",
-            index_col=1, header=2)
-        HazusData4_2_Table11_9 = HazusData4_2_Table11_9.drop(['No.'], axis=1)
+        HazusData4_2_Table11_7 = self._HazusData4_2_Table11_7.drop(['No.'], axis=1)
+        HazusData4_2_Table11_8 = self._HazusData4_2_Table11_8.drop(['No.'], axis=1)
+        HazusData4_2_Table11_9 = self._HazusData4_2_Table11_9.drop(['No.'], axis=1)
         self.RepairTime_DS = HazusData4_2_Table11_7.loc[self.OccupancyClass].values.tolist()
         self.RecoveryTime_DS = HazusData4_2_Table11_8.loc[self.OccupancyClass].values.tolist()
         self.FunctionLossMultipliers = HazusData4_2_Table11_9.loc[self.OccupancyClass].values.tolist()
 
     def __Read_IDR_Accel_thresholds_DS(self):
-        current_dir = Path(__file__).resolve().parent
-        HazusTable5_9 = pd.read_csv(current_dir/"./Resources/HazusData Table 5.9.csv",
-            index_col=0, header=[0,1,2,3])
-        HazusTable5_10 = pd.read_csv(current_dir/"./Resources/HazusData Table 5.10.csv",
-            index_col=None, header=[1,2])
-        HazusTable5_12 = pd.read_csv(current_dir/"./Resources/HazusData Table 5.12.csv",
-            index_col=0, header=[1,2])
+        HazusTable5_9 = self._HazusTable5_9.sort_index(axis=1)
+        HazusTable5_10 = self._HazusTable5_10.sort_index(axis=1)
+        HazusTable5_12 = self._HazusTable5_12.sort_index(axis=1)
 
         self.Median_IDR_Struct_DS = HazusTable5_9.loc[self.StructuralType,(self.SeismicDesignLevel,
             'Interstory Drift at Threshold of Damage State','Median')].values.tolist()
+        sorted_indices = np.argsort(self.Median_IDR_Struct_DS)
+        self.Median_IDR_Struct_DS = [self.Median_IDR_Struct_DS[i] for i in sorted_indices]
         self.Beta_IDR_Struct_DS = HazusTable5_9.loc[self.StructuralType,(self.SeismicDesignLevel,
             'Interstory Drift at Threshold of Damage State','Beta')].values.tolist()
+        self.Beta_IDR_Struct_DS = [self.Beta_IDR_Struct_DS[i] for i in sorted_indices]
 
         self.Median_IDR_NonStruct_DS = HazusTable5_10.loc[0,('Median')].values.tolist()
+        sorted_indices = np.argsort(self.Median_IDR_NonStruct_DS)
+        self.Median_IDR_NonStruct_DS = [self.Median_IDR_NonStruct_DS[i] for i in sorted_indices]
         self.Beta_IDR_NonStruct_DS = HazusTable5_10.loc[0,('Beta')].values.tolist()
+        self.Beta_IDR_NonStruct_DS = [self.Beta_IDR_NonStruct_DS[i] for i in sorted_indices]
 
         self.Median_Accel_NonStruct_DS = HazusTable5_12.loc[self.SeismicDesignLevel,('Median')].values.tolist()
+        sorted_indices = np.argsort(self.Median_Accel_NonStruct_DS)
+        self.Median_Accel_NonStruct_DS = [self.Median_Accel_NonStruct_DS[i] for i in sorted_indices]
         self.Beta_Accel_NonStruct_DS = HazusTable5_12.loc[self.SeismicDesignLevel,('Beta')].values.tolist()
+        self.Beta_Accel_NonStruct_DS = [self.Beta_Accel_NonStruct_DS[i] for i in sorted_indices]
 
     def __Estimate_DamageState(self,MaxDriftRatio,MaxAbsAccel,MaxRIDR):
 
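
The core refactor in this file replaces per-call pd.read_csv with class-level caching plus lazy initialization: the Hazus tables are parsed once, on first construction, and shared by every later instance. A stripped-down sketch of the same pattern (class and file names hypothetical):

    import pandas as pd
    from pathlib import Path

    class LossModel:
        _table = None  # class-level cache, shared by all instances

        @classmethod
        def load_data(cls):
            # parse the CSV once; later instances reuse the cached DataFrame
            data_dir = Path(__file__).resolve().parent / "Resources"
            cls._table = pd.read_csv(data_dir / "example_table.csv", index_col=0)  # hypothetical file

        def __init__(self):
            if LossModel._table is None:   # lazy: load on first construction only
                self.load_data()

The added np.argsort bookkeeping in __Read_IDR_Accel_thresholds_DS serves a separate purpose: it sorts each list of damage-state medians into ascending order while reindexing the matching beta list with the same indices, so every median/beta pair stays aligned.
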