Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

formatted with black and flake8 #47

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 16 additions & 9 deletions strec/cmt.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ def getComposite(rows):
"""Calculate composite moment tensor.

Args:
rows (list):List of tuples containing (mrr,mtt,mpp,mrt,mrp,mtp)
rows (list):
List of tuples containing (mrr,mtt,mpp,mrt,mrp,mtp)
Returns:
tuple: Tuple of (composite moment tensor dictionary
(see fill_tensor_from_angles),
Expand Down Expand Up @@ -45,8 +46,7 @@ def getComposite(rows):
vm13 = comp_sq_mean_sq[3]
vm23 = comp_sq_mean_sq[4]
vm12 = comp_sq_mean_sq[5]
varforbenius = vm11 * vm11 + vm12 * vm12 + \
vm13 * vm13 + vm22 * vm22 + vm23 * vm23
varforbenius = vm11 * vm11 + vm12 * vm12 + vm13 * vm13 + vm22 * vm22 + vm23 * vm23
forbenius = m11 * m11 + m12 * m12 + m13 * m13 + m22 * m22 + m23 * m23
similarity = np.sqrt(varforbenius) / forbenius

Expand All @@ -61,8 +61,9 @@ def getComposite(rows):
return (tensor, similarity, nrows)


def getCompositeCMT(lat, lon, depth, dbfile, box=0.1, depthbox=10, nmin=3, maxbox=1.0,
dbox=0.09):
def getCompositeCMT(
lat, lon, depth, dbfile, box=0.1, depthbox=10, nmin=3, maxbox=1.0, dbox=0.09
):
"""Search a database for list of moment tensors, calculate composite moment tensor.
Args:
lat (float): Latitude (dd).
Expand All @@ -85,10 +86,16 @@ def getCompositeCMT(lat, lon, depth, dbfile, box=0.1, depthbox=10, nmin=3, maxbo
rows = []
searchwidth = box
while len(rows) < nmin and searchwidth < maxbox:
qstr = ("SELECT mrr,mtt,mpp,mrt,mrp,mtp FROM earthquake WHERE lat >= %.4f AND "
"lat <= %.4f AND lon >= %.4f AND lon <= %.4f")
query = qstr % (lat - searchwidth, lat + searchwidth,
lon - searchwidth, lon + searchwidth)
qstr = (
"SELECT mrr,mtt,mpp,mrt,mrp,mtp FROM earthquake WHERE lat >= %.4f AND "
"lat <= %.4f AND lon >= %.4f AND lon <= %.4f"
)
query = qstr % (
lat - searchwidth,
lat + searchwidth,
lon - searchwidth,
lon + searchwidth,
)
cursor.execute(query)
rows = cursor.fetchall()
if len(rows) >= nmin:
Expand Down
114 changes: 79 additions & 35 deletions strec/gcmt.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,12 @@


TIMEOUT = 30
HIST_GCMT_URL = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/jan76_dec17.ndk.gz'
MONTHLY_GCMT_URL = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/NEW_MONTHLY/'
HIST_GCMT_URL = (
"http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/jan76_dec17.ndk.gz"
)
MONTHLY_GCMT_URL = (
"http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/NEW_MONTHLY/"
)


def fetch_gcmt():
Expand All @@ -34,24 +38,24 @@ def fetch_gcmt():
- mtp Mtp moment tensor component
"""
t1 = str(datetime.utcnow())
print('%s - Fetching historical GCMT data...' % t1)
print("%s - Fetching historical GCMT data..." % t1)
histfile = get_historical_gcmt()
t2 = str(datetime.utcnow())
print('%s - Converting to dataframe...' % t2)
print("%s - Converting to dataframe..." % t2)
dataframe = ndk_to_dataframe(histfile)
t3 = str(datetime.utcnow())
print('%s - Fetching monthly data...' % t3)
print("%s - Fetching monthly data..." % t3)
start_year = 2018
end_year = datetime.utcnow().year
end_month = datetime.utcnow().month
for year in range(start_year, end_year + 1):
for month in range(1, 13):
if year == end_year and month > end_month:
continue
print('Fetching GCMT data for %i month %i...' % (year, month))
print("Fetching GCMT data for %i month %i..." % (year, month))
monthfile = get_monthly_gcmt(year, month)
if monthfile is None:
print('No NDK file for %i month %i' % (year, month))
print("No NDK file for %i month %i" % (year, month))
continue
month_frame = ndk_to_dataframe(monthfile)
dataframe = dataframe.append(month_frame)
Expand All @@ -76,11 +80,11 @@ def get_historical_gcmt():
fh.close()
handle, zipfile = tempfile.mkstemp()
os.close(handle)
f = open(zipfile, 'wb')
f = open(zipfile, "wb")
f.write(zip_bytes)
f.close()
gz = gzip.GzipFile(zipfile, mode='rb')
unzip_bytes = gz.read().decode('utf-8')
gz = gzip.GzipFile(zipfile, mode="rb")
unzip_bytes = gz.read().decode("utf-8")
string_unzip = io.StringIO(unzip_bytes)
return string_unzip
except Exception as msg:
Expand All @@ -102,14 +106,27 @@ def get_monthly_gcmt(year, month):
io.StringIO:
StringIO object containing NDK file for given month/year.
"""
strmonth = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
'jul', 'aug', 'sep', 'oct', 'nov', 'dec'][month - 1]
strmonth = [
"jan",
"feb",
"mar",
"apr",
"may",
"jun",
"jul",
"aug",
"sep",
"oct",
"nov",
"dec",
][month - 1]
stryear = str(year)
url = parse.urljoin(MONTHLY_GCMT_URL, os.path.join(
stryear, strmonth + stryear[2:] + '.ndk'))
url = parse.urljoin(
MONTHLY_GCMT_URL, os.path.join(stryear, strmonth + stryear[2:] + ".ndk")
)
try:
fh = request.urlopen(url, timeout=TIMEOUT)
bytes = fh.read().decode('utf-8')
bytes = fh.read().decode("utf-8")
fh.close()
stringfile = io.StringIO(bytes)
return stringfile
Expand Down Expand Up @@ -138,12 +155,25 @@ def ndk_to_dataframe(ndkfile):
- mrp Mrp moment tensor component
- mtp Mtp moment tensor component
"""
if not hasattr(ndkfile, 'read'):
ndkfile = open(ndkfile, 'r')
if not hasattr(ndkfile, "read"):
ndkfile = open(ndkfile, "r")

lc = 0
df = pd.DataFrame(columns=['time', 'lat', 'lon', 'depth', 'mag',
'mrr', 'mtt', 'mpp', 'mrt', 'mrp', 'mtp'])
df = pd.DataFrame(
columns=[
"time",
"lat",
"lon",
"depth",
"mag",
"mrr",
"mtt",
"mpp",
"mrt",
"mrp",
"mtp",
]
)
tdict = {}
for line in ndkfile.readlines():
if (lc + 1) % 5 == 1:
Expand All @@ -163,7 +193,7 @@ def ndk_to_dataframe(ndkfile):
if (lc + 1) % 5 == 0:
_parse_line5(line, tdict)
lc += 1
tdict.pop('exponent') # remove now extraneous field
tdict.pop("exponent") # remove now extraneous field
df = df.append(tdict, ignore_index=True)
tdict = {}
continue
Expand All @@ -175,6 +205,12 @@ def ndk_to_dataframe(ndkfile):
def _parse_line1(line, tdict):
"""Parse the first line of an NDK file.

Args:
line (str/byte):
The line of the NDK file
tdict (dict):
dictionary to input values from NDK files

"""
dstr = line[5:26]
year = int(dstr[0:4])
Expand All @@ -190,31 +226,39 @@ def _parse_line1(line, tdict):
if microseconds > 999999:
microseconds = 999999

tdict['time'] = datetime(year, month, day, hour,
minute, seconds, microseconds)
tdict["time"] = datetime(year, month, day, hour, minute, seconds, microseconds)

tdict['lat'] = float(line[27:33])
tdict['lon'] = float(line[34:41])
tdict['depth'] = float(line[42:47])
tdict["lat"] = float(line[27:33])
tdict["lon"] = float(line[34:41])
tdict["depth"] = float(line[42:47])


def _parse_line4(line, tdict):
    """Parse the fourth line of an NDK file.

    Line 4 of an NDK record carries a common base-10 exponent in the first
    two columns, followed by the six moment-tensor mantissas at fixed
    column offsets (error estimates occupy the skipped columns between
    them -- presumably; confirm against the NDK format spec).

    Args:
        line (str):
            The fourth line of an NDK record.
        tdict (dict):
            Dictionary to input values from NDK files. Gains keys
            'exponent', 'mrr', 'mtt', 'mpp', 'mrt', 'mrp', 'mtp'
            (tensor components scaled to absolute units).
    """
    tdict["exponent"] = float(line[0:2])
    # Hoist the shared scale factor instead of recomputing np.power six times.
    scale = np.power(10.0, tdict["exponent"])
    tdict["mrr"] = float(line[2:9]) * scale
    tdict["mtt"] = float(line[15:22]) * scale
    tdict["mpp"] = float(line[28:35]) * scale
    tdict["mrt"] = float(line[41:48]) * scale
    tdict["mrp"] = float(line[54:61]) * scale
    tdict["mtp"] = float(line[67:74]) * scale


def _parse_line5(line, tdict):
    """Parse the fifth line of an NDK file.

    Reads the scalar-moment mantissa from fixed columns of line 5,
    applies the exponent parsed earlier from line 4, and converts the
    scalar moment to a moment magnitude.

    Args:
        line (str):
            The fifth line of an NDK record.
        tdict (dict):
            Dictionary to input values from NDK files. Must already
            contain 'exponent' (set by _parse_line4). Gains key 'mag'.
    """
    scalar_moment = float(line[49:56].strip()) * np.power(10.0, tdict["exponent"])
    # Moment magnitude relation: Mw = (2/3) * log10(M0) - 10.7.
    tdict["mag"] = ((2.0 / 3.0) * np.log10(scalar_moment)) - 10.7
39 changes: 20 additions & 19 deletions strec/slab.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,7 @@


class GridSlab(object):
"""Represents USGS Slab model grids for a given subduction zone.
"""
"""Represents USGS Slab model grids for a given subduction zone."""

def __init__(self, depth_file, dip_file, strike_file, error_file):
"""Construct GridSlab object from input files.
Expand All @@ -35,7 +34,7 @@ def __init__(self, depth_file, dip_file, strike_file, error_file):
# as all of the slab grids. Read it into a local dictionary if found,
# otherwise we'll use the MAX_INTERFACE_DEPTH constant found above.
fpath, fname = os.path.split(self._depth_file)
table_file_name = os.path.join(fpath, 'maximum_interface_depths.csv')
table_file_name = os.path.join(fpath, "maximum_interface_depths.csv")
if os.path.isfile(table_file_name):
self._slab_table = pd.read_csv(table_file_name)
else:
Expand Down Expand Up @@ -70,7 +69,7 @@ def getSlabInfo(self, lat, lon):
lat (float): Hypocentral latitude in decimal degrees.
lon (float): Hypocentral longitude in decimal degrees.
Returns:
dict: Dictionary containing keys:
slabinfo (dict): Dictionary containing keys:
- region Three letter Slab model region code.
- strike Slab model strike angle.
- dip Slab model dip angle.
Expand All @@ -81,7 +80,7 @@ def getSlabInfo(self, lat, lon):
if not self.contains(lat, lon):
return slabinfo
fpath, fname = os.path.split(self._depth_file)
parts = fname.split('_')
parts = fname.split("_")
region = parts[0]
depth_grid = GMTGrid.load(self._depth_file)
# slab grids are negative depth
Expand Down Expand Up @@ -109,16 +108,18 @@ def getSlabInfo(self, lat, lon):
# get the maximum interface depth from table (if present)
if self._slab_table is not None:
df = self._slab_table
max_int_depth = df[df['zone'] == region].iloc[0]['interface_max_depth']
max_int_depth = df[df["zone"] == region].iloc[0]["interface_max_depth"]
else:
max_int_depth = MAX_INTERFACE_DEPTH

slabinfo = {'region': region,
'strike': strike,
'dip': dip,
'depth': depth,
'maximum_interface_depth' : max_int_depth,
'depth_uncertainty': error}
slabinfo = {
"region": region,
"strike": strike,
"dip": dip,
"depth": depth,
"maximum_interface_depth": max_int_depth,
"depth_uncertainty": error,
}
return slabinfo


Expand All @@ -132,7 +133,7 @@ def __init__(self, datafolder):
Args:
datafolder (str): String path where grid files and GeoJSON file reside.
"""
self._depth_files = glob.glob(os.path.join(datafolder, '*_dep*.grd'))
self._depth_files = glob.glob(os.path.join(datafolder, "*_dep*.grd"))

def getSlabInfo(self, lat, lon, depth):
"""Query the entire set of slab models and return a SlabInfo object, or None.
Expand All @@ -143,7 +144,7 @@ def getSlabInfo(self, lat, lon, depth):
depth (float): Hypocentral depth in km.

Returns:
dict: Dictionary containing keys:
slabinfo (dict): Dictionary containing keys:
- region Three letter Slab model region code.
- strike Slab model strike angle.
- dip Slab model dip angle.
Expand All @@ -155,21 +156,21 @@ def getSlabInfo(self, lat, lon, depth):
slabinfo = {}
# loop over all slab regions, return keep all slabs found
for depth_file in self._depth_files:
dip_file = depth_file.replace('dep', 'dip')
strike_file = depth_file.replace('dep', 'str')
error_file = depth_file.replace('dep', 'unc')
dip_file = depth_file.replace("dep", "dip")
strike_file = depth_file.replace("dep", "str")
error_file = depth_file.replace("dep", "unc")
if not os.path.isfile(error_file):
error_file = None
gslab = GridSlab(depth_file, dip_file, strike_file, error_file)
tslabinfo = gslab.getSlabInfo(lat, lon)
if not len(tslabinfo):
continue
else:
depth = tslabinfo['depth']
depth = tslabinfo["depth"]
if depth < deep_depth:
slabinfo = tslabinfo.copy()
deep_depth = depth
elif np.isnan(depth) and 'depth' not in slabinfo:
elif np.isnan(depth) and "depth" not in slabinfo:
slabinfo = tslabinfo.copy()

return slabinfo
Loading