diff --git a/bin/findid b/bin/findid
index 3f5162e..5d56a34 100755
--- a/bin/findid
+++ b/bin/findid
@@ -12,7 +12,8 @@ import numpy as np
 # local imports
 from libcomcat.search import search
-from libcomcat.utils import get_summary_data_frame, maketime
+from libcomcat.utils import maketime
+from libcomcat.dataframes import get_summary_data_frame
 
 # constants
 TIMEFMT = '%Y-%m-%dT%H:%M:%S'
@@ -25,15 +26,15 @@ pd.set_option('display.max_colwidth', 100)
 
 def get_parser():
-    desc = '''Find the id(s) of the closest earthquake to input parameters. 
+    desc = '''Find the id(s) of the closest earthquake to input parameters.
-    To print the authoritative id of the event closest in time and space 
+    To print the authoritative id of the event closest in time and space
     inside a 100 km, 16 second window to "2017-08-30 03:00:33 UTC 37.571 118.888":
-    
-    
+
+
     %(prog)s 2017-08-30T03:00:33 37.571 -118.888
 
-    To make a similar query but with the time shifted by 2 minutes, and a 
+    To make a similar query but with the time shifted by 2 minutes, and a
     custom time window of 3 minutes:
 
     %(prog)s -w 180 2017-08-30T03:00:33 37.571 -118.888
@@ -52,7 +53,7 @@ def get_parser():
     Notes:
     - The time format at the command line must be of the form "YYYY-MM-DDTHH:MM:SS". The time format in an input csv file
-      can be either :YYYY-MM-DDTHH:MM:SS" OR "YYYY-MM-DD HH:MM:SS". This is because on the command line the argument parser 
+      can be either "YYYY-MM-DDTHH:MM:SS" OR "YYYY-MM-DD HH:MM:SS". This is because on the command line the argument parser
       would be confused by the space between the date and the time, whereas in the csv file the input files are being split
       by commas.
     - Supplying the -a option with the -f option has no effect.
@@ -100,7 +101,8 @@ def get_event_info(time, lat, lon, twindow, radius):
     for idx, row in df.iterrows():
         distance, az, azb = gps2dist_azimuth(
             lat, lon, row['latitude'], row['longitude'])
-        dtime = row['time'] - time
+        row_time = row['time'].to_pydatetime()
+        dtime = row_time - time
         dt = np.abs(dtime.days * 86400 + dtime.seconds)
         df.loc[idx, 'distance'] = distance
         df.loc[idx, 'timedelta'] = dt
diff --git a/bin/getcsv b/bin/getcsv
index 6daf6fb..f5c072c 100755
--- a/bin/getcsv
+++ b/bin/getcsv
@@ -3,9 +3,9 @@ import argparse
 import sys
 
 from libcomcat.search import search, count
-from libcomcat.utils import (get_detail_data_frame,
-                             get_summary_data_frame,
-                             maketime)
+from libcomcat.utils import maketime
+from libcomcat.dataframes import (get_detail_data_frame,
+                                  get_summary_data_frame)
 
 
 def get_parser():
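Both scripts above change only where the DataFrame helpers are imported from: the builders now live in the new libcomcat.dataframes module, while maketime stays in libcomcat.utils. A minimal sketch of a downstream caller after this reorganization (the time window and magnitude threshold are illustrative):

```python
from libcomcat.search import search
from libcomcat.utils import maketime                      # unchanged location
from libcomcat.dataframes import get_summary_data_frame   # moved out of utils

# search a small window around the 2017-08-30 example event from findid's help
events = search(starttime=maketime('2017-08-29T00:00:00'),
                endtime=maketime('2017-08-31T00:00:00'),
                minmagnitude=4.0)
df = get_summary_data_frame(events)
print(df[['id', 'time', 'magnitude']])
```

The separate to_pydatetime() change in findid keeps the subtraction in plain datetime arithmetic, so dtime.days and dtime.seconds behave the same whether the 'time' column holds pandas Timestamps or stdlib datetimes.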
diff --git a/bin/getpager b/bin/getpager
new file mode 100755
index 0000000..44a9935
--- /dev/null
+++ b/bin/getpager
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+import argparse
+import sys
+
+# third party imports
+import pandas as pd
+import openpyxl
+from openpyxl.styles import Font, Color, colors
+
+# local imports
+from libcomcat.search import search, count, get_event_by_id
+from libcomcat.classes import SummaryEvent
+from libcomcat.utils import maketime
+from libcomcat.dataframes import get_pager_data_frame
+
+
+def add_headers(filename, file_format):
+    headers = ['#This data represents the results of running the PAGER exposure',
+               '#and loss algorithms on the output from ShakeMap.',
+               '#Notes: "Total" in the country column indicates that the',
+               '#results in that row are the sum of exposures/losses for',
+               '#all affected countries.',
+               '#"predicted_fatalities" and "predicted_dollars" are the',
+               '#results of applying loss models to the exposure data -',
+               '#note that these values are not guaranteed to match the',
+               '#actual losses from the earthquake.']
+    if file_format == 'csv':
+        data = open(filename, 'rt').read()
+        headertext = '\n'.join(headers) + '\n'
+        data = headertext + data
+        with open(filename, 'wt') as f:
+            f.write(data)
+    else:
+        font = Font(color=colors.RED, bold=True)
+        wb = openpyxl.load_workbook(filename)
+        ws = wb.active
+        ws.insert_rows(1, amount=len(headers))
+        for cellidx in range(0, len(headers)):
+            coordinate = 'A%i' % (cellidx+1)
+            ws[coordinate] = headers[cellidx].strip('#')
+            cell = ws[coordinate]
+            cell.font = font
+        wb.save(filename)
+        wb.close()
+
+
+def get_parser():
+    desc = '''Download PAGER exposure/loss results in line format (csv, tab, etc.).
+
+    To download basic PAGER information (total exposure) for events around New Zealand from 2010
+    to the present in CSV format:
+
+    %(prog)s nz_exposures.csv -f csv -s 2010-01-01 -m 5.5 9.9 -b 163.213 -178.945 -48.980 -32.324
+
+    To download the same information in Excel format:
+
+    %(prog)s nz_exposures.xlsx -f excel -s 2010-01-01 -m 5.5 9.9 -b 163.213 -178.945 -48.980 -32.324
+
+    To add loss information (see notes below), you can use the -l flag:
+
+    %(prog)s nz_exposures.xlsx -f excel -s 2010-01-01 -m 5.5 9.9 -b 163.213 -178.945 -48.980 -32.324 -l
+
+    To add exposures on a per-country basis (see notes below), you can use the -c flag:
+
+    %(prog)s nz_exposures.xlsx -f excel -s 2010-01-01 -m 5.5 9.9 -b 163.213 -178.945 -48.980 -32.324 -c
+
+    NOTES:
+
+    1) Any start or end time where only date is specified (YYYY-mm-dd) will
+    be translated to the beginning of that day. Thus, a start time of
+    "2015-01-01" becomes "2015-01-01T00:00:00" and an end time of "2015-01-02"
+    becomes "2015-01-02T00:00:00".
+
+    2) Older events may not have the predicted loss information in ComCat - in those
+    cases, predicted losses and uncertainties will be filled in with NaN values.
+
+    3) Older events may not have the per-country exposure information available in
+    ComCat.
+
+    4) Note that when specifying a search box that crosses the -180/180 meridian,
+    you simply specify longitudes as you would if you were not crossing that
+    meridian (i.e., lonmin=179, lonmax=-179). The program will resolve the
+    discrepancy.
+
+    5) The ComCat API has a returned event limit of 20,000. Queries that
+    exceed this ComCat limit ARE supported by this software, by
+    breaking up one large request into a number of smaller ones.'''
+
+    parser = argparse.ArgumentParser(
+        description=desc, formatter_class=argparse.RawDescriptionHelpFormatter)
+    # positional arguments
+    parser.add_argument('filename',
+                        metavar='FILENAME', help='Output filename.')
+    # optional arguments
+    helpstr = ('Bounds to constrain event search '
+               '[lonmin lonmax latmin latmax]')
+    parser.add_argument('-b', '--bounds',
+                        metavar=('lonmin', 'lonmax', 'latmin', 'latmax'),
+                        dest='bounds', type=float, nargs=4,
+                        help=helpstr)
+    helpstr = 'Search radius in KM (use instead of bounding box)'
+    parser.add_argument('-r', '--radius', dest='radius',
+                        metavar=('lat', 'lon', 'rmax'),
+                        type=float, nargs=3,
+                        help=helpstr)
+    helpstr = ('Start time for search (defaults to ~30 days ago). '
+               'YYYY-mm-dd, YYYY-mm-ddTHH:MM:SS, or YYYY-mm-ddTHH:MM:SS.s')
+    parser.add_argument('-s', '--start-time', dest='startTime', type=maketime,
+                        help=helpstr)
+    helpstr = ('End time for search (defaults to current date/time). '
+               'YYYY-mm-dd, YYYY-mm-ddTHH:MM:SS, or YYYY-mm-ddTHH:MM:SS.s')
+    parser.add_argument('-e', '--end-time', dest='endTime', type=maketime,
+                        help=helpstr)
+    helpstr = ('Limit to events after specified time. YYYY-mm-dd or '
+               'YYYY-mm-ddTHH:MM:SS')
+    parser.add_argument('-t', '--time-after', dest='after', type=maketime,
+                        help=helpstr)
+    helpstr = 'Min/max (authoritative) magnitude to restrict search.'
+    parser.add_argument('-m', '--mag-range', metavar=('minmag', 'maxmag'),
+                        dest='magRange', type=float, nargs=2,
+                        help=helpstr)
+    parser.add_argument('-f', '--format', dest='format',
+                        choices=['csv', 'tab', 'excel'], default='csv',
+                        metavar='FORMAT', help='Output format.')
+
+    losshelp = 'Retrieve fatalities and economic losses'
+    parser.add_argument('-l', '--get-losses', help=losshelp, action='store_true',
+                        default=False)
+
+    countryhelp = 'Retrieve information from all countries affected by earthquake'
+    parser.add_argument('-c', '--get-countries', help=countryhelp, action='store_true',
+                        default=False)
+
+    versionhelp = 'Retrieve information from all versions of PAGER'
+    parser.add_argument('-a', '--all-versions', help=versionhelp, action='store_true',
+                        default=False)
+
+    versionhelp = 'Retrieve information from a single PAGER event'
+    parser.add_argument('-i', '--eventid', help=versionhelp,
+                        metavar='EVENTID')
+
+    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
+                        help='Print progress')
+    helpstr = ('Specify a different comcat *search* host than '
+               'earthquake.usgs.gov.')
+    parser.add_argument('--host',
+                        help=helpstr)
+    return parser
+
+
+def main():
+    parser = get_parser()
+    args = parser.parse_args()
+
+    latitude = None
+    longitude = None
+    radiuskm = None
+    lonmin = latmin = lonmax = latmax = None
+    if args.radius:
+        latitude = args.radius[0]
+        longitude = args.radius[1]
+        radiuskm = args.radius[2]
+
+    if args.bounds:
+        lonmin, lonmax, latmin, latmax = args.bounds
+        # fix longitude bounds when crossing dateline
+        if lonmin > lonmax and lonmax >= -180:
+            lonmin -= 360
+    else:
+        lonmin, lonmax, latmin, latmax = None, None, None, None
+
+    minmag = 0.0
+    maxmag = 9.9
+    if args.magRange:
+        minmag = args.magRange[0]
+        maxmag = args.magRange[1]
+
+    if args.bounds and args.radius:
+        print('Please specify either a bounding box OR radius search.')
+        sys.exit(1)
+
+    if args.eventid:
+        event = get_event_by_id(args.eventid,
+                                includesuperseded=args.all_versions)
+        events = [event]
+    else:
+        events = search(starttime=args.startTime,
+                        endtime=args.endTime,
+                        updatedafter=args.after,
+                        minlatitude=latmin,
+                        maxlatitude=latmax,
+                        minlongitude=lonmin,
+                        maxlongitude=lonmax,
+                        latitude=latitude,
+                        longitude=longitude,
+                        maxradiuskm=radiuskm,
+                        maxmagnitude=maxmag,
+                        minmagnitude=minmag,
+                        producttype='losspager',
+                        host=args.host,
+                        verbose=args.verbose)
+
+    if not len(events):
+        print('No events found matching your search criteria. Exiting.')
+        sys.exit(0)
+
+    dataframe = None
+    nevents = len(events)
+    i = 1
+    for event in events:
+        if args.verbose and (i == 1 or (not i % max(1, nevents//10))):
+            sys.stderr.write('Processing event %s (%i of %i).\n' %
+                             (event.id, i, nevents))
+        i += 1
+        if isinstance(event, SummaryEvent):
+            detail = event.getDetailEvent(includesuperseded=args.all_versions)
+        else:
+            detail = event
+        df = get_pager_data_frame(detail, get_losses=args.get_losses,
+                                  get_country_exposures=args.get_countries,
+                                  get_all_versions=args.all_versions)
+        if dataframe is None:
+            dataframe = df
+        else:
+            dataframe = pd.concat([dataframe, df])
+
+    if args.verbose:
+        sys.stderr.write('Created table...saving %i records to %s.\n' %
+                         (len(dataframe), args.filename))
+    if args.format == 'csv':
+        dataframe.to_csv(args.filename, index=False, chunksize=1000)
+    elif args.format == 'tab':
+        dataframe.to_csv(args.filename, sep='\t', index=False)
+    else:
+        dataframe.to_excel(args.filename, index=False)
+
+    add_headers(args.filename, args.format)
+
+    print('%i records saved to %s.' % (len(dataframe), args.filename))
+    sys.exit(0)
+
+
+if __name__ == '__main__':
+    main()
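The script drives search() → getDetailEvent() → get_pager_data_frame(); the same pipeline works directly from Python when the results are wanted in memory rather than on disk. A minimal sketch, assuming the queried window actually contains events with a losspager product:

```python
from datetime import datetime

import pandas as pd

from libcomcat.search import search
from libcomcat.dataframes import get_pager_data_frame

# same New Zealand box as the examples in the help text above
events = search(starttime=datetime(2010, 1, 1),
                minmagnitude=5.5, maxmagnitude=9.9,
                minlongitude=163.213, maxlongitude=-178.945,
                minlatitude=-48.980, maxlatitude=-32.324,
                producttype='losspager')

# expand each summary event and stack the per-event PAGER tables
frames = [get_pager_data_frame(event.getDetailEvent(), get_losses=True)
          for event in events]
table = pd.concat(frames, ignore_index=True)
```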
diff --git a/environment.yml b/environment.yml
deleted file mode 100644
index b9e059c..0000000
--- a/environment.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-name: comcat
-channels:
-- conda-forge
-- defaults
-dependencies:
-- ipython
-- impactutils
-- jupyter
-- numpy
-- obspy
-- pandas
-- pytest
-- pytest-cov
-- pytest-mpl
-- python=3.5
-- xlrd
-- xlwt
-- openpyxl
-- vcrpy
-- xlsxwriter
-
diff --git a/install.sh b/install.sh
index 29acaa3..088146f 100755
--- a/install.sh
+++ b/install.sh
@@ -22,18 +22,6 @@ echo $PATH
 
 VENV=comcat
 
-# Is the reset flag set?
-reset=0
-while getopts r FLAG; do
-  case $FLAG in
-    r)
-      reset=1
-
-      ;;
-  esac
-done
-
-
 # create a matplotlibrc file with the non-interactive backend "Agg" in it.
 if [ ! -d "$matplotlibdir" ]; then
     mkdir -p $matplotlibdir
@@ -77,23 +65,40 @@ echo ""
 
 # Choose an environment file based on platform
-echo ". $HOME/miniconda/etc/profile.d/conda.sh" >> $prof
-
-# If the user has specified the -r (reset) flag, then create an
-# environment based on only the named dependencies, without
-# any versions of packages specified.
-if [ $reset == 1 ]; then
-    echo "Ignoring platform, letting conda sort out dependencies..."
-    env_file=environment.yml
+# only add this line if it does not already exist
+grep "/etc/profile.d/conda.sh" $prof
+if [ $? -ne 0 ]; then
+    echo ". $_CONDA_ROOT/etc/profile.d/conda.sh" >> $prof
 fi
 
 # Start in conda base environment
 echo "Activate base virtual environment"
 conda activate base
 
+# Remove existing libcomcat environment if it exists
+conda remove -y -n $VENV --all
+
+# Package list:
+package_list=(
+    "python=3.5"
+    "impactutils"
+    "ipython"
+    "jupyter"
+    "numpy"
+    "obspy"
+    "pandas"
+    "pytest"
+    "pytest-cov"
+    "vcrpy"
+    "xlrd"
+    "xlwt"
+    "openpyxl"
+    "xlsxwriter"
+)
+
 # Create a conda virtual environment
 echo "Creating the $VENV virtual environment:"
-conda env create -f $env_file --force
+conda create -y -n $VENV -c conda-forge --channel-priority ${package_list[*]}
 
 # Bail out at this point if the conda create command fails.
 # Clean up zip files we've downloaded
@@ -111,8 +116,5 @@ conda activate $VENV
 echo "Installing libcomcat..."
 pip install -e .
 
-# Install default profile
-#python bin/sm_profile -c default -a
-
 # Tell the user they have to activate this environment
 echo "Type 'conda activate $VENV' to use this new virtual environment."
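With environment.yml deleted, the package_list array above is now the single source of truth for dependencies. A quick, hypothetical smoke test after `conda activate comcat` confirms that the reorganized module layout resolves in the new environment:

```python
# all of these imports should succeed in the freshly built environment
from libcomcat.search import search, count, get_event_by_id
from libcomcat.utils import maketime
from libcomcat.dataframes import (get_summary_data_frame,
                                  get_detail_data_frame,
                                  get_pager_data_frame)

print('libcomcat imports OK')
```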
diff --git a/libcomcat/classes.py b/libcomcat/classes.py
index d4f7bdf..1014dd7 100644
--- a/libcomcat/classes.py
+++ b/libcomcat/classes.py
@@ -126,50 +126,71 @@ def __init__(self, feature):
 
         https://earthquake.usgs.gov/earthquakes/feed/v1.0/geojson.php
 
-        :param feature:
-            GeoJSON feature as described at above URL.
+        Args:
+            feature (dict): GeoJSON feature as described at above URL.
         """
         self._jdict = feature.copy()
 
     @property
     def location(self):
         """Earthquake location string.
+
+        Returns:
+            str: Earthquake location.
         """
         return self._jdict['properties']['place']
 
     @property
     def url(self):
         """ComCat URL.
+
+        Returns:
+            str: ComCat URL
         """
         return self._jdict['properties']['url']
 
     @property
     def latitude(self):
         """Authoritative origin latitude.
+
+        Returns:
+            float: Authoritative origin latitude.
         """
         return self._jdict['geometry']['coordinates'][1]
 
     @property
     def longitude(self):
         """Authoritative origin longitude.
+
+        Returns:
+            float: Authoritative origin longitude.
         """
         return self._jdict['geometry']['coordinates'][0]
 
     @property
     def depth(self):
         """Authoritative origin depth.
+
+        Returns:
+            float: Authoritative origin depth.
         """
         return self._jdict['geometry']['coordinates'][2]
 
     @property
     def id(self):
         """Authoritative origin ID.
+
+        Returns:
+            str: Authoritative origin ID.
         """
         return self._jdict['id']
 
     @property
     def time(self):
         """Authoritative origin time.
+
+        Returns:
+            datetime: Authoritative origin time.
         """
         time_in_msec = self._jdict['properties']['time']
         time_in_sec = time_in_msec // 1000
@@ -182,6 +203,9 @@ def time(self):
     @property
     def magnitude(self):
         """Authoritative origin magnitude.
+
+        Returns:
+            float: Authoritative origin magnitude.
         """
         return self._jdict['properties']['mag']
 
@@ -192,17 +216,21 @@ def __repr__(self):
 
     @property
     def properties(self):
-        """List of summary event properties (retrievable from object with [] operator).
+        """List of summary event properties.
+
+        Returns:
+            list: List of summary event properties (retrievable
+                from object with [] operator).
         """
         return list(self._jdict['properties'].keys())
 
     def hasProduct(self, product):
         """Test to see whether a given product exists for this event.
 
-        :param product:
-            Product to search for.
-        :returns:
-            Boolean indicating whether that product exists or not.
+        Args:
+            product (str): Product to search for.
+        Returns:
+            bool: Indicates whether that product exists or not.
         """
         if product not in self._jdict['properties']['types'].split(',')[1:]:
             return False
@@ -211,10 +239,10 @@ def hasProduct(self, product):
     def hasProperty(self, key):
         """Test to see if property is present in list of properties.
 
-        :param key:
-            Property to search for.
-        :returns:
-            Boolean indicating whether that key exists or not.
+        Args:
+            key (str): Property to search for.
+        Returns:
+            bool: Indicates whether that key exists or not.
         """
         if key not in self._jdict['properties']:
             return False
@@ -223,10 +251,10 @@ def hasProperty(self, key):
     def __getitem__(self, key):
         """Extract SummaryEvent property using the [] operator.
 
-        :param key:
-            Property to extract.
-        :returns:
-            Desired property.
+        Args:
+            key (str): Property to extract.
+        Returns:
+            str: Desired property.
         """
         if key not in self._jdict['properties']:
             raise AttributeError(
@@ -236,23 +264,26 @@ def __getitem__(self, key):
     def getDetailURL(self):
         """Instantiate a DetailEvent object from the URL found in the summary.
 
-        :returns:
-            URL for detailed version of event.
+        Returns:
+            str: URL for detailed version of event.
         """
         durl = self._jdict['properties']['detail']
         return durl
 
     def getDetailEvent(self, includedeleted=False, includesuperseded=False):
         """Instantiate a DetailEvent object from the URL found in the summary.
 
-        :param includedeleted:
-            Boolean indicating wheather to return versions of products that have
-            been deleted. Cannot be used with includesuperseded.
-        :param includesuperseded:
-            Boolean indicating wheather to return versions of products that have
-            been replaced by newer versions.
-            Cannot be used with includedeleted.
-        :returns:
-            DetailEvent version of SummaryEvent.
+
+        Args:
+            includedeleted (bool): Boolean indicating whether to return
+                                   versions of products that have
+                                   been deleted. Cannot be used with
+                                   includesuperseded.
+            includesuperseded (bool):
+                Boolean indicating whether to return versions of products
+                that have been replaced by newer versions.
+                Cannot be used with includedeleted.
+        Returns:
+            DetailEvent: Detailed version of SummaryEvent.
         """
         if includesuperseded and includedeleted:
             msg = ('includedeleted and includesuperseded '
@@ -271,14 +302,14 @@ def getDetailEvent(self, includedeleted=False, includesuperseded=False):
     def toDict(self):
         """Render the SummaryEvent origin information as an OrderedDict().
 
-        :returns:
-            Dictionary containing fields:
-            - id (string) Authoritative ComCat event ID.
-            - time (datetime) Authoritative event origin time.
-            - latitude (float) Authoritative event latitude.
-            - longitude (float) Authoritative event longitude.
-            - depth (float) Authoritative event depth.
-            - magnitude (float) Authoritative event magnitude.
+        Returns:
+            dict: Containing fields:
+                - id (string) Authoritative ComCat event ID.
+                - time (datetime) Authoritative event origin time.
+                - latitude (float) Authoritative event latitude.
+                - longitude (float) Authoritative event longitude.
+                - depth (float) Authoritative event depth.
+                - magnitude (float) Authoritative event magnitude.
         """
         edict = OrderedDict()
         edict['id'] = self.id
@@ -303,8 +334,8 @@ def __init__(self, url):
 
         https://earthquake.usgs.gov/earthquakes/feed/v1.0/geojson_detail.php
 
-        :param url:
-            String indicating a URL pointing to a detailed GeoJSON event.
+        Args:
+            url (str): String indicating a URL pointing to a detailed GeoJSON event.
         """
         try:
             fh = request.urlopen(url, timeout=TIMEOUT)
@@ -329,24 +360,46 @@ def __repr__(self):
     @property
     def location(self):
         """Earthquake location string.
+
+        Returns:
+            str: Earthquake location.
         """
         return self._jdict['properties']['place']
 
     @property
     def url(self):
         """ComCat URL.
+
+        Returns:
+            str: Earthquake URL.
         """
         return self._jdict['properties']['url']
 
+    @property
+    def detail_url(self):
+        """ComCat Detailed URL (with JSON).
+
+        Returns:
+            str: Earthquake Detailed URL with JSON.
+        """
+        url = URL_TEMPLATE.replace('[EVENTID]', self.id)
+        return url
+
     @property
     def latitude(self):
         """Authoritative origin latitude.
+
+        Returns:
+            float: Authoritative origin latitude.
         """
         return self._jdict['geometry']['coordinates'][1]
 
     @property
     def longitude(self):
         """Authoritative origin longitude.
+
+        Returns:
+            float: Authoritative origin longitude.
         """
         return self._jdict['geometry']['coordinates'][0]
 
@@ -359,12 +412,18 @@ def depth(self):
     @property
     def id(self):
         """Authoritative origin ID.
+
+        Returns:
+            str: Authoritative origin ID.
         """
         return self._jdict['id']
 
     @property
     def time(self):
         """Authoritative origin time.
+
+        Returns:
+            datetime: Authoritative origin time.
         """
         time_in_msec = self._jdict['properties']['time']
         time_in_sec = time_in_msec // 1000
@@ -377,22 +436,28 @@ def time(self):
     @property
     def magnitude(self):
         """Authoritative origin magnitude.
+
+        Returns:
+            float: Authoritative origin magnitude.
         """
         return self._jdict['properties']['mag']
 
     @property
     def properties(self):
-        """List of summary event properties (retrievable from object with [] operator).
+        """List of summary event properties.
+
+        Returns:
+            list: List of summary event properties (retrievable from object with [] operator).
         """
         return list(self._jdict['properties'].keys())
 
     def hasProduct(self, product):
         """Return a boolean indicating whether given product can be extracted from DetailEvent.
 
-        :param product:
-            Product to search for.
-        :returns:
-            Boolean indicating whether that product exists or not.
+        Args:
+            product (str): Product to search for.
+        Returns:
+            bool: Indicates whether that product exists or not.
         """
         if product in self._jdict['properties']['products']:
             return True
@@ -401,10 +466,10 @@ def hasProperty(self, key):
         """Test to see whether a property with a given key is present in list of properties.
 
-        :param key:
-            Property to search for.
-        :returns:
-            Boolean indicating whether that key exists or not.
+        Args:
+            key (str): Property to search for.
+        Returns:
+            bool: Indicates whether that key exists or not.
         """
         if key not in self._jdict['properties']:
             return False
@@ -413,10 +478,10 @@ def __getitem__(self, key):
         """Extract DetailEvent property using the [] operator.
 
-        :param key:
-            Property to extract.
-        :returns:
-            Desired property.
+        Args:
+            key (str): Property to extract.
+        Returns:
+            str: Desired property.
         """
         if key not in self._jdict['properties']:
             raise AttributeError(
@@ -430,28 +495,24 @@ def toDict(self, catalog=None,
                get_focals='preferred'):
         """Return origin, focal mechanism, and tensor information for a
         DetailEvent.
 
-        :param catalog:
-            Retrieve the primary event information (time,lat,lon...) from the
-            catalog given. If no source for this information exists, an
-            AttributeError will be raised.
-        :param get_all_magnitudes:
-            Boolean indicating whether all known magnitudes for this event
-            should be returned. NOTE: The ComCat phase-data product's
-            QuakeML file will be downloaded and parsed, which takes extra time.
-        :param get_tensors:
-            String option of 'none', 'preferred', or 'all'.
-        :param get_moment_supplement:
-            Boolean indicating whether derived origin and
-            double-couple/source time information should be extracted
-            (when available.)
-        :param get_focals:
-            String option of 'none', 'preferred', or 'all'.
-        :returns:
-            OrderedDict with the same fields as returned by
-            SummaryEvent.toDict(), *preferred* moment tensor and focal
-            mechanism data. If all magnitudes are requested, then
-            those will be returned as well. Generally speaking, the
-            number and name of the fields will vary by what data is available.
+        Args:
+            catalog (str): Retrieve the primary event information (time,lat,lon...) from the
+                catalog given. If no source for this information exists, an
+                AttributeError will be raised.
+            get_all_magnitudes (bool): Indicates whether all known magnitudes for this event
+                should be returned. NOTE: The ComCat phase-data product's
+                QuakeML file will be downloaded and parsed, which takes extra time.
+            get_tensors (str): Option of 'none', 'preferred', or 'all'.
+            get_moment_supplement (bool): Boolean indicating whether derived origin and
+                double-couple/source time information should be extracted
+                (when available.)
+            get_focals (str): String option of 'none', 'preferred', or 'all'.
+        Returns:
+            dict: OrderedDict with the same fields as returned by
+                SummaryEvent.toDict(), *preferred* moment tensor and focal
+                mechanism data. If all magnitudes are requested, then
+                those will be returned as well. Generally speaking, the
+                number and name of the fields will vary by what data is available.
         """
         edict = OrderedDict()
 
@@ -545,10 +606,10 @@ def toDict(self, catalog=None,
     def getNumVersions(self, product_name):
         """Count versions of a product (origin, shakemap, etc.) available.
 
-        :param product_name:
-            Name of product to query.
-        :returns:
-            Number of versions of a given product.
+        Args:
+            product_name (str): Name of product to query.
+        Returns:
+            int: Number of versions of a given product.
         """
         if not self.hasProduct(product_name):
             raise AttributeError(
@@ -559,18 +620,16 @@ def getProducts(self, product_name, source='preferred',
                     version=VersionOption.PREFERRED):
         """Retrieve a Product object from this DetailEvent.
 
-        :param product_name:
-            Name of product (origin, shakemap, etc.) to retrieve.
-        :param version:
-            An enum value from VersionOption (PREFERRED,FIRST,ALL).
-        :param source:
-            Any one of:
-            - 'preferred' Get version(s) of products from preferred source.
-            - 'all' Get version(s) of products from all sources.
-            - Any valid source network for this type of product
-              ('us','ak',etc.)
-        :returns:
-            List of Product objects.
+        Args:
+            product_name (str): Name of product (origin, shakemap, etc.) to retrieve.
+            version (enum): A value from VersionOption (PREFERRED,FIRST,ALL).
+            source (str): Any one of:
+                - 'preferred' Get version(s) of products from preferred source.
+                - 'all' Get version(s) of products from all sources.
+                - Any valid source network for this type of product
+                  ('us','ak',etc.)
+        Returns:
+            list: List of Product objects.
         """
         if not self.hasProduct(product_name):
             raise AttributeError(
@@ -689,12 +748,10 @@ class Product(object):
     def __init__(self, product_name, version, product):
         """Create a product class from product in detailed GeoJSON.
 
-        :param product_name:
-            Name of Product (origin, shakemap, etc.)
-        :param version:
-            Best guess as to ordinal version of the product.
-        :param product:
-            Product data to be copied from DetailEvent.
+        Args:
+            product_name (str): Name of Product (origin, shakemap, etc.)
+            version (int): Best guess as to ordinal version of the product.
+            product (dict): Product data to be copied from DetailEvent.
         """
         self._product_name = product_name
         self._version = version
@@ -703,11 +760,11 @@ def __init__(self, product_name, version, product):
     def getContentsMatching(self, regexp):
         """Find all contents that match the input regex, shortest to longest.
 
-        :param regexp:
-            Regular expression which should match one of the content files
-            in the Product.
-        :returns:
-            List of contents matching
+        Args:
+            regexp (str): Regular expression which should match one of the content files
+                in the Product.
+        Returns:
+            list: List of contents matching input regex.
         """
         contents = []
         if not len(self._product['contents']):
@@ -734,11 +791,11 @@ def getContentName(self, regexp):
         grid.xml and grid.xml.zip, and the input regexp is grid.xml,
         then grid.xml will be matched.
 
-        :param regexp:
-            Regular expression to use to search for matching contents.
-        :returns:
-            Shortest file name to match input regexp, or None if
-            no matches found.
+        Args:
+            regexp (str): Regular expression to use to search for matching contents.
+        Returns:
+            str: Shortest file name to match input regexp, or None if
+                no matches found.
         """
         content_name = 'a' * 1000
         found = False
@@ -763,11 +820,11 @@ def getContentURL(self, regexp):
         grid.xml.zip, and the input regexp is grid.xml, then grid.xml
         will be matched.
 
-        :param regexp:
-            Regular expression to use to search for matching contents.
-        :returns:
-            URL for shortest file name to match input regexp, or
-            None if no matches found.
+        Args:
+            regexp (str): Regular expression to use to search for matching contents.
+        Returns:
+            str: URL for shortest file name to match input regexp, or
+                None if no matches found.
         """
         content_name = 'a' * 1000
         found = False
@@ -790,16 +847,16 @@ def getContent(self, regexp, filename):
         """Download the shortest file name matching the input regular expression.
 
-        :param regexp:
-            Regular expression which should match one of the content files
-            in the Product.
-        :param filename:
-            Filename to which content should be downloaded.
-        :returns:
-            The URL from which the content was downloaded.
-        :raises:
-            Exception if content could not be downloaded from ComCat
-            after two tries.
+        Args:
+            regexp (str): Regular expression which should match one of the
+                content files
+                in the Product.
+            filename (str): Filename to which content should be downloaded.
+        Returns:
+            str: The URL from which the content was downloaded.
+        Raises:
+            Exception: If content could not be downloaded from ComCat
+                after two tries.
         """
         data, url = self.getContentBytes(regexp)
@@ -812,17 +869,19 @@ def getContent(self, regexp, filename):
     def getContentBytes(self, regexp):
         """Return bytes of shortest file name matching input regular expression.
 
-        :param regexp:
-            Regular expression which should match one of the content files in
-            the Product.
-        :returns:
-            Tuple of array of bytes containing file contents, and the source url.
-            Bytes can be decoded to UTF-8 by the user if file contents are known
-            to be ASCII. i.e.,
-            product.getContentBytes('info.json').decode('utf-8')
-        :raises:
-            Exception if content could not be downloaded from ComCat
-            after two tries.
+
+        Args:
+            regexp (str): Regular expression which should match one of the
+                content files in
+                the Product.
+        Returns:
+            tuple: (array of bytes containing file contents, source url)
+                Bytes can be decoded to UTF-8 by the user if file contents are known
+                to be ASCII. i.e.,
+                product.getContentBytes('info.json').decode('utf-8')
+        Raises:
+            Exception: If content could not be downloaded from ComCat
+                after two tries.
         """
         content_name = 'a' * 1000
         content_url = None
@@ -859,10 +918,10 @@ def getContentBytes(self, regexp):
     def hasProperty(self, key):
         """Determine if this Product contains a given property.
 
-        :param key:
-            Property to search for.
-        :returns:
-            Boolean indicating whether that key exists or not.
+        Args:
+            key (str): Property to search for.
+        Returns:
+            bool: Indicates whether that key exists or not.
        """
         if key not in self._product['properties']:
             return False
@@ -871,18 +930,38 @@ def hasProperty(self, key):
     @property
     def preferred_weight(self):
         """The weight assigned to this product by ComCat.
+
+        Returns:
+            float: weight assigned to this product by ComCat.
         """
         return self._product['preferredWeight']
 
     @property
     def source(self):
         """The contributing source for this product.
+
+        Returns:
+            str: contributing source for this product.
         """
         return self._product['source']
 
+    @property
+    def product_timestamp(self):
+        """The timestamp for this product.
+
+        Returns:
+            int: The timestamp for this product (effectively used as
+                version number by ComCat).
+        """
+        time_in_msec = self._product['updateTime']
+        return time_in_msec
+
     @property
     def update_time(self):
         """The datetime for when this product was updated.
+
+        Returns:
+            datetime: datetime for when this product was updated.
         """
         time_in_msec = self._product['updateTime']
         time_in_sec = time_in_msec // 1000
@@ -895,28 +974,37 @@ def update_time(self):
     @property
     def version(self):
         """The best guess for the ordinal version number of this product.
+
+        Returns:
+            int: best guess for the ordinal version number of this product.
         """
         return self._version
 
     @property
     def properties(self):
-        """List of product properties (retrievable from object with [] operator).
+        """List of product properties.
+
+        Returns:
+            list: List of product properties (retrievable from object with [] operator).
         """
         return list(self._product['properties'].keys())
 
     @property
     def contents(self):
-        """List of product properties (retrievable with getContent() method).
+        """List of product contents.
+
+        Returns:
+            list: List of product contents (retrievable with getContent() method).
         """
         return list(self._product['contents'].keys())
 
     def __getitem__(self, key):
         """Extract Product property using the [] operator.
 
-        :param key:
-            Property to extract.
-        :returns:
-            Desired property.
+        Args:
+            key (str): Property to extract.
+        Returns:
+            str: Desired property.
         """
         if key not in self._product['properties']:
             msg = 'No property %s found in %s product.' % (
diff --git a/libcomcat/dataframes.py b/libcomcat/dataframes.py
new file mode 100644
index 0000000..814cbba
--- /dev/null
+++ b/libcomcat/dataframes.py
@@ -0,0 +1,619 @@
+# stdlib imports
+from xml.dom import minidom
+import sys
+from urllib.request import urlopen
+import warnings
+from datetime import datetime
+import os.path
+import json
+
+# third party imports
+import numpy as np
+import pandas as pd
+from obspy.io.quakeml.core import Unpickler
+from obspy.clients.fdsn import Client
+from impactutils.time.ancient_time import HistoricTime
+from openpyxl import load_workbook
+import requests
+from scipy.special import erfc, erfcinv
+
+# local imports
+from .classes import VersionOption
+
+# constants
+CATALOG_SEARCH_TEMPLATE = 'https://earthquake.usgs.gov/fdsnws/event/1/catalogs'
+CONTRIBUTORS_SEARCH_TEMPLATE = 'https://earthquake.usgs.gov/fdsnws/event/1/contributors'
+TIMEOUT = 60
+TIMEFMT1 = '%Y-%m-%dT%H:%M:%S'
+TIMEFMT2 = '%Y-%m-%dT%H:%M:%S.%f'
+DATEFMT = '%Y-%m-%d'
+COUNTRYFILE = 'ne_10m_admin_0_countries.shp'
+
+# where is the PAGER fatality model found?
+FATALITY_URL = 'https://raw.githubusercontent.com/usgs/pager/master/losspager/data/fatality.xml'
+ECONOMIC_URL = 'https://raw.githubusercontent.com/usgs/pager/master/losspager/data/economy.xml'
+
+
+def get_phase_dataframe(detail, catalog='preferred'):
+    """Return a Pandas DataFrame consisting of Phase arrival data.
+
+    Args:
+        detail (DetailEvent): DetailEvent object.
+        catalog (str): Source network ('us','ak', etc. ,or 'preferred'.)
+
+    Returns:
+        DataFrame: Pandas DataFrame containing columns:
+            - Channel: Network.Station.Channel.Location (NSCL) style station
+              description. ("--" indicates missing information)
+            - Distance: Distance (kilometers) from epicenter to station.
+            - Azimuth: Azimuth (degrees) from epicenter to station.
+            - Phase: Name of the phase (Pn,Pg, etc.)
+            - Arrival Time: Pick arrival time (UTC).
+            - Status: "manual" or "automatic".
+            - Residual: Arrival time residual.
+            - Weight: Arrival weight.
+            - Agency: Agency ID.
+    Raises:
+        AttributeError: If input DetailEvent does not have a phase-data product
+            for the input catalog.
+    """
+    if catalog is None:
+        catalog = 'preferred'
+    df = pd.DataFrame(columns=['Channel', 'Distance', 'Azimuth',
+                               'Phase', 'Arrival Time', 'Status',
+                               'Residual', 'Weight', 'Agency'])
+
+    phasedata = detail.getProducts('phase-data', source=catalog)[0]
+    quakeurl = phasedata.getContentURL('quakeml.xml')
+    try:
+        fh = urlopen(quakeurl, timeout=TIMEOUT)
+        data = fh.read()
+        fh.close()
+    except Exception:
+        return None
+    unpickler = Unpickler()
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        catalog = unpickler.loads(data)
+        catevent = catalog.events[0]
+        for pick in catevent.picks:
+            phaserow = _get_phaserow(pick, catevent)
+            if phaserow is None:
+                continue
+            df = df.append(phaserow, ignore_index=True)
+    return df
+
+
+def _get_phaserow(pick, catevent):
+    """Return a dictionary containing Phase data matching that found on ComCat event page.
+    Example: https://earthquake.usgs.gov/earthquakes/eventpage/us2000ahv0#origin
+    (Click on the Phases tab).
+
+    Args:
+        pick (Pick): Obspy Catalog Pick object.
+        catevent (Event): Obspy Catalog Event object.
+
+    Returns:
+        dict: Containing fields:
+            - Channel: NSCL-style channel string.
+            - Distance: Distance (km) from station to origin.
+            - Azimuth: Azimuth (deg.) from epicenter to station.
+            - Phase: Name of the phase (Pn,Pg, etc.)
+            - Arrival Time: Pick arrival time (UTC).
+            - Status: "manual" or "automatic".
+            - Residual: Arrival time residual.
+            - Weight: Arrival weight.
+            - Agency: Agency ID.
+    """
+    pick_id = pick.resource_id
+    waveform_id = pick.waveform_id
+    arrival = get_arrival(catevent, pick_id)
+    if arrival is None:
+        return None
+
+    # save info to row of dataframe
+    etime = pick.time.datetime
+    channel = stringify(waveform_id)
+    row = {'Channel': channel,
+           'Distance': arrival.distance,
+           'Azimuth': arrival.azimuth,
+           'Phase': arrival.phase,
+           'Arrival Time': etime,
+           'Status': pick.evaluation_mode,
+           'Residual': arrival.time_residual,
+           'Weight': arrival.time_weight,
+           'Agency': arrival.creation_info.agency_id}
+    return row
+
+
+def stringify(waveform):
+    """Turn waveform object into NSCL-style station code
+
+    Args:
+        waveform (Waveform): Obspy Catalog Waveform object.
+    Returns:
+        str: NSCL- style string representation of waveform object.
+    """
+    fmt = '%s.%s.%s.%s'
+    network = '--'
+    if waveform.network_code is not None:
+        network = waveform.network_code
+    station = '--'
+    if waveform.station_code is not None:
+        station = waveform.station_code
+    channel = '--'
+    if waveform.channel_code is not None:
+        channel = waveform.channel_code
+    location = '--'
+    if waveform.location_code is not None:
+        location = waveform.location_code
+    tpl = (network, station, channel, location)
+    return fmt % tpl
+
+
+def get_arrival(event, pickid):
+    """Find the arrival object in a Catalog Event corresponding to input pick id.
+
+    Args:
+        event (Event): Obspy Catalog Event object.
+        pickid (str): Pick ID string.
+
+    Returns:
+        Arrival: Obspy Catalog arrival object.
+    """
+    for origin in event.origins:
+        idlist = [arr.pick_id for arr in origin.arrivals]
+        if pickid not in idlist:
+            continue
+        idx = idlist.index(pickid)
+        arrival = origin.arrivals[idx]
+        return arrival
+    if pickid is None:
+        return None
+
+
+def get_magnitude_data_frame(detail, catalog, magtype):
+    """Return a Pandas DataFrame consisting of magnitude data.
+
+    Args:
+        detail (DetailEvent): DetailEvent object.
+        catalog (str): Source catalog ('us','ak', etc. ,or 'preferred'.)
+        magtype (str): Magnitude type (mb, ml, etc.)
+
+    Returns:
+        DataFrame: Pandas DataFrame containing columns:
+            - Channel: Network.Station.Channel.Location (NSCL) style station
+              description. ("--" indicates missing information)
+            - Type: Magnitude type.
+            - Amplitude: Amplitude of seismic wave at each station (m).
+            - Period: Period of seismic wave at each station (s).
+            - Status: "manual" or "automatic".
+            - Magnitude: Locally determined magnitude.
+            - Weight: Magnitude weight.
+    Raises:
+        AttributeError: If input DetailEvent does not have a phase-data product
+            for the input catalog.
+    """
+    columns = ['Channel', 'Type', 'Amplitude',
+               'Period', 'Status', 'Magnitude',
+               'Weight']
+    df = pd.DataFrame()
+    phasedata = detail.getProducts('phase-data', source=catalog)[0]
+    quakeurl = phasedata.getContentURL('quakeml.xml')
+    try:
+        fh = urlopen(quakeurl, timeout=TIMEOUT)
+        data = fh.read()
+        fh.close()
+    except Exception:
+        return None
+    unpickler = Unpickler()
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        catalog = unpickler.loads(data)
+        catevent = catalog.events[0]  # match this to input catalog
+        for magnitude in catevent.magnitudes:
+            if magnitude.magnitude_type != magtype:
+                continue
+            for contribution in magnitude.station_magnitude_contributions:
+                row = {}
+                smag = contribution.station_magnitude_id.get_referred_object()
+                ampid = smag.amplitude_id
+                amp = ampid.get_referred_object()
+                waveid = amp.waveform_id
+                fmt = '%s.%s.%s.%s'
+                tpl = (waveid.network_code,
+                       waveid.station_code,
+                       waveid.channel_code,
+                       waveid.location_code)
+                row['Channel'] = fmt % tpl
+                row['Type'] = smag.station_magnitude_type
+                row['Amplitude'] = amp.generic_amplitude
+                row['Period'] = amp.period
+                row['Status'] = amp.evaluation_mode
+                row['Magnitude'] = smag.mag
+                row['Weight'] = contribution.weight
+                df = df.append(row, ignore_index=True)
+    df = df[columns]
+    return df
+
+
+def get_detail_data_frame(events, get_all_magnitudes=False,
+                          get_tensors='preferred',
+                          get_focals='preferred',
+                          get_moment_supplement=False,
+                          verbose=False):
+    """Extract the detailed event information into a pandas DataFrame.
+
+    Usage:
+      TODO
+
+    Args:
+        events (list): List of SummaryEvent objects as returned by search() function.
+        get_all_magnitudes (bool): Boolean indicating whether to return all
+            magnitudes in results for each event.
+        get_tensors (str): String option of 'none', 'preferred', or 'all'.
+        get_focals (str): String option of 'none', 'preferred', or 'all'.
+        get_moment_supplement (bool): Indicates whether derived origin and
+            double-couple/source time information
+            should be extracted (when available.)
+
+    Returns:
+        DataFrame: Pandas DataFrame with one row per event, and all
+            relevant information in columns.
+    """
+    elist = []
+    ic = 0
+    inc = min(100, np.power(10, np.floor(np.log10(len(events))) - 1))
+    if verbose:
+        sys.stderr.write(
+            'Getting detailed event info - reporting every %i events.\n' % inc)
+    for event in events:
+        try:
+            detail = event.getDetailEvent()
+        except Exception as e:
+            print('Failed to get detailed version of event %s' % event.id)
+            continue
+        edict = detail.toDict(get_all_magnitudes=get_all_magnitudes,
+                              get_tensors=get_tensors,
+                              get_moment_supplement=get_moment_supplement,
+                              get_focals=get_focals)
+        elist.append(edict)
+        if ic % inc == 0 and verbose:
+            msg = 'Getting detailed information for %s, %i of %i events.\n'
+            sys.stderr.write(msg % (event.id, ic, len(events)))
+        ic += 1
+    df = pd.DataFrame(elist)
+    first_columns = ['id', 'time', 'latitude',
+                     'longitude', 'depth', 'magnitude']
+    all_columns = df.columns
+    rem_columns = [col for col in all_columns if col not in first_columns]
+    new_columns = first_columns + rem_columns
+    df = df[new_columns]
+    return df
+
+
+def get_summary_data_frame(events):
+    """Take the results of a search and extract the summary event information into a pandas DataFrame.
+
+    Usage:
+      TODO
+
+    Args:
+        events (list): List of SummaryEvent objects as returned by search()
+            function.
+
+    Returns:
+        DataFrame: Pandas DataFrame with one row per event, and columns:
+            - id (string) Authoritative ComCat event ID.
+            - time (datetime) Authoritative event origin time.
+            - latitude (float) Authoritative event latitude.
+            - longitude (float) Authoritative event longitude.
+            - depth (float) Authoritative event depth.
+            - magnitude (float) Authoritative event magnitude.
+    """
+    elist = []
+    for event in events:
+        elist.append(event.toDict())
+    df = pd.DataFrame(elist)
+    return df
+
+
+def get_pager_data_frame(detail, get_losses=False,
+                         get_country_exposures=False,
+                         get_all_versions=False):
+    """Extract PAGER results for an event as a DataFrame.
+
+    Args:
+        detail (DetailEvent): Detailed information for a given event.
+        get_losses (bool): Indicates whether to retrieve predicted fatalities
+            and dollar losses and uncertainties.
+        get_country_exposures (bool): Indicates whether to retrieve per-country
+            shaking exposures.
+        get_all_versions (bool): Indicates whether to retrieve PAGER results for
+            all versions.
+    Returns:
+        (DataFrame): DataFrame whose columns will vary depending on input:
+            (all):
+            id - ComCat Event ID
+            location - Location string for event.
+            time - Date/time of event.
+            latitude - Event latitude (dd)
+            longitude - Event longitude (dd)
+            depth - Event depth (km)
+            magnitude - Event magnitude.
+            mmi1 - Estimated population exposed to shaking at MMI intensity 1.
+            ...
+            mmi10 - Estimated population exposed to shaking at MMI intensity 10.
+    """
+    default_columns = ['id', 'location', 'time',
+                       'latitude', 'longitude',
+                       'depth', 'magnitude', 'country',
+                       'pager_version',
+                       'mmi1', 'mmi2',
+                       'mmi3', 'mmi4',
+                       'mmi5', 'mmi6',
+                       'mmi7', 'mmi8',
+                       'mmi9', 'mmi10']
+
+    if not detail.hasProduct('losspager'):
+        return None
+
+    df = None
+    for pager in detail.getProducts('losspager', version=VersionOption.ALL):
+        total_row = {}
+        default = {}
+        default['id'] = detail.id
+        default['location'] = detail.location
+        lat = detail.latitude
+        lon = detail.longitude
+        default['time'] = detail.time
+        default['latitude'] = lat
+        default['longitude'] = lon
+        default['depth'] = detail.depth
+        default['magnitude'] = detail.magnitude
+        default['pager_version'] = pager.version
+        total_row.update(default)
+        total_row['country'] = 'Total'
+
+        if len(pager.getContentsMatching('exposures.json')):
+            total_row, country_rows = _get_json_exposure(total_row,
+                                                         pager,
+                                                         get_country_exposures,
+                                                         default)
+
+            if get_losses:
+                loss_json = pager.getContentBytes(
+                    'losses.json')[0].decode('utf-8')
+                jdict = json.loads(loss_json)
+                empfat = jdict['empirical_fatality']
+
+                # get the list of country codes
+                ccodes = [cfat['country_code']
+                          for cfat in empfat['country_fatalities']]
+                gfat, geco = get_g_values(ccodes)
+
+                # get the total fatalities
+                total_row['predicted_fatalities'] = empfat['total_fatalities']
+
+                gfat_total, geco_total = _get_total_g(pager)
+                # get the Gs/sigmas for total fatality
+                fat_sigma = get_sigma(empfat['total_fatalities'], gfat_total)
+                total_row['fatality_sigma'] = fat_sigma
+
+                # get the total economic losses
+                emploss = jdict['empirical_economic']
+                total_row['predicted_dollars'] = emploss['total_dollars']
+
+                # get the Gs/sigmas for total dollars
+                eco_sigma = get_sigma(emploss['total_dollars'], geco_total)
+                total_row['dollars_sigma'] = eco_sigma
+
+                if get_country_exposures:
+                    for country_fat in empfat['country_fatalities']:
+                        fat = country_fat['fatalities']
+                        ccode = country_fat['country_code']
+                        # in at least one case (not sure why) PAGER results
+                        # have fatalities per country but not exposures.
+                        if ccode not in country_rows:
+                            country_rows[ccode] = {}
+                            country_rows[ccode].update(default)
+                            country_rows[ccode]['country'] = ccode
+                            country_rows[ccode]['mmi1'] = np.nan
+                            country_rows[ccode]['mmi2'] = np.nan
+                            country_rows[ccode]['mmi3'] = np.nan
+                            country_rows[ccode]['mmi4'] = np.nan
+                            country_rows[ccode]['mmi5'] = np.nan
+                            country_rows[ccode]['mmi6'] = np.nan
+                            country_rows[ccode]['mmi7'] = np.nan
+                            country_rows[ccode]['mmi8'] = np.nan
+                            country_rows[ccode]['mmi9'] = np.nan
+                            country_rows[ccode]['mmi10'] = np.nan
+
+                        country_rows[ccode]['predicted_fatalities'] = fat
+                        gvalue = gfat[ccode]
+                        country_rows[ccode]['fatality_sigma'] = get_sigma(
+                            fat, gvalue)
+
+                    for country_eco in emploss['country_dollars']:
+                        eco = country_eco['us_dollars']
+                        ccode = country_eco['country_code']
+                        country_rows[ccode]['predicted_dollars'] = eco
+                        gvalue = geco[ccode]
+                        country_rows[ccode]['dollars_sigma'] = get_sigma(
+                            eco, gvalue)
+
+        else:  # event does not have JSON content
+            country_rows = {}
+            total_row = _get_xml_exposure(total_row, pager, get_losses)
+
+        columns = default_columns
+        if get_losses:
+            columns = default_columns + ['predicted_fatalities',
+                                         'fatality_sigma',
+                                         'predicted_dollars',
+                                         'dollars_sigma']
+        if df is None:
+            df = pd.DataFrame(columns=columns)
+        df = df.append(total_row, ignore_index=True)
+        for ccode, country_row in country_rows.items():
+            df = df.append(country_row, ignore_index=True)
+
+    df = df[columns]
+    # countries with zero fatalities don't report, so fill in with zeros
+    if get_losses:
+        df['predicted_fatalities'] = df['predicted_fatalities'].fillna(value=0)
+        df['fatality_sigma'] = df['fatality_sigma'].fillna(value=0)
+        df['predicted_dollars'] = df['predicted_dollars'].fillna(value=0)
+        df['dollars_sigma'] = df['dollars_sigma'].fillna(value=0)
+
+    return df
+
+
+def _invphi(input):
+    """Inverse phi function.
+
+    Args:
+        input (float or ndarray): Float (scalar or array) value.
+    Returns:
+        float: invphi(input)
+    """
+    return -1 * np.sqrt(2) * erfcinv(input/0.5)
+
+
+def _get_total_g(pager):
+    """Retrieve the G norm value for the aggregated losses.
+
+    Args:
+        pager (Product): PAGER ComCat Product.
+    Returns:
+        tuple: (Aggregated Fatality G value, Aggregated Economic G value)
+    """
+    alert_json = pager.getContentBytes(
+        'alerts.json')[0].decode('utf-8')
+    jdict = json.loads(alert_json)
+    gfat = jdict['fatality']['gvalue']
+    geco = jdict['economic']['gvalue']
+    return (gfat, geco)
+
+
+def _get_xml_exposure(total_row, pager, get_losses):
+    """Retrieve aggregated exposure from events prior to new PAGER release.
+
+    Args:
+        total_row (dict): Dictionary to be filled in with exposures.
+        pager (Product): PAGER ComCat Product.
+        get_losses (bool): If losses are desired, fill in values with NaN.
+    Returns:
+        dict: Filled in total_row.
+    """
+    exposure_xml = pager.getContentBytes('pager.xml')[0].decode('utf-8')
+    root = minidom.parseString(exposure_xml)
+    pager = root.getElementsByTagName('pager')[0]
+    if get_losses:
+        total_row['predicted_fatalities'] = np.nan
+        total_row['predicted_dollars'] = np.nan
+    for node in pager.childNodes:
+        if node.localName != 'exposure':
+            continue
+        mmistr = 'mmi%i' % (int(float(node.getAttribute('dmax'))))
+        total_row[mmistr] = int(node.getAttribute('exposure'))
+        total_row['ccode'] = 'Total'
+    root.unlink()
+    return total_row
+
+
+def _get_json_exposure(total_row, pager, get_country_exposures, default):
+    """Retrieve aggregated/country exposures from events after new PAGER release.
+
+    Args:
+        total_row (dict): Dictionary to be filled in with exposures.
+        pager (Product): PAGER ComCat Product.
+        get_country_exposures (bool): Extract exposures for each affected country.
+        default (dict): Dictionary of default event values (id, location, etc.).
+    Returns:
+        tuple: (total_row, country_rows)
+    """
+    exposure_json = pager.getContentBytes('exposures.json')[0].decode('utf-8')
+    jdict = json.loads(exposure_json)
+    exp = jdict['population_exposure']['aggregated_exposure']
+    total_row['mmi1'] = exp[0]
+    total_row['mmi2'] = exp[1]
+    total_row['mmi3'] = exp[2]
+    total_row['mmi4'] = exp[3]
+    total_row['mmi5'] = exp[4]
+    total_row['mmi6'] = exp[5]
+    total_row['mmi7'] = exp[6]
+    total_row['mmi8'] = exp[7]
+    total_row['mmi9'] = exp[8]
+    total_row['mmi10'] = exp[9]
+    country_rows = {}
+    if get_country_exposures:
+        for country in jdict['population_exposure']['country_exposures']:
+            country_row = {}
+            ccode = country['country_code']
+            country_row.update(default)
+            country_row['country'] = ccode
+            exp = country['exposure']
+            country_row['mmi1'] = exp[0]
+            country_row['mmi2'] = exp[1]
+            country_row['mmi3'] = exp[2]
+            country_row['mmi4'] = exp[3]
+            country_row['mmi5'] = exp[4]
+            country_row['mmi6'] = exp[5]
+            country_row['mmi7'] = exp[6]
+            country_row['mmi8'] = exp[7]
+            country_row['mmi9'] = exp[8]
+            country_row['mmi10'] = exp[9]
+            country_rows[ccode] = country_row
+
+    return (total_row, country_rows)
+
+
+def get_sigma(loss, gvalue):
+    """Calculate sigma value for a given loss value and G statistic.
+
+    Args:
+        loss (float): Fatality or economic loss value.
+        gvalue (float): G statistic for model.
+    Returns:
+        float: One sigma value.
+    """
+    if loss == 0:
+        loss = 0.5
+    percent = 0.6827
+    prob = round(np.exp(gvalue * _invphi(percent) + np.log(loss)))
+    return prob
+
+
+def get_g_values(ccodes):
+    """Retrieve G values for given country codes from PAGER repository.
+
+    Args:
+        ccodes (list): Sequence of two-letter country codes.
+    Returns:
+        tuple: (Dictionary of fatality G values, Dictionary of economic G values)
+
+    """
+    res = requests.get(FATALITY_URL)
+    root = minidom.parseString(res.text)
+    res.close()
+    models = root.getElementsByTagName(
+        'models')[0].getElementsByTagName('model')
+    fatmodels = {}
+    for model in models:
+        ccode = model.getAttribute('ccode')
+        if ccode in ccodes:
+            fatmodels[ccode] = float(model.getAttribute('evalnormvalue'))
+    root.unlink()
+
+    res = requests.get(ECONOMIC_URL)
+    root = minidom.parseString(res.text)
+    models = root.getElementsByTagName(
+        'models')[0].getElementsByTagName('model')
+    ecomodels = {}
+    for model in models:
+        ccode = model.getAttribute('ccode')
+        if ccode in ccodes:
+            ecomodels[ccode] = float(model.getAttribute('evalnormvalue'))
+    root.unlink()
+
+    return (fatmodels, ecomodels)
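Typical use of the new module pairs get_event_by_id with one of the frame builders above. A minimal sketch — the event ID is the illustrative one from the phase docstring, and it assumes the 'us' catalog contributed mb station magnitudes:

```python
from libcomcat.search import get_event_by_id
from libcomcat.dataframes import (get_phase_dataframe,
                                  get_magnitude_data_frame)

detail = get_event_by_id('us2000ahv0')

# one row per pick, mirroring the Phases tab on the ComCat event page
phases = get_phase_dataframe(detail, catalog='preferred')

# per-station contributions to the 'mb' magnitude from the 'us' catalog
mags = get_magnitude_data_frame(detail, catalog='us', magtype='mb')
print(phases.head(), mags.head())
```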
- :param latitude: - Specify the latitude to be used for a radius search. - :param longitude: - Specify the longitude to be used for a radius search. - :param maxradiuskm: - Limit to events within the specified maximum number of kilometers - from the geographic point defined by the latitude and longitude parameters. - :param maxradius: - Limit to events within the specified maximum number of degrees - from the geographic point defined by the latitude and longitude parameters. - :param catalog: - Limit to events from a specified catalog. - :param contributor: - Limit to events contributed by a specified contributor. - :param limit: - Limit the results to the specified number of events. - NOTE, this will be throttled by this Python API to the supported Web API limit of 20,000. - :param maxdepth: - Limit to events with depth less than the specified maximum. - :param maxmagnitude: - Limit to events with a magnitude smaller than the specified maximum. - :param mindepth: - Limit to events with depth more than the specified minimum. - :param minmagnitude: - Limit to events with a magnitude larger than the specified minimum. - :param offset: - Return results starting at the event count specified, starting at 1. - :param orderby: - Order the results. The allowed values are: - - time order by origin descending time - - time-asc order by origin ascending time - - magnitude order by descending magnitude - - magnitude-asc order by ascending magnitude - :param alertlevel: - Limit to events with a specific PAGER alert level. The allowed values are: - - green Limit to events with PAGER alert level "green". - - yellow Limit to events with PAGER alert level "yellow". - - orange Limit to events with PAGER alert level "orange". - - red Limit to events with PAGER alert level "red". - :param eventtype: - Limit to events of a specific type. NOTE: "earthquake" will filter non-earthquake events. - :param maxcdi: - Maximum value for Maximum Community Determined Intensity reported by DYFI. - :param maxgap: - Limit to events with no more than this azimuthal gap. - :param maxmmi: - Maximum value for Maximum Modified Mercalli Intensity reported by ShakeMap. - :param maxsig: - Limit to events with no more than this significance. - :param mincdi: - Minimum value for Maximum Community Determined Intensity reported by DYFI. - :param minfelt: - Limit to events with this many DYFI responses. - :param mingap: - Limit to events with no less than this azimuthal gap. - :param minsig: - Limit to events with no less than this significance. - :param producttype: - Limit to events that have this type of product associated. Example producttypes: - - moment-tensor - - focal-mechanism - - shakemap - - losspager - - dyfi - :param productcode: - Return the event that is associated with the productcode. - The event will be returned even if the productcode is not - the preferred code for the event. Example productcodes: - - nn00458749 - - at00ndf1fr - :param reviewstatus: - Limit to events with a specific review status. The different review statuses are: - - automatic Limit to events with review status "automatic". - - reviewed Limit to events with review status "reviewed". - :returns: - List of SummaryEvent() objects. + Args: + starttime (datetime): Python datetime - Limit to events on or after + the specified start time. + endtime (datetime): + Python datetime - Limit to events on or before the specified end time. + updatedafter (datetime): + Limit to events updated after the specified time. 
+ minlatitude (float): + Limit to events with a latitude larger than the specified minimum. + maxlatitude (float): + Limit to events with a latitude smaller than the specified maximum. + minlongitude (float): + Limit to events with a longitude larger than the specified minimum. + maxlongitude (float): + Limit to events with a longitude smaller than the specified maximum. + latitude (float): + Specify the latitude to be used for a radius search. + longitude (float): + Specify the longitude to be used for a radius search. + maxradiuskm (float): + Limit to events within the specified maximum number of kilometers + from the geographic point defined by the latitude and longitude parameters. + maxradius (float): + Limit to events within the specified maximum number of degrees + from the geographic point defined by the latitude and longitude parameters. + catalog (str): + Limit to events from a specified catalog. + contributor (str): + Limit to events contributed by a specified contributor. + limit (int): + Limit the results to the specified number of events. + NOTE, this will be throttled by this Python API to the supported + Web API limit of 20,000. + maxdepth (float): + Limit to events with depth less than the specified maximum. + maxmagnitude (float): + Limit to events with a magnitude smaller than the specified maximum. + mindepth (float): + Limit to events with depth more than the specified minimum. + minmagnitude (float): + Limit to events with a magnitude larger than the specified minimum. + offset (int): + Return results starting at the event count specified, starting at 1. + orderby (str): + Order the results. The allowed values are: + - time order by origin descending time + - time-asc order by origin ascending time + - magnitude order by descending magnitude + - magnitude-asc order by ascending magnitude + alertlevel (str): + Limit to events with a specific PAGER alert level. The allowed values are: + - green Limit to events with PAGER alert level "green". + - yellow Limit to events with PAGER alert level "yellow". + - orange Limit to events with PAGER alert level "orange". + - red Limit to events with PAGER alert level "red". + eventtype (str): + Limit to events of a specific type. NOTE: "earthquake" will filter non-earthquake events. + maxcdi (float): + Maximum value for Maximum Community Determined Intensity reported by DYFI. + maxgap (float): + Limit to events with no more than this azimuthal gap. + maxmmi (float): + Maximum value for Maximum Modified Mercalli Intensity reported by ShakeMap. + maxsig (float): + Limit to events with no more than this significance. + mincdi (float): + Minimum value for Maximum Community Determined Intensity reported by DYFI. + minfelt (int): + Limit to events with this many DYFI responses. + mingap (float): + Limit to events with no less than this azimuthal gap. + minsig (float): + Limit to events with no less than this significance. + producttype (str): + Limit to events that have this type of product associated. Example producttypes: + - moment-tensor + - focal-mechanism + - shakemap + - losspager + - dyfi + productcode (str): + Return the event that is associated with the productcode. + The event will be returned even if the productcode is not + the preferred code for the event. Example productcodes: + - nn00458749 + - at00ndf1fr + reviewstatus (str): + Limit to events with a specific review status. The different review statuses are: + - automatic Limit to events with review status "automatic". 
+ - reviewed Limit to events with review status "reviewed". + Returns: + list: List of SummaryEvent() objects. """ # getting the inputargs must be the first line of the method! inputargs = locals().copy() @@ -218,17 +220,15 @@ def get_event_by_id(eventid, catalog=None, Usage: TODO - :param eventid: - Select a specific event by ID; event identifiers are data center specific. - :param includesuperseded: - Specify if superseded products should be included. This also includes all - deleted products, and is mutually exclusive to the includedeleted parameter. - :param includedeleted: - Specify if deleted products should be incuded. - :param host: - Replace default ComCat host (earthquake.usgs.gov) with a custom host. - :returns: - DetailEvent object. + + Args: + eventid (str): Select a specific event by ID; event identifiers are data center specific. + includesuperseded (bool): + Specify if superseded products should be included. This also includes all + deleted products, and is mutually exclusive to the includedeleted parameter. + includedeleted (bool): Specify if deleted products should be included. + host (str): Replace default ComCat host (earthquake.usgs.gov) with a custom host. + Returns: DetailEvent object. """ # getting the inputargs must be the first line of the method! inputargs = locals().copy() @@ -282,6 +282,7 @@ def search(starttime=None, productcode=None, reviewstatus=None, host=None, + enable_limit=False, verbose=False): """Search the ComCat database for events matching input criteria. @@ -297,98 +298,103 @@ def search(starttime=None, Usage: TODO - :param starttime: - Python datetime - Limit to events on or after the specified start time. - :param endtime: - Python datetime - Limit to events on or before the specified end time. - :param updatedafter: - Python datetime - Limit to events updated after the specified time. - :param minlatitude: - Limit to events with a latitude larger than the specified minimum. - :param maxlatitude: - Limit to events with a latitude smaller than the specified maximum. - :param minlongitude: - Limit to events with a longitude larger than the specified minimum. - :param maxlongitude: - Limit to events with a longitude smaller than the specified maximum. - :param latitude: - Specify the latitude to be used for a radius search. - :param longitude: - Specify the longitude to be used for a radius search. - :param maxradiuskm: - Limit to events within the specified maximum number of kilometers - from the geographic point defined by the latitude and longitude parameters. - :param maxradius: - Limit to events within the specified maximum number of degrees - from the geographic point defined by the latitude and longitude parameters. - :param catalog: - Limit to events from a specified catalog. - :param contributor: - Limit to events contributed by a specified contributor. - :param limit: - Limit the results to the specified number of events. - NOTE, this will be throttled by this Python API to the supported Web API limit of 20,000. - :param maxdepth: - Limit to events with depth less than the specified maximum. - :param maxmagnitude: - Limit to events with a magnitude smaller than the specified maximum. - :param mindepth: - Limit to events with depth more than the specified minimum. - :param minmagnitude: - Limit to events with a magnitude larger than the specified minimum. - :param offset: - Return results starting at the event count specified, starting at 1. - :param orderby: - Order the results.
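The get_event_by_id() docstring converted above still reads "Usage: TODO", so a minimal usage sketch may help; the event id and the printed DetailEvent attributes below are illustrative assumptions, not part of this patch.

# Hypothetical sketch (not part of the patch): fetch one event by its
# ComCat id and read a few of its properties.
from libcomcat.search import get_event_by_id

detail = get_event_by_id('us2000h8ty', includesuperseded=False,
                         includedeleted=False)
print(detail.id, detail.time, detail.magnitude)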
The allowed values are: - - time order by origin descending time - - time-asc order by origin ascending time - - magnitude order by descending magnitude - - magnitude-asc order by ascending magnitude - :param alertlevel: - Limit to events with a specific PAGER alert level. The allowed values are: - - green Limit to events with PAGER alert level "green". - - yellow Limit to events with PAGER alert level "yellow". - - orange Limit to events with PAGER alert level "orange". - - red Limit to events with PAGER alert level "red". - :param eventtype: - Limit to events of a specific type. NOTE: "earthquake" will filter non-earthquake events. - :param maxcdi: - Maximum value for Maximum Community Determined Intensity reported by DYFI. - :param maxgap: - Limit to events with no more than this azimuthal gap. - :param maxmmi: - Maximum value for Maximum Modified Mercalli Intensity reported by ShakeMap. - :param maxsig: - Limit to events with no more than this significance. - :param mincdi: - Minimum value for Maximum Community Determined Intensity reported by DYFI. - :param minfelt: - Limit to events with this many DYFI responses. - :param mingap: - Limit to events with no less than this azimuthal gap. - :param minsig: - Limit to events with no less than this significance. - :param producttype: - Limit to events that have this type of product associated. Example producttypes: - - moment-tensor - - focal-mechanism - - shakemap - - losspager - - dyfi - :param productcode: - Return the event that is associated with the productcode. - The event will be returned even if the productcode is not - the preferred code for the event. Example productcodes: - - nn00458749 - - at00ndf1fr - :param reviewstatus: - Limit to events with a specific review status. The different review statuses are: - - automatic Limit to events with review status "automatic". - - reviewed Limit to events with review status "reviewed". - :param host: - Replace default ComCat host (earthquake.usgs.gov) with a custom host. - :returns: - List of SummaryEvent() objects. + Args: + starttime (datetime): + Python datetime - Limit to events on or after the specified start time. + endtime (datetime): + Python datetime - Limit to events on or before the specified end time. + updatedafter (datetime): + Python datetime - Limit to events updated after the specified time. + minlatitude (float): + Limit to events with a latitude larger than the specified minimum. + maxlatitude (float): + Limit to events with a latitude smaller than the specified maximum. + minlongitude (float): + Limit to events with a longitude larger than the specified minimum. + maxlongitude (float): + Limit to events with a longitude smaller than the specified maximum. + latitude (float): + Specify the latitude to be used for a radius search. + longitude (float): + Specify the longitude to be used for a radius search. + maxradiuskm (float): + Limit to events within the specified maximum number of kilometers + from the geographic point defined by the latitude and longitude parameters. + maxradius (float): + Limit to events within the specified maximum number of degrees + from the geographic point defined by the latitude and longitude parameters. + catalog (str): + Limit to events from a specified catalog. + contributor (str): + Limit to events contributed by a specified contributor. + limit (int): + Limit the results to the specified number of events. + NOTE, this will be throttled by this Python API to the supported Web API limit of 20,000. 
+ maxdepth (float): + Limit to events with depth less than the specified maximum. + maxmagnitude (float): + Limit to events with a magnitude smaller than the specified maximum. + mindepth (float): + Limit to events with depth more than the specified minimum. + minmagnitude (float): + Limit to events with a magnitude larger than the specified minimum. + offset (int): + Return results starting at the event count specified, starting at 1. + orderby (str): + Order the results. The allowed values are: + - time order by origin descending time + - time-asc order by origin ascending time + - magnitude order by descending magnitude + - magnitude-asc order by ascending magnitude + alertlevel (str): + Limit to events with a specific PAGER alert level. The allowed values are: + - green Limit to events with PAGER alert level "green". + - yellow Limit to events with PAGER alert level "yellow". + - orange Limit to events with PAGER alert level "orange". + - red Limit to events with PAGER alert level "red". + eventtype (str): + Limit to events of a specific type. NOTE: "earthquake" will filter non-earthquake events. + maxcdi (float): + Maximum value for Maximum Community Determined Intensity reported by DYFI. + maxgap (float): + Limit to events with no more than this azimuthal gap. + maxmmi (float): + Maximum value for Maximum Modified Mercalli Intensity reported by ShakeMap. + maxsig (float): + Limit to events with no more than this significance. + mincdi (float): + Minimum value for Maximum Community Determined Intensity reported by DYFI. + minfelt (int): + Limit to events with this many DYFI responses. + mingap (float): + Limit to events with no less than this azimuthal gap. + minsig (float): + Limit to events with no less than this significance. + producttype (str): + Limit to events that have this type of product associated. Example producttypes: + - moment-tensor + - focal-mechanism + - shakemap + - losspager + - dyfi + productcode (str): + Return the event that is associated with the productcode. + The event will be returned even if the productcode is not + the preferred code for the event. Example productcodes: + - nn00458749 + - at00ndf1fr + reviewstatus (str): + Limit to events with a specific review status. The different review statuses are: + - automatic Limit to events with review status "automatic". + - reviewed Limit to events with review status "reviewed". + host (str): + Replace default ComCat host (earthquake.usgs.gov) with a custom host. + enable_limit (bool): Enable 20,000 event search limit. Will turn off searching + in segments, which is meant to safely avoid that limit. + Use only when you are certain your search will be small. + + Returns: + list: List of SummaryEvent() objects. """ # getting the inputargs must be the first line of the method! 
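The search() docstring likewise leaves "Usage: TODO"; a minimal sketch of a small query using the new enable_limit flag follows. The dates, magnitude threshold, and printed attributes are illustrative assumptions.

# Hypothetical sketch (not part of the patch): a deliberately small query,
# where enable_limit=True bypasses the segmented searching that otherwise
# guards against the 20,000-event ComCat limit.
from datetime import datetime
from libcomcat.search import search

events = search(starttime=datetime(2018, 9, 1),
                endtime=datetime(2018, 9, 7),
                minmagnitude=6.0,
                enable_limit=True)
for event in events:
    print(event.id, event.time, event.magnitude)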
inputargs = locals().copy() @@ -408,7 +414,10 @@ def search(starttime=None, # remove the verbose element from the arguments del newargs['verbose'] - + del newargs['enable_limit'] + if enable_limit: + events = _search(**newargs) + return events segments = _get_time_segments(starttime, endtime, newargs['minmagnitude']) events = [] iseg = 1 @@ -464,12 +473,11 @@ def _search(**newargs): if 'updatedafter' in newargs: newargs['updatedafter'] = newargs['updatedafter'].strftime(TIMEFMT) if 'host' in newargs and newargs['host'] is not None: - template = SEARCH_TEMPLATE.replace('[HOST]',newargs['host']) + template = SEARCH_TEMPLATE.replace('[HOST]', newargs['host']) del newargs['host'] else: - template = SEARCH_TEMPLATE.replace('[HOST]',HOST) - - + template = SEARCH_TEMPLATE.replace('[HOST]', HOST) + paramstr = urlencode(newargs) url = template + '&' + paramstr events = [] diff --git a/libcomcat/utils.py b/libcomcat/utils.py index 4c0d0e8..246d469 100644 --- a/libcomcat/utils.py +++ b/libcomcat/utils.py @@ -5,6 +5,7 @@ import warnings from datetime import datetime import os.path +import json # third party imports import numpy as np @@ -13,6 +14,10 @@ from obspy.clients.fdsn import Client from impactutils.time.ancient_time import HistoricTime from openpyxl import load_workbook +import requests + +# local imports +from .classes import VersionOption # constants CATALOG_SEARCH_TEMPLATE = 'https://earthquake.usgs.gov/fdsnws/event/1/catalogs' @@ -21,6 +26,11 @@ TIMEFMT1 = '%Y-%m-%dT%H:%M:%S' TIMEFMT2 = '%Y-%m-%dT%H:%M:%S.%f' DATEFMT = '%Y-%m-%d' +COUNTRYFILE = 'ne_10m_admin_0_countries.shp' + +# where is the PAGER fatality model found? +FATALITY_URL = 'https://raw.githubusercontent.com/usgs/pager/master/losspager/data/fatality.xml' +ECONOMIC_URL = 'https://raw.githubusercontent.com/usgs/pager/master/losspager/data/economy.xml' def get_mag_src(mag): @@ -103,12 +113,13 @@ def get_all_mags(eventid): def read_phases(filename): """Read a phase file CSV or Excel file into data structures. - :param filename: - String file name of a CSV or Excel file created by getphases program. - :returns: - Tuple of: - header_dict - Dictionary containing header data from top of file. - dataframe - Pandas dataframe containing phase data. + Args: + filename (str): String file name of a CSV or Excel file + created by getphases program. + Returns: + tuple: + header_dict - Dictionary containing header data from top of file. + dataframe - Pandas dataframe containing phase data. """ if not os.path.isfile(filename): raise FileNotFoundError('Filename %s does not exist.' % filename) @@ -178,8 +189,9 @@ def maketime(timestring): def get_catalogs(): """Get the list of catalogs available in ComCat. - :returns: - List of catalogs available in ComCat (see the catalog parameter in search() method.) + Returns: + list: Catalogs available in ComCat (see the catalog + parameter in search() method.) """ fh = urlopen(CATALOG_SEARCH_TEMPLATE, timeout=TIMEOUT) data = fh.read().decode('utf8') @@ -196,8 +208,9 @@ def get_catalogs(): def get_contributors(): """Get the list of contributors available in ComCat. - :returns: - List of contributors available in ComCat (see the contributor parameter in search() method.) + Returns: + list: Contributors available in ComCat (see the contributor + parameter in search() method.) 
""" fh = urlopen(CONTRIBUTORS_SEARCH_TEMPLATE, timeout=TIMEOUT) data = fh.read().decode('utf8') @@ -209,286 +222,3 @@ def get_contributors(): conlist.append(contributor.firstChild.data) root.unlink() return conlist - - -def stringify(waveform): - """Turn waveform object into NSCL-style station code - - :param waveform: - Obspy Catalog Waveform object. - :returns: - NSCL- style string representation of waveform object. - """ - fmt = '%s.%s.%s.%s' - network = '--' - if waveform.network_code is not None: - network = waveform.network_code - station = '--' - if waveform.station_code is not None: - station = waveform.station_code - channel = '--' - if waveform.channel_code is not None: - channel = waveform.channel_code - location = '--' - if waveform.location_code is not None: - location = waveform.location_code - tpl = (network, station, channel, location) - return fmt % tpl - - -def get_arrival(event, pickid): - """Find the arrival object in a Catalog Event corresponding to input pick id. - :param event: - Obspy Catalog Event object. - :param pickid: - Pick ID string. - :returns: - Obspy Catalog arrival object. - """ - for origin in event.origins: - idlist = [arr.pick_id for arr in origin.arrivals] - if pickid not in idlist: - continue - idx = idlist.index(pickid) - arrival = origin.arrivals[idx] - return arrival - if pickid is None: - return None - - -def _get_phaserow(pick, catevent): - """Return a dictionary containing Phase data matching that found on ComCat event page. - Example: https://earthquake.usgs.gov/earthquakes/eventpage/us2000ahv0#origin - (Click on the Phases tab). - - :param pick: - Obspy Catalog Pick object. - :param catevent: - Obspy Catalog Event object. - :returns: - Dictionary containing: - - Channel: NSCL-style channel string. - - Distance: Distance (km) from station to origin. - - Azimuth: Azimuth (deg.) from epicenter to station. - - Phase: Name of the phase (Pn,Pg, etc.) - - Arrival Time: Pick arrival time (UTC). - - Status: "manual" or "automatic". - - Residual: Arrival time residual. - - Weight: Arrival weight. - - Agency: Agency ID. - """ - pick_id = pick.resource_id - waveform_id = pick.waveform_id - arrival = get_arrival(catevent, pick_id) - if arrival is None: - return None - - # save info to row of dataframe - etime = pick.time.datetime - channel = stringify(waveform_id) - row = {'Channel': channel, - 'Distance': arrival.distance, - 'Azimuth': arrival.azimuth, - 'Phase': arrival.phase, - 'Arrival Time': etime, - 'Status': pick.evaluation_mode, - 'Residual': arrival.time_residual, - 'Weight': arrival.time_weight, - 'Agency': arrival.creation_info.agency_id} - return row - - -def get_phase_dataframe(detail, catalog='preferred'): - """Return a Pandas DataFrame consisting of Phase arrival data. - - :param detail: - DetailEvent object. - :param catalog: - Source network ('us','ak', etc. ,or 'preferred'.) - :returns: - Pandas DataFrame containing columns: - - Channel: Network.Station.Channel.Location (NSCL) style station - description. ("--" indicates missing information) - - Distance: Distance (kilometers) from epicenter to station. - - Azimuth: Azimuth (degrees) from epicenter to station. - - Phase: Name of the phase (Pn,Pg, etc.) - - Arrival Time: Pick arrival time (UTC). - - Status: "manual" or "automatic". - - Residual: Arrival time residual. - - Weight: Arrival weight. - - Agency: Agency ID. - :raises: - AttributeError if input DetailEvent does not have a phase-data product - for the input catalog. 
- """ - if catalog is None: - catalog = 'preferred' - df = pd.DataFrame(columns=['Channel', 'Distance', 'Azimuth', - 'Phase', 'Arrival Time', 'Status', - 'Residual', 'Weight', 'Agency']) - - phasedata = detail.getProducts('phase-data', source=catalog)[0] - quakeurl = phasedata.getContentURL('quakeml.xml') - try: - fh = urlopen(quakeurl, timeout=TIMEOUT) - data = fh.read() - fh.close() - except Exception: - return None - unpickler = Unpickler() - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning) - catalog = unpickler.loads(data) - catevent = catalog.events[0] - for pick in catevent.picks: - phaserow = _get_phaserow(pick, catevent) - if phaserow is None: - continue - df = df.append(phaserow, ignore_index=True) - return df - - -def get_magnitude_data_frame(detail, catalog, magtype): - """Return a Pandas DataFrame consisting of magnitude data. - - :param detail: - DetailEvent object. - :param catalog: - Source catalog ('us','ak', etc. ,or 'preferred'.) - :param magtype: - Magnitude type (mb, ml, etc.) - :returns: - Pandas DataFrame containing columns: - - Channel: Network.Station.Channel.Location (NSCL) style station - description. ("--" indicates missing information) - - Type: Magnitude type. - - Amplitude: Amplitude of seismic wave at each station (m). - - Period: Period of seismic wave at each station (s). - - Status: "manual" or "automatic". - - Magnitude: Locally determined magnitude. - - Weight: Magnitude weight. - :raises: - AttributeError if input DetailEvent does not have a phase-data product - for the input catalog. - """ - columns = columns = ['Channel', 'Type', 'Amplitude', - 'Period', 'Status', 'Magnitude', - 'Weight'] - df = pd.DataFrame() - phasedata = detail.getProducts('phase-data', source=catalog)[0] - quakeurl = phasedata.getContentURL('quakeml.xml') - try: - fh = urlopen(quakeurl, timeout=TIMEOUT) - data = fh.read() - fh.close() - except Exception: - return None - unpickler = Unpickler() - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning) - catalog = unpickler.loads(data) - catevent = catalog.events[0] # match this to input catalog - for magnitude in catevent.magnitudes: - if magnitude.magnitude_type != magtype: - continue - for contribution in magnitude.station_magnitude_contributions: - row = {} - smag = contribution.station_magnitude_id.get_referred_object() - ampid = smag.amplitude_id - amp = ampid.get_referred_object() - waveid = amp.waveform_id - fmt = '%s.%s.%s.%s' - tpl = (waveid.network_code, - waveid.station_code, - waveid.channel_code, - waveid.location_code) - row['Channel'] = fmt % tpl - row['Type'] = smag.station_magnitude_type - row['Amplitude'] = amp.generic_amplitude - row['Period'] = amp.period - row['Status'] = amp.evaluation_mode - row['Magnitude'] = smag.mag - row['Weight'] = contribution.weight - df = df.append(row, ignore_index=True) - df = df[columns] - return df - - -def get_detail_data_frame(events, get_all_magnitudes=False, - get_tensors='preferred', - get_focals='preferred', - get_moment_supplement=False, - verbose=False): - """Extract the detailed event informat into a pandas DataFrame. - - Usage: - TODO - - :param events: - List of SummaryEvent objects as returned by search() function. - :param get_all_magnitudes: - Boolean indicating whether to return all magnitudes in results for each event. - :param get_tensors: - String option of 'none', 'preferred', or 'all'. - :param get_focals: - String option of 'none', 'preferred', or 'all'. 
- :param get_moment_supplement: - Boolean indicating whether derived origin and double-couple/source time information - should be extracted (when available.) - :returns: - Pandas DataFrame with one row per event, and all relevant information in columns. - """ - elist = [] - ic = 0 - inc = min(100, np.power(10, np.floor(np.log10(len(events))) - 1)) - if verbose: - sys.stderr.write( - 'Getting detailed event info - reporting every %i events.\n' % inc) - for event in events: - try: - detail = event.getDetailEvent() - except Exception as e: - print('Failed to get detailed version of event %s' % event.id) - continue - edict = detail.toDict(get_all_magnitudes=get_all_magnitudes, - get_tensors=get_tensors, - get_moment_supplement=get_moment_supplement, - get_focals=get_focals) - elist.append(edict) - if ic % inc == 0 and verbose: - msg = 'Getting detailed information for %s, %i of %i events.\n' - sys.stderr.write(msg % (event.id, ic, len(events))) - ic += 1 - df = pd.DataFrame(elist) - first_columns = ['id', 'time', 'latitude', - 'longitude', 'depth', 'magnitude'] - all_columns = df.columns - rem_columns = [col for col in all_columns if col not in first_columns] - new_columns = first_columns + rem_columns - df = df[new_columns] - return df - - -def get_summary_data_frame(events): - """Take the results of a search and extract the summary event informat in a pandas DataFrame. - - Usage: - TODO - - :param events: - List of SummaryEvent objects as returned by search() function. - - :returns: - Pandas DataFrame with one row per event, and columns: - - id (string) Authoritative ComCat event ID. - - time (datetime) Authoritative event origin time. - - latitude (float) Authoritative event latitude. - - longitude (float) Authoritative event longitude. - - depth (float) Authoritative event depth. - - magnitude (float) Authoritative event magnitude. - """ - elist = [] - for event in events: - elist.append(event.toDict()) - df = pd.DataFrame(elist) - return df diff --git a/notebooks/libcomcat_examples.ipynb b/notebooks/libcomcat_examples.ipynb index 32966e8..b341a04 100644 --- a/notebooks/libcomcat_examples.ipynb +++ b/notebooks/libcomcat_examples.ipynb @@ -3,9 +3,7 @@ { "cell_type": "code", "execution_count": 1, - "metadata": { - "collapsed": true - }, + "metadata": {}, "outputs": [], "source": [ "from libcomcat.utils import get_phase_dataframe, get_summary_data_frame\n", @@ -94,7 +92,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "['mag', 'place', 'time', 'updated', 'tz', 'url', 'detail', 'felt', 'cdi', 'mmi', 'alert', 'status', 'tsunami', 'sig', 'net', 'code', 'ids', 'sources', 'types', 'nst', 'dmin', 'rms', 'gap', 'magType', 'type', 'title']\n", + "['nst', 'cdi', 'ids', 'mag', 'felt', 'tsunami', 'net', 'types', 'gap', 'time', 'status', 'dmin', 'alert', 'sources', 'code', 'detail', 'title', 'sig', 'place', 'url', 'rms', 'mmi', 'magType', 'tz', 'updated', 'type']\n", "29.7\n" ] } @@ -147,18 +145,18 @@ "data": { "text/html": [ "
\n", - "\n", "\n", " \n", @@ -326,9 +324,7 @@ { "cell_type": "code", "execution_count": 8, - "metadata": { - "collapsed": true - }, + "metadata": {}, "outputs": [], "source": [ "dyfi = detail_event.getProducts('dyfi')[0]" @@ -364,9 +360,7 @@ { "cell_type": "code", "execution_count": 10, - "metadata": { - "collapsed": true - }, + "metadata": {}, "outputs": [], "source": [ "filename = os.path.join(os.path.expanduser('~'),'cdi_geo.txt')\n", @@ -394,18 +388,18 @@ "data": { "text/html": [ "
\n", - "\n", "
\n", " \n", @@ -528,18 +522,18 @@ "data": { "text/html": [ "
\n", - "\n", "
\n", " \n", @@ -553,6 +547,7 @@ " \n", " \n", " \n", + " \n", " \n", " \n", " \n", @@ -562,10 +557,11 @@ " \n", " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", " \n", " \n", " \n", @@ -573,10 +569,11 @@ " \n", " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", " \n", " \n", " \n", @@ -584,10 +581,11 @@ " \n", " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", " \n", " \n", " \n", @@ -595,10 +593,11 @@ " \n", " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", " \n", " \n", " \n", @@ -606,29 +605,30 @@ " \n", " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", " \n", " \n", "
StatusResidualWeightAgency
0.1267147.3P1994-01-17 12:30:551994-01-17 12:30:55.620manualNoneNoneCI
10.1267147.3P1994-01-17 12:30:551994-01-17 12:30:55.630manualNoneNoneCI
20.1267147.3S1994-01-17 12:30:591994-01-17 12:30:59.120manualNoneNoneCI
30.1792146.7P1994-01-17 12:30:571994-01-17 12:30:57.740manualNoneNoneCI
40.1792146.7S1994-01-17 12:30:591994-01-17 12:30:59.770manualNoneNoneCI
\n", "
" ], "text/plain": [ - " Channel Distance Azimuth Phase Arrival Time Status Residual \\\n", - "0 ZY.LA00.EHZ. 0.1267 147.3 P 1994-01-17 12:30:55 manual None \n", - "1 ZY.LA00.ELN. 0.1267 147.3 P 1994-01-17 12:30:55 manual None \n", - "2 ZY.LA00.ELE. 0.1267 147.3 S 1994-01-17 12:30:59 manual None \n", - "3 ZY.LA02.EHZ. 0.1792 146.7 P 1994-01-17 12:30:57 manual None \n", - "4 ZY.LA02.EHN. 0.1792 146.7 S 1994-01-17 12:30:59 manual None \n", + " Channel Distance Azimuth Phase Arrival Time Status \\\n", + "0 ZY.LA00.EHZ. 0.1267 147.3 P 1994-01-17 12:30:55.620 manual \n", + "1 ZY.LA00.ELN. 0.1267 147.3 P 1994-01-17 12:30:55.630 manual \n", + "2 ZY.LA00.ELE. 0.1267 147.3 S 1994-01-17 12:30:59.120 manual \n", + "3 ZY.LA02.EHZ. 0.1792 146.7 P 1994-01-17 12:30:57.740 manual \n", + "4 ZY.LA02.EHN. 0.1792 146.7 S 1994-01-17 12:30:59.770 manual \n", "\n", - " Weight \n", - "0 None \n", - "1 None \n", - "2 None \n", - "3 None \n", - "4 None " + " Residual Weight Agency \n", + "0 None None CI \n", + "1 None None CI \n", + "2 None None CI \n", + "3 None None CI \n", + "4 None None CI " ] }, "execution_count": 13, @@ -667,7 +667,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.2" + "version": "3.5.5" } }, "nbformat": 4, diff --git a/tests/data/vcr_pager_results.yaml b/tests/data/vcr_pager_results.yaml new file mode 100644 index 0000000..e9851d6 --- /dev/null +++ b/tests/data/vcr_pager_results.yaml @@ -0,0 +1,155 @@ +interactions: +- request: + body: null + headers: + Connection: [close] + Host: [earthquake.usgs.gov] + User-Agent: [Python-urllib/3.5] + method: GET + uri: https://earthquake.usgs.gov/fdsnws/event/1/query?format=geojson&includedeleted=false&eventid=us2000h8ty&includesuperseded=false + response: + body: {string: '{"type":"Feature","properties":{"mag":6.6,"place":"27km ENE of + Tomakomai, Japan","time":1536170879150,"updated":1542217214040,"tz":540,"url":"https://earthquake.usgs.gov/earthquakes/eventpage/us2000h8ty","felt":98,"cdi":8.1,"mmi":8.62,"alert":"orange","status":"reviewed","tsunami":1,"sig":1079,"net":"us","code":"2000h8ty","ids":",us2000h8ty,pt18248000,at00pelgzo,","sources":",us,pt,at,","types":",associate,dyfi,general-text,geoserve,ground-failure,impact-link,impact-text,losspager,moment-tensor,origin,phase-data,shakemap,","nst":null,"dmin":1.13,"rms":0.74,"gap":32,"magType":"mww","type":"earthquake","title":"M + 6.6 - 27km ENE of Tomakomai, Japan","products":{"associate":[{"indexid":"167969392","indexTime":1536172800883,"id":"urn:usgs-product:admin:associate:us2000h8ty_pt18248000:1536172798414","type":"associate","code":"us2000h8ty_pt18248000","source":"admin","updateTime":1536172798414,"status":"UPDATE","properties":{"eventsource":"us","eventsourcecode":"2000h8ty","othereventsource":"pt","othereventsourcecode":"18248000","pdl-client-version":"Version + 1.10.1 2016-04-07"},"preferredWeight":1,"contents":{"":{"contentType":"1","lastModified":1536172798000,"length":36,"bytes":"outside + automatic association window"}}},{"indexid":"167969222","indexTime":1536172701279,"id":"urn:usgs-product:admin:associate:us2000h8ty_us2000h8ty:1536172700236","type":"associate","code":"us2000h8ty_us2000h8ty","source":"admin","updateTime":1536172700236,"status":"UPDATE","properties":{"eventsource":"us","eventsourcecode":"2000h8ty","othereventsource":"us","othereventsourcecode":"2000h8ty","pdl-client-version":"Version + 1.10.1 2016-04-07"},"preferredWeight":1,"contents":{"":{"contentType":"1","lastModified":1536172700000,"length":35,"bytes":"multiple 
+ event ids from same source"}}},{"indexid":"167969202","indexTime":1536172678771,"id":"urn:usgs-product:admin:associate:us2000h8ty_at00pelgzo:1536172677508","type":"associate","code":"us2000h8ty_at00pelgzo","source":"admin","updateTime":1536172677508,"status":"UPDATE","properties":{"eventsource":"us","eventsourcecode":"2000h8ty","othereventsource":"at","othereventsourcecode":"00pelgzo","pdl-client-version":"Version + 1.10.1 2016-04-07"},"preferredWeight":1,"contents":{"":{"contentType":"1","lastModified":1536172677000,"length":36,"bytes":"outside + automatic association window"}}}],"dyfi":[{"indexid":"170437402","indexTime":1541327651890,"id":"urn:usgs-product:us:dyfi:us2000h8ty:1541327647432","type":"dyfi","code":"us2000h8ty","source":"us","updateTime":1541327647432,"status":"UPDATE","properties":{"depth":"35","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:07:59.000Z","latitude":"42.6864","longitude":"141.9303","magnitude":"6.6","maxmmi":"8.1","num-responses":"98","numResp":"98","pdl-client-version":"Version + 1.13.3 2018-02-06"},"preferredWeight":156,"contents":{"cdi_geo.txt":{"contentType":"text/plain","lastModified":1541327646000,"length":2820,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/cdi_geo.txt"},"cdi_geo.xml":{"contentType":"application/xml","lastModified":1541327646000,"length":10191,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/cdi_geo.xml"},"cdi_geo_1km.txt":{"contentType":"text/plain","lastModified":1541327646000,"length":5916,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/cdi_geo_1km.txt"},"cdi_zip.txt":{"contentType":"text/plain","lastModified":1541327646000,"length":2139,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/cdi_zip.txt"},"cdi_zip.xml":{"contentType":"application/xml","lastModified":1541327646000,"length":6179,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/cdi_zip.xml"},"contents.xml":{"contentType":"application/xml","lastModified":1541327646000,"length":4224,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/contents.xml"},"dyfi.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541327646000,"length":1170,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi.kmz"},"dyfi_geo.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541327646000,"length":3102,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_geo.kmz"},"dyfi_geo_10km.geojson":{"contentType":"application/octet-stream","lastModified":1541327646000,"length":9576,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_geo_10km.geojson"},"dyfi_geo_1km.geojson":{"contentType":"application/octet-stream","lastModified":1541327646000,"length":17530,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_geo_1km.geojson"},"dyfi_plot_atten.json":{"contentType":"application/json","lastModified":1541327646000,"length":3602,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_plot_atten.json"},"dyfi_plot_numresp.json":{"contentType":"application/json","lastModified":1541327646000,"length":5249,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_plot_numresp.json"},"dyfi_zip.geojson":{"contentType":"application/octet-stream","la
stModified":1541327646000,"length":4361,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_zip.geojson"},"dyfi_zip.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541327646000,"length":2331,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/dyfi_zip.kmz"},"event_data.xml":{"contentType":"application/xml","lastModified":1541327646000,"length":3040,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/event_data.xml"},"us2000h8ty_ciim.jpg":{"contentType":"image/jpeg","lastModified":1541327646000,"length":85877,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim.jpg"},"us2000h8ty_ciim.pdf":{"contentType":"application/pdf","lastModified":1541327646000,"length":229585,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim.pdf"},"us2000h8ty_ciim.ps":{"contentType":"application/postscript","lastModified":1541327646000,"length":466962,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim.ps"},"us2000h8ty_ciim_geo.jpg":{"contentType":"image/jpeg","lastModified":1541327646000,"length":83190,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim_geo.jpg"},"us2000h8ty_ciim_geo.pdf":{"contentType":"application/pdf","lastModified":1541327646000,"length":228057,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim_geo.pdf"},"us2000h8ty_ciim_geo.ps":{"contentType":"application/postscript","lastModified":1541327646000,"length":462579,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim_geo.ps"},"us2000h8ty_ciim_geo_imap.html":{"contentType":"text/html","lastModified":1541327646000,"length":8684,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim_geo_imap.html"},"us2000h8ty_ciim_imap.html":{"contentType":"text/html","lastModified":1541327646000,"length":4228,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_ciim_imap.html"},"us2000h8ty_plot_atten.jpg":{"contentType":"image/jpeg","lastModified":1541327646000,"length":52048,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_plot_atten.jpg"},"us2000h8ty_plot_atten.ps":{"contentType":"application/postscript","lastModified":1541327646000,"length":52639,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_plot_atten.ps"},"us2000h8ty_plot_atten.txt":{"contentType":"text/plain","lastModified":1541327646000,"length":2650,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_plot_atten.txt"},"us2000h8ty_plot_numresp.jpg":{"contentType":"image/jpeg","lastModified":1541327646000,"length":37336,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_plot_numresp.jpg"},"us2000h8ty_plot_numresp.ps":{"contentType":"application/postscript","lastModified":1541327646000,"length":35420,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_plot_numresp.ps"},"us2000h8ty_plot_numresp.txt":{"contentType":"text/plain","lastModified":1541327646000,"length":3120,"url":"https://earthquake.usgs.gov/archive/product/dyfi/us2000h8ty/us/1541327647432/us2000h8ty_plot_numresp.txt"}}}],"general-text":[{"in
dexid":"168010922","indexTime":1536248320721,"id":"urn:usgs-product:admin:general-text:us2000h8ty-1536248267441:1536248319183","type":"general-text","code":"us2000h8ty-1536248267441","source":"admin","updateTime":1536248319183,"status":"UPDATE","properties":{"eventsource":"us","eventsourcecode":"2000h8ty","pdl-client-version":"Version + 1.10.1 2016-04-07","review-status":"Reviewed"},"preferredWeight":1,"contents":{"":{"contentType":"1","lastModified":1536248319000,"length":2894,"bytes":"

Tectonic + Summary

\n

\nThe September 5, 2018, M 6.6 earthquake east of Tomakomai, + on the island of Hokkaido, Japan, occurred as the result of shallow reverse + faulting. Focal mechanism solutions for the earthquake indicate faulting occurred + on either a moderately dipping reverse fault striking northwest, or on a shallow-to-moderately + dipping fault striking southeast. At the location of this earthquake, the + Pacific plate is moving towards the west-northwest at a velocity of about + 87 mm/yr relative to the North America plate, subducting beneath Japan and + Eurasia at the Japan and Kuril-Kamchatka Trenches to the east of the September + 5th earthquake. Note that some authors divide this region into several microplates + that together define the relative motions between among the larger Pacific, + North America and Eurasia plates; these include the Okhotsk and Amur microplates + that are part of North America and Eurasia, respectively. Given the depth + and focal mechanism solutions of this earthquake, the event likely represents + rupture of a fault within the North America (upper) plate (or Okhotsk microplate), + rather than on the subduction zone plate boundary interface between the Pacific + and North America plates, which is at a depth of approximately 100 km at the + location of this earthquake. Slip on a fault aligned with either nodal plane + of the focal mechanism solution is consistent with this intraplate setting.\n

\nThis + reverse-faulting earthquake occurred in a collisional belt whose tectonics + are driven by the convergence of the Pacific, Eurasia and North America plates. + Hokkaido is made up of accreted terrain that roughly parallels the plate boundary + to the east, and today\u2019s earthquake is located in the Oshima Belt region. + Japanese seismic hazard maps indicate this earthquake was located in close + proximity to the Nohoro-Kyuryo fault zone, with high hazard and an expected + maximum magnitude of M~7.\n

\nWhile large earthquakes are common in + Japan, and regularly cause damage, few moderate-to-large shallow intraplate + earthquakes have occurred in the vicinity of today\u2019s earthquake. Over + the preceding century, 70 earthquakes of M 6 or larger have occurred within + 250 km of the September 5th earthquake, though only 6 of these occurred at + shallow depths beneath Hokkaido. The most recent of these six upper plate + events was a M 6.4 earthquake about 100 km to the east of today\u2019s event + in January, 1970. \n

\nOver the first 14 hours following the September + 5th earthquake, the USGS have located 7 aftershocks of M 4.3 and larger. Two + of these were larger than M 5, and the largest was M 5.4. News reports indicate + that significant landsliding was triggered by the M 6.6 mainshock, burying + houses in the epicentral region. At the time of writing, there have been 7 + reported fatalities, hundreds of injuries, and dozens of people reported missing. + \n

"}}}],"geoserve":[{"indexid":"170522572","indexTime":1541539102280,"id":"urn:usgs-product:us:geoserve:us2000h8ty:1541539090040","type":"geoserve","code":"us2000h8ty","source":"us","updateTime":1541539090040,"status":"UPDATE","properties":{"eventsource":"us","eventsourcecode":"2000h8ty","location":"27km + ENE of Tomakomai, Japan","pdl-client-version":"Version 1.14.1 2018-04-26","tsunamiFlag":"false","utcOffset":"540"},"preferredWeight":6,"contents":{"geoserve.json":{"contentType":"application/json","lastModified":1541539101000,"length":1049,"url":"https://earthquake.usgs.gov/archive/product/geoserve/us2000h8ty/us/1541539090040/geoserve.json"}}},{"indexid":"167968822","indexTime":1536172010206,"id":"urn:usgs-product:us:geoserve:at00pelgzo:1536172005904","type":"geoserve","code":"at00pelgzo","source":"us","updateTime":1536172005904,"status":"UPDATE","properties":{"eventsource":"at","eventsourcecode":"00pelgzo","location":"31km + ESE of Chitose, Japan","pdl-client-version":"Version 1.14.1 2018-04-26","tsunamiFlag":"false","utcOffset":"540"},"preferredWeight":1,"contents":{"geoserve.json":{"contentType":"application/json","lastModified":1536172009000,"length":1047,"url":"https://earthquake.usgs.gov/archive/product/geoserve/at00pelgzo/us/1536172005904/geoserve.json"}}},{"indexid":"167968632","indexTime":1536171771950,"id":"urn:usgs-product:us:geoserve:pt18248000:1536171767831","type":"geoserve","code":"pt18248000","source":"us","updateTime":1536171767831,"status":"UPDATE","properties":{"eventsource":"pt","eventsourcecode":"18248000","location":"31km + ESE of Chitose, Japan","pdl-client-version":"Version 1.14.1 2018-04-26","tsunamiFlag":"false","utcOffset":"540"},"preferredWeight":1,"contents":{"geoserve.json":{"contentType":"application/json","lastModified":1536171770000,"length":1047,"url":"https://earthquake.usgs.gov/archive/product/geoserve/pt18248000/us/1536171767831/geoserve.json"}}}],"ground-failure":[{"indexid":"170523202","indexTime":1541540346055,"id":"urn:usgs-product:us:ground-failure:2000h8ty:1541539883354","type":"ground-failure","code":"2000h8ty","source":"us","updateTime":1541539883354,"status":"UPDATE","properties":{"depth":"35.0","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:07:59.000Z","landslide-alert":"orange","landslide-hazard-alert-color":"orange","landslide-hazard-alert-parameter":"Aggregate + Hazard","landslide-hazard-alert-value":"12.0","landslide-maximum-latitude":"46.36145833333333","landslide-maximum-longitude":"146.93020833332423","landslide-minimum-latitude":"39.011458333340016","landslide-minimum-longitude":"136.93020833333333","landslide-overlay":"jessee_2017.png","landslide-population-alert-color":"green","landslide-population-alert-parameter":"Population + exposure","landslide-population-alert-value":"54","latitude":"42.6861","liquefaction-alert":"red","liquefaction-hazard-alert-color":"orange","liquefaction-hazard-alert-parameter":"Aggregate + Hazard","liquefaction-hazard-alert-value":"229.99999999999997","liquefaction-maximum-latitude":"46.364601075791974","liquefaction-maximum-longitude":"146.92979313113221","liquefaction-minimum-latitude":"39.015038601831215","liquefaction-minimum-longitude":"136.9300246179838","liquefaction-overlay":"zhu_2017_general.png","liquefaction-population-alert-color":"red","liquefaction-population-alert-parameter":"Population + 
exposure","liquefaction-population-alert-value":"130000","longitude":"141.9294","magnitude":"6.6","maximum-latitude":"43.43352556534491","maximum-longitude":"142.4333333333333","minimum-latitude":"42.333591045433","minimum-longitude":"140.79035192108498","pdl-client-version":"Version + 1.14.0 2018-04-02","rupture-warning":"True","shakemap-version":"10","version":"4"},"preferredWeight":156,"contents":{"contents.xml":{"contentType":"application/xml","lastModified":1541539882000,"length":1606,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/contents.xml"},"godt_2008.hdf5":{"contentType":"application/octet-stream","lastModified":1541539881000,"length":63885,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/godt_2008.hdf5"},"godt_2008.png":{"contentType":"image/png","lastModified":1541539881000,"length":10949,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/godt_2008.png"},"godt_2008_model.flt":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":4233728,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/godt_2008_model.flt"},"godt_2008_model.hdr":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":105,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/godt_2008_model.hdr"},"godt_2008_model.tif":{"contentType":"image/tiff","lastModified":1541539881000,"length":4241034,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/godt_2008_model.tif"},"info.json":{"contentType":"application/json","lastModified":1541539881000,"length":4303,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/info.json"},"jessee_2017.hdf5":{"contentType":"application/octet-stream","lastModified":1541539881000,"length":6473687,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/jessee_2017.hdf5"},"jessee_2017.png":{"contentType":"image/png","lastModified":1541539881000,"length":158544,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/jessee_2017.png"},"jessee_2017_model.flt":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":67737728,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/jessee_2017_model.flt"},"jessee_2017_model.hdr":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":106,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/jessee_2017_model.hdr"},"jessee_2017_model.tif":{"contentType":"image/tiff","lastModified":1541539881000,"length":67766202,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/jessee_2017_model.tif"},"nowicki_2014_global.hdf5":{"contentType":"application/octet-stream","lastModified":1541539881000,"length":2880646,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/nowicki_2014_global.hdf5"},"nowicki_2014_global.png":{"contentType":"image/png","lastModified":1541539881000,"length":89696,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/nowicki_2014_global.png"},"nowicki_2014_global_model.flt":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":16934528,"url":"https://earthquake.usgs.gov/archive/
product/ground-failure/2000h8ty/us/1541539883354/nowicki_2014_global_model.flt"},"nowicki_2014_global_model.hdr":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":106,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/nowicki_2014_global_model.hdr"},"nowicki_2014_global_model.tif":{"contentType":"image/tiff","lastModified":1541539881000,"length":16948890,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/nowicki_2014_global_model.tif"},"zhu_2015.hdf5":{"contentType":"application/octet-stream","lastModified":1541539881000,"length":3027049,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2015.hdf5"},"zhu_2015_model.flt":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":16934528,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2015_model.flt"},"zhu_2015_model.hdr":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":106,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2015_model.hdr"},"zhu_2015_model.tif":{"contentType":"image/tiff","lastModified":1541539881000,"length":16948890,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2015_model.tif"},"zhu_2017_general.hdf5":{"contentType":"application/octet-stream","lastModified":1541539881000,"length":348403,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2017_general.hdf5"},"zhu_2017_general.png":{"contentType":"image/png","lastModified":1541539881000,"length":55600,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2017_general.png"},"zhu_2017_general_model.flt":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":16934528,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2017_general_model.flt"},"zhu_2017_general_model.hdr":{"contentType":"application/octet-stream","lastModified":1541539882000,"length":106,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2017_general_model.hdr"},"zhu_2017_general_model.tif":{"contentType":"image/tiff","lastModified":1541539881000,"length":16948890,"url":"https://earthquake.usgs.gov/archive/product/ground-failure/2000h8ty/us/1541539883354/zhu_2017_general_model.tif"}}}],"impact-link":[{"indexid":"167968832","indexTime":1536172011148,"id":"urn:usgs-product:at:impact-link:at00pelgzo-tsunamilinks_at_v1:1536172008590","type":"impact-link","code":"at00pelgzo-tsunamilinks_at_v1","source":"at","updateTime":1536172008590,"status":"UPDATE","properties":{"addon-code":"TsunamiLinks_AT_v1","addon-type":"LinkURL","eventsource":"at","eventsourcecode":"00pelgzo","text":"1-Tsunami + Information Statement from the NTWC","url":"http://ntwc.arh.noaa.gov/events/PAAQ/2018/09/05/pelgzo/1/WEAK53/WEAK53.txt","version":"01"},"preferredWeight":6,"contents":[]},{"indexid":"167968672","indexTime":1536171798465,"id":"urn:usgs-product:pt:impact-link:pt18248000-tsunamilinks_hi:1536171795844","type":"impact-link","code":"pt18248000-tsunamilinks_hi","source":"pt","updateTime":1536171795844,"status":"UPDATE","properties":{"addon-code":"TsunamiLinks_HI","addon-type":"LinkURL","eventsource":"pt","eventsourcecode":"18248000","text":"Tsunami + Information for Hawaii from the 
PTWC","url":"http://www.weather.gov/ptwc/index.php?region=2","version":"01"},"preferredWeight":6,"contents":[]},{"indexid":"167968622","indexTime":1536171771221,"id":"urn:usgs-product:pt:impact-link:pt18248000-tsunamilinks_pi:1536171770081","type":"impact-link","code":"pt18248000-tsunamilinks_pi","source":"pt","updateTime":1536171770081,"status":"UPDATE","properties":{"addon-code":"TsunamiLinks_PI","addon-type":"LinkURL","eventsource":"pt","eventsourcecode":"18248000","text":"Tsunami + Information Bulletin from the PTWC","url":"http://www.weather.gov/ptwc/index.php?region=1","version":"01"},"preferredWeight":6,"contents":[]}],"impact-text":[{"indexid":"170813142","indexTime":1542217272044,"id":"urn:usgs-product:us:impact-text:us2000h8ty:1542217214040","type":"impact-text","code":"us2000h8ty","source":"us","updateTime":1542217214040,"status":"UPDATE","properties":{"eventParametersPublicID":"quakeml:us.anss.org/eventparameters/2000h8ty/1542217222","eventsource":"us","eventsourcecode":"2000h8ty","pdl-client-version":"Version + 1.15.0-rc 2018-09-13"},"preferredWeight":6,"contents":{"":{"contentType":"text/plain","lastModified":1542217214000,"length":439,"bytes":"Forty-one + people killed, including 36 from several landslides at Atsuma, and about 680 + people injured in southern Hokkaido. Numerous buildings and roads damaged + or destroyed at Sapporo. Dozens of houses destroyed from multiple landslides + at Atsuma. Damage and fires occurred at Hokkaido''s largest coal-fired plant + causing power outages throughout the island. Liquefaction damaged roads in + the epicentral area, as far away as Kiyota-ku.\n\n"}}}],"losspager":[{"indexid":"170522732","indexTime":1541539378300,"id":"urn:usgs-product:us:losspager:us2000h8ty:1541539345392","type":"losspager","code":"us2000h8ty","source":"us","updateTime":1541539345392,"status":"UPDATE","properties":{"alertlevel":"orange","depth":"35.0","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-06T00:07:59.000Z","latitude":"42.6861","longitude":"141.9294","magnitude":"6.6","maxmmi":"9","pdl-client-version":"Version + 1.14.1 
2018-04-26"},"preferredWeight":156,"contents":{"alertecon.pdf":{"contentType":"application/pdf","lastModified":1541539316000,"length":12032,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertecon.pdf"},"alertecon.png":{"contentType":"image/png","lastModified":1541539316000,"length":14617,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertecon.png"},"alertecon_small.png":{"contentType":"image/png","lastModified":1541539316000,"length":6091,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertecon_small.png"},"alertecon_smaller.png":{"contentType":"image/png","lastModified":1541539316000,"length":3783,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertecon_smaller.png"},"alertfatal.pdf":{"contentType":"application/pdf","lastModified":1541539315000,"length":10005,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertfatal.pdf"},"alertfatal.png":{"contentType":"image/png","lastModified":1541539315000,"length":13608,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertfatal.png"},"alertfatal_small.png":{"contentType":"image/png","lastModified":1541539315000,"length":5727,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertfatal_small.png"},"alertfatal_smaller.png":{"contentType":"image/png","lastModified":1541539315000,"length":3481,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/alertfatal_smaller.png"},"contents.xml":{"contentType":"application/xml","lastModified":1541539345000,"length":2951,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/contents.xml"},"event.log":{"contentType":"application/octet-stream","lastModified":1541539343000,"length":1220,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/event.log"},"exposure.pdf":{"contentType":"application/pdf","lastModified":1541539338000,"length":614375,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/exposure.pdf"},"exposure.png":{"contentType":"image/png","lastModified":1541539338000,"length":182886,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/exposure.png"},"grid.xml":{"contentType":"application/xml","lastModified":1541539279000,"length":15376958,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/grid.xml"},"json/alerts.json":{"contentType":"application/json","lastModified":1541539343000,"length":1451,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/alerts.json"},"json/cities.json":{"contentType":"application/json","lastModified":1541539343000,"length":12777,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/cities.json"},"json/comments.json":{"contentType":"application/json","lastModified":1541539343000,"length":1044,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/comments.json"},"json/event.json":{"contentType":"application/json","lastModified":1541539343000,"length":707,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/event.json"},"json/exposures.json":{"contentType":"application/json","lastModified":1541539343000,"length":1040,"url":"http
s://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/exposures.json"},"json/historical_earthquakes.json":{"contentType":"application/json","lastModified":1541539343000,"length":1587,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/historical_earthquakes.json"},"json/losses.json":{"contentType":"application/json","lastModified":1541539343000,"length":1930,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/losses.json"},"onepager.aux":{"contentType":"application/octet-stream","lastModified":1541539345000,"length":166,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/onepager.aux"},"onepager.log":{"contentType":"application/octet-stream","lastModified":1541539345000,"length":27140,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/onepager.log"},"onepager.pdf":{"contentType":"application/pdf","lastModified":1541539345000,"length":926731,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/onepager.pdf"},"onepager.tex":{"contentType":"application/x-tex","lastModified":1541539344000,"length":17979,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/onepager.tex"},"pager.xml":{"contentType":"application/xml","lastModified":1541539343000,"length":12109,"url":"https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/pager.xml"}}}],"moment-tensor":[{"indexid":"170813162","indexTime":1542217273026,"id":"urn:usgs-product:us:moment-tensor:us_2000h8ty_mww:1542217214040","type":"moment-tensor","code":"us_2000h8ty_mww","source":"us","updateTime":1542217214040,"status":"UPDATE","properties":{"beachball-source":"us","depth":"40.5","derived-depth":"40.5","derived-eventtime":"2018-09-05T18:08:00.400Z","derived-latitude":"42.7024","derived-longitude":"142.4522","derived-magnitude":"6.6","derived-magnitude-type":"Mww","evaluation-status":"reviewed","eventParametersPublicID":"quakeml:us.anss.org/eventparameters/2000h8ty/1542217222","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:08:00.400Z","latitude":"42.7024","longitude":"141.9079","n-axis-azimuth":"336","n-axis-length":"1.15266E+18","n-axis-plunge":"6","nodal-plane-1-dip":"30.14","nodal-plane-1-rake":"102.38","nodal-plane-1-strike":"167.11","nodal-plane-2-dip":"60.63","nodal-plane-2-rake":"82.91","nodal-plane-2-strike":"332.87","p-axis-azimuth":"68","p-axis-length":"-1.06911E+19","p-axis-plunge":"15","pdl-client-version":"Version + 1.15.0-rc 
2018-09-13","percent-double-couple":"0.7844","quakeml-publicid":"quakeml:us.anss.org/focalmechanism/2000h8ty/mww","review-status":"reviewed","scalar-moment":"1.01E+19","sourcetime-decaytime":"4.96","sourcetime-duration":"9.92","sourcetime-risetime":"4.96","sourcetime-type":"triangle","t-axis-azimuth":"225","t-axis-length":"9.53847E+18","t-axis-plunge":"73","tensor-mpp":"-7.981E+18","tensor-mrp":"4.432E+18","tensor-mrr":"8.023E+18","tensor-mrt":"-2.751E+18","tensor-mtp":"3.473E+18","tensor-mtt":"-4.2E+16"},"preferredWeight":216,"contents":{"contents.xml":{"contentType":"application/xml","lastModified":1542217266000,"length":195,"url":"https://earthquake.usgs.gov/archive/product/moment-tensor/us_2000h8ty_mww/us/1542217214040/contents.xml"},"quakeml.xml":{"contentType":"application/xml","lastModified":1542217214000,"length":9625,"url":"https://earthquake.usgs.gov/archive/product/moment-tensor/us_2000h8ty_mww/us/1542217214040/quakeml.xml"}}},{"indexid":"170813152","indexTime":1542217272584,"id":"urn:usgs-product:us:moment-tensor:us_2000h8ty_mwb:1542217214040","type":"moment-tensor","code":"us_2000h8ty_mwb","source":"us","updateTime":1542217214040,"status":"UPDATE","properties":{"beachball-source":"us","depth":"34.2","derived-depth":"43","derived-eventtime":"1970-01-01T00:00:00.000Z","derived-latitude":"0.0000","derived-longitude":"0.0000","derived-magnitude":"6.6","derived-magnitude-type":"Mwb","evaluation-status":"reviewed","eventParametersPublicID":"quakeml:us.anss.org/eventparameters/2000h8ty/1542217222","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:07:59.000Z","latitude":"42.6812","longitude":"141.9158","n-axis-azimuth":"171","n-axis-length":"8.49855E+17","n-axis-plunge":"16","nodal-plane-1-dip":"69.5","nodal-plane-1-rake":"106.72","nodal-plane-1-strike":"357.05","nodal-plane-2-dip":"26.22","nodal-plane-2-rake":"52.43","nodal-plane-2-strike":"136.43","p-axis-azimuth":"74","p-axis-length":"-1.06568E+19","p-axis-plunge":"23","pdl-client-version":"Version + 1.15.0-rc 2018-09-13","percent-double-couple":"0.8405","quakeml-publicid":"quakeml:us.anss.org/focalmechanism/2000h8ty/mwb","review-status":"reviewed","scalar-moment":"1.03E+19","t-axis-azimuth":"293","t-axis-length":"9.80693E+18","t-axis-plunge":"62","tensor-mpp":"-6.525E+18","tensor-mrp":"7.387E+18","tensor-mrr":"6.097E+18","tensor-mrt":"3.22E+17","tensor-mtp":"3.255E+18","tensor-mtt":"4.28E+17"},"preferredWeight":157,"contents":{"contents.xml":{"contentType":"application/xml","lastModified":1542217266000,"length":195,"url":"https://earthquake.usgs.gov/archive/product/moment-tensor/us_2000h8ty_mwb/us/1542217214040/contents.xml"},"quakeml.xml":{"contentType":"application/xml","lastModified":1542217214000,"length":28017,"url":"https://earthquake.usgs.gov/archive/product/moment-tensor/us_2000h8ty_mwb/us/1542217214040/quakeml.xml"}}}],"origin":[{"indexid":"170813112","indexTime":1542217268257,"id":"urn:usgs-product:us:origin:us2000h8ty:1542217214040","type":"origin","code":"us2000h8ty","source":"us","updateTime":1542217214040,"status":"UPDATE","properties":{"azimuthal-gap":"32","depth":"35","depth-type":"from + modeling of broad-band P 
waveforms","error-ellipse-azimuth":"278","error-ellipse-intermediate":"9100","error-ellipse-major":"10800","error-ellipse-minor":"2900","error-ellipse-plunge":"0","error-ellipse-rotation":"271","evaluation-status":"reviewed","event-type":"earthquake","eventParametersPublicID":"quakeml:us.anss.org/eventparameters/2000h8ty/1542217222","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:07:59.150Z","eventtime-error":"1.51","horizontal-error":"6.5","latitude":"42.6861","latitude-error":"0.0540","longitude":"141.9294","longitude-error":"0.0869","magnitude":"6.6","magnitude-error":"0.062","magnitude-num-stations-used":"25","magnitude-source":"us","magnitude-type":"mww","minimum-distance":"1.130","num-phases-used":"162","origin-source":"us","pdl-client-version":"Version + 1.15.0-rc 2018-09-13","quakeml-magnitude-publicid":"quakeml:us.anss.org/magnitude/2000h8ty/mww","quakeml-origin-publicid":"quakeml:us.anss.org/origin/2000h8ty","quakeml-publicid":"quakeml:us.anss.org/event/2000h8ty","review-status":"reviewed","standard-error":"0.74","vertical-error":"1.9"},"preferredWeight":156,"contents":{"contents.xml":{"contentType":"application/xml","lastModified":1542217266000,"length":195,"url":"https://earthquake.usgs.gov/archive/product/origin/us2000h8ty/us/1542217214040/contents.xml"},"quakeml.xml":{"contentType":"application/xml","lastModified":1542217214000,"length":3687,"url":"https://earthquake.usgs.gov/archive/product/origin/us2000h8ty/us/1542217214040/quakeml.xml"}}},{"indexid":"167968812","indexTime":1536172007659,"id":"urn:usgs-product:at:origin:at00pelgzo:1536172005904","type":"origin","code":"at00pelgzo","source":"at","updateTime":1536172005904,"status":"UPDATE","properties":{"azimuthal-gap":"255.5999795520016358398691328104694","cube-location-method":"l","cube-magnitude-type":"I","depth":"39","depth-method":"Fixed","event-type":"earthquake","eventsource":"at","eventsourcecode":"00pelgzo","eventtime":"2018-09-05T18:07:00.000Z","horizontal-error":"0","latitude":"42.7","location-method-algorithm":"l","location-method-class":"CUBE_Code","longitude":"142","magnitude":"6.7","magnitude-num-stations-used":"5","magnitude-type":"Mi","minimum-distance":"4.575120","num-phases-used":"4","num-stations-used":"4","review-status":"REVIEWED","standard-error":"0.37","version":"1","vertical-error":"0"},"preferredWeight":6,"contents":{"contents.xml":{"contentType":"text/plain","lastModified":1536172005000,"length":272,"url":"https://earthquake.usgs.gov/archive/product/origin/at00pelgzo/at/1536172005904/contents.xml"},"eqxml.xml":{"contentType":"text/plain","lastModified":1536172005000,"length":1026,"url":"https://earthquake.usgs.gov/archive/product/origin/at00pelgzo/at/1536172005904/eqxml.xml"}}},{"indexid":"167968612","indexTime":1536171769095,"id":"urn:usgs-product:pt:origin:pt18248000:1536171767831","type":"origin","code":"pt18248000","source":"pt","updateTime":1536171767831,"status":"UPDATE","properties":{"azimuthal-gap":"147.5999881920009446399244288060457","cube-location-method":"u","cube-magnitude-type":"I","depth":"39","event-type":"earthquake","eventsource":"pt","eventsourcecode":"18248000","eventtime":"2018-09-05T18:07:00.000Z","latitude":"42.7","location-method-algorithm":"u","location-method-class":"CUBE_Code","longitude":"142","magnitude":"6.7","magnitude-num-stations-used":"23","magnitude-type":"Mi","num-stations-used":"16","review-status":"REVIEWED","version":"B"},"preferredWeight":6,"contents":{"contents.xml":{"contentType":"text/plain","lastModified":1536171767000,"length":2
72,"url":"https://earthquake.usgs.gov/archive/product/origin/pt18248000/pt/1536171767831/contents.xml"},"eqxml.xml":{"contentType":"text/plain","lastModified":1536171767000,"length":890,"url":"https://earthquake.usgs.gov/archive/product/origin/pt18248000/pt/1536171767831/eqxml.xml"}}}],"phase-data":[{"indexid":"170813132","indexTime":1542217271680,"id":"urn:usgs-product:us:phase-data:us2000h8ty:1542217214040","type":"phase-data","code":"us2000h8ty","source":"us","updateTime":1542217214040,"status":"UPDATE","properties":{"azimuthal-gap":"32","depth":"35","depth-type":"from + modeling of broad-band P waveforms","error-ellipse-azimuth":"278","error-ellipse-intermediate":"9100","error-ellipse-major":"10800","error-ellipse-minor":"2900","error-ellipse-plunge":"0","error-ellipse-rotation":"271","evaluation-status":"reviewed","event-type":"earthquake","eventParametersPublicID":"quakeml:us.anss.org/eventparameters/2000h8ty/1542217222","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:07:59.150Z","eventtime-error":"1.51","horizontal-error":"6.5","latitude":"42.6861","latitude-error":"0.0540","longitude":"141.9294","longitude-error":"0.0869","magnitude":"6.6","magnitude-error":"0.062","magnitude-num-stations-used":"25","magnitude-source":"us","magnitude-type":"mww","minimum-distance":"1.130","num-phases-used":"162","origin-source":"us","pdl-client-version":"Version + 1.15.0-rc 2018-09-13","quakeml-magnitude-publicid":"quakeml:us.anss.org/magnitude/2000h8ty/mww","quakeml-origin-publicid":"quakeml:us.anss.org/origin/2000h8ty","quakeml-publicid":"quakeml:us.anss.org/event/2000h8ty","review-status":"reviewed","standard-error":"0.74","vertical-error":"1.9"},"preferredWeight":156,"contents":{"contents.xml":{"contentType":"application/xml","lastModified":1542217266000,"length":195,"url":"https://earthquake.usgs.gov/archive/product/phase-data/us2000h8ty/us/1542217214040/contents.xml"},"quakeml.xml":{"contentType":"application/xml","lastModified":1542217214000,"length":2928967,"url":"https://earthquake.usgs.gov/archive/product/phase-data/us2000h8ty/us/1542217214040/quakeml.xml"}}}],"shakemap":[{"indexid":"170522682","indexTime":1541539275951,"id":"urn:usgs-product:us:shakemap:us2000h8ty:1541539192315","type":"shakemap","code":"us2000h8ty","source":"us","updateTime":1541539192315,"status":"UPDATE","properties":{"depth":"35","event-description":"HOKKAIDO, + JAPAN REGION","event-type":"ACTUAL","eventsource":"us","eventsourcecode":"2000h8ty","eventtime":"2018-09-05T18:07:59.000Z","latitude":"42.686100","longitude":"141.929400","magnitude":"6.6","map-status":"RELEASED","maximum-latitude":"46.361100","maximum-longitude":"146.929400","maxmmi":"8.62","maxmmi-grid":"8.62","maxpga":"83.78","maxpga-grid":"83.78","maxpgv":"130.12","maxpgv-grid":"130.12","maxpsa03":"290.06","maxpsa03-grid":"290.06","maxpsa10":"131.68","maxpsa10-grid":"131.68","maxpsa30":"15.21","maxpsa30-grid":"15.21","minimum-latitude":"39.011100","minimum-longitude":"136.929400","overlayHeight":"344","overlayWidth":"468","pdl-client-version":"Version + 1.13.3 
2018-02-06","process-timestamp":"2018-11-06T21:18:52Z","version":"10"},"preferredWeight":231,"contents":{"about_formats.html":{"contentType":"text/html","lastModified":1541539190000,"length":28820,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/about_formats.html"},"contents.xml":{"contentType":"application/xml","lastModified":1541539191000,"length":9187,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/contents.xml"},"download/2000h8ty.kml":{"contentType":"application/vnd.google-earth.kml+xml","lastModified":1541539188000,"length":1032,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/2000h8ty.kml"},"download/cont_mi.json":{"contentType":"application/json","lastModified":1541539188000,"length":281436,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_mi.json"},"download/cont_mi.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":60960,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_mi.kmz"},"download/cont_pga.json":{"contentType":"application/json","lastModified":1541539188000,"length":48192,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_pga.json"},"download/cont_pga.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":10799,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_pga.kmz"},"download/cont_pgv.json":{"contentType":"application/json","lastModified":1541539188000,"length":21196,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_pgv.json"},"download/cont_pgv.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":5131,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_pgv.kmz"},"download/cont_psa03.json":{"contentType":"application/json","lastModified":1541539188000,"length":27972,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_psa03.json"},"download/cont_psa03.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":6521,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_psa03.kmz"},"download/cont_psa10.json":{"contentType":"application/json","lastModified":1541539188000,"length":24785,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_psa10.json"},"download/cont_psa10.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":5869,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_psa10.kmz"},"download/cont_psa30.json":{"contentType":"application/json","lastModified":1541539188000,"length":57635,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_psa30.json"},"download/cont_psa30.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":12660,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/cont_psa30.kmz"},"download/epicenter.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":1289,"url":"htt
ps://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/epicenter.kmz"},"download/event.txt":{"contentType":"text/plain","lastModified":1541539185000,"length":121,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/event.txt"},"download/grid.xml":{"contentType":"application/xml","lastModified":1541539185000,"length":15376958,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/grid.xml"},"download/grid.xml.zip":{"contentType":"application/zip","lastModified":1541539187000,"length":2596667,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/grid.xml.zip"},"download/grid.xyz.zip":{"contentType":"application/zip","lastModified":1541539188000,"length":2060813,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/grid.xyz.zip"},"download/hazus.zip":{"contentType":"application/zip","lastModified":1541539188000,"length":2547335,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/hazus.zip"},"download/ii_overlay.png":{"contentType":"image/png","lastModified":1541539188000,"length":55356,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/ii_overlay.png"},"download/ii_thumbnail.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":3377,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/ii_thumbnail.jpg"},"download/info.json":{"contentType":"application/json","lastModified":1541539185000,"length":2274,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/info.json"},"download/intensity.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":83588,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/intensity.jpg"},"download/intensity.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":250217,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/intensity.ps.zip"},"download/metadata.txt":{"contentType":"text/plain","lastModified":1541539188000,"length":33138,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/metadata.txt"},"download/mi_regr.png":{"contentType":"image/png","lastModified":1541539185000,"length":50139,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/mi_regr.png"},"download/overlay.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":55756,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/overlay.kmz"},"download/pga.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":72343,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/pga.jpg"},"download/pga.ps.zip":{"contentType":"application/zip","lastModified":1541539186000,"length":716160,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/pga.ps.zip"},"download/pga_regr.png":{"contentType":"image/png","lastModified":1541539185000,"length":54680,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/pga_regr.png"},"download/pgv.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length"
:72343,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/pgv.jpg"},"download/pgv.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":715152,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/pgv.ps.zip"},"download/pgv_regr.png":{"contentType":"image/png","lastModified":1541539185000,"length":52308,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/pgv_regr.png"},"download/polygons_mi.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":145228,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/polygons_mi.kmz"},"download/psa03.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":71685,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa03.jpg"},"download/psa03.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":715497,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa03.ps.zip"},"download/psa03_regr.png":{"contentType":"image/png","lastModified":1541539185000,"length":55832,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa03_regr.png"},"download/psa10.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":71332,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa10.jpg"},"download/psa10.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":715293,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa10.ps.zip"},"download/psa10_regr.png":{"contentType":"image/png","lastModified":1541539185000,"length":53483,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa10_regr.png"},"download/psa30.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":71709,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa30.jpg"},"download/psa30.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":716438,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa30.ps.zip"},"download/psa30_regr.png":{"contentType":"image/png","lastModified":1541539185000,"length":52292,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/psa30_regr.png"},"download/raster.zip":{"contentType":"application/zip","lastModified":1541539190000,"length":7481301,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/raster.zip"},"download/rock_grid.xml.zip":{"contentType":"application/zip","lastModified":1541539186000,"length":1896445,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/rock_grid.xml.zip"},"download/sd.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":71354,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/sd.jpg"},"download/shape.zip":{"contentType":"application/zip","lastModified":1541539188000,"length":6613711,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/shape.zip"},"download/stationlist.json":{"contentType":"application/json","la
stModified":1541539191000,"length":634734,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/stationlist.json"},"download/stationlist.txt":{"contentType":"text/plain","lastModified":1541539190000,"length":86289,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/stationlist.txt"},"download/stationlist.xml":{"contentType":"application/xml","lastModified":1541539185000,"length":383216,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/stationlist.xml"},"download/stations.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":29934,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/stations.kmz"},"download/tvguide.txt":{"contentType":"text/plain","lastModified":1541539185000,"length":8772,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/tvguide.txt"},"download/tvmap.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":85829,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/tvmap.jpg"},"download/tvmap.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":528935,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/tvmap.ps.zip"},"download/tvmap_bare.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":87196,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/tvmap_bare.jpg"},"download/tvmap_bare.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":529069,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/tvmap_bare.ps.zip"},"download/uncertainty.xml.zip":{"contentType":"application/zip","lastModified":1541539186000,"length":1238734,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/uncertainty.xml.zip"},"download/urat_pga.jpg":{"contentType":"image/jpeg","lastModified":1541539185000,"length":71354,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/urat_pga.jpg"},"download/urat_pga.ps.zip":{"contentType":"application/zip","lastModified":1541539185000,"length":150874,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/urat_pga.ps.zip"},"download/us2000h8ty.kml":{"contentType":"application/vnd.google-earth.kml+xml","lastModified":1541539188000,"length":1032,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/us2000h8ty.kml"},"download/us2000h8ty.kmz":{"contentType":"application/vnd.google-earth.kmz","lastModified":1541539188000,"length":347209,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/download/us2000h8ty.kmz"},"intensity.html":{"contentType":"text/html","lastModified":1541539190000,"length":79321,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/intensity.html"},"pga.html":{"contentType":"text/html","lastModified":1541539190000,"length":79113,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/pga.html"},"pgv.html":{"contentType":"text/html","lastModified":1541539190000,"length":79113,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/pgv
.html"},"products.html":{"contentType":"text/html","lastModified":1541539190000,"length":18584,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/products.html"},"psa03.html":{"contentType":"text/html","lastModified":1541539190000,"length":80280,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/psa03.html"},"psa10.html":{"contentType":"text/html","lastModified":1541539190000,"length":80279,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/psa10.html"},"psa30.html":{"contentType":"text/html","lastModified":1541539190000,"length":80279,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/psa30.html"},"stationlist.html":{"contentType":"text/html","lastModified":1541539190000,"length":1082904,"url":"https://earthquake.usgs.gov/archive/product/shakemap/us2000h8ty/us/1541539192315/stationlist.html"}}}]}},"geometry":{"type":"Point","coordinates":[141.9294,42.6861,35]},"id":"us2000h8ty"}'} + headers: + Access-Control-Allow-Headers: ['accept,origin,authorization,content-type'] + Access-Control-Allow-Methods: ['*'] + Access-Control-Allow-Origin: ['*'] + Cache-Control: ['public, max-age=86400'] + Connection: [close] + Content-Type: [application/json] + Date: ['Fri, 16 Nov 2018 21:43:48 GMT'] + Expires: ['Sat, 17 Nov 2018 21:43:47 GMT'] + Last-Modified: ['Fri, 16 Nov 2018 21:43:47 GMT'] + Server: [nginx] + Strict-Transport-Security: [max-age=31536000] + Transfer-Encoding: [chunked] + Vary: [Accept-Encoding] + Via: [1.1 8018acc24623a7eb3bfaed8e4f39347d.cloudfront.net (CloudFront)] + X-Amz-Cf-Id: [Oc3iiFfnJjHEBVG4C80FzT1x0911hGxMqeopCrydor7pVicyjvsZOA==] + X-Cache: [Miss from cloudfront] + X-Cache-Status: [MISS] + X-Content-Type-Options: [nosniff] + X-Frame-Options: [SAMEORIGIN] + X-XSS-Protection: [1; mode=block] + status: {code: 200, message: OK} +- request: + body: null + headers: + Connection: [close] + Host: [earthquake.usgs.gov] + User-Agent: [Python-urllib/3.5] + method: GET + uri: https://earthquake.usgs.gov/archive/product/losspager/us2000h8ty/us/1541539345392/json/exposures.json + response: + body: {string: '{"population_exposure": {"mmi": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "aggregated_exposure": [0, 0, 2248544, 2752848, 1330217, 1431811, 1281064, + 41261, 3881, 0], "maximum_border_mmi": 3.6393713910104504, "country_exposures": + [{"country_code": "JP", "exposure": [0, 0, 2227773, 2747917, 1330217, 1431811, + 1281064, 41261, 3881, 0]}, {"country_code": "UK", "exposure": [0, 0, 58, 0, + 0, 0, 0, 0, 0, 0]}, {"country_code": "RU", "exposure": [0, 0, 20713, 4931, + 0, 0, 0, 0, 0, 0]}]}, "economic_exposure": {"mmi": [1, 2, 3, 4, 5, 6, 7, 8, + 9, 10], "aggregated_exposure": [0.0, 0.0, 1085433675978.5692, 1333122236949.4187, + 644675842272.261, 693912318365.8668, 620854351737.1013, 19996714767.587364, + 1880886309.4206772, 0.0], "country_exposures": [{"country_code": "JP", "exposure": + [0.0, 0.0, 1079667028136.3129, 1331749411163.1895, 644675842272.261, 693912318365.8668, + 620854351737.1013, 19996714767.587364, 1880886309.4206772, 0.0]}, {"country_code": + "RU", "exposure": [0.0, 0.0, 5766647842.2563715, 1372825786.2292361, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0]}]}}'} + headers: + Access-Control-Allow-Headers: ['accept,origin,authorization,content-type'] + Access-Control-Allow-Methods: ['*'] + Access-Control-Allow-Origin: ['*'] + Cache-Control: [max-age=315360000] + Connection: [close] + Content-Length: ['1040'] + Content-Type: [application/json] + Date: 
+      ETag: ['"410-57a0598f261c0"']
+      Expires: ['Mon, 13 Nov 2028 21:43:49 GMT']
+      Last-Modified: ['Tue, 06 Nov 2018 21:22:23 GMT']
+      Server: [nginx]
+      Strict-Transport-Security: [max-age=31536000]
+      Vary: ['Accept-Encoding,Accept-Encoding']
+      Via: [1.1 bfde723073540323df595311d2f5c9bf.cloudfront.net (CloudFront)]
+      X-Amz-Cf-Id: [ChkL1EY0sLpoXJTny8dCmHPb2QS-JZU3H5rdN_cKsEG4bAzEoH72Fg==]
+      X-Cache: [Miss from cloudfront]
+      X-Cache-Status: [MISS]
+      X-Content-Type-Options: [nosniff]
+      X-Frame-Options: [SAMEORIGIN]
+      X-XSS-Protection: [1; mode=block]
+    status: {code: 200, message: OK}
+version: 1
diff --git a/tests/libcomcat/dataframes_test.py b/tests/libcomcat/dataframes_test.py
new file mode 100755
index 0000000..c8856f8
--- /dev/null
+++ b/tests/libcomcat/dataframes_test.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+
+import os.path
+from datetime import datetime
+
+import numpy as np
+
+import vcr
+
+from libcomcat.dataframes import (get_summary_data_frame,
+                                  get_detail_data_frame,
+                                  get_pager_data_frame,
+                                  get_phase_dataframe,
+                                  get_magnitude_data_frame)
+from libcomcat.search import search, get_event_by_id
+
+
+def get_datadir():
+    # where is this script?
+    homedir = os.path.dirname(os.path.abspath(__file__))
+    datadir = os.path.join(homedir, '..', 'data')
+    return datadir
+
+
+def test_phase_dataframe():
+    datadir = get_datadir()
+    tape_file = os.path.join(datadir, 'vcr_phase_dataframe.yaml')
+    # cassette disabled for now; this test hits the live ComCat service
+    # with vcr.use_cassette(tape_file):
+    detail = get_event_by_id('us1000778i')  # 2016 NZ event
+    df = get_magnitude_data_frame(detail, 'us', 'mb')
+    np.testing.assert_almost_equal(df['Magnitude'].sum(), 756.8100000000001)
+
+
+def test_magnitude_dataframe():
+    datadir = get_datadir()
+    tape_file = os.path.join(datadir, 'vcr_magnitude_dataframe.yaml')
+    with vcr.use_cassette(tape_file):
+        detail = get_event_by_id('us1000778i')  # 2016 NZ event
+        df = get_phase_dataframe(detail, catalog='us')
+        assert len(df) == 174
+
+
+def test_get_summary_data_frame():
+    datadir = get_datadir()
+    tape_file = os.path.join(datadir, 'vcr_summary_frame.yaml')
+    with vcr.use_cassette(tape_file):
+        events = search(starttime=datetime(1994, 6, 1),
+                        endtime=datetime(1994, 10, 6),
+                        minmagnitude=8.0, maxmagnitude=9.0, verbose=True)
+
+        df = get_summary_data_frame(events)
+        assert len(df) == 2
+        assert df.iloc[0]['magnitude'] == 8.2
+
+
+def test_get_detail_data_frame():
+    datadir = get_datadir()
+    tape_file = os.path.join(datadir, 'vcr_detail_frame.yaml')
+    with vcr.use_cassette(tape_file):
+        events = search(starttime=datetime(1994, 6, 1),
+                        endtime=datetime(1994, 10, 6),
+                        minmagnitude=8.0, maxmagnitude=9.0)
+        all_mags = get_detail_data_frame(
+            events, get_all_magnitudes=True, verbose=True)
+        assert all_mags.iloc[0]['magnitude'] == 8.2
+
+
+def test_get_pager_data_frame():
+    datadir = get_datadir()
+    EVENTID = 'us2000h8ty'
+    detail = get_event_by_id(EVENTID)
+    tape_file = os.path.join(datadir, 'vcr_pager_results.yaml')
+    # cassette disabled for now; this test hits the live ComCat service
+    # with vcr.use_cassette(tape_file):
+    df = get_pager_data_frame(detail)
+    mmi3_total = 2248544  # aggregated population exposed at MMI 3
+    mmi3 = df.iloc[0]['mmi3']
+    assert mmi3 == mmi3_total
+
+    # row 0 is the "Total" row; per-country rows follow it
+    df = get_pager_data_frame(detail, get_country_exposures=True)
+    assert mmi3_total == df.iloc[1:]['mmi3'].sum()
+
+    df = get_pager_data_frame(detail, get_losses=True)
+    testfat = 13
+    testeco = 323864991
+    assert df.iloc[0]['predicted_fatalities'] == testfat
+    assert df.iloc[0]['predicted_dollars'] == testeco
+
+    df = get_pager_data_frame(detail, get_losses=True,
+                              get_country_exposures=True)
+    assert df.iloc[1:]['predicted_fatalities'].sum() == testfat
+    assert df.iloc[1:]['predicted_dollars'].sum() == testeco
+
+    EVENTID = 'us1000778i'
+    detail = get_event_by_id(EVENTID)
+    df = get_pager_data_frame(detail)
+    testval = 14380
+    assert df.iloc[0]['mmi4'] == testval
+
+    # test getting superseded versions of the pager product
+    EVENTID = 'us2000h8ty'
+    detail = get_event_by_id(EVENTID, includesuperseded=True)
+    df = get_pager_data_frame(detail, get_losses=True)
+    version_7 = df[df['pager_version'] == 7].iloc[0]
+    v7fats = 16
+    assert version_7['predicted_fatalities'] == v7fats
+
+
+if __name__ == '__main__':
+    print('Testing pager extraction...')
+    test_get_pager_data_frame()
+    print('Testing getting phase dataframe...')
+    test_phase_dataframe()
+    print('Testing summary frame...')
+    test_get_summary_data_frame()
+    print('Testing detail frame...')
+    test_get_detail_data_frame()
+    print('Testing magnitude frame...')
+    test_magnitude_dataframe()
diff --git a/tests/libcomcat/utils_test.py b/tests/libcomcat/utils_test.py
index 5de1a57..f17e658 100755
--- a/tests/libcomcat/utils_test.py
+++ b/tests/libcomcat/utils_test.py
@@ -7,13 +7,9 @@
 
 import vcr
 
-from libcomcat.utils import (get_summary_data_frame,
-                             get_detail_data_frame,
-                             makedict,
+from libcomcat.utils import (makedict,
                              maketime,
                              get_catalogs,
-                             get_phase_dataframe,
-                             get_magnitude_data_frame,
                              read_phases,
                              get_contributors)
 from libcomcat.search import search, get_event_by_id
@@ -82,25 +78,6 @@ def test_maketime():
         pass
-
-def test_phase_dataframe():
-    datadir = get_datadir()
-    tape_file = os.path.join(datadir, 'vcr_phase_dataframe.yaml')
-    # with vcr.use_cassette(tape_file):
-    detail = get_event_by_id('us1000778i')  # 2016 NZ event
-    df = get_magnitude_data_frame(detail, 'us', 'mb')
-    np.testing.assert_almost_equal(df['Magnitude'].sum(), 756.8100000000001)
-    x = 1
-
-
-def test_magnitude_dataframe():
-    datadir = get_datadir()
-    tape_file = os.path.join(datadir, 'vcr_magnitude_dataframe.yaml')
-    with vcr.use_cassette(tape_file):
-        detail = get_event_by_id('us1000778i')  # 2016 NZ event
-        df = get_phase_dataframe(detail, catalog='us')
-        assert len(df) == 174
-
-
 def test_catalogs():
     datadir = get_datadir()
     tape_file = os.path.join(datadir, 'vcr_catalogs.yaml')
@@ -117,34 +94,7 @@ def test_contributors():
         assert 'ak' in contributors
 
 
-def test_get_summary_data_frame():
-    datadir = get_datadir()
-    tape_file = os.path.join(datadir, 'vcr_summary_frame.yaml')
-    with vcr.use_cassette(tape_file):
-        events = search(starttime=datetime(1994, 6, 1),
-                        endtime=datetime(1994, 10, 6),
-                        minmagnitude=8.0, maxmagnitude=9.0, verbose=True)
-
-        df = get_summary_data_frame(events)
-        assert len(df) == 2
-        assert df.iloc[0]['magnitude'] == 8.2
-
-
-def test_get_detail_data_frame():
-    datadir = get_datadir()
-    tape_file = os.path.join(datadir, 'vcr_detail_frame.yaml')
-    with vcr.use_cassette(tape_file):
-        events = search(starttime=datetime(1994, 6, 1),
-                        endtime=datetime(1994, 10, 6),
-                        minmagnitude=8.0, maxmagnitude=9.0)
-        all_mags = get_detail_data_frame(
-            events, get_all_magnitudes=True, verbose=True)
-        assert all_mags.iloc[0]['magnitude'] == 8.2
-
-
 if __name__ == '__main__':
-    print('Testing getting phase dataframe...')
-    test_phase_dataframe()
     print('Testing reader...')
     test_reader()
     print('Testing makedict...')
@@ -155,9 +105,3 @@ def test_get_detail_data_frame():
     test_catalogs()
     print('Testing conributors...')
     test_contributors()
-    print('Testing summary frame...')
-    test_get_summary_data_frame()
-    print('Testing detail frame...')
-    test_get_detail_data_frame()
-    print('Testing magnitude frame...')
-    test_get_magnitude_data_frame()