diff --git a/gplately/__init__.py b/gplately/__init__.py index c146c7a3..a4002a2d 100644 --- a/gplately/__init__.py +++ b/gplately/__init__.py @@ -200,3 +200,12 @@ "_ContinentCollision": False, "_ReconstructByTopologies": False, } + +try: + import plate_model_manager +except ImportError: + print("The plate_model_manager is not installed, installing it now!") + import subprocess + import sys + + subprocess.call([sys.executable, "-m", "pip", "install", "plate-model-manager"]) diff --git a/gplately/pygplates.py b/gplately/pygplates.py index 9784a6e8..8ebfd72c 100644 --- a/gplately/pygplates.py +++ b/gplately/pygplates.py @@ -9,7 +9,9 @@ import pygplates as _pygplates from pygplates import * import warnings as _warnings -_warnings.simplefilter('always', ImportWarning) + +_warnings.simplefilter("always", ImportWarning) + def _is_string(value): # convert sets to list @@ -20,83 +22,84 @@ def _is_string(value): if type(value) is list: bl = [] for val in value: - bl.append( type(val) is str ) + bl.append(type(val) is str) return all(bl) - + # if no list, check if string else: return type(value) is str class RotationModel(_pygplates.RotationModel): - """A class that wraps the - [`pyGPlates.RotationModel` class](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel). - This queries a finite rotation of a moving plate relative to any other plate, - optionally between two instants in geological time. + """A class that wraps the + [`pyGPlates.RotationModel` class](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel). + This queries a finite rotation of a moving plate relative to any other plate, + optionally between two instants in geological time. See [Plate reconstruction hierarchy](https://www.gplates.org/docs/pygplates/pygplates_foundations.html#pygplates-foundations-plate-reconstruction-hierarchy). 
- This class provides an easy way to query rotations in any of the four - combinations of total/stage and equivalent/relative rotations using - [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation). - - Reconstruction trees can also be created at any instant - of geological time and these are cached internally depending on a - user-specified cache size parameter pass to `gplately.pygplates.RotationModel.__init__()`. - The reconstruction_tree_cache_size parameter of those methods controls the - size of an internal least-recently-used cache of reconstruction trees - (evicts least recently requested reconstruction tree when a new - reconstruction time is requested that does not currently exist in the cache). - This enables reconstruction trees associated with different reconstruction - times to be re-used instead of re-creating them, provided they have not been - evicted from the cache. This benefit also applies when querying rotations with - [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation) + This class provides an easy way to query rotations in any of the four + combinations of total/stage and equivalent/relative rotations using + [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation). + + Reconstruction trees can also be created at any instant + of geological time and these are cached internally depending on a + user-specified cache size parameter pass to `gplately.pygplates.RotationModel.__init__()`. + The reconstruction_tree_cache_size parameter of those methods controls the + size of an internal least-recently-used cache of reconstruction trees + (evicts least recently requested reconstruction tree when a new + reconstruction time is requested that does not currently exist in the cache). 
+ This enables reconstruction trees associated with different reconstruction + times to be re-used instead of re-creating them, provided they have not been + evicted from the cache. This benefit also applies when querying rotations with + [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation) since it, in turn, requests reconstruction trees. - This wrapping of `pygplates.RotationModel` contains all + This wrapping of `pygplates.RotationModel` contains all [`pygplates.RotationModel` functionality](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel), and in addition tracks the names of files from which the rotation feature(s) are read - using the `gplately.pygplates.RotationModel.filenames` attribute. + using the `gplately.pygplates.RotationModel.filenames` attribute. """ - def __init__(self, rotation_features): + + def __init__(self, rotation_features, default_anchor_plate_id=0): """**A RotationModel object can be constructed in three ways.** --- **1. Create from rotation feature collection(s) and/or rotation filename(s)** -------------------------------------------------------------------------- - + Parameters ---------- rotation_features : instance of `pygplates.FeatureCollection` or `str` or instance of `pygplates.Feature` or sequence of `pygplates.Feature` or sequence of any combination of those four types - A rotation feature collection, or rotation filename, or rotation feature, - or sequence of rotation features, or a sequence (eg, `list` or `tuple`) of + A rotation feature collection, or rotation filename, or rotation feature, + or sequence of rotation features, or a sequence (eg, `list` or `tuple`) of any combination of those four types. reconstruction_tree_cache_size : int, default 150 Number of reconstruction trees to cache internally. Defaults to 150. 
extend_total_reconstruction_poles_to_distant_past : bool, default False - Extend each moving plate sequence back infinitely far into the distant - past such that reconstructed geometries will not snap back to their - present day positions when the reconstruction time is older than + Extend each moving plate sequence back infinitely far into the distant + past such that reconstructed geometries will not snap back to their + present day positions when the reconstruction time is older than the oldest times specified in the rotation features (defaults to False). default_anchor_plate_id : int, default 0 - The default anchored plate id to use when - [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation) + The default anchored plate id to use when + [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation) and [`get_reconstruction_tree()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_reconstruction_tree) are called without specifying their `anchor_plate_id` parameter. Defaults to 0. Raises ------ - OpenFileForReadingError + OpenFileForReadingError If any file is not readable (when filenames specified) - FileFormatNotSupportedError - If any file format (identified by the filename extensions) + FileFormatNotSupportedError + If any file format (identified by the filename extensions) does not support reading (when filenames specified) @@ -124,23 +127,23 @@ def __init__(self, rotation_features): An existing rotation model. reconstruction_tree_cache_size : int, default 2 - Number of reconstruction trees to cache internally. 
- Defaults to 2 - this is much lower than the usual default - cache size since the existing rotation model likely - already has a sizeable cache anyway - and if you are - leaving this at its default value then you are presumably - only interested in changing the default anchor plate ID + Number of reconstruction trees to cache internally. + Defaults to 2 - this is much lower than the usual default + cache size since the existing rotation model likely + already has a sizeable cache anyway - and if you are + leaving this at its default value then you are presumably + only interested in changing the default anchor plate ID (not increasing the cache size). default_anchor_plate_id : int, defaults to the default anchor plate of `rotation_model` - The default anchored plate id to use when + The default anchored plate id to use when [`get_rotation()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_rotation) and [`get_reconstruction_tree()`](https://www.gplates.org/docs/pygplates/generated/pygplates.rotationmodel#pygplates.RotationModel.get_reconstruction_tree) - are called without specifying their `anchor_plate_id` parameter. + are called without specifying their `anchor_plate_id` parameter. Defaults to the default anchor plate of `rotation_model`. - This is useful if you want to use an existing rotation model but with a + This is useful if you want to use an existing rotation model but with a larger cache size or a different default anchor plate ID: Example @@ -155,8 +158,8 @@ def __init__(self, rotation_features): **3. Return an existing rotation model as a convenience** ------------------------------------------------------- - This is useful when defining your own function that accepts - rotation features or a rotation model. It avoids the hassle + This is useful when defining your own function that accepts + rotation features or a rotation model. 
It avoids the hassle of having to explicitly test for each source type: def my_function(rotation_features_or_model): @@ -171,7 +174,9 @@ def my_function(rotation_features_or_model): --- """ - super(RotationModel, self).__init__(rotation_features) + super(RotationModel, self).__init__( + rotation_features, default_anchor_plate_id=default_anchor_plate_id + ) self.filenames = [] # update filename list @@ -186,15 +191,18 @@ def my_function(rotation_features_or_model): elif hasattr(rotation_features, "filenames"): self.filenames = rotation_features.filenames else: - msg = "\nRotationModel: No filename associated with {} in __init__".format(type(rotation_features)) + msg = "\nRotationModel: No filename associated with {} in __init__".format( + type(rotation_features) + ) msg += "\n ensure pygplates is imported from gplately. Run," msg += "\n from gplately import pygplates" _warnings.warn(msg, ImportWarning) self.filenames = [] + class Feature(_pygplates.Feature): - """A class that wraps the `pyGPlates.Feature` class. This contains tools to query and set - geological or plate-tectonic feature properties defined by the + """A class that wraps the `pyGPlates.Feature` class. This contains tools to query and set + geological or plate-tectonic feature properties defined by the [GPlates Geological Information Model (GPGIM)](https://www.gplates.org/docs/gpgim/). A feature consists of a collection of `properties`, a `feature type` and a `feature id`. @@ -208,8 +216,8 @@ class Feature(_pygplates.Feature): This wrapping of `pygplates.Feature` contains all `pygplates.Feature` functionality, - and in addition tracks the names of files from which the feature(s) are read - using the `gplately.pygplates.Feature.filenames` attribute. + and in addition tracks the names of files from which the feature(s) are read + using the `gplately.pygplates.Feature.filenames` attribute. 
Creating `feature`s @@ -387,15 +395,16 @@ class Feature(_pygplates.Feature): A feature can be deep copied using `clone()`. """ + def __init__(self, feature): """ Parameters ---------- feature_type : instance of `pygplates.FeatureType` - The type of feature. See - [here](https://www.gplates.org/docs/pygplates/generated/pygplates.featuretype#pygplates.FeatureType) - for a list of pygplates feature types. + The type of feature. See + [here](https://www.gplates.org/docs/pygplates/generated/pygplates.featuretype#pygplates.FeatureType) + for a list of pygplates feature types. feature_id : instance of `pygplates.FeatureId` The [feature identifier](https://www.gplates.org/docs/pygplates/generated/pygplates.featureid#pygplates.FeatureID). @@ -409,7 +418,7 @@ def __init__(self, feature): If neither a `str`, `list` of `str`, `gplately.pygplates.Feature` or `None` is passed, no `Feature` filenames will be collected, and the user will be alerted of this. - InformationModelError + InformationModelError if `verify_information_model` is `VerifyInformationModel.yes` and `feature_type` is not a recognised feature type. """ @@ -428,15 +437,16 @@ def __init__(self, feature): elif hasattr(feature, "filenames"): self.filenames = feature.filenames else: - msg = "\nFeature: No filename associated with {} in __init__".format(type(feature)) + msg = "\nFeature: No filename associated with {} in __init__".format( + type(feature) + ) msg += "\n ensure pygplates is imported from gplately. Run," msg += "\n from gplately import pygplates" _warnings.warn(msg, ImportWarning) self.filenames = [] - def add(self, feature): - """Adds a property (or properties) to this feature. See original docs + """Adds a property (or properties) to this feature. See original docs [here](https://www.gplates.org/docs/pygplates/generated/pygplates.feature#pygplates.Feature.add). 
Parameters @@ -464,7 +474,9 @@ def add(self, feature): elif hasattr(feature, "filenames"): self.filenames.extend(feature.filenames) else: - msg = "\nFeature: No filename associated with {} in add".format(type(feature)) + msg = "\nFeature: No filename associated with {} in add".format( + type(feature) + ) msg += "\n ensure pygplates is imported from gplately. Run," msg += "\n from gplately import pygplates" _warnings.warn(msg, ImportWarning) @@ -472,7 +484,7 @@ def add(self, feature): def clone(self): """Create a duplicate of this `Feature` instance. - This creates a new `Feature` instance with cloned versions of this feature’s `properties`. + This creates a new `Feature` instance with cloned versions of this feature’s `properties`. The cloned feature is created with its own unique `pygplates.FeatureId`. Returns @@ -484,18 +496,19 @@ def clone(self): feat = super().clone() feat.filenames = self.filenames + class FeatureCollection(_pygplates.FeatureCollection): - """A class that wraps the - [`pyGPlates.FeatureCollection`](https://www.gplates.org/docs/pygplates/generated/pygplates.featurecollection#pygplates.FeatureCollection) - class. This aggregates a set of features into a collection. - This is traditionally so that a group of features can be loaded, saved or + """A class that wraps the + [`pyGPlates.FeatureCollection`](https://www.gplates.org/docs/pygplates/generated/pygplates.featurecollection#pygplates.FeatureCollection) + class. This aggregates a set of features into a collection. + This is traditionally so that a group of features can be loaded, saved or unloaded in a single operation. 
- This wrapping of `pygplates.FeatureCollection` contains all + This wrapping of `pygplates.FeatureCollection` contains all [`pygplates.FeatureCollection` functionality](https://www.gplates.org/docs/pygplates/generated/pygplates.featurecollection#pygplates.FeatureCollection), - and in addition tracks the names of files from which the feature - collection(s) are read using the - `gplately.pygplates.FeatureCollection.filenames` attribute. + and in addition tracks the names of files from which the feature + collection(s) are read using the + `gplately.pygplates.FeatureCollection.filenames` attribute. Examples -------- @@ -541,7 +554,7 @@ class FeatureCollection(_pygplates.FeatureCollection): In the future, support will be added to enable users to implement and register readers/writers for other file formats (or their own non-standard file formats). - + Operations for accessing features --------------------------------- @@ -552,7 +565,7 @@ class FeatureCollection(_pygplates.FeatureCollection): * `for f in fc` : Iterates over the features `f` in feature collection `fc`. - * `fc[i]` : The feature of fc at index `i`. + * `fc[i]` : The feature of fc at index `i`. For example: @@ -562,6 +575,7 @@ class FeatureCollection(_pygplates.FeatureCollection): # assert(num_features == len(features_in_collection)) """ + def __init__(self, features=None): """ @@ -573,10 +587,10 @@ def __init__(self, features=None): Raises ------ - OpenFileForReadingError + OpenFileForReadingError If file is not readable (if filename specified). - FileFormatNotSupportedError + FileFormatNotSupportedError If file format (identified by the filename extension) does not support reading (when filename specified). 
""" @@ -595,7 +609,9 @@ def __init__(self, features=None): elif hasattr(features, "filenames"): self.filenames = features.filenames else: - msg = "\nFeatureCollection: No filename associated with {} in __init__".format(type(features)) + msg = "\nFeatureCollection: No filename associated with {} in __init__".format( + type(features) + ) msg += "\n ensure pygplates is imported from gplately. Run," msg += "\n from gplately import pygplates" _warnings.warn(msg, ImportWarning) @@ -609,7 +625,7 @@ def add(self, features): feature : instance of `Feature` or sequence (eg, `list` or `tuple`) of `Feature` One or more features to add. - A feature collection is an unordered collection of features so there is no concept + A feature collection is an unordered collection of features so there is no concept of where a feature is inserted in the sequence of features. feature_collection.add(feature) @@ -625,7 +641,9 @@ def add(self, features): elif hasattr(features, "filenames"): self.filenames.extend(features.filenames) else: - msg = "\nFeatureCollection: No filename associated with {} in add".format(type(features)) + msg = "\nFeatureCollection: No filename associated with {} in add".format( + type(features) + ) msg += "\n ensure pygplates is imported from gplately. Run," msg += "\n from gplately import pygplates" _warnings.warn(msg, ImportWarning) @@ -633,16 +651,16 @@ def add(self, features): def clone(self): """Create a duplicate of this feature collection instance. - This creates a new `FeatureCollection` instance with cloned versions of - this collection’s features. And the cloned features (in the cloned + This creates a new `FeatureCollection` instance with cloned versions of + this collection’s features. And the cloned features (in the cloned collection) are each created with a unique `FeatureId`. Returns ------- feature_collection : instance of `gplately.pygplates.FeatureCollection` - The cloned `FeatureCollection`. + The cloned `FeatureCollection`. 
""" fc = super().clone() fc.filenames = self.filenames - return fc \ No newline at end of file + return fc diff --git a/gplately/reconstruction.py b/gplately/reconstruction.py index 5ac40839..5e75eee4 100644 --- a/gplately/reconstruction.py +++ b/gplately/reconstruction.py @@ -16,53 +16,59 @@ class PlateReconstruction(object): - """The `PlateReconstruction` class contains methods to reconstruct topology features to specific - geological times given a `rotation_model`, a set of `topology_features` and a set of - `static_polygons`. Topological plate velocity data at specific geological times can also be - calculated from these reconstructed features. + """The `PlateReconstruction` class contains methods to reconstruct topology features to specific + geological times given a `rotation_model`, a set of `topology_features` and a set of + `static_polygons`. Topological plate velocity data at specific geological times can also be + calculated from these reconstructed features. Attributes ---------- rotation_model : str, or instance of , or , or sequence of , or instance of , default None A rotation model to query equivalent and/or relative topological plate rotations - from a time in the past relative to another time in the past or to present day. Can be - provided as a rotation filename, or rotation feature collection, or rotation feature, or - sequence of rotation features, or a sequence (eg, a list or tuple) of any combination of + from a time in the past relative to another time in the past or to present day. Can be + provided as a rotation filename, or rotation feature collection, or rotation feature, or + sequence of rotation features, or a sequence (eg, a list or tuple) of any combination of those four types. topology_features : str, or a sequence (eg, `list` or `tuple`) of instances of , or a single instance of , or an instance of , default None - Reconstructable topological features like trenches, ridges and transforms. 
Can be provided - as an optional topology-feature filename, or sequence of features, or a single feature. + Reconstructable topological features like trenches, ridges and transforms. Can be provided + as an optional topology-feature filename, or sequence of features, or a single feature. static_polygons : str, or instance of , or sequence of ,or an instance of , default None Present-day polygons whose shapes do not change through geological time. They are - used to cookie-cut dynamic polygons into identifiable topological plates (assigned - an ID) according to their present-day locations. Can be provided as a static polygon feature + used to cookie-cut dynamic polygons into identifiable topological plates (assigned + an ID) according to their present-day locations. Can be provided as a static polygon feature collection, or optional filename, or a single feature, or a sequence of features. """ - - def __init__(self, rotation_model, topology_features=None, static_polygons=None): + def __init__( + self, + rotation_model, + topology_features=None, + static_polygons=None, + default_anchor_plate_id=0, + ): if hasattr(rotation_model, "reconstruction_identifier"): self.name = rotation_model.reconstruction_identifier else: self.name = None - self.rotation_model = _RotationModel(rotation_model) + self.rotation_model = _RotationModel( + rotation_model, default_anchor_plate_id=default_anchor_plate_id + ) self.topology_features = _load_FeatureCollection(topology_features) self.static_polygons = _load_FeatureCollection(static_polygons) def __getstate__(self): - filenames = {"rotation_model": self.rotation_model.filenames} if self.topology_features: - filenames['topology_features'] = self.topology_features.filenames + filenames["topology_features"] = self.topology_features.filenames if self.static_polygons: - filenames['static_polygons'] = self.static_polygons.filenames + filenames["static_polygons"] = self.static_polygons.filenames # # remove unpicklable items # del 
self.rotation_model, self.topology_features, self.static_polygons - + # # really make sure they're gone # self.rotation_model = None # self.topology_features = None @@ -70,33 +76,37 @@ def __getstate__(self): return filenames - def __setstate__(self, state): - # reinstate unpicklable items - self.rotation_model = _RotationModel(state['rotation_model']) + self.rotation_model = _RotationModel(state["rotation_model"]) self.topology_features = None self.static_polygons = None - if 'topology_features' in state: + if "topology_features" in state: self.topology_features = _FeatureCollection() - for topology in state['topology_features']: - self.topology_features.add( _FeatureCollection(topology) ) + for topology in state["topology_features"]: + self.topology_features.add(_FeatureCollection(topology)) - if 'static_polygons' in state: + if "static_polygons" in state: self.static_polygons = _FeatureCollection() - for polygon in state['static_polygons']: - self.static_polygons.add( _FeatureCollection(polygon) ) - + for polygon in state["static_polygons"]: + self.static_polygons.add(_FeatureCollection(polygon)) - def tessellate_subduction_zones(self, time, tessellation_threshold_radians=0.001, ignore_warnings=False, return_geodataframe=False, **kwargs): + def tessellate_subduction_zones( + self, + time, + tessellation_threshold_radians=0.001, + ignore_warnings=False, + return_geodataframe=False, + **kwargs + ): """Samples points along subduction zone trenches and obtains subduction data at a particular geological time. - + Resolves topologies at `time`, tessellates all resolved subducting features to within 'tessellation_threshold_radians' radians and obtains the following information for each sampled point along a trench: - + `tessellate_subduction_zones` returns a list of 10 vertically-stacked tuples with the following data per sampled trench point: * Col. 
0 - longitude of sampled trench point @@ -116,7 +126,7 @@ def tessellate_subduction_zones(self, time, tessellation_threshold_radians=0.001 time : float The reconstruction time (Ma) at which to query subduction convergence. - tessellation_threshold_radians : float, default=0.001 + tessellation_threshold_radians : float, default=0.001 The threshold sampling distance along the subducting trench (in radians). Returns @@ -140,7 +150,7 @@ def tessellate_subduction_zones(self, time, tessellation_threshold_radians=0.001 Notes ----- Each sampled point in the output is the midpoint of a great circle arc between two adjacent points in the trench polyline. - The trench normal vector used in the obliquity calculations is perpendicular to the great circle arc of each point + The trench normal vector used in the obliquity calculations is perpendicular to the great circle arc of each point (arc midpoint) and pointing towards the overriding plate (rather than away from it). Each trench is sampled at approximately uniform intervals along its length (specified via a threshold sampling distance). @@ -154,23 +164,24 @@ def tessellate_subduction_zones(self, time, tessellation_threshold_radians=0.001 from the trench normal direction to the velocity vector. The range (0, -180) goes counter-clockwise. You can change the range (-180, 180) to the range (0, 360) by adding 360 to negative angles. The trench normal is perpendicular to the trench and pointing toward the overriding plate. - - Note that the convergence velocity magnitude is negative if the plates are diverging (if convergence obliquity angle - is greater than 90 or less than -90). And note that the absolute velocity magnitude is negative if the trench - (subduction zone) is moving towards the overriding plate (if absolute obliquity angle is less than 90 or greater + + Note that the convergence velocity magnitude is negative if the plates are diverging (if convergence obliquity angle + is greater than 90 or less than -90). 
And note that the absolute velocity magnitude is negative if the trench + (subduction zone) is moving towards the overriding plate (if absolute obliquity angle is less than 90 or greater than -90) - note that this ignores the kinematics of the subducting plate. - + The delta time interval used for velocity calculations is, by default, assumed to be 1Ma. """ if ignore_warnings: with warnings.catch_warnings(): - warnings.simplefilter('ignore') + warnings.simplefilter("ignore") subduction_data = ptt.subduction_convergence.subduction_convergence( self.rotation_model, self.topology_features, tessellation_threshold_radians, float(time), - **kwargs) + **kwargs + ) else: subduction_data = ptt.subduction_convergence.subduction_convergence( @@ -178,28 +189,35 @@ def tessellate_subduction_zones(self, time, tessellation_threshold_radians=0.001 self.topology_features, tessellation_threshold_radians, float(time), - **kwargs) + **kwargs + ) subduction_data = np.vstack(subduction_data) if return_geodataframe: from shapely import geometry import geopandas as gpd - coords = [geometry.Point(lon,lat) for lon,lat in zip(subduction_data[:,0], subduction_data[:,1])] + + coords = [ + geometry.Point(lon, lat) + for lon, lat in zip(subduction_data[:, 0], subduction_data[:, 1]) + ] d = {"geometry": coords} - labels = ['convergence velocity (cm/yr)', - 'convergence obliquity angle (degrees)', - 'trench velocity (cm/yr)', - 'trench obliquity angle (degrees)', - 'length (degrees)', - 'trench normal angle (degrees)', - 'subducting plate ID', - 'overriding plate ID'] + labels = [ + "convergence velocity (cm/yr)", + "convergence obliquity angle (degrees)", + "trench velocity (cm/yr)", + "trench obliquity angle (degrees)", + "length (degrees)", + "trench normal angle (degrees)", + "subducting plate ID", + "overriding plate ID", + ] for i, label in enumerate(labels): - index = 2+i - d[label] = subduction_data[:,index] + index = 2 + i + d[label] = subduction_data[:, index] gdf = gpd.GeoDataFrame(d, 
geometry="geometry") return gdf @@ -207,13 +225,12 @@ def tessellate_subduction_zones(self, time, tessellation_threshold_radians=0.001 else: return subduction_data - def total_subduction_zone_length(self, time, use_ptt=False, ignore_warnings=False): """Calculates the total length of all mid-ocean ridges (km) at the specified geological time (Ma). if `use_ptt` is True - - Uses Plate Tectonic Tools' `subduction_convergence` module to calculate trench segment lengths on a unit sphere. + + Uses Plate Tectonic Tools' `subduction_convergence` module to calculate trench segment lengths on a unit sphere. The aggregated total subduction zone length is scaled to kilometres using the geocentric radius. Otherwise @@ -230,9 +247,9 @@ def total_subduction_zone_length(self, time, use_ptt=False, ignore_warnings=Fals use_ptt : bool, default=False If set to `True`, the PTT method is used. ignore_warnings : bool, default=False - Choose whether to ignore warning messages from PTT's `subduction_convergence` workflow. These warnings alert the user - when certain subduction sub-segments are ignored - this happens when the trench segments have unidentifiable subduction - polarities and/or subducting plates. + Choose whether to ignore warning messages from PTT's `subduction_convergence` workflow. These warnings alert the user + when certain subduction sub-segments are ignored - this happens when the trench segments have unidentifiable subduction + polarities and/or subducting plates. 
Raises ------ @@ -247,36 +264,55 @@ def total_subduction_zone_length(self, time, use_ptt=False, ignore_warnings=Fals """ if use_ptt: with warnings.catch_warnings(): - warnings.simplefilter('ignore') - subduction_data = self.tessellate_subduction_zones(time, ignore_warnings=ignore_warnings) + warnings.simplefilter("ignore") + subduction_data = self.tessellate_subduction_zones( + time, ignore_warnings=ignore_warnings + ) + + trench_arcseg = subduction_data[:, 6] + trench_pt_lat = subduction_data[:, 1] - trench_arcseg = subduction_data[:,6] - trench_pt_lat = subduction_data[:,1] - total_subduction_zone_length_kms = 0 for i, segment in enumerate(trench_arcseg): - earth_radius = _tools.geocentric_radius(trench_pt_lat[i])/1e3 - total_subduction_zone_length_kms += np.deg2rad(segment)*earth_radius - + earth_radius = _tools.geocentric_radius(trench_pt_lat[i]) / 1e3 + total_subduction_zone_length_kms += np.deg2rad(segment) * earth_radius + return total_subduction_zone_length_kms else: resolved_topologies = [] shared_boundary_sections = [] - pygplates.resolve_topologies(self.topology_features, self.rotation_model, resolved_topologies, time, shared_boundary_sections) + pygplates.resolve_topologies( + self.topology_features, + self.rotation_model, + resolved_topologies, + time, + shared_boundary_sections, + ) total_subduction_zone_length_kms = 0.0 for shared_boundary_section in shared_boundary_sections: - if shared_boundary_section.get_feature().get_feature_type() != pygplates.FeatureType.gpml_subduction_zone: + if ( + shared_boundary_section.get_feature().get_feature_type() + != pygplates.FeatureType.gpml_subduction_zone + ): continue - for shared_sub_segment in shared_boundary_section.get_shared_sub_segments(): - clat, clon = shared_sub_segment.get_resolved_geometry().get_centroid().to_lat_lon() + for ( + shared_sub_segment + ) in shared_boundary_section.get_shared_sub_segments(): + clat, clon = ( + shared_sub_segment.get_resolved_geometry() + .get_centroid() + .to_lat_lon() 
+ ) earth_radius = _tools.geocentric_radius(clat) / 1e3 - total_subduction_zone_length_kms += shared_sub_segment.get_resolved_geometry().get_arc_length()*earth_radius + total_subduction_zone_length_kms += ( + shared_sub_segment.get_resolved_geometry().get_arc_length() + * earth_radius + ) return total_subduction_zone_length_kms - def total_continental_arc_length( self, time, @@ -286,12 +322,12 @@ def total_continental_arc_length( ): """Calculates the total length of all global continental arcs (km) at the specified geological time (Ma). - Uses Plate Tectonic Tools' `subduction_convergence` workflow to sample a given plate model's trench features into + Uses Plate Tectonic Tools' `subduction_convergence` workflow to sample a given plate model's trench features into point features and obtain their subduction polarities. The resolved points are projected out by the `trench_arc_distance` - and their new locations are linearly interpolated onto the supplied `continental_grid`. If the projected trench - points lie in the grid, they are considered continental arc points, and their arc segment lengths are appended - to the total continental arc length for the specified `time`. The total length is scaled to kilometres using the geocentric - Earth radius. + and their new locations are linearly interpolated onto the supplied `continental_grid`. If the projected trench + points lie in the grid, they are considered continental arc points, and their arc segment lengths are appended + to the total continental arc length for the specified `time`. The total length is scaled to kilometres using the geocentric + Earth radius. Parameters ---------- @@ -303,11 +339,11 @@ def total_continental_arc_length( assumed [-180,180,-90,90]. For a filename, the extent is obtained from the file. trench_arc_distance : float - The trench-to-arc distance (in kilometres) to project sampled trench points out by in the direction of their - subduction polarities. 
+ The trench-to-arc distance (in kilometres) to project sampled trench points out by in the direction of their + subduction polarities. ignore_warnings : bool, default=True - Choose whether to ignore warning messages from PTT's subduction_convergence workflow that alerts the user of - subduction sub-segments that are ignored due to unidentified polarities and/or subducting plates. + Choose whether to ignore warning messages from PTT's subduction_convergence workflow that alerts the user of + subduction sub-segments that are ignored due to unidentified polarities and/or subducting plates. Returns ------- @@ -331,7 +367,7 @@ def total_continental_arc_length( continental_grid = np.array(continental_grid) graster = _grids.Raster( data=continental_grid, - extent=[-180,180,-90,90], + extent=[-180, 180, -90, 90], time=float(time), ) except Exception as e: @@ -346,20 +382,24 @@ def total_continental_arc_length( ) # Obtain trench data with Plate Tectonic Tools - trench_data = self.tessellate_subduction_zones(time, ignore_warnings=ignore_warnings) + trench_data = self.tessellate_subduction_zones( + time, ignore_warnings=ignore_warnings + ) # Extract trench data - trench_normal_azimuthal_angle = trench_data[:,7] - trench_arcseg = trench_data[:,6] - trench_pt_lon = trench_data[:,0] - trench_pt_lat = trench_data[:,1] + trench_normal_azimuthal_angle = trench_data[:, 7] + trench_arcseg = trench_data[:, 6] + trench_pt_lon = trench_data[:, 0] + trench_pt_lat = trench_data[:, 1] # Modify the trench-arc distance using the geocentric radius - arc_distance = trench_arc_distance / (_tools.geocentric_radius(trench_pt_lat)/1000) + arc_distance = trench_arc_distance / ( + _tools.geocentric_radius(trench_pt_lat) / 1000 + ) # Project trench points out along trench-arc distance, and obtain their new lat-lon coordinates - dlon = arc_distance*np.sin(np.radians(trench_normal_azimuthal_angle)) - dlat = arc_distance*np.cos(np.radians(trench_normal_azimuthal_angle)) + dlon = arc_distance * 
np.sin(np.radians(trench_normal_azimuthal_angle)) + dlat = arc_distance * np.cos(np.radians(trench_normal_azimuthal_angle)) ilon = trench_pt_lon + np.degrees(dlon) ilat = trench_pt_lat + np.degrees(dlat) @@ -368,7 +408,7 @@ def total_continental_arc_length( sampled_points = graster.interpolate( ilon, ilat, - method='linear', + method="linear", return_indices=False, ) continental_indices = np.where(sampled_points > 0) @@ -378,11 +418,17 @@ def total_continental_arc_length( segment_lengths = point_radii * segment_arclens return np.sum(segment_lengths) - - def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001, ignore_warnings=False, return_geodataframe=False, **kwargs): - """Samples points along resolved spreading features (e.g. mid-ocean ridges) and calculates spreading rates and + def tessellate_mid_ocean_ridges( + self, + time, + tessellation_threshold_radians=0.001, + ignore_warnings=False, + return_geodataframe=False, + **kwargs + ): + """Samples points along resolved spreading features (e.g. mid-ocean ridges) and calculates spreading rates and lengths of ridge segments at a particular geological time. - + Resolves topologies at `time`, tessellates all resolved spreading features to within 'tessellation_threshold_radians' radians. Returns a 4-column vertically stacked tuple with the following data. @@ -390,9 +436,9 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 * Col. 1 - latitude of sampled ridge point * Col. 2 - spreading velocity magnitude (in cm/yr) * Col. 3 - length of arc segment (in degrees) that current point is on - - All spreading feature types are considered. The transform segments of spreading features are ignored. - Note: by default, the function assumes that a segment can deviate 45 degrees from the stage pole before it is + + All spreading feature types are considered. The transform segments of spreading features are ignored. 
+ Note: by default, the function assumes that a segment can deviate 45 degrees from the stage pole before it is considered a transform segment. Parameters @@ -400,11 +446,11 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 time : float The reconstruction time (Ma) at which to query subduction convergence. - tessellation_threshold_radians : float, default=0.001 + tessellation_threshold_radians : float, default=0.001 The threshold sampling distance along the subducting trench (in radians). ignore_warnings : bool, default=False - Choose to ignore warnings from Plate Tectonic Tools' ridge_spreading_rate workflow. + Choose to ignore warnings from Plate Tectonic Tools' ridge_spreading_rate workflow. Returns ------- @@ -412,7 +458,7 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 The results for all tessellated points sampled along the trench. The size of the returned list is equal to the number of tessellated points. Each tuple in the list corresponds to a tessellated point and has the following tuple items: - + * longitude of sampled point * latitude of sampled point * spreading velocity magnitude (in cm/yr) @@ -420,7 +466,7 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 """ if ignore_warnings: with warnings.catch_warnings(): - warnings.simplefilter('ignore') + warnings.simplefilter("ignore") spreading_feature_types = [pygplates.FeatureType.gpml_mid_ocean_ridge] ridge_data = ptt.ridge_spreading_rate.spreading_rates( self.rotation_model, @@ -428,7 +474,8 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 float(time), tessellation_threshold_radians, spreading_feature_types, - **kwargs) + **kwargs + ) else: spreading_feature_types = [pygplates.FeatureType.gpml_mid_ocean_ridge] @@ -438,7 +485,8 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 float(time), tessellation_threshold_radians, 
spreading_feature_types, - **kwargs) + **kwargs + ) ridge_data = np.vstack(ridge_data) @@ -446,22 +494,28 @@ def tessellate_mid_ocean_ridges(self, time, tessellation_threshold_radians=0.001 from shapely import geometry import geopandas as gpd - points = [geometry.Point(lon,lat) for lon,lat in zip(ridge_data[:,0], ridge_data[:,1])] - gdf = gpd.GeoDataFrame({"geometry": points, - "velocity (cm/yr)": ridge_data[:,2], - "length (degrees)": ridge_data[:,3]}, - geometry="geometry") + points = [ + geometry.Point(lon, lat) + for lon, lat in zip(ridge_data[:, 0], ridge_data[:, 1]) + ] + gdf = gpd.GeoDataFrame( + { + "geometry": points, + "velocity (cm/yr)": ridge_data[:, 2], + "length (degrees)": ridge_data[:, 3], + }, + geometry="geometry", + ) return gdf else: return ridge_data - def total_ridge_length(self, time, use_ptt=False, ignore_warnings=False): """Calculates the total length of all mid-ocean ridges (km) at the specified geological time (Ma). if `use_ptt` is True - + Uses Plate Tectonic Tools' `ridge_spreading_rate` workflow to calculate ridge segment lengths. Scales lengths to kilometres using the geocentric radius. @@ -477,7 +531,7 @@ def total_ridge_length(self, time, use_ptt=False, ignore_warnings=False): time : int The geological time at which to calculate total mid-ocean ridge lengths. use_ptt : bool, default=False - If set to `True`, the PTT method is used. + If set to `True`, the PTT method is used. ignore_warnings : bool, default=False Choose whether to ignore warning messages from PTT's `ridge_spreading_rate` workflow. 
@@ -494,73 +548,90 @@ def total_ridge_length(self, time, use_ptt=False, ignore_warnings=False): if use_ptt is True: with warnings.catch_warnings(): - warnings.simplefilter('ignore') + warnings.simplefilter("ignore") ridge_data = self.tessellate_mid_ocean_ridges(time) - ridge_arcseg = ridge_data[:,3] - ridge_pt_lat = ridge_data[:,1] + ridge_arcseg = ridge_data[:, 3] + ridge_pt_lat = ridge_data[:, 1] total_ridge_length_kms = 0 for i, segment in enumerate(ridge_arcseg): - earth_radius = _tools.geocentric_radius(ridge_pt_lat[i])/1e3 - total_ridge_length_kms += np.deg2rad(segment)*earth_radius + earth_radius = _tools.geocentric_radius(ridge_pt_lat[i]) / 1e3 + total_ridge_length_kms += np.deg2rad(segment) * earth_radius return total_ridge_length_kms else: resolved_topologies = [] shared_boundary_sections = [] - pygplates.resolve_topologies(self.topology_features, self.rotation_model, resolved_topologies, time, shared_boundary_sections) + pygplates.resolve_topologies( + self.topology_features, + self.rotation_model, + resolved_topologies, + time, + shared_boundary_sections, + ) total_ridge_length_kms = 0.0 for shared_boundary_section in shared_boundary_sections: - if shared_boundary_section.get_feature().get_feature_type() != pygplates.FeatureType.gpml_mid_ocean_ridge: + if ( + shared_boundary_section.get_feature().get_feature_type() + != pygplates.FeatureType.gpml_mid_ocean_ridge + ): continue - for shared_sub_segment in shared_boundary_section.get_shared_sub_segments(): - clat, clon = shared_sub_segment.get_resolved_geometry().get_centroid().to_lat_lon() + for ( + shared_sub_segment + ) in shared_boundary_section.get_shared_sub_segments(): + clat, clon = ( + shared_sub_segment.get_resolved_geometry() + .get_centroid() + .to_lat_lon() + ) earth_radius = _tools.geocentric_radius(clat) / 1e3 - total_ridge_length_kms += shared_sub_segment.get_resolved_geometry().get_arc_length()*earth_radius + total_ridge_length_kms += ( + 
shared_sub_segment.get_resolved_geometry().get_arc_length() + * earth_radius + ) return total_ridge_length_kms - def reconstruct(self, feature, to_time, from_time=0, anchor_plate_id=0, **kwargs): """Reconstructs regular geological features, motion paths or flowlines to a specific geological time. - + Parameters ---------- feature : str, or instance of , or , or sequence of - The geological features to reconstruct. Can be provided as a feature collection, or filename, - or feature, or sequence of features, or a sequence (eg, a list or tuple) of any combination of + The geological features to reconstruct. Can be provided as a feature collection, or filename, + or feature, or sequence of features, or a sequence (eg, a list or tuple) of any combination of those four types. to_time : float, or :class:`GeoTimeInstant` The specific geological time to reconstruct to. from_time : float, default=0 - The specific geological time to reconstruct from. By default, this is set to present day. Raises + The specific geological time to reconstruct from. By default, this is set to present day. Raises `NotImplementedError` if `from_time` is not set to 0.0 Ma (present day). anchor_plate_id : int, default=0 - Reconstruct features with respect to a certain anchor plate. By default, reconstructions are made + Reconstruct features with respect to a certain anchor plate. By default, reconstructions are made with respect to the absolute reference frame, like a stationary geological element (e.g. a mantle - plume). This frame is given the plate ID of 0. + plume). This frame is given the plate ID of 0. **reconstruct_type : ReconstructType, default=ReconstructType.feature_geometry - The specific reconstruction type to generate based on input feature geometry type. Can be provided as + The specific reconstruction type to generate based on input feature geometry type. 
Can be provided as pygplates.ReconstructType.feature_geometry to only reconstruct regular feature geometries, or - pygplates.ReconstructType.motion_path to only reconstruct motion path features, or - pygplates.ReconstructType.flowline to only reconstruct flowline features. - Generates :class:`reconstructed feature geometries’, or :class:`reconstructed + pygplates.ReconstructType.motion_path to only reconstruct motion path features, or + pygplates.ReconstructType.flowline to only reconstruct flowline features. + Generates :class:`reconstructed feature geometries’, or :class:`reconstructed motion paths’, or :class:`reconstructed flowlines’ respectively. **group_with_feature : bool, default=False Used to group reconstructed geometries with their features. This can be useful when a feature has more than one - geometry and hence more than one reconstructed geometry. The output *reconstructed_geometries* then becomes a - list of tuples where each tuple contains a :class:`feature` and a ``list`` of reconstructed geometries. - Note: this keyword argument only applies when *reconstructed_geometries* is a list because exported files are + geometry and hence more than one reconstructed geometry. The output *reconstructed_geometries* then becomes a + list of tuples where each tuple contains a :class:`feature` and a ``list`` of reconstructed geometries. + Note: this keyword argument only applies when *reconstructed_geometries* is a list because exported files are always grouped with their features. This is applicable to all ReconstructType features. - + **export_wrap_to_dateline : bool, default=True Wrap/clip reconstructed geometries to the dateline. @@ -568,10 +639,10 @@ def reconstruct(self, feature, to_time, from_time=0, anchor_plate_id=0, **kwargs ------- reconstructed_features : list Reconstructed geological features (generated by the reconstruction) are appended to the list. 
- The reconstructed geometries are output in the same order as that of their respective input features (in the - parameter “features”). The order across input feature collections is also retained. This happens regardless - of whether *features* and *reconstructed_features* include files or not. Note: if keyword argument - group_with_feature=True then the list contains tuples that group each :class:`feature` with a list + The reconstructed geometries are output in the same order as that of their respective input features (in the + parameter “features”). The order across input feature collections is also retained. This happens regardless + of whether *features* and *reconstructed_features* include files or not. Note: if keyword argument + group_with_feature=True then the list contains tuples that group each :class:`feature` with a list of its reconstructed geometries. Raises @@ -582,23 +653,28 @@ def reconstruct(self, feature, to_time, from_time=0, anchor_plate_id=0, **kwargs from_time, to_time = float(from_time), float(to_time) reconstructed_features = [] - pygplates.reconstruct(feature, self.rotation_model, reconstructed_features, to_time,\ - anchor_plate_id=anchor_plate_id, **kwargs) + pygplates.reconstruct( + feature, + self.rotation_model, + reconstructed_features, + to_time, + anchor_plate_id=anchor_plate_id, + **kwargs + ) return reconstructed_features - def get_point_velocities(self, lons, lats, time, delta_time=1.0): - """Generates a velocity domain feature collection, resolves them into points, and calculates the north and east - components of the velocity vector for each point in the domain at a particular geological `time`. - + """Generates a velocity domain feature collection, resolves them into points, and calculates the north and east + components of the velocity vector for each point in the domain at a particular geological `time`. + Notes ----- Velocity domain feature collections are MeshNode-type features. 
These are produced from `lons` and `lats` pairs - represented as multi-point geometries (projections onto the surface of the unit length sphere). These features are - resolved into domain points and assigned plate IDs which are used to obtain the equivalent stage rotations of - identified tectonic plates over a time interval (`delta_time`). Each velocity domain point and its stage rotation - are used to calculate the point's plate velocity at a particular `time`. Obtained velocities for each domain point - are represented in the north-east-down coordinate system. + represented as multi-point geometries (projections onto the surface of the unit length sphere). These features are + resolved into domain points and assigned plate IDs which are used to obtain the equivalent stage rotations of + identified tectonic plates over a time interval (`delta_time`). Each velocity domain point and its stage rotation + are used to calculate the point's plate velocity at a particular `time`. Obtained velocities for each domain point + are represented in the north-east-down coordinate system. Parameters ---------- @@ -617,7 +693,7 @@ def get_point_velocities(self, lons, lats, time, delta_time=1.0): Returns ------- all_velocities : 1D list of tuples - For each velocity domain feature point, a tuple of (north, east, down) velocity components is generated and + For each velocity domain feature point, a tuple of (north, east, down) velocity components is generated and appended to a list of velocity data. The length of `all_velocities` is equivalent to the number of domain points resolved from the lat-lon array parameters. 
""" @@ -625,71 +701,87 @@ def get_point_velocities(self, lons, lats, time, delta_time=1.0): time = float(time) - multi_point = pygplates.MultiPointOnSphere([(float(lat),float(lon)) for lat, lon in zip(lats,lons)]) + multi_point = pygplates.MultiPointOnSphere( + [(float(lat), float(lon)) for lat, lon in zip(lats, lons)] + ) # Create a feature containing the multipoint feature, and defined as MeshNode type - meshnode_feature = pygplates.Feature(pygplates.FeatureType.create_from_qualified_string('gpml:MeshNode')) + meshnode_feature = pygplates.Feature( + pygplates.FeatureType.create_from_qualified_string("gpml:MeshNode") + ) meshnode_feature.set_geometry(multi_point) - meshnode_feature.set_name('Velocity Mesh Nodes from pygplates') + meshnode_feature.set_name("Velocity Mesh Nodes from pygplates") velocity_domain_features = pygplates.FeatureCollection(meshnode_feature) - + # NB: at this point, the feature could be written to a file using # output_feature_collection.write('myfilename.gpmlz') - - + # All domain points and associated (magnitude, azimuth, inclination) velocities for the current time. all_domain_points = [] all_velocities = [] # Partition our velocity domain features into our topological plate polygons at the current 'time'. - plate_partitioner = pygplates.PlatePartitioner(self.topology_features, self.rotation_model, time) + plate_partitioner = pygplates.PlatePartitioner( + self.topology_features, self.rotation_model, time + ) for velocity_domain_feature in velocity_domain_features: # A velocity domain feature usually has a single geometry but we'll assume it can be any number. # Iterate over them all. 
for velocity_domain_geometry in velocity_domain_feature.get_geometries(): - for velocity_domain_point in velocity_domain_geometry.get_points(): - all_domain_points.append(velocity_domain_point) - partitioning_plate = plate_partitioner.partition_point(velocity_domain_point) + partitioning_plate = plate_partitioner.partition_point( + velocity_domain_point + ) if partitioning_plate: - # We need the newly assigned plate ID # to get the equivalent stage rotation of that tectonic plate. - partitioning_plate_id = partitioning_plate.get_feature().get_reconstruction_plate_id() + partitioning_plate_id = ( + partitioning_plate.get_feature().get_reconstruction_plate_id() + ) # Get the stage rotation of partitioning plate from 'time + delta_time' to 'time'. - equivalent_stage_rotation = self.rotation_model.get_rotation(time, - partitioning_plate_id, - time + delta_time) + equivalent_stage_rotation = self.rotation_model.get_rotation( + time, partitioning_plate_id, time + delta_time + ) # Calculate velocity at the velocity domain point. # This is from 'time + delta_time' to 'time' on the partitioning plate. velocity_vectors = pygplates.calculate_velocities( [velocity_domain_point], equivalent_stage_rotation, - delta_time) + delta_time, + ) # Convert global 3D velocity vectors to local (magnitude, azimuth, inclination) tuples # (one tuple per point). 
- velocities =pygplates.LocalCartesian.convert_from_geocentric_to_north_east_down( - [velocity_domain_point], - velocity_vectors) - all_velocities.append((velocities[0].get_x(), velocities[0].get_y())) + velocities = pygplates.LocalCartesian.convert_from_geocentric_to_north_east_down( + [velocity_domain_point], velocity_vectors + ) + all_velocities.append( + (velocities[0].get_x(), velocities[0].get_y()) + ) else: - all_velocities.append((0,0)) - - return np.array(all_velocities) + all_velocities.append((0, 0)) + return np.array(all_velocities) - def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate_id=0, return_rate_of_motion=False): - """ Create a path of points to mark the trajectory of a plate's motion + def create_motion_path( + self, + lons, + lats, + time_array, + plate_id=None, + anchor_plate_id=0, + return_rate_of_motion=False, + ): + """Create a path of points to mark the trajectory of a plate's motion through geological time. - + Parameters ---------- lons : arr @@ -697,9 +789,9 @@ def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate lats : arr An array containing the latitudes of seed points on a plate in motion. time_array : arr - An array of reconstruction times at which to determine the trajectory + An array of reconstruction times at which to determine the trajectory of a point on a plate. For example: - + import numpy as np min_time = 30 max_time = 100 @@ -707,30 +799,30 @@ def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate time_array = np.arange(min_time, max_time + time_step, time_step) plate_id : int, default=None - The ID of the moving plate. If this is not passed, the plate ID of the + The ID of the moving plate. If this is not passed, the plate ID of the seed points are ascertained using pygplates' `PlatePartitioner`. anchor_plate_id : int, default=0 The ID of the anchor plate. 
return_rate_of_motion : bool, default=False Choose whether to return the rate of plate motion through time for each - + Returns ------- rlons : ndarray - An n-dimensional array with columns containing the longitudes of - the seed points at each timestep in `time_array`. There are n - columns for n seed points. + An n-dimensional array with columns containing the longitudes of + the seed points at each timestep in `time_array`. There are n + columns for n seed points. rlats : ndarray - An n-dimensional array with columns containing the latitudes of - the seed points at each timestep in `time_array`. There are n - columns for n seed points. + An n-dimensional array with columns containing the latitudes of + the seed points at each timestep in `time_array`. There are n + columns for n seed points. StepTimes StepRates - + Examples -------- To access the latitudes and longitudes of each seed point's motion path: - + for i in np.arange(0,len(seed_points)): current_lons = lon[:,i] current_lats = lat[:,i] @@ -739,45 +831,43 @@ def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate lats = np.atleast_1d(lats) time_array = np.atleast_1d(time_array) - # ndarrays to fill with reconstructed points and + # ndarrays to fill with reconstructed points and # rates of motion (if requested) rlons = np.empty((len(time_array), len(lons))) rlats = np.empty((len(time_array), len(lons))) - + if plate_id is None: query_plate_id = True else: query_plate_id = False plate_ids = np.ones(len(lons), dtype=int) * plate_id - seed_points = zip(lats,lons) + seed_points = zip(lats, lons) for i, lat_lon in enumerate(seed_points): - seed_points_at_digitisation_time = pygplates.PointOnSphere( pygplates.LatLonPoint(float(lat_lon[0]), float(lat_lon[1])) ) - # Allocate the present-day plate ID to the PointOnSphere if + # Allocate the present-day plate ID to the PointOnSphere if # it was not given. 
if query_plate_id: plate_id = _tools.plate_partitioner_for_point( - lat_lon, - self.topology_features, - self.rotation_model + lat_lon, self.topology_features, self.rotation_model ) else: plate_id = plate_ids[i] - + # Create the motion path feature. enforce float and int for C++ signature. motion_path_feature = pygplates.Feature.create_motion_path( - seed_points_at_digitisation_time, - time_array, + seed_points_at_digitisation_time, + time_array, valid_time=(time_array.max(), time_array.min()), relative_plate=int(plate_id), - reconstruction_plate_id=int(anchor_plate_id)) + reconstruction_plate_id=int(anchor_plate_id), + ) reconstructed_motion_paths = self.reconstruct( - motion_path_feature, - to_time=0, + motion_path_feature, + to_time=0, reconstruct_type=pygplates.ReconstructType.motion_path, anchor_plate_id=int(plate_id), ) @@ -786,16 +876,15 @@ def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate trail = reconstructed_motion_path.get_motion_path().to_lat_lon_array() lon, lat = np.flipud(trail[:, 1]), np.flipud(trail[:, 0]) - - rlons[:,i] = lon - rlats[:,i] = lat + + rlons[:, i] = lon + rlats[:, i] = lat # Obtain step-plot coordinates for rate of motion if return_rate_of_motion is True: + StepTimes = np.empty(((len(time_array) - 1) * 2, len(lons))) + StepRates = np.empty(((len(time_array) - 1) * 2, len(lons))) - StepTimes = np.empty(((len(time_array)-1)*2, len(lons))) - StepRates = np.empty(((len(time_array)-1)*2, len(lons))) - # Get timestep TimeStep = [] for j in range(len(time_array) - 1): @@ -806,28 +895,36 @@ def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate # plate relative to the fixed plate in each time step Dist = [] for reconstructed_motion_path in reconstructed_motion_paths: - for segment in reconstructed_motion_path.get_motion_path().get_segments(): - Dist.append(segment.get_arc_length() * _tools.geocentric_radius(segment.get_start_point().to_lat_lon()[0]) / 1e3) + for ( + segment + ) in 
reconstructed_motion_path.get_motion_path().get_segments(): + Dist.append( + segment.get_arc_length() + * _tools.geocentric_radius( + segment.get_start_point().to_lat_lon()[0] + ) + / 1e3 + ) # Note that the motion path coordinates come out starting with the oldest time and working forwards # So, to match our 'times' array, we flip the order Dist = np.flipud(Dist) # Get rate of motion as distance per Myr - Rate = np.asarray(Dist)/TimeStep - + Rate = np.asarray(Dist) / TimeStep + # Manipulate arrays to get a step plot - StepRate = np.zeros(len(Rate)*2) + StepRate = np.zeros(len(Rate) * 2) StepRate[::2] = Rate StepRate[1::2] = Rate - StepTime = np.zeros(len(Rate)*2) + StepTime = np.zeros(len(Rate) * 2) StepTime[::2] = time_array[:-1] StepTime[1::2] = time_array[1:] - + # Append the nth point's step time and step rate coordinates to the ndarray - StepTimes[:,i] = StepTime - StepRates[:,i] = StepRate*0.1 # cm/yr + StepTimes[:, i] = StepTime + StepRates[:, i] = StepRate * 0.1 # cm/yr # Obseleted by Lauren's changes above (though it is more efficient) # multiply arc length of the motion path segment by a latitude-dependent Earth radius @@ -836,15 +933,27 @@ def create_motion_path(self, lons, lats, time_array, plate_id=None, anchor_plate # rate = np.asarray(distance)/np.diff(time_array) # rates[:,i] = np.flipud(rate) # rates *= 0.1 # cm/yr - + if return_rate_of_motion is True: - return np.squeeze(rlons), np.squeeze(rlats), np.squeeze(StepTimes), np.squeeze(StepRates) + return ( + np.squeeze(rlons), + np.squeeze(rlats), + np.squeeze(StepTimes), + np.squeeze(StepRates), + ) else: return np.squeeze(rlons), np.squeeze(rlats) - - def create_flowline(self, lons, lats, time_array, left_plate_ID, right_plate_ID, return_rate_of_motion=False): - """ Create a path of points to track plate motion away from + def create_flowline( + self, + lons, + lats, + time_array, + left_plate_ID, + right_plate_ID, + return_rate_of_motion=False, + ): + """Create a path of points to track plate 
motion away from spreading ridges over time using half-stage rotations. Parameters @@ -863,14 +972,14 @@ def create_flowline(self, lons, lats, time_array, left_plate_ID, right_plate_ID, ridge. return_rate_of_motion : bool, default False Choose whether to return a step time and step rate array - for a step plot of motion. + for a step plot of motion. Returns ------- left_lon : ndarray The longitudes of the __left__ flowline for n seed points. There are n columns for n seed points, and m rows - for m time steps in `time_array`. + for m time steps in `time_array`. left_lat : ndarray The latitudes of the __left__ flowline of n seed points. There are n columns for n seed points, and m rows @@ -890,18 +999,18 @@ def create_flowline(self, lons, lats, time_array, left_plate_ID, right_plate_ID, longitudes: for i in np.arange(0,len(seed_points)): - left_flowline_longitudes = left_lon[:,i] - left_flowline_latitudes = left_lat[:,i] + left_flowline_longitudes = left_lon[:,i] + left_flowline_latitudes = left_lat[:,i] right_flowline_longitudes = right_lon[:,i] right_flowline_latitudes = right_lat[:,i] """ lats = np.atleast_1d(lats) lons = np.atleast_1d(lons) time_array = np.atleast_1d(time_array) - + seed_points = list(zip(lats, lons)) multi_point = pygplates.MultiPointOnSphere(seed_points) - + start = 0 if time_array[0] != 0: start = 1 @@ -913,81 +1022,104 @@ def create_flowline(self, lons, lats, time_array, left_plate_ID, right_plate_ID, time_array.tolist(), valid_time=(time_array.max(), time_array.min()), left_plate=left_plate_ID, - right_plate=right_plate_ID) + right_plate=right_plate_ID, + ) # reconstruct the flowline in present-day coordinates reconstructed_flowlines = self.reconstruct( - flowline_feature, + flowline_feature, to_time=0, - reconstruct_type=pygplates.ReconstructType.flowline) + reconstruct_type=pygplates.ReconstructType.flowline, + ) # Wrap things to the dateline, to avoid plotting artefacts. 
date_line_wrapper = pygplates.DateLineWrapper() # Create lat-lon ndarrays to store the left and right lats and lons of flowlines - left_lon = np.empty((len(time_array), len(lons))) - left_lat = np.empty((len(time_array), len(lons))) + left_lon = np.empty((len(time_array), len(lons))) + left_lat = np.empty((len(time_array), len(lons))) right_lon = np.empty((len(time_array), len(lons))) right_lat = np.empty((len(time_array), len(lons))) - StepTimes = np.empty(((len(time_array)-1)*2, len(lons))) - StepRates = np.empty(((len(time_array)-1)*2, len(lons))) - + StepTimes = np.empty(((len(time_array) - 1) * 2, len(lons))) + StepRates = np.empty(((len(time_array) - 1) * 2, len(lons))) - # Iterate over the reconstructed flowlines. Each seed point results in a 'left' and 'right' flowline + # Iterate over the reconstructed flowlines. Each seed point results in a 'left' and 'right' flowline for i, reconstructed_flowline in enumerate(reconstructed_flowlines): - # Get the points for the left flowline only left_latlon = reconstructed_flowline.get_left_flowline().to_lat_lon_array() - left_lon[:,i] = left_latlon[:,1] - left_lat[:,i] = left_latlon[:,0] + left_lon[:, i] = left_latlon[:, 1] + left_lat[:, i] = left_latlon[:, 0] # Repeat for the right flowline points - right_latlon = reconstructed_flowline.get_right_flowline().to_lat_lon_array() - right_lon[:,i] = right_latlon[:,1] - right_lat[:,i] = right_latlon[:,0] + right_latlon = ( + reconstructed_flowline.get_right_flowline().to_lat_lon_array() + ) + right_lon[:, i] = right_latlon[:, 1] + right_lat[:, i] = right_latlon[:, 0] if return_rate_of_motion: for i, reconstructed_motion_path in enumerate(reconstructed_flowlines): distance = [] - for segment in reconstructed_motion_path.get_left_flowline().get_segments(): - distance.append(segment.get_arc_length() * _tools.geocentric_radius(segment.get_start_point().to_lat_lon()[0]) / 1e3) - + for ( + segment + ) in reconstructed_motion_path.get_left_flowline().get_segments(): + 
distance.append( + segment.get_arc_length() + * _tools.geocentric_radius( + segment.get_start_point().to_lat_lon()[0] + ) + / 1e3 + ) + # Get rate of motion as distance per Myr # Need to multiply rate by 2, since flowlines give us half-spreading rate - time_step = time_array[1]-time_array[0] - Rate = (np.asarray(distance)/time_step) * 2 # since we created the flowline at X increment + time_step = time_array[1] - time_array[0] + Rate = ( + np.asarray(distance) / time_step + ) * 2 # since we created the flowline at X increment # Manipulate arrays to get a step plot - StepRate = np.zeros(len(Rate)*2) + StepRate = np.zeros(len(Rate) * 2) StepRate[::2] = Rate StepRate[1::2] = Rate - StepTime = np.zeros(len(Rate)*2) + StepTime = np.zeros(len(Rate) * 2) StepTime[::2] = time_array[:-1] StepTime[1::2] = time_array[1:] # Append the nth point's step time and step rate coordinates to the ndarray - StepTimes[:,i] = StepTime - StepRates[:,i] = StepRate*0.1 # cm/yr - - return left_lon[start:], left_lat[start:], right_lon[start:], right_lat[start:], StepTimes, StepRates + StepTimes[:, i] = StepTime + StepRates[:, i] = StepRate * 0.1 # cm/yr + + return ( + left_lon[start:], + left_lat[start:], + right_lon[start:], + right_lat[start:], + StepTimes, + StepRates, + ) else: - return left_lon[start:], left_lat[start:], right_lon[start:], right_lat[start:] - + return ( + left_lon[start:], + left_lat[start:], + right_lon[start:], + right_lat[start:], + ) class Points(object): - """`Points` contains methods to reconstruct and work with with geological point data. For example, the - locations and plate velocities of point data can be calculated at a specific geological `time`. The `Points` - object requires the `PlateReconstruction` object to work because it holds the `rotation_model` and `static_polygons` - needed to classify topological plates and quantify feature rotations through time. + """`Points` contains methods to reconstruct and work with with geological point data. 
For example, the + locations and plate velocities of point data can be calculated at a specific geological `time`. The `Points` + object requires the `PlateReconstruction` object to work because it holds the `rotation_model` and `static_polygons` + needed to classify topological plates and quantify feature rotations through time. Attributes ---------- plate_reconstruction : object pointer - Allows for the accessibility of `PlateReconstruction` object attributes: `rotation_model`, `topology_featues` - and `static_polygons` for use in the `Points` object if called using “self.plate_reconstruction.X”, + Allows for the accessibility of `PlateReconstruction` object attributes: `rotation_model`, `topology_featues` + and `static_polygons` for use in the `Points` object if called using “self.plate_reconstruction.X”, where X is the attribute. lons : float, or 1D array @@ -1001,10 +1133,11 @@ class Points(object): the present day (0 Ma). plate_id : int, default=None - The plate ID of a particular tectonic plate on which point data lies, if known. This is obtained in `init` + The plate ID of a particular tectonic plate on which point data lies, if known. This is obtained in `init` if not provided. 
""" + def __init__(self, plate_reconstruction, lons, lats, time=0, plate_id=None): self.lons = lons self.lats = lats @@ -1015,9 +1148,7 @@ def __init__(self, plate_reconstruction, lons, lats, time=0, plate_id=None): self._update(lons, lats, time, plate_id) - def _update(self, lons, lats, time=0, plate_id=None): - # get Cartesian coordinates self.x, self.y, self.z = _tools.lonlat2xyz(lons, lats, degrees=False) @@ -1030,10 +1161,8 @@ def _update(self, lons, lats, time=0, plate_id=None): self.lonlat = np.c_[self.lons, self.lats] self.xyz = np.c_[self.x, self.y, self.z] - rotation_model = self.plate_reconstruction.rotation_model static_polygons = self.plate_reconstruction.static_polygons - features = _tools.points_to_features(lons, lats, plate_id) @@ -1044,10 +1173,8 @@ def _update(self, lons, lats, time=0, plate_id=None): # partition using static polygons # being careful to observe 'from time' partitioned_features = pygplates.partition_into_plates( - static_polygons, - rotation_model, - features, - reconstruction_time=time) + static_polygons, rotation_model, features, reconstruction_time=time + ) self.features = partitioned_features plate_id = np.empty(len(self.lons), dtype=int) @@ -1059,55 +1186,61 @@ def _update(self, lons, lats, time=0, plate_id=None): @property def size(self): - """ Number of points - """ + """Number of points""" return len(self.lons) - def __getstate__(self): - filenames = self.plate_reconstruction.__getstate__() # add important variables from Points object filenames["lons"] = self.lons filenames["lats"] = self.lats - filenames['time'] = self.time - filenames['plate_id'] = self.plate_id + filenames["time"] = self.time + filenames["plate_id"] = self.plate_id for key in self.attributes: filenames[key] = self.attributes[key] return filenames def __setstate__(self, state): - - self.plate_reconstruction = PlateReconstruction(state['rotation_model'], state['topology_features'], state['static_polygons']) + self.plate_reconstruction = 
PlateReconstruction( + state["rotation_model"], + state["topology_features"], + state["static_polygons"], + ) # reinstate unpicklable items - self.lons = state['lons'] - self.lats = state['lats'] - self.time = state['time'] - self.plate_id = state['plate_id'] + self.lons = state["lons"] + self.lats = state["lats"] + self.time = state["time"] + self.plate_id = state["plate_id"] self.attributes = dict() self._update(self.lons, self.lats, self.time, self.plate_id) for key in state: - if key not in ['lons', 'lats', 'time', 'plate_id']: + if key not in ["lons", "lats", "time", "plate_id"]: self.attributes[key] = state[key] def copy(self): - """ Returns a copy of the Points object + """Returns a copy of the Points object Returns ------- Points A copy of the current Points object """ - gpts = Points(self.plate_reconstruction, self.lons.copy(), self.lats.copy(), self.time, self.plate_id.copy()) + gpts = Points( + self.plate_reconstruction, + self.lons.copy(), + self.lats.copy(), + self.time, + self.plate_id.copy(), + ) gpts.add_attributes(**self.attributes.copy()) def add_attributes(self, **kwargs): - """ Adds the value of a feature attribute associated with a key. + """Adds the value of a feature attribute associated with a key. 
Example ------- @@ -1120,8 +1253,8 @@ def add_attributes(self, **kwargs): # Add the attributes a, b and c to the points in the Points object gpts.add_attributes( - a=[10,2,2], - b=[2,3,3], + a=[10,2,2], + b=[2,3,3], c=[30,0,0], ) @@ -1160,7 +1293,9 @@ def add_attributes(self, **kwargs): array = np.full(self.lons.size, attribute, dtype=attribute.dtype) attribute = array - assert len(attribute) == self.lons.size, "Size mismatch, ensure attributes have the same number of entries as Points" + assert ( + len(attribute) == self.lons.size + ), "Size mismatch, ensure attributes have the same number of entries as Points" self.attributes[key] = attribute if any(kwargs): @@ -1174,7 +1309,7 @@ def add_attributes(self, **kwargs): feature.set_shapefile_attribute(key, val) def get_geopandas_dataframe(self): - """ Adds a shapely point `geometry` attribute to each point in the `gplately.Points` object. + """Adds a shapely point `geometry` attribute to each point in the `gplately.Points` object. pandas.DataFrame that has a column with geometry Any existing point attributes are kept. @@ -1194,8 +1329,8 @@ def get_geopandas_dataframe(self): # Add sample attributes a, b and c to the points in the Points object gpts.add_attributes( - a=[10,2,2], - b=[2,3,3], + a=[10,2,2], + b=[2,3,3], c=[30,0,0], ) @@ -1216,15 +1351,15 @@ def get_geopandas_dataframe(self): # create shapely points points = [] for lon, lat in zip(self.lons, self.lats): - points.append( geometry.Point(lon, lat) ) + points.append(geometry.Point(lon, lat)) attributes = self.attributes.copy() - attributes['geometry'] = points + attributes["geometry"] = points - return gpd.GeoDataFrame(attributes, geometry='geometry') + return gpd.GeoDataFrame(attributes, geometry="geometry") def get_geodataframe(self): - """ Returns the output of `Points.get_geopandas_dataframe()`. + """Returns the output of `Points.get_geopandas_dataframe()`. Adds a shapely point `geometry` attribute to each point in the `gplately.Points` object. 
pandas.DataFrame that has a column with geometry @@ -1246,8 +1381,8 @@ def get_geodataframe(self): # Add sample attributes a, b and c to the points in the Points object gpts.add_attributes( - a=[10,2,2], - b=[2,3,3], + a=[10,2,2], + b=[2,3,3], c=[30,0,0], ) @@ -1265,10 +1400,10 @@ def get_geodataframe(self): return self.get_geopandas_dataframe() def reconstruct(self, time, anchor_plate_id=0, return_array=False, **kwargs): - """Reconstructs regular geological features, motion paths or flowlines to a specific geological time and extracts + """Reconstructs regular geological features, motion paths or flowlines to a specific geological time and extracts the latitudinal and longitudinal points of these features. - Note: this method accesses and uses the rotation model attribute from the PointReconstruction object, and reconstructs + Note: this method accesses and uses the rotation model attribute from the PointReconstruction object, and reconstructs the feature lat-lon point attributes of the Points object. Parameters @@ -1277,7 +1412,7 @@ def reconstruct(self, time, anchor_plate_id=0, return_array=False, **kwargs): The specific geological time (Ma) to reconstruct features to. anchor_plate_id : int, default=0 - Reconstruct features with respect to a certain anchor plate. By default, reconstructions are made + Reconstruct features with respect to a certain anchor plate. By default, reconstructions are made with respect to the absolute reference frame, like a stationary geological element (e.g. a mantle plume). This frame is given the plate ID of 0. @@ -1293,9 +1428,9 @@ def reconstruct(self, time, anchor_plate_id=0, return_array=False, **kwargs): **group_with_feature : bool, default=False Used to group reconstructed geometries with their features. This can be useful when a feature has more than one - geometry and hence more than one reconstructed geometry. 
The output *reconstructed_geometries* then becomes a - list of tuples where each tuple contains a :class:`feature` and a ``list`` of reconstructed geometries. - Note: this keyword argument only applies when *reconstructed_geometries* is a list because exported files are + geometry and hence more than one reconstructed geometry. The output *reconstructed_geometries* then becomes a + list of tuples where each tuple contains a :class:`feature` and a ``list`` of reconstructed geometries. + Note: this keyword argument only applies when *reconstructed_geometries* is a list because exported files are always grouped with their features. This is applicable to all ReconstructType features. **export_wrap_to_dateline : bool, default=True @@ -1304,10 +1439,10 @@ def reconstruct(self, time, anchor_plate_id=0, return_array=False, **kwargs): Returns ------- rlons : list of float - A 1D numpy array enclosing all reconstructed point features' longitudes. + A 1D numpy array enclosing all reconstructed point features' longitudes. rlats : list of float - A 1D numpy array enclosing all reconstructed point features' latitudes. + A 1D numpy array enclosing all reconstructed point features' latitudes. 
Raises ------ @@ -1317,31 +1452,37 @@ def reconstruct(self, time, anchor_plate_id=0, return_array=False, **kwargs): from_time = self.time to_time = time reconstructed_features = self.plate_reconstruction.reconstruct( - self.features, to_time, from_time, anchor_plate_id=anchor_plate_id, **kwargs) + self.features, to_time, from_time, anchor_plate_id=anchor_plate_id, **kwargs + ) rlons, rlats = _tools.extract_feature_lonlat(reconstructed_features) if return_array: return rlons, rlats else: - gpts = Points(self.plate_reconstruction, rlons, rlats, time=to_time, plate_id=self.plate_id) + gpts = Points( + self.plate_reconstruction, + rlons, + rlats, + time=to_time, + plate_id=self.plate_id, + ) gpts.add_attributes(**self.attributes.copy()) return gpts - def reconstruct_to_birth_age(self, ages, anchor_plate_id=0, **kwargs): - """ Reconstructs point features supplied to the `Points` object from the supplied initial time (`self.time`) - to a range of times. The number of supplied times must equal the number of point features supplied to the Points object. + """Reconstructs point features supplied to the `Points` object from the supplied initial time (`self.time`) + to a range of times. The number of supplied times must equal the number of point features supplied to the Points object. Attributes ---------- ages : array - Geological times to reconstruct features to. Must have the same length as the `Points `object's `self.features` attribute + Geological times to reconstruct features to. Must have the same length as the `Points `object's `self.features` attribute (which holds all point features represented on a unit length sphere in 3D Cartesian coordinates). anchor_plate_id : int, default=0 - Reconstruct features with respect to a certain anchor plate. By default, reconstructions are made + Reconstruct features with respect to a certain anchor plate. By default, reconstructions are made with respect to the absolute reference frame, like a stationary geological element (e.g. 
a mantle plume). This frame is given the plate ID of 0. - **kwargs + **kwargs Additional keyword arguments for the `gplately.PlateReconstruction.reconstruct` method. Raises @@ -1385,24 +1526,24 @@ def reconstruct_to_birth_age(self, ages, anchor_plate_id=0, **kwargs): mask_age = ages == age reconstructed_features = self.plate_reconstruction.reconstruct( - self.features, age, from_time, anchor_plate_id=anchor_plate_id, **kwargs) + self.features, age, from_time, anchor_plate_id=anchor_plate_id, **kwargs + ) lons, lats = _tools.extract_feature_lonlat(reconstructed_features) rlons[mask_age] = lons[mask_age] rlats[mask_age] = lats[mask_age] - return rlons, rlats def plate_velocity(self, time, delta_time=1): """Calculates the x and y components of tectonic plate velocities at a particular geological time. - This method accesses and uses the `rotation_model` attribute from the `PlateReconstruction` object and uses the `Points` + This method accesses and uses the `rotation_model` attribute from the `PlateReconstruction` object and uses the `Points` object's `self.features` attribute. Feature points are extracted and assigned plate IDs. These IDs are used to obtain the equivalent stage rotations of identified tectonic plates over a time interval `delta_time`. Each feature point and its stage rotation are used to calculate the point's plate velocity at a particular geological time. Obtained velocities for each domain - point are represented in the north-east-down coordinate system, and their x,y Cartesian coordinate components are extracted. + point are represented in the north-east-down coordinate system, and their x,y Cartesian coordinate components are extracted. Parameters ---------- @@ -1411,13 +1552,13 @@ def plate_velocity(self, time, delta_time=1): delta_time : float, default=1.0 The time increment used for generating partitioning plate stage rotations. 1.0 Ma by default. 
- + Returns ------- all_velocities.T : 2D numpy list - A transposed 2D numpy list with two rows and a number of columns equal to the number of x,y Cartesian velocity - components obtained (and thus the number of feature points extracted from a supplied feature). Each list column + A transposed 2D numpy list with two rows and a number of columns equal to the number of x,y Cartesian velocity + components obtained (and thus the number of feature points extracted from a supplied feature). Each list column stores one point’s x,y, velocity components along its two rows. """ time = float(time) @@ -1428,33 +1569,37 @@ def plate_velocity(self, time, delta_time=1): for i, feature in enumerate(self.features): geometry = feature.get_geometry() partitioning_plate_id = feature.get_reconstruction_plate_id() - equivalent_stage_rotation = rotation_model.get_rotation(time, partitioning_plate_id, time+delta_time) - + equivalent_stage_rotation = rotation_model.get_rotation( + time, partitioning_plate_id, time + delta_time + ) + velocity_vectors = pygplates.calculate_velocities( [geometry], equivalent_stage_rotation, delta_time, - pygplates.VelocityUnits.cms_per_yr) - - velocities = pygplates.LocalCartesian.convert_from_geocentric_to_north_east_down( - [geometry], - velocity_vectors) + pygplates.VelocityUnits.cms_per_yr, + ) + + velocities = ( + pygplates.LocalCartesian.convert_from_geocentric_to_north_east_down( + [geometry], velocity_vectors + ) + ) all_velocities[i] = velocities[0].get_y(), velocities[0].get_x() return list(all_velocities.T) - def motion_path(self, time_array, anchor_plate_id=0, return_rate_of_motion=False): - """ Create a path of points to mark the trajectory of a plate's motion + """Create a path of points to mark the trajectory of a plate's motion through geological time. 
- + Parameters ---------- time_array : arr - An array of reconstruction times at which to determine the trajectory + An array of reconstruction times at which to determine the trajectory of a point on a plate. For example: - + import numpy as np min_time = 30 max_time = 100 @@ -1465,43 +1610,44 @@ def motion_path(self, time_array, anchor_plate_id=0, return_rate_of_motion=False The ID of the anchor plate. return_rate_of_motion : bool, default=False Choose whether to return the rate of plate motion through time for each - + Returns ------- rlons : ndarray - An n-dimensional array with columns containing the longitudes of - the seed points at each timestep in `time_array`. There are n - columns for n seed points. + An n-dimensional array with columns containing the longitudes of + the seed points at each timestep in `time_array`. There are n + columns for n seed points. rlats : ndarray - An n-dimensional array with columns containing the latitudes of - the seed points at each timestep in `time_array`. There are n - columns for n seed points. + An n-dimensional array with columns containing the latitudes of + the seed points at each timestep in `time_array`. There are n + columns for n seed points. 
""" time_array = np.atleast_1d(time_array) - - # ndarrays to fill with reconstructed points and + + # ndarrays to fill with reconstructed points and # rates of motion (if requested) rlons = np.empty((len(time_array), len(self.lons))) rlats = np.empty((len(time_array), len(self.lons))) for i, point_feature in enumerate(self.FeatureCollection): - # Create the motion path feature motion_path_feature = pygplates.Feature.create_motion_path( - point_feature.get_geometry(), + point_feature.get_geometry(), time_array.tolist(), valid_time=(time_array.max(), time_array.min()), - #relative_plate=int(self.plate_id[i]), - #reconstruction_plate_id=int(anchor_plate_id)) + # relative_plate=int(self.plate_id[i]), + # reconstruction_plate_id=int(anchor_plate_id)) relative_plate=int(self.plate_id[i]), - reconstruction_plate_id=int(anchor_plate_id)) + reconstruction_plate_id=int(anchor_plate_id), + ) reconstructed_motion_paths = self.plate_reconstruction.reconstruct( - motion_path_feature, - to_time=0, - #from_time=0, + motion_path_feature, + to_time=0, + # from_time=0, reconstruct_type=pygplates.ReconstructType.motion_path, - anchor_plate_id=int(anchor_plate_id)) + anchor_plate_id=int(anchor_plate_id), + ) # Turn motion paths in to lat-lon coordinates for reconstructed_motion_path in reconstructed_motion_paths: @@ -1509,15 +1655,14 @@ def motion_path(self, time_array, anchor_plate_id=0, return_rate_of_motion=False lon, lat = np.flipud(trail[:, 1]), np.flipud(trail[:, 0]) - rlons[:,i] = lon - rlats[:,i] = lat + rlons[:, i] = lon + rlats[:, i] = lat # Obtain step-plot coordinates for rate of motion if return_rate_of_motion is True: + StepTimes = np.empty(((len(time_array) - 1) * 2, len(self.lons))) + StepRates = np.empty(((len(time_array) - 1) * 2, len(self.lons))) - StepTimes = np.empty(((len(time_array)-1)*2, len(self.lons))) - StepRates = np.empty(((len(time_array)-1)*2, len(self.lons))) - # Get timestep TimeStep = [] for j in range(len(time_array) - 1): @@ -1528,39 +1673,53 @@ def 
motion_path(self, time_array, anchor_plate_id=0, return_rate_of_motion=False # plate relative to the fixed plate in each time step Dist = [] for reconstructed_motion_path in reconstructed_motion_paths: - for segment in reconstructed_motion_path.get_motion_path().get_segments(): - Dist.append(segment.get_arc_length() * _tools.geocentric_radius(segment.get_start_point().to_lat_lon()[0]) / 1e3) + for ( + segment + ) in reconstructed_motion_path.get_motion_path().get_segments(): + Dist.append( + segment.get_arc_length() + * _tools.geocentric_radius( + segment.get_start_point().to_lat_lon()[0] + ) + / 1e3 + ) # Note that the motion path coordinates come out starting with the oldest time and working forwards # So, to match our 'times' array, we flip the order Dist = np.flipud(Dist) # Get rate of motion as distance per Myr - Rate = np.asarray(Dist)/TimeStep - + Rate = np.asarray(Dist) / TimeStep + # Manipulate arrays to get a step plot - StepRate = np.zeros(len(Rate)*2) + StepRate = np.zeros(len(Rate) * 2) StepRate[::2] = Rate StepRate[1::2] = Rate - StepTime = np.zeros(len(Rate)*2) + StepTime = np.zeros(len(Rate) * 2) StepTime[::2] = time_array[:-1] StepTime[1::2] = time_array[1:] - + # Append the nth point's step time and step rate coordinates to the ndarray - StepTimes[:,i] = StepTime - StepRates[:,i] = StepRate*0.1 # cm/yr - + StepTimes[:, i] = StepTime + StepRates[:, i] = StepRate * 0.1 # cm/yr + if return_rate_of_motion is True: - return np.squeeze(rlons), np.squeeze(rlats), np.squeeze(StepTimes), np.squeeze(StepRates) + return ( + np.squeeze(rlons), + np.squeeze(rlats), + np.squeeze(StepTimes), + np.squeeze(StepRates), + ) else: return np.squeeze(rlons), np.squeeze(rlats) - - def flowline(self, time_array, left_plate_ID, right_plate_ID, return_rate_of_motion=False): - """ Create a path of points to track plate motion away from + def flowline( + self, time_array, left_plate_ID, right_plate_ID, return_rate_of_motion=False + ): + """Create a path of points to track 
plate motion away from spreading ridges over time using half-stage rotations. - + Parameters ---------- lons : arr @@ -1578,13 +1737,13 @@ def flowline(self, time_array, left_plate_ID, right_plate_ID, return_rate_of_mot return_rate_of_motion : bool, default False Choose whether to return a step time and step rate array for a step-plot of flowline motion. - + Returns ------- left_lon : ndarray The longitudes of the __left__ flowline for n seed points. There are n columns for n seed points, and m rows - for m time steps in `time_array`. + for m time steps in `time_array`. left_lat : ndarray The latitudes of the __left__ flowline of n seed points. There are n columns for n seed points, and m rows @@ -1597,38 +1756,44 @@ def flowline(self, time_array, left_plate_ID, right_plate_ID, return_rate_of_mot The latitudes of the __right__ flowline of n seed points. There are n columns for n seed points, and m rows for m time steps in `time_array`. - + Examples -------- To access the ith seed point's left and right latitudes and longitudes: - + for i in np.arange(0,len(seed_points)): - left_flowline_longitudes = left_lon[:,i] - left_flowline_latitudes = left_lat[:,i] + left_flowline_longitudes = left_lon[:,i] + left_flowline_latitudes = left_lat[:,i] right_flowline_longitudes = right_lon[:,i] right_flowline_latitudes = right_lat[:,i] """ model = self.plate_reconstruction - return model.create_flowline(self.lons, self.lats, time_array, left_plate_ID, right_plate_ID, return_rate_of_motion) - + return model.create_flowline( + self.lons, + self.lats, + time_array, + left_plate_ID, + right_plate_ID, + return_rate_of_motion, + ) def _get_dataframe(self): import geopandas as gpd data = dict() data["Longitude"] = self.lons - data["Latitude"] = self.lats - data["Plate_ID"] = self.plate_id + data["Latitude"] = self.lats + data["Plate_ID"] = self.plate_id for key in self.attributes: data[key] = self.attributes[key] return gpd.GeoDataFrame(data) def save(self, filename): - """Saves the 
feature collection used in the Points object under a given filename to the current directory. + """Saves the feature collection used in the Points object under a given filename to the current directory. - The file format is determined from the filename extension. + The file format is determined from the filename extension. Parameters ---------- @@ -1641,23 +1806,25 @@ def save(self, filename): """ filename = str(filename) - if filename.endswith(('.csv', '.txt', '.dat')): + if filename.endswith((".csv", ".txt", ".dat")): df = self._get_dataframe() df.to_csv(filename, index=False) - elif filename.endswith(('.xls', '.xlsx')): + elif filename.endswith((".xls", ".xlsx")): df = self._get_dataframe() df.to_excel(filename, index=False) - elif filename.endswith('xml'): + elif filename.endswith("xml"): df = self._get_dataframe() df.to_xml(filename, index=False) - elif filename.endswith('.gpml') or filename.endswith('.gpmlz'): + elif filename.endswith(".gpml") or filename.endswith(".gpmlz"): self.FeatureCollection.write(filename) else: - raise ValueError("Cannot save to specified file type. Use csv, gpml, or xls file extension.") + raise ValueError( + "Cannot save to specified file type. Use csv, gpml, or xls file extension." + ) # FROM RECONSTRUCT_BY_TOPOLOGIES.PY @@ -1675,9 +1842,10 @@ class _DefaultCollision(object): """ def __init__( - self, - global_collision_parameters = DEFAULT_GLOBAL_COLLISION_PARAMETERS, - feature_specific_collision_parameters = None): + self, + global_collision_parameters=DEFAULT_GLOBAL_COLLISION_PARAMETERS, + feature_specific_collision_parameters=None, + ): """ global_collision_parameters: The collision parameters to use for any feature type not specified in 'feature_specific_collision_parameters'. Should be a 2-tuple of (threshold velocity delta in kms/my, threshold distance to boundary per My in kms/my). @@ -1690,7 +1858,7 @@ def __init__( (note that some boundaries are meant to do this and others are a result of digitisation). 
The actual distance threshold used is (threshold_distance_to_boundary + relative_velocity) * time_interval Defaults to parameters used in GPlates 2.0, if not specified. - + feature_specific_collision_parameters: Optional sequence of collision parameters specific to feature types. If specified then should be a sequence of 2-tuples, with each 2-tuple specifying (feature_type, collision_parameters). And where each 'collision_parameters' is a 2-tuple of (threshold velocity delta in kms/my, threshold distance to boundary per My in kms/my). @@ -1700,7 +1868,9 @@ def __init__( # Convert list of (feature_type, collision_parameters) tuples to a dictionary. if feature_specific_collision_parameters: - self.feature_specific_collision_parameters = dict(feature_specific_collision_parameters) + self.feature_specific_collision_parameters = dict( + feature_specific_collision_parameters + ) else: self.feature_specific_collision_parameters = dict() # Fallback for any feature type not specified in the optional feature-specific list. @@ -1710,29 +1880,29 @@ def __init__( self.velocity_stage_rotation_dict = {} self.velocity_stage_rotation_time = None - def __call__( - self, - rotation_model, - time, - reconstruction_time_interval, - prev_point, - curr_point, - prev_topology_plate_id, - prev_resolved_plate_boundary, - curr_topology_plate_id, - curr_resolved_plate_boundary): + self, + rotation_model, + time, + reconstruction_time_interval, + prev_point, + curr_point, + prev_topology_plate_id, + prev_resolved_plate_boundary, + curr_topology_plate_id, + curr_resolved_plate_boundary, + ): """ Returns True if a collision was detected. If transitioning from a rigid plate to another rigid plate with a different plate ID then calculate the difference in velocities and continue testing as follows (otherwise, if there's no transition, then the point is still active)... - + If the velocity difference is below a threshold then we assume the previous plate was split, or two plates joined. 
In this case the point has not subducted (forward in time) or been consumed by a mid-ocean (backward in time) and hence is still active. - + If the velocity difference is large enough then we see if the distance of the *previous* position to the polygon boundary (of rigid plate containing it) exceeds a threshold. If the distance exceeds the threshold then the point is far enough away from the boundary that it @@ -1742,29 +1912,30 @@ def __call__( Also note that the threshold distance increases according to the velocity difference to account for fast moving points (that would otherwise tunnel through the boundary and accrete onto the other plate). The reason for testing the distance from the *previous* point, and not from the *current* point, is: - + (i) A topological boundary may *appear* near the current point (such as a plate split at the current time) and we don't want that split to consume the current point regardless of the velocity difference. It won't get consumed because the *previous* point was not near a boundary (because before split happened). If the velocity difference is large enough then it might cause the current point to transition to the adjacent split plate in the *next* time step (and that's when it should get consumed, not in the current time step). An example of this is a mid-ocean ridge suddenly appearing (going forward in time). - + (ii) A topological boundary may *disappear* near the current point (such as a plate merge at the current time) and we want that merge to consume the current point if the velocity difference is large enough. In this case the *previous* point is near a boundary (because before plate merged) and hence can be consumed (provided velocity difference is large enough). And since the boundary existed in the previous time step, it will affect position of the current point (and whether it gets consumed or not). An example of this is a mid-ocean ridge suddenly disappearing (going backward in time). 
- + ...note that items (i) and (ii) above apply both going forward and backward in time. """ # See if a collision occurred. - if (curr_topology_plate_id != prev_topology_plate_id and - prev_topology_plate_id is not None and - curr_topology_plate_id is not None): - + if ( + curr_topology_plate_id != prev_topology_plate_id + and prev_topology_plate_id is not None + and curr_topology_plate_id is not None + ): # # Speed up by caching velocity stage rotations in a dict. # @@ -1776,73 +1947,103 @@ def __call__( # (which would also require including 'time' in the key) and using memory unnecessarily. self.velocity_stage_rotation_dict.clear() self.velocity_stage_rotation_time = time - prev_location_velocity_stage_rotation = self.velocity_stage_rotation_dict.get(prev_topology_plate_id) + prev_location_velocity_stage_rotation = ( + self.velocity_stage_rotation_dict.get(prev_topology_plate_id) + ) if not prev_location_velocity_stage_rotation: - prev_location_velocity_stage_rotation = rotation_model.get_rotation(time + 1, prev_topology_plate_id, time) - self.velocity_stage_rotation_dict[prev_topology_plate_id] = prev_location_velocity_stage_rotation - curr_location_velocity_stage_rotation = self.velocity_stage_rotation_dict.get(curr_topology_plate_id) + prev_location_velocity_stage_rotation = rotation_model.get_rotation( + time + 1, prev_topology_plate_id, time + ) + self.velocity_stage_rotation_dict[ + prev_topology_plate_id + ] = prev_location_velocity_stage_rotation + curr_location_velocity_stage_rotation = ( + self.velocity_stage_rotation_dict.get(curr_topology_plate_id) + ) if not curr_location_velocity_stage_rotation: - curr_location_velocity_stage_rotation = rotation_model.get_rotation(time + 1, curr_topology_plate_id, time) - self.velocity_stage_rotation_dict[curr_topology_plate_id] = curr_location_velocity_stage_rotation + curr_location_velocity_stage_rotation = rotation_model.get_rotation( + time + 1, curr_topology_plate_id, time + ) + 
self.velocity_stage_rotation_dict[ + curr_topology_plate_id + ] = curr_location_velocity_stage_rotation # Note that even though the current point is not inside the previous boundary (because different plate ID), we can still # calculate a velocity using its plate ID (because we really should use the same point in our velocity comparison). prev_location_velocity = pygplates.calculate_velocities( - (curr_point,), prev_location_velocity_stage_rotation, 1, pygplates.VelocityUnits.kms_per_my)[0] + (curr_point,), + prev_location_velocity_stage_rotation, + 1, + pygplates.VelocityUnits.kms_per_my, + )[0] curr_location_velocity = pygplates.calculate_velocities( - (curr_point,), curr_location_velocity_stage_rotation, 1, pygplates.VelocityUnits.kms_per_my)[0] + (curr_point,), + curr_location_velocity_stage_rotation, + 1, + pygplates.VelocityUnits.kms_per_my, + )[0] delta_velocity = curr_location_velocity - prev_location_velocity delta_velocity_magnitude = delta_velocity.get_magnitude() - + # If we have feature-specific collision parameters then iterate over the boundary sub-segments of the *previous* topological boundary # and test proximity to each sub-segment individually (with sub-segment feature type specific collision parameters). # Otherwise just test proximity to the entire boundary polygon using the global collision parameters. if self.feature_specific_collision_parameters: - for prev_boundary_sub_segment in prev_resolved_plate_boundary.get_boundary_sub_segments(): + for ( + prev_boundary_sub_segment + ) in prev_resolved_plate_boundary.get_boundary_sub_segments(): # Use feature-specific collision parameters if found (falling back to global collision parameters). - threshold_velocity_delta, threshold_distance_to_boundary_per_my = self.feature_specific_collision_parameters.get( - prev_boundary_sub_segment.get_feature().get_feature_type(), - # Default to global collision parameters if no collision parameters specified for sub-segment's feature type... 
- self.global_collision_parameters) - + ( + threshold_velocity_delta, + threshold_distance_to_boundary_per_my, + ) = self.feature_specific_collision_parameters.get( + prev_boundary_sub_segment.get_feature().get_feature_type(), + # Default to global collision parameters if no collision parameters specified for sub-segment's feature type... + self.global_collision_parameters, + ) + # Since each feature type could use different collision parameters we must use the current boundary sub-segment instead of the boundary polygon. if self._detect_collision_using_collision_parameters( - reconstruction_time_interval, - delta_velocity_magnitude, - prev_point, - prev_boundary_sub_segment.get_resolved_geometry(), - threshold_velocity_delta, - threshold_distance_to_boundary_per_my): + reconstruction_time_interval, + delta_velocity_magnitude, + prev_point, + prev_boundary_sub_segment.get_resolved_geometry(), + threshold_velocity_delta, + threshold_distance_to_boundary_per_my, + ): # Detected a collision. return True else: # No feature-specific collision parameters so use global fallback. - threshold_velocity_delta, threshold_distance_to_boundary_per_my = self.global_collision_parameters - + ( + threshold_velocity_delta, + threshold_distance_to_boundary_per_my, + ) = self.global_collision_parameters + # Since all feature types use the same collision parameters we can use the boundary polygon instead of iterating over its sub-segments. if self._detect_collision_using_collision_parameters( - reconstruction_time_interval, - delta_velocity_magnitude, - prev_point, - prev_resolved_plate_boundary.get_resolved_boundary(), - threshold_velocity_delta, - threshold_distance_to_boundary_per_my): + reconstruction_time_interval, + delta_velocity_magnitude, + prev_point, + prev_resolved_plate_boundary.get_resolved_boundary(), + threshold_velocity_delta, + threshold_distance_to_boundary_per_my, + ): # Detected a collision. 
return True - + return False - - + def _detect_collision_using_collision_parameters( - self, - reconstruction_time_interval, - delta_velocity_magnitude, - prev_point, - prev_boundary_geometry, - threshold_velocity_delta, - threshold_distance_to_boundary_per_my): - + self, + reconstruction_time_interval, + delta_velocity_magnitude, + prev_point, + prev_boundary_geometry, + threshold_velocity_delta, + threshold_distance_to_boundary_per_my, + ): if delta_velocity_magnitude > threshold_velocity_delta: # Add the minimum distance threshold to the delta velocity threshold. # The delta velocity threshold only allows those points that are close enough to the boundary to reach @@ -1850,8 +2051,11 @@ def _detect_collision_using_collision_parameters( # The minimum distance threshold accounts for sudden changes in the shape of a plate boundary # which are no supposed to represent a new or shifted boundary but are just a result of the topology # builder/user digitising a new boundary line that differs noticeably from that of the previous time period. - distance_threshold_radians = ((threshold_distance_to_boundary_per_my + delta_velocity_magnitude) - * reconstruction_time_interval / pygplates.Earth.equatorial_radius_in_kms) + distance_threshold_radians = ( + (threshold_distance_to_boundary_per_my + delta_velocity_magnitude) + * reconstruction_time_interval + / pygplates.Earth.equatorial_radius_in_kms + ) distance_threshold_radians = min(distance_threshold_radians, math.pi) distance_threshold_radians = max(distance_threshold_radians, 0.0) @@ -1863,7 +2067,7 @@ def _detect_collision_using_collision_parameters( if distance is not None: # Detected a collision. return True - + return False @@ -1889,11 +2093,11 @@ def __init__( verbose: Print progress messages """ - + self.grd_output_dir = grd_output_dir self.chain_collision_detection = chain_collision_detection self.verbose = verbose - + # Load a new grid each time the reconstruction time changes. 
self.grid_time = None @@ -1908,12 +2112,10 @@ def grid_time(self, time): if time is None: self._grid_time = time else: - filename = '{:s}'.format(self.grd_output_dir.format(time)) + filename = "{:s}".format(self.grd_output_dir.format(time)) if self.verbose: print( - 'Points masked against grid: {0}'.format( - os.path.basename(filename) - ) + "Points masked against grid: {0}".format(os.path.basename(filename)) ) gridZ, gridX, gridY = read_netcdf_grid(filename, return_grids=True) self.gridZ = gridZ @@ -1924,18 +2126,18 @@ def grid_time(self, time): self.ymax = np.nanmax(gridY) self._grid_time = float(time) - def __call__( - self, - rotation_model, - time, - reconstruction_time_interval, - prev_point, - curr_point, - prev_topology_plate_id, - prev_resolved_plate_boundary, - curr_topology_plate_id, - curr_resolved_plate_boundary): + self, + rotation_model, + time, + reconstruction_time_interval, + prev_point, + curr_point, + prev_topology_plate_id, + prev_resolved_plate_boundary, + curr_topology_plate_id, + curr_resolved_plate_boundary, + ): """ Returns True if a collision with a continent was detected, or returns result of chained collision detection if 'self.chain_collision_detection' is not None. @@ -1947,14 +2149,8 @@ def __call__( # Sample mask grid, which is one over continents and zero over oceans. point_lat, point_lon = curr_point.to_lat_lon() - point_i = (self.ni - 1) * ( - (point_lat - self.ymin) - / (self.ymax - self.ymin) - ) - point_j = (self.nj - 1) * ( - (point_lon - self.xmin) - / (self.xmax - self.xmin) - ) + point_i = (self.ni - 1) * ((point_lat - self.ymin) / (self.ymax - self.ymin)) + point_j = (self.nj - 1) * ((point_lon - self.xmin) / (self.xmax - self.xmin)) point_i_uint = np.rint(point_i).astype(np.uint) point_j_uint = np.rint(point_j).astype(np.uint) try: @@ -1971,93 +2167,99 @@ def __call__( # We didn't find a collision, so ask the chained collision detection if it did (if we have anything chained). 
if self.chain_collision_detection: return self.chain_collision_detection( - rotation_model, - time, - reconstruction_time_interval, - prev_point, - curr_point, - prev_topology_plate_id, - prev_resolved_plate_boundary, - curr_topology_plate_id, - curr_resolved_plate_boundary) + rotation_model, + time, + reconstruction_time_interval, + prev_point, + curr_point, + prev_topology_plate_id, + prev_resolved_plate_boundary, + curr_topology_plate_id, + curr_resolved_plate_boundary, + ) return False -def reconstruct_points( - rotation_features_or_model, - topology_features, - reconstruction_begin_time, - reconstruction_end_time, - reconstruction_time_interval, - points, - point_begin_times = None, - point_end_times = None, - point_plate_ids = None, - detect_collisions = _DEFAULT_COLLISION): +def reconstruct_points( + rotation_features_or_model, + topology_features, + reconstruction_begin_time, + reconstruction_end_time, + reconstruction_time_interval, + points, + point_begin_times=None, + point_end_times=None, + point_plate_ids=None, + detect_collisions=_DEFAULT_COLLISION, +): """ Function to reconstruct points using the ReconstructByTopologies class below. - + For description of parameters see the ReconstructByTopologies class below. """ - + topology_reconstruction = _ReconstructByTopologies( - rotation_features_or_model, - topology_features, - reconstruction_begin_time, - reconstruction_end_time, - reconstruction_time_interval, - points, - point_begin_times, - point_end_times, - point_plate_ids, - detect_collisions) - + rotation_features_or_model, + topology_features, + reconstruction_begin_time, + reconstruction_end_time, + reconstruction_time_interval, + points, + point_begin_times, + point_end_times, + point_plate_ids, + detect_collisions, + ) + return topology_reconstruction.reconstruct() class _ReconstructByTopologies(object): """ Class to reconstruct geometries using topologies. - + Currently only points are supported. 
- + use_plate_partitioner: If True then use pygplates.PlatePartitioner to partition points, otherwise use faster points_in_polygons.find_polygons(). """ + use_plate_partitioner = False - - + def __init__( - self, - rotation_features_or_model, - topology_features, - reconstruction_begin_time, - reconstruction_end_time, - reconstruction_time_interval, - points, - point_begin_times = None, - point_end_times = None, - point_plate_ids = None, - detect_collisions = _DEFAULT_COLLISION): + self, + rotation_features_or_model, + topology_features, + reconstruction_begin_time, + reconstruction_end_time, + reconstruction_time_interval, + points, + point_begin_times=None, + point_end_times=None, + point_plate_ids=None, + detect_collisions=_DEFAULT_COLLISION, + ): """ rotation_features_or_model: Rotation model or feature collection(s), or list of features, or filename(s). - + topology_features: Topology feature collection(s), or list of features, or filename(s) or any combination of those. - + detect_collisions: Collision detection function, or None. Defaults to _DEFAULT_COLLISION. """ - + # Turn rotation data into a RotationModel (if not already). if not isinstance(rotation_features_or_model, pygplates.RotationModel): rotation_model = pygplates.RotationModel(rotation_features_or_model) else: rotation_model = rotation_features_or_model self.rotation_model = rotation_model - + # Turn topology data into a list of features (if not already). - self.topology_features = pygplates.FeaturesFunctionArgument(topology_features).get_features() - + self.topology_features = pygplates.FeaturesFunctionArgument( + topology_features + ).get_features() + # Set up an array of reconstruction times covering the reconstruction time span. self.reconstruction_begin_time = reconstruction_begin_time self.reconstruction_end_time = reconstruction_end_time @@ -2071,72 +2273,82 @@ def __init__( # Get number of times including end time if time span is a multiple of time step. 
# The '1' is because, for example, 2 time intervals is 3 times. # The '1e-6' deals with limited floating-point precision, eg, we want (3.0 - 0.0) / 1.0 to be 3.0 and not 2.999999 (which gets truncated to 2). - self.num_times = 1 + int(math.floor(1e-6 + float(self.reconstruction_end_time - self.reconstruction_begin_time) / self.reconstruction_time_step)) + self.num_times = 1 + int( + math.floor( + 1e-6 + + float(self.reconstruction_end_time - self.reconstruction_begin_time) + / self.reconstruction_time_step + ) + ) # It's possible the time step is larger than the time span, in which case we change it to equal the time span. # This guarantees there'll be at least one time step (which has two times; one at either end of interval). if self.num_times == 1: self.num_times = 2 - self.reconstruction_time_step = self.reconstruction_end_time - self.reconstruction_begin_time + self.reconstruction_time_step = ( + self.reconstruction_end_time - self.reconstruction_begin_time + ) self.reconstruction_time_interval = math.fabs(self.reconstruction_time_step) - + self.last_time_index = self.num_times - 1 - + self.points = points self.num_points = len(points) - + # Use the specified point begin times if provided (otherwise use 'inf'). self.point_begin_times = point_begin_times if self.point_begin_times is None: - self.point_begin_times = [float('inf')] * self.num_points + self.point_begin_times = [float("inf")] * self.num_points elif len(self.point_begin_times) != self.num_points: - raise ValueError("Length of 'point_begin_times' must match length of 'points'.") - + raise ValueError( + "Length of 'point_begin_times' must match length of 'points'." + ) + # Use the specified point end times if provided (otherwise use '-inf'). 
self.point_end_times = point_end_times if self.point_end_times is None: - self.point_end_times = [float('-inf')] * self.num_points + self.point_end_times = [float("-inf")] * self.num_points elif len(self.point_end_times) != self.num_points: - raise ValueError("Length of 'point_end_times' must match length of 'points'.") - + raise ValueError( + "Length of 'point_end_times' must match length of 'points'." + ) + # Use the specified point plate IDs if provided (otherwise use '0'). # These plate IDs are only used when a point falls outside all resolved topologies during a time step. self.point_plate_ids = point_plate_ids if self.point_plate_ids is None: self.point_plate_ids = [0] * self.num_points elif len(self.point_plate_ids) != self.num_points: - raise ValueError("Length of 'point_plate_ids' must match length of 'points'.") - + raise ValueError( + "Length of 'point_plate_ids' must match length of 'points'." + ) + self.detect_collisions = detect_collisions - - + def reconstruct(self): - # Initialise the reconstruction. self.begin_reconstruction() - + # Loop over the reconstruction times until reached end of the reconstruction time span, or # all points have entered their valid time range *and* either exited their time range or # have been deactivated (subducted forward in time or consumed by MOR backward in time). while self.reconstruct_to_next_time(): pass - + return self.get_active_current_points() - - + def begin_reconstruction(self): - self.current_time_index = 0 - + # Set up point arrays. # Store active and inactive points here (inactive points have None in corresponding entries). self.prev_points = [None] * self.num_points self.curr_points = [None] * self.num_points self.next_points = [None] * self.num_points - + # Each point can only get activated once (after deactivation it cannot be reactivated). 
self.point_has_been_activated = [False] * self.num_points self.num_activated_points = 0 - + # Set up topology arrays (corresponding to active/inactive points at same indices). self.prev_topology_plate_ids = [None] * self.num_points self.curr_topology_plate_ids = [None] * self.num_points @@ -2148,61 +2360,52 @@ def begin_reconstruction(self): self.in_continent_points = [None] * self.num_points self.deletedpoints = [] - + self._activate_deactivate_points() self._find_resolved_topologies_containing_points() - - + def get_current_time(self): - - return self.reconstruction_begin_time + self.current_time_index * self.reconstruction_time_step - - + return ( + self.reconstruction_begin_time + + self.current_time_index * self.reconstruction_time_step + ) + def get_all_current_points(self): - return self.curr_points - - + def get_active_current_points(self): - # Return only the active points (the ones that are not None). return [point for point in self.get_all_current_points() if point is not None] - def get_in_continent_indices(self): - return self.in_continent_points, self.in_continent_indices - - + def reconstruct_to_next_time(self): - # If we're at the last time then there is no next time to reconstruct to. if self.current_time_index == self.last_time_index: return False - + # If all points have been previously activated, but none are currently active then we're finished. # This means all points have entered their valid time range *and* either exited their time range or # have been deactivated (subducted forward in time or consumed by MOR backward in time). - if (self.num_activated_points == self.num_points and - not any(self.curr_points)): + if self.num_activated_points == self.num_points and not any(self.curr_points): return False - + # Cache stage rotations by plate ID. # Use different dicts since using different rotation models and time steps, etc. 
reconstruct_stage_rotation_dict = {} - + current_time = self.get_current_time() - + # Iterate over all points to reconstruct them to the next time step. for point_index in range(self.num_points): - curr_point = self.curr_points[point_index] if curr_point is None: # Current point is not currently active. # So we cannot reconstruct to next time. self.next_points[point_index] = None continue - + # Get plate ID of resolved topology containing current point # (this was determined in last call to '_find_resolved_topologies_containing_points()'). curr_plate_id = self.curr_topology_plate_ids[point_index] @@ -2211,24 +2414,25 @@ def reconstruct_to_next_time(self): # So instead we just reconstruct using its plate ID (that was manually assigned by the user/caller). curr_plate_id = self.point_plate_ids[point_index] continue - + # Get the stage rotation that will move the point from where it is at the current time to its # location at the next time step, based on the plate id that contains the point at the current time. - + # Speed up by caching stage rotations in a dict. stage_rotation = reconstruct_stage_rotation_dict.get(curr_plate_id) if not stage_rotation: stage_rotation = self.rotation_model.get_rotation( - # Positive/negative time step means reconstructing backward/forward in time. - current_time + self.reconstruction_time_step, - curr_plate_id, - current_time) + # Positive/negative time step means reconstructing backward/forward in time. + current_time + self.reconstruction_time_step, + curr_plate_id, + current_time, + ) reconstruct_stage_rotation_dict[curr_plate_id] = stage_rotation - - # Use the stage rotation to reconstruct the tracked point from position at current time + + # Use the stage rotation to reconstruct the tracked point from position at current time # to position at the next time step. self.next_points[point_index] = stage_rotation * curr_point - + # # Set up for next loop iteration. 
# @@ -2236,24 +2440,33 @@ def reconstruct_to_next_time(self): # The new previous will be the old current. # The new current will be the old next. # The new next will be the old previous (but values are ignored and overridden in next time step; just re-using its memory). - self.prev_points, self.curr_points, self.next_points = self.curr_points, self.next_points, self.prev_points + self.prev_points, self.curr_points, self.next_points = ( + self.curr_points, + self.next_points, + self.prev_points, + ) # Swap previous and current topology arrays. # The new previous will be the old current. # The new current will be the old previous (but values are ignored and overridden in next time step; just re-using its memory). - self.prev_topology_plate_ids, self.curr_topology_plate_ids = self.curr_topology_plate_ids, self.prev_topology_plate_ids - self.prev_resolved_plate_boundaries, self.curr_resolved_plate_boundaries = self.curr_resolved_plate_boundaries, self.prev_resolved_plate_boundaries - + self.prev_topology_plate_ids, self.curr_topology_plate_ids = ( + self.curr_topology_plate_ids, + self.prev_topology_plate_ids, + ) + self.prev_resolved_plate_boundaries, self.curr_resolved_plate_boundaries = ( + self.curr_resolved_plate_boundaries, + self.prev_resolved_plate_boundaries, + ) + # Move the current time to the next time. self.current_time_index += 1 current_time = self.get_current_time() - + self._activate_deactivate_points() self._find_resolved_topologies_containing_points() - + # Iterate over all points to detect collisions. if self.detect_collisions: for point_index in range(self.num_points): - prev_point = self.prev_points[point_index] curr_point = self.curr_points[point_index] if prev_point is None or curr_point is None: @@ -2261,45 +2474,49 @@ def reconstruct_to_next_time(self): # Also previous point might just have been activated now, at end of current time step, and hence # not active at beginning of time step. 
continue - + # Get plate IDs of resolved topology containing previous and current point # (this was determined in last call to '_find_resolved_topologies_containing_points()'). # # Note that could be None, so the collision detection needs to handle that. prev_plate_id = self.prev_topology_plate_ids[point_index] curr_plate_id = self.curr_topology_plate_ids[point_index] - + # Detect collisions at the end of the current time step since we need previous, and current, points and topologies. # De-activate point (in 'curr_points') if subducted (forward in time) or consumed back into MOR (backward in time). if self.detect_collisions( - self.rotation_model, - current_time, - self.reconstruction_time_interval, - prev_point, curr_point, - prev_plate_id, self.prev_resolved_plate_boundaries[point_index], - curr_plate_id, self.curr_resolved_plate_boundaries[point_index]): + self.rotation_model, + current_time, + self.reconstruction_time_interval, + prev_point, + curr_point, + prev_plate_id, + self.prev_resolved_plate_boundaries[point_index], + curr_plate_id, + self.curr_resolved_plate_boundaries[point_index], + ): # An inactive point in 'curr_points' becomes None. # It may have been reconstructed from the previous time step to a valid position # but now we override that result as inactive. self.curr_points[point_index] = None - #self.curr_points.remove(self.curr_points[point_index]) + # self.curr_points.remove(self.curr_points[point_index]) self.deletedpoints.append(point_index) - + # We successfully reconstructed to the next time. return True - - + def _activate_deactivate_points(self): - current_time = self.get_current_time() - + # Iterate over all points and activate/deactivate as necessary depending on each point's valid time range. for point_index in range(self.num_points): if self.curr_points[point_index] is None: if not self.point_has_been_activated[point_index]: # Point is not active and has never been activated, so see if can activate it. 
- if (current_time <= self.point_begin_times[point_index] and - current_time >= self.point_end_times[point_index]): + if ( + current_time <= self.point_begin_times[point_index] + and current_time >= self.point_end_times[point_index] + ): # The initial point is assumed to be the position at the current time # which is typically the point's begin time (approximately). # But it could be the beginning of the reconstruction time span (specified in constructor) @@ -2315,22 +2532,29 @@ def _activate_deactivate_points(self): self.num_activated_points += 1 else: # Point is active, so see if can deactivate it. - if not (current_time <= self.point_begin_times[point_index] and - current_time >= self.point_end_times[point_index]): + if not ( + current_time <= self.point_begin_times[point_index] + and current_time >= self.point_end_times[point_index] + ): self.curr_points[point_index] = None - - + def _find_resolved_topologies_containing_points(self): - current_time = self.get_current_time() - + # Resolve the plate polygons for the current time. resolved_topologies = [] - pygplates.resolve_topologies(self.topology_features, self.rotation_model, resolved_topologies, current_time) - + pygplates.resolve_topologies( + self.topology_features, + self.rotation_model, + resolved_topologies, + current_time, + ) + if _ReconstructByTopologies.use_plate_partitioner: # Create a plate partitioner from the resolved polygons. 
- plate_partitioner = pygplates.PlatePartitioner(resolved_topologies, self.rotation_model) + plate_partitioner = pygplates.PlatePartitioner( + resolved_topologies, self.rotation_model + ) else: # Some of 'curr_points' will be None so 'curr_valid_points' contains only the valid (not None) # points, and 'curr_valid_points_indices' is the same length as 'curr_points' but indexes into @@ -2343,35 +2567,43 @@ def _find_resolved_topologies_containing_points(self): curr_valid_points_indices[point_index] = len(curr_valid_points) curr_valid_points.append(curr_point) # For each valid current point find the resolved topology containing it. - resolved_topologies_containing_curr_valid_points = ptt.utils.points_in_polygons.find_polygons( + resolved_topologies_containing_curr_valid_points = ( + ptt.utils.points_in_polygons.find_polygons( curr_valid_points, - [resolved_topology.get_resolved_boundary() for resolved_topology in resolved_topologies], - resolved_topologies) - + [ + resolved_topology.get_resolved_boundary() + for resolved_topology in resolved_topologies + ], + resolved_topologies, + ) + ) + # Iterate over all points. for point_index, curr_point in enumerate(self.curr_points): - if curr_point is None: # Current point is not currently active - so skip it. self.curr_topology_plate_ids[point_index] = None self.curr_resolved_plate_boundaries[point_index] = None continue - + # Find the plate id of the polygon that contains 'curr_point'. if _ReconstructByTopologies.use_plate_partitioner: curr_polygon = plate_partitioner.partition_point(curr_point) else: curr_polygon = resolved_topologies_containing_curr_valid_points[ - # Index back into 'curr_valid_points' and hence also into - # 'resolved_topologies_containing_curr_valid_points'. - curr_valid_points_indices[point_index]] + # Index back into 'curr_valid_points' and hence also into + # 'resolved_topologies_containing_curr_valid_points'. 
+ curr_valid_points_indices[point_index] + ] self.curr_resolved_plate_boundaries[point_index] = curr_polygon - - # If the polygon is None, that means (presumably) that it fell into a crack between + + # If the polygon is None, that means (presumably) that it fell into a crack between # topologies. So it will be skipped and thrown away from future iterations. if curr_polygon is None: self.curr_topology_plate_ids[point_index] = None continue - + # Set the plate ID of resolved topology containing current point. - self.curr_topology_plate_ids[point_index] = curr_polygon.get_feature().get_reconstruction_plate_id() + self.curr_topology_plate_ids[ + point_index + ] = curr_polygon.get_feature().get_reconstruction_plate_id() diff --git a/models.json b/models.json new file mode 100644 index 00000000..9357e7fe --- /dev/null +++ b/models.json @@ -0,0 +1,17 @@ +{ + "Muller2019": { + "BigTime": 250, + "SmallTime": 0, + "Rotations": "https://www.earthbyte.org/webdav/ftp/gplately/Muller2019/Rotations.zip", + "Layers": { + "Coastlines": "https://www.earthbyte.org/webdav/ftp/gplately/Muller2019/Coastlines.zip", + "StaticPolygons": "https://www.earthbyte.org/webdav/ftp/gplately/Muller2019/StaticPolygons.zip", + "ContinentalPolygons": "https://www.earthbyte.org/webdav/ftp/gplately/Muller2019/ContinentalPolygons.zip", + "Topologies": "https://www.earthbyte.org/webdav/ftp/gplately/Muller2019/Topologies.zip", + "COBs": "https://www.earthbyte.org/webdav/ftp/gplately/Muller2019/COBs.zip" + }, + "TimeDepRasters": { + "AgeGrids": "https://www.earthbyte.org/webdav/ftp/Data_Collections/Muller_etal_2019_Tectonics/Muller_etal_2019_Agegrids/Muller_etal_2019_Tectonics_v2.0_netCDF/Muller_etal_2019_Tectonics_v2.0_AgeGrid-{}.nc" + } + } +} \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..e11105b7 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +addopts = --ignore=unittest --ignore=test-with-plate-model-manager +testpaths = test \ No 
newline at end of file diff --git a/test-with-plate-model-manager/.gitignore b/test-with-plate-model-manager/.gitignore new file mode 100644 index 00000000..afa194e3 --- /dev/null +++ b/test-with-plate-model-manager/.gitignore @@ -0,0 +1 @@ +muller2019 diff --git a/test-with-plate-model-manager/conftest.py b/test-with-plate-model-manager/conftest.py new file mode 100644 index 00000000..cd45271e --- /dev/null +++ b/test-with-plate-model-manager/conftest.py @@ -0,0 +1,184 @@ +import gzip +import os +import shutil + +import numpy as np +import pygplates +import pytest +import gplately +from plate_model_manager import ( + PlateModelManager, + PresentDayRasterManager, + network_requests, +) + + +## ========================== + +# We will test GPlately functionalities on the Müller et al. (2019) plate reconstruction +# model at 0 and 100 Ma. +reconstruction_times = [0, 100] +gridding_times = [249.0, 250.0] +pt_lon = np.array([-155.4696, 164.3]) +pt_lat = np.array([19.8202, 53.5]) +test_geometry_n_points = 1000 +test_geometry_origins = ((20, -10), (175, -40)) # (lon, lat) +test_geometry_radii = (100, 500, 1000, 2000) # km +test_geometry_azimuths = (45, -100) # degrees + + +@pytest.fixture(scope="module") +def muller_2019_model(): + pm_manger = PlateModelManager() + return pm_manger.get_model("Muller2019") + + +@pytest.fixture(scope="module") +def merdith_2021_model(): + pm_manger = PlateModelManager() + return pm_manger.get_model("Merdith2021") + + +@pytest.fixture(scope="module") +def gplately_muller_static_geometries(muller_2019_model): + coastlines = pygplates.FeatureCollection() + for file in muller_2019_model.get_layer("Coastlines"): + coastlines.add(pygplates.FeatureCollection(file)) + continental_polygons = pygplates.FeatureCollection() + for file in muller_2019_model.get_layer("ContinentalPolygons"): + continental_polygons.add(pygplates.FeatureCollection(file)) + COBs = pygplates.FeatureCollection() + for file in muller_2019_model.get_layer("COBs"): + 
COBs.add(pygplates.FeatureCollection(file)) + return coastlines, continental_polygons, COBs + + +@pytest.fixture(scope="module") +def gplately_merdith_static_geometries(merdith_2021_model): + coastlines = pygplates.FeatureCollection() + for file in merdith_2021_model.get_layer("Coastlines"): + coastlines.add(pygplates.FeatureCollection(file)) + continental_polygons = pygplates.FeatureCollection() + for file in merdith_2021_model.get_layer("ContinentalPolygons"): + continental_polygons.add(pygplates.FeatureCollection(file)) + + return coastlines, continental_polygons + + +@pytest.fixture(scope="module") +def gplately_muller_reconstruction_files(muller_2019_model): + rotation_model = pygplates.RotationModel(muller_2019_model.get_rotation_model()) + topology_features = pygplates.FeatureCollection() + for file in muller_2019_model.get_layer("Topologies"): + topology_features.add(pygplates.FeatureCollection(file)) + static_polygons = pygplates.FeatureCollection() + for file in muller_2019_model.get_layer("StaticPolygons"): + static_polygons.add(pygplates.FeatureCollection(file)) + return rotation_model, topology_features, static_polygons + + +@pytest.fixture(scope="module") +def gplately_merdith_reconstruction_files(merdith_2021_model): + rotation_model = pygplates.RotationModel(merdith_2021_model.get_rotation_model()) + topology_features = pygplates.FeatureCollection() + for file in merdith_2021_model.get_layer("Topologies"): + topology_features.add(pygplates.FeatureCollection(file)) + static_polygons = pygplates.FeatureCollection() + for file in merdith_2021_model.get_layer("StaticPolygons"): + static_polygons.add(pygplates.FeatureCollection(file)) + return rotation_model, topology_features, static_polygons + + +@pytest.fixture(scope="module") +def gplately_plate_reconstruction_object(muller_2019_model): + return gplately.PlateReconstruction( + rotation_model=muller_2019_model.get_rotation_model(), + topology_features=muller_2019_model.get_layer("Topologies"), + 
static_polygons=muller_2019_model.get_layer("StaticPolygons"), + ) + + +@pytest.fixture(scope="module") +def gplately_merdith_reconstruction(gplately_merdith_reconstruction_files): + return gplately.PlateReconstruction(*gplately_merdith_reconstruction_files) + + +@pytest.fixture(scope="module") +def gplately_plot_topologies_object( + gplately_plate_reconstruction_object, + gplately_muller_static_geometries, +): + gplot = gplately.PlotTopologies( + gplately_plate_reconstruction_object, + *gplately_muller_static_geometries, + ) + gplot.time = 0 + return gplot + + +@pytest.fixture(scope="module") +def gplately_points_object(gplately_plate_reconstruction_object): + model = gplately_plate_reconstruction_object + time = 0 # Ma, will change to 100 Ma in test 2. + + # For example: Longitude and latitude of the Hawaiian-Emperor Seamount chain seed points + pt_lon = np.array([-155.4696, 164.3]) + pt_lat = np.array([19.8202, 53.5]) + + # Call the Points object: pass the PlateReconstruction object, and the latitudes and longitudes of the seed points. 
+ gpts = gplately.Points(model, pt_lon, pt_lat) + return gpts + + +@pytest.fixture(scope="module") +def gplately_raster_object( + muller_2019_model, + gplately_plate_reconstruction_object, +): + model = gplately_plate_reconstruction_object + time = 0 + agegrid_path = muller_2019_model.get_raster("AgeGrids", time) + + graster = gplately.Raster( + data=agegrid_path, + plate_reconstruction=model, + extent=[-180, 180, -90, 90], + ) + + return graster + + +@pytest.fixture(scope="module") +def gplately_merdith_raster( + gplately_merdith_reconstruction, +): + raster_manager = PresentDayRasterManager() + etopo = gplately.Raster(data=raster_manager.get_raster("ETOPO1_grd")) + etopo = etopo.data.astype("float") + downsampled = etopo[::15, ::15] + raster = gplately.Raster( + plate_reconstruction=gplately_merdith_reconstruction, + data=downsampled, + origin="lower", + ) + return raster + + +@pytest.fixture(scope="module") +def gplately_seafloorgrid_object( + gplately_plate_reconstruction_object, gplately_plot_topologies_object +): + model = gplately_plate_reconstruction_object + gplot = gplately_plot_topologies_object + + seafloorgrid = gplately.SeafloorGrid( + model, + gplot, + max_time=250, + min_time=249, + ridge_time_step=1.0, + save_directory="test-seafloor-grid", + file_collection="Muller2019", + grid_spacing=0.25, + ) + return seafloorgrid diff --git a/test-with-plate-model-manager/test_0_imports.py b/test-with-plate-model-manager/test_0_imports.py new file mode 100644 index 00000000..5f958dd2 --- /dev/null +++ b/test-with-plate-model-manager/test_0_imports.py @@ -0,0 +1,40 @@ +import pytest + +## ========================== + +def test_numpy_import(): + import numpy + return + +def test_scipy_import(): + import scipy + print("\t\t You have scipy version {}".format(scipy.__version__)) + + +def test_cartopy_import(): + import cartopy + + +def test_pygplates_import(): + import pygplates + + +def test_pooch_import(): + import pooch + + +def test_gplately_modules(): + import 
gplately + from gplately import plot + from gplately import download + from gplately import tools + from gplately import grids + + +def test_jupyter_available(): + from subprocess import check_output + try: + result = str(check_output(['which', 'jupyter']))[2:-3] + except: + print("Jupyter not installed") + print("Jupyter is needed to run the example documentation") diff --git a/test-with-plate-model-manager/test_1_reconstructions_pmm.py b/test-with-plate-model-manager/test_1_reconstructions_pmm.py new file mode 100644 index 00000000..e4d9a1ae --- /dev/null +++ b/test-with-plate-model-manager/test_1_reconstructions_pmm.py @@ -0,0 +1,179 @@ +import pytest +import gplately +import numpy as np +from conftest import gplately_plate_reconstruction_object as model +from conftest import reconstruction_times + +# ========================================= ===================================== + +""" +A series of automated tests that ensure GPlately's object collects the necessary +rotation files, topology features and static polygons to initialise the +object with the Müller et al. (2019) plate reconstruction model. 
The following methods in the +object are tested: + + - __init__ + - tessellate_subduction_zones + - tessellate_mid_ocean_ridges + + Using pyGPlates and Plate Tectonic Tools: + - total_subduction_zone_length + - total_ridge_length + - total_continental_arc_length + + - get_point_velocities +""" + +# TESSELLATION OF TRENCHES AND RIDGES +""" +Contents are: + 0 - longitude of sampled point + 1 - latitude of sampled point + 2 - subducting convergence (relative to trench) velocity magnitude (in cm/yr) + 3 - subducting convergence velocity obliquity angle (angle between trench normal vector and convergence velocity vector) + 4 - trench absolute (relative to anchor plate) velocity magnitude (in cm/yr) + 5 - trench absolute velocity obliquity angle (angle between trench normal vector and trench absolute velocity vector) + 6 - length of arc segment (in degrees) that current point is on + 7 - trench normal azimuth angle (clockwise starting at North, ie, 0 to 360 degrees) at current point + 8 - subducting plate ID + 9 - trench plate ID +""" + + +@pytest.mark.parametrize("time", reconstruction_times) +def test_tessellate_trenches(time, model): + subduction_data = model.tessellate_subduction_zones(time, ignore_warnings=True) + # CONDITIONS + assert ( + subduction_data.any() + ), "There is no trench data inside Muller et al. (2019) at {} Ma.".format(time) + assert ( + np.abs(subduction_data[:, 0]) <= 180 + ).all(), "Some trench lons exceed a magnitude of 180 degrees in Muller et al. (2019) at {} Ma.".format( + time + ) + assert ( + np.abs(subduction_data[:, 1]) <= 90 + ).all(), "Some trench lats exceed a magnitude of 90 degrees in Muller et al. (2019) at {} Ma.".format( + time + ) + assert ( + np.abs(subduction_data[:, 2]) <= 50 + ).all(), "Some trench convergence velocities exceed 20 cm/yr in Muller et al. 
(2019) at {} Ma.".format( + time + ) + + +@pytest.mark.parametrize("time", reconstruction_times) +def test_tessellate_ridges(time, model): + ridge_data = model.tessellate_mid_ocean_ridges(time, ignore_warnings=False) + assert ( + ridge_data.any() + ), "There is no ridge data inside Müller et al. (2019) at {} Ma.".format(time) + assert ( + np.abs(ridge_data[:, 0]) <= 180 + ).all(), "Some ridge lons exceed a magnitude of 180 degrees in Muller et al. (2019) at {} Ma.".format( + time + ) + assert ( + np.abs(ridge_data[:, 1]) <= 90 + ).all(), "Some ridge lats exceed a magnitude of 90 degrees in Muller et al. (2019) at {} Ma.".format( + time + ) + assert ( + np.abs(ridge_data[:, 2]) <= 50 + ).all(), "Some ridge convergence velocities exceed 20 cm/yr in Muller et al. (2019) at {} Ma.".format( + time + ) + + +# TOTAL TRENCH AND RIDGE LENGTHS: PYGPLATES +@pytest.mark.parametrize("time", reconstruction_times) +def test_pygplates_trench_length(time, model): + total_sz_length = model.total_subduction_zone_length(time) + assert ( + 50000 <= total_sz_length <= 100000 + ), "Could not calculate total SZ lengths for Muller et al. (2019) at {} Ma.".format( + time + ) + total_sz_length = model.total_subduction_zone_length( + time, use_ptt=True, ignore_warnings=True + ) + assert ( + 50000 <= total_sz_length <= 100000 + ), "Could not calculate total SZ lengths for Muller et al. (2019) at {} Ma.".format( + time + ) + + +@pytest.mark.parametrize("time", reconstruction_times) +def test_pygplates_ridge_length(time, model): + total_ridge_length = model.total_ridge_length(time) + assert ( + total_ridge_length + ), "Could not calculate total MOR lengths for Muller et al. (2019) at {} Ma.".format( + time + ) + total_ridge_length = model.total_ridge_length( + time, use_ptt=True, ignore_warnings=True + ) + assert ( + total_ridge_length + ), "Could not calculate total MOR lengths for Muller et al. 
(2019) at {} Ma.".format( + time + ) + + +# TOTAL CONTINENTAL ARC LENGTHS AT 0 AND 100 MA +@pytest.mark.parametrize("time", reconstruction_times) +def test_cont_arc_length(time, model): + gdownload = gplately.download.DataServer("Muller2019") + + agegrid = gdownload.get_age_grid(time) + agegrid = agegrid.data + continental_grid = np.isnan(np.array(agegrid)) + # Use 281km as the trench-arc distance 281 km; the median distance frin global analysis at the present-day (Pall et al. 2018) + trench_arc_distance = 281 + total_cont_arc_length = model.total_continental_arc_length( + time, + continental_grid, + trench_arc_distance, + ignore_warnings=True, + ) + approx_lengths = { + 0: 51500, + 100: 44000, + } + err_msg = ( + "Could not calculate total continental arc lengths for Muller et " + + "al. (2019) at {} Ma.".format(time) + ) + if time in approx_lengths.keys(): + # Check arc length approximately matches precomputed value + cmp = approx_lengths[time] + diff = np.abs(total_cont_arc_length - cmp) + assert diff <= 550.0, err_msg + else: + # Value for this time has not been computed, so just make sure no + # errors were encountered + assert total_cont_arc_length >= 0.0, err_msg + + +# POINT VELOCITIES +@pytest.mark.parametrize("time", reconstruction_times) +def test_point_velocities(time, model): + lons = [-30] + lats = [15] + point_velocities = model.get_point_velocities(lons, lats, time, delta_time=1.0) + assert ( + point_velocities.any() + ), "Could not calculate point data velocities for Muller et al. 
(2019) at {} Ma.".format( + time + ) + + +def test_pickle_Points(): + import pickle + + model_dump = pickle.dumps(model) + model_load = pickle.loads(model_dump) diff --git a/test-with-plate-model-manager/test_2_points.py b/test-with-plate-model-manager/test_2_points.py new file mode 100644 index 00000000..00211f7c --- /dev/null +++ b/test-with-plate-model-manager/test_2_points.py @@ -0,0 +1,52 @@ +import pytest +import gplately +import numpy as np +from conftest import reconstruction_times +from conftest import gplately_points_object as gpts + + +# ========================================= ===================================== + +""" +A series of automated tests that ensure GPlately's object is initialised. The object must +process an array of latitudinal and longitudinal coordinates into point data that: + - are reconstructable through geological time with respect to another geological element or + the absolute reference frame, + - are ascribed to kinetic topological plates, + - have point velocity data (the velocities of these plates) + +The following methods in the object are tested: + + - __init__ + - reconstruct + - plate_velocity + Ensures plate velocities are . + +""" + +# TESTING THE POINTS OBJECT +@pytest.mark.parametrize("time", reconstruction_times) +def test_point_reconstruction(time, gpts): + rlons, rlats = gpts.reconstruct(time, return_array=True, anchor_plate_id=0) + assert (rlons, rlats), "Unable to reconstruct point data to {} Ma with Muller et al. (2019).".format(time) + + +# TESTING PLATE VELOCITY CALCULATIONS +@pytest.mark.parametrize("time", reconstruction_times) +def test_plate_velocity(time, gpts): + plate_vel = gpts.plate_velocity(time, delta_time=1) + assert plate_vel, "Unable to calculate plate velocities of point data at {} Ma with Muller et al. 
(2019).".format(time) + +def test_point_attributes(gpts): + attr = np.arange(0, gpts.size) + gpts.add_attributes(FROMAGE=attr, TOAGE=attr) + +def test_pickle_Points(gpts): + import pickle + gpts_dump = pickle.dumps(gpts) + gpts_load = pickle.loads(gpts_dump) + + attr = np.arange(0, gpts.size) + gpts.add_attributes(FROMAGE=attr, TOAGE=attr) + gpts_dump = pickle.dumps(gpts) + gpts_load = pickle.loads(gpts_dump) \ No newline at end of file diff --git a/test-with-plate-model-manager/test_3_plottopologies.py b/test-with-plate-model-manager/test_3_plottopologies.py new file mode 100644 index 00000000..1c81c17c --- /dev/null +++ b/test-with-plate-model-manager/test_3_plottopologies.py @@ -0,0 +1,112 @@ +import pytest +import matplotlib.pyplot as plt +import numpy as np +from conftest import reconstruction_times +from conftest import gplately_plot_topologies_object as gplot + +# ========================================= ========================================= + +""" +A series of automated tests that ensure GPlately's object collects the necessary +continent, coastline and continent-ocean boundary shapefiles/GPML files to initialise the + object with the Müller et al. (2019) plate reconstruction model. The following +methods in the object are tested: + + - __init__ + This includes testing update_time; this ensures all geometries in the provided coastline, + COB and/or continent files can be reconstructed to 0 and 100 Ma. 
It also ensures that the + outputs of Plate Tectonic Tools' resolve_topologies method: + + - resolved topology features (topological plates and networks) + - ridge and transform boundary sections (resolved features) + - ridge boundary sections (resolved features) + - transform boundary sections (resolved features) + - subduction boundary sections (resolved features) + - left subduction boundary sections (resolved features) + - right subduction boundary sections (resolved features) + - other boundary sections (resolved features) that are not subduction zones or mid-ocean ridges (ridge/transform) + + are initialised in the object for a given plate reconstruction model. + + - Plotting functions: + This ensures that objects are created with the attributes passed to + . Namely, the following plotting methods are tested: + + - plot_coastlines + - plot_continents + - plot_continent_ocean_boundaries + - plot_ridges + - plot_ridges_and_transforms + - plot_transforms + - plot_trenches + - plot_misc_boundaries + - plot_plate_id + - plot_subduction_teeth + - plot_grid + - plot_grid_from_netCDF + - plot_plate_motion_vectors +""" + +# CALL THE PLOT TOPOLOGIES OBJECT +@pytest.mark.parametrize("time", reconstruction_times) +def test_gplately_plotTopologies_object(time, gplot): + #assert gplot, "No object made with {}.".format(model) + assert gplot, "Unable to create a object with Müller et al. (2019) at {} Ma.".format(time) + +# ================================================================================================================================================================================================================ + +# ENSURE PLOT TOPOLOGIES' ATTRIBUTES EXIST AND ARE CORRECT +# Topologies +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_topologies(time, gplot): + assert gplot.topologies, "No topological features from Müller et al. 
(2019) at {} Ma are attributed to .".format(time) + +# Ridge_transforms +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_ridge_transforms(time, gplot): + assert gplot.ridge_transforms, "No ridge/transform features from Müller et al. (2019) at {} Ma are attributed to .".format(time) + assert [ridge_transform.get_feature_type() == "gpml:MidOceanRidge" for ridge_transform in gplot.ridge_transforms], " ridge/transforms are not all of type gpml:MidOceanRidge in Müller et al. (2019) at {} Ma.".format(time) + +# Ridges +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_ridges(time, gplot): + assert gplot.ridges, "No ridge features from Müller et al. (2019) at {} Ma are attributed to .".format(time) + assert [ridge.get_feature_type() == "gpml:MidOceanRidge" for ridge in gplot.ridges], " ridges are not all of type gpml:MidOceanRidge in Müller et al. (2019) at {} Ma.".format(time) + +# Transforms +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_transforms(time, gplot): + assert gplot.transforms, "No transform features from Müller et al. (2019) at {} Ma are attributed to .".format(time) + assert [transform.get_feature_type() == "gpml:MidOceanRidge" for transform in gplot.transforms], " transforms are not all of type gpml:MidOceanRidge in Müller et al. (2019) at {} Ma.".format(time) + +# Trenches +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_trenches(time, gplot): + assert gplot.trenches, "No trench features from Müller et al. (2019) at {} Ma are attributed to .".format(time) + assert [trench.get_feature_type() == "gpml:SubductionZone" for trench in gplot.trenches], " trenches are not all of type gpml:SubductionZone in Müller et al. 
(2019) at {} Ma.".format(time) + +# Trench_left +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_trench_L(time, gplot): + assert gplot.trench_left, "No trench (L) features from Müller et al. (2019) at {} Ma are attributed to .".format(time) + assert [trench_l.get_feature_type() == "gpml:SubductionZone" for trench_l in gplot.trench_left], " trenches (L) are not all of type gpml:SubductionZone in Müller et al. (2019) at {} Ma.".format(time) + +# Trench_right +@pytest.mark.parametrize("time", reconstruction_times) +def test_PlotTopologies_trench_R(time, gplot): + assert gplot.trench_right, "No trench (R) features from Müller et al. (2019) at {} Ma are attributed to .".format(time) + assert [trench_r.get_feature_type() == "gpml:SubductionZone" for trench_r in gplot.trench_right], " trenches (R) are not all of type gpml:SubductionZone in Müller et al. (2019) at {} Ma.".format(time) + + +# Subduction teeth +def test_PlotTopologies_subduction_teeth(gplot): + ax = plt.gca() + gplot.plot_subduction_teeth(ax=ax) + fig = plt.gcf() + plt.close(fig) + + +def test_pickle_Points(): + import pickle + gplot_dump = pickle.dumps(gplot) + gplot_load = pickle.loads(gplot_dump) diff --git a/test-with-plate-model-manager/test_4_rasters.py b/test-with-plate-model-manager/test_4_rasters.py new file mode 100644 index 00000000..8f4f1031 --- /dev/null +++ b/test-with-plate-model-manager/test_4_rasters.py @@ -0,0 +1,113 @@ +import numpy as np +import pytest, os +from conftest import gplately_merdith_raster, gplately_merdith_static_geometries +from conftest import gplately_raster_object as graster +from conftest import pt_lat, pt_lon, reconstruction_times + +import gplately + +# ========================================= ========================================= + +""" +A series of automated tests that ensure GPlately's object collects the 0 and 100 Ma +age grids from Müller et al. (2019) to initialise the object. 
The following +methods in the object are tested: + + - __init__ + - interpolate + - resample + - resize + - fill_NaNs + - reconstruct + For the test, the present-day Müller et al. 2019 age grid is reconstructed to its + configuration at 50 Ma. +""" + + +# TEST LINEAR POINT DATA INTERPOLATION (WITH PT. COORDS FROM CONFTEST) +def test_point_interpolation(graster): + interpolated_points = graster.interpolate( + pt_lon, + pt_lat, + method="linear", + return_indices=False, + ) + assert interpolated_points.any(), "Unable to interpolate points" + + +def test_bilinear_interpolation(): + array = np.array([[0, 1], [1, 2]], dtype=float) + graster = gplately.Raster(data=array, extent=[0, 1, 0, 1]) + + ilon = ilat = 2.0 / 3 + result = 1.0 + 1.0 / 3 + interp = graster.interpolate(ilon, ilat, method="linear") + assert np.isclose(interp, result), "Linear interpolation in x direction failed" + + # get interpolation coordinates + interp, (ci, cj) = graster.interpolate( + ilon, ilat, method="linear", return_indices=True + ) + assert ci == 1 and cj == 1, "Indices of interpolation are incorrect" + + +def test_nearest_neighbour_interpolation(): + array = np.array([[0, 1], [1, 2]], dtype=float) + graster = gplately.Raster(data=array, extent=[0, 1, 0, 1]) + + ilon = ilat = 2.0 / 3 + result = 2 + interp = graster.interpolate(ilon, ilat, method="nearest") + assert np.isclose(interp, result), "Linear interpolation in x direction failed" + + # get interpolation coordinates + interp, (ci, cj) = graster.interpolate( + ilon, ilat, method="nearest", return_indices=True + ) + assert ci == 1 and cj == 1, "Indices of interpolation are incorrect" + + +# TEST AGE GRID RESIZING (AT RESOLUTIONS OF RES_X = 1000, RES_Y = 400) +def test_resizing(graster): + resized_agegrid = graster.resize(1000, 400, return_array=True) + assert np.shape(resized_agegrid) == (400, 1000), "Unable to rezise" + + +# TEST FILLING NaNs IN AGE GRIDS +def test_fill_NaNs(graster): + no_NaNs = graster.fill_NaNs(return_array=True) + 
assert not np.isnan(no_NaNs).all(), "Unable to fill NaNs" + + +def test_reconstruct(graster): + reconstructed_raster = graster.reconstruct(50) + assert np.shape(reconstructed_raster) == np.shape( + graster + ), "Unable to reconstruct age grid" + + +@pytest.mark.skipif( + int(os.getenv("TEST_LEVEL", 0)) < 1, reason="requires TEST_LEVEL higher than 0" +) +def test_reverse_reconstruct( + gplately_merdith_raster, + gplately_merdith_static_geometries, +): + continents = gplately_merdith_static_geometries[1] + original_data = np.array(gplately_merdith_raster.data) + + gplately_merdith_raster.reconstruct( + 50, + partitioning_features=continents, + inplace=True, + ) + gplately_merdith_raster.reconstruct( + 0, + partitioning_features=continents, + inplace=True, + ) + diff = gplately_merdith_raster.data - original_data + # RMS error after reconstructing and reverse reconstructing + # should be fairly small if reconstruction is working well + rmse = np.sqrt(np.nanmean(diff**2)) + assert rmse < 250.0 # make sure RMSE is within a reasonable limit diff --git a/test-with-plate-model-manager/test_5_seafloorgrid.py b/test-with-plate-model-manager/test_5_seafloorgrid.py new file mode 100644 index 00000000..0f424b0f --- /dev/null +++ b/test-with-plate-model-manager/test_5_seafloorgrid.py @@ -0,0 +1,171 @@ +import pytest +import gplately +import os +import numpy as np +import pandas as pd +import statistics +from conftest import gplately_seafloorgrid_object as seafloorgrid +from conftest import gridding_times + +# ========================================= ========================================= + +""" +A series of automated tests that ensure GPlately's object can be initialised and all gridding +routines can be performed. The routines must also return sensible grids - for example, spreading rate grids +must not have an excessive amount of NaN entries (which may mean incorrect masking has occurred). 
+The following methods in the object are tested: + + - __init__ + - reconstruct_by_topologies + - lon_lat_z_to_netcdf + +These tests will be conducted using the Muller et al. 2019 model, with the gridding time range of 250-249 Ma (one +time step). +""" + +zval_names = ["SEAFLOOR_AGE", "SPREADING_RATE"] + +# CALL THE SEAFLOORGRID OBJECT +def test_gplately_SeafloorGrid_object( + seafloorgrid +): + #assert gplot, "No object made with {}.".format(model) + assert seafloorgrid, "Unable to create a object with \ + Müller et al. (2019) at a max time of {} Ma, and a min time of {} Ma.".format(gridding_times[1], gridding_times[0]) + +# ======================================================================================================================= + +# TEST RECONSTRUCTION BY TOPOLOGIES BY TESTING THE CONTENTS OF OUTPUT FILES. +# Gridding input npz files are ok if they have no NaN entries in the spreading rate column. +# Spreading rate ultimately builds the seafloor age. +@pytest.mark.parametrize("time", gridding_times) +def test_reconstruct_by_topologies( + time, + seafloorgrid +): + _reconstruct_by_topologies(time, seafloorgrid, clean=True) + + +# Separate reconstruction by topologies code so that test_lat_lon_z_to_netCDF +# does not depend on the result of test_reconstruct_by_topologies +# This allows the tests to be run in parallel using pytest-xdist +def _reconstruct_by_topologies(time, seafloorgrid, clean=False): + # This next line must run smoothly. An interruption will happen in pytest + # if any aspect of reconstruct_by_topologies goes wrong. + reconstruction_process = seafloorgrid.reconstruct_by_topologies() + + # Otherwise, the following lines then test the validity of written gridding inputs. 
+ + # Accounts for a given save directory only + npz_gridding_input = "{:s}/{}_gridding_input_{:0.1f}Ma.npz".format( + seafloorgrid.save_directory, + seafloorgrid.file_collection, + time + ) + + npz_loaded = np.load(npz_gridding_input) + + curr_data = pd.DataFrame.from_dict( + + {item: npz_loaded[item] for item in npz_loaded.files}, + orient='columns' + ) + curr_data.columns = seafloorgrid.total_column_headers + + unique_data = curr_data.drop_duplicates( + + subset=[ + "CURRENT_LONGITUDES", + "CURRENT_LATITUDES" + ] + ) + + # Gridding input critical data + age_data = np.array( + unique_data["SEAFLOOR_AGE"].to_list() + ) + spreading_rate_data = np.array( + unique_data["SPREADING_RATE"].to_list() + ) + + # Ensure spreading rate is sensible at max_time; namely that + # it is only the initial spreading rate + if time == np.sort(gridding_times)[-1]: + initial_sr = np.unique(spreading_rate_data) + assert initial_sr == seafloorgrid.initial_ocean_mean_spreading_rate, "Initial grids have not been allocated the correct initial mean ocean spreading rate." + + # If it is not the max time, but still near the start of the reconstruction tree + # (in this test we use the first time step after max_time), + # ensure the initial spreading rate is still the mode in the array + elif time == np.sort(gridding_times)[-2]: + most_common_spreading_rate = statistics.mode(spreading_rate_data) + assert most_common_spreading_rate == seafloorgrid.initial_ocean_mean_spreading_rate, "Initial grids have not been allocated the correct initial mean ocean spreading rate." + + # Otherwise, make sure the array is not empty, and has no NaNs. 
+ else: + assert np.unique(np.isnan(spreading_rate_data))[0] is False, "Some spreading rates in the {} Ma gridding input have been ascribed a NaN value.".format(time) + + if clean: + for i in ("", "unmasked_"): + for zval_name in zval_names: + filename = os.path.join( + seafloorgrid.save_directory, + "{}_{}_grid_{}{}Ma.nc".format( + seafloorgrid.file_collection, + zval_name, + i, + time, + ) + ) + if os.path.exists(filename): + os.remove(filename) + + +# test netCDF writing +@pytest.mark.parametrize("zval_name", zval_names) +def test_lat_lon_z_to_netCDF( + zval_name, + seafloorgrid +): + time = gridding_times[0] + + # Test the creation of a masked and unmasked age grid + _reconstruct_by_topologies(time, seafloorgrid) + seafloorgrid.lat_lon_z_to_netCDF( + zval_name, + unmasked=True + ) + + grid_output_unmasked = "{}/{}_{}_grid_unmasked_{}Ma.nc".format( + seafloorgrid.save_directory, + seafloorgrid.file_collection, + zval_name, + time + ) + + grid_output_dir = "{}/{}_{}_grid_{}Ma.nc".format( + seafloorgrid.save_directory, + seafloorgrid.file_collection, + zval_name, + time + ) + + age_grid_unmasked = gplately.Raster( + data=grid_output_unmasked, + extent=[-180,180,-90,90] + ) + + age_grid = gplately.Raster( + data=grid_output_dir, + extent=[-180,180,-90,90] + ) + + assert age_grid_unmasked.data.any(), "Could not produce an unmasked {} grid.".format(zval_name) + assert age_grid.data.any(), "Could not produce a masked {} grid.".format(zval_name) + + + + + + + diff --git a/test-with-plate-model-manager/test_6_geometry.py b/test-with-plate-model-manager/test_6_geometry.py new file mode 100644 index 00000000..1e6a3e10 --- /dev/null +++ b/test-with-plate-model-manager/test_6_geometry.py @@ -0,0 +1,230 @@ +from itertools import product + +import numpy as np +import pygplates +import pytest +from gplately import EARTH_RADIUS +from gplately.geometry import * +from shapely.geometry import ( + LineString, + MultiPoint, + Point, + Polygon, +) +from shapely.geometry.base import 
BaseMultipartGeometry + +from conftest import ( + test_geometry_n_points as N_POINTS, + test_geometry_origins as origins, + test_geometry_radii as RADII, + test_geometry_azimuths as azimuths, +) + + +@pytest.mark.parametrize("lat", (-100, 95)) +def test_invalid_point(lat): + p = Point(0, lat) + with pytest.raises(pygplates.InvalidLatLonError): + PointOnSphere.from_shapely(p) + + +def test_multipoint_conversion(n_points=N_POINTS): + n_lats = int(np.sqrt(n_points / 2)) + point_lons = np.linspace(-179, 179, n_lats * 2) + point_lats = np.linspace(-89, 89, n_lats) + grid_lons, grid_lats = np.meshgrid(point_lons, point_lats) + + mp_shapely = MultiPoint( + np.column_stack( + [np.ravel(i) for i in (grid_lons, grid_lats)] + ) + ) + mp_pgp = MultiPointOnSphere.from_shapely(mp_shapely) + + a1 = np.row_stack([i.coords for i in mp_shapely.geoms]) + a2 = np.fliplr(mp_pgp.to_lat_lon_array()) + assert np.allclose( + a1, + a2, + ), "Multi-point coordinates not equal" + + +@pytest.mark.parametrize( + "origin,azimuth,distance", + product(origins, azimuths, RADII), +) +def test_polyline_conversion(origin, azimuth, distance, n_points=N_POINTS): + lons, lats = _point_from_origin_distance_azimuth( + origin, + np.linspace(0, distance, n_points), + azimuth, + ) + line = LineString(zip(lons, lats)) + converted = PolylineOnSphere.from_shapely(line) + reconverted = converted.to_shapely( + central_meridian=converted.get_centroid().to_lat_lon()[1], + ) + + # Ensure consistency + a1 = np.array(line.coords) + inds1 = np.where(a1[:, 0] < -180) + a1[inds1, 0] += 360 + inds2 = np.where(a1[:, 0] > 180) + a1[inds2, 0] -= 360 + + a2 = np.array(reconverted.coords) + inds3 = np.where(a2[:, 0] < -180) + a2[inds3, 0] += 360 + inds4 = np.where(a2[:, 0] > 180) + a2[inds4, 0] -= 360 + + assert np.allclose(a1, a2), "Polyline coordinates not equal" + + +@pytest.mark.parametrize( + "origin,azimuth,distance", + product(origins, azimuths, RADII), +) +def test_polyline_length(origin, azimuth, distance, 
n_points=N_POINTS): + lons, lats = _point_from_origin_distance_azimuth( + origin, + np.linspace(0, distance, n_points), + azimuth, + ) + line = LineString(zip(lons, lats)) + converted = PolylineOnSphere.from_shapely(line) + assert np.allclose( + distance, + converted.get_arc_length() * EARTH_RADIUS, + ), "Polyline length not consistent" + + +@pytest.mark.parametrize("origin,radius", product(origins, RADII)) +def test_polygon_conversion(origin, radius, n_points=N_POINTS): + lons, lats = _get_circle(origin, radius, n=n_points) + circle = Polygon(zip(lons, lats)) + converted = PolygonOnSphere.from_shapely(circle) + reconverted = converted.to_shapely( + central_meridian=converted.get_interior_centroid().to_lat_lon()[1], + ) + assert np.allclose( + circle.exterior.coords, + reconverted.exterior.coords, + ), "Polygon coordinates not equal" + + +def test_polygon_splitting(n_points=N_POINTS): + origin = (-179, 0) + radius = 1000 + lons, lats = _get_circle(origin, radius, n=n_points) + circle = Polygon(zip(lons, lats)) + converted = PolygonOnSphere.from_shapely(circle) + split = converted.to_shapely(central_meridian=0) + unsplit = converted.to_shapely( + central_meridian=converted.get_interior_centroid().to_lat_lon()[1] + ) + + assert isinstance( + split, + BaseMultipartGeometry, + ), "Polygon splitting failed (incorrect output type: {})".format(type(split)) + assert isinstance( + unsplit, + Polygon, + ), "Polygon conversion failed (incorrect output type: {})".format( + type(unsplit) + ) + + assert ( + len(split.geoms) == 2 + ), "Polygon splitting failed (incorrect number of outputs: {})".format( + len(split.geoms) + ) + + assert ( + np.allclose(split.area, unsplit.area) + ), "Polygon splitting area mismatch" + + +@pytest.mark.parametrize("origin", origins) +def test_polygon_areas(origin, radii=RADII, n_points=N_POINTS): + areas = [_get_circle_areas(r, origin=origin, n=n_points) for r in radii] + geometric_areas, pygplates_areas = zip(*areas) + assert np.allclose( + 
geometric_areas, + pygplates_areas, + ), "Polygon areas not equal" + + +@pytest.mark.parametrize("origin", origins) +def test_polygon_perimeters(origin, radii=RADII, n_points=N_POINTS): + perimeters = [_get_circle_perimeters(r, origin=origin, n=n_points) for r in radii] + geometric_perimeters, pygplates_perimeters = zip(*perimeters) + assert np.allclose( + geometric_perimeters, + pygplates_perimeters, + ), "Polygon perimeters not equal" + + +def _get_circle_perimeters(radius, origin=(0, 0), n=N_POINTS): + lons, lats = _get_circle(origin, radius, n=n) + circle = Polygon(zip(lons, lats)) + converted = PolygonOnSphere.from_shapely(circle) + + pygplates_perimeter = converted.get_arc_length() * EARTH_RADIUS + geometric_perimeter = ( + 2 * np.pi + * EARTH_RADIUS * np.sin(radius / EARTH_RADIUS) + ) + return geometric_perimeter, pygplates_perimeter + + +def _get_circle_areas(radius, origin=(0, 0), n=N_POINTS): + lons, lats = _get_circle(origin, radius, n=n) + circle = Polygon(zip(lons, lats)) + converted = PolygonOnSphere.from_shapely(circle) + + pygplates_area = converted.get_area() * (EARTH_RADIUS ** 2) + geometric_area = ( + 2 * np.pi * + (EARTH_RADIUS ** 2) + * (1 - np.cos(radius / EARTH_RADIUS)) + ) + return geometric_area, pygplates_area + + +def _point_from_origin_distance_azimuth( + origin, + distance, + azimuth, + radius=EARTH_RADIUS, +): + lon1, lat1 = origin + lon1 = np.deg2rad(lon1) + lat1 = np.deg2rad(lat1) + azimuth = np.deg2rad(azimuth) + + b = distance / radius + a = np.arccos( + np.cos(b) * np.cos(0.5 * np.pi - lat1) + + np.sin(0.5 * np.pi - lat1) * np.sin(b) * np.cos(azimuth) + ) + B = np.arcsin( + np.sin(b) * np.sin(azimuth) + / np.sin(a) + ) + lat2 = np.rad2deg(0.5 * np.pi - a) + lon2 = np.rad2deg(B + lon1) + + return lon2, lat2 + + +def _get_circle(origin, radius, earth_radius=EARTH_RADIUS, n=N_POINTS): + azimuth = np.linspace(0, 360.0, n, endpoint=False) + lons, lats = _point_from_origin_distance_azimuth( + origin, + radius, + azimuth, + earth_radius, 
+ ) + return lons, lats diff --git a/test/test_4_rasters.py b/test/test_4_rasters.py index 90361016..2f1fe27b 100644 --- a/test/test_4_rasters.py +++ b/test/test_4_rasters.py @@ -1,13 +1,12 @@ +import os + import numpy as np +import pytest +from conftest import gplately_merdith_raster, gplately_merdith_static_geometries +from conftest import gplately_raster_object as graster +from conftest import pt_lat, pt_lon, reconstruction_times + import gplately -from conftest import ( - reconstruction_times, - pt_lon, - pt_lat, - gplately_raster_object as graster, - gplately_merdith_raster, - gplately_merdith_static_geometries, -) # ========================================= ========================================= @@ -26,48 +25,54 @@ configuration at 50 Ma. """ + # TEST LINEAR POINT DATA INTERPOLATION (WITH PT. COORDS FROM CONFTEST) def test_point_interpolation(graster): interpolated_points = graster.interpolate( pt_lon, pt_lat, - method='linear', + method="linear", return_indices=False, ) assert interpolated_points.any(), "Unable to interpolate points" def test_bilinear_interpolation(): - array = np.array([[0,1],[1,2]], dtype=float) - graster = gplately.Raster(data=array, extent=[0,1,0,1]) + array = np.array([[0, 1], [1, 2]], dtype=float) + graster = gplately.Raster(data=array, extent=[0, 1, 0, 1]) - ilon = ilat = 2.0/3 - result = 1.0 + 1.0/3 + ilon = ilat = 2.0 / 3 + result = 1.0 + 1.0 / 3 interp = graster.interpolate(ilon, ilat, method="linear") assert np.isclose(interp, result), "Linear interpolation in x direction failed" # get interpolation coordinates - interp, (ci,cj) = graster.interpolate(ilon, ilat, method="linear", return_indices=True) + interp, (ci, cj) = graster.interpolate( + ilon, ilat, method="linear", return_indices=True + ) assert ci == 1 and cj == 1, "Indices of interpolation are incorrect" + def test_nearest_neighbour_interpolation(): - array = np.array([[0,1],[1,2]], dtype=float) - graster = gplately.Raster(data=array, extent=[0,1,0,1]) + array = 
np.array([[0, 1], [1, 2]], dtype=float) + graster = gplately.Raster(data=array, extent=[0, 1, 0, 1]) - ilon = ilat = 2.0/3 + ilon = ilat = 2.0 / 3 result = 2 interp = graster.interpolate(ilon, ilat, method="nearest") assert np.isclose(interp, result), "Linear interpolation in x direction failed" # get interpolation coordinates - interp, (ci,cj) = graster.interpolate(ilon, ilat, method="nearest", return_indices=True) - assert ci == 1 and cj == 1, "Indices of interpolation are incorrect" + interp, (ci, cj) = graster.interpolate( + ilon, ilat, method="nearest", return_indices=True + ) + assert ci == 1 and cj == 1, "Indices of interpolation are incorrect" # TEST AGE GRID RESIZING (AT RESOLUTIONS OF RES_X = 1000, RES_Y = 400) def test_resizing(graster): resized_agegrid = graster.resize(1000, 400, return_array=True) - assert np.shape(resized_agegrid)==(400,1000), "Unable to rezise" + assert np.shape(resized_agegrid) == (400, 1000), "Unable to resize" # TEST FILLING NaNs IN AGE GRIDS @@ -75,13 +80,17 @@ def test_fill_NaNs(graster): no_NaNs = graster.fill_NaNs(return_array=True) assert not np.isnan(no_NaNs).all(), "Unable to fill NaNs" + def test_reconstruct(graster): reconstructed_raster = graster.reconstruct(50) - assert ( - np.shape(reconstructed_raster) == np.shape(graster) + assert np.shape(reconstructed_raster) == np.shape( + graster ), "Unable to reconstruct age grid" +@pytest.mark.skipif( + int(os.getenv("TEST_LEVEL", 0)) < 1, reason="requires TEST_LEVEL higher than 0" +) def test_reverse_reconstruct( gplately_merdith_raster, gplately_merdith_static_geometries, @@ -102,5 +111,5 @@ def test_reverse_reconstruct( diff = gplately_merdith_raster.data - original_data # RMS error after reconstructing and reverse reconstructing # should be fairly small if reconstruction is working well - rmse = np.sqrt(np.nanmean(diff ** 2)) + rmse = np.sqrt(np.nanmean(diff**2)) assert rmse < 250.0 # make sure RMSE is within a reasonable limit diff --git a/unittest/test_anchor_plate_id.py
b/unittest/test_anchor_plate_id.py new file mode 100755 index 00000000..ac95e2e9 --- /dev/null +++ b/unittest/test_anchor_plate_id.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +import matplotlib.pyplot as plt +import cartopy.crs as ccrs +import sys + +sys.path.insert(0, "../") +import gplately + + +def main(): + gdownload = gplately.download.DataServer("Clennett2020") + ( + C2020_rotation_file, + C2020_topology_features, + C2020_static_polygons, + ) = gdownload.get_plate_reconstruction_files() + + C2020_501 = gplately.PlateReconstruction( + C2020_rotation_file, + C2020_topology_features, + C2020_static_polygons, + default_anchor_plate_id=501, + ) + + C2020_101 = gplately.PlateReconstruction( + C2020_rotation_file, + C2020_topology_features, + C2020_static_polygons, + default_anchor_plate_id=101, + ) + + gplot501 = gplately.PlotTopologies(C2020_501, time=130) + gplot101 = gplately.PlotTopologies(C2020_101, time=130) + + fig, ax = plt.subplots( + figsize=(10, 5), subplot_kw={"projection": ccrs.Robinson()}, dpi=120 + ) + + gplot501.plot_all_topologies(ax=ax, color="red") + gplot101.plot_all_topologies(ax=ax, color="blue") + + plt.show() + + +if __name__ == "__main__": + main() diff --git a/unittest/test_concurrent_download.py b/unittest/test_concurrent_download.py new file mode 100755 index 00000000..f8617edf --- /dev/null +++ b/unittest/test_concurrent_download.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +import time + +from plate_model_manager import network_aiohttp, network_requests + +test_urls = [ + f"https://www.earthbyte.org/webdav/ftp/Data_Collections/Zahirovic_etal_2016_ESR_AgeGrid/jpegs/EarthByte_Zahirovic_etal_2016_ESR_r888_AgeGrid-{i}.jpg" + for i in range(20) +] + +auto_unzip = False + + +def test_with_for_loop(): + """requests + "for loop" """ + st = time.time() + spt = time.process_time() + + print("Start test_with_for_loop ... 
") + + for url in test_urls: + network_requests.fetch_file( + url, + "./download-with-for-loop/", + auto_unzip=auto_unzip, + ) + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + print("End test_with_for_loop ... ") + + +def test_concurrent_aiohttp(): + """asyncio + aiohttp""" + st = time.time() + spt = time.process_time() + paths = "./download-concurrently-with-aiohttp/" + + print("Start test_concurrent_aiohttp ... ") + + network_aiohttp.fetch_files( + test_urls, + paths, + auto_unzip=auto_unzip, + ) + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + print("End test_concurrent_aiohttp ... ") + + +def test_concurrent_executor(): + """requests + ThreadPoolExecutor + asyncio""" + st = time.time() + spt = time.process_time() + + paths = "./download-concurrently-with-executor/" + + print("Start test_concurrent_executor ... ") + + network_requests.fetch_files( + test_urls, + paths, + auto_unzip=auto_unzip, + ) + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + print("End test_concurrent_executor ... 
") + + +if __name__ == "__main__": + test_with_for_loop() + test_concurrent_aiohttp() + test_concurrent_executor() diff --git a/unittest/test_download.py b/unittest/test_download.py new file mode 100755 index 00000000..cc050c1c --- /dev/null +++ b/unittest/test_download.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +import sys + +sys.path.insert(0, "../") +import time +import gplately +from platformdirs import * + + +def main(): + print(user_cache_dir("gplately", "")) + + # test the old download code + + st = time.time() + spt = time.process_time() + + gdownload = gplately.download.DataServer("Muller2019") + ( + rotation_model, + topology_features, + static_polygons, + ) = gdownload.get_plate_reconstruction_files() + print(rotation_model) + print(topology_features) + print(static_polygons) + # coastlines, continents, COBs = gdownload.get_topology_geometries() + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + +if __name__ == "__main__": + main() diff --git a/unittest/test_download_large_file.py b/unittest/test_download_large_file.py new file mode 100755 index 00000000..d7e461fc --- /dev/null +++ b/unittest/test_download_large_file.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +import time + +from plate_model_manager import network_aiohttp, network_requests + +# concurrent download from www.earthbyte.org does not work because of Cloud provider's network traffic control +# use "http://212.183.159.230/100MB.zip" to test. 
you can see the performance improvement +# the performance improvement depends on the server/network's configuration + +test_url = "http://212.183.159.230/100MB.zip" +# test_url="https://www.earthbyte.org/webdav/ftp/Data_Collections/Zahirovic_etal_2016_ESR_AgeGrid/jpegs.zip" + +auto_unzip = False + + +def test_download_directly(): + """Download the large file directly with requests""" + st = time.time() + spt = time.process_time() + + print("Start download directly ...") + + network_requests.fetch_file(test_url, "./download-directly", auto_unzip=auto_unzip) + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + print("End download directly ...") + + +def test_requests(): + """requests + ThreadPoolExecutor + asyncio""" + st = time.time() + spt = time.process_time() + + print("Start download with requests+executor ...") + + network_requests.fetch_large_file( + test_url, "./download-with-requests-executor", auto_unzip=auto_unzip + ) + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + print("End download with requests+executor ...") + + +def test_aiohttp(): + """Download with aiohttp+asyncio""" + st = time.time() + spt = time.process_time() + + print("Start download with aiohttp ...") + + network_aiohttp.fetch_large_file( + test_url, "./download-with-aiohttp", auto_unzip=auto_unzip + ) + + et = time.time() + ept = time.process_time() + + print(f"time: {et - st}") + print(f"process time: {ept - spt}") + + print("End download with aiohttp ...") + + +if __name__ == "__main__": + test_download_directly() + test_requests() + test_aiohttp() diff --git a/unittest/test_plate_model.py b/unittest/test_plate_model.py new file mode 100755 index 00000000..5670acf8 --- /dev/null +++ b/unittest/test_plate_model.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +from plate_model_manager import PlateModelManager + + +def main(): + pm_manger = 
PlateModelManager() + model = pm_manger.get_model("Muller2019") + model.set_data_dir("test-plate-model-folder") + + print(model.get_avail_layers()) + + print(model.get_rotation_model()) + + print(model.get_layer("Coastlines")) + + print(model.get_COBs()) + + print(model.get_topologies()) + + model.download_all_layers() + + model.download_time_dependent_rasters("AgeGrids", times=[1, 2]) + + print(model.get_data_dir()) + + +if __name__ == "__main__": + main() diff --git a/unittest/test_plot.py b/unittest/test_plot.py new file mode 100755 index 00000000..8a5dac84 --- /dev/null +++ b/unittest/test_plot.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +import matplotlib.pyplot as plt +import cartopy.crs as ccrs +import sys + +from plate_model_manager import PlateModelManager + +sys.path.insert(0, "../") +from gplately import PlateReconstruction, PlotTopologies + +# test the plot function with the new PlateModel class + + +def main(): + pm_manager = PlateModelManager() + + age = 55 + model = pm_manager.get_model("Muller2019") + model.set_data_dir("plate-model-repo") + + rotation_model = model.get_rotation_model() + + test_model = PlateReconstruction( + rotation_model, + topology_features=model.get_layer("Topologies"), + static_polygons=model.get_layer("StaticPolygons"), + ) + gplot = PlotTopologies( + test_model, + coastlines=model.get_layer("Coastlines"), + COBs=model.get_layer("COBs"), + time=age, + ) + + fig = plt.figure(figsize=(10, 10), dpi=100) + ax = fig.add_subplot(111, projection=ccrs.Robinson(central_longitude=180)) + + gplot.plot_continent_ocean_boundaries(ax, color="cornflowerblue") + gplot.plot_coastlines(ax, color="black") + gplot.plot_ridges_and_transforms(ax, color="red") + gplot.plot_trenches(ax, color="orange") + gplot.plot_subduction_teeth(ax, color="orange") + ax.set_global() + + ids = set([f.get_reconstruction_plate_id() for f in gplot.topologies]) + for id in ids: + gplot.plot_plate_id(ax, id, facecolor="None", edgecolor="lightgreen") + plt.show() + + 
+if __name__ == "__main__": + main()