Merge pull request #123 from GPlates/add_new_models
Add 3 new models
brmather authored Nov 7, 2023
2 parents 3e0d788 + fc6a57b commit 5f775a3
Showing 2 changed files with 120 additions and 15 deletions.
23 changes: 21 additions & 2 deletions gplately/data.py
@@ -152,7 +152,9 @@ def plate_reconstruction_files(self):
"Muller2008" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Muller2008/Global_Model_Rigid_Internal_Release_2010.zip"],
"Scotese2016" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Scotese2016/PALEOMAP_GlobalPlateModel.zip"],
"Shephard2013" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Shephard2013/GPlates.zip"],
"Muller2022" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Muller2022/Muller_etal_2022_SE_1Ga_Opt.zip"],
"Muller2022" : ["https://earthbyte.org/webdav/ftp/Data_Collections/Muller_etal_2022_SE/Muller_etal_2022_SE_1Ga_Opt_PlateMotionModel.zip"],
"Cao2023" :["https://www.earthbyte.org/webdav/ftp/Data_Collections/Cao_etal_2023/1.8Ga_model_submit.zip"],
"Cao2023_Opt" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Cao_etal_2023_Opt/Cao1800Opt.zip"],

}

@@ -183,11 +185,22 @@ def plate_model_valid_reconstruction_times(self):
"Shephard2013" : [0,200],
"Muller2008" : [0,141], #GPlates static polygons reconstruct to this time
"Muller2022" : [0,1000],
"Cao2023" : [0,1800],
"Cao2023_Opt" : [0,1800],

}
return database


def rotation_strings_to_include(self):

strings = [

"Muller2022 1000_0_rotfile_Merdith_et_al_optimised.rot", # For Muller et al. 2022
]
return strings


def rotation_strings_to_ignore(self):

strings = [
@@ -233,6 +246,7 @@ def dynamic_polygon_strings_to_include(self):
"Clennett_2020_Coastlines",
"Clennett_2020_NAm_boundaries",
"Shephard_etal_ESR2013_Global_EarthByte_2013", # For Shephard et al. 2013
"1800-1000Ma-plate-boundary_new_valid_time_and_subduction_polarity.gpml", # for Cao2023

]
return strings
@@ -252,6 +266,7 @@ def dynamic_polygon_strings_to_ignore(self):
"Seton_etal_ESR2012_Coastline_2012", # Seton 2012
"PALEOMAP_PoliticalBoundaries", # Scotese 2016
"SimplifiedFiles", # Muller et al. 2019 (updated)
"1000-410_poles", # Merdith
]
return strings

@@ -312,7 +327,9 @@ def topology_geometries(self):
"Muller2008" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Muller2008/Global_Model_Rigid_Internal_Release_2010.zip"],
"Scotese2016" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Scotese2016/PALEOMAP_GlobalPlateModel.zip"],
"Shephard2013" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Shephard2013/GPlates.zip"],
"Muller2022" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Muller2022/Muller_etal_2022_SE_1Ga_Opt.zip"],
"Muller2022" : ["https://earthbyte.org/webdav/ftp/Data_Collections/Muller_etal_2022_SE/Muller_etal_2022_SE_1Ga_Opt_PlateMotionModel.zip"],
"Cao2023" :["https://www.earthbyte.org/webdav/ftp/Data_Collections/Cao_etal_2023/1.8Ga_model_submit.zip"],
"Cao2023_Opt" : ["https://www.earthbyte.org/webdav/ftp/Data_Collections/Cao_etal_2023_Opt/Cao1800Opt.zip"],

}
return database
@@ -325,6 +342,7 @@ def coastline_strings_to_include(self):
"coastline",
"CEED6_LAND.gpml", # for TorsvikCocks2017
"PALEOMAP_PoliticalBoundaries", # For Scotese 2016
"coast", # for Cao2023
]
return strings

@@ -337,6 +355,7 @@ def coastline_strings_to_ignore(self):
"OLD",
"__MACOSX",
"Clennett_2020_Coastlines", # Clennett et al. 2020
"COB_polygons_and_coastlines_combined_1000_0_Merdith_etal", # Muller et al. 2022
]
return strings

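The three keys added above ("Cao2023", "Cao2023_Opt", and the updated "Muller2022" archive) become valid model names for the DataServer class changed in download.py below. A minimal sketch of requesting one of the new models; the import path and call follow gplately's usual usage and are assumptions here, not part of this diff:

from gplately.download import DataServer

# The 1.8 Ga Cao et al. 2023 model registered above; valid reconstruction times
# are 0-1800 Ma per plate_model_valid_reconstruction_times.
gdownload = DataServer("Cao2023")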
112 changes: 99 additions & 13 deletions gplately/download.py
@@ -213,7 +213,7 @@ def _first_time_download_from_web(url, model_name=None, verbose=True):
# Provided a web connection to a server can be established,
download the files from the URL into the GPlately cache.
"""

if _test_internet_connection(url):

if not verbose:
@@ -596,18 +596,87 @@ def _str_in_folder(fnames, strings_to_include=None, strings_to_ignore=None):
return sorted_fnames


def _str_in_filename(fnames, strings_to_include=None, strings_to_ignore=None):
def _str_in_filename(fnames, strings_to_include=None, strings_to_ignore=None, file_collection=None, file_collection_sensitive=False):
out = []
def filter_func(fname):
basename = _os.path.basename(fname)
keep = False
if strings_to_include is None:
keep = True
else:
for s in strings_to_include:
if s.lower() in basename.lower():
keep = True
break
# If a file collection was passed along with strings_to_include, there is at least one file specific to
# this model that must be included. Such a file should be listed in the respective
# strings_to_include list in data.py with the format:

# "file_collection string_to_include"

# That is, a single whitespace separates the file collection from the string to include.
# The file collection must be identical to the string used as the model's key.

# For example, strings_to_include = ["Muller2022 1000_0_rotfile_Merdith_et_al_optimised.rot"]

# In this example, "Muller2022" and the strings_to_include list from data.py are passed to this function
# when sorting through rotation files. The list is looped through - if the current string contains
# "Muller2022" (case-insensitive), only the filename following "Muller2022" is passed through,
# i.e. the optimised plate model. All other rotation files bundled in the webDAV zip (including the
# published Merdith et al. 2021 rotation files) are excluded by the filter.

# If no string in the list contains the passed file collection, one of two things happens, depending
# on whether file_collection_sensitive is True or False.

# If it is True, strings_to_include is only honoured when the passed file collection appears in the
# strings_to_include list. Otherwise, strings_to_include is treated as if it were None, and no filter
# is applied to the files we accept (that is, all files are accepted).

# If it is False, strings_to_include is always honoured, irrespective of whether the passed file
# collection appears in the strings_to_include list. An example is the static polygon filetype -
# this relies on strings_to_include being applied no matter what.

# For example, Merdith2021 and Muller2019 have file_collection_sensitive = False because these
# models currently don't have any files that MUST be excluded for their own instance but MUST
# be included for other model instances.

# Conversely, Muller2022 has file_collection_sensitive = True because it requires all published
# Merdith2021 rotation models to be ignored (in favour of the optimised model). However, we do not
# want to ignore the Merdith2021 rotation models when using DataServer to collect Merdith2021 files.
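
# A brief walkthrough of the behaviour described above (filenames other than the
# optimised rotation file named in data.py are illustrative only):
#
#   file_collection = "Muller2022"
#   strings_to_include = ["Muller2022 1000_0_rotfile_Merdith_et_al_optimised.rot"]
#
#   "1000_0_rotfile_Merdith_et_al_optimised.rot"  -> kept (matches the Muller2022-specific entry)
#   any other bundled rotation file               -> dropped (no Muller2022-specific match)
#
# With file_collection = "Merdith2021" and the same list, no entry names that collection;
# since rotation files are filtered with file_collection_sensitive=True, the include
# filter is waived and every rotation file passes through to the ignore filter below.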

if file_collection is not None:

# If the file collection is in the provided list of strings to include...
strings_with_file_collection = [s for s in strings_to_include if file_collection.lower() in s.lower()]
if strings_with_file_collection:

# Keep the file matching this string, and break out.
for s in strings_with_file_collection:
if s.split(" ")[-1].lower() in basename.lower():
keep = True
break

# If a file collection was passed but none of the strings to include mention it,
else:
# If strings_to_include no longer applies, treat it as if it were None and just pass
# all files through.
if file_collection_sensitive is True:
keep = True

# If strings_to_include still applies, treat it as usual and pass only the
# requested files through.
else:
for s in strings_to_include:
if s.lower() in basename.lower():
keep = True
break


# If a file collection is not passed, but strings_to_include exists, only pass through those requested.
else:
for s in strings_to_include:
if s.lower() in basename.lower():
keep = True
break

if strings_to_ignore is not None:
for s in strings_to_ignore:
if s.lower() in basename.lower():
@@ -1355,7 +1424,11 @@ def __init__(self, file_collection, verbose=True):

self.file_collection = file_collection.capitalize()
self.data_collection = DataCollection(self.file_collection)
self.verbose = verbose

if isinstance(verbose, bool):
self.verbose = verbose
else:
raise ValueError("The verbose toggle must be of Boolean type, not {}".format(type(verbose)))


def get_plate_reconstruction_files(self):
@@ -1425,7 +1498,10 @@ def get_plate_reconstruction_files(self):
rotation_filenames = _collect_file_extension(
_str_in_folder(
_str_in_filename(fnames,
strings_to_ignore=DataCollection.rotation_strings_to_ignore(self)
strings_to_include=DataCollection.rotation_strings_to_include(self),
strings_to_ignore=DataCollection.rotation_strings_to_ignore(self),
file_collection=self.file_collection,
file_collection_sensitive=True
),
strings_to_ignore=DataCollection.rotation_strings_to_ignore(self)
),
@@ -1438,7 +1514,9 @@ def get_plate_reconstruction_files(self):
_str_in_folder(
_str_in_filename(fnames,
strings_to_include=DataCollection.dynamic_polygon_strings_to_include(self),
strings_to_ignore=DataCollection.dynamic_polygon_strings_to_ignore(self)
strings_to_ignore=DataCollection.dynamic_polygon_strings_to_ignore(self),
file_collection=self.file_collection,
file_collection_sensitive=False,
),
strings_to_ignore=DataCollection.dynamic_polygon_strings_to_ignore(self)
),
@@ -1452,7 +1530,9 @@ def get_plate_reconstruction_files(self):
_str_in_folder(
_str_in_filename(fnames,
strings_to_include=DataCollection.static_polygon_strings_to_include(self),
strings_to_ignore=DataCollection.static_polygon_strings_to_ignore(self)
strings_to_ignore=DataCollection.static_polygon_strings_to_ignore(self),
file_collection=self.file_collection,
file_collection_sensitive=False
),
strings_to_ignore=DataCollection.static_polygon_strings_to_ignore(self)
)
@@ -1587,7 +1667,9 @@ def get_topology_geometries(self):
_str_in_filename(
fnames,
strings_to_include=DataCollection.coastline_strings_to_include(self),
strings_to_ignore=DataCollection.coastline_strings_to_ignore(self)
strings_to_ignore=DataCollection.coastline_strings_to_ignore(self),
file_collection=self.file_collection,
file_collection_sensitive=False
),
strings_to_ignore=DataCollection.coastline_strings_to_ignore(self)
)
@@ -1597,7 +1679,9 @@ def get_topology_geometries(self):
_str_in_filename(
fnames,
strings_to_include=DataCollection.continent_strings_to_include(self),
strings_to_ignore=DataCollection.continent_strings_to_ignore(self)
strings_to_ignore=DataCollection.continent_strings_to_ignore(self),
file_collection=self.file_collection,
file_collection_sensitive=False
),
strings_to_ignore=DataCollection.continent_strings_to_ignore(self)
)
@@ -1607,7 +1691,9 @@ def get_topology_geometries(self):
_str_in_filename(
fnames,
strings_to_include=DataCollection.COB_strings_to_include(self),
strings_to_ignore=DataCollection.COB_strings_to_ignore(self)
strings_to_ignore=DataCollection.COB_strings_to_ignore(self),
file_collection=self.file_collection,
file_collection_sensitive=False
),
strings_to_ignore=DataCollection.COB_strings_to_ignore(self)
)
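Taken together, the download.py changes above route each model's file_collection into the filename filters. An end-to-end sketch for the new and updated models; the unpacked return values follow gplately's documented usage and are assumed here rather than taken from this diff:

from gplately.download import DataServer

# Muller2022 now resolves to the optimised 1 Ga plate-motion-model archive; rotation files
# are filtered with file_collection_sensitive=True, so only the optimised .rot file survives.
muller2022 = DataServer("Muller2022")
rotation_model, topology_features, static_polygons = muller2022.get_plate_reconstruction_files()

# Coastlines, continents and COBs for the 1.8 Ga Cao et al. 2023 model; coastline filtering is
# file_collection_sensitive=False, so the generic "coast" include string added in data.py applies.
cao2023 = DataServer("Cao2023")
coastlines, continents, COBs = cao2023.get_topology_geometries()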
