update feature backend when lat/lon empty, store URL
cyschneck committed Jun 12, 2024
1 parent 108facf commit 76e463d
Showing 3 changed files with 43 additions and 4 deletions.
38 changes: 38 additions & 0 deletions add_new_features.py
@@ -0,0 +1,38 @@
## Developer Note: Update Pydar's backend when new features are added
## New officially named features: https://planetarynames.wr.usgs.gov/#nomenclature-news
import pydar
import os
import pandas as pd

if __name__ == "__main__":
    os.system('python pydar/updateCsvCORADARJPLOptions.py')
    os.system('python pydar/updateCsvFeatureNameDetails.py')
    os.system('python pydar/updateCsvSwathCoverage.py')

    print("New Features (diff):")
    os.system("git diff pydar/data/feature_name_details.csv | grep '^[+-][^+-]'")

    # read in all feature names from the CSV
    features_df = pd.read_csv("pydar/data/feature_name_details.csv")
    features_in_csv = list(features_df["Feature Name"])

    # list of all features that exist (with both latitude/longitude values)
    retrieved_features = pydar.retrieveFeaturesFromLatitudeLongitudeRange(min_latitude=-90, max_latitude=90, min_longitude=0, max_longitude=360)

    # check whether all features in the CSV have both latitude/longitude values
    if features_in_csv != retrieved_features:
        print("\nMissing features to Fix or Ignore:")
        all_missing_features = [x for x in features_in_csv if x not in retrieved_features]
        for feature_missing in all_missing_features:
            print(feature_missing)
            feature_row = features_df[features_df["Feature Name"] == feature_missing]
            missing_lat_long = feature_row.columns[feature_row.isna().any()].tolist()
            print(f"\tMissing: {missing_lat_long}")
            print(f"\t{feature_row['URL'].iloc[0]}")

    print("\nAll features in CSV with latitude/longitude values")
    print("New Total Feature List to Copy into README and Tests:")
    print(retrieved_features)
    print("Update README.md")
    print("Update 'feature_name_full_list' in pydar/pytests/test_error_retrieve_ids_by_time_position.py")
    print("Run: python -m pytest")
1 change: 1 addition & 0 deletions pydar/retrieve_ids_by_time_position.py
@@ -28,6 +28,7 @@ def latitudeLongitudeWithFeatureNameFromCSV():
    flyby_dataframe = pd.read_csv(flyby_csv_file)

    for index, row in flyby_dataframe.iterrows():
+       if not pd.isnull(row).any(): # ignore rows where Latitude/Longitude are empty
            feature_name_dict[row["Feature Name"]] = {"Southmost Latitude": row["Southmost Latitude"],
                                                      "Northmost Latitude": row["Northmost Latitude"],
                                                      "Eastmost Longitude": row["Eastmost Longitude"],
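The added guard is the usual pandas idiom for skipping any row that still has an empty cell before it is loaded into the lookup dictionary. A tiny sketch with made-up data:

import pandas as pd

df = pd.DataFrame({
    "Feature Name": ["Feature A", "Feature B"],
    "Southmost Latitude": [10.0, None],   # Feature B has no latitude/longitude yet
    "Northmost Latitude": [12.0, None],
})

for index, row in df.iterrows():
    if not pd.isnull(row).any():  # ignore rows where any column is empty
        print(f"keeping {row['Feature Name']}")   # only Feature A is kept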
8 changes: 4 additions & 4 deletions pydar/updateCsvFeatureNameDetails.py
@@ -62,11 +62,13 @@ def updateCsvFeatureNameDetails():
        logger.info(f"[{i+1}/{len(ahref_lst)}] Retrieving: {base_url + feature_ahref}")
        soup = BeautifulSoup(feature_html, 'html.parser')
        tables = soup.find_all('table', class_='usa-table')
+       # [Feature Name, Northmost Latitude, Southmost Latitude, Eastmost Longitude, Westmost Longitude, Center Latitude, Center Longitude, URL]
        feature_object = [None, None, None, None, None, None, None, None]
        for table in tables:
            for row in table.tbody.find_all("tr"):
                feature_row = ((row.text).lstrip()).split("\n")
                feature_row = [f.strip() for f in feature_row if f != '' and re.search(r'[a-zA-Z-?\d+]', f)]
+               feature_object[7] = base_url + feature_ahref
                if len(feature_row) == 2:
                    if feature_row[0] == "Feature Name":
                        feature_object[0] = feature_row[1]
@@ -82,8 +84,6 @@
                        feature_object[5] = feature_row[1].split(" ")[0]
                    if feature_row[0] == "Center Longitude":
                        feature_object[6] = feature_row[1].split(" ")[0]
-                   if feature_row[0] == "Origin":
-                       feature_object[7] = feature_row[1]
        feature_options.append(feature_object)

    # Add Huygens landing site manually
@@ -94,7 +94,7 @@
"167.547",
"-10.576",
"167.547",
"where the Huygens probe landed east Adiri"]
"https://pds-imaging.jpl.nasa.gov/documentation/Cassini_RADAR_Users_Guide_2nd_Ed_191004_cmp_200421.pdf#page=165"]
feature_options.append(huygens_landing_site)

# Write to CSV
@@ -105,7 +105,7 @@
"Westmost Longitude",
"Center Latitude",
"Center Longitude",
"Origin of Name"]
"URL"]
df = pd.DataFrame(feature_options, columns=header_options)
df = df.sort_values(by=["Feature Name"])
df.to_csv(os.path.join(os.path.dirname(__file__), 'data', 'feature_name_details.csv'), header=header_options, index=False)
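For context, the parsing loop above walks the 'usa-table' rows on each planetarynames.wr.usgs.gov feature page and now records the page URL in the last slot of feature_object instead of the "Origin" text. A minimal, self-contained sketch of that label/value extraction against an inline HTML fragment (made-up markup and a placeholder feature path, not the live page) follows.

import re
from bs4 import BeautifulSoup

base_url = "https://planetarynames.wr.usgs.gov"
feature_ahref = "/Feature/XXXX"  # placeholder path, not a real feature id

# Made-up stand-in for one feature page's 'usa-table'
feature_html = """
<table class="usa-table">
  <tbody>
    <tr>
      <td>Feature Name</td>
      <td>Feature A</td>
    </tr>
    <tr>
      <td>Center Latitude</td>
      <td>-10.5 degrees</td>
    </tr>
    <tr>
      <td>Center Longitude</td>
      <td>167.5 degrees</td>
    </tr>
  </tbody>
</table>
"""

# [Feature Name, Northmost Latitude, Southmost Latitude, Eastmost Longitude,
#  Westmost Longitude, Center Latitude, Center Longitude, URL]
feature_object = [None, None, None, None, None, None, None, None]
soup = BeautifulSoup(feature_html, "html.parser")
for table in soup.find_all("table", class_="usa-table"):
    for row in table.tbody.find_all("tr"):
        feature_row = ((row.text).lstrip()).split("\n")
        feature_row = [f.strip() for f in feature_row if f != '' and re.search(r'[a-zA-Z-?\d+]', f)]
        feature_object[7] = base_url + feature_ahref  # store the source URL in the last slot
        if len(feature_row) == 2:
            if feature_row[0] == "Feature Name":
                feature_object[0] = feature_row[1]
            if feature_row[0] == "Center Latitude":
                feature_object[5] = feature_row[1].split(" ")[0]
            if feature_row[0] == "Center Longitude":
                feature_object[6] = feature_row[1].split(" ")[0]

print(feature_object)
# ['Feature A', None, None, None, None, '-10.5', '167.5', 'https://planetarynames.wr.usgs.gov/Feature/XXXX']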
