Allow user to run specific zones. #140

Merged 1 commit on Jan 23, 2025
StreamCat.py (3 changes: 2 additions & 1 deletion)

@@ -44,6 +44,7 @@
     OUT_DIR,
     PCT_FULL_FILE,
     PCT_FULL_FILE_RP100,
+    USER_ZONES,
 )
 from StreamCat_functions import (
     Accumulation,
@@ -71,7 +72,7 @@

 if not os.path.exists(ACCUM_DIR):
     # TODO: work out children OR bastards only
-    makeNumpyVectors(inter_vpu, NHD_DIR)
+    makeNumpyVectors(inter_vpu, NHD_DIR, USER_ZONES)

 INPUTS = np.load(ACCUM_DIR +"/vpu_inputs.npy", allow_pickle=True).item()
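The driver change above is plumbing: the USER_ZONES dict from the config is handed to makeNumpyVectors, and the zone mapping comes back to StreamCat.py through accum_npy/vpu_inputs.npy. A minimal standalone sketch of that save/load round-trip, using an illustrative file name and example zones rather than the repository's real paths:

from collections import OrderedDict
import numpy as np

# nhd_dict() persists the zone mapping by saving the dict object itself
zones = OrderedDict({"04": "GL", "12": "TX"})  # UnitID -> DrainageID
np.save("vpu_inputs.npy", zones)

# StreamCat.py reloads it the same way the INPUTS line above does:
# allow_pickle=True plus .item() recover the original dict
inputs = np.load("vpu_inputs.npy", allow_pickle=True).item()
assert inputs == zones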
StreamCat_functions.py (15 changes: 11 additions & 4 deletions)

@@ -1270,7 +1270,7 @@ def make_all_cat_comids(nhd, inputs):
     return set(all_comids)  # RETURN A SET!


-def makeNumpyVectors(inter_tbl, nhd):
+def makeNumpyVectors(inter_tbl, nhd, user_zones):
     """
     Uses the NHD tables to create arrays of upstream catchments which are used
     in the Accumulation function
@@ -1281,7 +1281,7 @@ def makeNumpyVectors(inter_tbl, nhd):
     nhd : directory where NHD is stored
     """
     os.mkdir("accum_npy")
-    inputs = nhd_dict(nhd)
+    inputs = nhd_dict(nhd, user_zones=user_zones)
     all_comids = make_all_cat_comids(nhd, inputs)
     print("Making numpy files in zone...", end="", flush=True)
     for zone, hr in inputs.items():
@@ -1334,7 +1334,7 @@ def makeNumpyVectors(inter_tbl, nhd):
 ##############################################################################


-def nhd_dict(nhd, unit="VPU"):
+def nhd_dict(nhd, unit="VPU", user_zones=None):
     """
     __author__ = "Rick Debbout <[email protected]>"
     Creates an OrderdDict for looping through regions of the NHD to carry
@@ -1351,7 +1351,12 @@ def nhd_dict(nhd, unit="VPU"):
     """

     inputs = OrderedDict()
+    if user_zones:  # Use user specified zones
+        inputs |= user_zones
+        np.save("./accum_npy/vpu_inputs.npy", inputs)
+        return inputs
     bounds = dbf2DF(f"{nhd}/NHDPlusGlobalData/BoundaryUnit.dbf")
+    # Drop Hawaii and Cayman Islands.
     remove = bounds.loc[bounds.DRAINAGEID.isin(["HI", "CI"])].index
     bounds = bounds.drop(remove, axis=0)
     if unit == "VPU":
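The early return above is the heart of the PR: when user_zones is supplied, nhd_dict skips the BoundaryUnit.dbf scan, merges the user's mapping into the OrderedDict with the in-place dict union operator (Python 3.9+), saves it, and returns it, so only the requested zones are accumulated. A simplified standalone sketch of that control flow; the function name and the stubbed fallback are illustrative, not the real implementation:

from collections import OrderedDict
import numpy as np

def zone_inputs(user_zones=None):
    inputs = OrderedDict()
    if user_zones:                 # user-specified subset of zones
        inputs |= user_zones       # dict union assignment, Python 3.9+
        np.save("vpu_inputs.npy", inputs)
        return inputs
    # otherwise fall through and build the full VPU mapping from
    # NHDPlusGlobalData/BoundaryUnit.dbf, as the unchanged code does
    raise NotImplementedError("full BoundaryUnit.dbf scan not sketched here")

subset = zone_inputs({"04": "GL", "12": "TX"})
print(list(subset.items()))        # [('04', 'GL'), ('12', 'TX')]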
@@ -1403,7 +1408,9 @@ def findUpstreamNpy(zone, com, numpy_dir):


 def dbf2DF(f, upper=True):
-    data = gpd.read_file(f).drop("geometry", axis=1)
+    data = gpd.read_file(f)
+    if "geometry" in data:
+        data.drop("geometry", axis=1, inplace=True)
     if upper is True:
         data.columns = data.columns.str.upper()
     return data
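The dbf2DF tweak is defensive: depending on the engine, geopandas may read a standalone .dbf as a plain attribute table with no geometry column, and the old unconditional .drop("geometry", axis=1) would raise a KeyError in that case. Since "geometry" in data tests column membership on a DataFrame, the drop now only runs when a geometry column is actually present. A small sketch of the same guard on a plain pandas frame (the helper name and test data are made up):

import pandas as pd

def drop_geometry_if_present(df, upper=True):
    # "geometry" in df checks column labels, so the drop only runs when present
    if "geometry" in df:
        df = df.drop("geometry", axis=1)
    if upper:
        df.columns = df.columns.str.upper()
    return df

table = pd.DataFrame({"UnitID": ["04"], "DrainageID": ["GL"]})
print(drop_geometry_if_present(table).columns.tolist())  # ['UNITID', 'DRAINAGEID']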
stream_cat_config.py.template (3 changes: 3 additions & 0 deletions)

@@ -18,6 +18,9 @@ STATES_FILE = "/path/to/file/tl_2008_us_state.shp"

 ACCUM_DIR = "path/to/local/repository/accump_npy/"

+# to run other than all NHD zones, set this dict to e.g. {"04": "GL", "12": "TX"}
+# keys are UnitID and values are DrainageID, see ...\NHDPlusGlobalData\BoundaryUnit.dbf
+USER_ZONES = {}

 # location to write out accumulated StreamCat data <- this is intermediate
 OUT_DIR = "/path/to/write/out/files/to"
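Putting it together, a hypothetical filled-in stream_cat_config.py based on the template above: with USER_ZONES set, nhd_dict never touches BoundaryUnit.dbf and only the listed zones are processed; leaving the dict empty keeps the old run-everything behavior. The two example zones come from the template's own comment:

# restrict the run to two NHD zones; keys are UnitIDs, values the matching
# DrainageIDs from NHDPlusGlobalData/BoundaryUnit.dbf
USER_ZONES = {"04": "GL", "12": "TX"}

# an empty dict (the default) processes every NHD zone as before
# USER_ZONES = {}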