Skip to content

Commit

Permalink
add hazardDatasets field to TornadoDataset model (#257)
Browse files Browse the repository at this point in the history
* Remove LdapClient and refactor methods in Authorizer class in interface

* Warning changes

* add hazardDatasets field to TornadoDataset model

* modify eqmodel to send hazardDataset instead of rasterDataset

* add hazardDatasets to tornadoModel as well

* remove unused imports

* fix post tornado and remove unused imports

* remove test files

* use addTornadoHazardDataset instead of setHazardDatasets

---------

Co-authored-by: Ya-Lan Yang <[email protected]>
  • Loading branch information
Rashmil-1999 and ylyangtw authored Jan 25, 2024
1 parent aeb0c94 commit 5c87be5
Show file tree
Hide file tree
Showing 9 changed files with 128 additions and 36 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

## [Unreleased]

### Added
- Add `hazardDatasets` field to TornadoDataset, TornadoModel and EarthquakeModel class [#213](https://github.com/IN-CORE/incore-services/issues/213)

### Changed
- Use Java models to represent semantics [#239](https://github.com/IN-CORE/incore-services/issues/239)
- Sort Semantic Definition Alphabetically [#238](https://github.com/IN-CORE/incore-services/issues/238)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,14 +118,14 @@ private void storeEarthquakeResults(Job job) {
String earthquakeId = job.getObjectId();

EarthquakeModel earthquake = (EarthquakeModel) repository.getEarthquakeById(earthquakeId);
String demandType = earthquake.getRasterDataset().getDemandType();
String demandType = earthquake.getHazardDatasets().get(0).getDemandType();
String username = earthquake.getCreator();
String description = "Earthquake visualization";
String userGroups = "{\"groups\": [\"incore_user\"]}";
try {
String datasetId = ServiceUtil.createRasterDataset(hazardFile, demandType + " hazard", username, userGroups,
description, HazardConstants.DETERMINISTIC_EARTHQUAKE_HAZARD_SCHEMA);
earthquake.getRasterDataset().setDatasetId(datasetId);
earthquake.getHazardDatasets().get(0).setDatasetId(datasetId);

repository.addEarthquake(earthquake);
log.debug("eq id is = " + earthquakeId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import dev.morphia.annotations.Property;
import dev.morphia.annotations.experimental.Name;
import edu.illinois.ncsa.incore.common.AllocationConstants;
import edu.illinois.ncsa.incore.common.HazardConstants;
import edu.illinois.ncsa.incore.common.auth.Authorizer;
Expand Down Expand Up @@ -222,7 +220,7 @@ public Earthquake createEarthquake(
rasterDataset.setDemandUnits(scenarioEarthquake.getVisualizationParameters().getDemandUnits());
rasterDataset.setPeriod(Double.parseDouble(demandComponents[0]));

scenarioEarthquake.setHazardDataset(rasterDataset);
scenarioEarthquake.addEarthquakeHazardDataset(rasterDataset);
// add creator using username info
earthquake.setCreator(this.username);
earthquake.setOwner(this.username);
Expand Down Expand Up @@ -1080,8 +1078,10 @@ public Earthquake deleteEarthquake(@Parameter(name = "Earthquake Id", required =
// delete associated datasets
if (eq != null && eq instanceof EarthquakeModel) {
EarthquakeModel scenarioEarthquake = (EarthquakeModel) eq;
if (ServiceUtil.deleteDataset(scenarioEarthquake.getRasterDataset().getDatasetId(), this.username, this.userGroups) == null) {
spaceRepository.addToOrphansSpace(scenarioEarthquake.getRasterDataset().getDatasetId());
for (HazardDataset dataset : scenarioEarthquake.getHazardDatasets()) {
if (ServiceUtil.deleteDataset(dataset.getDatasetId(), this.username, this.userGroups) == null) {
spaceRepository.addToOrphansSpace(dataset.getDatasetId());
}
}
} else if (eq != null && eq instanceof EarthquakeDataset) {
EarthquakeDataset eqDataset = (EarthquakeDataset) eq;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,6 @@ public List<DemandDefinition> getTornadoDemands() {
@Operation(summary = "Creates a new tornado, the newly created tornado is returned.",
description = "Additionally, a GeoTiff (raster) is created by default and publish to data repository. " +
"User can create both model tornadoes and dataset-based tornadoes with GeoTiff files uploaded.")

@RequestBody(description = "Tornado json and files.", required = true,
content = @Content(mediaType = MediaType.APPLICATION_FORM_URLENCODED,
schema = @Schema(type = "object",
Expand Down Expand Up @@ -263,20 +262,28 @@ public Tornado createTornado(

// Store the dataset
datasetId = ServiceUtil.createDataset(datasetObject, this.username, this.userGroups, files);
tornadoModel.setDatasetId(datasetId);
// Assuming only one hazardDataset will be created here.
// construct tornado hazardDatasets
TornadoHazardDataset hazardDataset = new TornadoHazardDataset();
hazardDataset.setDatasetId(datasetId);
hazardDataset.setDemandType(TornadoHazard.DEMAND_TYPE);
hazardDataset.setDemandUnits(TornadoHazard.WIND_MPH);
hazardDataset.setThreshold(null);
tornadoModel.addTornadoHazardDataset(hazardDataset);

tornado.setCreator(this.username);
tornado.setOwner(this.username);
tornado = repository.addTornado(tornado);
addTornadoToSpace(tornado, this.username);
} else if (tornado != null && tornado instanceof TornadoDataset) {
TornadoDataset tornadoDataset = (TornadoDataset) tornado;
datasetId = null;

if (fileParts != null && !fileParts.isEmpty() && TornadoUtils.validateDatasetTypes(fileParts)) {
// Create dataset object representation for storing shapefile
JSONObject datasetObject = TornadoUtils.getTornadoDatasetObject("Tornado Hazard", "EF Boxes representing tornado");
// Store the dataset
datasetId = ServiceUtil.createDataset(datasetObject, this.username, this.userGroups);

if (datasetId != null) {
// attach files to the dataset
int statusCode = ServiceUtil.attachFileToTornadoDataset(datasetId, this.username, this.userGroups, fileParts);
Expand All @@ -285,11 +292,18 @@ public Tornado createTornado(
logger.error(tornadoErrorMsg);
throw new IncoreHTTPException(Response.Status.BAD_REQUEST, tornadoErrorMsg);
}

// construct tornado hazardDatasets
TornadoHazardDataset hazardDataset = new TornadoHazardDataset();
hazardDataset.setDatasetId(datasetId);
hazardDataset.setDemandType(TornadoHazard.DEMAND_TYPE);
hazardDataset.setDemandUnits(TornadoHazard.WIND_MPH);
hazardDataset.setThreshold(null);
tornadoDataset.addTornadoHazardDataset(hazardDataset);
} else {
logger.error(tornadoJsonErrorMsg);
throw new IncoreHTTPException(Response.Status.BAD_REQUEST, tornadoJsonErrorMsg);
}
((TornadoDataset) tornado).setDatasetId(datasetId);

tornado.setCreator(this.username);
tornado.setOwner(this.username);
Expand All @@ -314,7 +328,6 @@ public Tornado createTornado(
}
ServiceUtil.deleteDataset(datasetId, this.username, this.userGroups);
throw new IncoreHTTPException(Response.Status.BAD_REQUEST, tornadoErrorMsg);

}

@GET
Expand Down Expand Up @@ -531,14 +544,17 @@ public Tornado deleteTornado(@Parameter(name = "Tornado Id", required = true) @P
// delete associated datasets
if (tornado != null && tornado instanceof TornadoModel) {
TornadoModel tModel = (TornadoModel) tornado;
if (ServiceUtil.deleteDataset(tModel.getDatasetId(), this.username, this.userGroups) == null) {
spaceRepository.addToOrphansSpace(tModel.getDatasetId());
for (TornadoHazardDataset dataset: tModel.getHazardDatasets()) {
if (ServiceUtil.deleteDataset(dataset.getDatasetId(), this.username, this.userGroups) == null) {
spaceRepository.addToOrphansSpace(dataset.getDatasetId());
}
}
} else if (tornado != null && tornado instanceof TornadoDataset) {
TornadoDataset tDataset = (TornadoDataset) tornado;
ServiceUtil.deleteDataset(tDataset.getDatasetId(), this.username, this.userGroups);
if (ServiceUtil.deleteDataset(tDataset.getDatasetId(), this.username, this.userGroups) == null) {
spaceRepository.addToOrphansSpace(tDataset.getDatasetId());
for (TornadoHazardDataset dataset: tDataset.getHazardDatasets()) {
if (ServiceUtil.deleteDataset(dataset.getDatasetId(), this.username, this.userGroups) == null) {
spaceRepository.addToOrphansSpace(dataset.getDatasetId());
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@

import dev.morphia.annotations.Entity;

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

@Entity("EarthquakeModel")
Expand All @@ -23,21 +25,23 @@ public class EarthquakeModel extends Earthquake {
private final String siteAmplification;

// Visualization raster
private HazardDataset rasterDataset;
private List<HazardDataset> hazardDatasets = new LinkedList<HazardDataset>();

public EarthquakeModel() {
defaultSiteClass = NEHRPSoilType.D;
siteAmplification = "NEHRP";
}

public void setHazardDataset(HazardDataset rasterDataset) {
this.rasterDataset = rasterDataset;
public void setHazardDataset(List<HazardDataset> hazardDatasets) {
this.hazardDatasets = hazardDatasets;
}

public HazardDataset getRasterDataset() {
return this.rasterDataset;
public List<HazardDataset> getHazardDatasets() {
return this.hazardDatasets;
}

public void addEarthquakeHazardDataset(HazardDataset hazardDataset) { this.hazardDatasets.add(hazardDataset); }

public Map<String, Double> getAttenuations() {
return this.attenuations;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,19 +7,29 @@
package edu.illinois.ncsa.incore.service.hazard.models.tornado;

import dev.morphia.annotations.Entity;
import java.util.LinkedList;
import java.util.List;

@Entity("TornadoDataset")
public class TornadoDataset extends Tornado {
// CMN: this could be moved to the parent if we determine there will be no difference between probabilistic and
// deterministic tornadoes. If there would be multiple files with different probabilities, this should be
// modified similar to the Earthquake HazardDataset and the Tsunami hazard dataset
private String datasetId;

public String getDatasetId() {
return datasetId;
private List<TornadoHazardDataset> hazardDatasets;

public TornadoDataset(){
this.hazardDatasets = new LinkedList<>();
}

public List<TornadoHazardDataset> getHazardDatasets() {
return hazardDatasets;
}

public void setDatasetId(String datasetId) {
this.datasetId = datasetId;
public void setHazardDatasets(List<TornadoHazardDataset> hazardDatasets) {
this.hazardDatasets = hazardDatasets;
}

public void addTornadoHazardDataset(TornadoHazardDataset hazardDataset) { this.hazardDatasets.add(hazardDataset); }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package edu.illinois.ncsa.incore.service.hazard.models.tornado;

import com.fasterxml.jackson.annotation.JsonIgnore;
import dev.morphia.annotations.Embedded;

@Embedded
public class TornadoHazardDataset {
    // Identifier of the dataset stored in the data service for this hazard.
    private String datasetId;
    // Demand type string for this hazard dataset.
    private String demandType;
    // Units associated with the demand values.
    private String demandUnits;
    // Optional threshold; null indicates no threshold was set.
    private Double threshold = null;

    /** Returns the backing dataset id. */
    public String getDatasetId() {
        return this.datasetId;
    }

    /** Sets the backing dataset id. */
    public void setDatasetId(String datasetId) {
        this.datasetId = datasetId;
    }

    /** Returns the demand type. */
    public String getDemandType() {
        return this.demandType;
    }

    /** Sets the demand type. */
    public void setDemandType(String demandType) {
        this.demandType = demandType;
    }

    /** Returns the demand units. */
    public String getDemandUnits() {
        return this.demandUnits;
    }

    /** Sets the demand units. */
    public void setDemandUnits(String demandUnits) {
        this.demandUnits = demandUnits;
    }

    /** Returns the threshold value, or null when unset. */
    public Double getThreshold() {
        return this.threshold;
    }

    /** Sets the threshold value; may be null. */
    public void setThreshold(Double threshold) {
        this.threshold = threshold;
    }

    /**
     * Builds a single-quoted JSON-like string describing the threshold,
     * keyed by demand type. Excluded from Jackson serialization.
     * NOTE(review): output uses single quotes, which is not strict JSON —
     * presumably the downstream consumer expects this exact format; verify
     * before changing.
     */
    @JsonIgnore
    public String getThresholdJsonString() {
        return String.format("{'%s': {'value': %s, 'unit': '%s'}}",
            this.demandType, this.threshold, this.demandUnits);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import dev.morphia.annotations.Entity;
import edu.illinois.ncsa.incore.service.hazard.models.tornado.types.EFBox;

import java.util.LinkedList;
import java.util.List;

@Entity("TornadoModel")
Expand All @@ -21,7 +22,22 @@ public class TornadoModel extends Tornado {
private TornadoParameters tornadoParameters;
private List<Double> tornadoWidth;
private List<EFBox> efBoxes;
private String datasetId;

private List<TornadoHazardDataset> hazardDatasets;

public TornadoModel(){
this.hazardDatasets = new LinkedList<>();
}

public List<TornadoHazardDataset> getHazardDatasets() {
return hazardDatasets;
}

public void setHazardDatasets(List<TornadoHazardDataset> hazardDatasets) {
this.hazardDatasets = hazardDatasets;
}

public void addTornadoHazardDataset(TornadoHazardDataset hazardDataset) { this.hazardDatasets.add(hazardDataset); }

public List<Double> getTornadoWidth() {
return tornadoWidth;
Expand Down Expand Up @@ -56,11 +72,4 @@ public void setTornadoModel(String tornadoModel) {
this.tornadoModel = tornadoModel;
}

public String getDatasetId() {
return datasetId;
}

public void setDatasetId(String datasetId) {
this.datasetId = datasetId;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public static WindHazardResult getWindHazardAtSite(Tornado tornado, Point localS
scenarioTornado.getTornadoParameters(), seed);
} else {
TornadoDataset tornadoDataset = (TornadoDataset) tornado;
Object obj = GISUtil.getFeatureCollection(tornadoDataset.getDatasetId(), username, userGroups);
Object obj = GISUtil.getFeatureCollection(tornadoDataset.getHazardDatasets().get(0).getDatasetId(), username, userGroups);
if (obj == null) {
throw new IOException(" Could not calculate the grid coverage for the raster. Possibly because the dataset files are " +
"unreadable or not found.");
Expand Down

0 comments on commit 5c87be5

Please sign in to comment.