diff --git a/.gitignore b/.gitignore index 552cd4b..25cbb92 100644 --- a/.gitignore +++ b/.gitignore @@ -156,3 +156,9 @@ dependencies.pdf use_cases/nibelungenbruecke_demonstrator_line_test/output/* use_cases/nibelungenbruecke_demonstrator_self_weight/output/* *.pdf + +### sensor output files ### +use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/output/sensors/* +use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/20230215092338.json +use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/API_meta_output.json +use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/*.csv diff --git a/nibelungenbruecke/scripts/data_generation/displacement_generator_fenicsxconcrete.py b/nibelungenbruecke/scripts/data_generation/displacement_generator_fenicsxconcrete.py index a816ec3..4f3f26a 100644 --- a/nibelungenbruecke/scripts/data_generation/displacement_generator_fenicsxconcrete.py +++ b/nibelungenbruecke/scripts/data_generation/displacement_generator_fenicsxconcrete.py @@ -2,56 +2,76 @@ from petsc4py.PETSc import ScalarType from dolfinx import fem import dolfinx as df - +import json +import numpy as np from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity from fenicsxconcrete.util import ureg - from mpi4py import MPI from nibelungenbruecke.scripts.data_generation.generator_model_base_class import GeneratorModel from nibelungenbruecke.scripts.data_generation.nibelungen_experiment import NibelungenExperiment -from nibelungenbruecke.scripts.utilities.sensor_translators import Translator - +from nibelungenbruecke.scripts.utilities.API_sensor_retrieval import API_Request, MetadataSaver, Translator class GeneratorFeniCSXConcrete(GeneratorModel): + """ + A class for generating FEniCS-X Concrete-based models and handling data generation. + + Parameters: + model_path (str): Path to the model. + sensor_positions_path (str): Path to the sensor positions. + model_parameters (dict): Model parameters. + output_parameters (dict): Output parameters (optional). + """ + def __init__(self, model_path: str, sensor_positions_path: str, model_parameters: dict, output_parameters: dict = None): super().__init__(model_path, sensor_positions_path, model_parameters, output_parameters) - self.material_parameters = self.model_parameters["material_parameters"] # currently it is an empty dict!! + self.material_parameters = self.model_parameters["material_parameters"] # Default empty dict!! def LoadGeometry(self): ''' Load the meshed geometry from a .msh file''' pass def GenerateModel(self): + """Generate the model based on the provided parameters.""" self.experiment = NibelungenExperiment(self.model_path, self.material_parameters) - default_p = self._get_default_parameters() default_p.update(self.experiment.default_parameters()) self.problem = LinearElasticity(self.experiment, default_p) - + def GenerateData(self): - #Generating Translator object - T = Translator(self.sensor_positions) - - # Translation from MKP data format (currently supports "move" operations only!) 
- _, meta_output_path = T.translator_to_sensor(self.model_parameters["df_output_path"], self.model_parameters["meta_output_path"]) + """Generate data based on the model parameters.""" - self.problem.import_sensors_from_metadata(meta_output_path) + api_request = API_Request() + self.api_dataFrame = api_request.fetch_data() + + metadata_saver = MetadataSaver(self.model_parameters, self.api_dataFrame) + metadata_saver.saving_metadata() + + translator = Translator(self.model_parameters) + translator.translator_to_sensor() + + self.problem.import_sensors_from_metadata(self.model_parameters["MKP_meta_output_path"]) + self.problem.fields.temperature = self.problem.fields.displacement #!! NOTE: temperature field is aliased to the displacement field self.problem.solve() - #Paraview output + translator.save_to_MKP(self.api_dataFrame) + translator.save_virtual_sensor(self.problem) + + if self.model_parameters["paraview_output"]: with df.io.XDMFFile(self.problem.mesh.comm, self.model_parameters["paraview_output_path"]+"/"+self.model_parameters["model_name"]+".xdmf", "w") as xdmf: xdmf.write_mesh(self.problem.mesh) xdmf.write_function(self.problem.fields.displacement) - # Reverse translation to MKP data format - T.translator_to_MKP(self.problem, self.model_parameters["save_to_MKP_path"]) - @staticmethod def _get_default_parameters(): + """ + Get default material parameters. + + Returns: + dict: Default material parameters. + """ default_parameters = { "rho":7750 * ureg("kg/m^3"), "E":210e9 * ureg("N/m^2"), "nu":0.28 * ureg("") } return default_parameters - diff --git a/nibelungenbruecke/scripts/utilities/API_sensor_retrieval.py b/nibelungenbruecke/scripts/utilities/API_sensor_retrieval.py new file mode 100644 index 0000000..df1be21 --- /dev/null +++ b/nibelungenbruecke/scripts/utilities/API_sensor_retrieval.py @@ -0,0 +1,507 @@ + +#%% +# Import the required libraries +import requests +import pandas as pd +from datetime import datetime, timedelta +from os import PathLike +from typing import Union, Tuple +import matplotlib.pyplot as plt +import numpy as np +import json +import h5py +import math +from pyproj import Proj, transform + + + +class API_Request: + """ + A class to handle API requests. + + Attributes: + url: The API endpoint URL. + headers: Request headers. + params: URL parameters. + body: Request body parameters.
+ """ + + def __init__(self): + self.url = "https://func-70021-nibelungen-export.azurewebsites.net/samples" + self.headers = { + "Content-Type": "application/json" + } + self.params = { + "code": "nv8QrKftsTHj93hPM4-BiaJJYbWU7blfUGz89KdkuEbpAzFuHX1Rmg==" # der Code aus den über Keeper mitgetielten Zugangdaten + } + self.body = { + "startTime": "2023-08-11T08:00:00Z", + "endTime": "2023-08-11T09:00:00Z", + "meta_channel": True, + "columns": ['E_plus_445LVU_HS--o-_Avg1', + 'E_plus_445LVU_HS--u-_Avg1', + 'E_plus_413TU_HSS-m-_Avg1', + ] + } + + """ + self.body = { + "startTime": "2023-08-11T08:00:00Z", + "endTime": "2023-09-11T08:01:00Z", + "meta_channel": True, + "columns": ['E_plus_413TU_HS--o-_Avg1', + 'E_plus_413TU_HSN-m-_Avg1', + 'E_plus_413TU_HSS-m-_Avg1', + 'E_plus_413TU_HS--u-_Avg1', + 'E_plus_423NU_HSN-o-_Avg1', + 'E_plus_423NUT_HSN-o-_Avg1', + 'E_plus_445LVU_HS--o-_Avg1', + 'E_plus_445LVU_HS--u-_Avg1', + 'E_plus_467NU_HSN-o-_Avg1', + 'E_plus_467NUT_HSN-o_Avg1', + 'F_plus_000TA_KaS-o-_Avg1', + 'F_plus_000S_KaS-o-_Avg1', + 'F_plus_000N_KaS-o-_Avg1', + 'E_plus_040TU_HS--o-_Avg1', + 'E_plus_040TU_HSN-m-_Avg1', + 'E_plus_040TU_HSS-m-_Avg1', + 'E_plus_040TU_HS--u-_Avg1', + 'E_plus_080DU_HSN-o-_Avg1', + 'E_plus_080DU_HSN-u-_Avg1', + 'E_plus_413TI_HSS-m-_Avg', + 'E_plus_040TI_HSS-u-_Avg', + 'E_plus_233BU_HSN-m-_Avg1', + 'E_plus_432BU_HSN-m-_Avg1'] + } + """ + + """ + TU: Temperaturmessung des Überbaus + LI: Luftfeuchtigkeit im Inneren des Hohlkastens + TI: Temperatur im Inneren des Hohlkastens + NU: Neigung des Überbaus + NUT: Temperatur Neigungsaufnehmer + LVU: Längsverschiebung des Überbaus + TA: Außentemperaturmessung + LA: Luftfeuchtigkeit außen + S: Strahlungsintensität + N: Niederschlag + DU: Dehnung des Überbaus + + """ + + + """ + All mesaurements with 10 hz + + ["E_plus_413TU_HS--o-","E_plus_413TU_HS--o-_Avg1","E_plus_413TU_HS--o-_Max1", + "E_plus_413TU_HS--o-_Min1","E_plus_413TU_HSN-m-","E_plus_413TU_HSN-m-_Avg1", + "E_plus_413TU_HSN-m-_Max1","E_plus_413TU_HSN-m-_Min1","E_plus_413TU_HSS-m-", + "E_plus_413TU_HSS-m-_Avg1","E_plus_413TU_HSS-m-_Max1","E_plus_413TU_HSS-m-_Min1", + "E_plus_413TU_HS--u-","E_plus_413TU_HS--u-_Avg1","E_plus_413TU_HS--u-_Max1", + "E_plus_413TU_HS--u-_Min1","E_plus_423NU_HSN-o-","E_plus_423NU_HSN-o-_Avg1", + "E_plus_423NU_HSN-o-_Max1","E_plus_423NU_HSN-o-_Min1","E_plus_423NUT_HSN-o-", + "E_plus_423NUT_HSN-o-_Avg1","E_plus_423NUT_HSN-o-_Max1","E_plus_423NUT_HSN-o-_Min1", + "E_plus_445LVU_HS--o-","E_plus_445LVU_HS--o-_Avg1","E_plus_445LVU_HS--o-_Max1", + "E_plus_445LVU_HS--o-_Min1","E_plus_445LVU_HS--u-","E_plus_445LVU_HS--u-_Avg1", + "E_plus_445LVU_HS--u-_Max1","E_plus_445LVU_HS--u-_Min1","E_plus_467NU_HSN-o-", + "E_plus_467NU_HSN-o-_Avg1","E_plus_467NU_HSN-o-_Max1","E_plus_467NU_HSN-o-_Min1", + "E_plus_467NUT_HSN-o","E_plus_467NUT_HSN-o_Avg1","E_plus_467NUT_HSN-o_Max1", + "E_plus_467NUT_HSN-o_Min1","F_plus_000TA_KaS-o-","F_plus_000TA_KaS-o-_Avg1", + "F_plus_000TA_KaS-o-_Max1","F_plus_000TA_KaS-o-_Min1","F_plus_000LA_KaS-o-", + "F_plus_000LA_KaS-o-_Avg1","F_plus_000LA_KaS-o-_Max1","F_plus_000LA_KaS-o-_Min1", + "F_plus_000S_KaS-o-","F_plus_000S_KaS-o-_Avg1","F_plus_000S_KaS-o-_Max1", + "F_plus_000S_KaS-o-_Min1","F_plus_000N_KaS-o-","F_plus_000N_KaS-o-_Avg1", + "F_plus_000N_KaS-o-_Max1","F_plus_000N_KaS-o-_Min1","E_plus_040TU_HS--o-", + "E_plus_040TU_HS--o-_Avg1","E_plus_040TU_HS--o-_Max1","E_plus_040TU_HS--o-_Min1", + "E_plus_040TU_HSN-m-","E_plus_040TU_HSN-m-_Avg1","E_plus_040TU_HSN-m-_Max1", + 
"E_plus_040TU_HSN-m-_Min1","E_plus_040TU_HSS-m-","E_plus_040TU_HSS-m-_Avg1", + "E_plus_040TU_HSS-m-_Max1","E_plus_040TU_HSS-m-_Min1","E_plus_040TU_HS--u-", + "E_plus_040TU_HS--u-_Avg1","E_plus_040TU_HS--u-_Max1","E_plus_040TU_HS--u-_Min1", + "E_plus_080DU_HSN-o-","E_plus_080DU_HSN-o-_Avg1","E_plus_080DU_HSN-o-_Max1", + "E_plus_080DU_HSN-o-_Min1","E_plus_080DU_HSN-u-","E_plus_080DU_HSN-u-_Avg1", + "E_plus_080DU_HSN-u-_Max1","E_plus_080DU_HSN-u-_Min1","E_plus_413LI_HSS-m-", + "E_plus_413LI_HSS-m-_Avg","E_plus_413LI_HSS-m-_Max","E_plus_413LI_HSS-m-_Min", + "E_plus_040LI_HSS-u-","E_plus_040LI_HSS-u-_Avg","E_plus_040LI_HSS-u-_Max", + "E_plus_040LI_HSS-u-_Min","E_plus_413TI_HSS-m-","E_plus_413TI_HSS-m-_Avg", + "E_plus_413TI_HSS-m-_Max","E_plus_413TI_HSS-m-_Min","E_plus_040TI_HSS-u-", + "E_plus_040TI_HSS-u-_Avg","E_plus_040TI_HSS-u-_Max","E_plus_040TI_HSS-u-_Min", + "E_plus_233BU_HSN-m-_Avg1","E_plus_233BU_HSN-m-_Max1","E_plus_233BU_HSN-m-_Min1", + "E_plus_432BU_HSN-m-_Avg1","E_plus_432BU_HSN-m-_Max1","E_plus_432BU_HSN-m-_Min1"] + """ + + + def Plotting(self): + + # Plotting each column separately + for column in self.df.columns: + plt.plot(self.df.index, self.df[column], label=column) + + plt.xlabel('Timestamp') + plt.ylabel('Values') + plt.title('Time Series Data') + + plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left') + + plt.show() + + def fetch_data(self): + """ + Fetch data from the API and store it in a DataFrame. + """ + + response = requests.post(self.url, headers=self.headers, params=self.params, json=self.body) + + if response.status_code != 200: + raise ValueError(f"Anfrage fehlgeschlagen mit Statuscode {response.status_code}: {response.text}") + data = response.json() + self.df = pd.DataFrame(data["rows"], columns=[col["ColumnName"] for col in data["columns"]]) + #self.df["Timestamp"] = pd.to_datetime(self.df["Timestamp"], format="ISO8601") + self.df["Timestamp"] = pd.to_datetime(self.df["Timestamp"]) + self.df = self.df.set_index("Timestamp") + # print(self.df) + return pd.DataFrame(self.df[self.df.columns], index=pd.to_datetime(self.df.index)) + +# %% +class MetadataSaver: + + def __init__(self, path, df): + self.data = { + "df": { + "columns": [], + "index": [], + "data": [] + }, + "meta": { + "Temp": [], + "Move": [], + "Humidity": [] + } + } + self.path_meta = path["meta_output_path"] + self.path_df = path["df_output_path"] + self.df = df + + def saving_metadata(self): + + # Origin: "49.630742, 8.378049" + + for i in range(len(self.df.columns)): + if self.df.columns[i] == 'E_plus_413TU_HS--o-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [41.42, 0.0, 0.0], + "height": 107.438 + }) + + elif self.df.columns[i] == 'E_plus_413TU_HSS-m-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [41.42, -3.69, 0.0], + "height": 103.748 + }) + + elif self.df.columns[i] == 'E_plus_413TU_HS--u-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [41.42, -4.74, 0.0], + "height": 102.698 + }) + + elif self.df.columns[i] == 'E_plus_423NU_HSN-o-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + 
"coordinate": [42.22, -3.33, 0.0], + "height": 102.698 + }) + + elif self.df.columns[i] == 'E_plus_040TU_HS--o-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [4, 0.0, 0.0], + "height": 107.438 + }) + + elif self.df.columns[i] == 'E_plus_040TU_HSN-m-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [4, -2.37, 0.005], + "height": 105.068 + }) + + elif self.df.columns[i] == 'E_plus_040TU_HSS-m-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [4, -2.37, 0.005], + "height": 105.068 + }) + + elif self.df.columns[i] == 'E_plus_040TU_HS--u-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [4, -4.74, 0.355], + "height": 102.698 + }) + + elif self.df.columns[i] == 'E_plus_413TI_HSS-m-_Avg': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [41.42, -3.33, 0.0], + "height": 102.698 + }) + + elif self.df.columns[i] == 'E_plus_040TI_HSS-u-_Avg': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [4, -4.74, 0.0], + "height": 104.105 + }) + + elif self.df.columns[i] == 'E_plus_423NUT_HSN-o-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [42.22, 0.0, 0.0], + "height": 107.438 + }) + + elif self.df.columns[i] == 'E_plus_467NUT_HSN-o_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [46.82, 0.0, 0.0], + "height": 107.438 + }) + + elif self.df.columns[i] == 'F_plus_000TA_KaS-o-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Temp"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [1, 0.0, 0.0], + "height": 104.105 + }) + + elif self.df.columns[i] == 'E_plus_445LVU_HS--o-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Move"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [44.52, 0.0, 0.0], + "height": 107.438 + }) + + elif self.df.columns[i] == 'E_plus_445LVU_HS--u-_Avg1': + column_name = self.df.columns[i] + self.data["meta"]["Move"].append({ + "name": column_name, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + "coordinate": [44.52, -4.74, 0.0], + "height": 102.698 + }) + + with open(self.path_meta, "w") as json_file: + json.dump(self.data, json_file, indent=2) + + # Saving dataframe of the request + #self.df.to_hdf(self.path_df, key='e', mode='w') + + # Saving dataframe of the request as CSV + self.df.to_csv(self.path_df, index=True) + + # Saving dataframe of the request as json + #self.df.to_json(self.path_df) + + #print(self.data) + return self.path_meta, self.path_df + +# %% +class Translator: + + def __init__(self, path, 
**kwargs): + self.columns = ["Temp", "Move", "Humidity"] + self.path = path + self.meta_path = self.path["meta_output_path"] + self.kwargs = kwargs + + def _default_parameters(self): + return { + "sensors": [] + } + + def translator_to_sensor(self): + self.MKP_meta_output_path = self.path["MKP_meta_output_path"] + + default_parameters_data = self._default_parameters() + with open(self.meta_path, 'r') as f: + self.j = json.load(f) + self.meta = self.j["meta"] + + for key in self.columns: + if key in self.meta.keys(): + for item in self.meta[key]: + sensor_data = { + "id": item["name"], + "type": "", + "sensor_file": "", + "units": "meter", + "dimensionality": "[length]", + "where": item["coordinate"] + } + + if key == "Temp": + sensor_data["type"] = "TemperatureSensor" + sensor_data["sensor_file"] = "temperature_sensor" + sensor_data["units"] = "kelvin" + + elif key == "Move": + sensor_data["type"] = "DisplacementSensor" + sensor_data["sensor_file"] = "displacement_sensor" + + default_parameters_data["sensors"].append(sensor_data) + + with open(self.MKP_meta_output_path, "w") as f: + json.dump(default_parameters_data, f, indent=4) + + return self.MKP_meta_output_path + + @staticmethod + def cartesian_to_geodesic(cartesian, origin=[49.630742, 8.378049]): + # Define the Earth's radius in kilometers + R = 6371.0 + + # Convert origin to radians + origin_lat_rad = math.radians(origin[0]) + origin_lon_rad = math.radians(origin[1]) + + # Convert Cartesian coordinates to geodesic + x, y, z = cartesian + distance = math.sqrt(x**2 + y**2 + z**2) + + # Calculate latitude + latitude = math.asin(math.sin(origin_lat_rad) * math.cos(distance / R) + + math.cos(origin_lat_rad) * math.sin(distance / R) * math.cos(0)) + + # Calculate longitude + longitude = origin_lon_rad + math.atan2(math.sin(0) * math.sin(distance / R) * math.cos(origin_lat_rad), + math.cos(distance / R) - math.sin(origin_lat_rad) * math.sin(latitude)) + + # Convert latitude and longitude to degrees + latitude = math.degrees(latitude) + longitude = math.degrees(longitude) + + return latitude, longitude + + @staticmethod + def geodesic_to_utm(latitude, longitude): + # Define the UTM projection using WGS84 datum + utm_zone_number = math.floor((longitude + 180) / 6) + 1 + utm_zone_letter = 'C' if -80 <= latitude < 72 else 'D' + utm_proj = Proj(proj='utm', zone=utm_zone_number, ellps='WGS84') + + # Convert latitude and longitude to UTM coordinates + utm_easting, utm_northing = utm_proj(longitude, latitude) + + # Format UTM coordinates + utm_easting_str = "{:.0f}".format(utm_easting) + utm_northing_str = "{:.0f}".format(utm_northing) + + return f"{utm_zone_number} {utm_zone_letter} E{utm_easting_str} N{utm_northing_str}" + + def save_to_MKP(self, df): + self.MKP_input_path = self.path["MKP_meta_output_path"] + self.translated_data_path = self.path["MKP_translated_output_path"] + + json_data = { + "df": { + "columns": df.columns.tolist(), + "index": df.index.strftime("%Y-%m-%dT%H:%M:%S.000000Z").tolist(), + "data": df.values.tolist() + }, + "meta": {} + } + + with open(self.MKP_input_path, "r") as file: + self.displacement_data = json.load(file) + + for column in df.columns: + sensor_coords = next((sensor["where"] for sensor in self.displacement_data["sensors"] if sensor["id"] == column), "") + geod_coords = self.cartesian_to_geodesic(sensor_coords) if sensor_coords else "" + utm_coords = self.geodesic_to_utm(*geod_coords) + json_data["meta"][column] = { + "name": column, + "unit": "\u00b0C", + "sample_rate": 0.0016666666666666668, + 
"coordinate": utm_coords, + "height": "" + } + + with open(self.translated_data_path, "w") as json_file: + json.dump(json_data, json_file, indent=4) + + def save_virtual_sensor(self, displacement_values): + self.virtual_sensor_added_output_path = self.path["virtual_sensor_added_output_path"] + + with open(self.MKP_input_path, 'r') as f: + self.metadata = json.load(f) + + with open(self.translated_data_path, 'r') as f: + MKP_data = json.load(f) + try: + with open(self.virtual_sensor_added_output_path, 'r') as f: + VS_data = json.load(f) + except: + VS_data = MKP_data + + if "virtual_sensors" not in VS_data: + VS_data["virtual_sensors"] = {} + + for sensor in self.metadata["sensors"]: + sensor_id = sensor["id"] + position = sensor["where"] + displacement_value = displacement_values.sensors.get(sensor_id, None) + if displacement_value is not None: + displacement_value_list = displacement_value.data[0].tolist() + if sensor_id not in VS_data["virtual_sensors"]: + VS_data["virtual_sensors"][sensor_id] = {"displacements": []} + VS_data["virtual_sensors"][sensor_id]["displacements"].append(displacement_value_list) + + with open(self.virtual_sensor_added_output_path, 'w') as f: + json.dump(VS_data, f, indent=4) diff --git a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/df_output_D.json b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/df_output_D.json deleted file mode 100644 index 443d308..0000000 --- a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/df_output_D.json +++ /dev/null @@ -1 +0,0 @@ -{"columns": ["Temp", "Move"], "index": ["2023-02-13T11:00:00.000000Z", "2023-02-13T11:10:00.000000Z", "2023-02-13T11:20:00.000000Z"], "data": [[-0.3228149414, 109.6334991455], [-0.3199920654, 109.6382293701], [-0.3182678223, 109.6385650635]]} \ No newline at end of file diff --git a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/meta_output_D.json b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/meta_output_D.json deleted file mode 100644 index a41abd3..0000000 --- a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/meta_output_D.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "sensors": [ - { - "id": "DisplacementSensor", - "type": "DisplacementSensor", - "sensor_file": "displacement_sensor", - "units": "meter", - "dimensionality": "[length]", - "where": [ - 1, - 0.0, - 0.0 - ] - } - ] -} \ No newline at end of file diff --git a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/sensors_displacements.json b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/sensors_displacements.json index 40f497d..7ca11b1 100644 --- a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/sensors_displacements.json +++ b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/sensors/sensors_displacements.json @@ -2,8 +2,9 @@ "Sensor_2":{ "type": "DisplacementSensor", "where": [[0,0,50]] + }, + "Sensor_3":{ + "type": "TemperatureSensor", + "where": [[0,0,50]] } - - - } diff --git a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/settings/generate_data_parameters.json b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/settings/generate_data_parameters.json index 9049be9..edeffe1 100644 --- 
a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/settings/generate_data_parameters.json +++ b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/input/settings/generate_data_parameters.json @@ -8,9 +8,11 @@ "sensors_path": "./input/sensors/20230215092338.json", "model_parameters": { "model_name": "displacements", - "df_output_path":"./input/sensors/df_output_D.json", - "meta_output_path":"./input/sensors/meta_output_D.json", - "save_to_MKP_path": "./output/sensors", + "df_output_path":"./input/sensors/API_df_output.csv", + "meta_output_path":"./input/sensors/API_meta_output.json", + "MKP_meta_output_path":"./output/sensors/MKP_meta_output.json", + "MKP_translated_output_path":"./output/sensors/MKP_translated.json", + "virtual_sensor_added_output_path":"./output/sensors/virtual_sensor_added_translated.json", "paraview_output": true, "paraview_output_path": "./output/paraview", "material_parameters":{}, diff --git a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/output/sensors/20240117143050.json b/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/output/sensors/20240117143050.json deleted file mode 100644 index d5b5b95..0000000 --- a/use_cases/nibelungenbruecke_demonstrator_self_weight_fenicsxconcrete/output/sensors/20240117143050.json +++ /dev/null @@ -1 +0,0 @@ -{"df": {"columns": ["Temp", "Move"], "index": ["2023-02-13T11:00:00.000000Z", "2023-02-13T11:10:00.000000Z", "2023-02-13T11:20:00.000000Z"], "data": [[-0.3228149414, -2.9515593314], [-0.3199920654, 22.9599705744], [-0.3182678223, 8.0074334977]]}, "meta": {"Temp": {"name": "Temp", "unit": "\u00b0C", "sample_rate": 0.0016666666666666668, "coordinate": "32 U E455088 N5497785", "height": 104.105}, "Move": {"name": "Move", "unit": "mm", "sample_rate": 0.0016666666666666668, "coordinate": [1, 0.0, 0.0], "height": 99.3}}} \ No newline at end of file
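Usage note: the sketch below is a minimal, illustrative walk-through of how the new retrieval-and-translation utilities introduced in API_sensor_retrieval.py are chained, mirroring GeneratorFeniCSXConcrete.GenerateData. The parameter keys and paths are the example values from generate_data_parameters.json; the API credentials configured in API_Request, existing output directories, and the finite-element solve itself are assumed and not shown here.

```python
# Minimal sketch (illustrative, not part of the change set).
from nibelungenbruecke.scripts.utilities.API_sensor_retrieval import API_Request, MetadataSaver, Translator

model_parameters = {
    "df_output_path": "./input/sensors/API_df_output.csv",
    "meta_output_path": "./input/sensors/API_meta_output.json",
    "MKP_meta_output_path": "./output/sensors/MKP_meta_output.json",
    "MKP_translated_output_path": "./output/sensors/MKP_translated.json",
    "virtual_sensor_added_output_path": "./output/sensors/virtual_sensor_added_translated.json",
}

# 1. Request the measurement window configured in API_Request.body from the Azure endpoint.
api_dataFrame = API_Request().fetch_data()

# 2. Attach coordinates/units to the known channels and write API_meta_output.json,
#    plus the raw DataFrame as API_df_output.csv.
MetadataSaver(model_parameters, api_dataFrame).saving_metadata()

# 3. Convert that metadata into the sensor list consumed by
#    problem.import_sensors_from_metadata(model_parameters["MKP_meta_output_path"]).
translator = Translator(model_parameters)
translator.translator_to_sensor()

# 4. After problem.solve(): translator.save_to_MKP(api_dataFrame) writes the measured
#    data in MKP format, then translator.save_virtual_sensor(problem) appends the
#    virtual-sensor displacements (save_to_MKP must run first, as it sets the paths).
```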