Coverage for aixweather/core_data_format_2_output_file/to_mos_TMY3.py: 100%
56 statements
1"""
2Converts core data to modelica TMY3Reader data
3"""
4import logging
5import calendar
6import datetime as dt
7import pandas as pd
8import numpy as np
10from aixweather import definitions
11from aixweather.imports.utils_import import MetaData
12from aixweather.transformation_functions import auxiliary, time_observation_transformations, pass_through_handling
15logger = logging.getLogger(__name__)

class ModelicaTMY3Format:
    """
    Information on Modelica's TMY3 reader:

    Format info:
    key = output data point name
    core_name = corresponding name matching the format_core_data
    time_of_meas_shift = desired 30 min shift and interpolation to convert a value
        that is "at indicated time" to the "average of the preceding hour" (ind2prec)
    unit = unit of the output data following the naming convention of format_core_data
    nan = default values as stated by the AixLib TMY3 reader; these values are
        filled in for NaNs

    All changes here automatically change the calculations.
    Exception: unit conversions have to be added manually.

    time_of_meas_shift values checked by Martin Rätz (07.08.2023)
    unit values checked by Martin Rätz (07.08.2023)
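
    Example entry (copied from export_format below, shown here only to
    illustrate the key/value structure described above):

        'DryBulbTemp': {'core_name': 'DryBulbTemp', 'unit': 'degC',
                        'time_of_meas_shift': None, 'nan': 20.0}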
35 """

    @classmethod
    def export_format(cls) -> dict:
        return {
            'timeOfYear': {'core_name': '', 'unit': 's', 'time_of_meas_shift': None, 'nan': None},
            'DryBulbTemp': {'core_name': 'DryBulbTemp', 'unit': 'degC', 'time_of_meas_shift': None, 'nan': 20.0},
            'DewPointTemp': {'core_name': 'DewPointTemp', 'unit': 'degC', 'time_of_meas_shift': None, 'nan': 10.0},
            'RelHum': {'core_name': 'RelHum', 'unit': 'percent', 'time_of_meas_shift': None, 'nan': 50},
            'AtmPressure': {'core_name': 'AtmPressure', 'unit': 'Pa', 'time_of_meas_shift': None, 'nan': 101325},
            'ExtHorRad': {'core_name': 'ExtHorRad', 'unit': 'Wh/m2', 'time_of_meas_shift': 'ind2prec', 'nan': '-0'},
            'ExtDirNormRad': {'core_name': 'ExtDirNormRad', 'unit': 'Wh/m2', 'time_of_meas_shift': 'ind2prec', 'nan': '-0'},
            'HorInfra': {'core_name': 'HorInfra', 'unit': 'Wh/m2', 'time_of_meas_shift': 'ind2prec', 'nan': 0},
            'GlobHorRad': {'core_name': 'GlobHorRad', 'unit': 'Wh/m2', 'time_of_meas_shift': 'ind2prec', 'nan': 0},
            'DirNormRad': {'core_name': 'DirNormRad', 'unit': 'Wh/m2', 'time_of_meas_shift': 'ind2prec', 'nan': 0},
            'DiffHorRad': {'core_name': 'DiffHorRad', 'unit': 'Wh/m2', 'time_of_meas_shift': 'ind2prec', 'nan': 0},
            'GlobHorIll': {'core_name': 'GlobHorIll', 'unit': 'lux', 'time_of_meas_shift': 'ind2prec', 'nan': '-0'},
            'DirecNormIll': {'core_name': 'DirecNormIll', 'unit': 'lux', 'time_of_meas_shift': 'ind2prec', 'nan': '-0'},
            'DiffuseHorIll': {'core_name': 'DiffuseHorIll', 'unit': 'lux', 'time_of_meas_shift': 'ind2prec', 'nan': '-0'},
            'ZenithLum': {'core_name': 'ZenithLum', 'unit': 'Cd/m2', 'time_of_meas_shift': 'ind2prec', 'nan': '-0'},
            'WindDir': {'core_name': 'WindDir', 'unit': 'deg', 'time_of_meas_shift': None, 'nan': '-0'},
            'WindSpeed': {'core_name': 'WindSpeed', 'unit': 'm/s', 'time_of_meas_shift': None, 'nan': '-0'},
            'TotalSkyCover': {'core_name': 'TotalSkyCover', 'unit': '1tenth', 'time_of_meas_shift': None, 'nan': 5},
            'OpaqueSkyCover': {'core_name': 'OpaqueSkyCover', 'unit': '1tenth', 'time_of_meas_shift': None, 'nan': 5},
            'Visibility': {'core_name': 'Visibility', 'unit': 'km', 'time_of_meas_shift': None, 'nan': '-0'},
            'CeilingH': {'core_name': 'CeilingH', 'unit': 'm', 'time_of_meas_shift': None, 'nan': 20000.0},
            'WeatherObs': {'core_name': '', 'unit': '', 'time_of_meas_shift': None, 'nan': '-0'},
            'WeatherCode': {'core_name': '', 'unit': '', 'time_of_meas_shift': None, 'nan': '-0'},
            'PrecWater': {'core_name': 'PrecWater', 'unit': 'mm', 'time_of_meas_shift': None, 'nan': '-0'},
            'Aerosol': {'core_name': 'Aerosol', 'unit': '1thousandth', 'time_of_meas_shift': None, 'nan': '-0'},
            'Snow': {'core_name': '', 'unit': 'cm', 'time_of_meas_shift': None, 'nan': '-0'},
            'DaysSinceSnow': {'core_name': '', 'unit': 'days', 'time_of_meas_shift': None, 'nan': '-0'},
            'Albedo': {'core_name': '', 'unit': '', 'time_of_meas_shift': None, 'nan': '-0'},
            'LiquidPrecD': {'core_name': 'LiquidPrecD', 'unit': 'mm/h', 'time_of_meas_shift': None, 'nan': '-0'},
            'LiquidPrepQuant': {'core_name': '', 'unit': '', 'time_of_meas_shift': None, 'nan': '-0'}
        }


def to_mos(
    core_df: pd.DataFrame,
    meta: MetaData,
    start: dt.datetime,
    stop: dt.datetime,
    fillna: bool,
    result_folder: str = None,
    filename: str = None,
    export_in_utc: bool = False
) -> (pd.DataFrame, str):
83 """Create a MOS file from the core data.
85 Args:
86 core_df (pd.DataFrame): DataFrame containing core data.
87 meta (MetaData): Metadata associated with the weather data.
88 start (dt.datetime): Timestamp for the start of the MOS file in UTC.
89 stop (dt.datetime): Timestamp for the end of the MOS file in UTC.
90 fillna (bool): Boolean indicating whether NaN values should be filled.
91 result_folder (str):
92 Path to the folder where to save the file. Default will use
93 the `results_file_path` method.
94 filename (str): Name of the file to be saved. The default is constructed
95 based on the meta-data as well as start and stop time
96 export_in_utc (bool): Timezone to be used for the export.
97 True (default) to use the core_df timezone, UTC+0,
98 False (default) to use timezone from metadata
100 Returns:
101 pd.DataFrame: DataFrame containing the weather data formatted for MOS export,
102 excluding metadata.
103 str: Path to the exported file.
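
    Example:
        A minimal sketch (``core_df`` and ``meta`` are assumed to already be
        prepared by aixweather's import and transformation steps; the chosen
        period is arbitrary)::

            df_mos, file_path = to_mos(
                core_df=core_df,
                meta=meta,
                start=dt.datetime(2022, 1, 1),
                stop=dt.datetime(2022, 12, 31),
                fillna=True,
            )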
104 """
    format_modelica_TMY3 = ModelicaTMY3Format.export_format()

    timezone = 0 if export_in_utc else meta.timezone

    ### evaluate correctness of format
    auxiliary.evaluate_transformations(
        core_format=definitions.format_core_data, other_format=format_modelica_TMY3
    )

    df = core_df.copy()

    ### measurement time conversion
    df = time_observation_transformations.shift_time_by_dict(format_modelica_TMY3, df)

    ### if possible avoid back and forth interpolating -> pass through variables without shifting
    df = pass_through_handling.pass_through_measurements_with_back_and_forth_interpolating(
        df, format_modelica_TMY3
    )

    ### select only desired period
    df = time_observation_transformations.truncate_data_from_start_to_stop(
        df, start, stop
    )

    ### select the desired columns
    df = auxiliary.force_data_variable_convention(df, format_modelica_TMY3)

    first_utc_year = min(df.index.year)
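    # shift the time index from UTC to the export timezone
    # (timezone is 0 when export_in_utc is True, i.e. no shift)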
    df = df.shift(periods=timezone, freq="h", axis=0)

    # In case the timezone shift leads to a year before the actual year of the data,
    # the time index gets shifted. This way, the first day of simulation is always
    # present in the data.
    time_of_year = (
        (df.index.year - first_utc_year) * 365 * 24 * 3600
        + calendar.leapdays(first_utc_year, df.index.year) * 24 * 3600
        + (df.index.dayofyear - 1) * 24 * 3600
        + df.index.hour * 3600
    )
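    # Worked example (assuming hourly data starting in first_utc_year):
    # 02.01. 01:00 of the first year -> 0 years + 0 leap days + 1 day + 1 hour
    # = 86400 s + 3600 s = 90000 s.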
    if not np.any((0 <= time_of_year) & (time_of_year <= 86400)):
        logger.critical("Data does not cover the first day of simulation, carefully check simulation results.")
    df["timeOfYear"] = time_of_year

    # avoid an exact one-year duration between start and end of the data, as
    # this would make the tmy3_reader loop the data
    if (df["timeOfYear"].iloc[-1] - df["timeOfYear"].iloc[0]) == 365 * 24 * 3600:
        # copy last row
        last_row = df.iloc[-1]

        # convert the Series to a new DataFrame with a single row
        new_row_df = last_row.to_frame().T

        # continue the time values (avoid chained-indexing assignment)
        new_row_df.loc[new_row_df.index[0], "timeOfYear"] += 3600
        new_row_df.index = new_row_df.index + dt.timedelta(hours=1)

        # add the new row to df, make sure timeOfYear stays int
        df = pd.concat([df, new_row_df])
        df["timeOfYear"] = df["timeOfYear"].astype(int)

    ### fill nan
    if fillna:
        # fill NaNs of the first and last rows (possibly lost through shifting)
        df.iloc[0, :] = df.bfill().iloc[0, :]
        df.iloc[-1, :] = df.ffill().iloc[-1, :]
        # fill dummy values
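        # (the dummy values are the 'nan' defaults defined in
        #  ModelicaTMY3Format.export_format(), e.g. 20.0 degC for DryBulbTemp)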
        auxiliary.fill_nan_from_format_dict(df, format_modelica_TMY3)

    ### Create header
    header_of = (
        "#1:for TMY3reader"
        + "\ndouble tab1("
        + str(int(df.index.size))
        + ","
        + str(int(df.columns.size))
        + ")"
    )
    header_of += f"\n#LOCATION,{meta.station_name},,,,,{meta.latitude}," \
                 f"{meta.longitude},{timezone},something"
    header_of += (
        "\n#Explanation of Location line:"
        + "\n# Element 7: latitude"
        + "\n# Element 8: longitude"
        + "\n# Element 9: time zone in hours from UTC"
        + "\n#"
    )

    # Data periods
    header_of += (
        "\n#DATA PERIODS, data available from "
        + str(dt.datetime.strptime(str(df.index[0]), "%Y-%m-%d %H:%M:%S"))
        + " "
        + "(second="
        + str(df["timeOfYear"].iloc[0])
        + ") to "
        + str(dt.datetime.strptime(str(df.index[-1]), "%Y-%m-%d %H:%M:%S"))
        + " "
        + "(second="
        + str(df["timeOfYear"].iloc[-1])
        + ")"
        + "\n# info: TMY3Reader requirement: Time 0 = 01.01. 00:00:00 at local"
          " time (see time zone above)"
        + "\n#"
    )
    # data source
    header_of += (
        "\n#USED DATA-COLLECTOR: "
        + "ebc-weather-tool with input source "
        + str(meta.input_source)
        + " (collected at "
        + str(dt.datetime.now())
        + ') "-0" marks unavailable data'
          "\n#Info: The AixLib/IBPSA TMY3reader requires the below mentioned units and"
          " measurement times. Last Check: 28.02.2022"
    )

    # Information about the data
    header_of += (
        "\n#C1 Time in seconds. Beginning of a year is 0s."
        + "\n#C2 Dry bulb temperature in Celsius at indicated time"
        + "\n#C3 Dew point temperature in Celsius at indicated time"
        + "\n#C4 Relative humidity in percent at indicated time"
        + "\n#C5 Atmospheric station pressure in Pa at indicated time, TMY3Reader:"
          " not used per default"
        + "\n#C6 Extraterrestrial horizontal radiation in Wh/m2, TMY3Reader: not used"
        + "\n#C7 Extraterrestrial direct normal radiation in Wh/m2, TMY3Reader: not used"
        + "\n#C8 Horizontal infrared radiation intensity in Wh/m2"
        + "\n#C9 Global horizontal radiation in Wh/m2"
        + "\n#C10 Direct normal radiation in Wh/m2"
        + "\n#C11 Diffuse horizontal radiation in Wh/m2"
        + "\n#C12 Averaged global horizontal illuminance in lux during minutes preceding"
          " the indicated time, TMY3Reader: not used"
        + "\n#C13 Direct normal illuminance in lux during minutes preceding the"
          " indicated time, TMY3Reader: not used"
        + "\n#C14 Diffuse horizontal illuminance in lux during minutes preceding the"
          " indicated time, TMY3Reader: not used"
        + "\n#C15 Zenith luminance in Cd/m2 during minutes preceding the indicated"
          " time, TMY3Reader: not used"
        + "\n#C16 Wind direction at indicated time. N=0, E=90, S=180, W=270"
        + "\n#C17 Wind speed in m/s at indicated time"
        + "\n#C18 Total sky cover in tenth at indicated time"
        + "\n#C19 Opaque sky cover in tenth at indicated time"
        + "\n#C20 Visibility in km at indicated time, TMY3Reader: not used"
        + "\n#C21 Ceiling height in m"
        + "\n#C22 Present weather observation, TMY3Reader: not used"
        + "\n#C23 Present weather codes, TMY3Reader: not used"
        + "\n#C24 Precipitable water in mm, TMY3Reader: not used"
        + "\n#C25 Aerosol optical depth, TMY3Reader: not used"
        + "\n#C26 Snow depth in cm, TMY3Reader: not used"
        + "\n#C27 Days since last snowfall, TMY3Reader: not used"
        + "\n#C28 Albedo, TMY3Reader: not used"
        + "\n#C29 Liquid precipitation depth in mm/h at indicated time, TMY3Reader: not used"
        + "\n#C30 Liquid precipitation quantity, TMY3Reader: not used"
    )
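
    # For orientation (values depend on the actual data), the assembled header
    # starts roughly like:
    #   #1:for TMY3reader
    #   double tab1(<number of rows>,30)
    #   #LOCATION,<station_name>,,,,,<latitude>,<longitude>,<timezone>,something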

    ### write to csv
    if filename is None:
        _utc_flag = "_utc" if export_in_utc else ""
        filename = (
            f"{meta.station_id}_{start.strftime('%Y%m%d')}_{stop.strftime('%Y%m%d')}"
            f"_{meta.station_name}{_utc_flag}.mos"
        )
    filepath = definitions.results_file_path(filename, result_folder)

    df.to_csv(
        filepath,
        sep="\t",
        float_format="%.2f",
        header=False,
        index_label="timeOfYear",
        index=False,
    )

    # Read the contents and prepend the header_of to the file
    with open(filepath, "r+") as file:
        content = file.read()
        file.seek(0, 0)
        file.write(f"{header_of}\n{content}")

    logger.info("MOS file saved to %s.", filepath)
    return df, filepath