Coverage for aixcalibuha/utils/visualizer.py: 87%
305 statements
« prev ^ index » next coverage.py v7.4.1, created at 2024-01-27 10:48 +0000
1"""
2Module with classes and function to help visualize
3different processes inside the framework. Both plots
4and print-function/log-function will be implemented here.
5The Visualizer Class inherits the Logger class, as logging
6will always be used as a default.
7"""
8import os
9import logging
10import csv
11from shutil import copyfile
12import matplotlib.pyplot as plt
13import numpy as np
14from ebcpy.utils import setup_logger
15import aixcalibuha
def short_name(ini_name: str, max_len: int):
    """
    Shortens long strings to a max length from the front.
    long_string_name => ...ing_name with len(new_name) = max_len

    :param str ini_name:
        Long string to shorten.
    :param int max_len:
        Max len of the new string.
    :return: str
        The shorten string.
    """
    # Short enough already - nothing to do.
    if len(ini_name) <= max_len:
        return ini_name
    # Use at most three leading dots; fewer if the overhang is smaller,
    # so the result is always exactly max_len characters long.
    num_dots = min(3, len(ini_name) - max_len)
    return "." * num_dots + ini_name[num_dots - max_len:]
class CalibrationLogger:
    """Base class for showing the process of functions in
    this Framework with print-statements and saving everything
    relevant as a log-file.

    :param str,os.path.normpath cd:
        Directory where to store the output of the Logger and possible
        child-classes. If the given directory can not be created, an error
        will be raised.
    :param str name:
        Name of the reason of logging, e.g. classification, processing etc.
    :param aixcalibuha.CalibrationClass calibration_class:
        Calibration class used in the calibration-process.
    :param logging.Logger logger:
        If given, this logger is used to print and or save the messages.
        Else, a new one is set up.
    """

    # Instantiate class parameters
    # These drive the fixed-width column layout of the iteration log table.
    integer_prec = 4  # Number of integer parts
    decimal_prec = 6  # Number of decimal places for float values
    _counter_calibration = 0  # Number of function calls of calibration
    _prec = decimal_prec  # Precision actually used in format specs
    _width = integer_prec + decimal_prec + 1  # Calculate the actual width

    def __init__(self, cd, name, calibration_class, logger=None):
        """Instantiate class parameters"""
        self._tuner_paras = None
        self._goals = None
        if logger is None:
            # No logger supplied: create one that logs into ``cd``.
            self.logger = setup_logger(cd=cd, name=name)
        else:
            if not isinstance(logger, logging.Logger):
                raise TypeError(f"Given logger is of type {type(logger)} "
                                f"but should be type logging.Logger")
            self.logger = logger
        # Setter creates the directory if needed.
        self.cd = cd
        # Setter also propagates tuner_paras and goals (see property below).
        self.calibration_class = calibration_class

    def log(self, msg, level=logging.INFO):
        """Wrapper function to directly log in the internal logger"""
        self.logger.log(msg=msg, level=level)

    def error(self, msg):
        """Wrapper function to directly log an error"""
        self.logger.error(msg=msg)

    def _set_prec_and_with_for_tuner_paras(self):
        # NOTE(review): method name typo - "with" should read "width";
        # kept as-is because subclasses / callers may reference this name.
        # Recomputes the integer column width from the tuner-parameter
        # bounds so that the largest bound value still fits the column,
        # and resets the iteration counter for a fresh calibration run.
        if self.tuner_paras.bounds is None:
            self.integer_prec = 4  # Number of integer parts
        else:
            bounds_min, bounds_max = self.tuner_paras.get_bounds()
            # Largest absolute value decides how many integer digits are needed.
            maximal_value = max(max(bounds_max), max(abs(bounds_min)))
            self.integer_prec = len(str(int(maximal_value)))
        self._counter_calibration = 0  # Number of function calls of calibration
        self._width = self.integer_prec + self.decimal_prec + 1  # Calculate the actual width

    def calibration_callback_func(self, xk, obj, verbose_information, penalty=None):
        """
        Logs the current values of the objective function.

        :param np.array xk:
            Array with the current values of the calibration
        :param float obj:
            Current objective value.
        :param dict verbose_information:
            A dict with difference-values of for all goals and the
            corresponding weightings
        :param float penalty:
            Penaltyfactor from current evaluation
        """
        # Descale from optimizer space [0, 1] back to physical bounds.
        xk_descaled = self.tuner_paras.descale(xk)
        self._counter_calibration += 1
        # NOTE(review): both branches are equivalent since penalty defaults
        # to None in _get_tuner_para_values_as_string as well.
        if penalty is None:
            info_string = self._get_tuner_para_values_as_string(
                xk_descaled, obj, verbose_information
            )
        else:
            info_string = self._get_tuner_para_values_as_string(
                xk_descaled, obj, verbose_information, penalty
            )
        self.logger.info(info_string)

    def validation_callback_func(self, obj):
        """
        Log the validation result information

        :param float obj:
            Objective value of validation.
        """
        self.log(f"{self.goals.statistical_measure} of validation: {obj}")

    def save_calibration_result(self, best_iterate, model_name, **kwargs):
        """
        Process the result, re-run the simulation and generate
        a logFile for the minimal quality measurement

        :param dict best_iterate:
            Result object of the minimization
        :param str model_name:
            Name of the model being calibrated
        """
        if "Iterate" not in best_iterate:
            self.logger.error("No best iterate. Can't save result")
            return
        result_log = f"\nResults for calibration of model: {model_name}\n"
        result_log += f"Number of iterations: {self._counter_calibration}\n"
        result_log += "Final parameter values:\n"
        # Set the iteration counter to the actual number of the best iteration is printed
        self._counter_calibration = best_iterate["Iterate"]
        result_log += f"{self._get_tuner_para_names_as_string()}\n"
        final_values = self._get_tuner_para_values_as_string(
            xk_descaled=best_iterate["Parameters"],
            obj=best_iterate["Objective"],
            unweighted_objective=best_iterate["Unweighted Objective"],
            penalty=best_iterate["Penaltyfactor"])
        result_log += f"{final_values}\n"
        self.logger.info(result_log)
        # Reset so a following calibration run starts counting at zero again.
        self._counter_calibration = 0

    def calibrate_new_class(self, calibration_class, cd=None, for_validation=False):
        """Function to setup the figures for a new class of calibration.
        This function is called when instantiating this Class. If you
        uses continuuos calibration classes, call this function before
        starting the next calibration-class.

        :param aixcalibuha.CalibrationClass calibration_class:
            Class holding information on names, tuner_paras, goals
            and time-intervals of calibration.
        :param str,os.path.normpath cd:
            Optional change in working directory to store files
        :param bool for_validation:
            If it's only for validation, only plot the goals
        """
        if cd is not None:
            self.cd = cd
        self.calibration_class = calibration_class

    @property
    def cd(self) -> str:
        """Get the current working directory for storing plots."""
        return self._cd

    @cd.setter
    def cd(self, cd: str):
        """Set the current working directory for storing plots."""
        # Create the directory on first use instead of failing later on save.
        if not os.path.exists(cd):
            os.makedirs(cd)
        self._cd = cd

    @property
    def tuner_paras(self) -> aixcalibuha.TunerParas:
        """Get the currently used TunerParas instance."""
        return self._tuner_paras

    @tuner_paras.setter
    def tuner_paras(self, tuner_paras):
        """
        Set the currently used TunerParas object to use the information for logging.

        :param tuner_paras: aixcalibuha.
        """
        if not isinstance(tuner_paras, aixcalibuha.TunerParas):
            raise TypeError(f"Given tuner_paras is of "
                            f"type {type(tuner_paras).__name__} "
                            "but type TunerParas is needed.")
        self._tuner_paras = tuner_paras
        # Column widths depend on the bounds of the new tuner parameters.
        self._set_prec_and_with_for_tuner_paras()

    @property
    def goals(self) -> aixcalibuha.Goals:
        """Get current goals instance"""
        return self._goals

    @goals.setter
    def goals(self, goals: aixcalibuha.Goals):
        """
        Set the currently used Goals object to use the information for logging.

        :param ebcpy.aixcalibuha.Goals goals:
            Goals to be set to the object
        """
        if not isinstance(goals, aixcalibuha.Goals):
            raise TypeError(f"Given goals is of type {type(goals).__name__} "
                            "but type Goals is needed.")
        self._goals = goals

    @property
    def calibration_class(self) -> aixcalibuha.CalibrationClass:
        """Get current calibration class object"""
        return self._calibration_class

    @calibration_class.setter
    def calibration_class(self, calibration_class: aixcalibuha.CalibrationClass):
        """
        Set the current calibration class.

        :param aixcalibuha.CalibrationClass calibration_class:
            Class holding information on names, tuner_paras, goals
            and time-intervals of calibration.
        """
        if not isinstance(calibration_class, aixcalibuha.CalibrationClass):
            raise TypeError(f"Given calibration_class "
                            f"is of type {type(calibration_class).__name__} "
                            "but type CalibrationClass is needed.")
        self._calibration_class = calibration_class
        # Propagate to the dedicated setters so validation + width setup run.
        self.tuner_paras = calibration_class.tuner_paras
        if calibration_class.goals is not None:
            self.goals = calibration_class.goals

    def log_initial_names(self):
        """Function to log the initial names and the statistical measure
        before calibration."""
        self.logger.info(self._get_tuner_para_names_as_string())

    def log_intersection_of_tuners(self, intersected_tuner_parameters, **kwargs):
        """
        If an intersection for multiple classes occurs, an information about
        the statistics of the dataset has to be provided.

        :param dict intersected_tuner_parameters:
            Dict with cols being the name of the tuner parameter and the
            value being the list with all the different "best" values for
            the tuner parameter.
        """
        _formatted_str = "\n".join([f"{tuner}: {values}"
                                    for tuner, values in intersected_tuner_parameters.items()])
        self.logger.info("Multiple 'best' values for the following tuner parameters "
                         "were identified in different classes:\n%s", _formatted_str)

    def _get_tuner_para_names_as_string(self):
        """
        Returns a string with the names of current tunerParameters

        :return: str info_string
            The desired string
        """
        initial_names = list(self.tuner_paras.get_names())

        # 9 characters to fit the literal word "Iteration" below.
        info_string = "{0:9s}".format("Iteration")

        # Names of tuner parameter
        for ini_name in initial_names:
            # Limit string length to a certain amount.
            # The full name has to be displayed somewhere else
            formatted_name = short_name(ini_name=ini_name, max_len=self._width)
            info_string += "   {0:{width}s}".format(formatted_name, width=self._width)
        # Add string for qualitative measurement used (e.g. NRMSE, MEA etc.)
        info_string += "     {0:{width}s}".format(self.goals.statistical_measure,
                                                  width=self._width)
        info_string += "penaltyfactor"
        info_string += f"   Unweighted {self.goals.statistical_measure}"
        return info_string

    def _get_tuner_para_values_as_string(self,
                                         xk_descaled,
                                         obj,
                                         unweighted_objective,
                                         penalty=None):
        """
        Returns a string with the values of current tuner parameters
        as well as the objective value.

        :param np.array xk_descaled:
            Array with the current values of the calibration, descaled to bounds
        :param float obj:
            Current objective value.
        :param dict unweighted_objective:
            Further information about the objective value of each individual goal
        :param None/float penalty:
            Penaltyfactor.
        :return: str
            The desired string.
        """
        # This will limit the number of iterations to 999999999 (for correct format).
        # More iterations will most likely never be used.
        info_string = '{0:9d}'.format(self._counter_calibration)

        for x_value in xk_descaled:
            info_string += "   {0:{width}.{prec}f}".format(x_value,
                                                           width=self._width,
                                                           prec=self._prec)
        # Add the last return value of the objective function.
        info_string += "     {0:{width}.{prec}f}".format(obj, width=self._width,
                                                         prec=self._prec)
        # NOTE(review): truthiness check - a penalty of exactly 0 would be
        # rendered as "-" just like None; confirm 0 is not a valid penalty.
        if penalty:
            info_string += "        {0:{width}.{prec}f}".format(penalty, width=self._width,
                                                                prec=self._prec - 3)
        else:
            info_string += "            {}".format("-")
        # Render each goal's contribution as "weight*value" summed up.
        _verbose_info = "= " + " + ".join(["{0:.{prec}}*{1:.{prec}}".format(val[0],
                                                                            val[1], prec=4)
                                           for goal, val in unweighted_objective.items()])
        info_string += f"  {_verbose_info}"

        return info_string
class CalibrationVisualizer(CalibrationLogger):
    """More advanced class to not only log ongoing function
    evaluations but also show the process of the functions
    by plotting interesting causalities and saving these plots.

    :keyword boolean show_plot:
        If False, all created plots are not shown during calibration but only
        stored at the end of the process.
    :keyword boolean create_tsd_plot:
        If False, the plot of the time series data (goals) is not created and
        thus shown in during calibration. It therefore is also not stored, even if
        you set the save_tsd_plot keyword-argument to true.
    :keyword boolean save_tsd_plot:
        If True, at each iteration the created plot of the
        time-series is saved. This may make the process much slower
    :keyword float show_plot_pause_time:
        Set the time (in seconds) the plt.draw() pauses. May be altered if show_plot
        yields plot which disappear to fast. Default is 1-e3 s.
    """

    def __init__(self, cd,
                 name,
                 calibration_class,
                 logger=None,
                 **kwargs):
        """Instantiate class parameters"""

        # Instantiate the logger:
        super().__init__(cd=cd,
                         name=name,
                         calibration_class=calibration_class,
                         logger=logger)

        # Setup dummy parameters so class-functions
        # know the type of those later created objects:
        self._n_cols_goals, self._n_rows_goals, self._n_cols_tuner, self._n_rows_tuner = 1, 1, 1, 1
        self.fig_tuner, self.ax_tuner = None, None
        self.fig_goal, self.ax_goal = None, None
        self.fig_obj, self.ax_obj = None, None
        self._num_goals = 0
        # Sub-directory (below cd) where per-iteration goal plots are stored.
        self.goals_dir = "TimeSeriesPlot"
        # Set supported kwargs:
        plt.ioff()  # Turn off interactive mode.
        self.save_tsd_plot = kwargs.get("save_tsd_plot", False)
        self.create_tsd_plot = kwargs.get("create_tsd_plot", True)
        self.show_plot = kwargs.get("show_plot", True)
        self.file_type = kwargs.get("file_type", "svg")
        self.show_plot_pause_time = kwargs.get("show_plot_pause_time", 1e-3)
        if not isinstance(self.show_plot_pause_time, (float, int)):
            raise TypeError(
                f"Given 'show_plot_pause_time' needs to "
                f"be float or int but is {type(self.show_plot_pause_time)}."
            )

    def calibrate_new_class(self, calibration_class, cd=None, for_validation=False):
        """Function to setup the figures for a new class of calibration.
        This function is called when instantiating this Class. If you
        uses continuuos calibration classes, call this function before
        starting the next calibration-class.

        :param aixcalibuha.CalibrationClass calibration_class:
            Class holding information on names, tuner_paras, goals
            and time-intervals of calibration.
        :param str,os.path.normpath cd:
            Optional change in working directory to store files
        :param bool for_validation:
            If it's only for validation, only plot the goals
        """
        super().calibrate_new_class(calibration_class, cd)

        name = calibration_class.name

        # Close all old figures to create new ones.
        plt.close("all")

        if not for_validation:
            # %% Set-up figure for objective-plotting
            self.fig_obj, self.ax_obj = plt.subplots(1, 1)
            self.fig_obj.suptitle(name + ": Objective")
            self.ax_obj.set_ylabel(self.goals.statistical_measure)
            self.ax_obj.set_xlabel("Number iterations")
            # If the changes are small, it seems like the plot does
            # not fit the printed values. This boolean assures that no offset is used.
            self.ax_obj.ticklabel_format(useOffset=False)

            # %% Setup Tuner-Paras figure
            # Make a almost quadratic layout based on the number of tuner-parameters evolved.
            num_tuners = len(self.tuner_paras.get_names())
            self._n_cols_tuner = int(np.floor(np.sqrt(num_tuners)))
            self._n_rows_tuner = int(np.ceil(num_tuners / self._n_cols_tuner))
            # squeeze=False guarantees a 2D axes array even for a 1x1 grid.
            self.fig_tuner, self.ax_tuner = plt.subplots(self._n_rows_tuner, self._n_cols_tuner,
                                                         squeeze=False, sharex=True)
            self.fig_tuner.suptitle(name + ": Tuner Parameters")
            self._plot_tuner_parameters(for_setup=True)

        # %% Setup Goals figure
        # Only a dummy, as the figure is recreated at every iteration
        if self.goals is not None:
            self._num_goals = len(self.goals.get_goals_list())
            self._n_cols_goals = int(np.floor(np.sqrt(self._num_goals)))
            self._n_rows_goals = int(np.ceil(self._num_goals / self._n_cols_goals))
            self.fig_goal, self.ax_goal = plt.subplots(self._n_rows_goals,
                                                       self._n_cols_goals,
                                                       squeeze=False,
                                                       sharex=True)
            self.fig_goal.suptitle(name + ": Goals")

    def calibration_callback_func(self, xk, obj, verbose_information, penalty=None):
        """
        Logs the current values of the objective function
        and updates all live plots (objective, tuner parameters, goals).

        :param np.array xk:
            Array with the current values of the calibration
        :param float obj:
            Current objective value.
        :param dict verbose_information:
            A dict with difference-values of for all goals and the
            corresponding weightings
        :param float penalty:
            Penaltyfactor from current evaluation
        """
        # Call the logger function to print and log
        super().calibration_callback_func(xk, obj, verbose_information, penalty)
        # Plot the current objective value
        self.ax_obj.plot(self._counter_calibration, obj, "ro")

        # Plot the tuner parameters
        self._plot_tuner_parameters(xk=xk)

        # Plot the measured and simulated data
        if self.goals is not None and self.create_tsd_plot:
            self._plot_goals()

        self._show_plot()

    def validation_callback_func(self, obj):
        """
        Log the validation result information.
        Also plot if selected.

        :param float obj:
            Objective value of validation.
        """
        super().validation_callback_func(obj=obj)
        # Plot the measured and simulated data
        if self.goals is not None and self.create_tsd_plot:
            self._plot_goals(at_validation=True)

        self._show_plot(for_validation=True)

    def _show_plot(self, for_validation=False):
        """Show plot if activated"""
        if not self.show_plot:
            return
        plt.draw()
        if self.create_tsd_plot:
            self.fig_goal.canvas.draw_idle()
        if not for_validation:
            self.fig_obj.canvas.draw_idle()
            self.fig_tuner.canvas.draw_idle()
            plt.pause(self.show_plot_pause_time)
        else:
            # NOTE(review): blocking call - execution halts until the
            # validation plot window is closed by the user.
            plt.show()

    def save_calibration_result(self, best_iterate, model_name, **kwargs):
        """
        Process the result, re-run the simulation and generate
        a logFile for the minimal quality measurement.
        Additionally saves the tuner/objective figures, copies the best
        goals plot and writes the results to a csv file.

        :param scipy.optimize.minimize.result best_iterate:
            Result object of the minimization
        :param str model_name:
            Name of the model being calibrated
        :keyword int itercount:
            Required. Number of the current (re-)calibration iteration,
            used to name the output directory.
        :keyword duration:
            Required. Duration of the calibration, stored in the csv.
        """
        if "Iterate" not in best_iterate:
            self.logger.error("No best iterate. Can't save result")
            return
        super().save_calibration_result(best_iterate, model_name, **kwargs)
        # NOTE(review): raises KeyError if itercount/duration are not passed -
        # confirm all callers always supply both kwargs.
        itercount = kwargs["itercount"]
        duration = kwargs["duration"]

        # Extract filepathes
        iterpath = os.path.join(self.cd, f'Iteration_{itercount}')
        if not os.path.exists(iterpath):
            os.mkdir(iterpath)

        filepath_tuner = os.path.join(iterpath, "tuner_parameter_plot.%s" % self.file_type)
        filepath_obj = os.path.join(iterpath, "objective_plot.%s" % self.file_type)
        if self.save_tsd_plot:
            bestgoal = os.path.join(self.cd,
                                    self.goals_dir,
                                    str(best_iterate["Iterate"]) + f"_goals.{self.file_type}")
            # Copy best goals figure
            # NOTE(review): hard-coded backslash makes this Windows-only;
            # should use os.path.join for portability.
            copyfile(bestgoal, f'{iterpath}\\best_goals.%s' % self.file_type)

        # Save calibration results as csv
        res_dict = dict(best_iterate['Parameters'])
        res_dict['Objective'] = best_iterate["Objective"]
        res_dict['Duration'] = duration
        # NOTE(review): same Windows-only backslash path issue as above.
        res_csv = f'{self.cd}\\Iteration_{itercount}\\RESUL' \
                  f'TS_{self.calibration_class.name}_iteration{itercount}.csv'
        # NOTE(review): opened without newline='' - csv module docs recommend
        # newline='' to avoid blank lines on Windows.
        with open(res_csv, 'w') as rescsv:
            writer = csv.DictWriter(rescsv, res_dict.keys())
            writer.writeheader()
            writer.writerow(res_dict)

        # Save figures & close plots
        self.fig_tuner.savefig(filepath_tuner)
        self.fig_obj.savefig(filepath_obj)
        plt.close("all")

        if best_iterate['better_current_result'] and self.save_tsd_plot:
            # save improvement of recalibration ("best goals df" as csv)
            best_iterate['Goals'].get_goals_data().to_csv(
                os.path.join(iterpath, 'goals_df.csv'),
                sep=",",
                decimal="."
            )

    def log_intersection_of_tuners(self, intersected_tuner_parameters, **kwargs):
        """
        If an intersection for multiple classes occurs, an information about
        the statistics of the dataset has to be provided.
        Also creates and saves a violin plot of the intersected values.

        :param dict intersected_tuner_parameters:
            Dict with cols being the name of the tuner parameter and the
            value being the list with all the different "best" values for
            the tuner parameter.
        :keyword int itercount:
            Optional. If given, included in the saved plot's file name.
        """
        super().log_intersection_of_tuners(intersected_tuner_parameters, **kwargs)
        x_labels = intersected_tuner_parameters.keys()
        data = list(intersected_tuner_parameters.values())
        fig_intersection, ax_intersection = plt.subplots(1, len(x_labels), squeeze=False)
        for i, x_label in enumerate(x_labels):
            # Remove name of record (modelica) for visualization
            x_label_vis = x_label.replace('TunerParameter.', '')
            cur_ax = ax_intersection[0][i]
            cur_ax.violinplot(data[i], showmeans=True, showmedians=False,
                              showextrema=True)
            # NOTE(review): "Ergebnisse" is German for "Results" - leftover
            # non-English label in the plot legend; confirm intended.
            cur_ax.plot([1] * len(data[i]), data[i], "ro", label="Ergebnisse")

            cur_ax.get_xaxis().set_tick_params(direction='out')
            cur_ax.xaxis.set_ticks_position('bottom')
            # Single tick at x=1 labeled with the parameter name.
            cur_ax.set_xticks(np.arange(1, 2))
            cur_ax.set_xlim(0.25, 1.75)
            cur_ax.set_xticklabels([x_label_vis])
            cur_ax.legend(loc="upper right")

        # Always store in the parent diretory as this info is relevant for all classes
        fig_intersection.suptitle("Intersection of Tuner Parameters")
        path_intersections = os.path.join(os.path.dirname(self.cd), "tunerintersections")
        if not os.path.exists(path_intersections):
            os.makedirs(path_intersections)
        if "itercount" in kwargs:
            fig_intersection.savefig(
                os.path.join(
                    path_intersections,
                    f'tuner_parameter_intersection_plot_it{kwargs["itercount"]}.{self.file_type}')
            )
        else:
            fig_intersection.savefig(
                os.path.join(path_intersections,
                             f'tuner_parameter_intersection_plot.{self.file_type}')
            )

        if self.show_plot:
            plt.draw()
            # NOTE(review): 15 s hard-coded pause blocks the process;
            # consider making this configurable.
            plt.pause(15)

    def _plot_tuner_parameters(self, xk=None, for_setup=False):
        """
        Plot the tuner parameter values history for better user interaction

        :param np.array xk:
            current iterate, scaled.
        :param bool for_setup:
            True if the function is called to initialize the calibration
        """
        tuner_counter = 0
        # Walk the (rows x cols) axes grid; unused trailing cells are hidden.
        for row in range(self._n_rows_tuner):
            for col in range(self._n_cols_tuner):
                cur_ax = self.ax_tuner[row][col]
                tuner_names_vis = self.tuner_paras.get_names()
                # Remove name of record (modelica)
                for i, name in enumerate(tuner_names_vis):
                    tuner_names_vis[i] = name.replace('TunerParameter.', '')
                if tuner_counter >= len(self.tuner_paras.get_names()):
                    cur_ax.axis("off")
                else:
                    tuner_para_name = self.tuner_paras.get_names()[tuner_counter]
                    if for_setup:
                        # Initial setup: label axis, draw bound lines and
                        # the initial value once.
                        cur_ax.set_ylabel(tuner_names_vis[tuner_counter])
                        max_value = self.tuner_paras.get_value(tuner_para_name, "max")
                        min_value = self.tuner_paras.get_value(tuner_para_name, "min")
                        ini_val = self.tuner_paras.get_value(tuner_para_name, "initial_value")
                        cur_ax.axhline(max_value, color="r")
                        cur_ax.axhline(min_value, color="r")
                        cur_ax.plot(self._counter_calibration, ini_val, "bo")
                    if xk is not None:
                        # Per-iteration update: add the descaled current value.
                        cur_val = self.tuner_paras.descale(xk)[tuner_counter]
                        cur_ax.plot(self._counter_calibration, cur_val, "bo")
                    tuner_counter += 1

    def _plot_goals(self, at_validation=False):
        """Plot the measured and simulated data for the current iterate"""

        # Get information on the relevant-intervals of the calibration:
        rel_intervals = self.calibration_class.relevant_intervals

        _goals_df = self.goals.get_goals_data()
        _goals_names = self.goals.get_goals_list()
        goal_counter = 0
        for row in range(self._n_rows_goals):
            for col in range(self._n_cols_goals):
                cur_ax = self.ax_goal[row][col]
                # Clear the axes - this figure is redrawn every iteration.
                cur_ax.cla()
                if goal_counter >= self._num_goals:
                    cur_ax.axis("off")
                else:
                    cur_goal = _goals_names[goal_counter]
                    cur_ax.plot(_goals_df[cur_goal, self.goals.sim_tag_str],
                                label=cur_goal + f"_{self.goals.sim_tag_str}",
                                linestyle="--", color="r")
                    cur_ax.plot(_goals_df[cur_goal, self.goals.meas_tag_str],
                                label=cur_goal + f"_{self.goals.meas_tag_str}",
                                color="b")
                    # Mark the disregarded intervals in grey
                    _start = self.calibration_class.start_time
                    _first = True  # Only create one label
                    for interval in rel_intervals:
                        _end = interval[0]
                        if _first:
                            cur_ax.axvspan(_start, _end,
                                           facecolor="grey",
                                           alpha=0.7,
                                           label="Disregarded Interval")
                            _first = False
                            # Only create one label
                        else:
                            cur_ax.axvspan(_start, _end,
                                           facecolor="grey", alpha=0.7)
                        _start = interval[1]
                    # Final plot of grey
                    cur_ax.axvspan(rel_intervals[-1][-1],
                                   self.calibration_class.stop_time,
                                   facecolor="grey",
                                   alpha=0.5)

                    cur_ax.legend(loc="lower right")
                    cur_ax.set_xlabel("Time / s")
                goal_counter += 1

        if at_validation:
            name_id = "Validation"
        else:
            name_id = self._counter_calibration

        if self.save_tsd_plot:
            _savedir = os.path.join(self.cd, self.goals_dir)
            if not os.path.exists(_savedir):
                os.makedirs(_savedir)
            self.fig_goal.savefig(
                os.path.join(_savedir,
                             f"{name_id}_goals.{self.file_type}"))