diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/categorical/categorical.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/categorical/categorical.py index f09d3781..108f859e 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/categorical/categorical.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/categorical/categorical.py @@ -1,27 +1,25 @@ """Categorical objects for grouping data into bins.""" -# pylint: disable=W0212,E0401 +# pylint: disable=E0401 +# pylint: disable=W0212 import calendar -import textwrap from dataclasses import dataclass, field from enum import Enum, auto from typing import Any import matplotlib.ticker as mticker + # pylint: enable=E0401 import numpy as np import pandas as pd from ladybug.legend import Color +from ladybug.location import Location from matplotlib import patches from matplotlib import pyplot as plt -from matplotlib.colors import (BoundaryNorm, Colormap, ListedColormap, to_hex, - to_rgba) -from matplotlib.figure import Figure -from matplotlib.legend import Legend -from mpl_toolkits.axes_grid1 import make_axes_locatable +from matplotlib.colors import BoundaryNorm, Colormap, ListedColormap, to_hex, to_rgba from python_toolkit.bhom.analytics import bhom_analytics -from ..helpers import rolling_window, validate_timeseries +from ..helpers import rolling_window, sunrise_sunset, validate_timeseries from ..plot._heatmap import heatmap from ..plot.utilities import contrasting_color @@ -32,8 +30,8 @@ class Categorical: Args: bins (tuple[float], optional): - The bin edges for the categories. These are right-inclusive, with - the exception of the first bin which is also left-inclusive. + The bin edges for the categories. These are right-inclusive, with the exception of the first bin which is + also left-inclusive. bin_names (tuple[str], optional): The names of the categories. colors (tuple[str | tuple], optional): @@ -100,11 +98,9 @@ def from_cmap( cmap (Colormap): The colormap to use. 
bin_names (tuple[str], optional): - The names for each of the bins. Defaults to () which names - each bin using its boundaries. + The names for each of the bins. Defaults to () which names each bin using its boundaries. name (str, optional): - The name for this categories object. Defaults to "" which - uses the colormap name. + The name for this categories object. Defaults to "" which uses the colormap name. Returns: Categories: The resulting categories object. @@ -196,7 +192,9 @@ def norm(self) -> BoundaryNorm: else: pass if len(boundaries) == 1: - raise ValueError("The current Categorical object has unbounded edges and cannot be used to create a BoundaryNorm.") + raise ValueError( + "The current Categorical object has unbounded edges and cannot be used to create a BoundaryNorm." + ) return BoundaryNorm(boundaries=boundaries, ncolors=self.cmap.N) @property @@ -217,7 +215,10 @@ def lb_colors(self) -> tuple[Color]: tuple[Color]: The ladybug color objects. """ - return tuple(Color(*i) for i in (np.array([to_rgba(color) for color in self.colors]) * 255).astype(int)) + return tuple( + Color(*i) + for i in (np.array([to_rgba(color) for color in self.colors]) * 255).astype(int) + ) @property def _bin_name_interval(self) -> dict[str, pd.Interval]: @@ -309,7 +310,10 @@ def get_color(self, value: float | int, as_array: bool = False) -> str: The color as a hex string. """ if value <= self.bins[0] or value > self.bins[-1]: - raise ValueError(f"The input value/s are outside the range of the categories ({min(self).left} < x <= {max(self).right}).") + raise ValueError( + "The input value/s are outside the range of the categories " + f"({min(self).left} < x <= {max(self).right})." + ) color = self.cmap(self.norm(value)) if not as_array: return to_hex(color, keep_alpha=True) @@ -327,9 +331,13 @@ def categorise(self, data: Any) -> pd.Categorical: pd.Categorical: The categorised data. 
""" - categorical = pd.cut(data, self.bins, labels=self.bin_names_detailed, include_lowest=True) + categorical = pd.cut( + data, self.bins, labels=self.bin_names, include_lowest=True, right=True + ) if categorical.isna().any(): - raise ValueError(f"The input value/s are outside the range of the categories ({self.bins[0]} <= x <= {self.bins[-1]}).") + raise ValueError( + f"The input value/s are outside the range of the categories ({self.bins[0]} <= x < {self.bins[-1]})." + ) return categorical @bhom_analytics() @@ -350,7 +358,7 @@ def value_counts( pd.Series: The number of counts within each categorical bin. """ - result = self.categorise(data).value_counts()[list(self.bin_names_detailed)] + result = self.categorise(data).value_counts()[list(self.bin_names)] if density: return result / len(data) return result @@ -376,7 +384,9 @@ def timeseries_summary_monthly(self, series: pd.Series, density: bool = False) - if not isinstance(series.index, pd.DatetimeIndex): raise ValueError("The series must have a time series.") - counts = (self.categorise(series).groupby(series.index.month).value_counts().unstack()).sort_index(axis=0) + counts = ( + self.categorise(series).groupby(series.index.month).value_counts().unstack() + ).sort_index(axis=0) counts.columns.name = None counts.index.name = "Month" if density: @@ -407,38 +417,40 @@ def summarise( return "\n".join(statements) @bhom_analytics() - def create_legend(self, ax: plt.Axes = None, verbose: bool = True, **kwargs) -> Legend: - """Create a legend for this categorical. + def create_legend_handles_labels( + self, label_type: str = "value" + ) -> tuple[list[patches.Patch], list[str]]: + """Create legend handles and labels for this categorical. Args: - ax (plt.Axes, optional): - The axes to add the legend to. - verbose (bool, optional): - Whether to use the verbose descriptions or the interval index. - **kwargs: - Additional keyword arguments to pass to the legend. 
+ label_type (str, optional): + Whether to use the bin "value", "interval" or "name" for the legend labels. Returns: - Legend: - The legend. + tuple[list[patches.Patch], list[str]]: + (handles, labels) """ - if ax is None: - ax = plt.gca() + match label_type: + case "value": + labels = self.bins + case "interval": + labels = self.descriptions + case "name": + labels = self.bin_names + case _: + raise ValueError("The label must be 'value', 'interval' or 'name'.") handles = [] - labels = [] - for color, description, iidx in list(zip(*[self.colors, self.descriptions, self.interval_index])): + for color in self.colors: handles.append( patches.Patch( facecolor=color, edgecolor=None, ) ) - labels.append(description if verbose else iidx) - lgd = ax.legend(handles=handles, labels=labels, **kwargs) - return lgd + return handles, labels @bhom_analytics() def annual_monthly_histogram( @@ -455,8 +467,7 @@ def annual_monthly_histogram( series (pd.Series): The pandas Series to plot. Must have a datetime index. ax (plt.Axes, optional): - An optional plt.Axes object to populate. Defaults to None, - which creates a new plt.Axes object. + An optional plt.Axes object to populate. Defaults to None, which creates a new plt.Axes object. show_legend (bool, optional): Whether to show the legend. Defaults to False. 
show_labels (bool, optional): @@ -474,12 +485,15 @@ def annual_monthly_histogram( if ax is None: ax = plt.gca() + color_lookup = dict(zip(self.bin_names, self.colors)) + t = self.timeseries_summary_monthly(series, density=True) + t.plot( ax=ax, kind="bar", stacked=True, - color=self.colors, + color=[color_lookup[i] for i in t.columns], width=kwargs.pop("width", 1), legend=False, **kwargs, @@ -551,6 +565,99 @@ def annual_heatmap(self, series: pd.Series, ax: plt.Axes = None, **kwargs) -> pl return ax + @bhom_analytics() + def annual_heatmap_histogram( + self, + series: pd.Series, + location: Location = None, + figsize: tuple[float] = (15, 5), + show_legend: bool = True, + **kwargs, + ) -> plt.Figure: + """Plot the shade benefit category. + + Args: + series (pd.Series): + A pandas Series object with a datetime index. + location (Location, optional): + A location object used to plot sun up/down times in the heatmap axis. + Defaults to None. + figsize (tuple[float], optional): + The size of the figure. Defaults to (15, 5). + show_legend (bool, optional): + Whether to show the legend. Defaults to True. + **kwargs: + Additional keyword arguments to pass to the plotting function. + title (str, optional): + The title of the plot. Defaults to None. + sunrise_color (str, optional): + The color of the sunrise line. Defaults to "k" ... if location is also provided. + sunset_color (str, optional): + The color of the sunset line. Defaults to "k" ... if location is also provided. + ncol (int, optional): + The number of columns in the legend. Defaults to 5. + show_labels (bool, optional): + Whether to show the labels on the bars. Defaults to True. + + Returns: + plt.Figure: + A figure object. 
+ """ + + sunrise_color = kwargs.pop("sunrise_color", "k") + sunset_color = kwargs.pop("sunset_color", "k") + ncol = kwargs.pop("ncol", 5) + show_labels = kwargs.pop("show_labels", True) + title = kwargs.pop("title", None) + + ti = self.name + ("" if title is None else f"\n{title}") + + if show_legend: + fig, (hmap_ax, legend_ax, bar_ax) = plt.subplots( + 3, 1, figsize=figsize, height_ratios=(7, 0.5, 2) + ) + legend_ax.set_axis_off() + handles, labels = self.create_legend_handles_labels(label_type="name") + legend_ax.legend( + handles, + labels, + loc="center", + bbox_to_anchor=(0.5, 0), + fontsize="small", + ncol=ncol, + borderpad=2.5, + ) + else: + fig, (hmap_ax, bar_ax) = plt.subplots(2, 1, figsize=figsize, height_ratios=(7, 2)) + + self.annual_heatmap(series, show_colorbar=False, ax=hmap_ax) + + self.annual_monthly_histogram(ax=bar_ax, series=series, show_labels=show_labels) + + hmap_ax.set_title(ti) + + # add sun up indicator lines + if location is not None: + ylimz = hmap_ax.get_ylim() + xlimz = hmap_ax.get_xlim() + ymin = min(hmap_ax.get_ylim()) + sun_rise_set = sunrise_sunset(location=location) + sunrise = [ + ymin + (((i.time().hour * 60) + (i.time().minute)) / (60 * 24)) + for i in sun_rise_set.sunrise + ] + sunset = [ + ymin + (((i.time().hour * 60) + (i.time().minute)) / (60 * 24)) + for i in sun_rise_set.sunset + ] + xx = np.arange(min(hmap_ax.get_xlim()), max(hmap_ax.get_xlim()) + 1, 1) + hmap_ax.plot(xx, sunrise, zorder=9, c=sunrise_color, lw=1) + hmap_ax.plot(xx, sunset, zorder=9, c=sunset_color, lw=1) + hmap_ax.set_xlim(xlimz) + hmap_ax.set_ylim(ylimz) + + return fig + class ComfortClass(Enum): """Thermal comfort categories.""" @@ -561,7 +668,7 @@ class ComfortClass(Enum): @property def color(self) -> str: - """Get the associatd color.""" + """Get the associated color.""" d = { ComfortClass.TOO_COLD: "#3C65AF", ComfortClass.COMFORTABLE: "#2EB349", @@ -612,7 +719,10 @@ def simplify(self) -> "CategoricalComfort": continue d[comfort_class] = bin_left 
if len(d.keys()) != len(ComfortClass): - raise ValueError(f"The comfort classes must include all comfort classes {[i.name for i in ComfortClass]}.") + raise ValueError( + "The comfort classes must include all comfort classes " + f"{[i.name for i in ComfortClass]}." + ) return CategoricalComfort( bins=list(d.values()) + [self.bins[-1]], @@ -621,64 +731,3 @@ def simplify(self) -> "CategoricalComfort": name=self.name + " (simplified)", comfort_classes=list(ComfortClass), ) - - def heatmap_histogram(self, series: pd.Series, show_colorbar: bool = True, figsize: tuple[float] = (15, 5)) -> Figure: - """Create a heatmap histogram. This combines the heatmap and histogram. - - Args: - series (pd.Series): - A time indexed pandas series. - show_colorbar (bool, optional): - Whether to show the colorbar in the plot. Defaults to True. - figsize (tuple[float], optional): - Change the figsize. Defaults to (15, 5). - - Returns: - Figure: - A figure! - """ - - fig = plt.figure(figsize=figsize, constrained_layout=True) - spec = fig.add_gridspec(ncols=1, nrows=2, width_ratios=[1], height_ratios=[5, 2], hspace=0.0) - heatmap_ax = fig.add_subplot(spec[0, 0]) - histogram_ax = fig.add_subplot(spec[1, 0]) - - # Add heatmap - self.annual_heatmap(series, ax=heatmap_ax, show_colorbar=False) - # Add stacked plot - self.annual_monthly_histogram(series=series, ax=histogram_ax, show_labels=True) - - if show_colorbar: - # add colorbar - divider = make_axes_locatable(histogram_ax) - colorbar_ax = divider.append_axes("bottom", size="20%", pad=0.7) - cb = fig.colorbar( - mappable=heatmap_ax.get_children()[0], - cax=colorbar_ax, - orientation="horizontal", - drawedges=False, - extend="both", - ) - cb.outline.set_visible(False) - for bin_name, interval in list(zip(*[self.bin_names, self.interval_index])): - if np.isinf(interval.left): - ha = "right" - position = interval.right - elif np.isinf(interval.right): - ha = "left" - position = interval.left - else: - ha = "center" - position = 
np.mean([interval.left, interval.right]) - colorbar_ax.text( - position, - 1.05, - textwrap.fill(bin_name, 11), - ha=ha, - va="bottom", - fontsize="x-small", - ) - - heatmap_ax.set_title(f"{self.name}\n{series.name}", y=1, ha="left", va="bottom", x=0) - - return fig diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/comfort_feasibility.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/comfort_feasibility.py new file mode 100644 index 00000000..773d9a5e --- /dev/null +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/comfort_feasibility.py @@ -0,0 +1,715 @@ +"""Methods for determining the feasible ranges of thermal comfort indices +based on simple modification of inputs to their calculation.""" + +# pylint: disable=C0302,E0401,E1101 +import json +import warnings +from calendar import month_abbr +from concurrent.futures import ThreadPoolExecutor, as_completed +from enum import Enum +from pathlib import Path + +import numpy as np +import pandas as pd +from honeybee.config import folders as hb_folders +from ladybug.datatype.temperature import UniversalThermalClimateIndex +from ladybug.epw import EPW, HourlyContinuousCollection +from ladybug_comfort.collection.pet import PET, OutdoorSolarCal, PhysiologicalEquivalentTemperature +from ladybug_comfort.collection.pmv import PMV, StandardEffectiveTemperature +from ladybug_comfort.collection.utci import UTCI +from python_toolkit.bhom.logging import CONSOLE_LOGGER +from tqdm import tqdm + +from ..helpers import evaporative_cooling_effect_collection +from ..ladybug_extension.datacollection import average, collection_to_series + +# pylint: enable=E0401 + + +class ThermalComfortIndex(Enum): + """An enumeration of the comfort indices that can be calculated.""" + + UTCI = UniversalThermalClimateIndex + PET = PhysiologicalEquivalentTemperature + SET = StandardEffectiveTemperature + + @property + def default_met_rate(self) -> float: + """Get the default 
MET rate for the comfort index.""" + match self.value: + case ThermalComfortIndex.UTCI.value: + return None + case ThermalComfortIndex.PET.value: + return 2.4 + case ThermalComfortIndex.SET.value: + return 2.4 + case _: + raise NotImplementedError(f"{self.name} is not supported.") + + @property + def default_clo_value(self) -> float: + """Get the default CLO value for the comfort index.""" + match self.value: + case ThermalComfortIndex.UTCI.value: + return None + case ThermalComfortIndex.PET.value: + return 0.7 + case ThermalComfortIndex.SET.value: + return 0.7 + case _: + raise NotImplementedError(f"{self.name} is not supported.") + + @property + def default_comfort_limits(self) -> tuple[float]: + """Get the default comfort limits for the comfort index. + + Reference: + Blazejczyk, Krzysztof, Yoram Epstein, Gerd Jendritzky, + Henning Staiger, and Birger Tinz. “Comparison of UTCI to + Selected Thermal Indices.” International Journal of + Biometeorology 56, no. 3 (May 2012): 515–35. + https://doi.org/10.1007/s00484-011-0453-2. + + """ + match self.value: + case ThermalComfortIndex.UTCI.value: + return (9, 26) + case ThermalComfortIndex.PET.value: + return (18, 23) + case ThermalComfortIndex.SET.value: + return (17, 30) + case _: + raise NotImplementedError(f"{self.name} is not supported.") + + @property + def default_wind_modifier(self) -> float: + """Get the default wind modifier for the comfort index. 
This + value denotes the factor applied to wind in order to make it + applicable to the different comfort index - usually due to the + different wind speed height for that index.""" + match self.value: + case ThermalComfortIndex.UTCI.value: + return 1.0 + case ThermalComfortIndex.PET.value: + return 2 / 3 + case ThermalComfortIndex.SET.value: + return 2 / 3 + case _: + raise NotImplementedError(f"{self.name} is not supported.") + + +def thermal_comfort_data( + epw: EPW, + thermal_comfort_index: ThermalComfortIndex, + shade_proportion: float = 0, + additional_air_moisture: float = 0, + wind_multiplier: float = 1, + met_rate: float = None, + clo_value: float = None, +) -> pd.DataFrame: + """Calculate the thermal comfort index for a given EPW file and parameters. + + Args: + epw (EPW): + An EPW object with the weather data. + thermal_comfort_index (ThermalComfortIndex): + The thermal comfort index to calculate. + shade_proportion (float, optional): + A number between 0 and 1 that represents the proportion of shade + given to an abstract point. Default is 0. + additional_air_moisture (float, optional): + A number between 0 and 1 that represents the effectiveness of + evaporative cooling on the air. Default is 0. 1 would be fully + saturated air. + wind_multiplier (float, optional): + A multiplier for the wind speed. Default is 1. + met_rate (float, optional): + The metabolic rate of the person in met. Default is None, which + would use the default value for the provided comfort index. + clo_value (float, optional): + The clothing insulation value in clo. Default is None, which would + use the default value for the provided comfort index. + + Returns: + pd.DataFrame: A pandas DataFrame with the thermal comfort index values. + + Note: + - This method does not account for surface temperature heating from + solar radiation. + - The method will save the results or the provided EPW file in the + default Ladybug simulation folder named "thermal_comfort". 
+ - UTCI cannot accept MET and CLO values and these will be ignored if + provided. + """ + + # get the met and clo values if needed + if met_rate is None: + met_rate = thermal_comfort_index.default_met_rate + if clo_value is None: + clo_value = thermal_comfort_index.default_clo_value + + if thermal_comfort_index.value == ThermalComfortIndex.UTCI.value: + if met_rate is not None: + CONSOLE_LOGGER.warning("UTCI does not accept MET rate. It will be ignored.") + if clo_value is not None: + CONSOLE_LOGGER.warning("UTCI does not accept CLO value. It will be ignored.") + + # validate inputs + if not isinstance(epw, EPW): + raise ValueError("epw must be of type EPW.") + if not isinstance(thermal_comfort_index, ThermalComfortIndex): + raise ValueError("thermal_comfort_index must be of type ThermalComfortIndex.") + if not (shade_proportion >= 0) & (shade_proportion <= 1): + raise ValueError("shade_proportion must be between 0 and 1.") + if not (additional_air_moisture >= 0) & (additional_air_moisture <= 1): + raise ValueError("additional_air_moisture must be between 0 and 1.") + if wind_multiplier < 0: + raise ValueError("wind_multiplier must be greater than 0.") + if not (met_rate is None or met_rate >= 0): + raise ValueError("met_rate must be greater than or equal to 0.") + if not (clo_value is None or clo_value >= 0): + raise ValueError("clo_value must be greater than or equal to 0.") + + # create folder to store results in + root_dir = Path(hb_folders.default_simulation_folder) / "thermal_comfort" + out_dir = root_dir / Path(epw.file_path).name + out_dir.mkdir(parents=True, exist_ok=True) + + # save epw file to the folder + epw_file = out_dir / Path(epw.file_path).name + if not epw_file.exists(): + epw.save(epw_file) + + # create config identifier + input_id = f"{shade_proportion:0.1f}_{additional_air_moisture:0.1f}_{wind_multiplier:0.1f}" + config_id = f"{thermal_comfort_index.name}_{input_id}_{met_rate}_{clo_value}" + + # load existing file if it exists + collection = 
None + comfort_index_file = out_dir / f"{config_id}.json" + if comfort_index_file.exists(): + with open(comfort_index_file, "r", encoding="utf-8") as fp: + collection = HourlyContinuousCollection.from_dict(json.load(fp)) + + if collection is None: + # calculate MRT components + mrt_shaded_file = out_dir / "MRTshaded.json" + mrt_unshaded_file = out_dir / "MRTunshaded.json" + if mrt_unshaded_file.exists(): + with open(mrt_unshaded_file, "r", encoding="utf-8") as fp: + mrt_unshaded = HourlyContinuousCollection.from_dict(json.load(fp)) + else: + mrt_unshaded: HourlyContinuousCollection = OutdoorSolarCal( + location=epw.location, + direct_normal_solar=epw.direct_normal_radiation, + diffuse_horizontal_solar=epw.diffuse_horizontal_radiation, + horizontal_infrared=epw.horizontal_infrared_radiation_intensity, + surface_temperatures=epw.dry_bulb_temperature, + ).mean_radiant_temperature + with open(mrt_unshaded_file, "w", encoding="utf-8") as fp: + json.dump(mrt_unshaded.to_dict(), fp) + if mrt_shaded_file.exists(): + with open(mrt_shaded_file, "r", encoding="utf-8") as fp: + mrt_shaded = HourlyContinuousCollection.from_dict(json.load(fp)) + else: + mrt_shaded = mrt_unshaded.get_aligned_collection(epw.dry_bulb_temperature.values) + with open(mrt_shaded_file, "w", encoding="utf-8") as fp: + json.dump(mrt_shaded.to_dict(), fp) + mrt = average( + [mrt_shaded, mrt_unshaded], + [shade_proportion, 1 - shade_proportion], + ) + + # calculate DBT and RH components + dbt_file = out_dir / f"DBT_{additional_air_moisture:0.1f}.json" + rh_file = out_dir / f"RH_{additional_air_moisture:0.1f}.json" + dbt, rh = None, None + if dbt_file.exists(): + with open(dbt_file, "r", encoding="utf-8") as fp: + dbt = HourlyContinuousCollection.from_dict(json.load(fp)) + if rh_file.exists(): + with open(rh_file, "r", encoding="utf-8") as fp: + rh = HourlyContinuousCollection.from_dict(json.load(fp)) + if dbt is None or rh is None: + dbt, rh = evaporative_cooling_effect_collection( + epw=epw, 
evaporative_cooling_effectiveness=additional_air_moisture + ) + with open(dbt_file, "w", encoding="utf-8") as fp: + json.dump(dbt.to_dict(), fp) + with open(rh_file, "w", encoding="utf-8") as fp: + json.dump(rh.to_dict(), fp) + + # calculate wind speed components + + vel = epw.wind_speed * wind_multiplier * thermal_comfort_index.default_wind_modifier + + # calculate the thermal comfort index + match thermal_comfort_index.value: + case ThermalComfortIndex.UTCI.value: + collection = UTCI( + air_temperature=dbt, + rel_humidity=rh, + wind_speed=vel, + rad_temperature=mrt, + ).universal_thermal_climate_index + case ThermalComfortIndex.PET.value: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + collection = PET( + air_temperature=dbt, + rel_humidity=rh, + rad_temperature=mrt, + air_speed=vel, + barometric_pressure=epw.atmospheric_station_pressure, + met_rate=met_rate, + clo_value=clo_value, + ).physiologic_equivalent_temperature + case ThermalComfortIndex.SET.value: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + collection = PMV( + air_temperature=dbt, + rel_humidity=rh, + rad_temperature=mrt, + air_speed=vel, + met_rate=met_rate, + clo_value=clo_value, + ).standard_effective_temperature + case _: + raise NotImplementedError(f"{thermal_comfort_index.name} is not supported.") + collection.header.metadata = { + "EPW": Path(epw.file_path).name, + "ComfortIndex": thermal_comfort_index.name, + "ShadeProportion": shade_proportion, + "AirMoisture": additional_air_moisture, + "WindMultiplier": wind_multiplier, + "MET": met_rate, + "CLO": clo_value, + } + with open(comfort_index_file, "w", encoding="utf-8") as fp: + json.dump(collection.to_dict(), fp) + + # convert collection to pandas Series + df = collection_to_series(collection).to_frame() + df.columns = pd.MultiIndex.from_tuples( + [collection.header.metadata.values()], + names=collection.header.metadata.keys(), + ) + + return df + + +def thermal_comfort_datas( + epws: tuple[EPW], + 
thermal_comfort_indices: tuple[ThermalComfortIndex] = (ThermalComfortIndex.UTCI,), + shade_proportions: tuple[float] = (0, 1), + additional_air_moistures: tuple[float] = (0, 0.7), + wind_multipliers: tuple[float] = (0, 1, 1.1), + met_rates: tuple[float] = (None,), + clo_values: tuple[float] = (None,), + run_parallel: bool = False, +) -> pd.DataFrame: + """Calculate multiple thermal comfort indices - this is a wrapper around + another function that makes parallelisation more efficient. + + Args: + epws (tuple[EPW]): + A tuple of EPW objects with the weather data. + thermal_comfort_indices (tuple[ThermalComfortIndex], optional): + A tuple of thermal comfort indices to calculate. Default is (UTCI). + shade_proportions (tuple[float], optional): + A tuple of numbers between 0 and 1 that represents the proportion of + shade given to an abstract point. Default is (0, 1) which would + simulate no-shade and full-shade. + additional_air_moistures (tuple[float], optional): + A tuple of numbers between 0 and 1 that represents the effectiveness + of evaporative cooling on the air. Default is (0, 0.7) which would + simulate no additional moisture and 70% effective moisture + addition to air - typical of PDEC tower. + wind_multipliers (tuple[float], optional): + A tuple of multipliers for the wind speed. Default is (0, 1.1) + which would simulate no-wind and wind + 10%. + met_rates (tuple[float], optional): + A tuple of metabolic rates of the person in met. Default is (None) + which just simulates the default for the provided thermal comfort + index. + clo_values (tuple[float], optional): + A tuple of clothing insulation values in clo. Default is (None) + which just simulates the default for the provided thermal comfort + index. + run_parallel (bool, optional): + Set to True to run the calculations in parallel. Default is False. + + Returns: + pd.DataFrame: A pandas DataFrame with the thermal comfort index values. 
+ """ + + # validation + for arg, val in locals().items(): + if arg == "run_parallel": + continue + if not isinstance(val, (list, tuple)): + raise ValueError(f"{arg} must be iterable.") + if not all(isinstance(i, EPW) for i in epws): + raise ValueError("All epws must be of type EPW.") + if not all(isinstance(i, ThermalComfortIndex) for i in thermal_comfort_indices): + raise ValueError("All thermal_comfort_indices must be of type ThermalComfortIndex.") + if not all((i >= 0) & (i <= 1) for i in shade_proportions): + raise ValueError("All shade_proportions must be between 0 and 1.") + if not all((i >= 0) & (i <= 1) for i in additional_air_moistures): + raise ValueError("All additional_air_moistures must be between 0 and 1.") + if not all(i >= 0 for i in wind_multipliers): + raise ValueError("All wind_multipliers must be greater than 0.") + + # create a list of all possible combinations of the input values + target_iterations = [] + for epw in epws: + # create folder to store results in + root_dir = Path(hb_folders.default_simulation_folder) / "thermal_comfort" + out_dir = root_dir / Path(epw.file_path).name + out_dir.mkdir(parents=True, exist_ok=True) + + # save epw file to the folder + epw_file = out_dir / Path(epw.file_path).name + if not epw_file.exists(): + epw.save(epw_file) + + for tci in thermal_comfort_indices: + for sp in shade_proportions: + # calculate MRT components - included here to allow parallelisation + mrt_shaded_file = out_dir / "MRTshaded.json" + mrt_unshaded_file = out_dir / "MRTunshaded.json" + if not mrt_unshaded_file.exists(): + mrt_unshaded: HourlyContinuousCollection = OutdoorSolarCal( + location=epw.location, + direct_normal_solar=epw.direct_normal_radiation, + diffuse_horizontal_solar=epw.diffuse_horizontal_radiation, + horizontal_infrared=epw.horizontal_infrared_radiation_intensity, + surface_temperatures=epw.dry_bulb_temperature, + ).mean_radiant_temperature + with open(mrt_unshaded_file, "w", encoding="utf-8") as fp: + 
json.dump(mrt_unshaded.to_dict(), fp) + if not mrt_shaded_file.exists(): + mrt_shaded = mrt_unshaded.get_aligned_collection( + epw.dry_bulb_temperature.values + ) + with open(mrt_shaded_file, "w", encoding="utf-8") as fp: + json.dump(mrt_shaded.to_dict(), fp) + for am in additional_air_moistures: + # calculate DBT and RH components - included here to allow parallelisation + dbt_file = out_dir / f"DBT_{am:0.1f}.json" + rh_file = out_dir / f"RH_{am:0.1f}.json" + if not dbt_file.exists() or not rh_file.exists(): + dbt, rh = evaporative_cooling_effect_collection( + epw=epw, evaporative_cooling_effectiveness=am + ) + with open(dbt_file, "w", encoding="utf-8") as fp: + json.dump(dbt.to_dict(), fp) + with open(rh_file, "w", encoding="utf-8") as fp: + json.dump(rh.to_dict(), fp) + for wm in wind_multipliers: + if tci.value == ThermalComfortIndex.UTCI.value: + target_iterations.append( + { + "epw": epw, + "thermal_comfort_index": tci, + "shade_proportion": sp, + "additional_air_moisture": am, + "wind_multiplier": wm, + "met_rate": None, + "clo_value": None, + } + ) + else: + for mr in met_rates: + for cv in clo_values: + target_iterations.append( + { + "epw": epw, + "thermal_comfort_index": tci, + "shade_proportion": sp, + "additional_air_moisture": am, + "wind_multiplier": wm, + "met_rate": mr, + "clo_value": cv, + } + ) + # shuffle the iterations + np.random.shuffle(target_iterations) + + df = [] + if run_parallel: + # run calculations in parallel + l = len(target_iterations) + with tqdm(total=l) as pbar: + pbar.set_description("Calculating thermal comfort indices") + with ThreadPoolExecutor() as executor: + CONSOLE_LOGGER.disabled = True + futures = [ + executor.submit(thermal_comfort_data, **kwargs) for kwargs in target_iterations + ] + CONSOLE_LOGGER.disabled = False + for future in as_completed(futures): + df.append(future.result()) + pbar.update(1) + else: + pbar = tqdm(target_iterations) + for it in pbar: + pbar.set_description( + f"Processing 
{Path(it['epw'].file_path).stem} ({it['thermal_comfort_index'].name}-{it['shade_proportion']:0.1f}-{it['additional_air_moisture']:0.1f}-{it['wind_multiplier']:0.1f}-{it['met_rate']}-{it['clo_value']})" + ) + df.append(thermal_comfort_data(**it)) + + return pd.concat(df, axis=1).sort_index(axis=1) + + +def thermal_comfort_bounds( + epw: EPW, + thermal_comfort_index: ThermalComfortIndex, + shade_limits: tuple[float] = (0, 1), + wind_limits: tuple[float] = (0, 1.1), + moisture_limits: tuple[float] = (0, 0.7), + met_rate: float = None, + clo_value: float = None, + show_all: bool = False, +) -> pd.DataFrame: + """Calculate the minimum and maximum values of the thermal comfort index + for a given EPW file and parameters. + + Args: + epw (EPW): + An EPW object with the weather data. + thermal_comfort_index (ThermalComfortIndex): + The thermal comfort index to calculate. + shade_limits (tuple[float], optional): + A tuple of two numbers between 0 and 1 that represents the range of + shade given to an abstract point. Default is (0, 1). + wind_limits (tuple[float], optional): + A tuple of two numbers between 0 and 1.1 that represents the range of + wind speed multipliers. Default is (0, 1.1). + moisture_limits (tuple[float], optional): + A tuple of two numbers between 0 and 0.7 that represents the range of + effectiveness of evaporative cooling on the air. Default is (0, 0.7). + met_rate (float, optional): + The metabolic rate of the person in met. Default is None, which would + use the default value for the provided comfort index. + clo_value (float, optional): + The clothing insulation value in clo. Default is None, which would + use the default value for the provided comfort index. + show_all (bool, optional): + Set to True to return all anual hourly values instead of just the + min and max. Default is False. This is useful for debugging. + + Returns: + pd.DataFrame: A pandas DataFrame with the min and max values of the thermal + comfort index. 
+ """ + + if len(shade_limits) != 2: + raise ValueError("shade_limits should be a tuple of two values.") + if len(wind_limits) != 2: + raise ValueError("wind_limits should be a tuple of two values.") + if len(moisture_limits) != 2: + raise ValueError("moisture_limits should be a tuple of two values.") + + # run calculation of thermal comfort indices (timeseries) + df = thermal_comfort_datas( + epws=[epw], + thermal_comfort_indices=[thermal_comfort_index], + shade_proportions=shade_limits, + wind_multipliers=wind_limits, + additional_air_moistures=moisture_limits, + met_rates=[met_rate], + clo_values=[clo_value], + run_parallel=False, + ) + + if show_all: + return df + + # get the min and max values for each row + min_values = df.min(axis=1).rename(thermal_comfort_index.name) + max_values = df.max(axis=1).rename(thermal_comfort_index.name) + df = pd.concat([min_values, max_values], axis=1) + df.columns = pd.MultiIndex.from_tuples( + [(thermal_comfort_index.name, "Min"), (thermal_comfort_index.name, "Max")], + names=["Thermal Comfort Index", "Bounds"], + ) + + return df + + +def thermal_comfort_summary( + epw: EPW, + thermal_comfort_index: ThermalComfortIndex, + comfort_limits: tuple[float] = None, + hour_limits: tuple[int] = (0, 23), + shade_limits: tuple[float] = (0, 1), + wind_limits: tuple[float] = (0, 1.1), + moisture_limits: tuple[float] = (0, 0.7), + met_rate: float = None, + clo_value: float = None, + formatted: bool = False, +) -> pd.DataFrame: + """Return the proportion of hours within the specified range for each month. + + ARGS: + epw (EPW): + An EPW object with the weather data. + thermal_comfort_index (ThermalComfortIndex): + The thermal comfort index to calculate. + comfort_limits (tuple[float], optional): + A tuple of two numbers that represents the range of comfort for the + thermal comfort index. Default is None, which would use the default + comfort limits for the provided comfort index. 
+ hour_limits (tuple[int], optional): + A tuple of two numbers that represents the range of hours to consider. + Default is (0, 23) which would consider all hours. + shade_limits (tuple[float], optional): + A tuple of two numbers between 0 and 1 that represents the range of + shade given to an abstract point. Default is (0, 1). + wind_limits (tuple[float], optional): + A tuple of two numbers between 0 and 1.1 that represents the range of + wind speed multipliers. Default is (0, 1.1). + moisture_limits (tuple[float], optional): + A tuple of two numbers between 0 and 0.7 that represents the range of + effectiveness of evaporative cooling on the air. Default is (0, 0.7). + met_rate (float, optional): + The metabolic rate of the person in met. Default is None, which would + use the default value for the provided comfort index. + clo_value (float, optional): + The clothing insulation value in clo. Default is None, which would + use the default value for the provided comfort index. + formatted (bool, optional): + Set to True to return a formatted DataFrame. Default is False. + + Returns: + pd.DataFrame | pd.io.formats.style.Style: + A pandas DataFrame with the proportion of time within the + comfort range for each month -OR- a formatted DataFrame. 
+ """ + + if comfort_limits is None: + comfort_limits = thermal_comfort_index.default_comfort_limits + if len(comfort_limits) != 2: + raise ValueError("comfort_limits should be a tuple of two values.") + if comfort_limits[0] > comfort_limits[1]: + raise ValueError("comfort_limits should be in ascending order.") + + if len(hour_limits) != 2: + raise ValueError("hour_limits should be a tuple of two values.") + for hour in hour_limits: + if hour < 0 or hour > 23: + raise ValueError("hour_limits should be between 0 and 23.") + + if met_rate is None: + met_rate = thermal_comfort_index.default_met_rate + if clo_value is None: + clo_value = thermal_comfort_index.default_clo_value + + # run calculation + df = thermal_comfort_bounds( + epw=epw, + thermal_comfort_index=thermal_comfort_index, + shade_limits=shade_limits, + wind_limits=wind_limits, + moisture_limits=moisture_limits, + met_rate=met_rate, + clo_value=clo_value, + show_all=False, + ) + + # create filter/mask + if hour_limits[0] < hour_limits[1]: + mask = (df.index.hour >= hour_limits[0]) & (df.index.hour <= hour_limits[1]) + else: + mask = (df.index.hour >= hour_limits[0]) | (df.index.hour <= hour_limits[1]) + + # filter dataset + df = df[mask] + + # count the proportion of hours within the comfort limits + threshold_datasets = { + f"Too cold (<{min(comfort_limits)}°C)": df < min(comfort_limits), + f"Comfortable ({min(comfort_limits)}°C to {max(comfort_limits)}°C)": ( + df >= min(comfort_limits) + ) + & (df <= max(comfort_limits)), + f"Too hot (>{max(comfort_limits)}°C)": df > max(comfort_limits), + } + new_df = [] + for k, v in threshold_datasets.items(): + # groupby month and get proportion meeting targets + _sums = v.groupby(v.index.month).sum() + _counts = v.groupby(v.index.month).count() + _temp = _sums / _counts + + # sort each row by size to get low/high proportion + _data = [] + for _, row in _temp.iterrows(): + _data.append([min(row), max(row)]) + _temp = pd.DataFrame(data=_data, index=_temp.index) + 
_temp.columns = pd.MultiIndex.from_tuples( + [ + (thermal_comfort_index.name, k, "Lowest proportion"), + (thermal_comfort_index.name, k, "Highest proportion"), + ], + names=["Index", "Condition", "Bound"], + ) + _temp.index = [month_abbr[i] for i in _temp.index] + new_df.append(_temp) + + new_df = pd.concat(new_df, axis=1) + + if formatted: + caption = f"Feasible proportion of time achieving target comfort {thermal_comfort_index.name} conditions ({min(comfort_limits)}°C to {max(comfort_limits)}°C) using {epw}" + caption += f" from {hour_limits[0]:02d}:00 to {hour_limits[1]:02d}:59" + caption += f". Including effects from shade ({min(shade_limits):0.0%} to {max(shade_limits):0.0%}), wind ({min(wind_limits):0.0%} to {max(wind_limits):0.0%}), and air moisture ({min(moisture_limits):0.0%} to {max(moisture_limits):0.0%})." + if (met_rate is not None) and (clo_value is not None): + caption = caption[:-1] + caption += ( + f", and using a MET rate of {met_rate:0.1f} and CLO value of {clo_value:0.1f}." 
+ ) + return ( + new_df.style.set_caption(caption) + .background_gradient(cmap="Blues", subset=new_df.columns[0:2], low=0, high=1, axis=None) + .background_gradient( + cmap="Greens", subset=new_df.columns[2:4], low=0, high=1, axis=None + ) + .background_gradient(cmap="Reds", subset=new_df.columns[4:6], low=0, high=1, axis=None) + .format("{:.1%}") + .set_table_styles( + [ + { + "selector": "caption", + "props": [ + ("color", "#555555"), + ("font-size", "x-small"), + ("caption-side", "bottom"), + ("font-style", "bold"), + ("text-align", "left"), + ], + }, + { + "selector": "td:hover", + "props": [ + ("background-color", "#ffffb3"), + ("color", "black"), + ], + }, + { + "selector": ".index_name", + "props": [("color", "#555555"), ("font-weight", "normal")], + }, + { + "selector": "th:not(.index_name)", + "props": [ + ("background-color", "white"), + ("color", "black"), + ], + }, + ] + ) + ) + + return new_df diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/utci.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/utci.py index 076a3ae7..cda24a14 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/utci.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/external_comfort/utci.py @@ -6,28 +6,28 @@ from calendar import month_abbr from concurrent.futures import ProcessPoolExecutor -# pylint: enable=E0401 - import numpy as np import numpy.typing as npt import pandas as pd from ladybug.datacollection import HourlyContinuousCollection -from ladybug.datatype.temperature import ( - UniversalThermalClimateIndex as LB_UniversalThermalClimateIndex, -) +from ladybug.datatype.temperature import \ + UniversalThermalClimateIndex as LB_UniversalThermalClimateIndex from ladybug.epw import EPW from ladybug_comfort.collection.solarcal import OutdoorSolarCal from ladybug_comfort.collection.utci import UTCI +from python_toolkit.bhom.analytics import bhom_analytics from scipy.interpolate import 
interp1d, interp2d from tqdm import tqdm -from python_toolkit.bhom.analytics import bhom_analytics -from ..categorical.categories import UTCI_DEFAULT_CATEGORIES, CategoricalComfort +from ..categorical.categories import (UTCI_DEFAULT_CATEGORIES, + CategoricalComfort) from ..helpers import evaporative_cooling_effect, month_hour_binned_series -from ..ladybug_extension.datacollection import ( - collection_from_series, - collection_to_series, -) +from ..ladybug_extension.datacollection import (collection_from_series, + collection_to_series) + +# pylint: enable=E0401 + + def _saturated_vapor_pressure_hpa(dry_bulb_temperature: np.ndarray): diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/helpers.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/helpers.py index 1c58f877..0e7ac76e 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/helpers.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/helpers.py @@ -20,22 +20,19 @@ from caseconverter import snakecase from honeybee.config import folders as hb_folders from ladybug.datatype.temperature import WetBulbTemperature -from ladybug.epw import EPW, AnalysisPeriod, HourlyContinuousCollection, Location +from ladybug.epw import (EPW, AnalysisPeriod, HourlyContinuousCollection, + Location) from ladybug.psychrometrics import wet_bulb_from_db_rh -from ladybug.skymodel import ( - calc_horizontal_infrared, - calc_sky_temperature, - estimate_illuminance_from_irradiance, - get_extra_radiation, - zhang_huang_solar, - zhang_huang_solar_split, -) +from ladybug.skymodel import (calc_horizontal_infrared, calc_sky_temperature, + estimate_illuminance_from_irradiance, + get_extra_radiation, zhang_huang_solar, + zhang_huang_solar_split) from ladybug.sunpath import Sunpath from ladybug_geometry.geometry2d import Vector2D from meteostat import Hourly, Point - from python_toolkit.bhom.analytics import bhom_analytics from python_toolkit.bhom.logging import CONSOLE_LOGGER + from .ladybug_extension.dt import 
lb_datetime_from_datetime # pylint: enable=E0401 diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/ladybug_extension/epw.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/ladybug_extension/epw.py index b19db869..3a00206e 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/ladybug_extension/epw.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/ladybug_extension/epw.py @@ -9,8 +9,6 @@ from pathlib import Path from typing import Any -# pylint: enable=E0401 - import numpy as np import pandas as pd from ladybug.analysisperiod import AnalysisPeriod @@ -24,25 +22,17 @@ from ladybug.epw import EPW, EPWFields, MonthlyCollection from ladybug.header import Header from ladybug.location import Location -from ladybug.psychrometrics import ( - dew_point_from_db_wb, - enthalpy_from_db_hr, - humid_ratio_from_db_rh, - rel_humid_from_db_wb, - wet_bulb_from_db_rh, -) +from ladybug.psychrometrics import (dew_point_from_db_wb, enthalpy_from_db_hr, + humid_ratio_from_db_rh, + rel_humid_from_db_wb, wet_bulb_from_db_rh) from ladybug.skymodel import clearness_index as lb_ci from ladybug.sunpath import Sun, Sunpath from ladybug_comfort.degreetime import cooling_degree_time, heating_degree_time - from python_toolkit.bhom.analytics import bhom_analytics -from ..helpers import ( - air_pressure_at_height, - radiation_at_height, - temperature_at_height, - timedelta_tostring, - wind_speed_at_height, -) + +from ..helpers import (air_pressure_at_height, radiation_at_height, + temperature_at_height, timedelta_tostring, + wind_speed_at_height) from .analysisperiod import analysis_period_to_datetimes from .datacollection import average as average_collection from .datacollection import collection_to_series @@ -50,6 +40,10 @@ from .header import header_to_string from .location import average_location, location_to_string +# pylint: enable=E0401 + + + @bhom_analytics() def epw_to_dataframe( @@ -1054,7 +1048,7 @@ def seasonality_from_temperature_timeseries( 
series = series.loc[~series.index.duplicated()] # check that series is long enough - if max(series.index) - min(series.index) < pd.Timedelta(hours=364): + if max(series.index) - min(series.index) < pd.Timedelta(hours=364*24): raise ValueError( "Input dataset must be at least 365 days long to determine seasonality." ) @@ -1217,7 +1211,7 @@ def seasonality_from_temperature_timeseries( ).date() temp = dbt.to_frame() - s = pd.Series(index=dbt.index, data=np.nan) + s = pd.Series(index=dbt.index, data=np.nan, dtype=str) s.loc[spring_start] = "Spring" s.loc[summer_start] = "Summer" s.loc[autumn_start] = "Autumn" diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/__utci_prototypes.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/__utci_prototypes.py deleted file mode 100644 index 7cc2319c..00000000 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/__utci_prototypes.py +++ /dev/null @@ -1,682 +0,0 @@ -"""Protoype UTCI plotting methods.""" -# pylint: disable=line-too-long -# # pylint: disable=E0401 -# import calendar -# import textwrap -# import warnings -# from calendar import month_abbr -# from datetime import timedelta - -# # pylint: enable=E0401 - -# import matplotlib.dates as mdates -# import matplotlib.patches as mpatches -# import matplotlib.pyplot as plt -# import matplotlib.ticker as mticker -# import numpy as np -# import pandas as pd -# from ladybug.datacollection import HourlyContinuousCollection -# from ladybug.datatype.temperature import UniversalThermalClimateIndex -# from ladybug.epw import EPW -# from ladybug.sunpath import Sunpath -# from matplotlib.colorbar import ColorbarBase -# from matplotlib.colors import BoundaryNorm, ListedColormap -# from matplotlib.figure import Figure -# from matplotlib.gridspec import GridSpec -# from mpl_toolkits.axes_grid1 import make_axes_locatable - -# from ..categorical.categories import Categorical - -# # from ..external_comfort.utci import ( -# # UTCICategories, -# # 
feasible_comfort_category, -# # utci_comfort_categories, -# # ) -# from ..external_comfort.utci import shade_benefit_category -# from ..helpers import sunrise_sunset -# from ..ladybug_extension.analysisperiod import describe_analysis_period -# from ..ladybug_extension.datacollection import collection_to_series -# from ._heatmap import heatmap - - -# def utci_shade_benefit( -# unshaded_utci: HourlyContinuousCollection | pd.Series, -# shaded_utci: HourlyContinuousCollection | pd.Series, -# comfort_limits: tuple[float] = (9, 26), -# **kwargs, -# ) -> plt.Figure: -# """Plot the shade benefit category. - -# Args: -# unshaded_utci (HourlyContinuousCollection | pd.Series): -# A dataset containing unshaded UTCI values. -# shaded_utci (HourlyContinuousCollection | pd.Series): -# A dataset containing shaded UTCI values. -# comfort_limits (tuple[float], optional): -# The range within which "comfort" is achieved. Defaults to (9, 26). - -# Keyword Args: -# - title (str, optional): -# A title to add to the plot. Defaults to None. -# - epw (EPW, optional): -# If included, plot the sun up hours. Defaults to None. - -# Returns: -# plt.Figure: -# A figure object. -# """ - -# warnings.warn( -# "This method is mostly broken, though it *nearly* works. Included here just to inspire someone to fix it! Please." 
-# ) - -# utci_shade_benefit_categories = shade_benefit_category( -# unshaded_utci, -# shaded_utci, -# comfort_limits=comfort_limits, -# ) - -# epw = kwargs.get("epw", None) -# title = kwargs.get("title", None) - -# if epw is not None: -# if not isinstance(epw, EPW): -# raise ValueError(f"epw must be of type EPW, it is currently {type(epw)}") -# if len(epw.dry_bulb_temperature) != len(utci_shade_benefit_categories): -# raise ValueError( -# f"Input sizes do not match ({len(utci_shade_benefit_categories)} != {len(epw.dry_bulb_temperature)})" -# ) - -# # convert values into categories -# cat = pd.Categorical(utci_shade_benefit_categories) - -# # get numeric values -# numeric = pd.Series(cat.codes, index=utci_shade_benefit_categories.index) - -# # create colormap -# colors = ["#00A499", "#5D822D", "#EE7837", "#585253"] -# if len(colors) != len(cat.categories): -# raise ValueError( -# f"The number of categories does not match the number of colours in the colormap ({len(colors)} != {len(cat.categories)})." 
-# ) -# cmap = ListedColormap(colors) - -# # create tcf_properties -# imshow_properties = { -# "cmap": cmap, -# } - -# # create canvas -# fig = plt.figure(constrained_layout=True) -# spec = fig.add_gridspec( -# ncols=1, nrows=3, width_ratios=[1], height_ratios=[4, 2, 0.5], hspace=0.0 -# ) -# heatmap_ax = fig.add_subplot(spec[0, 0]) -# histogram_ax = fig.add_subplot(spec[1, 0]) -# cb_ax = fig.add_subplot(spec[2, 0]) - -# # Add heatmap -# hmap = heatmap(numeric, ax=heatmap_ax, show_colorbar=False, **imshow_properties) - -# # add sun up indicator lines -# if epw is not None: -# sp = Sunpath.from_location(epw.location) -# sun_up_down = pd.DataFrame( -# [ -# sp.calculate_sunrise_sunset_from_datetime(i) -# for i in utci_shade_benefit_categories.resample("D").count().index -# ] -# ).reset_index(drop=True) -# sun_up_down.index = sun_up_down.index + mdates.date2num(numeric.index.min()) -# sunrise = pd.Series( -# data=[ -# 726449 -# + timedelta(hours=i.hour, minutes=i.minute, seconds=i.second).seconds -# / 86400 -# for i in sun_up_down.sunrise -# ], -# index=sun_up_down.index, -# ) -# sunrise = sunrise.reindex( -# sunrise.index.tolist() + [sunrise.index[-1] + 1] -# ).ffill() -# sunset = pd.Series( -# data=[ -# 726449 -# + timedelta(hours=i.hour, minutes=i.minute, seconds=i.second).seconds -# / 86400 -# for i in sun_up_down.sunset -# ], -# index=sun_up_down.index, -# ) -# sunset = sunset.reindex(sunset.index.tolist() + [sunset.index[-1] + 1]).ffill() -# for s in [sunrise, sunset]: -# heatmap_ax.plot(s.index, s.values, zorder=9, c="#F0AC1B", lw=1) - -# # Add colorbar legend and text descriptors for comfort bands -# ticks = np.linspace(0, len(cat.categories), (len(cat.categories) * 2) + 1)[1::2] -# # cb = fig.colorbar( -# # hmap, -# # ax=heatmap_ax, -# # cax=cb_ax, -# # orientation="horizontal", -# # ticks=ticks, -# # drawedges=False, -# # ) -# # cb.outline.set_visible(False) -# # plt.setp(plt.getp(cb_ax, "xticklabels"), color="none") -# # cb.set_ticks([]) - -# # Add labels to 
the colorbar -# tick_locs = np.linspace(0, len(cat.categories) - 1, len(cat.categories) + 1) -# tick_locs = (tick_locs[1:] + tick_locs[:-1]) / 2 -# category_percentages = ( -# utci_shade_benefit_categories.value_counts() -# / utci_shade_benefit_categories.count() -# ) -# for n, (tick_loc, category) in enumerate(zip(*[tick_locs, cat.categories])): -# cb_ax.text( -# tick_loc, -# 1.05, -# textwrap.fill(category, 15) + f"\n{category_percentages[n]:0.0%}", -# ha="center", -# va="bottom", -# size="small", -# ) - -# # Add stacked plot -# t = utci_shade_benefit_categories -# t = t.groupby([t.index.month, t]).count().unstack().T -# t = t / t.sum() -# months = [calendar.month_abbr[i] for i in range(1, 13, 1)] -# t.T.plot.bar( -# ax=histogram_ax, -# stacked=True, -# color=colors, -# legend=False, -# width=1, -# ) -# histogram_ax.set_xlabel(None) -# histogram_ax.set_xlim(-0.5, 11.5) -# histogram_ax.set_ylim(0, 1) -# histogram_ax.set_xticklabels(months, ha="center", rotation=0) -# for spine in ["top", "right"]: -# histogram_ax.spines[spine].set_visible(False) -# histogram_ax.yaxis.set_major_formatter(mticker.PercentFormatter(1)) - -# # # Add header percentages for bar plot -# for month, row in ( -# ( -# utci_shade_benefit_categories.groupby( -# utci_shade_benefit_categories.index.month -# ) -# .value_counts() -# .unstack() -# .T -# / utci_shade_benefit_categories.groupby( -# utci_shade_benefit_categories.index.month -# ).count() -# ) -# .T.fillna(0) -# .iterrows() -# ): -# txt = "" -# for n, val in enumerate(row.values[::-1]): -# txtx = f"{val:0.0%}{txt}" -# histogram_ax.text( -# month - 1, -# 1.02, -# txtx, -# va="bottom", -# ha="center", -# color=colors[::-1][n], -# fontsize="small", -# ) -# txt += "\n" - -# title_base = "Shade benefit" -# if title is None: -# heatmap_ax.set_title(title_base, y=1, ha="left", va="bottom", x=0) -# else: -# heatmap_ax.set_title( -# f"{title_base}\n{title}", -# y=1, -# ha="left", -# va="bottom", -# x=0, -# ) - -# return fig - - -# # def 
utci_feasibility( -# # epw: EPW, -# # simplified: bool = False, -# # comfort_limits: tuple = (9, 26), -# # included_additional_moisture: bool = False, -# # analysis_periods: Union[AnalysisPeriod, tuple[AnalysisPeriod]] = (AnalysisPeriod()), -# # met_rate_adjustment: float = None, -# # ) -> Figure: -# # """Plot the UTCI feasibility for each month of the year. - -# # Args: -# # epw (EPW): -# # An EPW object. -# # simplified (bool, optional): -# # Default is False. -# # comfort_limits (tuple, optional): -# # Default is (9, 26). Only used if simplified is True. -# # included_additional_moisture (bool, optional): -# # Default is False. If True, then include evap cooling in this analysis. -# # analysis_periods (Union[AnalysisPeriod, tuple[AnalysisPeriod]], optional): -# # An AnalysisPeriod or a tuple of AnalysisPeriods to be used for the analysis. -# # Defaults to (AnalysisPeriod(),). -# # met_rate_adjustment (float, optional): -# # A value to be added to the metabolic rate of the UTCI model. This can be used -# # to account for changes in metabolic rate due to clothing, exercise, etc. -# # Defaults to None. -# # Returns: -# # Figure: -# # A matplotlib Figure object. 
-# # """ - -# # df = feasible_comfort_category( -# # epw, -# # simplified=simplified, -# # comfort_limits=comfort_limits, -# # include_additional_moisture=included_additional_moisture, -# # analysis_periods=analysis_periods, -# # met_rate_adjustment_value=met_rate_adjustment, -# # ) - -# # labels, _ = utci_comfort_categories( -# # simplified=simplified, -# # comfort_limits=comfort_limits, -# # rtype="category", -# # ) -# # colors, _ = utci_comfort_categories( -# # simplified=simplified, -# # comfort_limits=comfort_limits, -# # rtype="color", -# # ) - -# # fig, axes = plt.subplots(1, 12, figsize=(10, 4), sharey=True, sharex=False) - -# # ypos = range(len(df)) -# # for n, ax in enumerate(axes): -# # # get values -# # low = df.iloc[n].filter(regex="lowest") -# # high = df.iloc[n].filter(regex="highest") -# # ypos = range(len(low)) - -# # ax.barh( -# # ypos, -# # width=high.values - low.values, -# # left=low.values, -# # color=colors, -# # zorder=3, -# # alpha=0.8, -# # ) - -# # for rect in ax.patches: -# # width = rect.get_width() -# # height = rect.get_height() -# # _x = rect.get_x() -# # _y = rect.get_y() -# # if width == 0: -# # if _x == 1: -# # # text saying 100% of hours are in this category -# # ax.text( -# # 0.5, -# # _y + (height / 2), -# # textwrap.fill("All times", 15), -# # ha="center", -# # va="center", -# # rotation=0, -# # fontsize="xx-small", -# # zorder=3, -# # ) -# # continue - -# # ax.text( -# # _x - 0.03, -# # _y + (height), -# # f"{_x:0.1%}", -# # ha="right", -# # va="top", -# # rotation=90, -# # fontsize="xx-small", -# # zorder=3, -# # ) -# # ax.text( -# # _x + width + 0.03, -# # _y + (height), -# # f"{_x + width:0.1%}", -# # ha="left", -# # va="top", -# # rotation=90, -# # fontsize="xx-small", -# # zorder=3, -# # ) - -# # if simplified: -# # for nn, i in enumerate(colors): -# # ax.axhspan(ymin=nn - 0.5, ymax=nn + 0.5, fc=i, alpha=0.2, zorder=1) -# # else: -# # for nn, i in enumerate(UTCICategories): -# # ax.axhspan( -# # ymin=nn - 0.5, ymax=nn + 
0.5, fc=i.color, alpha=0.2, zorder=1 -# # ) - -# # ax.set_xlim(-0.1, 1.1) -# # ax.set_ylim(-0.5, len(ypos) - 0.5) -# # for spine in ["left", "bottom"]: -# # ax.spines[spine].set_visible(False) -# # ax.tick_params(labelleft=False, left=False) -# # ax.set_xticks([-0.1, 0.5, 1.1]) -# # ax.set_xticklabels(["", month_abbr[n + 1], ""]) -# # ax.grid(False) - -# # if n == 5: -# # handles = [] -# # if simplified: -# # for col, lab in list(zip(*[["#3C65AF", "#2EB349", "#C31F25"], labels])): -# # handles.append(mpatches.Patch(color=col, label=lab, alpha=0.3)) -# # else: -# # for i in UTCICategories: -# # handles.append( -# # mpatches.Patch(color=i.color, label=i.value, alpha=0.3) -# # ) - -# # ax.legend( -# # handles=handles, -# # bbox_to_anchor=(0.5, -0.1), -# # loc="upper center", -# # ncol=3 if simplified else 4, -# # borderaxespad=0, -# # frameon=False, -# # ) - -# # ti = f"{location_to_string(epw.location)}\nFeasible ranges of UTCI temperatures ({describe_ap(analysis_periods)})" -# # if met_rate_adjustment: -# # ti += f" with MET rate adjustment to {met_rate_adjustment} MET" -# # plt.suptitle( -# # textwrap.fill(ti, 90), -# # x=0.075, -# # y=0.9, -# # ha="left", -# # va="bottom", -# # ) - -# # plt.tight_layout() -# # return fig - - -# def utci_distance_to_comfortable( -# collection: HourlyContinuousCollection, -# title: str = None, -# comfort_thresholds: tuple[float] = (9, 26), -# low_limit: float = 15, -# high_limit: float = 25, -# ) -> Figure: -# """Plot the distance (in C) to comfortable for a given Ladybug HourlyContinuousCollection -# containing UTCI values. - -# Args: -# collection (HourlyContinuousCollection): -# A Ladybug Universal Thermal Climate Index HourlyContinuousCollection object. -# title (str, optional): -# A title to place at the top of the plot. Defaults to None. -# comfort_thresholds (list[float], optional): -# The comfortable band of UTCI temperatures. Defaults to [9, 26]. 
-# low_limit (float, optional): -# The distance from the lower edge of the comfort threshold to include in the "too cold" -# part of the heatmap. Defaults to 15. -# high_limit (float, optional): -# The distance from the upper edge of the comfort threshold to include in the "too hot" -# part of the heatmap. Defaults to 25. -# Returns: -# Figure: -# A matplotlib Figure object. -# """ - -# if not isinstance(collection.header.data_type, UniversalThermalClimateIndex): -# raise ValueError("This method only works for UTCI data.") - -# if not len(comfort_thresholds) == 2: -# raise ValueError("comfort_thresholds must be a list of length 2.") - -# # Create matrices containing the above/below/within UTCI distances to comfortable -# series = collection_to_series(collection) - -# low, high = comfort_thresholds -# midpoint = np.mean([low, high]) - -# distance_above_comfortable = (series[series > high] - high).to_frame() -# distance_above_comfortable_matrix = ( -# distance_above_comfortable.set_index( -# [ -# distance_above_comfortable.index.dayofyear, -# distance_above_comfortable.index.hour, -# ] -# )["Universal Thermal Climate Index (C)"] -# .astype(np.float64) -# .unstack() -# .T.reindex(range(24), axis=0) -# .reindex(range(365), axis=1) -# ) - -# distance_below_comfortable = (low - series[series < low]).to_frame() -# distance_below_comfortable_matrix = ( -# distance_below_comfortable.set_index( -# [ -# distance_below_comfortable.index.dayofyear, -# distance_below_comfortable.index.hour, -# ] -# )["Universal Thermal Climate Index (C)"] -# .astype(np.float64) -# .unstack() -# .T.reindex(range(24), axis=0) -# .reindex(range(365), axis=1) -# ) - -# distance_below_midpoint = ( -# midpoint - series[(series >= low) & (series <= midpoint)] -# ).to_frame() -# distance_below_midpoint_matrix = ( -# distance_below_midpoint.set_index( -# [ -# distance_below_midpoint.index.dayofyear, -# distance_below_midpoint.index.hour, -# ] -# )["Universal Thermal Climate Index (C)"] -# 
.astype(np.float64) -# .unstack() -# .T.reindex(range(24), axis=0) -# .reindex(range(365), axis=1) -# ) - -# distance_above_midpoint = ( -# series[(series <= high) & (series > midpoint)] - midpoint -# ).to_frame() -# distance_above_midpoint_matrix = ( -# distance_above_midpoint.set_index( -# [ -# distance_above_midpoint.index.dayofyear, -# distance_above_midpoint.index.hour, -# ] -# )["Universal Thermal Climate Index (C)"] -# .astype(np.float64) -# .unstack() -# .T.reindex(range(24), axis=0) -# .reindex(range(365), axis=1) -# ) - -# distance_above_comfortable_cmap = plt.get_cmap("YlOrRd") # Reds -# distance_above_comfortable_lims = [0, high_limit] -# distance_above_comfortable_norm = BoundaryNorm( -# np.linspace( -# distance_above_comfortable_lims[0], distance_above_comfortable_lims[1], 100 -# ), -# ncolors=distance_above_comfortable_cmap.N, -# clip=True, -# ) - -# distance_below_comfortable_cmap = plt.get_cmap("YlGnBu") # Blues -# distance_below_comfortable_lims = [0, low_limit] -# distance_below_comfortable_norm = BoundaryNorm( -# np.linspace( -# distance_below_comfortable_lims[0], distance_below_comfortable_lims[1], 100 -# ), -# ncolors=distance_below_comfortable_cmap.N, -# clip=True, -# ) - -# distance_below_midpoint_cmap = plt.get_cmap("YlGn_r") # Greens_r -# distance_below_midpoint_lims = [0, midpoint - low] -# distance_below_midpoint_norm = BoundaryNorm( -# np.linspace( -# distance_below_midpoint_lims[0], distance_below_midpoint_lims[1], 100 -# ), -# ncolors=distance_below_midpoint_cmap.N, -# clip=True, -# ) - -# distance_above_midpoint_cmap = plt.get_cmap("YlGn_r") # Greens_r -# distance_above_midpoint_lims = [0, high - midpoint] -# distance_above_midpoint_norm = BoundaryNorm( -# np.linspace( -# distance_above_midpoint_lims[0], distance_above_midpoint_lims[1], 100 -# ), -# ncolors=distance_above_midpoint_cmap.N, -# clip=True, -# ) - -# extent = [ -# mdates.date2num(series.index.min()), -# mdates.date2num(series.index.max()), -# 726449, -# 726450, -# ] - -# 
fig = plt.figure(constrained_layout=False, figsize=(15, 5)) -# gs = GridSpec(2, 3, figure=fig, width_ratios=[1, 1, 1], height_ratios=[20, 1]) -# hmap_ax = fig.add_subplot(gs[0, :]) -# cb_low_ax = fig.add_subplot(gs[1, 0]) -# cb_mid_ax = fig.add_subplot(gs[1, 1]) -# cb_high_ax = fig.add_subplot(gs[1, 2]) - -# hmap_ax.imshow( -# np.ma.array( -# distance_below_comfortable_matrix, -# mask=np.isnan(distance_below_comfortable_matrix), -# ), -# extent=extent, -# aspect="auto", -# cmap=distance_below_comfortable_cmap, -# norm=distance_below_comfortable_norm, -# interpolation="none", -# ) -# hmap_ax.imshow( -# np.ma.array( -# distance_below_midpoint_matrix, -# mask=np.isnan(distance_below_midpoint_matrix), -# ), -# extent=extent, -# aspect="auto", -# cmap=distance_below_midpoint_cmap, -# norm=distance_below_midpoint_norm, -# interpolation="none", -# ) -# hmap_ax.imshow( -# np.ma.array( -# distance_above_comfortable_matrix, -# mask=np.isnan(distance_above_comfortable_matrix), -# ), -# extent=extent, -# aspect="auto", -# cmap=distance_above_comfortable_cmap, -# norm=distance_above_comfortable_norm, -# interpolation="none", -# ) -# hmap_ax.imshow( -# np.ma.array( -# distance_above_midpoint_matrix, -# mask=np.isnan(distance_above_midpoint_matrix), -# ), -# extent=extent, -# aspect="auto", -# cmap=distance_above_midpoint_cmap, -# norm=distance_above_midpoint_norm, -# interpolation="none", -# ) - -# # Axis formatting -# hmap_ax.invert_yaxis() -# hmap_ax.xaxis_date() -# hmap_ax.xaxis.set_major_formatter(mdates.DateFormatter("%b")) -# hmap_ax.yaxis_date() -# hmap_ax.yaxis.set_major_formatter(mdates.DateFormatter("%H:%M")) -# hmap_ax.tick_params(labelleft=True, labelright=True, labelbottom=True) -# plt.setp(hmap_ax.get_xticklabels(), ha="left", color="k") -# plt.setp(hmap_ax.get_yticklabels(), color="k") - -# # Spine formatting -# for spine in ["top", "bottom", "left", "right"]: -# hmap_ax.spines[spine].set_visible(False) - -# # Grid formatting -# hmap_ax.grid(visible=True, 
which="major", color="white", linestyle=":", alpha=1) - -# # Colorbars -# low_cb = ColorbarBase( -# cb_low_ax, -# cmap=distance_below_comfortable_cmap, -# orientation="horizontal", -# norm=distance_below_comfortable_norm, -# label='Degrees below "comfortable"', -# extend="max", -# ) -# low_cb.outline.set_visible(False) -# cb_low_ax.xaxis.set_major_locator(mticker.MaxNLocator(5)) - -# mid_cb = ColorbarBase( -# cb_mid_ax, -# cmap=distance_below_midpoint_cmap, -# orientation="horizontal", -# norm=distance_below_midpoint_norm, -# label='Degrees about "comfortable"', -# extend="neither", -# ) -# mid_cb.outline.set_visible(False) -# cb_mid_ax.xaxis.set_major_locator(mticker.MaxNLocator(5)) - -# high_cb = ColorbarBase( -# cb_high_ax, -# cmap=distance_above_comfortable_cmap, -# orientation="horizontal", -# norm=distance_above_comfortable_norm, -# label='Degrees above "comfortable"', -# extend="max", -# ) -# high_cb.outline.set_visible(False) -# cb_high_ax.xaxis.set_major_locator(mticker.MaxNLocator(5)) - -# if title is None: -# hmap_ax.set_title( -# 'Distance to "comfortable"', color="k", y=1, ha="left", va="bottom", x=0 -# ) -# else: -# hmap_ax.set_title( -# f"Distance to comfortable - {title}", -# color="k", -# y=1, -# ha="left", -# va="bottom", -# x=0, -# ) - -# # Tidy plot -# plt.tight_layout() - -# return fig diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_evaporative_cooling_potential.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_evaporative_cooling_potential.py index c0092c4d..6c1c1a6d 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_evaporative_cooling_potential.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_evaporative_cooling_potential.py @@ -1,20 +1,25 @@ """Method for plotting evaporative cooling potential.""" -import matplotlib.pyplot as plt from pathlib import Path + +import matplotlib.pyplot as plt +import pandas as pd from ladybug.epw import EPW -from ._heatmap import heatmap 
from ..ladybug_extension.datacollection import collection_to_series -from ..ladybug_extension.location import location_to_string +from ._heatmap import heatmap -def evaporative_cooling_potential(epw: EPW, ax: plt.Axes = None, **kwargs) -> plt.Axes: +def evaporative_cooling_potential( + dbt: pd.Series, dpt: pd.Series, ax: plt.Axes = None, agg_year: bool = False, agg: str = "mean", **kwargs +) -> plt.Axes: """Plot evaporative cooling potential (DBT - DPT). Args: - epw (EPW): - An EPW object. + dbt (pd.Series): + A pandas Series containing Dry Bulb Temperature data. + dpt (pd.Series): + A pandas Series containing Dew Point Temperature data. ax (plt.Axes, optional): The matplotlib axes to plot the figure on. Defaults to None. **kwargs: @@ -24,13 +29,27 @@ def evaporative_cooling_potential(epw: EPW, ax: plt.Axes = None, **kwargs) -> pl plt.Axes: The matplotlib axes. """ + if len(dbt) != len(dpt): + raise ValueError("The length of the two series must be the same.") + + if any(dbt.index != dpt.index): + raise ValueError("The indices of the two series must be the same.") + if ax is None: ax = plt.gca() - dpt = collection_to_series(epw.dew_point_temperature) - dbt = collection_to_series(epw.dry_bulb_temperature) ecp = (dbt - dpt).clip(lower=0).rename("Evaporative Cooling Potential (C)") + if agg_year: + # check for presence of Feb 29 in ecp index + if len(ecp[(ecp.index.month == 2) & (ecp.index.day == 29)]) != 0: + idx = pd.date_range(start="2016-01-01", periods=8784, freq="h") + else: + idx = pd.date_range(start="2017-01-01", periods=8760, freq="h") + + ecp = ecp.groupby([ecp.index.month, ecp.index.day, ecp.index.hour]).agg(agg) + ecp.index = idx + if "cmap" not in kwargs: kwargs["cmap"] = "GnBu" @@ -38,13 +57,38 @@ def evaporative_cooling_potential(epw: EPW, ax: plt.Axes = None, **kwargs) -> pl ax.text( 1, 1, - "*value shown indicate cooling effect from saturating air with moisture (DBT - DPT)", + "*values shown indicate cooling effect from saturating air with moisture 
(DBT - DPT)", transform=ax.transAxes, ha="right", va="top", fontsize="small", ) + return ax + + +def evaporative_cooling_potential_epw(epw: EPW, ax: plt.Axes = None, **kwargs) -> plt.Axes: + """Plot evaporative cooling potential (DBT - DPT). + + Args: + epw (EPW): + An EPW object. + ax (plt.Axes, optional): + The matplotlib axes to plot the figure on. Defaults to None. + **kwargs: + Additional keyword arguments to pass to the heatmap function. + + Returns: + plt.Axes: The matplotlib axes. + """ + + if ax is None: + ax = plt.gca() + + evaporative_cooling_potential( + dbt=collection_to_series(epw.dry_bulb_temperature), dpt=collection_to_series(epw.dew_point_temperature), ax=ax, **kwargs + ) + ax.set_title(Path(epw.file_path).name) return ax diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_misc.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_misc.py index 3a2e3ca9..a3f078ec 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_misc.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_misc.py @@ -1,21 +1,20 @@ """Miscellaneous plots that don't really fit anywhere else.""" + # pylint: disable=line-too-long from calendar import month_abbr # pylint: disable=E0401 import matplotlib.dates as mdates import matplotlib.pyplot as plt import matplotlib.ticker as mtick - import numpy as np import pandas as pd - from ladybug.epw import EPW, Location from ladybug.sunpath import Sunpath -from ..ladybug_extension.datacollection import collection_to_series +from ..categorical.categories import Categorical from ..helpers import sunrise_sunset +from ..ladybug_extension.datacollection import collection_to_series from ._heatmap import heatmap -from ..categorical.categories import Categorical def cloud_cover_categories(epw: EPW, ax: plt.Axes = None) -> plt.Axes: @@ -49,9 +48,7 @@ def cloud_cover_categories(epw: EPW, ax: plt.Axes = None) -> plt.Axes: 9: "overcast", 10: "overcast", } - mtx = mtx.T.groupby(mapper).sum()[ - 
["clear", "mostly clear", "partly cloudy", "mostly cloudy", "overcast"] - ] + mtx = mtx.T.groupby(mapper).sum()[["clear", "mostly clear", "partly cloudy", "mostly cloudy", "overcast"]] mtx = (mtx.T / mtx.sum(axis=1)).T mtx.plot.area(ax=ax, color=["#95B5DF", "#B1C4DD", "#C9D0D9", "#ACB0B6", "#989CA1"]) @@ -95,37 +92,20 @@ def hours_sunlight(location: Location, ax: plt.Axes = None) -> plt.Axes: index=srss_df.index, ) civil_twilight = ( - [ - i.seconds / (60 * 60) - for i in (srss_df["civil twilight end"] - srss_df["civil twilight start"]) - ] - - daylight + [i.seconds / (60 * 60) for i in (srss_df["civil twilight end"] - srss_df["civil twilight start"])] - daylight ).rename("civil twilight") nautical_twilight = ( - [ - i.seconds / (60 * 60) - for i in ( - srss_df["nautical twilight end"] - srss_df["nautical twilight start"] - ) - ] + [i.seconds / (60 * 60) for i in (srss_df["nautical twilight end"] - srss_df["nautical twilight start"])] - daylight - civil_twilight ).rename("nautical twilight") astronomical_twilight = ( - [ - i.seconds / (60 * 60) - for i in ( - srss_df["astronomical twilight end"] - - srss_df["astronomical twilight start"] - ) - ] + [i.seconds / (60 * 60) for i in (srss_df["astronomical twilight end"] - srss_df["astronomical twilight start"])] - daylight - civil_twilight - nautical_twilight ).rename("astronomical twilight") - night = ( - 24 - (daylight + civil_twilight + nautical_twilight + astronomical_twilight) - ).rename("night") + night = (24 - (daylight + civil_twilight + nautical_twilight + astronomical_twilight)).rename("night") temp = pd.concat( [daylight, civil_twilight, nautical_twilight, astronomical_twilight, night], @@ -223,37 +203,20 @@ def hours_sunrise_sunset(location: Location, ax: plt.Axes = None) -> plt.Axes: index=srss_df.index, ) civil_twilight = ( - [ - i.seconds / (60 * 60) - for i in (srss_df["civil twilight end"] - srss_df["civil twilight start"]) - ] - - daylight + [i.seconds / (60 * 60) for i in (srss_df["civil twilight 
end"] - srss_df["civil twilight start"])] - daylight ).rename("civil twilight") nautical_twilight = ( - [ - i.seconds / (60 * 60) - for i in ( - srss_df["nautical twilight end"] - srss_df["nautical twilight start"] - ) - ] + [i.seconds / (60 * 60) for i in (srss_df["nautical twilight end"] - srss_df["nautical twilight start"])] - daylight - civil_twilight ).rename("nautical twilight") astronomical_twilight = ( - [ - i.seconds / (60 * 60) - for i in ( - srss_df["astronomical twilight end"] - - srss_df["astronomical twilight start"] - ) - ] + [i.seconds / (60 * 60) for i in (srss_df["astronomical twilight end"] - srss_df["astronomical twilight start"])] - daylight - civil_twilight - nautical_twilight ).rename("astronomical twilight") - night = ( - 24 - (daylight + civil_twilight + nautical_twilight + astronomical_twilight) - ).rename("night") + night = (24 - (daylight + civil_twilight + nautical_twilight + astronomical_twilight)).rename("night") temp = pd.concat( [daylight, civil_twilight, nautical_twilight, astronomical_twilight, night], @@ -289,9 +252,7 @@ def hours_sunrise_sunset(location: Location, ax: plt.Axes = None) -> plt.Axes: fc="#B9AC86", label="Civil twilight", ) - ax.fill_between( - hours.index, hours["sunrise"], hours["sunset"], fc="#FCE49D", label="Day" - ) + ax.fill_between(hours.index, hours["sunrise"], hours["sunset"], fc="#FCE49D", label="Day") ax.fill_between( hours.index, hours["sunset"], diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_psychrometric.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_psychrometric.py index a2732604..e9d6ce18 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_psychrometric.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_psychrometric.py @@ -1,13 +1,10 @@ """Methods for plotting psychrometric charts.""" -# pylint: disable=E1101 -# pylint: disable=E0401 +# pylint: disable=E1101,E0401 import textwrap from dataclasses import dataclass, field from 
enum import Enum from warnings import warn -# pylint: disable=E0401 - import matplotlib.pyplot as plt import numpy as np from ladybug.analysisperiod import AnalysisPeriod @@ -18,15 +15,17 @@ from matplotlib.collections import PatchCollection from matplotlib.colors import Colormap from matplotlib.patches import Polygon - from python_toolkit.bhom.analytics import bhom_analytics -from ..ladybug_extension.analysisperiod import ( - analysis_period_to_datetimes, - describe_analysis_period, -) + +from ..ladybug_extension.analysisperiod import (analysis_period_to_datetimes, + describe_analysis_period) from ..ladybug_extension.epw import EPW, epw_to_dataframe from ..ladybug_extension.location import location_to_string +# pylint: disable=E0401 + + + @bhom_analytics() def strategy_warning(polygon_name): @@ -151,6 +150,7 @@ def psychrometric( analysis_period: AnalysisPeriod = None, wet_bulb: bool = False, psychro_polygons: PsychrometricPolygons = None, + figsize: tuple[float, float] = (10, 7), ) -> plt.Figure: """Create a psychrometric chart using a LB backend. @@ -158,13 +158,20 @@ def psychrometric( epw (EPW): An EPW object. cmap (Colormap, optional): - A colormap to color things with!. Defaults to "viridis". + A colormap to color things with!. + Default is "viridis". analysis_period (AnalysisPeriod, optional): - An analysis period to filter values by. Default is whole year. + An analysis period to filter values by. + Default is whole year. wet_bulb (bool, optional): - Plot wet-bulb temperature constant lines instead of enthalpy. Default is False. + Plot wet-bulb temperature constant lines instead of enthalpy. + Default is False. psychro_polygons (PsychrometricPolygons, optional): - A PsychrometricPolygons object to use for plotting comfort polygons. + A PsychrometricPolygons object to use for plotting comfort polygons. + Default is None. + figsize (tuple[float, float], optional): + A tuple of floats for the figure size. + Default is (10, 7). 
Returns: plt.Figure: @@ -202,7 +209,7 @@ def lb_mesh_to_patch_collection( return patch_collection p = lb_mesh_to_patch_collection(psychart.colored_mesh, psychart.hour_values) - fig, ax = plt.subplots(1, 1, figsize=(10, 7)) + fig, ax = plt.subplots(1, 1, figsize=figsize) collections = ax.add_collection(p) if wet_bulb: diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_seasonality.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_seasonality.py new file mode 100644 index 00000000..59d943e9 --- /dev/null +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_seasonality.py @@ -0,0 +1,161 @@ +"""Method to plot the seasonality of an EPW file.""" + +import matplotlib.dates as mdates +import matplotlib.patches as mpatches +import matplotlib.pyplot as plt +import pandas as pd +from ladybug.epw import EPW +from matplotlib.collections import PatchCollection + +from ..ladybug_extension.epw import ( + seasonality_from_day_length, + seasonality_from_month, + seasonality_from_temperature, +) +from .utilities import contrasting_color + + +def seasonality_comparison( + epw: EPW, ax: plt.Axes = None, color_config: dict[str, str] = None, **kwargs +) -> plt.Axes: + """Create a plot which shows where the seasohn threshholds are for the input EPW object. + + Args: + epw (EPW): + An EPW object. + ax (plt.Axes, optional): + A matplotlib axes object. Default is None which uses the current axes. + color_config (dict[str, str], optional): + A dictionary of colors for each season. If None, then default will be used. + **kwargs: + title (str): + The title of the plot. If not provided, then the name of the EPW file is used. 
+ """ + + from_day_length = seasonality_from_day_length(epw=epw).rename("From day-length", inplace=False) + from_month = seasonality_from_month(epw=epw).rename("From month", inplace=False) + from_temperature = seasonality_from_temperature(epw=epw).rename( + "From temperature", inplace=False + ) + + seasons = ["Winter", "Spring", "Summer", "Autumn"] + + if color_config is None: + color_config = { + "Winter": "#8DB9CA", + "Spring": "#AFC1A2", + "Summer": "#E6484D", + "Autumn": "#EE7837", + } + else: + if [i not in color_config for i in seasons]: + raise ValueError( + f"The color_config dictionary must contain colors for all four seasons {seasons}." + ) + + title_str = kwargs.pop("title", str(epw)) + + ywidth = 0.9 + + if ax is None: + ax = plt.gca() + + season_df = pd.concat( + [ + from_month, + from_day_length, + from_temperature, + ], + axis=1, + ) + season_df.index = [mdates.date2num(i) for i in season_df.index] + + y = (1 - ywidth) / 2 + patches = [] + for col in season_df.columns: + for season in seasons: + local = season_df[col][season_df[col] == season] + if any(local.index.diff().unique() > 1): + # get the points at which the values change + shiftpt = local.index.diff().argmax() + patches.append( + mpatches.Rectangle( + xy=(local.index[0], y), + height=ywidth, + width=local.index[shiftpt - 1] - local.index[0], + facecolor=color_config[season], + edgecolor="w", + ) + ) + patches.append( + mpatches.Rectangle( + xy=(local.index[shiftpt], y), + height=ywidth, + width=local.index[-1], + facecolor=color_config[season], + edgecolor="w", + ) + ) + else: + patches.append( + mpatches.Rectangle( + xy=(local.index[0], y), + height=ywidth, + width=local.index[-1] - local.index[0], + facecolor=color_config[season], + edgecolor="w", + ) + ) + y += 1 + pc = PatchCollection(patches=patches, match_original=True, zorder=3) + ax.add_collection(pc) + + # add annotations + y = (1 - ywidth) / 2 + for n, col in enumerate(season_df.columns): + for season in seasons: + local = 
season_df[col][season_df[col] == season] + if any(local.index.diff().unique() > 1): + # get the points at which the values change + shiftpt = local.index.diff().argmax() + ax.text( + local.index[shiftpt] + 1, + n + 0.5, + f"{mdates.num2date(local.index[shiftpt]):%b %d}", + ha="left", + va="top", + c=contrasting_color(color_config[season]), + ) + else: + ax.text( + local.index[0] + 1, + n + 0.5, + f"{mdates.num2date(local.index[0]):%b %d}", + ha="left", + va="top", + c=contrasting_color(color_config[season]), + ) + y += 1 + + ax.set_xlim(season_df.index[0], season_df.index[-1]) + ax.set_ylim(0, 3) + + ax.set_yticks([0.5, 1.5, 2.5]) + ax.set_yticklabels(season_df.columns, rotation=0, ha="right") + + ax.xaxis.set_major_locator(mdates.MonthLocator()) + ax.xaxis.set_major_formatter(mdates.DateFormatter("%B")) + plt.setp(ax.get_xticklabels(), rotation=0, ha="left") + + # create and add legend + new_handles = [] + for _, color in color_config.items(): + new_handles.append(mpatches.Patch(color=color, edgecolor=None)) + plt.legend( + new_handles, color_config.keys(), bbox_to_anchor=(0.5, -0.12), loc="upper center", ncol=4 + ) + + # add title + ax.set_title(title_str) + + return ax diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_utci.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_utci.py index 1af88181..0ce45f27 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_utci.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/_utci.py @@ -1,34 +1,35 @@ """Plot methods for UTCI datasets""" -# pylint: disable=C0302 -# pylint: disable=E0401 + +# pylint: disable=C0302,E0401 import calendar import textwrap - -# pylint enable=E0401 +import warnings import matplotlib.dates as mdates import matplotlib.pyplot as plt +import matplotlib.ticker as mticker import numpy as np import pandas as pd from ladybug.datacollection import HourlyContinuousCollection from ladybug.datatype.temperature import ( 
UniversalThermalClimateIndex as LB_UniversalThermalClimateIndex, ) +from ladybug.location import Location from matplotlib.figure import Figure -from matplotlib.ticker import PercentFormatter from mpl_toolkits.axes_grid1 import make_axes_locatable +from python_toolkit.bhom.analytics import bhom_analytics from scipy.interpolate import make_interp_spline -from python_toolkit.bhom.analytics import bhom_analytics -from ..categorical.categories import ( - UTCI_DEFAULT_CATEGORIES, - CategoricalComfort, -) +from ..categorical.categories import UTCI_DEFAULT_CATEGORIES, Categorical, CategoricalComfort +from ..external_comfort.utci import shade_benefit_category +from ..helpers import sunrise_sunset from ..ladybug_extension.datacollection import collection_to_series from ._heatmap import heatmap from .colormaps import UTCI_DIFFERENCE_COLORMAP from .utilities import contrasting_color, lighten_color +# pylint enable=E0401 + @bhom_analytics() def utci_comfort_band_comparison( @@ -112,9 +113,113 @@ def utci_comfort_band_comparison( # add labels to bars # get bar total heights - height = np.array([[i.get_height() for i in c] for c in ax.containers]).T.sum( - axis=1 - )[0] + height = np.array([[i.get_height() for i in c] for c in ax.containers]).T.sum(axis=1)[0] + for c in ax.containers: + labels = [] + for v in c: + label = f"{v.get_height():0.1%}" if density else f"{v.get_height():0.0f}" + if v.get_height() / height > 0.04: + labels.append(label) + else: + labels.append("") + + ax.bar_label( + c, + labels=labels, + label_type="center", + color=contrasting_color(v.get_facecolor()), + ) + + ax.tick_params(axis="both", which="both", length=0) + ax.grid(False) + plt.xticks(rotation=0) + ax.yaxis.set_major_locator(plt.NullLocator()) + + return ax + + +@bhom_analytics() +def utci_comfort_band_comparison_series( + utci_series: tuple[pd.Series], + ax: plt.Axes = None, + identifiers: tuple[str] = None, + utci_categories: CategoricalComfort = UTCI_DEFAULT_CATEGORIES, + density: bool = True, 
+ **kwargs, +) -> plt.Axes: + """Create a proportional bar chart showing how different UTCI collections + compare in terms of time within each comfort band. + + Args: + utci_series (list[pd.Series]): + A list of UTCI series objects. + ax (plt.Axes, optional): + The matplotlib Axes to plot on. Defaults to None which uses the current Axes. + identifiers (list[str], optional): + A list of names to give each collection. Defaults to None. + utci_categories (Categories, optional): + The UTCI categories to use. Defaults to UTCI_DEFAULT_CATEGORIES. + density (bool, optional): + If True, then show percentage, otherwise show count. Defaults to True. + **kwargs: + Additional keyword arguments to pass to the function. + + Returns: + plt.Axes: + A matplotlib Axes object. + """ + + if not isinstance(utci_series, (list, tuple)): + raise ValueError("utci_series must be a list or tuple of pd.Series objects.") + + if any(len(i) != len(utci_series[0]) for i in utci_series): + raise ValueError("All collections must be the same length.") + + if ax is None: + ax = plt.gca() + + # set the title + ax.set_title(kwargs.pop("title", None)) + + if identifiers is None: + identifiers = [f"{n}" for n in range(len(utci_series))] + + if len(identifiers) != len(utci_series): + raise ValueError( + "The number of identifiers given does not match the number of UTCI collections given!" 
+ ) + + counts = pd.concat( + [utci_categories.value_counts(i, density=density) for i in utci_series], + axis=1, + keys=identifiers, + ) + counts.T.plot( + ax=ax, + kind="bar", + stacked=True, + color=utci_categories.colors, + width=0.8, + legend=False, + ) + + if kwargs.pop("legend", True): + handles, labels = ax.get_legend_handles_labels() + ax.legend( + handles[::-1], + labels[::-1], + title=utci_categories.name, + bbox_to_anchor=(1, 0.5), + loc="center left", + ) + + for spine in ["top", "right", "bottom", "left"]: + ax.spines[spine].set_visible(False) + + # add labels to bars + + # get bar total heights + height = np.array([[i.get_height() for i in c] for c in ax.containers]).T.sum(axis=1)[0] for c in ax.containers: labels = [] for v in c: @@ -304,9 +409,7 @@ def utci_comparison_diurnal_day( ax.set_title("\n".join([i for i in title if i is not None])) # combine utcis and add names to columns - df = pd.concat( - [collection_to_series(i) for i in utci_collections], axis=1, keys=collection_ids - ) + df = pd.concat([collection_to_series(i) for i in utci_collections], axis=1, keys=collection_ids) ylim = kwargs.pop("ylim", [df.min().min(), df.max().max()]) df_agg = df.groupby([df.index.month, df.index.hour]).agg(agg).loc[month] df_agg.index = range(24) @@ -345,13 +448,9 @@ def utci_comparison_diurnal_day( # add grid using a hacky fix for i in ax.get_xticks(): - ax.axvline( - i, color=ax.xaxis.label.get_color(), ls=":", lw=0.5, alpha=0.1, zorder=5 - ) + ax.axvline(i, color=ax.xaxis.label.get_color(), ls=":", lw=0.5, alpha=0.1, zorder=5) for i in ax.get_yticks(): - ax.axhline( - i, color=ax.yaxis.label.get_color(), ls=":", lw=0.5, alpha=0.1, zorder=5 - ) + ax.axhline(i, color=ax.yaxis.label.get_color(), ls=":", lw=0.5, alpha=0.1, zorder=5) if show_legend: handles, labels = ax.get_legend_handles_labels() @@ -393,13 +492,9 @@ def utci_heatmap_difference( A matplotlib Axes object. 
""" - if not isinstance( - utci_collection1.header.data_type, LB_UniversalThermalClimateIndex - ): + if not isinstance(utci_collection1.header.data_type, LB_UniversalThermalClimateIndex): raise ValueError("Input collection 1 is not a UTCI collection.") - if not isinstance( - utci_collection2.header.data_type, LB_UniversalThermalClimateIndex - ): + if not isinstance(utci_collection2.header.data_type, LB_UniversalThermalClimateIndex): raise ValueError("Input collection 2 is not a UTCI collection.") if ax is None: @@ -448,9 +543,7 @@ def utci_pie( plt.Axes: A matplotlib Axes object. """ - if not isinstance( - utci_collection.header.data_type, LB_UniversalThermalClimateIndex - ): + if not isinstance(utci_collection.header.data_type, LB_UniversalThermalClimateIndex): raise ValueError("Input collection is not a UTCI collection.") if ax is None: @@ -655,12 +748,8 @@ def utci_heatmap_histogram( A matplotlib Figure object. """ - if not isinstance( - utci_collection.header.data_type, LB_UniversalThermalClimateIndex - ): - raise ValueError( - "Collection data type is not UTCI and cannot be used in this plot." 
- ) + if not isinstance(utci_collection.header.data_type, LB_UniversalThermalClimateIndex): + raise ValueError("Collection data type is not UTCI and cannot be used in this plot.") series = collection_to_series(utci_collection) @@ -669,9 +758,7 @@ def utci_heatmap_histogram( # Instantiate figure fig = plt.figure(figsize=figsize, constrained_layout=True) - spec = fig.add_gridspec( - ncols=1, nrows=2, width_ratios=[1], height_ratios=[5, 2], hspace=0.0 - ) + spec = fig.add_gridspec(ncols=1, nrows=2, width_ratios=[1], height_ratios=[5, 2], hspace=0.0) heatmap_ax = fig.add_subplot(spec[0, 0]) histogram_ax = fig.add_subplot(spec[1, 0]) @@ -679,9 +766,7 @@ def utci_heatmap_histogram( utci_categories.annual_heatmap(series, ax=heatmap_ax, show_colorbar=False, **kwargs) # Add stacked plot - utci_categories.annual_monthly_histogram( - series=series, ax=histogram_ax, show_labels=True - ) + utci_categories.annual_monthly_histogram(series=series, ax=histogram_ax, show_labels=True) if show_colorbar: # add colorbar @@ -753,12 +838,8 @@ def utci_histogram( if ax is None: ax = plt.gca() - if not isinstance( - utci_collection.header.data_type, LB_UniversalThermalClimateIndex - ): - raise ValueError( - "Collection data type is not UTCI and cannot be used in this plot." 
- ) + if not isinstance(utci_collection.header.data_type, LB_UniversalThermalClimateIndex): + raise ValueError("Collection data type is not UTCI and cannot be used in this plot.") ti = kwargs.pop("title", None) if ti is not None: @@ -800,9 +881,7 @@ def utci_histogram( series = collection_to_series(utci_collection) # plot data - series.plot( - kind="hist", ax=ax, bins=bins, color=color, alpha=alpha, density=density - ) + series.plot(kind="hist", ax=ax, bins=bins, color=color, alpha=alpha, density=density) # set xlims xlim = kwargs.pop( @@ -854,6 +933,101 @@ def utci_histogram( fontsize="small", ) - ax.yaxis.set_major_formatter(PercentFormatter(xmax=1, decimals=2)) + ax.yaxis.set_major_formatter(mticker.PercentFormatter(xmax=1, decimals=2)) return ax + + +def utci_shade_benefit( + unshaded_utci: HourlyContinuousCollection | pd.Series, + shaded_utci: HourlyContinuousCollection | pd.Series, + comfort_limits: tuple[float] = (9, 26), + location: Location = None, + color_config: dict[str, str] = None, + figsize: tuple[float] = (15, 5), + **kwargs, +) -> plt.Figure: + """Plot the shade benefit category. + + Args: + unshaded_utci (HourlyContinuousCollection | pd.Series): + A dataset containing unshaded UTCI values. + shaded_utci (HourlyContinuousCollection | pd.Series): + A dataset containing shaded UTCI values. + comfort_limits (tuple[float], optional): + The range within which "comfort" is achieved. Defaults to (9, 26). + location (Location, optional): + A location object used to plot sun up/down times. Defaults to None. + color_config (dict[str, str], optional): + A dictionary of colors for each shade-benefit category. + If None then defaults will be used. + figsize (tuple[float], optional): + The size of the figure. Defaults to (15, 5). + **kwargs: + Additional keyword arguments to pass to the plotting function. + title (str, optional): + The title of the plot. Defaults to None. + + Returns: + plt.Figure: + A figure object. 
+ """ + + vals = { + "Comfortable with shade": 0.5, + "Comfortable without shade": 1.5, + "Shade is beneficial": 2.5, + "Shade is detrimental": 3.5, + "Undefined": 4.5, + } + + # get the utci benefit category + utci_shade_benefit_categories = shade_benefit_category( + unshaded_utci=unshaded_utci, + shaded_utci=shaded_utci, + comfort_limits=comfort_limits, + ) + + # validation + if color_config is None: + color_config = { + "Comfortable with shade": "#8080ff", + "Comfortable without shade": "#408040", + "Shade is beneficial": "#ffd280", + "Shade is detrimental": "#ff8080", + "Undefined": "#a3a2a2", + "Sun up": "w", + } + else: + for i in vals: + if i not in color_config: + raise ValueError(f"Color for '{i}' not defined in color_config.") + if "Sun up" not in color_config: + color_config["Sun up"] = "k" + warnings.warn("Color for 'Sun up' not defined in color_config. Using black.") + + title = f"Comfortable between {min(comfort_limits)}°C and {max(comfort_limits)}°C UTCI" + if "title" in kwargs: + title = f"{kwargs.pop('title')}\n{title}" + + # map to values + usbc = utci_shade_benefit_categories.map(vals) + + # create categorical + cat = Categorical( + name="Shade Benefit", + bin_names=vals.keys(), + bins=[0, 1, 2, 3, 4, 5], + colors=[color_config[i] for i in vals], + ) + + fig = cat.annual_heatmap_histogram( + series=usbc, + location=location, + figsize=figsize, + title=title, + sunrise_color="w", + sunset_color="w", + ) + + return fig diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/colormaps.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/colormaps.py index b7bf494a..2c923de7 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/colormaps.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/plot/colormaps.py @@ -2,17 +2,17 @@ from .utilities import colormap_sequential -DBT_COLORMAP = colormap_sequential("white", "#bc204b") -RH_COLORMAP = colormap_sequential("white", "#8db9ca") -MRT_COLORMAP = 
colormap_sequential("white", "#6d104e") +DBT_COLORMAP = colormap_sequential("#ffffff", "#bc204b") +RH_COLORMAP = colormap_sequential("#ffffff", "#8db9ca") +MRT_COLORMAP = colormap_sequential("#ffffff", "#6d104e") WS_COLORMAP = colormap_sequential( "#d0e8e4", "#8db9ca", "#006da8", "#24135f", ) -UTCI_DIFFERENCE_COLORMAP = colormap_sequential("#00A9E0", "w", "#702F8A") -UTCI_DISTANCE_TO_COMFORTABLE = colormap_sequential("#00A9E0", "w", "#ba000d") +UTCI_DIFFERENCE_COLORMAP = colormap_sequential("#00A9E0", "#ffffff", "#702F8A") +UTCI_DISTANCE_TO_COMFORTABLE = colormap_sequential("#00A9E0", "#ffffff", "#ba000d") _ = colormap_sequential( "#E40303", "#FF8C00", "#FFED00", "#008026", "#24408E", "#732982" ) diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/solar.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/solar.py index bafaf705..d58f5cc1 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/solar.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/solar.py @@ -1,49 +1,44 @@ """Methods for handling solar radiation.""" # pylint: disable=E0401 +import concurrent.futures import itertools +import json +import pickle import textwrap +from dataclasses import dataclass from datetime import datetime -import json -import concurrent.futures -from dataclasses import dataclass, field from enum import Enum, auto from pathlib import Path from typing import Any -# pylint: enable=E0401 - import matplotlib.ticker as mticker import numpy as np import pandas as pd from honeybee.config import folders as hb_folders -from ladybug.wea import EPW, AnalysisPeriod, Wea, Location +from ladybug.sunpath import Sun, Sunpath +from ladybug.wea import EPW, AnalysisPeriod, Location, Wea + +# pylint: enable=E0401 +from ladybug_radiance.skymatrix import SkyMatrix +from ladybug_radiance.visualize.radrose import RadiationRose +from ladybugtools_toolkit.helpers import Vector2D, angle_from_north from matplotlib import pyplot as plt +from matplotlib.cm import 
ScalarMappable +from matplotlib.colors import Normalize +from matplotlib.ticker import MultipleLocator +from matplotlib.tri import Triangulation +from python_toolkit.bhom.logging import CONSOLE_LOGGER from tqdm import tqdm -from .helpers import ( - OpenMeteoVariable, - angle_from_north, - angle_to_vector, - cardinality, - circular_weighted_mean, - rolling_window, - scrape_meteostat, - scrape_openmeteo, - wind_speed_at_height, - remove_leap_days, -) -from python_toolkit.bhom.analytics import bhom_analytics -from python_toolkit.bhom.logging import CONSOLE_LOGGER +from .helpers import OpenMeteoVariable, angle_from_north, remove_leap_days, scrape_openmeteo from .ladybug_extension.analysisperiod import ( analysis_period_to_boolean, analysis_period_to_datetimes, describe_analysis_period, ) -from .ladybug_extension.location import location_to_string -from ladybug.sunpath import Sunpath, Sun from .ladybug_extension.datacollection import header_to_string - +from .ladybug_extension.location import location_to_string from .plot.utilities import contrasting_color, format_polar_plot @@ -92,9 +87,7 @@ def __post_init__(self): # convert to lists self.global_horizontal_irradiance = np.array(self.global_horizontal_irradiance) self.direct_normal_irradiance = np.array(self.direct_normal_irradiance) - self.diffuse_horizontal_irradiance = np.array( - self.diffuse_horizontal_irradiance - ) + self.diffuse_horizontal_irradiance = np.array(self.diffuse_horizontal_irradiance) self.datetimes = pd.DatetimeIndex(self.datetimes) # validate @@ -143,15 +136,9 @@ def to_dict(self) -> dict: return { "_t": "BH.oM.LadybugTools.Solar", - "global_horizontal_irradiance": [ - float(i) for i in self.global_horizontal_irradiance - ], - "direct_normal_irradiance": [ - float(i) for i in self.direct_normal_irradiance - ], - "diffuse_horizontal_irradiance": [ - i for i in self.diffuse_horizontal_irradiance - ], + "global_horizontal_irradiance": [float(i) for i in self.global_horizontal_irradiance], + 
"direct_normal_irradiance": [float(i) for i in self.direct_normal_irradiance], + "diffuse_horizontal_irradiance": [i for i in self.diffuse_horizontal_irradiance], "datetimes": [i.isoformat() for i in self.datetimes], "source": self.source, } @@ -239,9 +226,7 @@ def from_dataframe( raise ValueError(f"df must be of type {type(pd.DataFrame)}") if not isinstance(df.index, pd.DatetimeIndex): - raise ValueError( - f"The DataFrame's index must be of type {type(pd.DatetimeIndex)}" - ) + raise ValueError(f"The DataFrame's index must be of type {type(pd.DatetimeIndex)}") # remove NaN values df.dropna(axis=0, how="any", inplace=True) @@ -250,13 +235,9 @@ def from_dataframe( df = df.loc[~df.index.duplicated()] return cls( - global_horizontal_irradiance=df[ - global_horizontal_irradiance_column - ].tolist(), + global_horizontal_irradiance=df[global_horizontal_irradiance_column].tolist(), direct_normal_irradiance=df[direct_normal_irradiance_column].tolist(), - diffuse_horizontal_irradiance=df[ - diffuse_horizontal_irradiance_columns - ].tolist(), + diffuse_horizontal_irradiance=df[diffuse_horizontal_irradiance_columns].tolist(), datetimes=df.index.tolist(), source=source, ) @@ -288,13 +269,9 @@ def from_csv( df = pd.read_csv(csv_path, **kwargs) return cls.from_dataframe( df, - global_horizontal_irradiance=df[ - global_horizontal_irradiance_column - ].tolist(), + global_horizontal_irradiance=df[global_horizontal_irradiance_column].tolist(), direct_normal_irradiance=df[direct_normal_irradiance_column].tolist(), - diffuse_horizontal_irradiance=df[ - diffuse_horizontal_irradiance_columns - ].tolist(), + diffuse_horizontal_irradiance=df[diffuse_horizontal_irradiance_columns].tolist(), datetimes=df.index.tolist(), source=csv_path.name, ) @@ -358,13 +335,9 @@ def from_openmeteo( df.dropna(how="any", axis=0, inplace=True) - global_horizontal_irradiance = df[ - "Global Horizontal Radiation (Wh/m2)" - ].tolist() + global_horizontal_irradiance = df["Global Horizontal Radiation 
(Wh/m2)"].tolist() direct_normal_irradiance = df["Direct Normal Radiation (Wh/m2)"].tolist() - diffuse_horizontal_irradiance = df[ - "Diffuse Horizontal Radiation (Wh/m2)" - ].tolist() + diffuse_horizontal_irradiance = df["Diffuse Horizontal Radiation (Wh/m2)"].tolist() if ( len(global_horizontal_irradiance) == 0 @@ -443,9 +416,9 @@ def wea(self, location: Location) -> Wea: """ # create annual values - if max(self.datetimes) - min(self.datetimes) < pd.Timedelta( - days=365 - ) - pd.Timedelta(minutes=60): + if max(self.datetimes) - min(self.datetimes) < pd.Timedelta(days=365) - pd.Timedelta( + minutes=60 + ): raise ValueError( "The Solar object must contain at least 1 year's worth of data to generate a Wea." ) @@ -458,12 +431,8 @@ def wea(self, location: Location) -> Wea: return Wea.from_annual_values( location=location, - direct_normal_irradiance=grouped[ - "Direct Normal Irradiance (Wh/m2)" - ].tolist(), - diffuse_horizontal_irradiance=grouped[ - "Diffuse Horizontal Irradiance (Wh/m2)" - ].tolist(), + direct_normal_irradiance=grouped["Direct Normal Irradiance (Wh/m2)"].tolist(), + diffuse_horizontal_irradiance=grouped["Diffuse Horizontal Irradiance (Wh/m2)"].tolist(), ) @staticmethod @@ -532,7 +501,10 @@ def directional_irradiance_matrix( idx = analysis_period_to_datetimes(cols[0].header.analysis_period) results = [] - pbar = tqdm(total=len(midpoints), desc="Calculating irradiance", ) + pbar = tqdm( + total=len(midpoints), + desc="Calculating irradiance", + ) with concurrent.futures.ProcessPoolExecutor() as executor: futures = [] for _az in midpoints: @@ -552,17 +524,13 @@ def directional_irradiance_matrix( # convert results into a massive array headers = [] for _az in midpoints: - for rad_type, unit in zip( - *[["Total", "Direct", "Diffuse", "Reflected"], units] - ): + for rad_type, unit in zip(*[["Total", "Direct", "Diffuse", "Reflected"], units]): headers.append((altitude, _az, rad_type, unit)) df = pd.DataFrame( np.array(results).reshape(len(midpoints) * 4, 
8760), columns=idx, - index=pd.MultiIndex.from_tuples( - headers, names=["Altitude", "Azimuth", "Type", "Unit"] - ), + index=pd.MultiIndex.from_tuples(headers, names=["Altitude", "Azimuth", "Type", "Unit"]), ).T df.to_hdf(sp, key="df", complevel=9, complib="blosc:zlib") @@ -642,17 +610,13 @@ def detailed_irradiance_matrix( # convert results into a massive array headers = [] for _alt, _az in combinations: - for rad_type, unit in zip( - *[["Total", "Direct", "Diffuse", "Reflected"], units] - ): + for rad_type, unit in zip(*[["Total", "Direct", "Diffuse", "Reflected"], units]): headers.append((_alt, _az, rad_type, unit)) df = pd.DataFrame( np.array(results).reshape(len(combinations) * 4, 8760), columns=idx, - index=pd.MultiIndex.from_tuples( - headers, names=["Altitude", "Azimuth", "Type", "Unit"] - ), + index=pd.MultiIndex.from_tuples(headers, names=["Altitude", "Azimuth", "Type", "Unit"]), ).T df.to_hdf(sp, key="df", complevel=9, complib="blosc:zlib") @@ -905,9 +869,7 @@ def plot_directional_irradiance( # labelling if labelling: for rect, idx, val, colr in list( - zip( - *[bars, data.index.get_level_values("Azimuth"), data.values, colors] - ) + zip(*[bars, data.index.get_level_values("Azimuth"), data.values, colors]) ): if len(set(data.values)) == 1: ax.text( @@ -939,7 +901,7 @@ def plot_directional_irradiance( ha="right" if idx < 180 else "left", va="center", fontsize="xx-small", - c=contrasting_color(colr) + c=contrasting_color(colr), # bbox=dict(ec="none", fc="w", alpha=0.5, boxstyle="round,pad=0.3"), ) @@ -952,3 +914,337 @@ def plot_directional_irradiance( ) return ax + + +def radiation_rose( + epw_file: Path, + ax: plt.Axes = None, + rad_type: str = "total", + analysis_period: AnalysisPeriod = AnalysisPeriod(), + tilt_angle: float = 0, + cmap: str = "YlOrRd", + directions: int = 36, + label: bool = True, + bar_width: float = 1, + lims: tuple[float, float] = None, +) -> plt.Axes: + """Create a solar radiation rose + + Args: + epw_file (Path): + The EPW file 
representing the weather data/location to be visualised. + ax (plt.Axes, optional): + A polar axis onto which the radiation rose will be plotted. + Defaults to None. + rad_type (str, optional): + The type of radiation to plot. + Defaults to "total", with options of "total", "direct" and "diffuse". + analysis_period (AnalysisPeriod, optional): + The analysis period over which radiation shall be summarised. + Defaults to AnalysisPeriod(). + tilt_angle (float, optional): + The tilt (from 0 at horizon, to 90 facing the sky) to asses. + Defaults to 0. + cmap (str, optional): + The colormap to apply. + Defaults to "YlOrRd". + directions (int, optional): + The number of directions to bin data into. + Defaults to 36. + label (bool, optional): + Set to True to include labels on teh plot. + Defaults to True. + bar_width (float, optional): + Set the bar width for each of the bins. + Defaults to 1. + lims (tuple[float, float], optional): + Set the limits of the plot. + Defaults to None. + + Returns: + plt.Axes: + The matplotlib axes. 
+ """ + if ax is None: + _, ax = plt.subplots(subplot_kw={"projection": "polar"}) + + if ax.name != "polar": + raise ValueError("ax must be a polar axis.") + + # create sky conditions + smx = SkyMatrix.from_epw(epw_file=epw_file, high_density=True, hoys=analysis_period.hoys) + rr = RadiationRose(sky_matrix=smx, direction_count=directions, tilt_angle=tilt_angle) + + # get properties to plot + angles = np.deg2rad( + [angle_from_north(j) for j in [Vector2D(*i[:2]) for i in rr.direction_vectors]] + ) + values = getattr(rr, f"{rad_type}_values") + if lims is None: + norm = Normalize(vmin=0, vmax=max(values)) + else: + norm = Normalize(vmin=lims[0], vmax=lims[1]) + cmap = plt.get_cmap(cmap) + colors = [cmap(i) for i in [norm(v) for v in values]] + + # generate plot + rects = ax.bar( + x=angles, + height=values, + width=((np.pi / directions) * 2) * bar_width, + color=colors, + ) + format_polar_plot(ax) + + # add colormap + sm = ScalarMappable(cmap=cmap, norm=norm) + sm.set_array([]) + cbar = plt.colorbar( + sm, + ax=ax, + orientation="vertical", + label="Cumulative irradiance (W/m$^2$)", + fraction=0.046, + pad=0.04, + ) + cbar.outline.set_visible(False) + + # add labels + if label: + offset_distance = max(values) / 10 + if directions > 36: + max_angle = angles[np.argmax(values)] + max_val = max(values) + ax.text( + max_angle, + max_val + offset_distance, + f"{max_val:0.0f}W/m$^2$\n{np.rad2deg(max_angle):0.0f}°", + fontsize="xx-small", + ha="center", + va="center", + rotation=0, + rotation_mode="anchor", + color="k", + ) + else: + for rect, color in list(zip(*[rects, colors])): + theta = rect.get_x() + (rect.get_width() / 2) + theta_deg = np.rad2deg(theta) + val = rect.get_height() + + if theta_deg < 180: + if val < max(values) / 2: + ha = "left" + anchor = val + offset_distance + else: + ha = "right" + anchor = val - offset_distance + ax.text( + theta, + anchor, + f"{val:0.0f}", + fontsize="xx-small", + ha=ha, + va="center", + rotation=90 - theta_deg, + 
rotation_mode="anchor", + color=contrasting_color(color), + ) + else: + if val < max(values) / 2: + ha = "right" + anchor = val + offset_distance + else: + ha = "left" + anchor = val - offset_distance + ax.text( + theta, + anchor, + f"{val:0.0f}", + fontsize="xx-small", + ha=ha, + va="center", + rotation=-theta_deg - 90, + rotation_mode="anchor", + color=contrasting_color(color), + ) + + ax.set_title( + f"{epw_file.name}\n{rad_type.title()} irradiance ({tilt_angle}° altitude)\n{describe_analysis_period(analysis_period)}" + ) + + plt.tight_layout() + + return ax + + +def tilt_orientation_factor( + epw_file: Path, + ax: plt.Axes = None, + rad_type: str = "total", + analysis_period: AnalysisPeriod = AnalysisPeriod(), + cmap: str = "YlOrRd", + directions: int = 36, + tilts: int = 9, + quantiles: tuple[float] = (0.05, 0.25, 0.5, 0.75, 0.95), + lims: tuple[float, float] = None, +) -> plt.Axes: + """Create a tilt-orientation factor plot. + + Args: + epw_file (Path): + The EPW file representing the weather data/location to be visualised. + ax (plt.Axes, optional): + The axes to plot on. + Defaults to None. + rad_type (str, optional): + The type of radiation to plot. + Defaults to "total", with options of "total", "direct" and "diffuse". + analysis_period (AnalysisPeriod, optional): + The analysis period over which radiation shall be summarised. + Defaults to AnalysisPeriod(). + cmap (str, optional): + The colormap to apply. + Defaults to "YlOrRd". + directions (int, optional): + The number of directions to bin data into. + Defaults to 36. + tilts (int, optional): + The number of tilts to calculate. + Defaults to 9. + quantiles (tuple[float], optional): + The quantiles to plot. + Defaults to (0.05, 0.25, 0.5, 0.75, 0.95). + lims (tuple[float, float], optional): + The limits of the plot. + Defaults to None. + + Returns: + plt.Axes: + The matplotlib axes. 
+ """ + + # create dir for cached results + _dir = Path(hb_folders.default_simulation_folder) / "_lbt_tk_solar" + _dir.mkdir(exist_ok=True, parents=True) + ndir = directions + + if ax is None: + ax = plt.gca() + + cmap = plt.get_cmap(cmap) + + # create sky matrix + smx = SkyMatrix.from_epw(epw_file=epw_file, high_density=True, hoys=analysis_period.hoys) + + # create roses per tilt angle + _directions = np.linspace(0, 360, directions + 1)[:-1].tolist() + _tilts = np.linspace(0, 90, tilts)[:-1].tolist() + [89.999] + rrs: list[RadiationRose] = [] + for ta in tqdm(_tilts): + sp = _dir / f"{epw_file.stem}_{ndir}_{ta:0.4f}.pickle" + if sp.exists(): + rr = pickle.load(open(sp, "rb")) + else: + rr = RadiationRose(sky_matrix=smx, direction_count=directions, tilt_angle=ta) + pickle.dump(rr, open(sp, "wb")) + rrs.append(rr) + _directions.append(360) + + # create matrix of values from results + values = np.array([getattr(i, f"{rad_type}_values") for i in rrs]) + + # repeat first result at end to close the circle + values = values.T.tolist() + values.append(values[0]) + values = np.array(values).T + + # create x, y coordinates per result value + __directions, __tilts = np.meshgrid(_directions, _tilts) + + # get location of max + _max = values.flatten().max() + if _max == 0: + raise ValueError(f"No solar radiation within {analysis_period}.") + + _max_idx = values.flatten().argmax() + _max_alt = __tilts.flatten()[_max_idx] + _max_az = __directions.flatten()[_max_idx] + + # create colormap + if lims is None: + norm = Normalize(vmin=0, vmax=_max) + else: + norm = Normalize(vmin=lims[0], vmax=lims[1]) + + # create triangulation + tri = Triangulation(x=__directions.flatten(), y=__tilts.flatten()) + + # create quantile lines + quantiles = [0.05, 0.25, 0.5, 0.75, 0.95] + levels = [np.quantile(a=values.flatten(), q=i) for i in quantiles] + quant_colors = [cmap(i) for i in [norm(v) for v in levels]] + quant_colors_inv = [contrasting_color(i) for i in quant_colors] + max_color_inv = 
contrasting_color(cmap(norm(_max))) + + # plot data + tcf = ax.tricontourf(tri, values.flatten(), levels=100, cmap=cmap, norm=norm) + tcl = ax.tricontour( + tri, + values.flatten(), + levels=levels, + colors=quant_colors_inv, + linestyles=":", + alpha=0.5, + ) + + # add contour labels + def cl_fmt(x): + return f"{x:,.0f}W/m$^2$" + + _ = ax.clabel(tcl, fontsize="small", fmt=cl_fmt) + + # add colorbar + cb = plt.colorbar( + tcf, + ax=ax, + orientation="vertical", + drawedges=False, + fraction=0.05, + aspect=25, + pad=0.02, + label="Cumulative irradiance (W/m$^2$)", + ) + cb.outline.set_visible(False) + for quantile_val in levels: + cb.ax.plot([0, 1], [quantile_val] * 2, color="k", ls="-", alpha=0.5) + + # add max-location + ax.scatter(_max_az, _max_alt, c=max_color_inv, s=10, marker="x") + alt_offset = (90 / 100) * 0.5 if _max_alt <= 45 else -(90 / 100) * 0.5 + az_offset = (360 / 100) * 0.5 if _max_az <= 180 else -(360 / 100) * 0.5 + ha = "left" if _max_az <= 180 else "right" + va = "bottom" if _max_alt <= 45 else "top" + ax.text( + _max_az + az_offset, + _max_alt + alt_offset, + f"{_max:,.0f}W/m$^2$\n({_max_az:0.0f}°, {_max_alt:0.0f}°)", + ha=ha, + va=va, + c=max_color_inv, + weight="bold", + size="small", + ) + + ax.set_xlim(0, 360) + ax.set_ylim(0, 90) + ax.xaxis.set_major_locator(MultipleLocator(base=30)) + ax.yaxis.set_major_locator(MultipleLocator(base=10)) + ax.set_xlabel("Panel orientation (clockwise from North at 0°)") + ax.set_ylabel("Panel tilt (0° facing the horizon, 90° facing the sky)") + + ax.set_title( + f"{epw_file.name}\n{rad_type.title()} irradiance (cumulative)\n{describe_analysis_period(analysis_period)}" + ) + + return ax diff --git a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/wind.py b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/wind.py index 4dd30125..57217be3 100644 --- a/LadybugTools_Engine/Python/src/ladybugtools_toolkit/wind.py +++ b/LadybugTools_Engine/Python/src/ladybugtools_toolkit/wind.py @@ -10,8 +10,6 @@ from pathlib 
import Path from typing import Any -# pylint: enable=E0401 - import matplotlib.patches as mpatches import matplotlib.pyplot as plt import matplotlib.ticker as mticker @@ -33,7 +31,6 @@ angle_to_vector, cardinality, circular_weighted_mean, - rolling_window, scrape_meteostat, scrape_openmeteo, wind_speed_at_height, @@ -47,6 +44,8 @@ from .plot._timeseries import timeseries from .plot.utilities import contrasting_color, format_polar_plot +# pylint: enable=E0401 + @dataclass(init=True, eq=True, repr=True) class Wind: @@ -60,9 +59,10 @@ class Wind: datetimes (Union[pd.DatetimeIndex, list[Union[datetime, np.datetime64, pd.Timestamp]]]): An iterable of datetime-like objects. height_above_ground (float, optional): - The height above ground (in m) where the input wind speeds and directions were collected. Defaults to 10m. + The height above ground (in m) where the input wind speeds and directions were + collected. Defaults to 10m. source (str, optional): - A source string to describe where the input data comes from. Defaults to None. + A source string to describe where the input data comes from. Defaults to None. """ wind_speeds: list[float] @@ -75,12 +75,8 @@ def __post_init__(self): if self.height_above_ground < 0.1: raise ValueError("Height above ground must be >= 0.1m.") - if not ( - len(self.wind_speeds) == len(self.wind_directions) == len(self.datetimes) - ): - raise ValueError( - "wind_speeds, wind_directions and datetimes must be the same length." 
- ) + if not len(self.wind_speeds) == len(self.wind_directions) == len(self.datetimes): + raise ValueError("wind_speeds, wind_directions and datetimes must be the same length.") if len(self.wind_speeds) <= 1: raise ValueError( @@ -170,7 +166,7 @@ def to_file(self, path: Path) -> Path: if Path(path).suffix != ".json": raise ValueError("path must be a JSON file.") - with open(Path(path), "w") as fp: + with open(Path(path), "w", encoding="utf-8") as fp: fp.write(self.to_json()) return Path(path) @@ -178,7 +174,7 @@ def to_file(self, path: Path) -> Path: @classmethod def from_file(cls, path: Path) -> "Wind": """Create this object from a JSON file.""" - with open(Path(path), "r") as fp: + with open(Path(path), "r", encoding="utf-8") as fp: return cls.from_json(fp.read()) def to_csv(self, path: Path) -> Path: @@ -217,16 +213,15 @@ def from_dataframe( height_above_ground (float, optional): Defaults to 10m. source (str, optional): - A source string to describe where the input data comes from. Defaults to "DataFrame"". + A source string to describe where the input data comes from. + Defaults to "DataFrame"". """ if not isinstance(df, pd.DataFrame): raise ValueError(f"df must be of type {type(pd.DataFrame)}") if not isinstance(df.index, pd.DatetimeIndex): - raise ValueError( - f"The DataFrame's index must be of type {type(pd.DatetimeIndex)}" - ) + raise ValueError(f"The DataFrame's index must be of type {type(pd.DatetimeIndex)}") # remove NaN values df.dropna(axis=0, how="any", inplace=True) @@ -255,7 +250,8 @@ def from_csv( Args: csv_path (Path): - The path to the CSV file containing speed and direction columns, and a datetime index. + The path to the CSV file containing speed and direction columns, + and a datetime index. wind_speed_column (str): The name of the column where wind-speed data exists. 
wind_direction_column (str): @@ -306,7 +302,8 @@ def from_openmeteo( start_date: datetime | str, end_date: datetime | str, ) -> "Wind": - """Create a Wind object from data obtained from the Open-Meteo database of historic weather station data. + """Create a Wind object from data obtained from the Open-Meteo database + of historic weather station data. Args: latitude (float): @@ -357,7 +354,8 @@ def from_meteostat( end_date: datetime | str, altitude: float = 10, ) -> "Wind": - """Create a Wind object from data obtained from the Meteostat database of historic weather station data. + """Create a Wind object from data obtained from the Meteostat database + of historic weather station data. Args: latitude (float): @@ -392,9 +390,7 @@ def from_meteostat( ) @classmethod - def from_average( - cls, wind_objects: list["Wind"], weights: list[float] = None - ) -> "Wind": + def from_average(cls, wind_objects: list["Wind"], weights: list[float] = None) -> "Wind": """Create an average Wind object from a set of input Wind objects, with optional weighting for each.""" # create default weightings if None @@ -415,9 +411,7 @@ def from_average( df_wd = pd.concat([i.wd for i in wind_objects], axis=1).dropna() # construct the weighted means - wd_avg = np.array( - [circular_weighted_mean(i, weights) for _, i in df_wd.iterrows()] - ) + wd_avg = np.array([circular_weighted_mean(i, weights) for _, i in df_wd.iterrows()]) ws_avg = np.average(df_ws, axis=1, weights=weights) dts = df_ws.index @@ -451,10 +445,12 @@ def from_uv( datetimes (list[datetime]): An iterable of datetime-like objects. height_above_ground (float, optional): - The height above ground (in m) where the input wind speeds and directions were collected. + The height above ground (in m) where the input wind speeds and + directions were collected. Defaults to 10m. source (str, optional): - A source string to describe where the input data comes from. Defaults to None. + A source string to describe where the input data comes from. 
+ Defaults to None. Returns: Wind: @@ -498,9 +494,9 @@ def index(self) -> pd.DatetimeIndex: @property def ws(self) -> pd.Series: """Convenience accessor for wind speeds as a time-indexed pd.Series object.""" - return pd.Series( - self.wind_speeds, index=self.index, name="Wind Speed (m/s)" - ).sort_index(ascending=True, inplace=False) + return pd.Series(self.wind_speeds, index=self.index, name="Wind Speed (m/s)").sort_index( + ascending=True, inplace=False + ) @property def wd(self) -> pd.Series: @@ -511,7 +507,8 @@ def wd(self) -> pd.Series: @property def df(self) -> pd.DataFrame: - """Convenience accessor for wind direction and speed as a time-indexed pd.DataFrame object.""" + """Convenience accessor for wind direction and speed as a time-indexed + pd.DataFrame object.""" return pd.concat([self.wd, self.ws], axis=1) @property @@ -552,9 +549,7 @@ def mean_speed(self, remove_calm: bool = False) -> float: Mean wind speed. """ - return np.linalg.norm( - self.filter_by_speed(min_speed=1e-10 if remove_calm else 0).mean_uv - ) + return np.linalg.norm(self.filter_by_speed(min_speed=1e-10 if remove_calm else 0).mean_uv) @property def mean_direction(self) -> tuple[float, float]: @@ -603,11 +598,13 @@ def percentile(self, percentile: float) -> float: return self.ws.quantile(percentile) def resample(self, rule: pd.DateOffset | pd.Timedelta | str) -> "Wind": - """Resample the wind data collection to a different timestep. This can only be used to downsample. + """Resample the wind data collection to a different timestep. + This can only be used to downsample. Args: rule (Union[pd.DateOffset, pd.Timedelta, str]): - A rule for resampling. This uses the same inputs as a Pandas Series.resample() method. + A rule for resampling. This uses the same inputs as a Pandas + Series.resample() method. Returns: Wind: @@ -652,7 +649,8 @@ def to_height( target_height (float): Height to translate to (in m). 
terrain_roughness_length (float, optional): - Terrain roughness (how big objects are to adjust translation). Defaults to 1. + Terrain roughness (how big objects are to adjust translation). + Defaults to 1. log_function (bool, optional): Whether to use log-function or pow-function. Defaults to True. @@ -675,10 +673,9 @@ def to_height( source=f"{self.source} translated to {target_height}m", ) - def apply_directional_factors( - self, directions: int, factors: tuple[float] - ) -> "Wind": - """Adjust wind speed values by a set of factors per direction. Factors start at north, and move clockwise. + def apply_directional_factors(self, directions: int, factors: tuple[float]) -> "Wind": + """Adjust wind speed values by a set of factors per direction. + Factors start at north, and move clockwise. Example: >>> wind = Wind.from_epw(epw_path) @@ -687,8 +684,8 @@ def apply_directional_factors( ... factors=(0.5, 0.75, 1, 0.75) ... ) - Where northern winds would be multiplied by 0.5, eastern winds by 0.75, southern winds by 1, and western winds - by 0.75. + Where northern winds would be multiplied by 0.5, eastern winds by 0.75, + southern winds by 1, and western winds by 0.75. 
Args: directions (int): @@ -751,12 +748,8 @@ def filter_by_analysis_period( df = df[~((df.index.month == 2) & (df.index.day == 29))] # filter available data - possible_datetimes = [ - DateTime(dt.month, dt.day, dt.hour, dt.minute) for dt in df.index - ] - lookup = dict( - zip(AnalysisPeriod().datetimes, analysis_period_to_boolean(analysis_period)) - ) + possible_datetimes = [DateTime(dt.month, dt.day, dt.hour, dt.minute) for dt in df.index] + lookup = dict(zip(AnalysisPeriod().datetimes, analysis_period_to_boolean(analysis_period))) mask = [lookup[i] for i in possible_datetimes] df = df[mask] @@ -854,13 +847,16 @@ def filter_by_time( def filter_by_direction( self, left_angle: float = 0, right_angle: float = 360, inclusive: bool = True ) -> "Wind": - """Filter the current object by wind direction, based on the angle as observed from a location. + """Filter the current object by wind direction, based on the angle as + observed from a location. Args: left_angle (float): - The left-most angle, to the left of which wind speeds and directions will be removed. + The left-most angle, to the left of which wind speeds and + directions will be removed. right_angle (float): - The right-most angle, to the right of which wind speeds and directions will be removed. + The right-most angle, to the right of which wind speeds and + directions will be removed. inclusive (bool, optional): Include values that are exactly the left or right angle values. 
@@ -960,16 +956,12 @@ def _direction_bin_edges(directions: int) -> list[float]: raise ValueError("directions must be > 2.") direction_bin_edges = np.unique( - ( - (np.linspace(0, 360, directions + 1) - ((360 / directions) / 2)) % 360 - ).tolist() + ((np.linspace(0, 360, directions + 1) - ((360 / directions) / 2)) % 360).tolist() + [0, 360] ) return direction_bin_edges - def process_direction_data( - self, directions: int - ) -> tuple[pd.Series, list[tuple[float]]]: + def process_direction_data(self, directions: int) -> tuple[pd.Series, list[tuple[float]]]: """Process wind direction data for this object. Args: @@ -1027,7 +1019,8 @@ def process_other_data( The bins to sort this data into. Defaults to None, which if other_data is None, uses Beaufort scale bins. name (str, optional): A name to be given to the other data. - Defaults to None which uses "other", or the name of the input Series if input is a Series. + Defaults to None which uses "other", or the name of the input Series + if input is a Series. Returns: tuple[pd.Series, list[tuple[float]]]: @@ -1041,9 +1034,7 @@ def process_other_data( other_bins = BEAUFORT_CATEGORIES.bins if len(other_data) != len(self): - raise ValueError( - f"other_data must be the same length as this {type(self)} object." - ) + raise ValueError(f"other_data must be the same length as this {type(self)} object.") if isinstance(other_data, list | tuple | np.ndarray): other_data = pd.Series( @@ -1080,9 +1071,7 @@ def process_other_data( # bin the other data categories = pd.cut(other_data, bins=other_bins, include_lowest=False) - bin_tuples = [ - tuple([i.left, i.right]) for i in categories.cat.categories.tolist() - ] + bin_tuples = [tuple([i.left, i.right]) for i in categories.cat.categories.tolist()] mapper = dict( zip( @@ -1109,7 +1098,8 @@ def bin_data( other_data: list[float] = None, other_bins: list[float] = None, ) -> pd.DataFrame: - """Create categories for wind direction and "other" data. 
By defualt "other" data is the wind speed associate with the wind direction in this object. + """Create categories for wind direction and "other" data. By default "other" + data is the wind speed associate with the wind direction in this object. Args: directions (int, optional): @@ -1121,12 +1111,11 @@ def bin_data( Returns: pd.DataFrame: - A DataFrame containing the wind direction categories and the "other" data categories. + A DataFrame containing the wind direction categories and the "other" + data categories. """ - other_categories, _ = self.process_other_data( - other_data=other_data, other_bins=other_bins - ) + other_categories, _ = self.process_other_data(other_data=other_data, other_bins=other_bins) direction_categories, _ = self.process_direction_data(directions=directions) return pd.concat([direction_categories, other_categories], axis=1) @@ -1145,17 +1134,22 @@ def histogram( directions (int, optional): The number of directions to use. Defaults to 36. other_data (list[float], optional): - A list of other data to bin by direction. If None, then wind speed will be used. + A list of other data to bin by direction. + If None, then wind speed will be used. other_bins (list[float]): - The other data bins to use for the histogram. These bins are right inclusive. + The other data bins to use for the histogram. + These bins are right inclusive. density (bool, optional): - If True, then return the probability density function. Defaults to False. + If True, then return the probability density function. + Defaults to False. remove_calm (bool, optional): - If True, then remove calm wind speeds from the histogram. Defaults to False. + If True, then remove calm wind speeds from the histogram. + Defaults to False. Returns: pd.DataFrame: - A numpy array, containing the number or probability for each bin, for each direction bin. + A numpy array, containing the number or probability for each bin, + for each direction bin. 
""" other_categories, other_bin_tuples = self.process_other_data( @@ -1212,7 +1206,8 @@ def prevailing( n (int, optional): The number of prevailing directions to return. Default is 1. as_cardinal (bool, optional): - If True, then return the prevailing directions as cardinal directions. Defaults to False. + If True, then return the prevailing directions as cardinal directions. + Defaults to False. Returns: list[float] | list[str]: @@ -1220,7 +1215,7 @@ def prevailing( """ binned = self.bin_data(directions=directions) - + if ignore_calm: binned = binned.loc[self.ws > threshold] @@ -1243,15 +1238,18 @@ def probabilities( percentiles: tuple[float, float] = (0.5, 0.95), other_data: list[float] = None, ) -> pd.DataFrame: - """Calculate the probabilities of wind speeds at the given percentiles, for the direction bins specified. + """Calculate the probabilities of wind speeds at the given percentiles, + for the direction bins specified. Args: directions (int, optional): The number of wind directions to bin values into. percentiles (tuple[float, float], optional): - A tuple of floats between 0-1 describing percentiles. Defaults to (0.5, 0.95). + A tuple of floats between 0-1 describing percentiles. + Defaults to (0.5, 0.95). other_data (list[float], optional): - A list of other data to bin by direction. If None, then wind speed will be used. + A list of other data to bin by direction. + If None, then wind speed will be used. Returns: pd.DataFrame: @@ -1282,7 +1280,8 @@ def _probability_density_function( other_data: list[float] = None, other_bins: list[float] = None, ) -> pd.DataFrame: - """Create a table with the probability density function for each "other_data" and direction. + """Create a table with the probability density function for each + "other_data" and direction. 
Args: directions (int, optional): @@ -1329,7 +1328,8 @@ def _cumulative_density_function( other_data: list[float] = None, other_bins: list[float] = None, ) -> pd.DataFrame: - """Create a table with the cumulative probability density function for each "other_data" and direction. + """Create a table with the cumulative probability density function for each + "other_data" and direction. Args: directions (int, optional): @@ -1382,9 +1382,7 @@ def exceedance( A table containing exceedance values. """ - other_categories, other_bin_tuples = self.process_other_data( - other_data=other_data - ) + other_categories, _ = self.process_other_data(other_data=other_data) direction_categories, direction_bin_tuples = self.process_direction_data( directions=directions ) @@ -1414,39 +1412,54 @@ def exceedance( return df.fillna(0) - def wind_matrix(self) -> pd.DataFrame: - """Calculate average wind speed and direction for each month and hour of day in a pandas DataFrame. + def wind_matrix(self, other_data: pd.Series = None) -> pd.DataFrame: + """Calculate average wind direction and speed (or aligned other data) + for each month and hour of in the Wind object. + + Args: + other_data (pd.Series, optional): + The other data to calculate the matrix for. + Returns: pd.DataFrame: - A DataFrame containing average wind speed and direction for each month and hour of day. + A DataFrame containing average other_data and direction for each + month and hour of day. 
""" + if other_data is None: + other_data = self.ws + + if not isinstance(other_data, pd.Series): + raise ValueError("other_data must be a time indexed pandas Series.") + + if len(other_data) != len(self.wd): + raise ValueError(f"other_data must be the same length as this {type(self)} object.") + + if not all(other_data.index == self.wd.index): + raise ValueError("other_data must have the same index as this Wind object.") + wind_directions = ( ( ( - self.wd.groupby( - [self.wd.index.month, self.wd.index.hour], axis=0 - ).apply(circular_weighted_mean) + self.wd.groupby([self.wd.index.month, self.wd.index.hour], axis=0).apply( + circular_weighted_mean + ) ) % 360 ) .unstack() .T ) - wind_directions.columns = [ - calendar.month_abbr[i] for i in wind_directions.columns - ] - wind_speeds = ( - self.ws.groupby([self.ws.index.month, self.ws.index.hour], axis=0) + wind_directions.columns = [calendar.month_abbr[i] for i in wind_directions.columns] + _other_data = ( + other_data.groupby([other_data.index.month, other_data.index.hour], axis=0) .mean() .unstack() .T ) - wind_speeds.columns = [calendar.month_abbr[i] for i in wind_speeds.columns] + _other_data.columns = [calendar.month_abbr[i] for i in _other_data.columns] - df = pd.concat( - [wind_directions, wind_speeds], axis=1, keys=["direction", "speed"] - ) + df = pd.concat([wind_directions, _other_data], axis=1, keys=["direction", "other"]) df.index.name = "hour" return df @@ -1507,39 +1520,46 @@ def summarise(self, directions: int = 36) -> list[str]: f"Peak wind speeds were observed at {self.ws.idxmax()}, reaching {self.ws.max():.2f}m/s from {self.wd.loc[self.ws.idxmax()]}°.", ) - return_strings.append( - f"{self.calm():.2%} of the time, wind speeds are calm (≤ 1e-10m/s)." 
- ) + return_strings.append(f"{self.calm():.2%} of the time, wind speeds are calm (≤ 1e-10m/s).") return return_strings # pylint: enable=line-too-long - - def prevailing_wind_speeds(self, n: int=1, directions: int=36, ignore_calm: bool=True, threshold: float=1e-10) -> tuple[list[pd.Series], list[tuple[float, float]]]: + + def prevailing_wind_speeds( + self, n: int = 1, directions: int = 36, ignore_calm: bool = True, threshold: float = 1e-10 + ) -> tuple[list[pd.Series], list[tuple[float, float]]]: """Gets the wind speeds for the prevailing directions - + Args: n (int): Number of prevailing directions to return. Defaults to 1 - directions (int): - Number of direction bins to use when calculating the prevailing directions. Defaults to 36 - + Number of direction bins to use when calculating the prevailing directions. + Defaults to 36 ignore_calm (bool): - Whether to ignore calm hours when getting the prevailing directions. Defaults to True - + Whether to ignore calm hours when getting the prevailing directions. + Defaults to True threshold (float): The threshold for calm hours. Defaults to 1e-10 - + Returns: (list[pandas.Series], list[(float, float)]): - Tuple containing a list of time-indexed series containing wind speed data for each prevailing direction, from most to least prevailing, + Tuple containing a list of time-indexed series containing wind + speed data for each prevailing direction, from most to least prevailing, and a list of wind directions corresponding to the serieses. 
""" - prevailing_directions = self.prevailing(n=n, directions=directions, ignore_calm=ignore_calm, threshold=threshold) - prevailing_wind_speeds = [self.ws.loc[self.bin_data(directions=directions)["Wind Direction (degrees)"] == direction] for direction in prevailing_directions] + prevailing_directions = self.prevailing( + n=n, directions=directions, ignore_calm=ignore_calm, threshold=threshold + ) - return (prevailing_wind_speeds, prevailing_directions) + prevailing_wind_speeds = [ + self.ws.loc[ + self.bin_data(directions=directions)["Wind Direction (degrees)"] == direction + ] + for direction in prevailing_directions + ] + return (prevailing_wind_speeds, prevailing_directions) def weibull_pdf(self) -> tuple[float]: """Calculate the parameters of an exponentiated Weibull continuous random variable. @@ -1622,6 +1642,7 @@ def plot_windmatrix( ax: plt.Axes = None, show_values: bool = False, show_arrows: bool = True, + other_data: pd.Series = None, **kwargs, ) -> plt.Axes: """Create a plot showing the annual wind speed and direction bins @@ -1634,8 +1655,13 @@ def plot_windmatrix( Whether to show values in the cells. Defaults to False. show_arrows (bool, optional): Whether to show the directional arrows on each patch. + other_data: (pd.Series, optional): + The other data to align with the wind direction and speed. + Defaults to None which uses wind speed. **kwargs: Additional keyword arguments to pass to the pcolor function. + title (str, optional): + A title for the plot. Defaults to None. 
Returns: plt.Axes: @@ -1646,43 +1672,52 @@ def plot_windmatrix( if ax is None: ax = plt.gca() + if other_data is None: + other_data = self.ws + + title = self.source + title += f'\n{kwargs.pop("title", None)}' ax.set_title(textwrap.fill(f"{self.source}", 75)) - df = self.wind_matrix() - _wind_speeds = df["speed"] + df = self.wind_matrix(other_data=other_data) + _other_data = df["other"] _wind_directions = df["direction"] if any( [ - _wind_speeds.shape != (24, 12), + _other_data.shape != (24, 12), _wind_directions.shape != (24, 12), - _wind_directions.shape != _wind_speeds.shape, - not np.array_equal(_wind_directions.index, _wind_speeds.index), - not np.array_equal(_wind_directions.columns, _wind_speeds.columns), + _wind_directions.shape != _other_data.shape, + not _wind_directions.index.equals(_other_data.index), + not _wind_directions.columns.equals(_other_data.columns), + # not np.array_equal(_wind_directions.index, _other_data.index), + # not np.array_equal(_wind_directions.columns, _other_data.columns), ] ): raise ValueError( - "The wind_speeds and wind_directions must cover all months of the " + "The other_data and wind_directions must cover all months of the " "year, and all hours of the day, and align with each other." 
) cmap = kwargs.pop("cmap", "YlGnBu") - vmin = kwargs.pop("vmin", _wind_speeds.values.min()) - vmax = kwargs.pop("vmax", _wind_speeds.values.max()) - cbar_title = kwargs.pop("cbar_title", "m/s") + vmin = kwargs.pop("vmin", _other_data.values.min()) + vmax = kwargs.pop("vmax", _other_data.values.max()) + cbar_title = kwargs.pop("cbar_title", None) + unit = kwargs.pop("unit", None) norm = kwargs.pop("norm", Normalize(vmin=vmin, vmax=vmax, clip=True)) mapper = kwargs.pop("mapper", ScalarMappable(norm=norm, cmap=cmap)) - pc = ax.pcolor(_wind_speeds, cmap=cmap, vmin=vmin, vmax=vmax, **kwargs) + pc = ax.pcolor(_other_data, cmap=cmap, vmin=vmin, vmax=vmax, **kwargs) _x = -np.sin(np.deg2rad(_wind_directions.values)) _y = -np.cos(np.deg2rad(_wind_directions.values)) direction_matrix = angle_from_north([_x, _y]) - if (show_arrows): + if show_arrows: + arrow_scale = 0.8 ax.quiver( np.arange(1, 13, 1) - 0.5, np.arange(0, 24, 1) + 0.5, - _x * _wind_speeds.values / 2, - _y * _wind_speeds.values / 2, + (_x * _other_data.values / 2) * arrow_scale, + (_y * _other_data.values / 2) * arrow_scale, pivot="mid", fc="white", ec="black", @@ -1693,7 +1728,7 @@ def plot_windmatrix( if show_values: for _xx, col in enumerate(_wind_directions.values.T): for _yy, _ in enumerate(col.T): - local_value = _wind_speeds.values[_yy, _xx] + local_value = _other_data.values[_yy, _xx] cell_color = mapper.to_rgba(local_value) text_color = contrasting_color(cell_color) # direction text @@ -1706,16 +1741,17 @@ def plot_windmatrix( va="bottom", fontsize="xx-small", ) - # speed text + # other_data text ax.text( _xx + 1, _yy + 1, - f"{_wind_speeds.values[_yy][_xx]:0.1f}m/s", + f"{_other_data.values[_yy][_xx]:0.1f}{unit}", color=text_color, ha="right", va="top", fontsize="xx-small", ) + ax.set_xticks(np.arange(1, 13, 1) - 0.5) ax.set_xticklabels([calendar.month_abbr[i] for i in np.arange(1, 13, 1)]) ax.set_yticks(np.arange(0, 24, 1) + 0.5) @@ -1817,13 +1853,15 @@ def plot_windrose( directions (int, optional): 
The number of directions to use. Defaults to 36. other_data (list[float], optional): - A list of other data to bin by direction. If None, then wind speed will be used. + A list of other data to bin by direction. + If None, then wind speed will be used. other_bins (list[float]): - The other data bins to use for the histogram. These bins are right inclusive. If other data is None, - then the default Beaufort bins will be used, otherwise 11 evenly spaced bins will be used. + The other data bins to use for the histogram. These bins are right inclusive. + If other data is None, then the default Beaufort bins will be used, + otherwise 11 evenly spaced bins will be used. colors: (str | tuple[float] | Colormap, optional): - A list of colors to use for the other bins. May also be a colormap. Defaults to the colors used for - Beaufort wind comfort categories. + A list of colors to use for the other bins. May also be a colormap. + Defaults to the colors used for Beaufort wind comfort categories. title (str, optional): title to display above the plot. Defaults to the source of this wind object. legend (bool, optional): @@ -1871,12 +1909,11 @@ def plot_windrose( f"colors must be a list of length {len(binned.columns)}, or a colormap." ) - # HACK start - a fix introduced here to ensure that bar ends are curved when using a polar plot. + # HACK to ensure that bar ends are curved when using a polar plot. 
fig = plt.figure() rect = [0.1, 0.1, 0.8, 0.8] hist_ax = plt.Axes(fig, rect) hist_ax.bar(np.array([1]), np.array([1])) - # HACK end if title is None or title == "": ax.set_title(textwrap.fill(f"{self.source}", 75)) diff --git a/LadybugTools_Engine/Python/tests/test_plot.py b/LadybugTools_Engine/Python/tests/test_plot.py index efa6a385..36cf6569 100644 --- a/LadybugTools_Engine/Python/tests/test_plot.py +++ b/LadybugTools_Engine/Python/tests/test_plot.py @@ -4,37 +4,32 @@ import pytest from ladybug.analysisperiod import AnalysisPeriod from ladybug_comfort.collection.utci import UTCI -from ladybugtools_toolkit.ladybug_extension.datacollection import collection_to_series +from ladybugtools_toolkit.ladybug_extension.datacollection import \ + collection_to_series +from ladybugtools_toolkit.plot._degree_days import (cooling_degree_days, + degree_days, + heating_degree_days) from ladybugtools_toolkit.plot._diurnal import diurnal, stacked_diurnals from ladybugtools_toolkit.plot._heatmap import heatmap -from ladybugtools_toolkit.plot._sunpath import sunpath -from ladybugtools_toolkit.plot._degree_days import ( - cooling_degree_days, - heating_degree_days, - degree_days, -) from ladybugtools_toolkit.plot._psychrometric import psychrometric -from ladybugtools_toolkit.plot._utci import ( - utci_comfort_band_comparison, - utci_comparison_diurnal_day, - utci_day_comfort_metrics, - utci_heatmap_difference, - utci_heatmap_histogram, - utci_histogram, - utci_journey, - utci_pie, -) +from ladybugtools_toolkit.plot._radiant_cooling_potential import \ + radiant_cooling_potential +from ladybugtools_toolkit.plot._seasonality import seasonality_comparison +from ladybugtools_toolkit.plot._sunpath import sunpath +from ladybugtools_toolkit.plot._utci import (utci_comfort_band_comparison, + utci_comparison_diurnal_day, + utci_day_comfort_metrics, + utci_heatmap_difference, + utci_heatmap_histogram, + utci_histogram, utci_journey, + utci_pie, utci_shade_benefit) from 
ladybugtools_toolkit.plot.colormaps import colormap_sequential from ladybugtools_toolkit.plot.spatial_heatmap import spatial_heatmap -from ladybugtools_toolkit.plot.utilities import ( - contrasting_color, - create_triangulation, - lighten_color, - relative_luminance, -) -from ladybugtools_toolkit.plot._radiant_cooling_potential import ( - radiant_cooling_potential, -) +from ladybugtools_toolkit.plot.utilities import (contrasting_color, + create_triangulation, + lighten_color, + relative_luminance) +from matplotlib.figure import Figure from . import EPW_OBJ @@ -45,7 +40,6 @@ EPW_OBJ.wind_speed, ).universal_thermal_climate_index - def test_cooling_degree_days(): """_""" assert isinstance(cooling_degree_days(epw=EPW_OBJ), plt.Axes) @@ -126,9 +120,7 @@ def test_lighten_color(): def test_colormap_sequential(): """_""" - assert sum(colormap_sequential("red", "green", "blue")(0.25)) == pytest.approx( - 1.750003844675125, rel=0.01 - ) + assert sum(colormap_sequential("red", "green", "blue")(0.25)) == pytest.approx(1.750003844675125, rel=0.01) def test_spatial_heatmap(): @@ -160,9 +152,7 @@ def test_sunpath(): assert isinstance( sunpath( EPW_OBJ.location, - analysis_period=AnalysisPeriod( - st_month=3, end_month=4, st_hour=9, end_hour=18 - ), + analysis_period=AnalysisPeriod(st_month=3, end_month=4, st_hour=9, end_hour=18), data_collection=EPW_OBJ.dry_bulb_temperature, cmap="inferno", ), @@ -172,9 +162,7 @@ def test_sunpath(): assert isinstance( sunpath( EPW_OBJ.location, - analysis_period=AnalysisPeriod( - st_month=3, end_month=4, st_hour=9, end_hour=18 - ), + analysis_period=AnalysisPeriod(st_month=3, end_month=4, st_hour=9, end_hour=18), cmap="inferno", ), plt.Axes, @@ -184,9 +172,7 @@ def test_sunpath(): def test_timeseries_diurnal(): """_""" - assert isinstance( - diurnal(collection_to_series(EPW_OBJ.dry_bulb_temperature)), plt.Axes - ) + assert isinstance(diurnal(collection_to_series(EPW_OBJ.dry_bulb_temperature)), plt.Axes) assert isinstance( 
diurnal(collection_to_series(EPW_OBJ.dry_bulb_temperature), period="daily"), plt.Axes, @@ -230,15 +216,11 @@ def test_timeseries_diurnal(): def test_heatmap(): """_""" - assert isinstance( - heatmap(collection_to_series(EPW_OBJ.dry_bulb_temperature)), plt.Axes - ) + assert isinstance(heatmap(collection_to_series(EPW_OBJ.dry_bulb_temperature)), plt.Axes) plt.close("all") mask = np.random.random(8760) > 0.5 - assert isinstance( - heatmap(collection_to_series(EPW_OBJ.dry_bulb_temperature), mask=mask), plt.Axes - ) + assert isinstance(heatmap(collection_to_series(EPW_OBJ.dry_bulb_temperature), mask=mask), plt.Axes) plt.close("all") mask_bad = np.random.random(10) > 0.5 @@ -273,9 +255,7 @@ def test_utci_day_comfort_metrics(): utci_day_comfort_metrics( collection_to_series(LB_UTCI_COLLECTION), collection_to_series(EPW_OBJ.dry_bulb_temperature), - collection_to_series( - EPW_OBJ.dry_bulb_temperature, "Mean Radiant Temperature (C)" - ), + collection_to_series(EPW_OBJ.dry_bulb_temperature, "Mean Radiant Temperature (C)"), collection_to_series(EPW_OBJ.relative_humidity), collection_to_series(EPW_OBJ.wind_speed), month=6, @@ -288,9 +268,7 @@ def test_utci_day_comfort_metrics(): def test_utci_heatmap_difference(): """_""" - assert isinstance( - utci_heatmap_difference(LB_UTCI_COLLECTION, LB_UTCI_COLLECTION - 3), plt.Axes - ) + assert isinstance(utci_heatmap_difference(LB_UTCI_COLLECTION, LB_UTCI_COLLECTION - 3), plt.Axes) plt.close("all") @@ -359,3 +337,16 @@ def test_utci_pie(): """_""" assert isinstance(utci_pie(LB_UTCI_COLLECTION), plt.Axes) plt.close("all") + + +def test_shade_benefit_plot() -> None: + """_""" + unshaded_utci = collection_to_series(LB_UTCI_COLLECTION) + shaded_utci = unshaded_utci + np.random.randint(-5, 5, len(unshaded_utci)) + + assert isinstance(utci_shade_benefit(unshaded_utci=unshaded_utci, shaded_utci=shaded_utci), Figure) + plt.close("all") + +def test_seasonality_plot() -> None: + """_""" + assert isinstance(seasonality_comparison(epw=EPW_OBJ), 
plt.Axes)