Source code for pywasp.wasp.generalized_wind_climate

"""Generalized Wind Climate

Extends `windkit.wind_climate.generalized_wind_climate` to include interpolation.
"""

__all__ = [
    "interpolate_gwc",
]

import logging
import numpy as np
import xarray as xr
import warnings
from scipy.interpolate import griddata as sp_griddata
from lxml import etree

import windkit as wk
from windkit.wind_climate.generalized_wind_climate import (
    _validate_gwc_wrapper_factory,
    REQ_DIMS_GWC,
)  # pylint: disable=unused-wildcard-import
from windkit.wind_climate.geostrophic_wind_climate import is_geowc, REQ_DIMS_GEOWC
from windkit.wind_climate.binned_wind_climate import (
    _bwc_from_counts,
)
from windkit.weibull import (
    fit_weibull_wasp_m1_m3_fgtm,
    weibull_freq_gt_mean,
    weibull_moment,
)
from windkit.xarray_structures.metadata import _update_history
from windkit import spatial as wksp
from windkit.spatial import (
    get_crs,
    reproject,
    count_spatial_points,
    equal_spatial_shape,
    interp_structured_like,
    interp_unstructured_like,
    is_raster,
)

from pywasp.io import _nat_neighbor_interp as natn
from pywasp.core import Rvea0326
from pywasp.wasp.binned_wind_climate import bwc_resample_wsbins_like
from pywasp.wasp.meso_climate import get_climate
from pywasp.wasp.air_density import _get_air_density_source_data


_GWC_INTERP = Rvea0326.gwc.gwcinterpolate

logger = logging.getLogger(__name__)


def _2D_to_out_locs(gwc, out_locs, exclude_dims=REQ_DIMS_GWC):
    # Set structure of 2D point gwc object to match that of out_locs
    if wksp.is_raster(out_locs):
        gwc = wksp.to_raster(gwc, True)

    if wksp._is_vertical(out_locs):
        return xr.broadcast(gwc, out_locs, exclude=exclude_dims)[0]
    else:
        return gwc


def _broadcast_single_point(gwc, out_locs):
    REQ_DIMS = REQ_DIMS_GEOWC if is_geowc(gwc) else REQ_DIMS_GWC

    # Drop spatial information from gwc
    gwc_pt = wksp._spatial_stack(gwc, get_crs(out_locs), revertable=False).squeeze(
        "point", drop=True
    )
    # Broadcast single point across entire domain, first output is gwc
    res = gwc_pt.broadcast_like(out_locs, exclude=REQ_DIMS)
    # Coordinates of the broadcast dimensions are not copied over, so take them from out_locs
    for d in res.dims:
        if d not in res.coords:
            res.coords[d] = out_locs.coords[d]

    return res
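
# A minimal toy illustration of the broadcasting step used above (assumed toy
# data, not part of the module's API): a dataset defined at a single point is
# expanded to every location of a target grid, while the dims passed as
# "exclude" are left untouched; missing coordinates are then filled from the
# target, as done in the loop above.
#
#     import numpy as np
#     import xarray as xr
#
#     pt = xr.Dataset({"A": ("sector", np.ones(12))})
#     grid = xr.Dataset(coords={"west_east": np.arange(5), "south_north": np.arange(4)})
#     expanded = pt.broadcast_like(grid, exclude=["sector"])
#     # expanded["A"] now also carries the west_east and south_north dimensions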


def interpolate_gwc(
    gwc, /, output_locs, method="given", use_moments=True, engine="fortran"
):
    """Spatially interpolate GWC data to a grid of locations.

    The interpolation is done in several steps:

    1. Calculate the moments of the Weibull distribution from the A and k parameters.
    2. Spatially interpolate the moments to the output locations.
    3. Fit new A and k parameters from the interpolated moments.

    The first and third moments as well as the frequency of wind speeds greater
    than the mean are used, corresponding to the WAsP methodology.

    .. warning::
        This function is experimental and its signature may change.
        The default engine is "fortran", but this will be deprecated in the
        future and the default will be "windkit".

    Parameters
    ----------
    gwc : xr.Dataset
        GWC dataset.
    output_locs : windkit.spatial.BBox
        Output locations.
    method : str, optional
        Interpolation method. By default "given". Options are:

        - "given" for using the given values in the GWC dataset (shapes must be the same)
        - "nearest" for nearest neighbor interpolation
        - "linear" for bilinear interpolation
        - "cubic" for bicubic interpolation
        - "natural" for natural neighbor interpolation
    use_moments : bool
        Only used when the gwc is a geostrophic binned wind climate. Whether to
        use the moments in the binned wind climate for interpolation or
        interpolate the histogram directly using the specified 'method'.
    engine : {'windkit', 'fortran'}
        Backend to use for interpolation.

    Returns
    -------
    xr.Dataset
        Interpolated GWC dataset.

    Raises
    ------
    ValueError
        If the GWC and output locations do not have the same CRS.
    ValueError
        If the GWC contains any of "__m1__", "__m3__", "__fgtm__". These are
        reserved for internal use.
    ValueError
        If the shape of GWC and output locations are not the same when method
        is "given".
    UserWarning
        If the GWC is in geographic coordinates and method is "nearest",
        "linear", "cubic", or "natural". Interpolation in geographic
        coordinates can be inaccurate.
    """
    if engine == "windkit":
        return _interpolate_gwc(
            gwc, output_locs, method=method, use_moments=use_moments
        )
    else:
        warnings.warn(
            "The 'fortran' engine is deprecated. Use 'windkit' instead!",
            FutureWarning,
        )
        return _interpolate_gwc_pw(gwc, output_locs, method=method)
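
# Illustrative usage sketch (comments only; the input file, the output grid and
# the reader call are assumptions about the caller's environment, not part of
# this module):
#
#     import windkit as wk
#     from pywasp.wasp.generalized_wind_climate import interpolate_gwc
#
#     gwc = wk.read_gwc("site.lib")   # hypothetical generalized wind climate file
#     output_locs = ...               # any windkit spatial grid / point dataset
#     gwc_fine = interpolate_gwc(gwc, output_locs, method="linear", engine="windkit")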


def _interpolate_gwc(gwc, /, output_locs, method="given", use_moments=True):
    """
    Spatially interpolate GWC data to a grid of locations.

    The interpolation is done in several steps:

    1. Calculate the moments of the Weibull distribution from the A and k parameters.
    2. Spatially interpolate the moments to the output locations.
    3. Fit new A and k parameters from the interpolated moments.

    The first and third moments as well as the frequency of wind speeds greater
    than the mean are used, corresponding to the WAsP methodology.

    Parameters
    ----------
    gwc : xr.Dataset
        GWC dataset.
    output_locs : windkit.spatial.BBox
        Output locations.
    method : str, optional
        Interpolation method. By default "given". Options are:

        - "given" for using the given values in the GWC dataset; in this case
          the shapes of the output dataset and the GWC dataset must be the same.
        - "nearest" for nearest neighbor interpolation
        - "linear" for bilinear interpolation
        - "cubic" for bicubic interpolation
        - "natural" for natural neighbor interpolation
    use_moments : bool
        Only used when the gwc is a geostrophic binned wind climate. Whether to
        use the moments in the binned wind climate for interpolation or
        interpolate the histogram directly using the specified 'method'.

    Returns
    -------
    xr.Dataset
        Interpolated GWC dataset.

    Raises
    ------
    ValueError
        If the GWC contains any of "__m1__", "__m3__", "__fgtm__". These are
        reserved for internal use.
    ValueError
        If the shape of GWC and output locations are not the same when method
        is "given".
    UserWarning
        If the GWC is in geographic coordinates and method is "nearest",
        "linear", "cubic", or "natural". Interpolation in geographic
        coordinates can be inaccurate.
    """
    crs_gwc = get_crs(gwc)
    crs_out = get_crs(output_locs)
    is_geowc_ = is_geowc(gwc)

    if crs_out.is_geographic and method in ["nearest", "linear", "cubic", "natural"]:
        warnings.warn(
            "Interpolation of GWCs in geographic coordinates can be inaccurate."
        )

    if not crs_gwc.equals(crs_out):
        gwc = reproject(gwc.copy(), crs_out)

    if count_spatial_points(gwc) == 1:
        gwc_out = _broadcast_single_point(gwc, output_locs)
        return _update_history(gwc_out)

    if method == "given":
        gwc_pt = wksp.to_point(gwc)
        out_locs_pt = wksp.to_point(output_locs)
        if not equal_spatial_shape(gwc_pt, out_locs_pt):
            raise ValueError(
                "No interpolation specified, but gwc and out_locs are not "
                "spatially equal. Please specify an interpolation method."
            )
        return _update_history(gwc)

    if any(var in gwc.data_vars for var in ["__m1__", "__m3__", "__fgtm__"]):
        raise ValueError(
            "GWC cannot contain any of '__m1__', '__m3__', '__fgtm__'. "
            "These are reserved for internal use."
        )

    if is_geowc_:
        # These are additional variables that have to be interpolated for the
        # geostrophic wind climate
        rou_rose_vars = ["__logz0meso__", "flow_sep_height", "slfmeso", "displ"]
        gwc["__logz0meso__"] = np.log(gwc["z0meso"])
        if use_moments and method != "nearest":
            geobwc = _bwc_from_counts(gwc["geo_wv_freq"])
            gwc[["wsfreq", "wdfreq"]] = geobwc[["wsfreq", "wdfreq"]]
            gwc["__m1__"] = wk.mean_ws_moment(gwc, 1, bysector=True)
            gwc["__m3__"] = wk.mean_ws_moment(gwc, 3, bysector=True)
            gwc["__fgtm__"] = wk.ws_freq_gt_mean(gwc, bysector=True)
            gwc = gwc[
                ["__m1__", "__m3__", "__fgtm__", "wdfreq", "geo_wv_freq", "geo_turn"]
                + rou_rose_vars
            ]
        else:
            gwc = gwc[["geo_wv_freq", "geo_turn"] + rou_rose_vars]
    else:
        gwc["__m1__"] = weibull_moment(gwc.A, gwc.k, 1)
        gwc["__m3__"] = weibull_moment(gwc.A, gwc.k, 3)
        gwc["__fgtm__"] = weibull_freq_gt_mean(gwc.A, gwc.k)
        gwc = gwc.drop_vars(["A", "k"], errors="ignore")

    if is_raster(gwc) and method != "natural":
        gwc_out = interp_structured_like(gwc, output_locs, method=method)
        if is_geowc_:
            gwc_out["geo_wv_freq"] = gwc_out["geo_wv_freq"].where(
                gwc_out["geo_wv_freq"] >= 0, 0.0
            )
            if use_moments and method != "nearest":
                # We need the histogram in traditional wsfreq/wdfreq format for
                # bwc_resample_wsbins
                geowc_freq = _bwc_from_counts(gwc_out["geo_wv_freq"])[
                    ["wsfreq", "wdfreq"]
                ]
                # Calculate A and k using interpolated moments
                A, k = fit_weibull_wasp_m1_m3_fgtm(
                    gwc_out["__m1__"], gwc_out["__m3__"], gwc_out["__fgtm__"]
                )
                geowc_freq["A"] = A
                geowc_freq["k"] = k
                # Resample histogram to the weibull distributions given by the new A and k
                geowc_freq = bwc_resample_wsbins_like(
                    geowc_freq, geowc_freq, fit_weibull=False, errors="ignore"
                )
                geowc_freq["geo_wv_freq"] = geowc_freq["wsfreq"] * geowc_freq["wdfreq"]
                geowc_freq["geo_turn"] = (
                    gwc_out["geo_turn"].dims,
                    gwc_out["geo_turn"].values,
                )
                geowc_freq[rou_rose_vars] = gwc_out[rou_rose_vars]
                gwc_out = geowc_freq.drop_vars(["wsfreq", "wdfreq"], errors="ignore")
    else:
        gwc_out = interp_unstructured_like(gwc, output_locs, method=method)

    if is_geowc_:
        gwc_out["z0meso"] = np.exp(gwc_out["__logz0meso__"])

    if not is_geowc(gwc_out):
        gwc_out["wdfreq"] = gwc_out["wdfreq"] / gwc_out["wdfreq"].sum(dim="sector")
        A, k = fit_weibull_wasp_m1_m3_fgtm(
            gwc_out["__m1__"], gwc_out["__m3__"], gwc_out["__fgtm__"]
        )
        gwc_out["A"] = A
        gwc_out["k"] = k

    gwc_out = gwc_out.drop_vars(
        ["__m1__", "__m3__", "__fgtm__", "__logz0meso__"], errors="ignore"
    )

    return _update_history(gwc_out)
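
# The moment-based fit above uses the standard Weibull relations
# E[U**n] = A**n * gamma(1 + n/k) and P(U > E[U]) = exp(-gamma(1 + 1/k)**k).
# The helper below is a minimal NumPy/SciPy sketch of those forward relations,
# added for illustration only; it is not used by the functions in this module
# and assumes only the textbook conventions, not windkit's exact implementation.
def _weibull_m1_m3_fgtm_sketch(A, k):
    """Return (m1, m3, fgtm) for Weibull scale A and shape k (illustrative only)."""
    from scipy.special import gamma  # local import, only needed for this sketch

    m1 = A * gamma(1.0 + 1.0 / k)  # first moment (mean wind speed)
    m3 = A**3 * gamma(1.0 + 3.0 / k)  # third moment
    fgtm = np.exp(-gamma(1.0 + 1.0 / k) ** k)  # frequency of exceeding the mean
    return m1, m3, fgtm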


@_validate_gwc_wrapper_factory(run_extra_checks=False)
def _interpolate_gwc_pw(
    gwc, out_locs, method="given"
):  # pylint: disable=too-many-locals
    """Interpolate gwc object to new grid with optional reprojection

    Parameters
    ----------
    gwc : xarray.Dataset
        PyWAsP Generalized Wind Climate dataset
    out_locs : xarray.Dataset
        Dataset with at least west_east, south_north dimensions and a crs
        coordinate.
    method : str
        "nearest" for nearest neighbor interpolation.
        "natural" for natural neighbor interpolation.
        "given" ignores spatial information, but validates that the data has
        the same spatial size after both gwc and out_locs have been converted
        to the point spatial structure. Used most often for cross-predictions,
        where the gwc and the output have different locations.

    Returns
    -------
    xr.Dataset
        PyWAsP Generalized Wind Climate dataset on new grid
    """
    if method is None:
        warnings.warn(
            "The interpolation method None is deprecated and now defaults to 'given'.",
            FutureWarning,
        )
        method = "given"

    if (
        wksp.get_crs(gwc).is_geographic or wksp.get_crs(out_locs).is_geographic
    ) and method in ["nearest", "natural"]:
        warnings.warn(
            "This dataset has geographic coordinates. Querying for nearest "
            "neighbour is unreliable, as points that appear far away in "
            "latitude/longitude coordinate space may actually be close, "
            "particularly near the poles!"
        )

    # Set variables needed for all methods
    out_crs = wksp.get_crs(out_locs)
    gwc_is_single_pt = (gwc["south_north"].size == 1) and (gwc["west_east"].size == 1)
    if "height" in gwc.coords:
        gwc_is_single_pt = gwc_is_single_pt and (gwc["height"].size == 1)

    #####################################################################################
    # Allow for "given" option, which returns the input dataset if spatially equal
    #####################################################################################
    if method == "given":
        gwc_pt = wksp.to_point(gwc)
        out_locs_pt = wksp.to_point(out_locs)
        if not equal_spatial_shape(gwc_pt, out_locs_pt):
            raise ValueError(
                "No interpolation specified, but gwc and out_locs are not "
                "spatially equal. Please specify an interpolation method."
            )
        return _update_history(gwc)

    #####################################################################################
    # Return single point extended across all of the output points if only one GWC
    #
    # This is handled by broadcasting the GWC point to all of the spatial dimensions of
    # the out_locs array. Therefore, we only have to spatially stack the GWC object, and
    # drop the point dimension and all associated coordinates
    #####################################################################################
    elif gwc_is_single_pt:
        logger.info("Applying single point GWC to all output points")
        res = _broadcast_single_point(gwc, out_locs)

    #####################################################################################
    # Nearest Neighbor Interpolation
    #
    # We use the scipy.interpolate.griddata function to get the indices of each GWC point
    # for each associated out_locs point. The GWC data is always spatially stacked, but
    # the out_locs can be any spatial structure. We handle the data in two steps: first,
    # we process it in 2D depending on whether it is a raster structure or a point
    # structure in those two dimensions, i.e. whether it has dimensions south_north and
    # west_east, or only a "point" or "stacked_point" dimension. Then we broadcast that
    # result to all heights, if there is a height dimension.
    #####################################################################################
    elif (not gwc_is_single_pt) and (method == "nearest"):
        logger.info("Nearest neighbor interpolation from scipy.interpolate.griddata.")
        # Create a 2D tuple of the input points, GWC doesn't have height at this time
        gwc_pt = wksp._spatial_stack(gwc, out_crs, remove_height=True, revertable=False)
        points = (gwc_pt.west_east, gwc_pt.south_north)

        # If raster, we need to stack first before combining
        if wksp.is_raster(out_locs):
            out_locs_pt = wksp._spatial_stack(out_locs, remove_height=True)
        else:
            out_locs_pt = out_locs

        # Get indices of the nearest point and reshape, this will always have a point
        # structure, as it will follow what exists from the gwc_pt object.
        xi = (out_locs_pt.west_east, out_locs_pt.south_north)
        idx = sp_griddata(points, np.arange(gwc_pt.point.size), xi, method="nearest")
        gwc_out_2d = gwc_pt.isel(point=idx.flatten().astype("int"))

        # Update coordinates
        if wksp.is_stacked_point(out_locs):
            gwc_out_2d = gwc_out_2d.rename_dims(point="stacked_point")
        gwc_out_2d = gwc_out_2d.assign_coords(
            west_east=out_locs_pt["west_east"], south_north=out_locs_pt["south_north"]
        )

        # Set structure to match that of out_locs
        res = _2D_to_out_locs(gwc_out_2d, out_locs)

    elif (not gwc_is_single_pt) and (method == "natural"):
        #################################################################################
        # Natural Neighbors Interpolation
        #
        # Use a spatially stacked GWC and a stacked_point formatted out_locs to do a 2D
        # interpolation.
        #################################################################################
        gwc_pt = wksp._spatial_stack(gwc, out_crs, revertable=False)

        # Locate the natural neighbors
        members, delauny_tri, tri_info = natn.find_natural_neighbors(gwc_pt, out_locs)

        # Drop members (out_locs) that don't have any neighbors
        gen = [(key, val) for (key, val) in members.items() if len(val) != 0]

        # Create 2d list of points in output grid [x, y]
        out_locs_pt = wksp.to_stacked_point(out_locs)
        grid_points = np.vstack([out_locs_pt.west_east, out_locs_pt.south_north]).T

        # Create output arrays
        natural = wk.create_gwc(
            out_locs_pt,
            gwc_pt.sizes["sector"],
            gen_heights=gwc_pt["gen_height"],
            gen_roughnesses=gwc_pt["gen_roughness"],
            not_empty=False,
        )
        natural = wksp.to_stacked_point(wksp._struct._from_scalar(natural))
        if wksp._is_vertical(natural):
            natural = natural.isel(height=0, drop=True)
        natural = natural.transpose(
            "stacked_point", "sector", "gen_height", "gen_roughness"
        )

        # Loop over each member that has neighbors
        for k_member, v_member in gen:
            # Calculate the weighting from the natural neighbors
            wgts, nn_pts = natn.nn_weights(
                gwc_pt.west_east,
                gwc_pt.south_north,
                grid_points[k_member],
                delauny_tri,
                v_member,
                tri_info,
            )

            # Collect neighbors for member, and transpose to match fortran's order
            gwc_neigh = gwc_pt.isel(point=nn_pts).transpose(
                "point",
                "sector",
                "gen_height",
                "gen_roughness",
            )

            wgts = wgts.astype(np.float32)
            A_loc, k_loc, f_loc = _GWC_INTERP(
                gwc_neigh.A, gwc_neigh.k, gwc_neigh.wdfreq, wgts
            )

            natural.A[k_member, ...] = A_loc
            natural.k[k_member, ...] = k_loc
            natural.wdfreq[k_member, ...] = f_loc

        # Set structure to match that of out_locs
        res = _2D_to_out_locs(natural, out_locs)

    else:
        raise ValueError(f"Unknown method ({method})")

    return _update_history(res.transpose(*(REQ_DIMS_GWC), ...))
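
# Minimal illustration of the index-lookup trick used in the "nearest" branch
# above (toy coordinates, assumed for the example): interpolating an integer
# index array with method="nearest" returns, for every output point, the index
# of its nearest input point, which can then be passed to .isel().
#
#     import numpy as np
#     from scipy.interpolate import griddata
#
#     x_in, y_in = np.array([0.0, 1.0, 2.0]), np.array([0.0, 1.0, 0.0])
#     x_out, y_out = np.array([0.1, 1.9]), np.array([0.0, 0.1])
#     idx = griddata((x_in, y_in), np.arange(x_in.size), (x_out, y_out), method="nearest")
#     # idx selects input point 0 for the first output point and point 2 for the second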


def _gwc_to_gwcfile(gwc, filename):
    crs = get_crs(gwc)
    if not crs.is_geographic:
        raise ValueError(
            "Generalized wind climates exported to a .gwc file must be in a "
            "geographic coordinate system, not in a projected coordinate system. "
            "You can use the windkit reproject method to achieve this."
        )

    ds_meso = get_climate(
        gwc, stab_source="ERA5", baro_source="ERA5", interp_method="linear"
    )
    ds_air_density = _get_air_density_source_data(gwc, source="ERA5")

    single_point_coords = ["south_north", "west_east"]
    coords = {
        coord: f"{gwc[coord].values.flatten()[0]}" for coord in single_point_coords
    }

    required_air_density_vars = [
        "temperature",
        "source_elevation",
        "surface_pressure",
        "specific_humidity",
        "lapse_rate",
    ]
    air_density_vars = {
        v: ds_air_density[v].values.flatten()[0] for v in required_air_density_vars
    }
    air_density_vars["temperature_celcius"] = air_density_vars["temperature"] - 273.15
    air_density_vars["relative_humidity"] = Rvea0326.air_density_externals_mod.rhforsh(
        air_density_vars["specific_humidity"],
        air_density_vars["temperature_celcius"],
        air_density_vars["surface_pressure"],
    )[0]
    air_density_vars["source_elevation_temperature"] = (
        float(air_density_vars["source_elevation"]) + 2
    )
    air_density_vars = {
        k: str(v) for k, v in air_density_vars.items()
    }  # convert everything to strings

    # Create the root element
    root = etree.Element(
        "RveaGeneralisedMeanWindClimate",
        FormatVersion="02.01.0001",
        LowestCompatibleFormatVersion="02.01.0001",
        SavingComponent="Rvea0360 version 01.02.0024",
        SaveTime="2024-11-27T09:12:13Z",
        Description="",
    )

    # Add NominalLocation element
    nominal_location = etree.SubElement(root, "NominalLocation")
    coordinate = etree.SubElement(
        nominal_location,
        "RveaLatitudeLongitudeCoordinate",
        FormatVersion="01.01.0001",
        EarliestCompatibleFormatVersion="01.01.0001",
    )
    etree.SubElement(coordinate, "Latitude", Degrees=coords["south_north"])
    etree.SubElement(coordinate, "Longitude", Degrees=coords["west_east"])

    # Add StandardConditions element
    standard_conditions = etree.SubElement(root, "StandardConditions")
    roughness_lengths = etree.SubElement(
        standard_conditions,
        "ReferenceRoughnessLengths",
        Count=f"{gwc.sizes['gen_roughness']}",
    )
    for value in gwc.gen_roughness.values:
        etree.SubElement(
            roughness_lengths,
            "RoughnessLength",
            AerodynamicRoughnessLengthMetres=f"{value}",
        )
    heights = etree.SubElement(
        standard_conditions, "ReferenceHeights", Count=f"{gwc.sizes['gen_height']}"
    )
    for value in gwc.gen_height.values:
        etree.SubElement(heights, "Height", HeightAglMetres=f"{value}")

    # Add BarometricReferenceData element
    barometric_data = etree.SubElement(
        root,
        "BarometricReferenceData",
        AltitudeAslForReferencePressure=air_density_vars["source_elevation"],
        AltitudeAslForReferenceTemperature=air_density_vars[
            "source_elevation_temperature"
        ],
        ReferencePressure=air_density_vars["surface_pressure"],
        ReferenceTemperatureCelcius=air_density_vars["temperature_celcius"],
        RelativeHumidity=air_density_vars["relative_humidity"],
        TemperatureLapseRate=air_density_vars["lapse_rate"],
    )
    context_info = etree.SubElement(barometric_data, "ContextInformation")
    climate_info = etree.SubElement(
        context_info, "ClimateInfoSourceSouvenir", VersionNumber="00.00.01"
    )
    etree.SubElement(
        climate_info,
        "RequestedLocation",
        LatitudeDegreesDecimal=coords["south_north"],
        LongitudeDegreesDecimal=coords["west_east"],
    )
    data_provenance = etree.SubElement(climate_info, "DataProvenance")
    etree.SubElement(data_provenance, "Origin").text = (
        "DTU 2020-11-25. Period: 2010-01-01-2020-01-01. Averaging method: mean of "
        "monthly means. Source data time interval: 1hr. Source data: ERA5 surface "
        "variables (https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-single-levels-monthly-means?tab=overview) "
        "and pressure levels (https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-pressure-levels-monthly-means)"
    )
    etree.SubElement(
        data_provenance,
        "SourceFile",
        SourceFileName=ds_air_density.attrs["source_file"],
        Checksum="557FF2A305D20C83CE58BBBE5AE2E0AD588671874799FCA9405FB0758F2C022F",
    )

    # Add GeostrophicShearRose element
    n_secs = ds_meso.sizes["sector"]
    shear_rose = etree.SubElement(
        root, "GeostrophicShearRose", CountOfSectors=f"{n_secs}"
    )
    shear_vectors = []
    for isec in range(n_secs):
        meso_sec = ds_meso.isel(sector=isec)
        shear_vectors.append(
            {
                "Index": f"{isec + 1}",
                "CentreAngleDegrees": f"{meso_sec['sector'].item()}",
                "SectorWidthDegrees": f"{360.0 / n_secs}",
                "Magnitude": f"{meso_sec['mean_dgdz'].item()}",
                "Direction": f"{meso_sec['mean_dgdz_dir'].item()}",
            }
        )
    for vector in shear_vectors:
        etree.SubElement(shear_rose, "GeostrophicShearVector", **vector)

    stability_inputs = etree.SubElement(root, "StabilityInputs")

    # Create StabilityInputsTstarRose element
    tstar_rose = etree.SubElement(
        stability_inputs, "StabilityInputsTstarRose", CountOfSectors=f"{n_secs}"
    )
    for isec in range(n_secs):
        meso_sec = ds_meso.isel(sector=isec)
        sector = {
            "Index": f"{isec + 1}",
            "CentreAngleDegrees": f"{meso_sec['sector'].item()}",
            "SectorWidthDegrees": f"{360.0 / n_secs}",
            "TStarMeanLand": f"{meso_sec['mean_temp_scale_land'].item()}",
            "TStarStdDevLand": f"{meso_sec['rms_temp_scale_land'].item()}",
            "TStarMeanWater": f"{meso_sec['mean_temp_scale_sea'].item()}",
            "TStarStdDevWater": f"{meso_sec['rms_temp_scale_sea'].item()}",
            "HstarMeanLand": f"{meso_sec['mean_pblh_scale_land'].item()}",
            "HstarMeanWater": f"{meso_sec['mean_pblh_scale_sea'].item()}",
        }
        etree.SubElement(tstar_rose, "StabilityInputsTstarSector", **sector)

    context_info = etree.SubElement(tstar_rose, "ContextInformation")
    source_info = etree.SubElement(context_info, "SourceInfo")
    etree.SubElement(source_info, "Insertion").text = (
        "Stability input data imported by WAsP GUI, version 12.09.0034 at "
        "2024-11-28T10:34:41"
    )
    etree.SubElement(
        source_info, "DataVersion", Composition="1.01.0001", Identity="1.01.0001"
    )
    data_provenance = etree.SubElement(source_info, "DataProvenance")
    etree.SubElement(data_provenance, "Origin").text = (
        "Rogier Floors from DTU Wind Energy. 2022-05-18 (baro) and 2023-01-25 (meso). "
        "Period: 2011-01-01 to 2020-12-31. Averaging method: mean of monthly means. "
        "Source data time interval: 1hr. Source data: ERA5 surface variables "
        "(https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-single-levels-monthly-means?tab=overview) "
        "and pressure levels (https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-pressure-levels-monthly-means)"
    )
    etree.SubElement(
        data_provenance,
        "SourceFile",
        SourceFileName="Era5baro.nc/Era5mesov1.nc",
        Checksum="F133016E7F13350D5C2532160980D2F168EF25A4751997B429B42314CDD0A180",
    )
    etree.SubElement(
        data_provenance,
        "RequestedLocation",
        LatitudeDegreesDecimal=coords["south_north"],
        LongitudeDegreesDecimal=coords["west_east"],
    )
    etree.SubElement(
        data_provenance,
        "SourceAssembly",
        AssemblyName="Rvea0359",
        AssemblyVersion="2.016",
    )

    n_secs = gwc.sizes["sector"]
    wind_roses = etree.SubElement(
        root, "WindRoses", CountOfSectors=f"{n_secs}", SectorOneCentreAngle="0.0"
    )
    for z0 in gwc.gen_roughness.values:
        for h in gwc.gen_height.values:
            weibull_wind_rose = etree.SubElement(
                wind_roses,
                "WindAtlasWeibullWindRose",
                ReferenceRoughnessLength=f"{z0}",
                ReferenceHeight=f"{h}",
                FormatVersion="2.0",
                CountOfSectors=f"{n_secs}",
            )
            weibull = gwc.sel(gen_roughness=z0, gen_height=h).squeeze()
            for isec in range(n_secs):
                ds_weibull_sec = weibull.isel(sector=isec)
                weibull_sec = {
                    "Index": f"{isec + 1}",
                    "CentreAngleDegrees": f"{ds_weibull_sec['sector'].item()}",
                    "SectorWidthDegrees": f"{360.0 / n_secs}",
                    "SectorFrequency": f"{ds_weibull_sec['wdfreq'].item()}",
                    "WeibullA": f"{ds_weibull_sec['A'].item()}",
                    "WeibullK": f"{ds_weibull_sec['k'].item()}",
                }
                etree.SubElement(weibull_wind_rose, "WeibullWind", **weibull_sec)

    context_info = etree.SubElement(root, "ContextInformation")
    source_info = etree.SubElement(context_info, "Generator")
    wasp_info = etree.SubElement(source_info, "WaspSystem")
    etree.SubElement(
        wasp_info, "CalculatingComponent", Name="Rvea0334", Version="1.02.0112"
    )
    flow_model = etree.SubElement(wasp_info, "FlowModel")
    etree.SubElement(flow_model, "Ibz", Version="Rvea0352/Rvea0358")

    etree.ElementTree(root).write(
        str(filename), pretty_print=True, xml_declaration=True, encoding="UTF-8"
    )
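
# Illustrative usage of the writer above (comments only; the variable and file
# name are assumptions):
#
#     _gwc_to_gwcfile(gwc_geographic, "site.gwc")  # gwc must be in a geographic CRS
#
# For reference, the element tree written by _gwc_to_gwcfile has the following
# top-level structure (reconstructed from the code above; attributes omitted):
#
#     RveaGeneralisedMeanWindClimate
#         NominalLocation / RveaLatitudeLongitudeCoordinate (Latitude, Longitude)
#         StandardConditions (ReferenceRoughnessLengths, ReferenceHeights)
#         BarometricReferenceData (+ ContextInformation / ClimateInfoSourceSouvenir)
#         GeostrophicShearRose (one GeostrophicShearVector per sector)
#         StabilityInputs / StabilityInputsTstarRose (one StabilityInputsTstarSector per sector)
#         WindRoses (one WindAtlasWeibullWindRose per gen_roughness x gen_height,
#                    each with one WeibullWind per sector)
#         ContextInformation / Generator / WaspSystem (CalculatingComponent, FlowModel)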