diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 969a879d..41cf7f95 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,12 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: debug-statements - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.6.8 + rev: v0.6.9 hooks: # Run the linter. - id: ruff diff --git a/docs/source/api/constants_api.md b/docs/source/api/constants_api.md index 1b992418..0565da39 100644 --- a/docs/source/api/constants_api.md +++ b/docs/source/api/constants_api.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The {mod}`~pyrealm.constants` module diff --git a/docs/source/api/core_api.md b/docs/source/api/core_api.md index 69eac468..a5460a49 100644 --- a/docs/source/api/core_api.md +++ b/docs/source/api/core_api.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The {mod}`~pyrealm.core` module diff --git a/docs/source/api/demography_api.md b/docs/source/api/demography_api.md index 9290a0b0..db9dd62e 100644 --- a/docs/source/api/demography_api.md +++ b/docs/source/api/demography_api.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The {mod}`~pyrealm.demography` module diff --git a/docs/source/api/pmodel_api.md b/docs/source/api/pmodel_api.md index 9bb4f669..b81a90d0 100644 --- a/docs/source/api/pmodel_api.md +++ b/docs/source/api/pmodel_api.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The {mod}`~pyrealm.pmodel` module diff --git a/docs/source/api/splash_api.md b/docs/source/api/splash_api.md index fe55c923..3cb0f3ac 100644 --- a/docs/source/api/splash_api.md +++ b/docs/source/api/splash_api.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The {mod}`~pyrealm.splash` module diff --git a/docs/source/api/tmodel_api.md b/docs/source/api/tmodel_api.md index b7fa9312..de3c0158 
100644 --- a/docs/source/api/tmodel_api.md +++ b/docs/source/api/tmodel_api.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The {mod}`~pyrealm.tmodel` module diff --git a/docs/source/conf.py b/docs/source/conf.py index 8fb7a28c..0b60ec4d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -12,6 +12,9 @@ from dataclasses import dataclass, field from datetime import datetime +# Import Matplotlib to avoid this message in notebooks: +# "Matplotlib is building the font cache; this may take a moment." +import matplotlib.pyplot # noqa: F401 import sphinxcontrib.bibtex.plugin from sphinxcontrib.bibtex.style.referencing import BracketStyle from sphinxcontrib.bibtex.style.referencing.author_year import AuthorYearReferenceStyle @@ -96,6 +99,7 @@ class MyReferenceStyle(AuthorYearReferenceStyle): ("py:class", "numpy._typing._array_like._ScalarType_co"), ("py:class", "numpy._typing._generic_alias.ScalarType"), ("py:class", "numpy.float32"), + ("py:class", "numpy.float64"), ("py:class", "numpy.int64"), ("py:class", "numpy.timedelta64"), ("py:class", "numpy.bool_"), diff --git a/docs/source/development/code_qa_and_typing.md b/docs/source/development/code_qa_and_typing.md index 848f7a5c..ed2c797d 100644 --- a/docs/source/development/code_qa_and_typing.md +++ b/docs/source/development/code_qa_and_typing.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Code quality and static typing diff --git a/docs/source/development/code_testing.md b/docs/source/development/code_testing.md index 43829394..3d76d292 100644 --- a/docs/source/development/code_testing.md +++ b/docs/source/development/code_testing.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Package testing and profiling diff --git a/docs/source/development/documentation.md b/docs/source/development/documentation.md index 0ce6b2f8..5552f055 100644 --- a/docs/source/development/documentation.md +++ b/docs/source/development/documentation.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Documentation @@ -60,6 +71,16 @@ kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: 
python + pygments_lexer: ipython3 + version: 3.11.9 --- ``` diff --git a/docs/source/development/github_actions.md b/docs/source/development/github_actions.md index d26a7452..a0815478 100644 --- a/docs/source/development/github_actions.md +++ b/docs/source/development/github_actions.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # GitHub Actions diff --git a/docs/source/development/overview.md b/docs/source/development/overview.md index 837b1efe..8138127f 100644 --- a/docs/source/development/overview.md +++ b/docs/source/development/overview.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Developing `pyrealm` diff --git a/docs/source/development/profiling_and_benchmarking.md b/docs/source/development/profiling_and_benchmarking.md index d705d43f..4570cf26 100644 --- a/docs/source/development/profiling_and_benchmarking.md +++ b/docs/source/development/profiling_and_benchmarking.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Profiling and benchmarking diff --git a/docs/source/development/pyrealm_build_data.md b/docs/source/development/pyrealm_build_data.md index b59ec2b2..ef4df8dc 100644 --- a/docs/source/development/pyrealm_build_data.md +++ b/docs/source/development/pyrealm_build_data.md @@ -5,144 +5,109 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- -# The `pyrealm_build_data` package +# The {mod}`~pyrealm_build_data` module -The `pyrealm` repository includes both the `pyrealm` package and the -`pyrealm_build_data` package. The `pyrealm_build_data` package contains datasets that -are used in the `pyrealm` build and testing process. This includes: - -* Example datasets that are used in the package documentation, such as simple spatial - datasets for showing the use of the P Model. -* "Golden" datasets for regression testing `pyrealm` implementations against the outputs - of other implementations. These datasets will include a set of input data and then - output predictions from other implementations. -* Datasets for providing profiling of `pyrealm` code and for benchmarking new versions - of the package code against earlier implementations to check for performance issues. 
- -Note that `pyrealm_build_data` is a source distribution only (`sdist`) component of -`pyrealm`, so is not included in binary distributions (`wheel`) that are typically -installed by end users. This means that files in `pyrealm_build_data` are not available -if a user has simply used `pip install pyrealm`: please *do not* use -`pyrealm_build_data` within the main `pyrealm` code. - -## Package contents - -The package is organised into submodules that reflect the data use or previous -implementation. - -### The `bigleaf` submodule - -This submodule contains benchmark outputs from the `bigleaf` package in `R`, which has -been used as the basis for core hygrometry functions. The `bigleaf_conversions.R` R -script runs a set of test values through `bigleaf`. The first part of the file prints -out some simple test values that have been used in package doctests and then the second -part of the file generates more complex benchmarking inputs that are saved, along with -`bigleaf` outputs as `bigleaf_test_values.json`. - -Running `bigleaf_conversions.R` requires an installation of R along with the `jsonlite` -and `bigleaf` packages, and the script can then be run from within the submodule folder -as: - -```sh -Rscript bigleaf_conversions.R +```{eval-rst} +.. automodule:: pyrealm_build_data + :autosummary: + :members: + :special-members: __init__ ``` -### The `rpmodel` submodule - -This submodule contains benchmark outputs from the `rpmodel` package in `R`, which has -been used as the basis for initial development of the standard P Model. - -#### Test inputs - -The `generate_test_inputs.py` file defines a set of constants for running P Model -calculations and then defines a set of scalar and array inputs for the forcing variables -required to run the P Model. The array inputs are set of 100 values sampled randomly -across the ranges of plausible forcing value inputs in order to benchmark the -calculations of the P Model implementation. All of these values are stored in the -`test_inputs.json` file. +## The `bigleaf` submodule -It requires `python` and the `numpy` package and can be run as: - -```sh -python generate_test_inputs.py +```{eval-rst} +.. automodule:: pyrealm_build_data.bigleaf + :autosummary: + :members: + :special-members: __init__ ``` -#### Simple `rpmodel` benchmarking - -The `test_outputs_rpmodel.R` contains R code to run the test input data set, and store -the expected predictions from the `rpmodel` package as `test_outputs_rpmodel.json`. It -requires an installation of `R` and the `rpmodel` package and can be run as: +## The `community` submodule -```sh -Rscript test_outputs_rpmodel.R +```{eval-rst} +.. automodule:: pyrealm_build_data.community + :autosummary: + :members: + :special-members: __init__ ``` -#### Global array test - -The remaining files in the submodule are intended to provide a global test dataset for -benchmarking the use of `rpmodel` on a global time-series, so using 3 dimensional arrays -with latitude, longitude and time coordinates. It is currently not used in testing -because of issues with the `rpmodel` package in version 1.2.0. It may also be replaced -in testing with the `uk_data` submodule, which is used as an example dataset in the -documentation. +## The `rpmodel` submodule -The files are: - -* pmodel_global.nc: An input global NetCDF file containing forcing variables at 0.5° - spatial resolution and for two time steps. -* test_global_array.R: An R script to run `rpmodel` using the dataset. 
-* rpmodel_global_gpp_do_ftkphio.nc: A NetCDF file containing `rpmodel` predictions using - corrections for temperature effects on the `kphio` parameter. -* rpmodel_global_gpp_no_ftkphio.nc: A NetCDF file containing `rpmodel` predictions with - fixed `kphio`. +```{eval-rst} +.. automodule:: pyrealm_build_data.rpmodel + :autosummary: + :members: + :special-members: __init__ +``` -To generate the predicted outputs again requires an R installation with the `rpmodel` -package: +## The `sandoval_kphio` submodule -```sh -Rscript test_global_array.R +```{eval-rst} +.. automodule:: pyrealm_build_data.sandoval_kphio + :autosummary: + :members: + :special-members: __init__ ``` -### The `subdaily` submodule +## The `splash` submodule -At present, this submodule only contains a single file containing the predictions for -the `BE_Vie` fluxnet site from the original implementation of the `subdaily` module, -published in {cite}`mengoli:2022a`. Generating these predictions requires an -installation of R and then code from the following repository: +```{eval-rst} +.. automodule:: pyrealm_build_data.splash + :autosummary: + :members: + :special-members: __init__ +``` -[https://github.com/GiuliaMengoli/P-model_subDaily](https://github.com/GiuliaMengoli/P-model_subDaily) +## The `subdaily` submodule -TODO - This submodule should be updated to include the required code along with the -settings files and a runner script to reproduce this code. Or possibly to checkout the -required code as part of a shell script. +```{eval-rst} +.. automodule:: pyrealm_build_data.subdaily + :autosummary: + :members: + :special-members: __init__ +``` -### The `t_model` submodule +## The `t_model` submodule -The `t_model.r` contains the original implementation of the T Model calculations in R -{cite:p}`Li:2014bc`. The `rtmodel_test_outputs.r` script sources this file and then -generates some simple bencmarking predictions, which are saved as `rtmodel_output.csv`. +```{eval-rst} +.. automodule:: pyrealm_build_data.t_model + :autosummary: + :members: + :special-members: __init__ +``` -To generate the predicted outputs again requires an R installation +## The `two_leaf` submodule -```sh -Rscript rtmodel_test_outputs.r +```{eval-rst} +.. automodule:: pyrealm_build_data.two_leaf + :autosummary: + :members: + :special-members: __init__ ``` -### The `uk_data` submodule +## The `uk_data` submodule -This submodule contains the Python script `create_2D_uk_inputs.py`, which is used to -generate the NetCDF output file `UK_WFDE5_FAPAR_2018_JuneJuly.nc`. This contains P Model -forcings for the United Kingdom at 0.5° spatial resolution and hourly temporal -resolution over 2 months (1464 temporal observations). It is used for demonstrating the -use of the subdaily P Model. - -The script is currently written with a hard-coded set of paths to key source data - the -WFDE5 v2 climate data and a separate source of interpolated hourly fAPAR. This should -probably be rewritten to generate reproducible content from publically available sources -of these datasets. +```{eval-rst} +.. 
automodule:: pyrealm_build_data.uk_data + :autosummary: + :members: + :special-members: __init__ +``` diff --git a/docs/source/development/release_process.md b/docs/source/development/release_process.md index e9ff6b00..27dd9021 100644 --- a/docs/source/development/release_process.md +++ b/docs/source/development/release_process.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Package release process diff --git a/docs/source/index.md b/docs/source/index.md index 43aef59b..32ec1e69 100644 --- a/docs/source/index.md +++ b/docs/source/index.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The `pyrealm` package diff --git a/docs/source/users/constants.md b/docs/source/users/constants.md index 899076b3..91600360 100644 --- a/docs/source/users/constants.md +++ b/docs/source/users/constants.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Package constants @@ -35,7 +46,7 @@ of default model constants. The core API and details for each class can be seen These can be used to generate the default set of model parameters: -```{code-cell} +```{code-cell} ipython3 from pyrealm.constants import CoreConst, TModelTraits core_const = CoreConst() @@ -47,7 +58,7 @@ print(tmodel_const) And individual values can be altered using the parameter arguments: -```{code-cell} +```{code-cell} ipython3 # Estimate processes under the moon's gravity... core_const_moon = CoreConst(k_G=1.62) # ... allowing a much greater maximum height @@ -61,7 +72,7 @@ In order to ensure that a set of parameters cannot change while models are being instances of these parameter classes are **frozen**. You cannot edit an existing instance and will need to create a new instance to use different parameters. -```{code-cell} +```{code-cell} ipython3 :tags: [raises-exception] core_const_moon.k_G = 9.80665 @@ -75,7 +86,7 @@ export of parameter settings to dictionaries and to JSON formatted files. The co shows these methods working. First, a trait definition in a JSON file is read into a dictionary: -```{code-cell} +```{code-cell} ipython3 import json import pprint @@ -88,7 +99,7 @@ the {meth}`~pyrealm.constants.base.ConstantsClass.from_dict` method. The {meth}`~pyrealm.constants.base.ConstantsClass.from_json` method allows this to be done more directly and the resulting instances are identical. 
-```{code-cell} +```{code-cell} ipython3 traits1 = TModelTraits.from_dict(trt_dict) traits2 = TModelTraits.from_json("../files/traits.json") diff --git a/docs/source/users/demography/canopy.md b/docs/source/users/demography/canopy.md index f9c0688c..b3c17943 100644 --- a/docs/source/users/demography/canopy.md +++ b/docs/source/users/demography/canopy.md @@ -1,17 +1,29 @@ --- jupytext: - formats: md:myst text_representation: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 +settings: + output_matplotlib_strings: remove --- -# Canopy model +# The canopy model :::{admonition} Warning @@ -20,176 +32,421 @@ notes and initial demonstration code. ::: -The canopy model uses the perfect plasticity approximation (PPA) {cite}`purves:2008a`, -which assumes that plants are always able to plastically arrange their crown within the -broader canopy of the community to maximise their crown size and fill the available -space $A$. When the area $A$ is filled, a new lower canopy layer is formed until all -of the individual crown area has been distributed across within the canopy. - -The key variables in calculating the canopy model are the crown projected area $A_p$ -and leaf projected area $\tilde{A}_{cp}(z)$, which are calculated for a stem -of a given size using the [crown model](./crown.md). - -```{code-cell} +```{code-cell} ipython3 from matplotlib import pyplot as plt +import matplotlib as mpl +from matplotlib.patches import Polygon import numpy as np import pandas as pd from pyrealm.demography.flora import PlantFunctionalType, Flora from pyrealm.demography.community import Community -from pyrealm.demography.crown import CrownProfile +from pyrealm.demography.crown import CrownProfile, get_crown_xy from pyrealm.demography.canopy import Canopy +from pyrealm.demography.t_model_functions import StemAllometry ``` -## Canopy closure and canopy gap fraction +The `canopy` module in `pyrealm` is used to calculate a model of the light environment +across all of cohorts within a plant [community](./community.md). Each cohort consists +of: -A simple method for finding the first canopy closure height is to find a height $z^*_1$ -at which the sum of crown projected area across all stems $N_s$ in a community equals $A$: +* a number of identically-sized individuals, +* of the same [plant functional type (PFT)](./flora.md), +* that share the same [stem allometry](./t_model.md) and [crown model](./crown.md). + +The purpose of the `canopy` module is to estimate how light is absorbed down through the +canopy and allow the absorption of incoming light at different heights in the canopy to +be partitioned across stems within each cohort. + +## Light extinction for a single stem + +The key variables in determining the light environment for a given stem are as follows: + +* The projected crown area ${A}_{p}(z)$ sets how crown area accumulates, given the crown + shape, from the top of the stem to the ground. +* The projected leaf area $\tilde{A}_{cp}(z)$ modifies the crown area to allow for the + vertical displacement of crown area by the crown gap fraction. 
+* The leaf area index $L$ for the PFT is a simple factor that sets the leaf density of + the crown, allowing stems with identical crown area to vary in the density of actual + leaf surface for light capture. Values of $L$ are always expressed as the area of leaf + surface per square meter. +* The extinction coefficient $k$ for a PFT sets how much light is absorbed when passing + through the leaf surface of that PFT. + +For a single stem, the fraction of light absorbed through the entire crown is described +by the Beer-Lambert law: $$ -\sum_1^{N_s}{ A_p(z^*_1)} = A +f_{abs} = 1 - e^{-kL} $$ -However, the canopy model is modified by a community-level -**canopy gap fraction** ($f_G$) that captures the overall proportion of the canopy area -that is left unfilled by canopy. This gap fraction, capturing processes such as crown -shyness, describes the proportion of open sky visible from the forest floor. This -gives the following definition of the height of canopy layer closure ($z^*_l$) for a -given canopy layer $l = 1, ..., l_m$: +However, to calculate a vertical profile of light extinction through a crown with total +area $A_c$ and maximum stem height $H$, that equation needs to be expanded to calculate +the fraction of $L$ that falls between pairs of vertical heights $z_a > z_b$. The actual +area amount of leaf area $A_l$ for an individual stem falling between those two heights +is simply the diffence in projected leaf area between the two heights: $$ -\sum_1^{N_s}{ A_p(z^*_l)} = l A(1 - f_G) +A_{l[a,b]} = \tilde{A}_{cp}(z_a) - \tilde{A}_{cp}(z_b) $$ -The set of heights $z^*$ can be found numerically by using a root solver to find -values of $z^*_l$ for $l = 1, ..., l_m$ that satisfy: +Given that information, the calculation of $f_{abs}$ becomes: $$ -\sum_1^{N_s}{ A_p(z^*_l)} - l A(1 - f_G) = 0 +f_{abs[a,b]} = 1 - e^{\left(-k\dfrac{L A_{l[a,b]}}{A_c}\right)} $$ -The total number of layers $l_m$ in a canopy, where the final layer may not be fully -closed, can be found given the total crown area across stems as: +When $z_a = H$ and $z_b=0$, then $A_{l[a,b]} = A_c$ and hence simplifies to the original +equation. + +The code below creates a simple example community containing a single cohort containing +a single stem and then calculates the light extinction profile down through the canopy. + +```{code-cell} ipython3 +# Create a simple community with a single stem +simple_pft = PlantFunctionalType(name="defaults", m=2, n=2) +simple_flora = Flora([simple_pft]) +stem_dbh = np.array([0.5]) +simple_stem = StemAllometry(stem_traits=simple_flora, at_dbh=stem_dbh) + +# The total area is exactly the crown area +total_area = simple_stem.crown_area[0] + +# Define a simple community +simple_community = Community( + flora=simple_flora, + cell_area=total_area, + cell_id=1, + cohort_dbh_values=stem_dbh, + cohort_n_individuals=np.array([1]), + cohort_pft_names=np.array(["defaults"]), +) + +# Get the canopy model for the simple case from the canopy top +# to the ground +hghts = np.linspace(simple_stem.stem_height[0], 0, num=101)[:, None] +simple_canopy = Canopy( + community=simple_community, + layer_heights=hghts, +) +``` + +As a simple check that the calculation across height layers is correct, the canopy +instance returns a vertical light extinction profile. The last value in this profile +should equal the whole canopy $f_{abs}$ calculated using the simple Beer-Lambert +equation and the PFT trait values. 
+ +```{code-cell} ipython3 +print(simple_canopy.extinction_profile[-1]) +``` + +```{code-cell} ipython3 +print(1 - np.exp(-simple_pft.par_ext * simple_pft.lai)) +``` + +The plot below shows: + +1. The shape of crown radius for the stem (solid line) along with the projected leaf + radius (dashed line). The leaf radius does not show the actual expected boundary of + leaf area - which follows the crown - but is useful to visualise the displacement of + leaf area on the same scale as the crown radius. +2. The vertical profile of the actual leaf area $A_{l[a,b]}$ between two height. +3. The resulting light absorption at each height. +4. The light extinction profile through the canopy. + +```{code-cell} ipython3 +:tags: [hide-input] + +fig, (ax1, ax2, ax3, ax4) = plt.subplots(ncols=4, sharey=True, figsize=(12, 6)) + +# Generate plot structures for stem profiles +ch, crown_radius = get_crown_xy( + crown_profile=simple_canopy.crown_profile, + stem_allometry=simple_community.stem_allometry, + attr="crown_radius", + two_sided=False, +)[0] + +ch, projected_leaf_radius = get_crown_xy( + crown_profile=simple_canopy.crown_profile, + stem_allometry=simple_community.stem_allometry, + attr="projected_leaf_radius", + two_sided=False, +)[0] + +ax1.plot(crown_radius, ch, color="red") +ax1.plot(projected_leaf_radius, ch, linestyle="--", color="red") +ax1.set_xlabel("Profile radius (m)") +ax1.set_ylabel("Vertical height (m)") + +# Plot the leaf area between heights for stems +ax2.plot(simple_canopy.stem_leaf_area, hghts, color="red") +ax2.set_xlabel("Leaf area (m2)") + +# Plot the fraction of light absorbed at different heights +ax3.plot(simple_canopy.f_abs, hghts, color="red") +ax3.set_xlabel("Light absorption fraction (-)") + +# Plot the light extinction profile through the canopy. +ax4.plot(simple_canopy.extinction_profile, hghts, color="red") +ax4.set_xlabel("Cumulative light\nabsorption fraction (-)") +``` + +## Light extinction within a community + +Within a community, the calculations above need to be modified to account for: + +* the number of cohorts $n$, +* the number of individuals $i_{h}$ within each cohort, +* differences in the LAI $L_{h}$ and light extinction coefficients $k_{h}$ between + cohorts, +* scaling LAI to the total area available to the community $A_T$ rather than the cohort + specific crown area $A_h$. + +Within the community, each cohort now requires a whole cohort LAI component $L_H$, +which consists of the total leaf area index across individuals divided by the total +community area to give an average leaf area index across the available space: $$ -l_m = \left\lceil \frac{\sum_1^{N_s}{A_c}}{ A(1 - f_G)}\right\rceil +L_H = \frac{i_h L_h A_h}{A_T} $$ -+++ +The Beer-Lambert equation across the cohorts is then: + +$$ +f_{abs} = 1 - e^{\left(\sum\limits_{m=1}^{n}-k_{[m]} L_{H[m]}\right)} + = 1 - \prod\limits_{m=1}^{n}e^{-k_{[m]} L_{H[m]}} +$$ -## Implementation in `pyrealm` +This equation can be adjusted as before to partition light absorption within vertical +layers and the implementation is demonstrated below using a simple community containing +two plant functional types: -The {class}`~pyrealm.demography.canopy.Canopy` class automatically finds the canopy -closure heights, given a {class}`~pyrealm.demography.community.Community` instance -and the required canopy gap fraction. 
+* a shorter understory tree with a columnar canopy and no crown gaps +* a taller canopy tree with a top heavy canopy and more crown gaps -The code below creates a simple community: +and then three cohorts: -```{code-cell} -# Two PFTs -# - a shorter understory tree with a columnar canopy and no crown gaps -# - a taller canopy tree with a top heavy canopy and more crown gaps +* 7 saplings of the short PFT +* 3 larger stems of the short PFT +* 2 large stems of tall PFT +```{code-cell} ipython3 +# Define PFTs short_pft = PlantFunctionalType( - name="short", h_max=15, m=1.5, n=1.5, f_g=0, ca_ratio=380 + name="short", + h_max=15, + m=1.5, + n=1.5, + par_ext=0.4, + f_g=0, + ca_ratio=380, + lai=4, +) +tall_pft = PlantFunctionalType( + name="tall", h_max=30, m=3, n=1.5, par_ext=0.6, f_g=0.2, ca_ratio=500 ) -tall_pft = PlantFunctionalType(name="tall", h_max=30, m=1.5, n=2, f_g=0.2, ca_ratio=500) # Create the flora flora = Flora([short_pft, tall_pft]) -# Create a simply community with three cohorts -# - 15 saplings of the short PFT -# - 5 larger stems of the short PFT -# - 2 large stems of tall PFT - +# Define a simply community with three cohorts community = Community( flora=flora, cell_area=32, cell_id=1, - cohort_dbh_values=np.array([0.02, 0.20, 0.5]), - cohort_n_individuals=np.array([15, 5, 2]), + cohort_dbh_values=np.array([0.1, 0.20, 0.5]), + cohort_n_individuals=np.array([7, 3, 2]), cohort_pft_names=np.array(["short", "short", "tall"]), ) + +# Calculate the canopy profile across vertical heights +hghts = np.linspace(community.stem_allometry.stem_height.max(), 0, num=101)[:, None] +canopy = Canopy(community=community, layer_heights=hghts) ``` -We can then look at the expected allometries for the stems in each cohort: +The plot below then shows a simplistic 2D representation of the community. -```{code-cell} -print("H = ", community.stem_allometry.stem_height) -print("Ac = ", community.stem_allometry.crown_area) -``` +```{code-cell} ipython3 +fig, ax = plt.subplots(ncols=1) + +# Extract the crown profiles as XY arrays for plotting +profiles = get_crown_xy( + crown_profile=canopy.crown_profile, + stem_allometry=community.stem_allometry, + attr="crown_radius", + as_xy=True, +) -We can now calculate the canopy model for the community: +for idx, crown in enumerate(profiles): -```{code-cell} -canopy = Canopy(community=community, canopy_gap_fraction=2 / 32) + # Get spaced but slightly randomized stem locations + n_stems = community.cohort_data["n_individuals"][idx] + stem_locations = np.linspace(0, 10, num=n_stems) + np.random.normal(size=n_stems) + + # Plot the crown model for each stem + for stem_loc in stem_locations: + ax.add_patch(Polygon(crown + np.array([stem_loc, 0]), color="#00550055")) + +ax.autoscale_view() +ax.set_aspect(1) ``` -We can then look at three key properties of the canopy model: the layer closure -heights ($z^*_l$) and the projected crown areas and leaf areas at each of those -heights for each stem in the three cohorts. +As before, we can verify that the cumulative light extinction at the bottom of the +vertical profile is equal to the expected value across the whole community. -There are four canopy layers, with the top two very close together because of the -large crown area in the two stems in the cohort of `tall` trees. 
+```{code-cell} ipython3 +# Calculate L_h for each cohort +cohort_lai = ( + community.cohort_data["n_individuals"] + * community.stem_traits.lai + * community.stem_allometry.crown_area +) / community.cell_area + +# Calculate 1 - e ^ -k L +print(1 - np.exp(np.sum(-community.stem_traits.par_ext * cohort_lai))) +``` -```{code-cell} -canopy.layer_heights +```{code-cell} ipython3 +print(canopy.extinction_profile[-1]) ``` -The `stem_crown_area` attribute then provides the crown area of each stem found in each -layer. +```{code-cell} ipython3 +:tags: [hide-input] + +cols = ["r", "b", "g"] + +mpl.rcParams["axes.prop_cycle"] = mpl.cycler(color=cols) + +fig, (ax1, ax2, ax3, ax4) = plt.subplots(ncols=4, sharey=True, figsize=(12, 6)) + +# Generate plot structures for stem profiles +crown_profile = get_crown_xy( + crown_profile=canopy.crown_profile, + stem_allometry=community.stem_allometry, + attr="crown_radius", + two_sided=False, +) + +leaf_profile = get_crown_xy( + crown_profile=canopy.crown_profile, + stem_allometry=community.stem_allometry, + attr="projected_leaf_radius", + two_sided=False, +) + +for (stem_rh, stem_cr), (stem_lh, stem_plr), col in zip( + crown_profile, leaf_profile, cols +): + ax1.plot(stem_cr, stem_rh, color=col) + ax1.plot(stem_plr, stem_lh, linestyle="--", color=col) + +ax1.set_xlabel("Profile radius (m)") +ax1.set_ylabel("Vertical height (m)") + +# Plot the leaf area between heights for stems +ax2.plot(canopy.stem_leaf_area, hghts) +ax2.set_xlabel("Leaf area per stem (m2)") + +# Plot the fraction of light absorbed at different heights +ax3.plot(canopy.f_abs, hghts, color="grey") +ax3.plot(1 - canopy.cohort_f_trans, hghts) +ax3.set_xlabel("Light absorption fraction (-)") -```{code-cell} -canopy.stem_crown_area +# Plot the light extinction profile through the canopy. +ax4.plot(canopy.extinction_profile, hghts, color="grey") +_ = ax4.set_xlabel("Cumulative light\nabsorption fraction (-)") ``` -Given the canopy gap fraction, the available crown area per layer is 30 m2, so -the first two layers are taken up entirely by the two stems in the cohort of large -trees. We can confirm that the calculation is correct by calculating the total crown area -across the cohorts at each height: +## Canopy closure and canopy gap fraction + +:::{admonition} TODO + +Need to work out how to include the gap fraction in the calculation of light extinction +because at the moment, the gap fraction in the PPA calculates the layer closure heights +accounting for that, but the LAI is not accounting for it so there is no shift in the +light extinction profile. + +::: + +In addition to calculating profiles from a provided sequence of vertical heights, the +canopy model also implements the calculation of canopy layers, following the perfect +plasticity approximation (PPA) {cite}`purves:2008a`. This model divides the vertical +structure of the canopy into discrete closed layers. The model assumes that plants are +always able to plastically arrange their crown within the broader canopy of the +community to maximise their crown size and fill the available space $A$. When the area +$A$ is filled, a new lower canopy layer is formed until all of the individual crown area +has been distributed across within the canopy. 
+ +A simple method for finding the first canopy closure height is to find a height $z^*_1$ +at which the sum of crown projected area across all stems $N_s$ in a community equals +$A$: + +$$ +\sum_1^{N_s}{ A_p(z^*_1)} = A +$$ + +However, the canopy model also allows for modification by a community-level **canopy gap +fraction** ($f_G$) that captures the overall proportion of the canopy area that is left +unfilled by canopy. This gap fraction, capturing processes such as crown shyness, +describes the proportion of open sky visible from the forest floor. This gives the +following definition of the height of canopy layer closure ($z^*_l$) for a given canopy +layer $l = 1, ..., l_m$: + +$$ +\sum_1^{N_s}{ A_p(z^*_l)} = l A(1 - f_G) +$$ + +The set of heights $z^*$ can be found numerically by using a root solver to find values +of $z^*_l$ for $l = 1, ..., l_m$ that satisfy: + +$$ +\sum_1^{N_s}{ A_p(z^*_l)} - l A(1 - f_G) = 0 +$$ -```{code-cell} -np.sum(canopy.stem_crown_area * community.cohort_data["n_individuals"], axis=1) +The total number of layers $l_m$ in a canopy, where the final layer may not be fully +closed, can be found given the total crown area across stems as: + +$$ +l_m = \left\lceil \frac{\sum_1^{N_s}{A_c}}{ A(1 - f_G)}\right\rceil +$$ + +```{code-cell} ipython3 +canopy_ppa = Canopy(community=community, canopy_gap_fraction=2 / 32, fit_ppa=True) ``` -Those are equal to the layer closure areas of 30, 60 and 90 m2 and the last layer does -not quite close. The slight numerical differences result from the precision of the root -solver for finding $z^*_l$ and this can be adjusted by using the `layer_tolerance` -argument to the `Canopy` class +The `canopy_ppa.heights` attribute now contains the heights at which the PPA +layers close: -The projected leaf area per stem is reported in the `stem_leaf_area` attribute. This is -identical to the projected crown area for the first two cohorts because the crown gap -fraction $f_g$ is zero for this PFT. The projected leaf area is however displaced -towards the ground in the last cohort, because the `tall` PFT has a large gap fraction. +```{code-cell} ipython3 +canopy_ppa.heights +``` -```{code-cell} -canopy.stem_leaf_area +And the final value in the canopy extinction profile still matches the expectation from +above: + +```{code-cell} ipython3 +print(canopy_ppa.extinction_profile[-1]) ``` ### Visualizing layer closure heights and areas -We can use the {class}`~pyrealm.demography.crown.CrownProfile` class to calculate a -community crown and leaf area profile across a range of height values. For each height, +We can use the crown profile calculated for the previous canopy model to calculate a +whole community crown and leaf area profile for the community. For each height, we calculate the sum of the product of stem projected area and the number of individuals in each cohort. 
-```{code-cell} -# Set of vertical height to calculate crown profiles -at_z = np.linspace(0, 26, num=261)[:, None] - -# Calculate the crown profile for the stem for each cohort -crown_profiles = CrownProfile( - stem_traits=community.stem_traits, stem_allometry=community.stem_allometry, z=at_z -) - +```{code-cell} ipython3 # Calculate the total projected crown area across the community at each height community_crown_area = np.nansum( - crown_profiles.projected_crown_area * community.cohort_data["n_individuals"], axis=1 + canopy.crown_profile.projected_crown_area * community.cohort_data["n_individuals"], + axis=1, ) + # Do the same for the projected leaf area community_leaf_area = np.nansum( - crown_profiles.projected_leaf_area * community.cohort_data["n_individuals"], axis=1 + canopy.crown_profile.projected_leaf_area * community.cohort_data["n_individuals"], + axis=1, ) ``` @@ -198,24 +455,28 @@ superimpose the calculated $z^*_l$ values and the cumulative canopy area for eac to confirm that the calculated values coincide with the profile. Note here that the total area at each closed layer height is omitting the community gap fraction. -```{code-cell} -fig, ax = plt.subplots(ncols=1) +```{code-cell} ipython3 +:tags: [hide-input] + +fig, (ax1, ax2) = plt.subplots(ncols=2, sharey=True, figsize=(12, 6)) # Calculate the crown area at which each canopy layer closes. -closure_areas = np.arange(1, canopy.n_layers + 1) * canopy.crown_area_per_layer +closure_areas = np.arange(1, len(canopy_ppa.heights) + 1) * canopy.filled_community_area + +# LH plot - projected leaf area with height. # Add lines showing the canopy closure heights and closure areas. -for val in canopy.layer_heights: - ax.axhline(val, color="red", linewidth=0.5, zorder=0) +for val in canopy_ppa.heights: + ax1.axhline(val, color="red", linewidth=0.5, zorder=0) for val in closure_areas: - ax.axvline(val, color="red", linewidth=0.5, zorder=0) + ax1.axvline(val, color="red", linewidth=0.5, zorder=0) # Show the community projected crown area profile -ax.plot(community_crown_area, at_z, zorder=1, label="Crown area") -ax.plot( +ax1.plot(community_crown_area, canopy.heights, zorder=1, label="Crown area") +ax1.plot( community_leaf_area, - at_z, + canopy.heights, zorder=1, linestyle="--", color="black", @@ -223,80 +484,131 @@ ax.plot( label="Leaf area", ) +# Add cumulative canopy area at top +ax1_top = ax1.twiny() +ax1_top.set_xlim(ax1.get_xlim()) +area_labels = [f"$A_{l + 1}$ = {z:.1f}" for l, z in enumerate(np.nditer(closure_areas))] +ax1_top.set_xticks(closure_areas) +ax1_top.set_xticklabels(area_labels, rotation=90) + +ax1.set_ylabel("Vertical height ($z$, m)") +ax1.set_xlabel("Community-wide projected area (m2)") +ax1.legend(frameon=False) + +# RH plot - light extinction +for val in canopy_ppa.heights: + ax2.axhline(val, color="red", linewidth=0.5, zorder=0) + +for val in canopy_ppa.extinction_profile: + ax2.axvline(val, color="red", linewidth=0.5, zorder=0) + +ax2.plot(canopy.extinction_profile, hghts) + +ax2_top = ax2.twiny() +ax2_top.set_xlim(ax2.get_xlim()) +extinction_labels = [ + f"$f_{{abs{l + 1}}}$ = {z:.3f}" + for l, z in enumerate(np.nditer(canopy_ppa.extinction_profile)) +] +ax2_top.set_xticks(canopy_ppa.extinction_profile) +ax2_top.set_xticklabels(extinction_labels, rotation=90) + +ax2.set_xlabel("Light extinction (-)") # Add z* values on the righthand axis -ax_rhs = ax.twinx() -ax_rhs.set_ylim(ax.get_ylim()) +ax2_rhs = ax2.twinx() +ax2_rhs.set_ylim(ax2.get_ylim()) z_star_labels = [ - f"$z^*_{l + 1} = {val:.2f}$" - for l, val 
in enumerate(np.nditer(canopy.layer_heights)) + f"$z^*_{l + 1} = {val:.2f}$" for l, val in enumerate(np.nditer(canopy_ppa.heights)) ] -ax_rhs.set_yticks(canopy.layer_heights.flatten()) -ax_rhs.set_yticklabels(z_star_labels) +ax2_rhs.set_yticks(canopy_ppa.heights.flatten()) +_ = ax2_rhs.set_yticklabels(z_star_labels) +``` -# Add cumulative canopy area at top -ax_top = ax.twiny() -ax_top.set_xlim(ax.get_xlim()) -area_labels = [f"$A_{l + 1}$ = {z:.1f}" for l, z in enumerate(np.nditer(closure_areas))] -ax_top.set_xticks(closure_areas) -ax_top.set_xticklabels(area_labels) +## Light allocation -ax.set_ylabel("Vertical height ($z$, m)") -ax.set_xlabel("Community-wide projected area (m2)") -ax.legend(frameon=False) -``` + + +In order to use the light extinction with the P Model, we need to calculate the fraction +of absorbed photosynthetically active radiation $f_{APAR}$ within each layer for each +cohort. These values can be multiplied by the canopy-top photosynthetic photon flux +density (PPFD) to give the actual light absorbed for photosynthesis. -### Light transmission through the canopy +The steps below show this partitioning process for the PPA layers calculated above. -Now we can use the leaf area by layer and the Beer-Lambert equation to calculate light -attenuation through the canopy layers. +1. Calculate the fraction of light transmitted $f_{tr}$ through each layer for each + cohort. The two arrays below show the extinction coefficients for the PFT of each + cohort and then the cohort LAI ($L_H$, columns) components within each layer (rows). + The transmission through each component is then $f_{tr}=e^{-kL_H}$ and + $f_{abs} = 1 - f_{tr}$ . -$f_{abs} = 1 - e ^ {-kL}$, +```{code-cell} ipython3 +print("k = \n", community.stem_traits.par_ext, "\n") +print("L_H = \n", canopy_ppa.cohort_lai) +``` -where $k$ is the light extinction coefficient ($k$) and $L$ is the leaf area index -(LAI). The LAI can be calculated for each stem and layer: +```{code-cell} ipython3 +layer_cohort_f_tr = np.exp(-community.stem_traits.par_ext * canopy_ppa.cohort_lai) +print(layer_cohort_f_tr) +``` -```{code-cell} -# LAI = Acp_within_layer / canopy_area -# print(LAI) +```{code-cell} ipython3 +layer_cohort_f_abs = 1 - layer_cohort_f_tr +print(layer_cohort_f_abs) ``` -This can be used to calculate the LAI of individual stems but also the LAI of each layer -in the canopy: + These matrices show that there is complete transmission ($f_{abs} = 0, f_{tr} = 1$) + where a given stem has no leaf area within the layer but otherwise the leaves of each + stem absorb some light. -```{code-cell} -# LAI_stem = LAI.sum(axis=0) -# LAI_layer = LAI.sum(axis=1) +2. Calculate the total transmission across cohorts within each layer, as the product of + the individual cohort transmission within the layers, and then the absorption within + each layer -# print("LAI stem = ", LAI_stem) -# print("LAI layer = ", LAI_layer) +```{code-cell} ipython3 +layer_f_tr = np.prod(layer_cohort_f_tr, axis=1) +print(layer_f_tr) ``` -The layer LAI values can now be used to calculate the light transmission of each layer and -hence the cumulative light extinction profile through the canopy. +```{code-cell} ipython3 +layer_f_abs = 1 - layer_f_tr +print(layer_f_abs) +``` -```{code-cell} -# f_abs = 1 - np.exp(-pft.traits.par_ext * LAI_layer) -# ext = np.cumprod(f_abs) +3. Calculate the transmission and extinction profiles through the layers as the + cumulative product of light transmitted. 
-# print("f_abs = ", f_abs) -# print("extinction = ", ext) +```{code-cell} ipython3 +transmission_profile = np.cumprod(layer_f_tr) +print(transmission_profile) ``` -One issue that needs to be resolved is that the T Model implementation in `pyrealm` -follows the original implementation of the T Model in having LAI as a fixed trait of -a given plant functional type, so is constant for all stems of that PFT. +```{code-cell} ipython3 +extinction_profile = 1 - transmission_profile +print(extinction_profile) +``` -```{code-cell} -# print("f_abs = ", (1 - np.exp(-pft.traits.par_ext * pft.traits.lai))) +4. Calculate the fraction of light transmitted through each each layer: + +```{code-cell} ipython3 +layer_fapar = -np.diff(transmission_profile, prepend=1) +print(layer_fapar) ``` -## Things to worry about later +5. Calculate the relative absorbance across cohort within each layer and then use this + to partition the light absorbed in that layer across the cohorts: -Herbivory - leaf fall (transmission increases, truncate at 0, replace from NSCs) vs leaf -turnover (transmission constant, increased GPP penalty) +```{code-cell} ipython3 +cohort_fapar = ( + layer_cohort_f_abs / layer_cohort_f_abs.sum(axis=1)[:, None] +) * layer_fapar[:, None] +print(cohort_fapar) +``` -Leaf area dynamics in PlantFATE - acclimation to herbivory and transitory decreases in -transimission, need non-structural carbohydrates to recover from total defoliation. +6. Last, divide the cohort $f_{APAR}$ through by the number of individuals in each + cohort to given the $f_{APAR}$ for each stem at each height. -Leaf economics. +```{code-cell} ipython3 +stem_fapar = cohort_fapar / community.cohort_data["n_individuals"] +print(stem_fapar) +``` diff --git a/docs/source/users/demography/community.md b/docs/source/users/demography/community.md index 0961b8e2..8bee4431 100644 --- a/docs/source/users/demography/community.md +++ b/docs/source/users/demography/community.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Plant Communities @@ -19,7 +30,7 @@ This area of `pyrealm` is in active development. 
::: -```{code-cell} +```{code-cell} ipython3 from matplotlib import pyplot as plt import numpy as np import pandas as pd @@ -28,7 +39,7 @@ from pyrealm.demography.flora import PlantFunctionalType, Flora from pyrealm.demography.community import Community ``` -```{code-cell} +```{code-cell} ipython3 short_pft = PlantFunctionalType( name="short", h_max=15, m=1.5, n=1.5, f_g=0, ca_ratio=380 ) @@ -52,10 +63,10 @@ community = Community( ) ``` -```{code-cell} +```{code-cell} ipython3 community ``` -```{code-cell} +```{code-cell} ipython3 ``` diff --git a/docs/source/users/demography/crown.md b/docs/source/users/demography/crown.md index dcf41bc7..69e64018 100644 --- a/docs/source/users/demography/crown.md +++ b/docs/source/users/demography/crown.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The tree crown model @@ -20,8 +31,10 @@ notes and initial demonstration code. ::: -```{code-cell} +```{code-cell} ipython3 from matplotlib import pyplot as plt +from matplotlib.lines import Line2D +from matplotlib.patches import Polygon, Patch import numpy as np import pandas as pd @@ -39,6 +52,7 @@ from pyrealm.demography.t_model_functions import ( from pyrealm.demography.crown import ( CrownProfile, + get_crown_xy, ) ``` @@ -146,7 +160,7 @@ The {class}`~pyrealm.demography.flora.PlantFunctionalType` class is typically used to set specific PFTs, but the functions to calculate $q_m$ and $p_{zm}$ are used directly below to provides a demonstration of the impacts of each trait. -```{code-cell} +```{code-cell} ipython3 # Set a range of values for m and n traits m = n = np.arange(1.0, 5, 0.1) @@ -155,7 +169,7 @@ q_m = calculate_crown_q_m(m=m, n=n[:, None]) z_max_prop = calculate_crown_z_max_proportion(m=m, n=n[:, None]) ``` -```{code-cell} +```{code-cell} ipython3 fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(10.9, 4)) # Plot q_m as a function of m and n @@ -191,7 +205,7 @@ profiles for PFTs. It requires: The code below creates a set of PFTS with differing crown trait values and then creates a `Flora` object using the PFTs. -```{code-cell} +```{code-cell} ipython3 # A PFT with a small crown area and equal m and n values narrow_pft = PlantFunctionalType(name="narrow", h_max=20, m=1.5, n=1.5, ca_ratio=20) # A PFT with an intermediate crown area and m < n @@ -206,18 +220,19 @@ flora The Flora object can also be used to show a table of canopy variables: -```{code-cell} +```{code-cell} ipython3 # TODO - add a Flora.to_pandas() method flora_data = pd.DataFrame({k: getattr(flora, k) for k in flora.trait_attrs}) flora_data[["name", "ca_ratio", "m", "n", "f_g", "q_m", "z_max_prop"]] ``` The next section of code generates the `StemAllometry` to use for the profiles. -The T Model uses DBH to define stem size - here the the code is being used to -back-calculate the required DBH values to give three stems with similar heights -near the maximum height for each PFT. +The T Model requires DBH to define stem size - here the +{meth}`~pyrealm.demography.t_model_functions.calculate_dbh_from_height` function +is used to back-calculate the required DBH values to give three stems with similar +heights that are near the maximum height for each PFT. 
-```{code-cell} +```{code-cell} ipython3 # Generate the expected stem allometries at similar heights for each PFT stem_height = np.array([19, 17, 15]) stem_dbh = calculate_dbh_from_height( @@ -226,14 +241,14 @@ stem_dbh = calculate_dbh_from_height( stem_dbh ``` -```{code-cell} +```{code-cell} ipython3 # Calculate the stem allometries allometry = StemAllometry(stem_traits=flora, at_dbh=stem_dbh) ``` We can again use {mod}`pandas` to get a table of those allometric predictions: -```{code-cell} +```{code-cell} ipython3 pd.DataFrame({k: getattr(allometry, k) for k in allometry.allometry_attrs}) ``` @@ -243,7 +258,7 @@ that is with a shape `(N, 1)`. We can then calculate the crown profiles. -```{code-cell} +```{code-cell} ipython3 # Create a set of vertical heights as a column array. z = np.linspace(-1, 20.0, num=211)[:, None] @@ -259,7 +274,7 @@ above calculated at each height $z$: * The projected crown area * The projected leaf area -```{code-cell} +```{code-cell} ipython3 crown_profiles ``` @@ -269,14 +284,26 @@ The code below generates a plot of the vertical shape profiles of the crowns for stem. For each stem: * the dashed line shows how the relative crown radius $q(z)$ varies with height $z$, -* the solid line shows the actual crown radius $r)z)$ varies with height, and +* the solid line shows the actual crown radius $r(z)$ varies with height, and * the dotted horizontal line shows the height at which the maximum crown radius is - found ($z_max$). + found ($z_{max}$). -Note that the equation for the relative radius $q(z)$ does define values where -$z <0$ or $z > H$. +:::{admonition} Note -```{code-cell} +The predictions of the equation for the relative radius $q(z)$ are not limited to +height values within the range of the actual height of a given stem +($0 \leq z \leq H$). This is critical for calculating behaviour with height across +multiple stems when calculating canopy profiles for a community. The plot below +includes predictions of $q(z)$ below ground level and above stem height. + +The {meth}`~pyrealm.demography.crown.get_crown_xy` helper function can be used to +extract plotting structures for each stem within a `CrownProfile` that *are* +restricted to actual valid heights for that stem and is demonstrated in the +[code below](#plotting-tools-for-crown-shapes). + +::: + +```{code-cell} ipython3 fig, ax = plt.subplots(ncols=1) # Find the maximum of the actual and relative maximum crown widths @@ -315,10 +342,10 @@ ax.set_aspect(aspect=1) ``` We can also use the `CanopyProfile` class with a single row of heights to calculate -the crown profile at the expected $z_max$ and show that this matches the expected +the crown profile at the expected $z_{max}$ and show that this matches the expected crown area from the T Model allometry. -```{code-cell} +```{code-cell} ipython3 # Calculate the crown profile across those heights for each PFT z_max = flora.z_max_prop * stem_height profile_at_zmax = CrownProfile(stem_traits=flora, stem_allometry=allometry, z=z_max) @@ -334,15 +361,15 @@ using the PFTs defined above because they have very different crown areas, so th below generates new profiles for a new set of PFTs that have similar crown area ratios but different shapes and gap fractions. 
-```{code-cell} +```{code-cell} ipython3 no_gaps_pft = PlantFunctionalType( name="no_gaps", h_max=20, m=1.5, n=1.5, f_g=0, ca_ratio=380 ) few_gaps_pft = PlantFunctionalType( - name="few_gaps", h_max=20, m=1.5, n=4, f_g=0.05, ca_ratio=400 + name="few_gaps", h_max=20, m=1.5, n=4, f_g=0.1, ca_ratio=400 ) many_gaps_pft = PlantFunctionalType( - name="many_gaps", h_max=20, m=4, n=1.5, f_g=0.2, ca_ratio=420 + name="many_gaps", h_max=20, m=4, n=1.5, f_g=0.3, ca_ratio=420 ) # Calculate allometries for each PFT at the same stem DBH @@ -370,7 +397,7 @@ lines) change with height along the stem. lines are identical, but as `f_g` increases, more of the leaf area is displaced down within the crown. -```{code-cell} +```{code-cell} ipython3 fig, ax = plt.subplots(ncols=1) for pft_idx, offset, colour in zip((0, 1, 2), (0, 5, 10), ("r", "g", "b")): @@ -390,7 +417,7 @@ We can also generate predictions for a single PFT with varying crown gap fractio the plot below, note that all leaf area is above $z_{max}$ when $f_g=1$ and all leaf area is *below* -```{code-cell} +```{code-cell} ipython3 fig, ax = plt.subplots(ncols=1) # Loop over f_g values @@ -434,6 +461,78 @@ ax.set_xlabel(r"Projected leaf area ($\tilde{A}_{cp}(z)$, m2)") ax.legend(frameon=False) ``` -```{code-cell} +## Plotting tools for crown shapes + +The {meth}`~pyrealm.demography.crown.get_crown_xy` function makes it easier to extract +neat crown profiles from `CrownProfile` objects, for use in plotting crown data. The +function takes a paired `CrownProfile` and `StemAllometry` and extracts a particular +crown profile variable, and removes predictions for each stem that are outside of +the stem range for that stem. It converts the data for each stem into coordinates that +will plot as a complete two-sided crown outline. The returned value is a list with an +entry for each stem in one of two formats. + +* A pair of coordinate arrays: height and variable value. +* An single XY array with height and variable values in the columns, as used for + example in `matplotlib` Patch objects. + +The code below uses this function to generate plotting data for the crown radius, +projected crown radius and projected leaf radius. These last two variables do not +have direct computational use - the cumulative projected area is what matters - but +allow the projected variables to be visualised at the same scale as the crown radius. + +```{code-cell} ipython3 +# Set stem offsets for separating stems along the x axis +stem_offsets = np.array([0, 6, 12]) + +# Get the crown radius in XY format to plot as a polygon +crown_radius_as_xy = get_crown_xy( + crown_profile=area_crown_profiles, + stem_allometry=area_allometry, + attr="crown_radius", + stem_offsets=stem_offsets, + as_xy=True, +) + +# Get the projected crown and leaf radii to plot as lines +projected_crown_radius_xy = get_crown_xy( + crown_profile=area_crown_profiles, + stem_allometry=area_allometry, + attr="projected_crown_radius", + stem_offsets=stem_offsets, +) + +projected_leaf_radius_xy = get_crown_xy( + crown_profile=area_crown_profiles, + stem_allometry=area_allometry, + attr="projected_leaf_radius", + stem_offsets=stem_offsets, +) +``` + +```{code-cell} ipython3 +fig, ax = plt.subplots() + +# Bundle the three plotting structures and loop over the three stems. 
+for cr_xy, (ch, cpr), (lh, lpr) in zip( + crown_radius_as_xy, projected_crown_radius_xy, projected_leaf_radius_xy +): + ax.add_patch(Polygon(cr_xy, color="lightgrey")) + ax.plot(cpr, ch, color="0.4", linewidth=2) + ax.plot(lpr, lh, color="red", linewidth=1) + +ax.set_aspect(0.5) +plt.legend( + handles=[ + Patch(color="lightgrey", label="Crown profile"), + Line2D([0], [0], label="Projected crown", color="0.4", linewidth=2), + Line2D([0], [0], label="Projected leaf", color="red", linewidth=1), + ], + ncols=3, + loc="upper center", + bbox_to_anchor=(0.5, 1.15), +) +``` + +```{code-cell} ipython3 ``` diff --git a/docs/source/users/demography/flora.md b/docs/source/users/demography/flora.md index 4b9bf3f7..8db0615d 100644 --- a/docs/source/users/demography/flora.md +++ b/docs/source/users/demography/flora.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Plant Functional Types and Traits @@ -23,7 +34,7 @@ notes and initial demonstration code. This page introduces the main components of the {mod}`~pyrealm.demography` module that describe plant functional types (PFTs) and their traits. -```{code-cell} +```{code-cell} ipython3 from matplotlib import pyplot as plt import numpy as np import pandas as pd @@ -100,7 +111,7 @@ their maximum height. Note that the `q_m` and `z_max_prop` traits are calculated from the `m` and `n` traits and cannot be set directly. -```{code-cell} +```{code-cell} ipython3 short_pft = PlantFunctionalType(name="short", h_max=10) medium_pft = PlantFunctionalType(name="medium", h_max=20) tall_pft = PlantFunctionalType(name="tall", h_max=30) @@ -108,7 +119,7 @@ tall_pft = PlantFunctionalType(name="tall", h_max=30) The traits values set for a PFT instance can then be shown: -```{code-cell} +```{code-cell} ipython3 short_pft ``` @@ -129,13 +140,13 @@ that will be used in a demographic simulation. It can be created directly by pro the list of {class}`~pyrealm.demography.flora.PlantFunctionalType` instances. The only requirement is that each PFT instance uses a different name. -```{code-cell} +```{code-cell} ipython3 flora = Flora([short_pft, medium_pft, tall_pft]) flora ``` -```{code-cell} +```{code-cell} ipython3 pd.DataFrame({k: getattr(flora, k) for k in flora.trait_attrs}) ``` @@ -153,7 +164,7 @@ within {class}`~pyrealm.demography.community.Community` objects. 
A `StemTraits` instance can be created directly by providing arrays for each trait, but is more easily created from a `Flora` object by providing a list of PFT names: -```{code-cell} +```{code-cell} ipython3 # Get stem traits for a range of stems stem_pfts = ["short", "short", "short", "medium", "medium", "tall"] stem_traits = flora.get_stem_traits(pft_names=stem_pfts) diff --git a/docs/source/users/demography/module_overview.md b/docs/source/users/demography/module_overview.md index e2aaceab..45d7f248 100644 --- a/docs/source/users/demography/module_overview.md +++ b/docs/source/users/demography/module_overview.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The demography module diff --git a/docs/source/users/demography/t_model.md b/docs/source/users/demography/t_model.md index 23510daa..cd38532c 100644 --- a/docs/source/users/demography/t_model.md +++ b/docs/source/users/demography/t_model.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The T Model module @@ -19,7 +30,7 @@ The T Model {cite}`Li:2014bc` provides a model of both: (PFT), and * a carbon allocation model, given stem allometry and potential GPP. -```{code-cell} +```{code-cell} ipython3 from matplotlib import pyplot as plt import numpy as np import pandas as pd @@ -31,7 +42,7 @@ from pyrealm.demography.t_model_functions import StemAllocation, StemAllometry To generate predictions under the T Model, we need a Flora object providing the [trait values](./flora.md) for each of the PFTsto be modelled: -```{code-cell} +```{code-cell} ipython3 # Three PFTS short_pft = PlantFunctionalType(name="short", h_max=10) medium_pft = PlantFunctionalType(name="medium", h_max=20) @@ -61,14 +72,14 @@ the predictions of the T Model for: The DBH input can be a scalar array or a one dimensional array providing a single value for each PFT. This then calculates a single estimate at the given size for each stem. -```{code-cell} +```{code-cell} ipython3 # Calculate a single prediction single_allometry = StemAllometry(stem_traits=flora, at_dbh=np.array([0.1, 0.1, 0.1])) ``` We can display those predictions as a `pandas.DataFrame`: -```{code-cell} +```{code-cell} ipython3 pd.DataFrame( {k: getattr(single_allometry, k) for k in single_allometry.allometry_attrs} ) @@ -79,7 +90,7 @@ predictions are made at each DBH value for each PFT and the allometry attributes predictions arranged with each PFT as a column and each DBH prediction as a row. This makes them convenient to plot using `matplotlib`. 
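As a quick illustration of the array orientation described above, the sketch below uses plain NumPy, with a hypothetical per-PFT scaling factor standing in for the T Model equations, to show how an `(N, 1)` column of DBH values broadcasts against three PFTs to give predictions with one column per PFT and one row per DBH value.

```python
# Broadcasting sketch only: the per-PFT factor is hypothetical and simply
# stands in for any allometric scaling applied per PFT.
import numpy as np

dbh_col = np.arange(0, 1.6, 0.01)[:, None]  # shape (160, 1): one row per DBH
per_pft_factor = np.array([1.0, 1.1, 1.2])  # shape (3,): one value per PFT
predictions = dbh_col * per_pft_factor  # shape (160, 3): PFTs as columns

print(dbh_col.shape, per_pft_factor.shape, predictions.shape)
```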
-```{code-cell} +```{code-cell} ipython3 # Column array of DBH values from 0 to 1.6 metres dbh_col = np.arange(0, 1.6, 0.01)[:, None] # Get the predictions @@ -89,7 +100,7 @@ allometries = StemAllometry(stem_traits=flora, at_dbh=dbh_col) The code below shows how to use the returned allometries to generate a plot of the scaling relationships across all of the PFTs in a `Flora` instance. -```{code-cell} +```{code-cell} ipython3 fig, axes = plt.subplots(ncols=2, nrows=4, sharex=True, figsize=(10, 10)) plot_details = [ @@ -118,14 +129,14 @@ The T Model also predicts how potential GPP will be allocated to respiration, tu and growth for stems with a given PFT and allometry. Again, a single value can be provided to get a single estimate of the allocation model for each stem: -```{code-cell} +```{code-cell} ipython3 single_allocation = StemAllocation( stem_traits=flora, stem_allometry=single_allometry, at_potential_gpp=np.array([55]) ) single_allocation ``` -```{code-cell} +```{code-cell} ipython3 pd.DataFrame( {k: getattr(single_allocation, k) for k in single_allocation.allocation_attrs} ) @@ -136,14 +147,14 @@ allocation per stem. In the first example, the code takes the allometric predict from above and calculates the GPP allocation for stems of varying size with the same potential GPP: -```{code-cell} +```{code-cell} ipython3 potential_gpp = np.repeat(5, dbh_col.size)[:, None] allocation = StemAllocation( stem_traits=flora, stem_allometry=allometries, at_potential_gpp=potential_gpp ) ``` -```{code-cell} +```{code-cell} ipython3 fig, axes = plt.subplots(ncols=2, nrows=5, sharex=True, figsize=(10, 12)) plot_details = [ @@ -175,7 +186,7 @@ fig.delaxes(axes[-1]) An alternative calculation is to make allocation predictions for varying potential GPP for constant allometries: -```{code-cell} +```{code-cell} ipython3 # Column array of DBH values from 0 to 1.6 metres dbh_constant = np.repeat(0.2, 50)[:, None] # Get the allometric predictions @@ -189,7 +200,7 @@ allocation_2 = StemAllocation( ) ``` -```{code-cell} +```{code-cell} ipython3 fig, axes = plt.subplots(ncols=2, nrows=5, sharex=True, figsize=(10, 12)) axes = axes.flatten() @@ -206,6 +217,6 @@ for ax, (var, ylab) in zip(axes, plot_details): fig.delaxes(axes[-1]) ``` -```{code-cell} +```{code-cell} ipython3 ``` diff --git a/docs/source/users/hygro.md b/docs/source/users/hygro.md index a235875e..bfd2f7fd 100644 --- a/docs/source/users/hygro.md +++ b/docs/source/users/hygro.md @@ -5,15 +5,26 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Hygrometric functions -```{code-cell} +```{code-cell} ipython3 # This code loads required packages and then creates a representative range of # values of the core variables to use in function plots. 
# @@ -72,7 +83,7 @@ and returns kPa, so if you are using VP to prepare input data for ## Saturated vapour pressure -```{code-cell} +```{code-cell} ipython3 # Create a sequence of air temperatures and calculate the saturated vapour pressure vp_sat = hygro.calc_vp_sat(ta_1d) @@ -85,7 +96,7 @@ pyplot.show() ## Vapour pressure to VPD -```{code-cell} +```{code-cell} ipython3 vpd = hygro.convert_vp_to_vpd(vp_2d, ta_2d.transpose()) # Plot vpd @@ -100,7 +111,7 @@ pyplot.show() ## Relative humidity to VPD -```{code-cell} +```{code-cell} ipython3 vpd = hygro.convert_rh_to_vpd(rh_2d, ta_2d.transpose()) # Plot vpd @@ -117,7 +128,7 @@ pyplot.show() ## Specific humidity to VPD -```{code-cell} +```{code-cell} ipython3 # Create a sequence of air temperatures and calculate the saturated vapour pressure vpd1 = hygro.convert_sh_to_vpd(sh_1d, ta=20, patm=101.325) vpd2 = hygro.convert_sh_to_vpd(sh_1d, ta=30, patm=101.325) diff --git a/docs/source/users/pmodel/c3c4model.md b/docs/source/users/pmodel/c3c4model.md index ac4ba204..9630134f 100644 --- a/docs/source/users/pmodel/c3c4model.md +++ b/docs/source/users/pmodel/c3c4model.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # C3 / C4 Competition @@ -41,7 +52,7 @@ expected C4 fraction ($F_4$) in a community (see correction term for the estimated percentage tree cover and the plot below shows how $F_4$ changes with $A_4$, given differing estimates of tree cover. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] import numpy as np @@ -87,7 +98,7 @@ The plot below shows how $h$ varies with the expected GPP from C3 plants alone. dashed line shows the C3 GPP estimate above which canopy closure leads to complete shading of C4 plants. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Just use the competition model to predict h across a GPP gradient @@ -123,7 +134,7 @@ cover of 0.5. ### Code -```{code-cell} +```{code-cell} ipython3 # Use a simple temperature sequence to generate a range of optimal chi values n_pts = 51 tc_1d = np.linspace(-10, 45, n_pts) @@ -201,7 +212,7 @@ Panel F : The contributions of plants using the C3 and C4 pathways to predicted $\delta\ce{^{13}C}$ . -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Generate the plots diff --git a/docs/source/users/pmodel/isotopic_discrimination.md b/docs/source/users/pmodel/isotopic_discrimination.md index 574b2f67..99bf30b8 100644 --- a/docs/source/users/pmodel/isotopic_discrimination.md +++ b/docs/source/users/pmodel/isotopic_discrimination.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Isotopic discrimination @@ -31,7 +42,7 @@ values of $\chi$. The sequence of $\chi$ values used is created by using the P M estimate $\chi$ across a temperature gradient, giving the range of $\chi$ values shown below for C3 and C4 plants. 
-```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] import numpy as np @@ -81,12 +92,12 @@ The calculations differ between C3 and C4 plants, and this is set by the selecti the `method_optchi` argument used for the {class}`~pyrealm.pmodel.pmodel.PModel` instance. -```{code-cell} +```{code-cell} ipython3 carb_c3 = CalcCarbonIsotopes(mod_c3, d13CO2=-8.4, D14CO2=19.2) carb_c3.summarize() ``` -```{code-cell} +```{code-cell} ipython3 carb_c4 = CalcCarbonIsotopes(mod_c4, d13CO2=-8.4, D14CO2=19.2) carb_c4.summarize() ``` @@ -95,7 +106,7 @@ The plots below show how the calculated values alter with $\chi$. The difference direction of these relationships between C3 and C4 pathways creates a predictable isotopic signature of relative contributions of the two pathways. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Create side by side subplots diff --git a/docs/source/users/pmodel/module_overview.md b/docs/source/users/pmodel/module_overview.md index 04c29687..951a560c 100644 --- a/docs/source/users/pmodel/module_overview.md +++ b/docs/source/users/pmodel/module_overview.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The P Model module diff --git a/docs/source/users/pmodel/pmodel_details/envt_variation_outputs.md b/docs/source/users/pmodel/pmodel_details/envt_variation_outputs.md index e7364837..fc69263a 100644 --- a/docs/source/users/pmodel/pmodel_details/envt_variation_outputs.md +++ b/docs/source/users/pmodel/pmodel_details/envt_variation_outputs.md @@ -5,15 +5,26 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # P Model predictions -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from itertools import product @@ -115,7 +126,7 @@ environmental variables: All of the pairwise plots share the same legend: -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] fig, ax = pyplot.subplots(1, 1, figsize=(6, 1.2)) @@ -180,7 +191,7 @@ absorbed irradiance. Light use efficiency measures conversion efficiency of moles of absorbed irradiance into grams of Carbon ($\mathrm{g\,C}\; \mathrm{mol}^{-1}$ photons). -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("lue", r"LUE ($\mathrm{g\,C}\; \mathrm{mol}^{-1}$ photons).") @@ -192,7 +203,7 @@ The intrinsic water-use efficiency is ratio of net photosynthetic CO2 assimilation to stomatal conductance, and captures the cost of assimilation per unit of water, in units of $\mu\mathrm{mol}\;\mathrm{mol}^{-1}$. 
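As a hedged aside, the intrinsic water use efficiency is commonly written as $\text{IWUE} = A/g_s = (c_a - c_i)/1.6$, where 1.6 is the ratio of the diffusivities of water vapour and $\ce{CO2}$. The sketch below evaluates that standard relationship with assumed values for ambient $\ce{CO2}$ and $\chi$; it is illustrative arithmetic, not a call into the `pyrealm` API.

```python
# Illustrative arithmetic using the standard IWUE = (ca - ci) / 1.6 relation
# with assumed values; not pyrealm code.
ca = 400.0  # assumed ambient CO2, micromol mol-1
chi = 0.7  # assumed ratio of leaf internal to ambient CO2
ci = chi * ca
iwue = (ca - ci) / 1.6
print(iwue)  # 75.0 micromol mol-1
```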
-```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("iwue", r"IWUE ($\mu\mathrm{mol}\;\mathrm{mol}^{-1}$)") @@ -249,7 +260,7 @@ calculated using ``fapar=1, ppfd=1``, which are the default values to ### Gross primary productivity (``gpp``, GPP) -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("gpp", r"GPP ($\mu\mathrm{g\,C}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") @@ -257,7 +268,7 @@ plot_fun("gpp", r"GPP ($\mu\mathrm{g\,C}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") ### Dark respiration (``rd``) -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("rd", r"$r_d$ ($\mu\mathrm{mol}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") @@ -265,7 +276,7 @@ plot_fun("rd", r"$r_d$ ($\mu\mathrm{mol}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") ### Maximum rate of carboxylation (``vcmax``) -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("vcmax", r"$v_{cmax}$ ($\mu\mathrm{mol}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") @@ -273,7 +284,7 @@ plot_fun("vcmax", r"$v_{cmax}$ ($\mu\mathrm{mol}\,\mathrm{m}^{-2}\,\mathrm{s}^ ### Maximum rate of carboxylation at standard temperature (``vcmax25``) -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun( @@ -283,7 +294,7 @@ plot_fun( ### Maximum rate of electron transport. (``jmax``) -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("jmax", r"$J_{max}$ ($\mu\mathrm{mol}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") @@ -298,7 +309,7 @@ instability in estimates of $g_s$. The {meth}`~pyrealm.pmodel.pmodel.PModel.estimate_productivity` method will set $g_s$ to be undefined (`np.nan`) when VPD is zero or when $c_a - c_i = 0$. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plot_fun("gs", r"$g_s$ ($\mu\mathrm{mol}\,\mathrm{m}^{-2}\,\mathrm{s}^{-1}$)") @@ -311,7 +322,7 @@ below show how each variable changes, for a constant environment with `tc` of 20 `patm` of 101325 Pa, `vpd` of 1000 Pa and $\ce{CO2}$ of 400 ppm, when absorbed irradiance changes from 0 to 2000 $\mu\text{mol}\,\mathrm{m}^{-2}\,\text{s}^{-1}$. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Calculate the photosynthetic environment diff --git a/docs/source/users/pmodel/pmodel_details/extreme_values.md b/docs/source/users/pmodel/pmodel_details/extreme_values.md index 7119ac7f..b4de861e 100644 --- a/docs/source/users/pmodel/pmodel_details/extreme_values.md +++ b/docs/source/users/pmodel/pmodel_details/extreme_values.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Extreme forcing values @@ -51,9 +62,8 @@ settings, the roots of these quadratics are: Note that the default values for C3 photosynthesis give **non-zero values below 0°C**. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true from matplotlib import pyplot import numpy as np @@ -91,9 +101,8 @@ The photorespiratory compensation point ($\Gamma^*$) varies with as a function o temperature and atmospheric pressure, and behaves smoothly with extreme inputs. Note that again, $\Gamma^_$ has non-zero values for sub-zero temperatures. 
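The zero crossings mentioned above are easy to check numerically. The sketch below applies `numpy.roots` to an assumed quadratic temperature response (the coefficients are illustrative placeholders, not the `pyrealm` defaults) to find the temperatures at which the response falls to zero.

```python
# Find where an assumed quadratic temperature response crosses zero; the
# coefficients here are illustrative placeholders, not pyrealm constants.
import numpy as np

a, b, c = -3.4e-4, 2.2e-2, 0.352  # assumed response: a*T**2 + b*T + c
roots = np.sort(np.roots([a, b, c]))
print(roots)  # the response is positive between these two temperatures (°C)
```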
-```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true # Calculate gammastar at different pressures tc_1d = np.linspace(-80, 100, n_pts) @@ -117,9 +126,8 @@ pyplot.show() The Michaelis-Menten coefficient for photosynthesis ($K_{mm}$) also varies with temperature and atmospheric pressure and again behaves smoothly with extreme values. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true fig, ax = pyplot.subplots(1, 1) @@ -142,9 +150,8 @@ The density ($\rho$) and viscosity ($\mu$) of water both vary with temperature a atmospheric pressure. Looking at the density of water, there is a serious numerical issue with low temperatures arising from the equations for the density of water. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true fig, ax = pyplot.subplots(1, 1) @@ -167,9 +174,8 @@ Zooming in, the behaviour of this function is not reliable at extreme low temper leading to unstable estimates of $\eta^*$ and the P Model should not be used to make predictions below about -30 °C. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true fig, ax = pyplot.subplots(1, 1) diff --git a/docs/source/users/pmodel/pmodel_details/jmax_limitation.md b/docs/source/users/pmodel/pmodel_details/jmax_limitation.md index 5e07802f..b32d481e 100644 --- a/docs/source/users/pmodel/pmodel_details/jmax_limitation.md +++ b/docs/source/users/pmodel/pmodel_details/jmax_limitation.md @@ -5,15 +5,26 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # $J_{max}$ limitation -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from matplotlib import pyplot @@ -50,7 +61,7 @@ The plot below shows the effects of each method on the light use efficienct acro temperature gradient. The other forcing variables are fixed ($P=101325.0 , \ce{CO2}= 400 \text{ppm}, \text{VPD}=820$) and $\phi_0$ is also fixed ($\phi_0=0.08$). -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Calculate variation in m_jlim with temperature diff --git a/docs/source/users/pmodel/pmodel_details/optimal_chi.md b/docs/source/users/pmodel/pmodel_details/optimal_chi.md index fb22b49e..3fdac455 100644 --- a/docs/source/users/pmodel/pmodel_details/optimal_chi.md +++ b/docs/source/users/pmodel/pmodel_details/optimal_chi.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Optimal $\chi$ and leaf $\ce{CO2}$ @@ -63,7 +74,7 @@ for use within a P Model. - {class}`~pyrealm.pmodel.optimal_chi.OptimalChiC4NoGammaRootzoneStress` ``` -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from itertools import product @@ -196,7 +207,7 @@ def plot_opt_chi(mod): This **C3 method** follows the approach detailed in {cite:t}`Prentice:2014bc`, see {class}`~pyrealm.pmodel.optimal_chi.OptimalChiPrentice14` for details. 
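For orientation, the least-cost optimality solution behind this method is commonly written as $\chi = \Gamma^*/c_a + (1 - \Gamma^*/c_a)\,\xi/(\xi + \sqrt{D})$ with $\xi = \sqrt{\beta(K + \Gamma^*) / (1.6\,\eta^*)}$. The sketch below evaluates that textbook form with assumed values; it is a standalone illustration, not the `pyrealm` implementation, and the parameter values are not `pyrealm` defaults.

```python
# Standalone sketch of the least-cost optimal chi expression with assumed
# values; not the pyrealm implementation or its default parameterisation.
import numpy as np

beta = 146.0  # assumed unit cost ratio
eta_star = 1.0  # assumed relative viscosity of water
gammastar = 4.33  # assumed photorespiratory compensation point, Pa
kmm = 70.8  # assumed Michaelis-Menten coefficient, Pa
ca = 40.5  # assumed ambient CO2 partial pressure, Pa
vpd = 820.0  # assumed vapour pressure deficit, Pa

xi = np.sqrt(beta * (kmm + gammastar) / (1.6 * eta_star))
chi = gammastar / ca + (1 - gammastar / ca) * xi / (xi + np.sqrt(vpd))
print(xi, chi)
```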
-```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Run the P Model and plot predictions @@ -210,7 +221,7 @@ This **C4 method** follows the approach detailed in {cite:t}`Prentice:2014bc`, b a C4 specific version of the unit cost ratio ($\beta$). It also sets $m_j = m_c = 1$. See {class}`~pyrealm.pmodel.optimal_chi.OptimalChiC4` for details. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Run the P Model and plot predictions @@ -228,7 +239,7 @@ and also also sets $m_j = 1$, but $m_c$ is calculated as in {class}`~pyrealm.pmodel.optimal_chi.OptimalChiPrentice14`. See {meth}`~pyrealm.pmodel.optimal_chi.OptimalChiC4NoGamma` for details. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Run the P Model and plot predictions @@ -263,7 +274,7 @@ The calculation details are provided in the description of the {class}`~pyrealm.pmodel.optimal_chi.OptimalChiLavergne20C3` method, but the variation in $\beta$ with $\theta$ is shown below. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Theta is required for the calculation of beta @@ -291,7 +302,7 @@ The plots below show the impacts on optimal $\chi$ across a temperature gradient values of VPD and soil moisture, with constant atmospheric pressure (101325 Pa) and CO2 (280 ppm). -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Environments with high and low soil moisture @@ -356,7 +367,7 @@ pyplot.tight_layout() The plots below illustrate the impact of temperature and $\theta$ on $m_j$ and $m_c$, again with constant atmospheric pressure (101325 Pa) and CO2 (280 ppm). -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] fig, ((ax1, ax3), (ax2, ax4)) = pyplot.subplots(2, 2, figsize=(10, 10), sharey=True) @@ -454,7 +465,7 @@ but the variation in $\beta$ with rootzone stress is shown below. * {class}`~pyrealm.pmodel.optimal_chi.OptimalChiC4RootzoneStress` * {class}`~pyrealm.pmodel.optimal_chi.OptimalChiC4NoGammaRootzoneStress` -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from pyrealm.pmodel.optimal_chi import ( @@ -500,7 +511,7 @@ The plots below show the impacts on optimal $\chi$ across a temperature gradient values of VPD and rootzone stress, with constant atmospheric pressure (101325 Pa) and CO2 (280 ppm). -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Environments with high and low rootzone stress diff --git a/docs/source/users/pmodel/pmodel_details/photosynthetic_environment.md b/docs/source/users/pmodel/pmodel_details/photosynthetic_environment.md index cedd27fe..9e2c902e 100644 --- a/docs/source/users/pmodel/pmodel_details/photosynthetic_environment.md +++ b/docs/source/users/pmodel/pmodel_details/photosynthetic_environment.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Photosynthetic environment @@ -26,7 +37,7 @@ The descriptions below show the typical ranges of these values under common environmental inputs along with links to the more detailed documentation of the key functions. 
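A minimal sketch of constructing this environment object is shown below, following the same `PModelEnvironment(tc=..., patm=..., vpd=..., co2=...)` call pattern used in the worked examples; the import path and the single-element forcing values are assumptions made purely for illustration.

```python
# Minimal sketch assuming the PModelEnvironment import used in the worked
# examples; the forcing values are arbitrary single-element illustrations.
import numpy as np

from pyrealm.pmodel import PModelEnvironment

env = PModelEnvironment(
    tc=np.array([20.0]),  # air temperature, °C
    patm=np.array([101325.0]),  # atmospheric pressure, Pa
    vpd=np.array([820.0]),  # vapour pressure deficit, Pa
    co2=np.array([400.0]),  # atmospheric CO2 concentration, ppm
)
env.summarize()
```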
-```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # This code loads required packages and then creates a representative range of @@ -68,7 +79,7 @@ Details: {func}`pyrealm.pmodel.functions.calc_gammastar` The photorespiratory compensation point ($\Gamma^*$) varies with as a function of temperature and atmospheric pressure: -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Calculate gammastar @@ -91,7 +102,7 @@ Details: {func}`pyrealm.pmodel.functions.calc_kmm` The Michaelis-Menten coefficient for photosynthesis ($K_{mm}$) also varies with temperature and atmospheric pressure: -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Calculate K_mm @@ -118,7 +129,7 @@ pressure ($\eta^*$). The figure shows how $\eta^*$ varies with temperature and pressure. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Calculate the viscosity under the range of values and the standard @@ -146,7 +157,7 @@ Details: {func}`pyrealm.pmodel.functions.calc_co2_to_ca` The partial pressure of $\ce{CO2}$ is a function of the atmospheric concentration of $\ce{CO2}$ in parts per million and the atmospheric pressure: -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Variation in partial pressure diff --git a/docs/source/users/pmodel/pmodel_details/pmodel_overview.md b/docs/source/users/pmodel/pmodel_details/pmodel_overview.md index 9626ee98..475f9d6a 100644 --- a/docs/source/users/pmodel/pmodel_details/pmodel_overview.md +++ b/docs/source/users/pmodel/pmodel_details/pmodel_overview.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- diff --git a/docs/source/users/pmodel/pmodel_details/quantum_yield.md b/docs/source/users/pmodel/pmodel_details/quantum_yield.md index 92a3ae8c..d5162a1f 100644 --- a/docs/source/users/pmodel/pmodel_details/quantum_yield.md +++ b/docs/source/users/pmodel/pmodel_details/quantum_yield.md @@ -5,15 +5,26 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Quantum yield efficiency of photosynthesis -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # This code loads required packages and then creates a representative range of @@ -79,7 +90,7 @@ estimate of $\phi_0$, following {cite:t}`Bernacchi:2003dc` for C3 plants and $\phi_0 = 0.081785$, following the BRC parameterisation in Table 1. of {cite:t}`Stocker:2020dh`. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Calculate temperature dependence of quantum yield efficiency @@ -117,7 +128,7 @@ $\phi_0$ values to an otherwise constant environment. As you would expect given $\text{LUE} = \phi_0 \cdot M_C \cdot m_j$, light use efficiency changes linearly along this gradient of $\phi_0$ values. 
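That linear scaling can be checked with simple arithmetic: with $M_C$ (the molar mass of carbon) and $m_j$ held constant, doubling $\phi_0$ doubles LUE. The value of $m_j$ below is purely illustrative.

```python
# Arithmetic check of LUE = phi_0 * M_C * m_j with an illustrative m_j.
import numpy as np

M_C = 12.011  # molar mass of carbon, g mol-1
m_j = 0.7  # assumed CO2 limitation term
phi_0 = np.linspace(0.05, 0.125, 4)
lue = phi_0 * M_C * m_j

print(lue)  # increases linearly with phi_0
print(np.diff(lue))  # equal steps confirm the linear response
```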
-```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # A constant environment to show a range of kphio values @@ -157,7 +168,7 @@ $\phi_{0A} = \dfrac{\phi_{0R}}{(1 + \textrm{AI}^m) ^ n}$ This captures a decrease in maximum $\phi_0$ in arid conditions, as shown below. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] n_vals = 51 @@ -188,7 +199,7 @@ approach also alters the temperature at which $\phi_0$ is maximised as a functio mean growth temperature ($T_g$) in a location. The plot below shows how aridity and mean growth temperature interact to change the location and height of the peak $\phi_0$. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] n_vals = 51 diff --git a/docs/source/users/pmodel/pmodel_details/rpmodel.md b/docs/source/users/pmodel/pmodel_details/rpmodel.md index 7e691cc2..c46c12ba 100644 --- a/docs/source/users/pmodel/pmodel_details/rpmodel.md +++ b/docs/source/users/pmodel/pmodel_details/rpmodel.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The `rpmodel` implementation diff --git a/docs/source/users/pmodel/pmodel_details/soil_moisture.md b/docs/source/users/pmodel/pmodel_details/soil_moisture.md index 2b4b8a53..8d94a557 100644 --- a/docs/source/users/pmodel/pmodel_details/soil_moisture.md +++ b/docs/source/users/pmodel/pmodel_details/soil_moisture.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Soil moisture effects @@ -85,7 +96,7 @@ varies with changing soil moisture for some different values of mean aridity. In the examples below, the default $\theta_0 = 0$ has been changed to $\theta_0 = 0.1$ to make the lower bound more obvious. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from matplotlib import pyplot as plt @@ -151,7 +162,7 @@ by the resulting factor. The example below shows how the predicted light use efficiency from the P Model changes across an aridity gradient both with and without the soil moisture factor. -```{code-cell} +```{code-cell} ipython3 # Calculate the P Model in a constant environment tc = np.array([20] * 101) sm_gradient = np.linspace(0, 1.0, 101) @@ -174,7 +185,7 @@ for mean_alpha in [0.9, 0.5, 0.3, 0.1, 0.0]: gpp_stressed[mean_alpha] = model.gpp * sm_stress ``` -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] plt.plot(sm_gradient, model.gpp, label="No soil moisture penalty") @@ -234,7 +245,7 @@ y &= \min( a \textrm{AI} ^ {b}, 1)\\ \end{align*} $$ -```{code-cell} +```{code-cell} ipython3 from pyrealm.constants import PModelConst const = PModelConst() @@ -269,7 +280,7 @@ $$ \end{cases} $$ -```{code-cell} +```{code-cell} ipython3 # Calculate the soil moisture stress factor across a soil moisture # gradient for different aridity index values beta = {} @@ -298,7 +309,7 @@ calculated and then applied to the GPP calculated for a model ({attr}`~pyrealm.pmodel.pmodel.PModel.gpp`). 
In the example below, the result is obviously just $\beta(\theta)$ from above scaled to the constant GPP. -```{code-cell} +```{code-cell} ipython3 for ai in ai_vals: plt.plot(sm_gradient, model.gpp * beta[ai], label=f"AI = {ai}") @@ -309,6 +320,6 @@ plt.legend() plt.show() ``` -```{code-cell} +```{code-cell} ipython3 ``` diff --git a/docs/source/users/pmodel/pmodel_details/worked_examples.md b/docs/source/users/pmodel/pmodel_details/worked_examples.md index f3db6947..f69cc924 100644 --- a/docs/source/users/pmodel/pmodel_details/worked_examples.md +++ b/docs/source/users/pmodel/pmodel_details/worked_examples.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Worked examples @@ -38,9 +49,7 @@ The example shows the steps required using a single site with: ### Estimate photosynthetic environment -```{code-cell} -:trusted: true - +```{code-cell} ipython3 from importlib import resources from matplotlib import pyplot as plt @@ -60,15 +69,11 @@ terse - just the shape of the data - but the {meth}`~pyrealm.pmodel.pmodel_environment.PModelEnvironment.summarize` method provides a more detailed summary of the attributes. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 env ``` -```{code-cell} -:trusted: true - +```{code-cell} ipython3 env.summarize() ``` @@ -77,18 +82,14 @@ env.summarize() Next, the P Model can be fitted to the photosynthetic environment using the ({class}`~pyrealm.pmodel.pmodel.PModel`) class: -```{code-cell} -:trusted: true - +```{code-cell} ipython3 model = PModel(env) ``` The returned model object holds a lot of information. The representation of the model object shows a terse display of the settings used to run the model: -```{code-cell} -:trusted: true - +```{code-cell} ipython3 model ``` @@ -98,9 +99,7 @@ displays a summary of calculated predictions. Initially, this shows two measures photosynthetic efficiency: the intrinsic water use efficiency (``iwue``) and the light use efficiency (``lue``). -```{code-cell} -:trusted: true - +```{code-cell} ipython3 model.summarize() ``` @@ -112,9 +111,7 @@ recording key parameters from the [calculation of $\chi$](./optimal_chi). This object also has a {meth}`~pyrealm.pmodel.optimal_chi.OptimalChiABC.summarize` method: -```{code-cell} -:trusted: true - +```{code-cell} ipython3 model.optchi.summarize() ``` @@ -130,9 +127,7 @@ Here we are using: * An absorption fraction of 0.91 (-), and * a PPFD of 834 µmol m-2 s-1. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 model.estimate_productivity(fapar=0.91, ppfd=834) model.summarize() ``` @@ -164,9 +159,7 @@ to be the same size so some of the variables have repeated data across dimension cell. * Elevation is constant across months, so the data for each month is repeated. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Load an example dataset containing the forcing variables. data_path = resources.files("pyrealm_build_data.rpmodel") / "pmodel_global.nc" ds = xarray.load_dataset(data_path) @@ -185,9 +178,7 @@ The model can now be run using that data. 
The first step is to convert the eleva data to atmospheric pressure, and then this is used to set the photosynthetic environment for the model: -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Convert elevation to atmospheric pressure patm = calc_patm(elev) @@ -205,9 +196,7 @@ env.summarize() That environment can then be run to calculate the P model predictions for light use efficiency: -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Run the P model model = PModel(env) @@ -220,9 +209,7 @@ plt.title("Light use efficiency") Finally, the light use efficiency can be used to calculate GPP given the photosynthetic photon flux density and fAPAR. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Scale the outputs from values per unit iabs to realised values model.estimate_productivity(fapar, ppfd) diff --git a/docs/source/users/pmodel/subdaily_details/acclimation.md b/docs/source/users/pmodel/subdaily_details/acclimation.md index 193b38f9..31805af1 100644 --- a/docs/source/users/pmodel/subdaily_details/acclimation.md +++ b/docs/source/users/pmodel/subdaily_details/acclimation.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Estimating acclimation @@ -28,7 +39,7 @@ modelling approach to representing slow responses within the P Model, following * The interpolation of realised daily values back onto the subdaily timescale. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from copy import copy @@ -58,7 +69,7 @@ interpolating data back to subdaily timescales. In practice {cite:t}`mengoli:202 present results using one hour windows around noon or even the single value closest to noon. -```{code-cell} +```{code-cell} ipython3 # Define a set of observations at a subdaily timescale fast_datetimes = np.arange( np.datetime64("1970-01-01"), np.datetime64("1970-01-08"), np.timedelta64(30, "m") @@ -78,7 +89,7 @@ demo_scaler.set_window(window_center=np.timedelta64(12, "h"), half_width=half_wi The plot below shows the rapidly changing variable and the defined daily acclimation windows. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] fig, ax = plt.subplots() @@ -129,7 +140,7 @@ applies the memory effect with three different values of $\alpha$. When $\alpha the realised values are identical to the daily optimum value within the acclimation window. -```{code-cell} +```{code-cell} ipython3 # Extract the optimal values within the daily acclimation windows daily_mean = demo_scaler.get_daily_means(fast_data) @@ -139,7 +150,7 @@ real_3 = memory_effect(daily_mean, alpha=1 / 3) real_1 = memory_effect(daily_mean, alpha=1) ``` -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] fig, ax = plt.subplots() @@ -194,7 +205,7 @@ The code below shows how the used to interpolate realised values back to the subdaily scale, using different settings for the update point and interpolation method. 
-```{code-cell} +```{code-cell} ipython3 # Fill to the subdaily scale using the default settings: # - update at the end of the acclimation window # - hold the value constant between update points @@ -248,7 +259,7 @@ Plot D : The daily optimal realised value is again able to instantaneously adopt the daily optimal value, but the one day offset for linear interpolation is applied. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] # Create the figure diff --git a/docs/source/users/pmodel/subdaily_details/subdaily_calculations.md b/docs/source/users/pmodel/subdaily_details/subdaily_calculations.md index 1a134c84..49e1195f 100644 --- a/docs/source/users/pmodel/subdaily_details/subdaily_calculations.md +++ b/docs/source/users/pmodel/subdaily_details/subdaily_calculations.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Subdaily P Model calculations @@ -19,9 +30,8 @@ steps used in the estimation process in order to show intermediates results but practice, as shown in the [worked example](worked_example), most of these calculations are handled internally by the model fitting in `pyrealm`. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true from importlib import resources @@ -47,9 +57,7 @@ The code below uses half hourly data from 2014 for the [BE-Vie FluxNET site](https://fluxnet.org/doi/FLUXNET2015/BE-Vie), which was also used as a demonstration in {cite:t}`mengoli:2022a`. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 data_path = resources.files("pyrealm_build_data.subdaily") / "subdaily_BE_Vie_2014.csv" data = pandas.read_csv(str(data_path)) @@ -70,9 +78,7 @@ This dataset can then be used to calculate the photosynthetic environment at the subdaily timescale. The code below also estimates GPP under the standard P Model with no slow responses for comparison. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Calculate the photosynthetic environment subdaily_env = PModelEnvironment( tc=temp_subdaily, @@ -98,9 +104,7 @@ best to sample those conditions. Typically those might be the observed environme conditions at the observation closest to noon, or the mean environmental conditions in a window around noon. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Create the fast slow scaler fsscaler = SubdailyScaler(datetime_subdaily) @@ -120,9 +124,8 @@ pmodel_subdaily = SubdailyPModel( ) ``` -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true idx = np.arange(48 * 120, 48 * 130) plt.figure(figsize=(10, 4)) @@ -145,9 +148,7 @@ The daily average conditions during the acclimation window can be sampled and us inputs to the standard P Model to calculate the optimal behaviour of plants under those conditions. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Get the daily acclimation conditions for the forcing variables temp_acclim = fsscaler.get_daily_means(temp_subdaily) co2_acclim = fsscaler.get_daily_means(co2_subdaily) @@ -178,9 +179,7 @@ temperatures so $J_{max}$ and $V_{cmax}$ must first be standardised to expected at 25°C. This is acheived by multiplying by the reciprocal of the exponential part of the Arrhenius equation ($h^{-1}$ in {cite}`mengoli:2022a`). 
-```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Are these any of the existing values in the constants? ha_vcmax25 = 65330 ha_jmax25 = 43900 @@ -195,9 +194,7 @@ jmax25_acclim = pmodel_acclim.jmax * (1 / calc_ftemp_arrh(tk_acclim, ha_jmax25)) The memory effect can now be applied to the three parameters with slow responses to calculate realised values, here using the default 15 day window. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Calculation of memory effect in xi, vcmax25 and jmax25 xi_real = memory_effect(pmodel_acclim.optchi.xi, alpha=1 / 15) vcmax25_real = memory_effect(vcmax25_acclim, alpha=1 / 15, allow_holdover=True) @@ -208,9 +205,8 @@ The plots below show the instantaneously acclimated values for $J_{max25}$, $V_{cmax25}$ and $\xi$ in grey along with the realised slow reponses, after application of the memory effect. -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] -:trusted: true fig, axes = plt.subplots(1, 3, figsize=(16, 5)) @@ -243,9 +239,7 @@ temperature at fast scales: * These values are adjusted to the actual half hourly temperatures to give the fast responses of $J_{max}$ and $V_{cmax}$. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 tk_subdaily = subdaily_env.tc + pmodel_subdaily.env.core_const.k_CtoK # Fill the realised jmax and vcmax from subdaily to daily @@ -265,9 +259,7 @@ passing the realised values of $\xi$ as a fixed constraint to the calculation of optimal $\chi$, rather than calculating the instantaneously optimal values of $\xi$ as is the case in the standard P Model. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Interpolate xi to subdaily scale xi_subdaily = fsscaler.fill_daily_to_subdaily(xi_real) @@ -286,9 +278,7 @@ Model, where $c_i$ includes the slow responses of $\xi$ and $V_{cmax}$ and $J_{m include the slow responses of $V_{cmax25}$ and $J_{max25}$ and fast responses to temperature. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Calculate Ac Ac_subdaily = ( vcmax_subdaily @@ -317,9 +307,3 @@ GPP_subdaily = ( diff = GPP_subdaily - pmodel_subdaily.gpp print(np.nanmin(diff), np.nanmax(diff)) ``` - -```{code-cell} -:trusted: true - - -``` diff --git a/docs/source/users/pmodel/subdaily_details/subdaily_model_and_missing_data.md b/docs/source/users/pmodel/subdaily_details/subdaily_model_and_missing_data.md index 881f71b8..61d730d8 100644 --- a/docs/source/users/pmodel/subdaily_details/subdaily_model_and_missing_data.md +++ b/docs/source/users/pmodel/subdaily_details/subdaily_model_and_missing_data.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Missing data in the subdaily model @@ -46,7 +57,7 @@ The code below gives a concrete example - a time series that starts and ends dur the middle of a one hour acclimation window around noon. Only two of the three observations are provided for the first and last day -```{code-cell} +```{code-cell} ipython3 import numpy as np from pyrealm.pmodel.scaler import SubdailyScaler @@ -88,7 +99,7 @@ problem of the missing data clearly: * One day has a single missing 12:00 data point within the acclimation window. * One day has no data within the acclimation window. 
-```{code-cell} +```{code-cell} ipython3 fsscaler.get_window_values(data) ``` @@ -97,7 +108,7 @@ The daily average conditions are calculated using the partial data are not allowed - which is the default - the daily average conditions for all days with missing data is also missing (`np.nan`). -```{code-cell} +```{code-cell} ipython3 partial_not_allowed = fsscaler.get_daily_means(data) partial_not_allowed ``` @@ -106,7 +117,7 @@ Setting `allow_partial_data = True` allows the daily average conditions to be ca from the partial available information. This does not solve the problem for the day with no data in the acclimation window, which still results in a missing value. -```{code-cell} +```{code-cell} ipython3 partial_allowed = fsscaler.get_daily_means(data, allow_partial_data=True) partial_allowed ``` @@ -115,7 +126,7 @@ The :func:`~pyrealm.pmodel.subdaily.memory_effect` function is used to calculate realised values of a variable from the optimal values. By default, this function *will raise an error* when missing data are present: -```{code-cell} +```{code-cell} ipython3 :tags: [raises-exception] memory_effect(partial_not_allowed) @@ -125,14 +136,14 @@ The `allow_holdover` option allows the function to be run - the value for the fi is still `np.nan` but the missing observations on day 3, 5 and 7 are filled by holding over the valid observations from the previous day. -```{code-cell} +```{code-cell} ipython3 memory_effect(partial_not_allowed, allow_holdover=True) ``` When the partial data is allowed, the `allow_holdover` is still required to fill the gap on day 5 by holding over the data from day 4. -```{code-cell} +```{code-cell} ipython3 memory_effect(partial_allowed, allow_holdover=True) ``` diff --git a/docs/source/users/pmodel/subdaily_details/subdaily_overview.md b/docs/source/users/pmodel/subdaily_details/subdaily_overview.md index 3278b056..e96a57c9 100644 --- a/docs/source/users/pmodel/subdaily_details/subdaily_overview.md +++ b/docs/source/users/pmodel/subdaily_details/subdaily_overview.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The P Model with acclimation diff --git a/docs/source/users/pmodel/subdaily_details/worked_example.md b/docs/source/users/pmodel/subdaily_details/worked_example.md index fac12b6a..f30d4155 100644 --- a/docs/source/users/pmodel/subdaily_details/worked_example.md +++ b/docs/source/users/pmodel/subdaily_details/worked_example.md @@ -5,17 +5,26 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Worked example of the Subdaily P Model -```{code-cell} -:trusted: true - +```{code-cell} ipython3 from importlib import resources import xarray @@ -60,9 +69,7 @@ fitting basically takes all of the same arguments as the standard The test data use some UK WFDE data for three sites in order to compare predictions over a time series. 
-```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Loading the example dataset: dpath = ( resources.files("pyrealm_build_data.uk_data") / "UK_WFDE5_FAPAR_2018_JuneJuly.nc" @@ -83,9 +90,7 @@ sites = xarray.Dataset( The WFDE data need some conversion for use in the PModel, along with the definition of the atmospheric CO2 concentration. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Variable set up # Air temperature in °C from Tair in Kelvin tc = (ds["Tair"] - 273.15).to_numpy() @@ -104,9 +109,7 @@ co2 = np.ones_like(tc) * 400 The code below then calculates the photosynthetic environment. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Generate and check the PModelEnvironment pm_env = PModelEnvironment(tc=tc, patm=patm, vpd=vpd, co2=co2) pm_env.summarize() @@ -117,9 +120,7 @@ pm_env.summarize() The standard implementation of the P Model used below assumes that plants can instantaneously adopt optimal behaviour. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Standard PModels pmodC3 = PModel( env=pm_env, method_kphio="fixed", reference_kphio=1 / 8, method_optchi="prentice14" @@ -128,9 +129,7 @@ pmodC3.estimate_productivity(fapar=fapar, ppfd=ppfd) pmodC3.summarize() ``` -```{code-cell} -:trusted: true - +```{code-cell} ipython3 pmodC4 = PModel( env=pm_env, method_kphio="fixed", reference_kphio=1 / 8, method_optchi="c4_no_gamma" ) @@ -146,9 +145,7 @@ values to holdover previous realised values to cover missing data within the calculations: essentially the plant does not acclimate until the optimal values can be calculated again to update those realised estimates. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Set the acclimation window to an hour either side of noon fsscaler = SubdailyScaler(datetimes) fsscaler.set_window( @@ -187,9 +184,7 @@ The code below then extracts the time series for the two months from the three s shown above and plots the instantaneous predictions against predictions including slow photosynthetic responses. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Store the predictions in the xarray Dataset to use indexing ds["GPP_pmodC3"] = (ds["Tair"].dims, pmodC3.gpp) ds["GPP_subdailyC3"] = (ds["Tair"].dims, subdailyC3.gpp) @@ -246,9 +241,7 @@ plt.tight_layout() The subdaily models can also be obtained directly from the standard models, using the `convert_pmodel_to_subdaily` method: -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Convert standard C3 model converted_C3 = convert_pmodel_to_subdaily( pmodel=pmodC3, @@ -269,9 +262,7 @@ converted_C4 = convert_pmodel_to_subdaily( This produces the same outputs as the `SubdailyPModel` class, but is convenient and more compact when the two models are going to be compared. -```{code-cell} -:trusted: true - +```{code-cell} ipython3 # Models have identical GPP - maximum absolute difference is zero. 
print(np.nanmax(abs(subdailyC3.gpp.flatten() - converted_C3.gpp.flatten()))) print(np.nanmax(abs(subdailyC4.gpp.flatten() - converted_C4.gpp.flatten()))) diff --git a/docs/source/users/splash.md b/docs/source/users/splash.md index e5c4d283..20236db4 100644 --- a/docs/source/users/splash.md +++ b/docs/source/users/splash.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The `splash` submodule @@ -46,7 +57,7 @@ The data below provides a 2 year daily time series of precipitation, temperature solar fraction (1 - cloud cover) for 0.5° resolution grid cells in a 10° by 10° block of the North Western USA. It also provides the mean elevation of those cells. -```{code-cell} +```{code-cell} ipython3 from importlib import resources import numpy as np import xarray @@ -74,7 +85,7 @@ data The plot below shows the elevation for the example data area, along with the locations of three sites that will be used to compare SPLASH outputs. -```{code-cell} +```{code-cell} ipython3 # Get the latitude and longitude extents extent = ( data["lon"].min(), @@ -95,7 +106,7 @@ The three sites capture wetter coastal conditions with milder temperatures (San Francisco), intermediate rainfall with colder temperatures (Yosemite) and arid conditions with extreme temperatures (Death Valley). -```{code-cell} +```{code-cell} ipython3 # Get three sites to show time series for locations site_data = data.sel(sites, method="nearest") @@ -149,7 +160,7 @@ may well be constant across the longitude dimension for gridded data - but, at t moment, you need to broadcast these variables to match. ``` -```{code-cell} +```{code-cell} ipython3 splash = SplashModel( lat=np.broadcast_to(data.lat.data[None, :, None], data.sf.data.shape), elv=np.broadcast_to(data.elev.data[None, :, :], data.sf.data.shape), @@ -176,7 +187,7 @@ give the expected soil moisture at the end of the year. If this is sufficiently to the start values, the estimate is returned, otherwise the end of year expectations are used as a starting point to recalculate the annual water balances. -```{code-cell} +```{code-cell} ipython3 init_soil_moisture = splash.estimate_initial_soil_moisture(verbose=False) ``` @@ -200,7 +211,7 @@ The plots show the soil moisture for the first day, along with the changes in so moisture from the initial estimates (the 'previous day'). Note the saturated soil moisture of 150mm near the coast and in the mountains. -```{code-cell} +```{code-cell} ipython3 # Calculate the water balance equation for the first day from the initial soil # moisture estimates. aet, wn, ro = splash.estimate_daily_water_balance(init_soil_moisture, day_idx=0) @@ -223,13 +234,13 @@ the daily estimation across all of the dates in the input data from initial soil moisture estimates. It returns a set of time series of soil moisture, runoff and AET for all sites. -```{code-cell} +```{code-cell} ipython3 aet_out, wn_out, ro_out = splash.calculate_soil_moisture(init_soil_moisture) ``` The plots below show the resulting soil moisture and a time series for the three -```{code-cell} +```{code-cell} ipython3 # Add the outputs to the xarray to select the three sites easily. 
data["aet"] = xarray.DataArray(aet_out, dims=("time", "lat", "lon")) data["wn"] = xarray.DataArray(wn_out, dims=("time", "lat", "lon")) diff --git a/docs/source/users/tmodel/canopy.md b/docs/source/users/tmodel/canopy.md index 3fab2df8..d760ca77 100644 --- a/docs/source/users/tmodel/canopy.md +++ b/docs/source/users/tmodel/canopy.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 (ipykernel) language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # Canopy model @@ -36,7 +47,7 @@ This sketch: object. ``` -```{code-cell} +```{code-cell} ipython3 import numpy as np import matplotlib.pyplot as plt from scipy.optimize import root_scalar @@ -54,11 +65,11 @@ The scaling of a set of trees is automatically calculated using the initial diam the `TTree` instance. This automatically calculates the other dimensions, such as height, using the underlying scaling equations of the T Model. -```{code-cell} +```{code-cell} ipython3 pft.height ``` -```{code-cell} +```{code-cell} ipython3 :lines_to_next_cell: 2 pft.crown_area @@ -97,7 +108,7 @@ r_0 &= \frac{1}{q_m}\sqrt{\frac{A_c}{\pi}} \end{align} $$ -```{code-cell} +```{code-cell} ipython3 :lines_to_next_cell: 2 def calculate_qm(m, n): @@ -146,7 +157,7 @@ r(z) &= r_0 \; q(z) \end{align} $$ -```{code-cell} +```{code-cell} ipython3 def calculate_relative_canopy_radius_at_z(z, H, m, n): """Calculate q(z)""" @@ -155,20 +166,20 @@ def calculate_relative_canopy_radius_at_z(z, H, m, n): return m * n * z_over_H ** (n - 1) * (1 - z_over_H**n) ** (m - 1) ``` -```{code-cell} +```{code-cell} ipython3 # Validate that zm and r0 generate the predicted maximum crown area q_zm = calculate_relative_canopy_radius_at_z(zm, pft.height, m, n) rm = r0 * q_zm print("rm = ", rm) ``` -```{code-cell} +```{code-cell} ipython3 np.allclose(rm**2 * np.pi, pft.crown_area) ``` Vertical crown radius profiles can now be calculated for each stem: -```{code-cell} +```{code-cell} ipython3 # Create an interpolation from ground to maximum stem height, with 5 cm resolution. # Also append a set of values _fractionally_ less than the exact height of stems # so that the height at the top of each stem is included but to avoid floating @@ -190,7 +201,7 @@ np.cumsum(np.convolve(rm, np.ones(2), "valid") + 0.1) Those can be plotted out to show the vertical crown radius profiles -```{code-cell} +```{code-cell} ipython3 # Separate the stems along the x axis for plotting stem_x = np.concatenate( [np.array([0]), np.cumsum(np.convolve(rm, np.ones(2), "valid") + 0.4)] @@ -237,7 +248,7 @@ A_c \left(\dfrac{q(z)}{q_m}\right)^2, & H > z > z_m \\ \end{cases} $$ -```{code-cell} +```{code-cell} ipython3 Stems = float | np.ndarray @@ -281,7 +292,7 @@ def calculate_projected_area( The code below calculates the projected crown area for each stem and then plots the vertical profile for individual stems and across the community. 
-```{code-cell} +```{code-cell} ipython3 :lines_to_next_cell: 2 # Calculate the projected area for each stem @@ -331,7 +342,7 @@ $$ l_m = \left\lceil \frac{\sum_1^{N_s}{ A_c}}{ A(1 - f_G)}\right\rceil $$ -```{code-cell} +```{code-cell} ipython3 def solve_canopy_closure_height( z: float, l: int, @@ -395,7 +406,7 @@ def calculate_canopy_heights( The example below calculates the projected crown area above ground level for the example stems. These should be identical to the crown area of the stems. -```{code-cell} +```{code-cell} ipython3 # Set the total available canopy space and community gap fraction canopy_area = 32 community_gap_fraction = 2 / 32 @@ -412,7 +423,7 @@ superimpose the calculated $z^*_l$ values and the cumulative canopy area for eac to confirm that the calculated values coincide with the profile. Note here that the total area at each closed layer height is omitting the community gap fraction. -```{code-cell} +```{code-cell} ipython3 community_Ap_z = np.nansum(Ap_z, axis=1) fig, (ax1, ax2) = plt.subplots(1, 2, sharey=True, figsize=(10, 5)) @@ -481,14 +492,14 @@ plt.tight_layout() The projected area from individual stems to each canopy layer can then be calculated at $z^*_l$ and hence the projected area of canopy **within each layer**. -```{code-cell} +```{code-cell} ipython3 # Calculate the canopy area above z_star for each stem Ap_z_star = calculate_projected_area(z=z_star[:, None], pft=pft, m=m, n=n, qm=qm, zm=zm) print(Ap_z_star) ``` -```{code-cell} +```{code-cell} ipython3 :lines_to_next_cell: 2 # Calculate the contribution _within_ each layer per stem @@ -527,7 +538,7 @@ $$ The function below calculates $\tilde{A}_{cp}(z)$. -```{code-cell} +```{code-cell} ipython3 def calculate_leaf_area( z: float, fg: float, @@ -572,7 +583,7 @@ there are no crown gaps and hence all of the leaf area is within the crown surfa $f_g \to 1$, more of the leaf area is displaced deeper into the canopy, leaves in the lower crown intercepting light coming through holes in the upper canopy. -```{code-cell} +```{code-cell} ipython3 fig, ax1 = plt.subplots(1, 1, figsize=(6, 5)) for fg in np.arange(0, 1.01, 0.05): @@ -600,7 +611,7 @@ ax1.legend(frameon=False) We can now calculate the crown area occupied by leaves above the height of each closed layer $z^*_l$: -```{code-cell} +```{code-cell} ipython3 # Calculate the leaf area above z_star for each stem crown_gap_fraction = 0.05 Acp_z_star = calculate_leaf_area( @@ -614,7 +625,7 @@ And from that, the area occupied by leaves **within each layer**. These values a similar to the projected crown area within layers (`Ap_within_layer`, above) but leaf area is displaced into lower layers because $f_g > 0$. -```{code-cell} +```{code-cell} ipython3 # Calculate the contribution _within_ each layer per stem Acp_within_layer = np.diff(Acp_z_star, axis=0, prepend=0) @@ -631,7 +642,7 @@ $f_{abs} = 1 - e ^ {-kL}$, where $k$ is the light extinction coefficient ($k$) and $L$ is the leaf area index (LAI). 
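As a quick, self-contained illustration of the Beer-Lambert step described above, the sketch below applies $f_{abs} = 1 - e^{-kL}$ to assumed values of the extinction coefficient and per-layer LAI, and then accumulates transmission down through the layers in the same way as the new `pyrealm.demography.canopy` code later in this diff. This is an editor's sketch, not part of the notebook: `k` and `lai_layer` are arbitrary placeholder values, not outputs of the cells above.

```python
import numpy as np

# Assumed values: an extinction coefficient and the LAI of three canopy layers,
# ordered from the top of the canopy downwards.
k = 0.5
lai_layer = np.array([1.2, 0.8, 0.3])

# Fraction of incoming light absorbed within each layer (Beer-Lambert)
f_abs = 1 - np.exp(-k * lai_layer)

# Fraction transmitted through each layer, and the cumulative transmission and
# extinction profiles moving down through the canopy.
f_trans = 1 - f_abs
transmission_profile = np.cumprod(f_trans)
extinction_profile = 1 - transmission_profile

print(f_abs)
print(extinction_profile)
```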
The LAI can be calculated for each stem and layer: -```{code-cell} +```{code-cell} ipython3 LAI = Acp_within_layer / canopy_area print(LAI) ``` @@ -639,7 +650,7 @@ print(LAI) This can be used to calculate the LAI of individual stems but also the LAI of each layer in the canopy: -```{code-cell} +```{code-cell} ipython3 LAI_stem = LAI.sum(axis=0) LAI_layer = LAI.sum(axis=1) @@ -650,7 +661,7 @@ print("LAI layer = ", LAI_layer) The layer LAI values can now be used to calculate the light transmission of each layer and hence the cumulative light extinction profile through the canopy. -```{code-cell} +```{code-cell} ipython3 f_abs = 1 - np.exp(-pft.traits.par_ext * LAI_layer) ext = np.cumprod(f_abs) @@ -662,7 +673,7 @@ One issue that needs to be resolved is that the T Model implementation in `pyrea follows the original implementation of the T Model in having LAI as a fixed trait of a given plant functional type, so is constant for all stems of that PFT. -```{code-cell} +```{code-cell} ipython3 print("f_abs = ", (1 - np.exp(-pft.traits.par_ext * pft.traits.lai))) ``` diff --git a/docs/source/users/tmodel/tmodel.md b/docs/source/users/tmodel/tmodel.md index fda39de0..d7e75b31 100644 --- a/docs/source/users/tmodel/tmodel.md +++ b/docs/source/users/tmodel/tmodel.md @@ -5,10 +5,21 @@ jupytext: extension: .md format_name: myst format_version: 0.13 + jupytext_version: 1.16.4 kernelspec: display_name: Python 3 language: python name: python3 +language_info: + codemirror_mode: + name: ipython + version: 3 + file_extension: .py + mimetype: text/x-python + name: python + nbconvert_exporter: python + pygments_lexer: ipython3 + version: 3.11.9 --- # The T Model @@ -31,7 +42,7 @@ class description. The class can be used to create a default T Model trait set: -```{code-cell} +```{code-cell} ipython3 import numpy as np from pyrealm import tmodel @@ -42,7 +53,7 @@ print(traits1) It can also be edited to generate different growth patterns: -```{code-cell} +```{code-cell} ipython3 # A slower growing tree with a higher maximum height traits2 = tmodel.TModelTraits(a_hd=50, h_max=40) print(traits2) @@ -71,7 +82,7 @@ diameters and an optional set of traits as a {class}`~pyrealm.constants.tmodel_const.TModelTraits` object. If no traits are provided, the default {class}`~pyrealm.constants.tmodel_const.TModelTraits` settings are used. -```{code-cell} +```{code-cell} ipython3 # Use a sequence of diameters from sapling to large tree diameters = np.linspace(0.02, 2, 100) tree1 = tmodel.TTree(diameters=diameters) # Using default traits @@ -92,7 +103,7 @@ These inputs are then immediately used to calculate the following properties of Using an array of diameter values provides an immediate way to visualise the geometric scaling resulting from a particular set of plant traits: -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] from matplotlib import pyplot @@ -144,12 +155,12 @@ provide estimates of the following growth parameters: The code below calculates growth estimates at each diameter under a constant GPP of 7 TODO - UNITS!. 
-```{code-cell} +```{code-cell} ipython3 tree1.calculate_growth(np.array([7])) tree2.calculate_growth(np.array([7])) ``` -```{code-cell} +```{code-cell} ipython3 :tags: [hide-input] fig, (ax1, ax2, ax3) = pyplot.subplots(1, 3, figsize=(12, 4)) @@ -183,7 +194,7 @@ The {meth}`~pyrealm.tmodel.TTree.reset_diameters` can be used to update an exist {meth}`~pyrealm.tmodel.TTree.reset_diameters` automatically resets any calculated growth parameters: they will need to be recalculated for the new diameters. -```{code-cell} +```{code-cell} ipython3 tree1.reset_diameters(np.array([0.0001])) print(tree1.height) ``` diff --git a/pyproject.toml b/pyproject.toml index 2823f7cc..e436ab8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,15 +31,15 @@ repository = "https://github.com/ImperialCollegeLondon/pyrealm" version = "1.0.0" [tool.poetry.dependencies] -dacite = "^1.6.0" -numpy = "^2.0.0" +dacite = "^1.6.0" +numpy = "^2.0.0" python = ">=3.10" -scipy = "^1.7.3" -tabulate = "^0.8.10" +scipy = "^1.7.3" +tabulate = "^0.8.10" marshmallow = "^3.22.0" -pandas = "^2.2.2" marshmallow-dataclass = "^8.7.0" +pandas = "^2.2.2" pandas-stubs = "^2.2.2.240909" [tool.poetry.group.types.dependencies] pandas-stubs = "^2.2.0.240218" @@ -132,7 +132,7 @@ select = [ "I", # isort "UP", # pyupgrade "RUF", # RUF specific checks - "NPY201" + "NPY201", ] # On top of the Google convention, disable: @@ -147,6 +147,6 @@ convention = "google" [tool.jupytext] # Stop jupytext from removing mystnb and other settings in MyST Notebook YAML headers -notebook_metadata_filter = "-jupytext.text_representation.jupytext_version,settings,mystnb" -# Also stop it from stripping cell metadata. -cell_metadata_filter = "all" \ No newline at end of file +notebook_metadata_filter = "settings,mystnb,language_info" +# Also stop it from stripping cell metadata, except for specific ones to lose. 
+cell_metadata_filter = "all,-trusted" diff --git a/pyrealm/constants/core_const.py b/pyrealm/constants/core_const.py index 3f27d653..b9f38463 100644 --- a/pyrealm/constants/core_const.py +++ b/pyrealm/constants/core_const.py @@ -115,7 +115,7 @@ class CoreConst(ConstantsClass): :cite:t:`berger:1978a`.""" # Hygro constants - magnus_coef: NDArray[np.float32] = field( + magnus_coef: NDArray[np.float64] = field( default_factory=lambda: np.array((611.2, 17.62, 243.12)) ) """Three coefficients of the Magnus equation for saturated vapour pressure, @@ -133,7 +133,7 @@ class CoreConst(ConstantsClass): """Set the method used for calculating water density ('fisher' or 'chen').""" # Fisher Dial - fisher_dial_lambda: NDArray[np.float32] = field( + fisher_dial_lambda: NDArray[np.float64] = field( default_factory=lambda: np.array( [1788.316, 21.55053, -0.4695911, 0.003096363, -7.341182e-06] ) @@ -141,7 +141,7 @@ class CoreConst(ConstantsClass): r"""Coefficients of the temperature dependent polynomial for :math:`\lambda` in the Tumlirz equation.""" - fisher_dial_Po: NDArray[np.float32] = field( + fisher_dial_Po: NDArray[np.float64] = field( default_factory=lambda: np.array( [5918.499, 58.05267, -1.1253317, 0.0066123869, -1.4661625e-05] ) @@ -149,7 +149,7 @@ class CoreConst(ConstantsClass): """Coefficients of the temperature dependent polynomial for :math:`P_0` in the Tumlirz equation.""" - fisher_dial_Vinf: NDArray[np.float32] = field( + fisher_dial_Vinf: NDArray[np.float64] = field( default_factory=lambda: np.array( [ 0.6980547, @@ -169,7 +169,7 @@ class CoreConst(ConstantsClass): in the Tumlirz equation.""" # Chen water density - chen_po: NDArray[np.float32] = field( + chen_po: NDArray[np.float64] = field( default_factory=lambda: np.array( [ 0.99983952, @@ -187,7 +187,7 @@ class CoreConst(ConstantsClass): r"""Coefficients of the polynomial relationship of water density with temperature at 1 atm (:math:`P^0`, kg/m^3) from :cite:t:`chen:2008a`.""" - chen_ko: NDArray[np.float32] = field( + chen_ko: NDArray[np.float64] = field( default_factory=lambda: np.array( [19652.17, 148.1830, -2.29995, 0.01281, -4.91564e-5, 1.035530e-7] ) @@ -195,7 +195,7 @@ class CoreConst(ConstantsClass): r"""Polynomial relationship of bulk modulus of water with temperature at 1 atm (:math:`K^0`, kg/m^3) from :cite:t:`chen:2008a`.""" - chen_ca: NDArray[np.float32] = field( + chen_ca: NDArray[np.float64] = field( default_factory=lambda: np.array( [3.26138, 5.223e-4, 1.324e-4, -7.655e-7, 8.584e-10] ) @@ -203,7 +203,7 @@ class CoreConst(ConstantsClass): r"""Coefficients of the polynomial temperature dependent coefficient :math:`A` from :cite:t:`chen:2008a`.""" - chen_cb: NDArray[np.float32] = field( + chen_cb: NDArray[np.float64] = field( default_factory=lambda: np.array( [7.2061e-5, -5.8948e-6, 8.69900e-8, -1.0100e-9, 4.3220e-12] ) @@ -221,11 +221,11 @@ class CoreConst(ConstantsClass): huber_mu_ast: float = 1e-06 r"""Huber reference pressure (:math:`\mu_{ast}` 1.0e-6, Pa s)""" - huber_H_i: NDArray[np.float32] = field( + huber_H_i: NDArray[np.float64] = field( default_factory=lambda: np.array([1.67752, 2.20462, 0.6366564, -0.241605]) ) """Temperature dependent parameterisation of Hi in Huber.""" - huber_H_ij: NDArray[np.float32] = field( + huber_H_ij: NDArray[np.float64] = field( default_factory=lambda: np.array( [ [0.520094, 0.0850895, -1.08374, -0.289555, 0.0, 0.0], diff --git a/pyrealm/constants/pmodel_const.py b/pyrealm/constants/pmodel_const.py index 5e3746a6..fb5c7ebf 100644 --- a/pyrealm/constants/pmodel_const.py +++ 
b/pyrealm/constants/pmodel_const.py @@ -139,11 +139,11 @@ class PModelConst(ConstantsClass): # - note that kphio_C4 has been updated to account for an unintended double # 8 fold downscaling to account for the fraction of light reaching PS2. # from original values of [-0.008, 0.00375, -0.58e-4] - kphio_C4: NDArray[np.float32] = field( + kphio_C4: NDArray[np.float64] = field( default_factory=lambda: np.array((-0.064, 0.03, -0.000464)) ) """Quadratic scaling of Kphio with temperature for C4 plants""" - kphio_C3: NDArray[np.float32] = field( + kphio_C3: NDArray[np.float64] = field( default_factory=lambda: np.array((0.352, 0.022, -0.00034)) ) """Quadratic scaling of Kphio with temperature for C3 plants""" @@ -193,11 +193,11 @@ class PModelConst(ConstantsClass): """Exponent of the threshold function for Mengoli soil moisture""" # Unit cost ratio (beta) values for different CalcOptimalChi methods - beta_cost_ratio_prentice14: NDArray[np.float32] = field( + beta_cost_ratio_prentice14: NDArray[np.float64] = field( default_factory=lambda: np.array([146.0]) ) r"""Unit cost ratio for C3 plants (:math:`\beta`, 146.0).""" - beta_cost_ratio_c4: NDArray[np.float32] = field( + beta_cost_ratio_c4: NDArray[np.float64] = field( default_factory=lambda: np.array([146.0 / 9]) ) r"""Unit cost ratio for C4 plants (:math:`\beta`, 16.222).""" diff --git a/pyrealm/core/hygro.py b/pyrealm/core/hygro.py index e83880e6..fa48482d 100644 --- a/pyrealm/core/hygro.py +++ b/pyrealm/core/hygro.py @@ -12,7 +12,9 @@ from pyrealm.core.utilities import bounds_checker, evaluate_horner_polynomial -def calc_vp_sat(ta: NDArray, core_const: CoreConst = CoreConst()) -> NDArray: +def calc_vp_sat( + ta: NDArray[np.float64], core_const: CoreConst = CoreConst() +) -> NDArray[np.float64]: r"""Calculate vapour pressure of saturated air. This function calculates the vapour pressure of saturated air in kPa at a given @@ -56,8 +58,10 @@ def calc_vp_sat(ta: NDArray, core_const: CoreConst = CoreConst()) -> NDArray: def convert_vp_to_vpd( - vp: NDArray, ta: NDArray, core_const: CoreConst = CoreConst() -) -> NDArray: + vp: NDArray[np.float64], + ta: NDArray[np.float64], + core_const: CoreConst = CoreConst(), +) -> NDArray[np.float64]: """Convert vapour pressure to vapour pressure deficit. Args: @@ -86,8 +90,10 @@ def convert_vp_to_vpd( def convert_rh_to_vpd( - rh: NDArray, ta: NDArray, core_const: CoreConst = CoreConst() -) -> NDArray: + rh: NDArray[np.float64], + ta: NDArray[np.float64], + core_const: CoreConst = CoreConst(), +) -> NDArray[np.float64]: """Convert relative humidity to vapour pressure deficit. Args: @@ -124,8 +130,10 @@ def convert_rh_to_vpd( def convert_sh_to_vp( - sh: NDArray, patm: NDArray, core_const: CoreConst = CoreConst() -) -> NDArray: + sh: NDArray[np.float64], + patm: NDArray[np.float64], + core_const: CoreConst = CoreConst(), +) -> NDArray[np.float64]: """Convert specific humidity to vapour pressure. Args: @@ -149,8 +157,11 @@ def convert_sh_to_vp( def convert_sh_to_vpd( - sh: NDArray, ta: NDArray, patm: NDArray, core_const: CoreConst = CoreConst() -) -> NDArray: + sh: NDArray[np.float64], + ta: NDArray[np.float64], + patm: NDArray[np.float64], + core_const: CoreConst = CoreConst(), +) -> NDArray[np.float64]: """Convert specific humidity to vapour pressure deficit. Args: @@ -185,7 +196,9 @@ def convert_sh_to_vpd( # The following functions are integrated from the evap.py implementation of SPLASH v1. 
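The hunks above only tighten the type annotations on the existing humidity conversion functions; their behaviour is unchanged. As a hedged usage sketch (an editor's illustration, not part of the patch), the calls below show the intended pattern with the updated `np.float64` arrays. The input values are arbitrary, and relative humidity is assumed to be supplied as a proportion.

```python
import numpy as np

from pyrealm.core.hygro import calc_vp_sat, convert_rh_to_vpd

# Arbitrary example inputs: air temperature (°C) and relative humidity (proportion)
ta = np.array([10.0, 20.0, 30.0])
rh = np.array([0.6, 0.5, 0.4])

# Saturated vapour pressure (kPa) and vapour pressure deficit (kPa)
vp_sat = calc_vp_sat(ta)
vpd = convert_rh_to_vpd(rh=rh, ta=ta)

# Both results are float64 arrays under the updated annotations
print(vp_sat.dtype, vpd.dtype)
```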
-def calc_saturation_vapour_pressure_slope(tc: NDArray) -> NDArray: +def calc_saturation_vapour_pressure_slope( + tc: NDArray[np.float64], +) -> NDArray[np.float64]: """Calculate the slope of the saturation vapour pressure curve. Calculates the slope of the saturation pressure temperature curve, following @@ -207,7 +220,7 @@ def calc_saturation_vapour_pressure_slope(tc: NDArray) -> NDArray: ) -def calc_enthalpy_vaporisation(tc: NDArray) -> NDArray: +def calc_enthalpy_vaporisation(tc: NDArray[np.float64]) -> NDArray[np.float64]: """Calculate the enthalpy of vaporization. Calculates the latent heat of vaporization of water as a function of @@ -224,7 +237,7 @@ def calc_enthalpy_vaporisation(tc: NDArray) -> NDArray: return 1.91846e6 * ((tc + 273.15) / (tc + 273.15 - 33.91)) ** 2 -def calc_specific_heat(tc: NDArray) -> NDArray: +def calc_specific_heat(tc: NDArray[np.float64]) -> NDArray[np.float64]: """Calculate the specific heat of air. Calculates the specific heat of air at a constant pressure (:math:`c_{pm}`, J/kg/K) @@ -257,8 +270,8 @@ def calc_specific_heat(tc: NDArray) -> NDArray: def calc_psychrometric_constant( - tc: NDArray, p: NDArray, core_const: CoreConst = CoreConst() -) -> NDArray: + tc: NDArray[np.float64], p: NDArray[np.float64], core_const: CoreConst = CoreConst() +) -> NDArray[np.float64]: r"""Calculate the psychrometric constant. Calculates the psychrometric constant (:math:`\lambda`, Pa/K) given the temperature diff --git a/pyrealm/core/pressure.py b/pyrealm/core/pressure.py index 2a2e44d5..ed8334c5 100644 --- a/pyrealm/core/pressure.py +++ b/pyrealm/core/pressure.py @@ -2,12 +2,15 @@ atmospheric pressure. """ # noqa D210, D415 +import numpy as np from numpy.typing import NDArray from pyrealm.constants import CoreConst -def calc_patm(elv: NDArray, core_const: CoreConst = CoreConst()) -> NDArray: +def calc_patm( + elv: NDArray[np.float64], core_const: CoreConst = CoreConst() +) -> NDArray[np.float64]: r"""Calculate atmospheric pressure from elevation. Calculates atmospheric pressure as a function of elevation with reference to the diff --git a/pyrealm/core/solar.py b/pyrealm/core/solar.py index 1e660bde..afb3fb2d 100644 --- a/pyrealm/core/solar.py +++ b/pyrealm/core/solar.py @@ -602,7 +602,9 @@ def _calc_nighttime_net_radiation( def calc_heliocentric_longitudes( - julian_day: NDArray, n_days: NDArray, core_const: CoreConst = CoreConst() + julian_day: NDArray[np.float64], + n_days: NDArray[np.float64], + core_const: CoreConst = CoreConst(), ) -> tuple[NDArray, NDArray]: """Calculate heliocentric longitude and anomaly. diff --git a/pyrealm/core/utilities.py b/pyrealm/core/utilities.py index 2e86d367..15ad9498 100644 --- a/pyrealm/core/utilities.py +++ b/pyrealm/core/utilities.py @@ -211,13 +211,13 @@ def _get_interval_functions(interval_type: str = "[]") -> tuple[np.ufunc, np.ufu def bounds_checker( - values: NDArray, + values: NDArray[np.float64], lower: float = -np.inf, upper: float = np.inf, interval_type: str = "[]", label: str = "", unit: str = "", -) -> NDArray: +) -> NDArray[np.float64]: r"""Check inputs fall within bounds. This is a simple pass through function that tests whether the values fall within @@ -255,7 +255,7 @@ def bounds_checker( def bounds_mask( - inputs: NDArray, + inputs: NDArray[np.float64], lower: float = -np.inf, upper: float = np.inf, interval_type: str = "[]", @@ -312,7 +312,7 @@ def bounds_mask( # modifying the original input. 
Using type if not np.issubdtype(inputs.dtype, np.floating): # Copies implicitly - outputs = inputs.astype(np.float32) + outputs = inputs.astype(np.float64) else: outputs = inputs.copy() @@ -336,7 +336,9 @@ def bounds_mask( return outputs -def evaluate_horner_polynomial(x: NDArray, cf: list | NDArray) -> NDArray: +def evaluate_horner_polynomial( + x: NDArray[np.float64], cf: list | NDArray +) -> NDArray[np.float64]: r"""Evaluates a polynomial with coefficients `cf` at `x` using Horner's method. Horner's method is a fast way to evaluate polynomials, especially for large degrees, diff --git a/pyrealm/core/water.py b/pyrealm/core/water.py index 72c7568b..953027c8 100644 --- a/pyrealm/core/water.py +++ b/pyrealm/core/water.py @@ -10,10 +10,10 @@ def calc_density_h2o_chen( - tc: NDArray, - p: NDArray, + tc: NDArray[np.float64], + p: NDArray[np.float64], core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: """Calculate the density of water using Chen et al 2008. This function calculates the density of water at a given temperature and pressure @@ -65,10 +65,10 @@ def calc_density_h2o_chen( def calc_density_h2o_fisher( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: """Calculate water density. Calculates the density of water as a function of temperature and atmospheric @@ -124,11 +124,11 @@ def calc_density_h2o_fisher( def calc_density_h2o( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], core_const: CoreConst = CoreConst(), safe: bool = True, -) -> NDArray: +) -> NDArray[np.float64]: """Calculate water density. Calculates the density of water as a function of temperature and atmospheric @@ -179,11 +179,11 @@ def calc_density_h2o( def calc_viscosity_h2o( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], core_const: CoreConst = CoreConst(), simple: bool = False, -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the viscosity of water. Calculates the viscosity of water (:math:`\eta`) as a function of temperature and @@ -247,11 +247,11 @@ def calc_viscosity_h2o( def calc_viscosity_h2o_matrix( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], core_const: CoreConst = CoreConst(), simple: bool = False, -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the viscosity of water. Calculates the viscosity of water (:math:`\eta`) as a function of temperature and diff --git a/pyrealm/demography/canopy.py b/pyrealm/demography/canopy.py index 05c35631..282b20d4 100644 --- a/pyrealm/demography/canopy.py +++ b/pyrealm/demography/canopy.py @@ -6,21 +6,171 @@ from pyrealm.demography.community import Community from pyrealm.demography.crown import ( + CrownProfile, + _validate_z_qz_args, calculate_relative_crown_radius_at_z, calculate_stem_projected_crown_area_at_z, - calculate_stem_projected_leaf_area_at_z, - solve_community_projected_canopy_area, ) +def solve_canopy_area_filling_height( + z: float, + stem_height: NDArray[np.float64], + crown_area: NDArray[np.float64], + m: NDArray[np.float64], + n: NDArray[np.float64], + q_m: NDArray[np.float64], + z_max: NDArray[np.float64], + n_individuals: NDArray[np.float64], + target_area: float = 0, + validate: bool = True, +) -> NDArray[np.float64]: + """Solver function for finding the height where a canopy occupies a given area. 
+ + This function takes the number of individuals in each cohort along with the stem + height and crown area and a given vertical height (:math:`z`). It then uses the + crown shape parameters associated with each cohort to calculate the community wide + projected crown area above that height (:math:`A_p(z)`). This is simply the sum of + the products of the individual stem crown projected area at :math:`z` and the number + of individuals in each cohort. + + The return value is the difference between the calculated :math:`A_p(z)` and a + user-specified target area. This allows the function to be used with a root solver + to find :math:`z` values that result in a given :math:`A_p(z)`. The default target + area is zero, so the default return value will be the actual total :math:`A_p(z)` + for the community. + + A typical use case for the target area would be to specify the area at which a given + canopy layer closes under the perfect plasticity approximation in order to find the + closure height. + + Args: + z: Vertical height on the z axis. + n_individuals: Number of individuals in each cohort + crown_area: Crown area of each cohort + stem_height: Stem height of each cohort + m: Crown shape parameter ``m`` for each cohort + n: Crown shape parameter ``n`` for each cohort + q_m: Crown shape parameter ``q_m`` for each cohort + z_max: Crown shape parameter ``z_m`` for each cohort + target_area: A target projected crown area. + validate: Boolean flag to suppress argument validation. + """ + # Convert z to array for validation and typing + z_arr = np.array(z) + + if validate: + _validate_z_qz_args( + z=z_arr, + stem_properties=[n_individuals, crown_area, stem_height, m, n, q_m, z_max], + ) + + q_z = calculate_relative_crown_radius_at_z( + z=z_arr, stem_height=stem_height, m=m, n=n, validate=False + ) + # Calculate A(p) for the stems in each cohort + A_p = calculate_stem_projected_crown_area_at_z( + z=z_arr, + q_z=q_z, + stem_height=stem_height, + crown_area=crown_area, + q_m=q_m, + z_max=z_max, + validate=False, + ) + + return (A_p * n_individuals).sum() - target_area + + +def fit_perfect_plasticity_approximation( + community: Community, + canopy_gap_fraction: float, + max_stem_height: float, + solver_tolerance: float, +) -> NDArray[np.float64]: + r"""Find canopy layer heights under the PPA model. + + Finds the closure heights of the canopy layers under the perfect plasticity + approximation by finding the set of heights that lead to complete closure of canopy + layers through the canopy. The function solves the following equation for integers + :math:`l \in (1,2,..., m)`: + + .. math:: + + \sum_{s=1}^{N_s}{ A_p(z^*_l)} = l A(1 - f_G) + + The right hand side sets out the total area needed to close a given layer :math:`l` + and all layers above it: :math:`l` times the total community area :math:`A` less + any canopy gap fraction (:math:`f_G`). The left hand side then calculates the + projected crown area for each stem :math:`s` :math:`A_p(z^*_l)_{[s]}` and sums those + areas across all stems in the community :math:`N_s`. The specific height + :math:`z^*_l` is then the height at which the two terms are equal and hence solves + the equation for layer :math:`l`. + + Args: + community: A community instance providing plant cohort data + canopy_gap_fraction: The canopy gap fraction + max_stem_height: The maximum stem height in the canopy, used as an upper bound + on finding the closure height of the topmost layer.
+ solver_tolerance: The absolute tolerance used with the root solver to find the + layer heights. + """ + + # Calculate the number of layers to contain the total community crown area + total_community_crown_area = ( + community.stem_allometry.crown_area * community.cohort_data["n_individuals"] + ).sum() + crown_area_per_layer = community.cell_area * (1 - canopy_gap_fraction) + n_layers = int(np.ceil(total_community_crown_area / crown_area_per_layer)) + + # Initialise the layer heights array and then loop over the layers indices, + # except for the final layer, which will be the partial remaining vegetation below + # the last closed layer. + layer_heights = np.zeros(n_layers, dtype=np.float64) + upper_bound = max_stem_height + + for layer in np.arange(n_layers - 1): + # Set the target area for this layer + target_area = (layer + 1) * crown_area_per_layer + + # TODO - the solution is typically closer to the upper bound of the bracket, + # there might be a better algorithm to find the root (#293). + solution = root_scalar( + solve_canopy_area_filling_height, + args=( + community.stem_allometry.stem_height, + community.stem_allometry.crown_area, + community.stem_traits.m, + community.stem_traits.n, + community.stem_traits.q_m, + community.stem_allometry.crown_z_max, + community.cohort_data["n_individuals"], + target_area, + False, # validate + ), + bracket=(0, upper_bound), + xtol=solver_tolerance, + ) + + if not solution.converged: + raise RuntimeError( + "Estimation of canopy layer closure heights failed to converge." + ) + + # Store the solution and update the upper bound for the next layer down. + layer_heights[layer] = upper_bound = solution.root + + return layer_heights[:, None] + + class Canopy: - """Model of the canopy for a plant community. + """Calculate canopy characteristics for a plant community. This class generates a canopy structure for a community of trees using the - perfect-plasticity approximation model :cite:`purves:2008a`. In this approach, each - individual is assumed to arrange its canopy crown area plastically to take up space - in canopy layers and that new layers form below the canopy top as the available - space is occupied. + perfect-plasticity approximation (PPA) model :cite:`purves:2008a`. In this approach, + each individual is assumed to arrange its canopy crown area plastically to take up + space in canopy layers and that new layers form below the canopy top as the + available space is occupied. Real canopies contain canopy gaps, through process such as crown shyness. This is included in the model through the canopy gap fraction, which sets the proportion @@ -28,6 +178,10 @@ class Canopy: Args: community: A Community object that will be used to generate the canopy model. + layer_heights: A column array of vertical heights at which to calculate canopy + variables. + fit_ppa: Calculate layer heights as the canopy layer closure heights under the + PPA model. canopy_gap_fraction: The proportion of the available space unfilled by canopy (default: 0.05). 
layer_tolerance: The minimum precision used by the solver to find canopy layer @@ -37,32 +191,75 @@ class Canopy: def __init__( self, community: Community, - canopy_gap_fraction: float = 0.05, - layer_tolerance: float = 0.001, + layer_heights: NDArray[np.float64] | None = None, + fit_ppa: bool = False, + canopy_gap_fraction: float = 0, + solver_tolerance: float = 0.001, ) -> None: + # Store required init vars self.canopy_gap_fraction: float = canopy_gap_fraction """Canopy gap fraction.""" - self.layer_tolerance: float = layer_tolerance - """Numerical tolerance for solving canopy layer closure.""" - self.total_community_crown_area: float - """Total crown area across individuals in the community (metres 2).""" + self.solver_tolerance: float = solver_tolerance + """Numerical tolerance for fitting the PPA model of canopy layer closure.""" + + # Define class attributes self.max_stem_height: float - """Maximum height of any individual in the community (metres).""" - self.crown_area_per_layer: float - """Total crown area permitted in a single canopy layer, given the available - cell area of the community and its canopy gap fraction.""" + """Maximum height of any individual in the community (m).""" self.n_layers: int """Total number of canopy layers.""" self.n_cohorts: int """Total number of cohorts in the canopy.""" - self.layer_heights: NDArray[np.float32] - """Column vector of the heights of canopy layers.""" - self.stem_relative_radius: NDArray[np.float32] - """Relative radius values of stems at canopy layer heights.""" - self.stem_crown_area: NDArray[np.float32] - """Stem projected crown area at canopy layer heights.""" - self.stem_leaf_area: NDArray[np.float32] - """Stem projected leaf area at canopy layer heights.""" + self.heights: NDArray[np.float64] + """The vertical heights at which the canopy structure is calculated.""" + + self.crown_profile: CrownProfile + """The crown profiles of the community stems at the provided layer heights.""" + self.stem_leaf_area: NDArray[np.float64] + """The leaf area of the crown model for each cohort by layer.""" + self.cohort_lai: NDArray[np.float64] + """The leaf area index for each cohort by layer.""" + self.cohort_f_trans: NDArray[np.float64] + """The fraction of light transmitted by each cohort by layer.""" + self.cohort_f_abs: NDArray[np.float64] + """The fraction of light absorbed by each cohort by layer.""" + self.f_trans: NDArray[np.float64] + """The fraction of light transmitted by the whole community by layer.""" + self.f_abs: NDArray[np.float64] + """The fraction of light absorbed by the whole community by layer.""" + self.transmission_profile: NDArray[np.float64] + """The light transmission profile for the whole community by layer.""" + self.extinction_profile: NDArray[np.float64] + """The light extinction profile for the whole community by layer.""" + self.fapar: NDArray[np.float64] + """The fraction of absorbed radiation for the whole community by layer.""" + self.cohort_fapar: NDArray[np.float64] + """The fraction of absorbed radiation for each cohort by layer.""" + self.stem_fapar: NDArray[np.float64] + """The fraction of absorbed radiation for each stem by layer.""" + self.filled_community_area: float + """The area filled by crown after accounting for the crown gap fraction.""" + + # Check operating mode + if fit_ppa ^ (layer_heights is None): + raise ValueError("Either set fit_ppa=True or provide layer heights.") + + # Set simple attributes + self.max_stem_height = community.stem_allometry.stem_height.max() + self.n_cohorts = 
community.number_of_cohorts + self.filled_community_area = community.cell_area * ( + 1 - self.canopy_gap_fraction + ) + + # Populate layer heights + if layer_heights is not None: + self.heights = layer_heights + else: + self.heights = fit_perfect_plasticity_approximation( + community=community, + canopy_gap_fraction=canopy_gap_fraction, + max_stem_height=self.max_stem_height, + solver_tolerance=solver_tolerance, + ) self._calculate_canopy(community=community) @@ -70,97 +267,60 @@ def _calculate_canopy(self, community: Community) -> None: """Calculate the canopy structure. This private method runs the calculations needed to populate the instance - attributes. + attributes, given the layer heights provided by the user or calculated using the + PPA model. Args: community: The Community object passed to the instance. """ - # Calculate community wide properties: total crown area, maximum height, crown - # area required to fill a layer and total number of canopy layers - self.total_community_crown_area = ( - community.stem_allometry.crown_area * community.cohort_data["n_individuals"] - ).sum() + # Calculate the crown profile at the layer heights + # TODO - reimpose validation + self.crown_profile = CrownProfile( + stem_traits=community.stem_traits, + stem_allometry=community.stem_allometry, + z=self.heights, + ) - self.max_stem_height = community.stem_allometry.stem_height.max() + # Partition the projected leaf area into the leaf area in each layer for each + # stem and then scale up to the cohort leaf area in each layer. + self.stem_leaf_area = np.diff( + self.crown_profile.projected_leaf_area, axis=0, prepend=0 + ) - self.crown_area_per_layer = community.cell_area * (1 - self.canopy_gap_fraction) + # Calculate the leaf area index per layer per stem, using the stem + # specific leaf area index values. LAI is a value per m2, so scale back down by + # the available community area. + self.cohort_lai = ( + self.stem_leaf_area + * community.cohort_data["n_individuals"] + * community.stem_traits.lai + ) / community.cell_area # self.filled_community_area - self.n_layers = int( - np.ceil(self.total_community_crown_area / self.crown_area_per_layer) - ) - self.n_cohorts = community.number_of_cohorts + # Calculate the Beer-Lambert light transmission and absorption components per + # layer and cohort + self.cohort_f_trans = np.exp(-community.stem_traits.par_ext * self.cohort_lai) + self.cohort_f_abs = 1 - self.cohort_f_trans - # Find the closure heights of the canopy layers under the perfect plasticity - # approximation by solving Ac(z) - L_n = 0 across the community where L is the - # total cumulative crown area in layer n and above, discounted by the canopy gap - # fraction. - - self.layer_heights = np.zeros((self.n_layers, 1), dtype=np.float32) - - # Loop over the layers except for the final layer, which will be the partial - # remaining vegetation below the last closed layer. - starting_guess = self.max_stem_height - for layer in np.arange(self.n_layers - 1): - target_area = (layer + 1) * self.crown_area_per_layer - - # TODO - the solution here is typically closer to the upper bracket, might - # be a better algorithm to find the root (#293). 
- solution = root_scalar( - solve_community_projected_canopy_area, - args=( - community.stem_allometry.stem_height, - community.stem_allometry.crown_area, - community.stem_traits.m, - community.stem_traits.n, - community.stem_traits.q_m, - community.stem_allometry.crown_z_max, - community.cohort_data["n_individuals"], - target_area, - False, # validate - ), - bracket=(0, starting_guess), - xtol=self.layer_tolerance, - ) + # Aggregate across cohorts into a layer wide transimissivity + self.f_trans = self.cohort_f_trans.prod(axis=1) - if not solution.converged: - raise RuntimeError( - "Estimation of canopy layer closure heights failed to converge." - ) - - self.layer_heights[layer] = starting_guess = solution.root - - # Find relative canopy radius at the layer heights - # NOTE - here and in the calls below, validate=False is enforced because the - # Community class structures and code should guarantee valid inputs and so - # turning off the validation internally should simply speed up the code. - self.stem_relative_radius = calculate_relative_crown_radius_at_z( - z=self.layer_heights, - stem_height=community.stem_allometry.stem_height, - m=community.stem_traits.m, - n=community.stem_traits.n, - validate=False, - ) + # Calculate the canopy wide light extinction per layer + self.f_abs = 1 - self.f_trans - # Calculate projected crown area of a cohort stem at canopy closure heights. - self.stem_crown_area = calculate_stem_projected_crown_area_at_z( - z=self.layer_heights, - q_z=self.stem_relative_radius, - crown_area=community.stem_allometry.crown_area, - stem_height=community.stem_allometry.stem_height, - q_m=community.stem_traits.q_m, - z_max=community.stem_allometry.crown_z_max, - validate=False, - ) + # Calculate cumulative light transmission and extinction profiles + self.transmission_profile = np.cumprod(self.f_trans) + self.extinction_profile = 1 - self.transmission_profile - # Find the projected leaf area of a cohort stem at canopy closure heights. - self.stem_leaf_area = calculate_stem_projected_leaf_area_at_z( - z=self.layer_heights, - q_z=self.stem_relative_radius, - crown_area=community.stem_allometry.crown_area, - stem_height=community.stem_allometry.stem_height, - f_g=community.stem_traits.f_g, - q_m=community.stem_traits.q_m, - z_max=community.stem_allometry.crown_z_max, - validate=False, - ) + # Calculate the fapar profile across cohorts and layers + # * The first part of the equation is calculating the relative absorption of + # each cohort within each layer + # * Each layer is then multiplied by fraction of the total light absorbed in the + # layer + # * The resulting matrix can be multiplied by a canopy top PPFD to generate the + # flux absorbed within each layer for each cohort. 
+ self.fapar = -np.diff(self.transmission_profile, prepend=1) + self.cohort_fapar = ( + self.cohort_f_abs / self.cohort_f_abs.sum(axis=1)[:, None] + ) * self.fapar[:, None] + self.stem_fapar = self.cohort_fapar / community.cohort_data["n_individuals"] diff --git a/pyrealm/demography/community.py b/pyrealm/demography/community.py index 11502f35..2f593471 100644 --- a/pyrealm/demography/community.py +++ b/pyrealm/demography/community.py @@ -353,7 +353,7 @@ class Community: flora: Flora # - arrays representing properties of cohorts - cohort_dbh_values: InitVar[NDArray[np.float32]] + cohort_dbh_values: InitVar[NDArray[np.float64]] cohort_n_individuals: InitVar[NDArray[np.int_]] cohort_pft_names: InitVar[NDArray[np.str_]] @@ -365,7 +365,7 @@ class Community: def __post_init__( self, - cohort_dbh_values: NDArray[np.float32], + cohort_dbh_values: NDArray[np.float64], cohort_n_individuals: NDArray[np.int_], cohort_pft_names: NDArray[np.str_], ) -> None: diff --git a/pyrealm/demography/crown.py b/pyrealm/demography/crown.py index 61a60ce1..5f0ac6ee 100644 --- a/pyrealm/demography/crown.py +++ b/pyrealm/demography/crown.py @@ -3,6 +3,7 @@ """ # noqa: D205 from dataclasses import InitVar, dataclass, field +from typing import ClassVar import numpy as np from numpy.typing import NDArray @@ -13,9 +14,9 @@ def _validate_z_qz_args( - z: NDArray[np.float32], - stem_properties: list[NDArray[np.float32]], - q_z: NDArray[np.float32] | None = None, + z: NDArray[np.float64], + stem_properties: list[NDArray[np.float64]], + q_z: NDArray[np.float64] | None = None, ) -> None: """Shared validation of for crown function arguments. @@ -76,7 +77,9 @@ def _validate_z_qz_args( # Now test q_z congruence with z if provided if q_z is not None: - if ((z.size == 1) or (z.ndim == 1)) and (q_z.shape != stem_shape): + if q_z.shape == z.shape: + pass + elif ((z.size == 1) or (z.ndim == 1)) and (q_z.shape != stem_shape): raise ValueError( f"The q_z argument (shape: {q_z.shape}) is not a row array " f"matching stem properties (shape: {stem_shape})" @@ -92,12 +95,12 @@ def _validate_z_qz_args( def calculate_relative_crown_radius_at_z( - z: NDArray[np.float32], - stem_height: NDArray[np.float32], - m: NDArray[np.float32], - n: NDArray[np.float32], + z: NDArray[np.float64], + stem_height: NDArray[np.float64], + m: NDArray[np.float64], + n: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate relative crown radius at a given height. The crown shape parameters ``m`` and ``n`` define the vertical distribution of @@ -140,10 +143,10 @@ def calculate_relative_crown_radius_at_z( def calculate_crown_radius( - q_z: NDArray[np.float32], - r0: NDArray[np.float32], + q_z: NDArray[np.float64], + r0: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate crown radius from relative crown radius and crown r0. 
The relative crown radius (:math:`q(z)`) at a given height :math:`z` describes the @@ -172,14 +175,14 @@ def calculate_crown_radius( def calculate_stem_projected_crown_area_at_z( - z: NDArray[np.float32], - q_z: NDArray[np.float32], - stem_height: NDArray[np.float32], - crown_area: NDArray[np.float32], - q_m: NDArray[np.float32], - z_max: NDArray[np.float32], + z: NDArray[np.float64], + q_z: NDArray[np.float64], + stem_height: NDArray[np.float64], + crown_area: NDArray[np.float64], + q_m: NDArray[np.float64], + z_max: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: """Calculate stem projected crown area above a given height. This function calculates the projected crown area of a set of stems with given @@ -197,7 +200,7 @@ def calculate_stem_projected_crown_area_at_z( crown_area: Crown area of each stem stem_height: Stem height of each stem q_m: Canopy shape parameter ``q_m``` for each stem - z_max: Height of maximum crown radous for each stem + z_max: Height of maximum crown radius for each stem validate: Boolean flag to suppress argument validation. """ @@ -217,85 +220,16 @@ def calculate_stem_projected_crown_area_at_z( return A_p -def solve_community_projected_canopy_area( - z: float, - stem_height: NDArray[np.float32], - crown_area: NDArray[np.float32], - m: NDArray[np.float32], - n: NDArray[np.float32], - q_m: NDArray[np.float32], - z_max: NDArray[np.float32], - n_individuals: NDArray[np.float32], - target_area: float = 0, - validate: bool = True, -) -> NDArray[np.float32]: - """Solver function for community wide projected canopy area. - - This function takes the number of individuals in each cohort along with the stem - height and crown area and a given vertical height (:math:`z`). It then uses the - crown shape parameters associated with each cohort to calculate the community wide - projected crown area above that height (:math:`A_p(z)`). This is simply the sum of - the products of the individual stem crown projected area at :math:`z` and the number - of individuals in each cohort. - - The return value is the difference between the calculated :math:`A_p(z)` and a - user-specified target area, This allows the function to be used with a root solver - to find :math:`z` values that result in a given :math:`A_p(z)`. The default target - area is zero, so the default return value will be the actual total :math:`A_p(z)` - for the community. - - A typical use case for the target area would be to specify the area at which a given - canopy layer closes under the perfect plasticity approximation in order to find the - closure height. - - Args: - z: Vertical height on the z axis. - n_individuals: Number of individuals in each cohort - crown_area: Crown area of each cohort - stem_height: Stem height of each cohort - m: Crown shape parameter ``m``` for each cohort - n: Crown shape parameter ``n``` for each cohort - q_m: Crown shape parameter ``q_m``` for each cohort - z_max: Crown shape parameter ``z_m``` for each cohort - target_area: A target projected crown area. - validate: Boolean flag to suppress argument validation. 
- """ - # Convert z to array for validation and typing - z_arr = np.array(z) - - if validate: - _validate_z_qz_args( - z=z_arr, - stem_properties=[n_individuals, crown_area, stem_height, m, n, q_m, z_max], - ) - - q_z = calculate_relative_crown_radius_at_z( - z=z_arr, stem_height=stem_height, m=m, n=n, validate=False - ) - # Calculate A(p) for the stems in each cohort - A_p = calculate_stem_projected_crown_area_at_z( - z=z_arr, - q_z=q_z, - stem_height=stem_height, - crown_area=crown_area, - q_m=q_m, - z_max=z_max, - validate=False, - ) - - return (A_p * n_individuals).sum() - target_area - - def calculate_stem_projected_leaf_area_at_z( - z: NDArray[np.float32], - q_z: NDArray[np.float32], - stem_height: NDArray[np.float32], - crown_area: NDArray[np.float32], - f_g: NDArray[np.float32], - q_m: NDArray[np.float32], - z_max: NDArray[np.float32], + z: NDArray[np.float64], + q_z: NDArray[np.float64], + stem_height: NDArray[np.float64], + crown_area: NDArray[np.float64], + f_g: NDArray[np.float64], + q_m: NDArray[np.float64], + z_max: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: """Calculate projected leaf area above a given height. This function calculates the projected leaf area of a set of stems with given @@ -363,6 +297,12 @@ class CrownProfile: allometric predictions of stem height, crown area and z_max for an actual stem of a given size for each PFT. + In addition to the variables above, the class also has properties that calculate + the projected crown radius and projected leaf radius. These are simply the radii + that would result in the two projected areas: the values are not directly meaningful + for calculating canopy models, but can be useful for exploring the behaviour of + projected area on the same linear scale as the crown radius. + Args: stem_traits: A Flora or StemTraits instance providing plant functional trait data. @@ -375,20 +315,29 @@ class CrownProfile: z_max: A row array providing expected z_max height for each PFT.
""" + var_attr_names: ClassVar[tuple[str, ...]] = ( + "relative_crown_radius", + "crown_radius", + "projected_crown_area", + "projected_leaf_area", + "projected_crown_radius", + "projected_leaf_radius", + ) + stem_traits: InitVar[StemTraits | Flora] """A Flora or StemTraits instance providing plant functional trait data.""" stem_allometry: InitVar[StemAllometry] """A StemAllometry instance setting the stem allometries for the crown profile.""" - z: InitVar[NDArray[np.float32]] + z: NDArray[np.float64] """An array of vertical height values at which to calculate crown profiles.""" - relative_crown_radius: NDArray[np.float32] = field(init=False) + relative_crown_radius: NDArray[np.float64] = field(init=False) """An array of the relative crown radius of stems at z heights""" - crown_radius: NDArray[np.float32] = field(init=False) + crown_radius: NDArray[np.float64] = field(init=False) """An array of the actual crown radius of stems at z heights""" - projected_crown_area: NDArray[np.float32] = field(init=False) + projected_crown_area: NDArray[np.float64] = field(init=False) """An array of the projected crown area of stems at z heights""" - projected_leaf_area: NDArray[np.float32] = field(init=False) + projected_leaf_area: NDArray[np.float64] = field(init=False) """An array of the projected leaf area of stems at z heights""" # Information attributes @@ -401,12 +350,11 @@ def __post_init__( self, stem_traits: StemTraits | Flora, stem_allometry: StemAllometry, - z: NDArray[np.float32], ) -> None: """Populate crown profile attributes from the traits, allometry and height.""" # Calculate relative crown radius self.relative_crown_radius = calculate_relative_crown_radius_at_z( - z=z, + z=self.z, m=stem_traits.m, n=stem_traits.n, stem_height=stem_allometry.stem_height, @@ -419,7 +367,7 @@ def __post_init__( # Calculate projected crown area self.projected_crown_area = calculate_stem_projected_crown_area_at_z( - z=z, + z=self.z, q_z=self.relative_crown_radius, crown_area=stem_allometry.crown_area, q_m=stem_traits.q_m, @@ -429,7 +377,7 @@ def __post_init__( # Calculate projected leaf area self.projected_leaf_area = calculate_stem_projected_leaf_area_at_z( - z=z, + z=self.z, q_z=self.relative_crown_radius, f_g=stem_traits.f_g, q_m=stem_traits.q_m, @@ -452,3 +400,115 @@ def __repr__(self) -> str: f"CrownProfile: Prediction for {self._n_stems} stems " f"at {self._n_pred} observations." ) + + @property + def projected_crown_radius(self) -> NDArray[np.float32]: + """An array of the projected crown radius of stems at z heights.""" + return np.sqrt(self.projected_crown_area / np.pi) + + @property + def projected_leaf_radius(self) -> NDArray[np.float32]: + """An array of the projected leaf radius of stems at z heights.""" + return np.sqrt(self.projected_leaf_area / np.pi) + + +def get_crown_xy( + crown_profile: CrownProfile, + stem_allometry: StemAllometry, + attr: str, + stem_offsets: NDArray[np.float32] | None = None, + two_sided: bool = True, + as_xy: bool = False, +) -> list[tuple[NDArray, NDArray]] | list[NDArray]: + """Extract plotting data from crown profiles. + + A CrownProfile instance contains crown radius and projected area data for a set of + stems at given heights, but can contain predictions of these attributes above the + actual heights of some or all of the stems or indeed below ground. + + This function extracts plotting data for a given attribute for each crown that + includes only the predictions within the height range of the actual stem. 
It can + also mirror the values around the vertical midline to provide a two sided canopy + shape. + + The data are returned as a list with one entry per stem. The default value for each + entry a tuple of two arrays (height, attribute values) but the `as_xy=True` option + will return an `(N, 2)` dimensioned XY array suitable for use with + {class}`~matplotlib.patches.Polygon`. + + Args: + crown_profile: A crown profile instance + stem_allometry: The stem allometry instance used to create the crown profile + attr: The crown profile attribute to plot (see + :class:`~pyrealm.demography.crown.CrownProfile`) + stem_offsets: An optional array of offsets to add to the midline of stems. + two_sided: Should the plotting data show a two sided canopy. + as_xy: Should the plotting data be returned as a single XY array. + + """ + + # Input validation + if attr not in crown_profile.var_attr_names: + raise ValueError(f"Unknown crown profile attribute: {attr}") + + # TODO + # - more validation once the dimensioning has been thought through #317 + # - we're expecting a one d allometry and a 2D profile with multiple heights. + + # Get the attribute and flatten the heights from a column array to one dimensional + attr_values = getattr(crown_profile, attr) + z = crown_profile.z.flatten() + + # Orient the data so that lower heights always come first + if z[0] < z[-1]: + z = np.flip(z) + attr_values = np.flip(attr_values, axis=0) + + # Collect the per stem data + crown_plotting_data: list = [] + + for stem_index in np.arange(attr_values.shape[1]): + # Find the heights and values that fall within the individual stem + height_is_valid = np.logical_and( + z <= stem_allometry.stem_height[stem_index], z >= 0 + ) + valid_attr_values: NDArray = attr_values[height_is_valid, stem_index] + valid_heights: NDArray = z[height_is_valid] + + if two_sided: + # The values are extended to include the reverse profile as well as the zero + # value at the stem height + valid_heights = np.concatenate( + [ + np.flip(valid_heights), + stem_allometry.stem_height[[stem_index]], + valid_heights, + ] + ) + valid_attr_values = np.concatenate( + [-np.flip(valid_attr_values), [0], valid_attr_values] + ) + else: + # Only the zero value is added + valid_heights = np.concatenate( + [ + stem_allometry.stem_height[[stem_index]], + valid_heights, + ] + ) + valid_attr_values = np.concatenate([[0], valid_attr_values]) + + # Add offsets if provided + if stem_offsets is not None: + valid_attr_values += stem_offsets[stem_index] + + if as_xy: + # Combine the values into an (N,2) XY array + crown_plotting_data.append( + np.hstack([valid_attr_values[:, None], valid_heights[:, None]]) + ) + else: + # Return the individual 1D arrays + crown_plotting_data.append((valid_heights, valid_attr_values)) + + return crown_plotting_data diff --git a/pyrealm/demography/flora.py b/pyrealm/demography/flora.py index 41767d5f..a12463f9 100644 --- a/pyrealm/demography/flora.py +++ b/pyrealm/demography/flora.py @@ -46,8 +46,8 @@ def calculate_crown_q_m( - m: float | NDArray[np.float32], n: float | NDArray[np.float32] -) -> float | NDArray[np.float32]: + m: float | NDArray[np.float64], n: float | NDArray[np.float64] +) -> float | NDArray[np.float64]: """Calculate the crown scaling trait ``q_m``. 
The value of q_m is a constant crown scaling parameter derived from the ``m`` and @@ -66,8 +66,8 @@ def calculate_crown_q_m( def calculate_crown_z_max_proportion( - m: float | NDArray[np.float32], n: float | NDArray[np.float32] -) -> float | NDArray[np.float32]: + m: float | NDArray[np.float64], n: float | NDArray[np.float64] +) -> float | NDArray[np.float64]: r"""Calculate the z_m trait. The z_m proportion (:math:`p_{zm}`) is the constant proportion of stem height at @@ -267,45 +267,45 @@ class Flora: # - trait arrays name: NDArray[np.str_] = field(init=False) r"""The name of the plant functional type.""" - a_hd: NDArray[np.float32] = field(init=False) + a_hd: NDArray[np.float64] = field(init=False) r"""Initial slope of height-diameter relationship (:math:`a`, -)""" - ca_ratio: NDArray[np.float32] = field(init=False) + ca_ratio: NDArray[np.float64] = field(init=False) r"""Initial ratio of crown area to stem cross-sectional area (:math:`c`, -)""" - h_max: NDArray[np.float32] = field(init=False) + h_max: NDArray[np.float64] = field(init=False) r"""Maximum tree height (:math:`H_m`, m)""" - rho_s: NDArray[np.float32] = field(init=False) + rho_s: NDArray[np.float64] = field(init=False) r"""Sapwood density (:math:`\rho_s`, kg Cm-3)""" - lai: NDArray[np.float32] = field(init=False) + lai: NDArray[np.float64] = field(init=False) """Leaf area index within the crown (:math:`L`, -)""" - sla: NDArray[np.float32] = field(init=False) + sla: NDArray[np.float64] = field(init=False) r"""Specific leaf area (:math:`\sigma`, m2 kg-1 C)""" - tau_f: NDArray[np.float32] = field(init=False) + tau_f: NDArray[np.float64] = field(init=False) r"""Foliage turnover time (:math:`\tau_f`,years)""" - tau_r: NDArray[np.float32] = field(init=False) + tau_r: NDArray[np.float64] = field(init=False) r"""Fine-root turnover time (:math:`\tau_r`, years)""" - par_ext: NDArray[np.float32] = field(init=False) + par_ext: NDArray[np.float64] = field(init=False) r"""Extinction coefficient of photosynthetically active radiation (PAR) (:math:`k`, -)""" - yld: NDArray[np.float32] = field(init=False) + yld: NDArray[np.float64] = field(init=False) r"""Yield factor (:math:`y`, -)""" - zeta: NDArray[np.float32] = field(init=False) + zeta: NDArray[np.float64] = field(init=False) r"""Ratio of fine-root mass to foliage area (:math:`\zeta`, kg C m-2)""" - resp_r: NDArray[np.float32] = field(init=False) + resp_r: NDArray[np.float64] = field(init=False) r"""Fine-root specific respiration rate (:math:`r_r`, year-1)""" - resp_s: NDArray[np.float32] = field(init=False) + resp_s: NDArray[np.float64] = field(init=False) r"""Sapwood-specific respiration rate (:math:`r_s`, year-1)""" - resp_f: NDArray[np.float32] = field(init=False) + resp_f: NDArray[np.float64] = field(init=False) r"""Foliage maintenance respiration fraction (:math:`r_f`, -)""" - m: NDArray[np.float32] = field(init=False) + m: NDArray[np.float64] = field(init=False) r"""Crown shape parameter (:math:`m`, -)""" - n: NDArray[np.float32] = field(init=False) + n: NDArray[np.float64] = field(init=False) r"""Crown shape parameter (:math:`n`, -)""" - f_g: NDArray[np.float32] = field(init=False) + f_g: NDArray[np.float64] = field(init=False) r"""Crown gap fraction (:math:`f_g`, -)""" - q_m: NDArray[np.float32] = field(init=False) + q_m: NDArray[np.float64] = field(init=False) """Scaling factor to derive maximum crown radius from crown area.""" - z_max_prop: NDArray[np.float32] = field(init=False) + z_max_prop: NDArray[np.float64] = field(init=False) """Proportion of stem height at which maximum 
crown radius is found.""" # - other instance attributes @@ -464,45 +464,45 @@ class StemTraits: # Instance trait attributes name: NDArray[np.str_] r"""The name of the plant functional type.""" - a_hd: NDArray[np.float32] + a_hd: NDArray[np.float64] r"""Initial slope of height-diameter relationship (:math:`a`, -)""" - ca_ratio: NDArray[np.float32] + ca_ratio: NDArray[np.float64] r"""Initial ratio of crown area to stem cross-sectional area (:math:`c`, -)""" - h_max: NDArray[np.float32] + h_max: NDArray[np.float64] r"""Maximum tree height (:math:`H_m`, m)""" - rho_s: NDArray[np.float32] + rho_s: NDArray[np.float64] r"""Sapwood density (:math:`\rho_s`, kg Cm-3)""" - lai: NDArray[np.float32] + lai: NDArray[np.float64] """Leaf area index within the crown (:math:`L`, -)""" - sla: NDArray[np.float32] + sla: NDArray[np.float64] r"""Specific leaf area (:math:`\sigma`, m2 kg-1 C)""" - tau_f: NDArray[np.float32] + tau_f: NDArray[np.float64] r"""Foliage turnover time (:math:`\tau_f`,years)""" - tau_r: NDArray[np.float32] + tau_r: NDArray[np.float64] r"""Fine-root turnover time (:math:`\tau_r`, years)""" - par_ext: NDArray[np.float32] + par_ext: NDArray[np.float64] r"""Extinction coefficient of photosynthetically active radiation (PAR) (:math:`k`, -)""" - yld: NDArray[np.float32] + yld: NDArray[np.float64] r"""Yield factor (:math:`y`, -)""" - zeta: NDArray[np.float32] + zeta: NDArray[np.float64] r"""Ratio of fine-root mass to foliage area (:math:`\zeta`, kg C m-2)""" - resp_r: NDArray[np.float32] + resp_r: NDArray[np.float64] r"""Fine-root specific respiration rate (:math:`r_r`, year-1)""" - resp_s: NDArray[np.float32] + resp_s: NDArray[np.float64] r"""Sapwood-specific respiration rate (:math:`r_s`, year-1)""" - resp_f: NDArray[np.float32] + resp_f: NDArray[np.float64] r"""Foliage maintenance respiration fraction (:math:`r_f`, -)""" - m: NDArray[np.float32] + m: NDArray[np.float64] r"""Crown shape parameter (:math:`m`, -)""" - n: NDArray[np.float32] + n: NDArray[np.float64] r"""Crown shape parameter (:math:`n`, -)""" - f_g: NDArray[np.float32] + f_g: NDArray[np.float64] r"""Crown gap fraction (:math:`f_g`, -)""" - q_m: NDArray[np.float32] + q_m: NDArray[np.float64] """Scaling factor to derive maximum crown radius from crown area.""" - z_max_prop: NDArray[np.float32] + z_max_prop: NDArray[np.float64] """Proportion of stem height at which maximum crown radius is found.""" # Post init attributes diff --git a/pyrealm/demography/t_model_functions.py b/pyrealm/demography/t_model_functions.py index dbedb70f..8cabeaa4 100644 --- a/pyrealm/demography/t_model_functions.py +++ b/pyrealm/demography/t_model_functions.py @@ -53,11 +53,11 @@ def _validate_t_model_args(pft_args: list[NDArray], size_args: list[NDArray]) -> def calculate_heights( - h_max: NDArray[np.float32], - a_hd: NDArray[np.float32], - dbh: NDArray[np.float32], + h_max: NDArray[np.float64], + a_hd: NDArray[np.float64], + dbh: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate tree height under the T Model. 
The height of trees (:math:`H`) are calculated from individual diameters at breast @@ -83,11 +83,11 @@ def calculate_heights( def calculate_dbh_from_height( - h_max: NDArray[np.float32], - a_hd: NDArray[np.float32], - stem_height: NDArray[np.float32], + h_max: NDArray[np.float64], + a_hd: NDArray[np.float64], + stem_height: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate diameter at breast height from stem height under the T Model. This function inverts the normal calculation of stem height (:math:`H`) from @@ -133,12 +133,12 @@ def calculate_dbh_from_height( def calculate_crown_areas( - ca_ratio: NDArray[np.float32], - a_hd: NDArray[np.float32], - dbh: NDArray[np.float32], - stem_height: NDArray[np.float32], + ca_ratio: NDArray[np.float64], + a_hd: NDArray[np.float64], + dbh: NDArray[np.float64], + stem_height: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate tree crown area under the T Model. The tree crown area (:math:`A_{c}`)is calculated from individual diameters at breast @@ -166,11 +166,11 @@ def calculate_crown_areas( def calculate_crown_fractions( - a_hd: NDArray[np.float32], - stem_height: NDArray[np.float32], - dbh: NDArray[np.float32], + a_hd: NDArray[np.float64], + stem_height: NDArray[np.float64], + dbh: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate tree crown fraction under the T Model. The crown fraction (:math:`f_{c}`)is calculated from individual diameters at breast @@ -195,11 +195,11 @@ def calculate_crown_fractions( def calculate_stem_masses( - rho_s: NDArray[np.float32], - stem_height: NDArray[np.float32], - dbh: NDArray[np.float32], + rho_s: NDArray[np.float64], + stem_height: NDArray[np.float64], + dbh: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate stem mass under the T Model. The stem mass (:math:`W_{s}`) is calculated from individual diameters at breast @@ -223,11 +223,11 @@ def calculate_stem_masses( def calculate_foliage_masses( - sla: NDArray[np.float32], - lai: NDArray[np.float32], - crown_area: NDArray[np.float32], + sla: NDArray[np.float64], + lai: NDArray[np.float64], + crown_area: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate foliage mass under the T Model. The foliage mass (:math:`W_{f}`) is calculated from the crown area (:math:`A_{c}`), @@ -251,13 +251,13 @@ def calculate_foliage_masses( def calculate_sapwood_masses( - rho_s: NDArray[np.float32], - ca_ratio: NDArray[np.float32], - stem_height: NDArray[np.float32], - crown_area: NDArray[np.float32], - crown_fraction: NDArray[np.float32], + rho_s: NDArray[np.float64], + ca_ratio: NDArray[np.float64], + stem_height: NDArray[np.float64], + crown_area: NDArray[np.float64], + crown_fraction: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate sapwood mass under the T Model. The sapwood mass (:math:`W_{\cdot s}`) is calculated from the individual crown area @@ -287,8 +287,8 @@ def calculate_sapwood_masses( def calculate_crown_z_max( - z_max_prop: NDArray[np.float32], stem_height: NDArray[np.float32] -) -> NDArray[np.float32]: + z_max_prop: NDArray[np.float64], stem_height: NDArray[np.float64] +) -> NDArray[np.float64]: r"""Calculate height of maximum crown radius. 
The height of the maximum crown radius (:math:`z_m`) is derived from the crown @@ -313,8 +313,8 @@ def calculate_crown_z_max( def calculate_crown_r0( - q_m: NDArray[np.float32], crown_area: NDArray[np.float32] -) -> NDArray[np.float32]: + q_m: NDArray[np.float64], crown_area: NDArray[np.float64] +) -> NDArray[np.float64]: r"""Calculate scaling factor for width of maximum crown radius. This scaling factor (:math:`r_0`) is derived from the crown shape parameters @@ -339,12 +339,12 @@ def calculate_crown_r0( def calculate_whole_crown_gpp( - potential_gpp: NDArray[np.float32], - crown_area: NDArray[np.float32], - par_ext: NDArray[np.float32], - lai: NDArray[np.float32], + potential_gpp: NDArray[np.float64], + crown_area: NDArray[np.float64], + par_ext: NDArray[np.float64], + lai: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate whole crown gross primary productivity. This function calculates individual GPP across the whole crown, given the individual @@ -373,10 +373,10 @@ def calculate_whole_crown_gpp( def calculate_sapwood_respiration( - resp_s: NDArray[np.float32], - sapwood_mass: NDArray[np.float32], + resp_s: NDArray[np.float64], + sapwood_mass: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate sapwood respiration. Calculates the total sapwood respiration (:math:`R_{\cdot s}`) given the individual @@ -398,10 +398,10 @@ def calculate_sapwood_respiration( def calculate_foliar_respiration( - resp_f: NDArray[np.float32], - whole_crown_gpp: NDArray[np.float32], + resp_f: NDArray[np.float64], + whole_crown_gpp: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate foliar respiration. Calculates the total foliar respiration (:math:`R_{f}`) given the individual crown @@ -425,12 +425,12 @@ def calculate_foliar_respiration( def calculate_fine_root_respiration( - zeta: NDArray[np.float32], - sla: NDArray[np.float32], - resp_r: NDArray[np.float32], - foliage_mass: NDArray[np.float32], + zeta: NDArray[np.float64], + sla: NDArray[np.float64], + resp_r: NDArray[np.float64], + foliage_mass: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate foliar respiration. Calculates the total fine root respiration (:math:`R_{r}`) given the individual @@ -455,13 +455,13 @@ def calculate_fine_root_respiration( def calculate_net_primary_productivity( - yld: NDArray[np.float32], - whole_crown_gpp: NDArray[np.float32], - foliar_respiration: NDArray[np.float32], - fine_root_respiration: NDArray[np.float32], - sapwood_respiration: NDArray[np.float32], + yld: NDArray[np.float64], + whole_crown_gpp: NDArray[np.float64], + foliar_respiration: NDArray[np.float64], + fine_root_respiration: NDArray[np.float64], + sapwood_respiration: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate net primary productivity. 
The net primary productivity (NPP, :math:`P_{net}`) is calculated as a plant @@ -507,13 +507,13 @@ def calculate_net_primary_productivity( def calculate_foliage_and_fine_root_turnover( - sla: NDArray[np.float32], - zeta: NDArray[np.float32], - tau_f: NDArray[np.float32], - tau_r: NDArray[np.float32], - foliage_mass: NDArray[np.float32], + sla: NDArray[np.float64], + zeta: NDArray[np.float64], + tau_f: NDArray[np.float64], + tau_r: NDArray[np.float64], + foliage_mass: NDArray[np.float64], validate: bool = True, -) -> NDArray[np.float32]: +) -> NDArray[np.float64]: r"""Calculate turnover costs. This function calculates the costs associated with the turnover of fine roots and @@ -544,19 +544,19 @@ def calculate_foliage_and_fine_root_turnover( def calculate_growth_increments( - rho_s: NDArray[np.float32], - a_hd: NDArray[np.float32], - h_max: NDArray[np.float32], - lai: NDArray[np.float32], - ca_ratio: NDArray[np.float32], - sla: NDArray[np.float32], - zeta: NDArray[np.float32], - npp: NDArray[np.float32], - turnover: NDArray[np.float32], - dbh: NDArray[np.float32], - stem_height: NDArray[np.float32], + rho_s: NDArray[np.float64], + a_hd: NDArray[np.float64], + h_max: NDArray[np.float64], + lai: NDArray[np.float64], + ca_ratio: NDArray[np.float64], + sla: NDArray[np.float64], + zeta: NDArray[np.float64], + npp: NDArray[np.float64], + turnover: NDArray[np.float64], + dbh: NDArray[np.float64], + stem_height: NDArray[np.float64], validate: bool = True, -) -> tuple[NDArray[np.float32], NDArray[np.float32], NDArray[np.float32]]: +) -> tuple[NDArray[np.float64], NDArray[np.float64], NDArray[np.float64]]: r"""Calculate growth increments. Given an estimate of net primary productivity (:math:`P_{net}`), less associated @@ -692,28 +692,28 @@ class StemAllometry: """ An instance of :class:`~pyrealm.demography.flora.Flora` or :class:`~pyrealm.demography.flora.StemTraits`, providing plant functional trait data for a set of stems.""" - at_dbh: InitVar[NDArray[np.float32]] + at_dbh: InitVar[NDArray[np.float64]] """An array of diameter at breast height values at which to predict stem allometry values.""" # Post init allometry attributes - dbh: NDArray[np.float32] = field(init=False) + dbh: NDArray[np.float64] = field(init=False) """Diameter at breast height (metres)""" - stem_height: NDArray[np.float32] = field(init=False) + stem_height: NDArray[np.float64] = field(init=False) """Stem height (metres)""" - crown_area: NDArray[np.float32] = field(init=False) + crown_area: NDArray[np.float64] = field(init=False) """Crown area (square metres)""" - crown_fraction: NDArray[np.float32] = field(init=False) + crown_fraction: NDArray[np.float64] = field(init=False) """Vertical fraction of the stem covered by the crown (-)""" - stem_mass: NDArray[np.float32] = field(init=False) + stem_mass: NDArray[np.float64] = field(init=False) """Stem mass (kg)""" - foliage_mass: NDArray[np.float32] = field(init=False) + foliage_mass: NDArray[np.float64] = field(init=False) """Foliage mass (kg)""" - sapwood_mass: NDArray[np.float32] = field(init=False) + sapwood_mass: NDArray[np.float64] = field(init=False) """Sapwood mass (kg)""" - crown_r0: NDArray[np.float32] = field(init=False) + crown_r0: NDArray[np.float64] = field(init=False) """Crown radius scaling factor (-)""" - crown_z_max: NDArray[np.float32] = field(init=False) + crown_z_max: NDArray[np.float64] = field(init=False) """Height of maximum crown radius (metres)""" # Information attributes @@ -723,7 +723,7 @@ class StemAllometry: """The number of stems.""" def __post_init__( 
- self, stem_traits: Flora | StemTraits, at_dbh: NDArray[np.float32] + self, stem_traits: Flora | StemTraits, at_dbh: NDArray[np.float64] ) -> None: """Populate the stem allometry attributes from the traits and size data.""" @@ -835,30 +835,30 @@ class StemAllocation: stem_allometry: InitVar[StemAllometry] """An instance of :class:`~pyrealm.demography.t_model_functions.StemAllometry` providing the stem size data for which to calculate allocation.""" - at_potential_gpp: InitVar[NDArray[np.float32]] + at_potential_gpp: InitVar[NDArray[np.float64]] """An array of potential gross primary productivity for each stem that should be allocated to respiration, turnover and growth.""" # Post init allometry attributes - potential_gpp: NDArray[np.float32] = field(init=False) + potential_gpp: NDArray[np.float64] = field(init=False) """Potential GPP per unit area (g C m2)""" - whole_crown_gpp: NDArray[np.float32] = field(init=False) + whole_crown_gpp: NDArray[np.float64] = field(init=False) """Estimated GPP across the whole crown (g C)""" - sapwood_respiration: NDArray[np.float32] = field(init=False) + sapwood_respiration: NDArray[np.float64] = field(init=False) """Allocation to sapwood respiration (g C)""" - foliar_respiration: NDArray[np.float32] = field(init=False) + foliar_respiration: NDArray[np.float64] = field(init=False) """Allocation to foliar respiration (g C)""" - fine_root_respiration: NDArray[np.float32] = field(init=False) + fine_root_respiration: NDArray[np.float64] = field(init=False) """Allocation to fine root respiration (g C)""" - npp: NDArray[np.float32] = field(init=False) + npp: NDArray[np.float64] = field(init=False) """Net primary productivity (g C)""" - turnover: NDArray[np.float32] = field(init=False) + turnover: NDArray[np.float64] = field(init=False) """Allocation to leaf and fine root turnover (g C)""" - delta_dbh: NDArray[np.float32] = field(init=False) + delta_dbh: NDArray[np.float64] = field(init=False) """Predicted increase in stem diameter from growth allocation (g C)""" - delta_stem_mass: NDArray[np.float32] = field(init=False) + delta_stem_mass: NDArray[np.float64] = field(init=False) """Predicted increase in stem mass from growth allocation (g C)""" - delta_foliage_mass: NDArray[np.float32] = field(init=False) + delta_foliage_mass: NDArray[np.float64] = field(init=False) """Predicted increase in foliar mass from growth allocation (g C)""" # Information attributes @@ -871,7 +871,7 @@ def __post_init__( self, stem_traits: Flora | StemTraits, stem_allometry: StemAllometry, - at_potential_gpp: NDArray[np.float32], + at_potential_gpp: NDArray[np.float64], ) -> None: """Populate stem allocation attributes from the traits, allometry and GPP.""" diff --git a/pyrealm/pmodel/competition.py b/pyrealm/pmodel/competition.py index 3be943b5..5c5755fb 100644 --- a/pyrealm/pmodel/competition.py +++ b/pyrealm/pmodel/competition.py @@ -12,8 +12,10 @@ def convert_gpp_advantage_to_c4_fraction( - gpp_adv_c4: NDArray, treecover: NDArray, c3c4_const: C3C4Const = C3C4Const() -) -> NDArray: + gpp_adv_c4: NDArray[np.float64], + treecover: NDArray[np.float64], + c3c4_const: C3C4Const = C3C4Const(), +) -> NDArray[np.float64]: r"""Convert C4 GPP advantage to C4 fraction. 
This function calculates an initial estimate of the fraction of C4 plants based on @@ -60,8 +62,8 @@ def convert_gpp_advantage_to_c4_fraction( def calculate_tree_proportion( - gppc3: NDArray, c3c4_const: C3C4Const = C3C4Const() -) -> NDArray: + gppc3: NDArray[np.float64], c3c4_const: C3C4Const = C3C4Const() +) -> NDArray[np.float64]: r"""Calculate the proportion of GPP from C3 trees. This function estimates the proportion of C3 trees in the community, which can then @@ -181,11 +183,11 @@ class C3C4Competition: def __init__( self, - gpp_c3: NDArray, - gpp_c4: NDArray, - treecover: NDArray, - below_t_min: NDArray, - cropland: NDArray, + gpp_c3: NDArray[np.float64], + gpp_c4: NDArray[np.float64], + treecover: NDArray[np.float64], + below_t_min: NDArray[np.float64], + cropland: NDArray[np.float64], c3c4_const: C3C4Const = C3C4Const(), ): # Check inputs are congruent @@ -198,7 +200,7 @@ def __init__( # annual total GPP estimates for C3 and C4 plants. This uses use # np.full to handle division by zero without raising warnings gpp_adv_c4 = np.full(self.shape, np.nan) - self.gpp_adv_c4: NDArray = np.divide( + self.gpp_adv_c4: NDArray[np.float64] = np.divide( gpp_c4 - gpp_c3, gpp_c3, out=gpp_adv_c4, where=gpp_c3 > 0 ) """The proportional advantage in GPP of C4 over C3 plants""" @@ -224,22 +226,22 @@ def __init__( # Step 5: remove cropland areas frac_c4[cropland] = np.nan # type: ignore - self.frac_c4: NDArray = frac_c4 + self.frac_c4: NDArray[np.float64] = frac_c4 """The estimated fraction of C4 plants.""" - self.gpp_c3_contrib: NDArray = gpp_c3 * (1 - self.frac_c4) + self.gpp_c3_contrib: NDArray[np.float64] = gpp_c3 * (1 - self.frac_c4) """The estimated contribution of C3 plants to GPP (gC m-2 yr-1)""" self.gpp_c4_contrib = gpp_c4 * self.frac_c4 """The estimated contribution of C4 plants to GPP (gC m-2 yr-1)""" # Define attributes used elsewhere - self.Delta13C_C3: NDArray + self.Delta13C_C3: NDArray[np.float64] r"""Contribution from C3 plants to (:math:`\Delta\ce{^13C}`, permil).""" - self.Delta13C_C4: NDArray + self.Delta13C_C4: NDArray[np.float64] r"""Contribution from C4 plants to (:math:`\Delta\ce{^13C}`, permil).""" - self.d13C_C3: NDArray + self.d13C_C3: NDArray[np.float64] r"""Contribution from C3 plants to (:math:`d\ce{^13C}`, permil).""" - self.d13C_C4: NDArray + self.d13C_C4: NDArray[np.float64] r"""Contribution from C3 plants to (:math:`d\ce{^13C}`, permil).""" def __repr__(self) -> str: @@ -247,7 +249,10 @@ def __repr__(self) -> str: return f"C3C4Competition(shape={self.shape})" def estimate_isotopic_discrimination( - self, d13CO2: NDArray, Delta13C_C3_alone: NDArray, Delta13C_C4_alone: NDArray + self, + d13CO2: NDArray[np.float64], + Delta13C_C3_alone: NDArray[np.float64], + Delta13C_C4_alone: NDArray[np.float64], ) -> None: r"""Estimate CO2 isotopic discrimination values. diff --git a/pyrealm/pmodel/functions.py b/pyrealm/pmodel/functions.py index 69f4c98e..a9f3938a 100644 --- a/pyrealm/pmodel/functions.py +++ b/pyrealm/pmodel/functions.py @@ -12,12 +12,12 @@ def calc_ftemp_arrh( - tk: NDArray, + tk: NDArray[np.float64], ha: float | NDArray, tk_ref: float | NDArray | None = None, pmodel_const: PModelConst = PModelConst(), core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate enzyme kinetics scaling factor. 
Calculates the temperature-scaling factor :math:`f` for enzyme kinetics following an @@ -81,9 +81,9 @@ def calc_ftemp_arrh( def calc_ftemp_inst_rd( - tc: NDArray, + tc: NDArray[np.float64], pmodel_const: PModelConst = PModelConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate temperature scaling of dark respiration. Calculates the temperature-scaling factor for dark respiration at a given @@ -123,14 +123,14 @@ def calc_ftemp_inst_rd( def calc_modified_arrhenius_factor( - tk: NDArray, + tk: NDArray[np.float64], Ha: float | NDArray, Hd: float | NDArray, deltaS: float | NDArray, tk_ref: float | NDArray, mode: str = "M2002", core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the modified Arrhenius factor with temperature for an enzyme. This function returns a temperature-determined factor expressing the rate of an @@ -198,8 +198,8 @@ def calc_modified_arrhenius_factor( def calc_ftemp_kphio( - tc: NDArray, c4: bool = False, pmodel_const: PModelConst = PModelConst() -) -> NDArray: + tc: NDArray[np.float64], c4: bool = False, pmodel_const: PModelConst = PModelConst() +) -> NDArray[np.float64]: r"""Calculate temperature dependence of quantum yield efficiency. Calculates the temperature dependence of the quantum yield efficiency, as a @@ -257,11 +257,11 @@ def calc_ftemp_kphio( def calc_gammastar( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], pmodel_const: PModelConst = PModelConst(), core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the photorespiratory CO2 compensation point. Calculates the photorespiratory **CO2 compensation point** in absence of dark @@ -311,10 +311,10 @@ def calc_gammastar( def calc_ns_star( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the relative viscosity of water. Calculates the relative viscosity of water (:math:`\eta^*`), given the standard @@ -355,11 +355,11 @@ def calc_ns_star( def calc_kmm( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], pmodel_const: PModelConst = PModelConst(), core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the Michaelis Menten coefficient of Rubisco-limited assimilation. Calculates the Michaelis Menten coefficient of Rubisco-limited assimilation @@ -431,11 +431,11 @@ def calc_kmm( def calc_kp_c4( - tc: NDArray, - patm: NDArray, + tc: NDArray[np.float64], + patm: NDArray[np.float64], pmodel_const: PModelConst = PModelConst(), core_const: CoreConst = CoreConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the Michaelis Menten coefficient of PEPc. Calculates the Michaelis Menten coefficient of phosphoenolpyruvate carboxylase @@ -474,10 +474,10 @@ def calc_kp_c4( def calc_soilmstress_stocker( - soilm: NDArray, - meanalpha: NDArray = np.array(1.0), + soilm: NDArray[np.float64], + meanalpha: NDArray[np.float64] = np.array(1.0), pmodel_const: PModelConst = PModelConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate Stocker's empirical soil moisture stress factor. 
This function calculates a penalty factor :math:`\beta(\theta)` for well-watered GPP @@ -562,10 +562,10 @@ def calc_soilmstress_stocker( def calc_soilmstress_mengoli( - soilm: NDArray = np.array(1.0), - aridity_index: NDArray = np.array(1.0), + soilm: NDArray[np.float64] = np.array(1.0), + aridity_index: NDArray[np.float64] = np.array(1.0), pmodel_const: PModelConst = PModelConst(), -) -> NDArray: +) -> NDArray[np.float64]: r"""Calculate the Mengoli et al. empirical soil moisture stress factor. This function calculates a penalty factor :math:`\beta(\theta)` for well-watered GPP @@ -644,7 +644,9 @@ def calc_soilmstress_mengoli( return np.where(soilm >= psi, y, (y / psi) * soilm) -def calc_co2_to_ca(co2: NDArray, patm: NDArray) -> NDArray: +def calc_co2_to_ca( + co2: NDArray[np.float64], patm: NDArray[np.float64] +) -> NDArray[np.float64]: r"""Convert :math:`\ce{CO2}` ppm to Pa. Converts ambient :math:`\ce{CO2}` (:math:`c_a`) in part per million to Pascals, diff --git a/pyrealm/pmodel/isotopes.py b/pyrealm/pmodel/isotopes.py index 50caeaab..ca7da2f4 100644 --- a/pyrealm/pmodel/isotopes.py +++ b/pyrealm/pmodel/isotopes.py @@ -5,6 +5,7 @@ from warnings import warn +import numpy as np from numpy.typing import NDArray from pyrealm.constants import IsotopesConst @@ -42,8 +43,8 @@ class CalcCarbonIsotopes: def __init__( self, pmodel: PModel, - D14CO2: NDArray, - d13CO2: NDArray, + D14CO2: NDArray[np.float64], + d13CO2: NDArray[np.float64], isotopes_const: IsotopesConst = IsotopesConst(), ): # Check inputs are congruent @@ -57,22 +58,22 @@ def __init__( """Indicates if estimates calculated for C3 or C4 photosynthesis.""" # Attributes defined by methods below - self.Delta13C_simple: NDArray + self.Delta13C_simple: NDArray[np.float64] r"""Discrimination against carbon 13 (:math:`\Delta\ce{^{13}C}`, permil) excluding photorespiration.""" - self.Delta14C: NDArray + self.Delta14C: NDArray[np.float64] r"""Discrimination against carbon 13 (:math:`\Delta\ce{^{13}C}`, permil) including photorespiration.""" - self.Delta13C: NDArray + self.Delta13C: NDArray[np.float64] r"""Discrimination against carbon 14 (:math:`\Delta\ce{^{14}C}`, permil) including photorespiration.""" - self.d13C_leaf: NDArray + self.d13C_leaf: NDArray[np.float64] r"""Isotopic ratio of carbon 13 in leaves (:math:`\delta\ce{^{13}C}`, permil).""" - self.d14C_leaf: NDArray + self.d14C_leaf: NDArray[np.float64] r"""Isotopic ratio of carbon 14 in leaves (:math:`\delta\ce{^{14}C}`, permil).""" - self.d13C_wood: NDArray + self.d13C_wood: NDArray[np.float64] r"""Isotopic ratio of carbon 13 in wood (:math:`\delta\ce{^{13}C}`, permil), given a parameterized post-photosynthetic fractionation.""" diff --git a/pyrealm/pmodel/jmax_limitation.py b/pyrealm/pmodel/jmax_limitation.py index 27628f3f..cedc6793 100644 --- a/pyrealm/pmodel/jmax_limitation.py +++ b/pyrealm/pmodel/jmax_limitation.py @@ -89,14 +89,14 @@ def __init__( # Attributes populated by alternative method - two should always be populated by # the methods used below, but omega and omega_star only apply to smith19 - self.f_j: NDArray + self.f_j: NDArray[np.float64] """:math:`J_{max}` limitation factor, calculated using the method.""" - self.f_v: NDArray + self.f_v: NDArray[np.float64] """:math:`V_{cmax}` limitation factor, calculated using the method.""" - self.omega: NDArray | None = None + self.omega: NDArray[np.float64] | None = None """Component of :math:`J_{max}` calculation for method ``smith19`` (:cite:`Smith:2019dv`).""" - self.omega_star: NDArray | None = None + self.omega_star: 
NDArray[np.float64] | None = None """Component of :math:`J_{max}` calculation for method ``smith19`` (:cite:`Smith:2019dv`).""" diff --git a/pyrealm/pmodel/optimal_chi.py b/pyrealm/pmodel/optimal_chi.py index 3b5b5ba7..86513b09 100644 --- a/pyrealm/pmodel/optimal_chi.py +++ b/pyrealm/pmodel/optimal_chi.py @@ -110,23 +110,23 @@ def __init__( # default value as they must be populated by the set_beta and estimate_chi # methods, which are called below, and so will be populated before __init__ # returns. - self.beta: NDArray + self.beta: NDArray[np.float64] """The ratio of carboxylation to transpiration cost factors.""" - self.xi: NDArray + self.xi: NDArray[np.float64] r"""Defines the sensitivity of :math:`\chi` to the vapour pressure deficit, related to the carbon cost of water (Medlyn et al. 2011; Prentice et 2014).""" - self.chi: NDArray + self.chi: NDArray[np.float64] r"""The ratio of leaf internal to ambient :math:`\ce{CO2}` partial pressure (:math:`\chi`).""" - self.mc: NDArray + self.mc: NDArray[np.float64] r""":math:`\ce{CO2}` limitation factor for RuBisCO-limited assimilation (:math:`m_c`).""" - self.mj: NDArray + self.mj: NDArray[np.float64] r""":math:`\ce{CO2}` limitation factor for light-limited assimilation (:math:`m_j`).""" - self.ci: NDArray + self.ci: NDArray[np.float64] r"""The leaf internal :math:`\ce{CO2}` partial pressure (:math:`c_i`).""" - self.mjoc: NDArray + self.mjoc: NDArray[np.float64] r"""Ratio of :math:`m_j/m_c`.""" # Run the calculation methods after checking for any required variables @@ -144,7 +144,7 @@ def set_beta(self) -> None: """Set the beta values.""" @abstractmethod - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate xi, chi and other variables.""" def _check_requires(self) -> None: @@ -250,7 +250,7 @@ def set_beta(self) -> None: # leaf-internal-to-ambient CO2 partial pressure (ci/ca) ratio self.beta = self.pmodel_const.beta_cost_ratio_prentice14 - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C3 plants.""" if xi_values is not None: @@ -319,7 +319,7 @@ def set_beta(self) -> None: # leaf-internal-to-ambient CO2 partial pressure (ci/ca) ratio self.beta = self.pmodel_const.beta_cost_ratio_prentice14 - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C3 plants.""" if xi_values is not None: @@ -382,7 +382,7 @@ def set_beta(self) -> None: # leaf-internal-to-ambient CO2 partial pressure (ci/ca) ratio self.beta = self.pmodel_const.beta_cost_ratio_c4 - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C4 plants, setting ``mj`` and ``mc`` to 1.""" if xi_values is not None: _ = check_input_shapes(self.env.ca, xi_values) @@ -449,7 +449,7 @@ def set_beta(self) -> None: # leaf-internal-to-ambient CO2 partial pressure (ci/ca) ratio self.beta = self.pmodel_const.beta_cost_ratio_c4 - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C4 plants, setting ``mj`` and ``mc`` to 1.""" if xi_values is not None: _ = check_input_shapes(self.env.ca, xi_values) @@ -534,7 +534,7 @@ def set_beta(self) -> 
None: + self.pmodel_const.lavergne_2020_a_c3 ) - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C3 plants.""" if xi_values is not None: @@ -625,7 +625,7 @@ def set_beta(self) -> None: + self.pmodel_const.lavergne_2020_a_c4 ) - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C4 plants excluding photorespiration.""" # Calculate chi and xi as in Prentice 14 but removing gamma terms. @@ -702,7 +702,7 @@ def set_beta(self) -> None: # Calculate chi and xi as in Prentice 14 but removing gamma terms. self.beta = self.pmodel_const.beta_cost_ratio_c4 - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C4 plants excluding photorespiration.""" # Calculate chi and xi as in Prentice 14 but removing gamma terms. @@ -771,7 +771,7 @@ def set_beta(self) -> None: # Calculate chi and xi as in Prentice 14 but removing gamma terms. self.beta = self.pmodel_const.beta_cost_ratio_c4 - def estimate_chi(self, xi_values: NDArray | None = None) -> None: + def estimate_chi(self, xi_values: NDArray[np.float64] | None = None) -> None: """Estimate ``chi`` for C4 plants excluding photorespiration.""" # Calculate chi and xi as in Prentice 14 but removing gamma terms. diff --git a/pyrealm/pmodel/pmodel.py b/pyrealm/pmodel/pmodel.py index c3ce20ca..abad0e80 100644 --- a/pyrealm/pmodel/pmodel.py +++ b/pyrealm/pmodel/pmodel.py @@ -238,14 +238,14 @@ def __init__( # in Pascals, but more commonly reported in µmol mol-1. The standard equation # (ca - ci) / 1.6 expects inputs in ppm, so the pascal versions are back # converted here. - self.iwue: NDArray = (5 / 8 * (env.ca - self.optchi.ci)) / ( + self.iwue: NDArray[np.float64] = (5 / 8 * (env.ca - self.optchi.ci)) / ( 1e-6 * self.env.patm ) """Intrinsic water use efficiency (iWUE, µmol mol-1)""" # The basic calculation of LUE = phi0 * M_c * m with an added penalty term # for jmax limitation - self.lue: NDArray = ( + self.lue: NDArray[np.float64] = ( self.kphio.kphio * self.optchi.mj * self.jmaxlim.f_v @@ -258,14 +258,14 @@ def __init__( # no defaults and are only populated by estimate_productivity. Their getter # methods have a check to raise an informative error # ----------------------------------------------------------------------- - self._vcmax: NDArray - self._vcmax25: NDArray - self._rd: NDArray - self._jmax: NDArray - self._gpp: NDArray - self._gs: NDArray - self._ppfd: NDArray - self._fapar: NDArray + self._vcmax: NDArray[np.float64] + self._vcmax25: NDArray[np.float64] + self._rd: NDArray[np.float64] + self._jmax: NDArray[np.float64] + self._gpp: NDArray[np.float64] + self._gs: NDArray[np.float64] + self._ppfd: NDArray[np.float64] + self._fapar: NDArray[np.float64] def _check_estimated(self, varname: str) -> None: """Raise error when accessing unpopulated parameters. 
@@ -277,50 +277,50 @@ def _check_estimated(self, varname: str) -> None: raise RuntimeError(f"{varname} not calculated: use estimate_productivity") @property - def gpp(self) -> NDArray: + def gpp(self) -> NDArray[np.float64]: """Gross primary productivity (µg C m-2 s-1).""" self._check_estimated("gpp") return self._gpp @property - def vcmax(self) -> NDArray: + def vcmax(self) -> NDArray[np.float64]: """Maximum rate of carboxylation (µmol m-2 s-1).""" self._check_estimated("vcmax") return self._vcmax @property - def vcmax25(self) -> NDArray: + def vcmax25(self) -> NDArray[np.float64]: """Maximum rate of carboxylation at standard temperature (µmol m-2 s-1).""" self._check_estimated("vcmax25") return self._vcmax25 @property - def rd(self) -> NDArray: + def rd(self) -> NDArray[np.float64]: """Dark respiration (µmol m-2 s-1).""" self._check_estimated("rd") return self._rd @property - def jmax(self) -> NDArray: + def jmax(self) -> NDArray[np.float64]: """Maximum rate of electron transport (µmol m-2 s-1).""" self._check_estimated("jmax") return self._jmax @property - def gs(self) -> NDArray: + def gs(self) -> NDArray[np.float64]: """Stomatal conductance (µmol m-2 s-1).""" self._check_estimated("gs") return self._gs @property - def ppfd(self) -> NDArray: + def ppfd(self) -> NDArray[np.float64]: """Photosynthetic photon flux density (PPFD, µmol m-2 s-1).""" self._check_estimated("gs") return self._ppfd @property - def fapar(self) -> NDArray: + def fapar(self) -> NDArray[np.float64]: """Fraction of absorbed photosynthetically active radiation (:math:`f_{APAR}` unitless). """ # noqa: D205 diff --git a/pyrealm/pmodel/pmodel_environment.py b/pyrealm/pmodel/pmodel_environment.py index 34722c4c..a63083a7 100644 --- a/pyrealm/pmodel/pmodel_environment.py +++ b/pyrealm/pmodel/pmodel_environment.py @@ -75,27 +75,29 @@ class PModelEnvironment: def __init__( self, - tc: NDArray, - vpd: NDArray, - co2: NDArray, - patm: NDArray, - theta: NDArray | None = None, - rootzonestress: NDArray | None = None, - aridity_index: NDArray | None = None, - mean_growth_temperature: NDArray | None = None, + tc: NDArray[np.float64], + vpd: NDArray[np.float64], + co2: NDArray[np.float64], + patm: NDArray[np.float64], + theta: NDArray[np.float64] | None = None, + rootzonestress: NDArray[np.float64] | None = None, + aridity_index: NDArray[np.float64] | None = None, + mean_growth_temperature: NDArray[np.float64] | None = None, pmodel_const: PModelConst = PModelConst(), core_const: CoreConst = CoreConst(), ): self.shape: tuple = check_input_shapes(tc, vpd, co2, patm) # Validate and store the forcing variables - self.tc: NDArray = bounds_checker(tc, -25, 80, "[]", "tc", "°C") + self.tc: NDArray[np.float64] = bounds_checker(tc, -25, 80, "[]", "tc", "°C") """The temperature at which to estimate photosynthesis, °C""" - self.vpd: NDArray = bounds_checker(vpd, 0, 10000, "[]", "vpd", "Pa") + self.vpd: NDArray[np.float64] = bounds_checker(vpd, 0, 10000, "[]", "vpd", "Pa") """Vapour pressure deficit, Pa""" - self.co2: NDArray = bounds_checker(co2, 0, 1000, "[]", "co2", "ppm") + self.co2: NDArray[np.float64] = bounds_checker(co2, 0, 1000, "[]", "co2", "ppm") """CO2 concentration, ppm""" - self.patm: NDArray = bounds_checker(patm, 30000, 110000, "[]", "patm", "Pa") + self.patm: NDArray[np.float64] = bounds_checker( + patm, 30000, 110000, "[]", "patm", "Pa" + ) """Atmospheric pressure, Pa""" # Guard against calc_density issues @@ -112,7 +114,7 @@ def __init__( "zero or explicitly set to np.nan" ) - self.ca: NDArray = calc_co2_to_ca(self.co2, 
self.patm) + self.ca: NDArray[np.float64] = calc_co2_to_ca(self.co2, self.patm) """Ambient CO2 partial pressure, Pa""" self.gammastar = calc_gammastar( @@ -140,13 +142,13 @@ def __init__( # Easy to add the attributes dynamically, but bounds checking less # obvious. - self.theta: NDArray + self.theta: NDArray[np.float64] """Volumetric soil moisture (m3/m3)""" - self.rootzonestress: NDArray + self.rootzonestress: NDArray[np.float64] """Rootzone stress factor (experimental) (-)""" - self.aridity_index: NDArray + self.aridity_index: NDArray[np.float64] """Climatological aridity index as PET/P (-)""" - self.mean_growth_temperature: NDArray + self.mean_growth_temperature: NDArray[np.float64] """Mean temperature > 0°C during growing degree days (°C)""" if theta is not None: diff --git a/pyrealm/pmodel/quantum_yield.py b/pyrealm/pmodel/quantum_yield.py index 63ece4be..9a7b0c8d 100644 --- a/pyrealm/pmodel/quantum_yield.py +++ b/pyrealm/pmodel/quantum_yield.py @@ -146,7 +146,7 @@ def __init__( "of reference kphio values" ) - self.reference_kphio: NDArray = reference_kphio + self.reference_kphio: NDArray[np.float64] = reference_kphio """The kphio reference value for the method.""" self.use_c4: bool = use_c4 """Use a C4 parameterisation if available.""" @@ -154,7 +154,7 @@ def __init__( # Declare attributes populated by methods. These are typed but not assigned a # default value as they must are populated by the subclass specific # calculate_kphio method, which is called below to populate the values. - self.kphio: NDArray + self.kphio: NDArray[np.float64] """The calculated intrinsic quantum yield of photosynthesis.""" # Run the calculation methods after checking for any required variables @@ -300,7 +300,7 @@ class QuantumYieldSandoval( defaulting to the ratio of 1/9 in the absence of a Q cycle :cite:`long:1993a`. """ - def peak_quantum_yield(self, aridity: NDArray) -> NDArray: + def peak_quantum_yield(self, aridity: NDArray[np.float64]) -> NDArray[np.float64]: """Calculate the peak quantum yield as a function of the aridity index. Args: diff --git a/pyrealm/pmodel/scaler.py b/pyrealm/pmodel/scaler.py index 4fa8d0a7..a781a0e7 100644 --- a/pyrealm/pmodel/scaler.py +++ b/pyrealm/pmodel/scaler.py @@ -69,7 +69,7 @@ class SubdailyScaler: def __init__( self, - datetimes: NDArray, + datetimes: NDArray[np.datetime64], ) -> None: # Datetime validation. The inputs must be: # - one dimensional datetime64 @@ -306,7 +306,7 @@ def set_nearest(self, time: np.timedelta64) -> None: # self.method = "Around max" - def pad_values(self, values: NDArray) -> NDArray: + def pad_values(self, values: NDArray[np.float64]) -> NDArray[np.float64]: """Pad values array to full days. This method takes an array representing daily values and pads the first and @@ -329,7 +329,7 @@ def pad_values(self, values: NDArray) -> NDArray: return np.pad(values, padding_dims, constant_values=(np.nan, np.nan)) - def get_window_values(self, values: NDArray) -> NDArray: + def get_window_values(self, values: NDArray[np.float64]) -> NDArray[np.float64]: """Extract acclimation window values for a variable. This method takes an array of values which has the same shape along the first @@ -368,8 +368,8 @@ def get_window_values(self, values: NDArray) -> NDArray: return values_by_day[:, self.include, ...] 
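The scaler hunks above tighten the typing of `SubdailyScaler` inputs to `NDArray[np.datetime64]` for the timestamps and `NDArray[np.float64]` for the value arrays. A minimal sketch of how those typed inputs are used, assuming half-hourly forcing data and the module path `pyrealm.pmodel.scaler` referenced in this diff; the dates and values are made up for illustration:

```python
import numpy as np
from pyrealm.pmodel.scaler import SubdailyScaler

# Two complete days of half-hourly observations as a datetime64 array
datetimes = np.arange(
    np.datetime64("2024-06-01T00:00"),
    np.datetime64("2024-06-03T00:00"),
    np.timedelta64(30, "m"),
)
scaler = SubdailyScaler(datetimes)

# Acclimate to the single observation closest to noon on each day
scaler.set_nearest(np.timedelta64(12, "h"))

# A float64 variable on the subdaily axis, reduced to one value per day within
# the acclimation window
tc_subdaily = 10.0 + 10.0 * np.sin(np.linspace(0.0, 4.0 * np.pi, datetimes.size))
daily_tc = scaler.get_daily_means(tc_subdaily)
```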
def get_daily_means( - self, values: NDArray, allow_partial_data: bool = False - ) -> NDArray: + self, values: NDArray[np.float64], allow_partial_data: bool = False + ) -> NDArray[np.float64]: """Get the daily means of a variable during the acclimation window. This method extracts values from a given variable during a defined acclimation @@ -406,11 +406,12 @@ def get_daily_means( def fill_daily_to_subdaily( self, - values: NDArray, + values: NDArray[np.float64], + previous_value: NDArray[np.float64] | None = None, update_point: str = "max", kind: str = "previous", fill_from: np.timedelta64 | None = None, - ) -> NDArray: + ) -> NDArray[np.float64]: """Resample daily variables onto the subdaily time scale. This method takes an array representing daily values and interpolates those @@ -419,45 +420,53 @@ def fill_daily_to_subdaily( axis of the `values` must be the same length as the number of days used to create the instance. + The update point defaults to the maximum time of day during the acclimation + window. It can also be set to the mean time of day, but note that this implies + that the plant predicts the daily values between the mean and max observation + time. The ``fill_from`` argument can be used to set the update point to an + arbitrary time of day. + Two interpolation kinds are currently implemented: * ``previous`` interpolates the daily value as a constant, until updating to the next daily value. This option will fill values until the end of the time - series. + series. * ``linear`` interpolates linearly between the update points of the daily values. The interpolated values are held constant for the first day and then interpolated linearly: this is to avoid plants adapting optimally to future conditions. - The update point defaults to the maximum time of day during the acclimation - window. It can also be set to the mean time of day, but note that this implies - that the plant predicts the daily values between the mean and max observation - time. The ``fill_from`` argument can be used to set the update point to an - arbitrary time of day. + Subdaily observations before the update point on the first day of the time + series are filled with ``np.nan``. The ``previous_value`` argument can be used + to provide an alternative value, allowing time series to be processed in blocks, + but this option is only currently implemented for the ``previous`` + interpolation method. Args: values: An array with the first dimension matching the number of days in the - instances :class:`~pyrealm.pmodel.scaler.SubdailyScaler` object. + instance's :class:`~pyrealm.pmodel.scaler.SubdailyScaler` object. + previous_value: An array with dimensions equal to a slice across the first + axis of the values array. update_point: The point in the acclimation window at which the plant updates - to the new daily value: one of 'mean' or 'max'. + to the new daily value: one of 'mean' or 'max'. kind: The kind of interpolation to use to fill between daily values: one of - 'previous' or 'linear', + 'previous' or 'linear'. fill_from: As an alternative to ``update_point``, an - :class:`numpy.timedelta64` value giving the time of day from which to fill - values forward. + :class:`numpy.timedelta64` value giving the time of day from which to + fill values forward. 
""" if values.shape[0] != self.n_days: - raise ValueError("Values is not of length n_days on its first axis.") + raise ValueError("Values is not of length n_days on its first axis") if fill_from is not None: if not isinstance(fill_from, np.timedelta64): - raise ValueError("The fill_from argument must be a timedelta64 value.") + raise ValueError("The fill_from argument must be a timedelta64 value") # Convert to seconds and check it is in range _fill_from = fill_from.astype("timedelta64[s]") if not (_fill_from >= 0 and _fill_from < 24 * 60 * 60): - raise ValueError("The fill_from argument is not >= 0 and < 24 hours.") + raise ValueError("The fill_from argument is not >= 0 and < 24 hours") update_time = self.observation_dates + _fill_from @@ -468,8 +477,23 @@ def fill_daily_to_subdaily( else: raise ValueError("Unknown update point") - # Note that interp1d cannot handle datetime64 inputs, so need to interpolate - # using datetimes cast to integer types + # Check previous value settings - only allow with previous interpolation and + # check the previous value shape matches + if previous_value is not None: + if kind == "linear": + raise NotImplementedError( + "Using previous value with kind='linear' is not implemented" + ) + + # Use np.broadcast_shapes here to handle checking array shapes. This is + # mostly to catch the fact that () and (1,) are equivalent. + try: + np.broadcast_shapes(previous_value.shape, values.shape) + except ValueError: + raise ValueError( + "The input to previous_value is not congruent with " + "the shape of the observed data" + ) # Use fill_value to handle extrapolation before or after update point: # - previous will fill the last value forward to the end of the time series, @@ -479,8 +503,16 @@ def fill_daily_to_subdaily( # value until _after_ the update point. if kind == "previous": - fill_value = (None, values[-1]) + # The fill values here are used to extend the last daily value out to the + # end of the subdaily observations but also to fill any provided previous + # values for subdaily observations _before_ the first daily value. If + # the default previous value of None is supplied, this inserts np.nan as + # expected. + fill_value = (previous_value, values[-1]) elif kind == "linear": + # Shift the values forward by a day, inserting a copy of the first day at + # the start. This then avoids plants seeing the future and provides values + # up until the last observation. values = np.insert(values, 0, values[0], axis=0) update_time = np.append( update_time, update_time[-1] + np.timedelta64(1, "D") @@ -489,6 +521,8 @@ def fill_daily_to_subdaily( else: raise ValueError("Unsupported interpolation option") + # Note that interp1d cannot handle datetime64 inputs, so need to interpolate + # using datetimes cast to integer types interp_fun = interp1d( update_time.astype("int"), values, @@ -496,6 +530,7 @@ def fill_daily_to_subdaily( kind=kind, bounds_error=False, fill_value=fill_value, + assume_sorted=True, ) # TODO - The kind "previous" might be replaceable with bottleneck.push diff --git a/pyrealm/pmodel/subdaily.py b/pyrealm/pmodel/subdaily.py index 0367ce38..cd8277a7 100644 --- a/pyrealm/pmodel/subdaily.py +++ b/pyrealm/pmodel/subdaily.py @@ -41,8 +41,11 @@ def memory_effect( - values: NDArray, alpha: float = 0.067, allow_holdover: bool = False -) -> NDArray: + values: NDArray[np.float64], + previous_values: NDArray[np.float64] | None = None, + alpha: float = 0.067, + allow_holdover: bool = False, +) -> NDArray[np.float64]: r"""Apply a memory effect to a variable. 
Three key photosynthetic parameters (:math:`\xi`, :math:`V_{cmax25}` and @@ -89,6 +92,8 @@ def memory_effect( Args: values: The values to apply the memory effect to. + previous_values: Last available realised value used if model is fitted in + chunks and value at t=0 is not optimal. alpha: The relative weight applied to the most recent observation. allow_holdover: Allow missing values to be filled by holding over earlier values. @@ -104,8 +109,11 @@ def memory_effect( # Initialise the output storage and set the first values to be a slice along the # first axis of the input values - memory_values = np.empty_like(values, dtype=np.float32) - memory_values[0] = values[0] + memory_values = np.empty_like(values, dtype=np.float64) + if previous_values is None: + memory_values[0] = values[0] + else: + memory_values[0] = previous_values * (1 - alpha) + values[0] * alpha # Handle the data if there are no missing data, if not nan_present: @@ -177,6 +185,10 @@ class SubdailyPModel: more rapid acclimation: :math:`\alpha=1` results in immediate acclimation and :math:`\alpha=0` results in no acclimation at all, with values pinned to the initial estimates. + * By default, the initial realised value :math:`R_1` for each of the three slowly + acclimating variables is assumed to be the first optimal value :math:`O_1`, but + the `previous_realised` argument can be used to provide values of :math:`R_0` from + which to calculate :math:`R_{1} = R_{0}(1 - \alpha) + O_{1} \alpha`. * The realised values are then filled back onto the original subdaily timescale, with :math:`V_{cmax}` and :math:`J_{max}` then being calculated from the slowly responding :math:`V_{cmax25}` and :math:`J_{max25}` and the actual subdaily @@ -221,18 +233,20 @@ class SubdailyPModel: allow_partial_data: Should estimates of daily optimal conditions be calculated with missing values in the acclimation window. reference_kphio: An optional alternative reference value for the quantum yield - efficiency of photosynthesis (:math:`\phi_0`, -) to be passed to the kphio - calculation method. + efficiency of photosynthesis (:math:`\phi_0`, -) to be passed to the kphio + calculation method. fill_kind: The approach used to fill daily realised values to the subdaily timescale, currently one of 'previous' or 'linear'. + previous_realised: A tuple of previous realised values of three NumPy arrays + (xi_real, vcmax25_real, jmax25_real). 
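A short numerical sketch of the exponential weighting described above, using the ``memory_effect`` signature introduced in this diff; the daily optimal values and the choice of ``alpha = 1/15`` are illustrative only:

```python
import numpy as np
from pyrealm.pmodel.subdaily import memory_effect

optimal = np.array([10.0, 12.0, 11.0, 13.0])  # daily optimal values O_t

# Without previous_values, the first realised value is simply O_1 and thereafter
# R_t = R_{t-1} * (1 - alpha) + O_t * alpha
realised = memory_effect(optimal, alpha=1 / 15)

# Supplying the last realised value of an earlier block (R_0) instead gives
# R_1 = R_0 * (1 - alpha) + O_1 * alpha, so a long series can be fitted in chunks
realised_chunked = memory_effect(optimal, previous_values=np.array(9.0), alpha=1 / 15)
```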
""" def __init__( self, env: PModelEnvironment, fs_scaler: SubdailyScaler, - fapar: NDArray, - ppfd: NDArray, + fapar: NDArray[np.float64], + ppfd: NDArray[np.float64], method_optchi: str = "prentice14", method_jmaxlim: str = "wang17", method_kphio: str = "temperature", @@ -241,6 +255,7 @@ def __init__( allow_holdover: bool = False, allow_partial_data: bool = False, fill_kind: str = "previous", + previous_realised: tuple[NDArray, NDArray, NDArray] | None = None, ) -> None: # Warn about the API warn( @@ -379,36 +394,90 @@ def __init__( 1 / calc_ftemp_arrh(tk_acclim, self.env.pmodel_const.subdaily_jmax25_ha) ) + """Instantaneous optimal :math:`x_{i}`, :math:`V_{cmax}` and :math:`J_{max}`""" + # Check the shape of previous realised values are congruent with a slice across + # the time axis + if previous_realised is not None: + if fill_kind != "previous": + raise NotImplementedError( + "Using previous_realised is only implemented for " + "fill_kind = 'previous'" + ) + + # All variables should share the shape of a slice along the first axis of + # the environmental forcings + expected_shape = self.env.tc[0].shape + if not ( + (previous_realised[0].shape == expected_shape) + and (previous_realised[1].shape == expected_shape) + and (previous_realised[2].shape == expected_shape) + ): + raise ValueError( + "`previous_realised` entries have wrong shape in Subdaily PModel" + ) + else: + previous_xi_real, previous_vcmax25_real, previous_jmax25_real = ( + previous_realised + ) + else: + previous_xi_real, previous_vcmax25_real, previous_jmax25_real = [ + None, + None, + None, + ] + # 5) Calculate the realised daily values from the instantaneous optimal values - self.xi_real: NDArray = memory_effect( - self.pmodel_acclim.optchi.xi, alpha=alpha, allow_holdover=allow_holdover + self.xi_real: NDArray[np.float64] = memory_effect( + self.pmodel_acclim.optchi.xi, + previous_values=previous_xi_real, + alpha=alpha, + allow_holdover=allow_holdover, ) r"""Realised daily slow responses in :math:`\xi`""" - self.vcmax25_real: NDArray = memory_effect( - self.vcmax25_opt, alpha=alpha, allow_holdover=allow_holdover + self.vcmax25_real: NDArray[np.float64] = memory_effect( + self.vcmax25_opt, + previous_values=previous_vcmax25_real, + alpha=alpha, + allow_holdover=allow_holdover, ) r"""Realised daily slow responses in :math:`V_{cmax25}`""" - self.jmax25_real: NDArray = memory_effect( - self.jmax25_opt, alpha=alpha, allow_holdover=allow_holdover + self.jmax25_real: NDArray[np.float64] = memory_effect( + self.jmax25_opt, + previous_values=previous_jmax25_real, + alpha=alpha, + allow_holdover=allow_holdover, ) + r"""Realised daily slow responses in :math:`J_{max25}`""" # 6) Fill the realised xi, jmax25 and vcmax25 from daily values back to the # subdaily timescale. - self.subdaily_vcmax25 = fs_scaler.fill_daily_to_subdaily(self.vcmax25_real) - self.subdaily_jmax25 = fs_scaler.fill_daily_to_subdaily(self.jmax25_real) - self.subdaily_xi = fs_scaler.fill_daily_to_subdaily(self.xi_real) + self.subdaily_xi = fs_scaler.fill_daily_to_subdaily( + self.xi_real, previous_value=previous_xi_real + ) + self.subdaily_vcmax25 = fs_scaler.fill_daily_to_subdaily( + self.vcmax25_real, previous_value=previous_vcmax25_real + ) + self.subdaily_jmax25 = fs_scaler.fill_daily_to_subdaily( + self.jmax25_real, previous_value=previous_jmax25_real + ) # 7) Adjust subdaily jmax25 and vcmax25 back to jmax and vcmax given the # actual subdaily temperatures. 
subdaily_tk = self.env.tc + self.env.core_const.k_CtoK - self.subdaily_vcmax: NDArray = self.subdaily_vcmax25 * calc_ftemp_arrh( - tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_vcmax25_ha + self.subdaily_vcmax: NDArray[np.float64] = ( + self.subdaily_vcmax25 + * calc_ftemp_arrh( + tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_vcmax25_ha + ) ) """Estimated subdaily :math:`V_{cmax}`.""" - self.subdaily_jmax: NDArray = self.subdaily_jmax25 * calc_ftemp_arrh( - tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_jmax25_ha + self.subdaily_jmax: NDArray[np.float64] = ( + self.subdaily_jmax25 + * calc_ftemp_arrh( + tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_jmax25_ha + ) ) """Estimated subdaily :math:`J_{max}`.""" @@ -421,7 +490,9 @@ def __init__( """Estimated subdaily :math:`c_i`.""" # Calculate Ac, J and Aj at subdaily scale to calculate assimilation - self.subdaily_Ac: NDArray = self.subdaily_vcmax * self.optimal_chi.mc + self.subdaily_Ac: NDArray[np.float64] = ( + self.subdaily_vcmax * self.optimal_chi.mc + ) """Estimated subdaily :math:`A_c`.""" iabs = fapar * ppfd @@ -430,11 +501,11 @@ def __init__( 1 + ((4 * self.kphio.kphio * iabs) / self.subdaily_jmax) ** 2 ) - self.subdaily_Aj: NDArray = (subdaily_J / 4) * self.optimal_chi.mj + self.subdaily_Aj: NDArray[np.float64] = (subdaily_J / 4) * self.optimal_chi.mj """Estimated subdaily :math:`A_j`.""" # Calculate GPP and convert from mol to gC - self.gpp: NDArray = ( + self.gpp: NDArray[np.float64] = ( np.minimum(self.subdaily_Aj, self.subdaily_Ac) * self.env.core_const.k_c_molmass ) @@ -535,8 +606,8 @@ def __init__( self, env: PModelEnvironment, fs_scaler: SubdailyScaler, - ppfd: NDArray, - fapar: NDArray, + ppfd: NDArray[np.float64], + fapar: NDArray[np.float64], alpha: float = 1 / 15, allow_holdover: bool = False, kphio: float = 1 / 8, @@ -618,11 +689,11 @@ def __init__( ) # Calculate the realised values from the instantaneous optimal values - self.vcmax25_real: NDArray = memory_effect( + self.vcmax25_real: NDArray[np.float64] = memory_effect( self.vcmax25_opt, alpha=alpha, allow_holdover=allow_holdover ) r"""Realised daily slow responses in :math:`V_{cmax25}`""" - self.jmax25_real: NDArray = memory_effect( + self.jmax25_real: NDArray[np.float64] = memory_effect( self.jmax25_opt, alpha=alpha, allow_holdover=allow_holdover ) r"""Realised daily slow responses in :math:`J_{max25}`""" @@ -661,18 +732,24 @@ def __init__( self.jmax25_real, fill_from=fill_from ) - self.subdaily_vcmax: NDArray = self.subdaily_vcmax25 * calc_ftemp_arrh( - tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_vcmax25_ha + self.subdaily_vcmax: NDArray[np.float64] = ( + self.subdaily_vcmax25 + * calc_ftemp_arrh( + tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_vcmax25_ha + ) ) """Estimated subdaily :math:`V_{cmax}`.""" - self.subdaily_jmax: NDArray = self.subdaily_jmax25 * calc_ftemp_arrh( - tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_jmax25_ha + self.subdaily_jmax: NDArray[np.float64] = ( + self.subdaily_jmax25 + * calc_ftemp_arrh( + tk=subdaily_tk, ha=self.env.pmodel_const.subdaily_jmax25_ha + ) ) """Estimated subdaily :math:`J_{max}`.""" # Calculate Ac, J and Aj at subdaily scale to calculate assimilation - self.subdaily_Ac: NDArray = ( + self.subdaily_Ac: NDArray[np.float64] = ( self.subdaily_vcmax * (self.subdaily_ci - self.env.gammastar) / (self.subdaily_ci + self.env.kmm) @@ -688,7 +765,7 @@ def __init__( 1 + ((4 * self.kphio.kphio * iabs) / self.subdaily_jmax) ** 2 ) - self.subdaily_Aj: NDArray = ( + self.subdaily_Aj: NDArray[np.float64] = ( 
(subdaily_J / 4) * (self.subdaily_ci - self.env.gammastar) / (self.subdaily_ci + 2 * self.env.gammastar) @@ -696,7 +773,7 @@ def __init__( """Estimated subdaily :math:`A_j`.""" # Calculate GPP, converting from mol m2 s1 to grams carbon m2 s1 - self.gpp: NDArray = ( + self.gpp: NDArray[np.float64] = ( np.minimum(self.subdaily_Aj, self.subdaily_Ac) * self.env.core_const.k_c_molmass ) diff --git a/pyrealm/splash/evap.py b/pyrealm/splash/evap.py index 501f9eab..281720ff 100644 --- a/pyrealm/splash/evap.py +++ b/pyrealm/splash/evap.py @@ -49,29 +49,29 @@ class DailyEvapFluxes: solar: DailySolarFluxes pa: InitVar[NDArray] tc: InitVar[NDArray] - kWm: NDArray = field(default_factory=lambda: np.array([150.0])) + kWm: NDArray[np.float64] = field(default_factory=lambda: np.array([150.0])) core_const: CoreConst = field(default_factory=lambda: CoreConst()) - sat: NDArray = field(init=False) + sat: NDArray[np.float64] = field(init=False) """Slope of saturation vapour pressure temperature curve, Pa/K""" - lv: NDArray = field(init=False) + lv: NDArray[np.float64] = field(init=False) """Enthalpy of vaporization, J/kg""" - pw: NDArray = field(init=False) + pw: NDArray[np.float64] = field(init=False) """Density of water, kg/m^3""" - psy: NDArray = field(init=False) + psy: NDArray[np.float64] = field(init=False) """Psychrometric constant, Pa/K""" - econ: NDArray = field(init=False) + econ: NDArray[np.float64] = field(init=False) """Water-to-energy conversion factor""" - cond: NDArray = field(init=False) + cond: NDArray[np.float64] = field(init=False) """Daily condensation, mm""" - eet_d: NDArray = field(init=False) + eet_d: NDArray[np.float64] = field(init=False) """Daily equilibrium evapotranspiration (EET), mm""" - pet_d: NDArray = field(init=False) + pet_d: NDArray[np.float64] = field(init=False) """Daily potential evapotranspiration (PET), mm""" - rx: NDArray = field(init=False) + rx: NDArray[np.float64] = field(init=False) """Variable substitute, (mm/hr)/(W/m^2)""" - def __post_init__(self, pa: NDArray, tc: NDArray) -> None: + def __post_init__(self, pa: NDArray[np.float64], tc: NDArray[np.float64]) -> None: """Calculate invariant components of evapotranspiration. The post_init method calculates the components of the evaporative fluxes that @@ -107,8 +107,8 @@ def __post_init__(self, pa: NDArray, tc: NDArray) -> None: self.rx = (3.6e6) * (1.0 + self.core_const.k_w) * self.econ def estimate_aet( - self, wn: NDArray, day_idx: int | None = None, only_aet: bool = True - ) -> NDArray | tuple[NDArray, NDArray, NDArray]: + self, wn: NDArray[np.float64], day_idx: int | None = None, only_aet: bool = True + ) -> NDArray[np.float64] | tuple[NDArray, NDArray, NDArray]: """Estimate actual evapotranspiration. 
This method estimates the daily actual evapotranspiration (AET, mm/day), given diff --git a/pyrealm/splash/solar.py b/pyrealm/splash/solar.py index a80326e1..fe3bfc54 100644 --- a/pyrealm/splash/solar.py +++ b/pyrealm/splash/solar.py @@ -51,39 +51,43 @@ class DailySolarFluxes: tc: InitVar[NDArray] core_const: CoreConst = field(default_factory=lambda: CoreConst()) - nu: NDArray = field(init=False) + nu: NDArray[np.float64] = field(init=False) """True heliocentric anomaly, degrees""" - lambda_: NDArray = field(init=False) + lambda_: NDArray[np.float64] = field(init=False) """True heliocentric longitude, degrees""" - dr: NDArray = field(init=False) + dr: NDArray[np.float64] = field(init=False) """Distance factor, -""" - delta: NDArray = field(init=False) + delta: NDArray[np.float64] = field(init=False) """Declination angle, degrees""" - ru: NDArray = field(init=False) + ru: NDArray[np.float64] = field(init=False) """Intermediate variable, unitless""" - rv: NDArray = field(init=False) + rv: NDArray[np.float64] = field(init=False) """Intermediate variable, unitless""" - hs: NDArray = field(init=False) + hs: NDArray[np.float64] = field(init=False) """Sunset hour angle, degrees""" - ra_d: NDArray = field(init=False) + ra_d: NDArray[np.float64] = field(init=False) """Daily extraterrestrial solar radiation, J/m^2""" - tau: NDArray = field(init=False) + tau: NDArray[np.float64] = field(init=False) """Transmittivity, unitless""" - ppfd_d: NDArray = field(init=False) + ppfd_d: NDArray[np.float64] = field(init=False) """Daily PPFD, mol/m^2""" - rnl: NDArray = field(init=False) + rnl: NDArray[np.float64] = field(init=False) """Net longwave radiation, W/m^2""" - rw: NDArray = field(init=False) + rw: NDArray[np.float64] = field(init=False) """Intermediate variable, W/m^2""" - hn: NDArray = field(init=False) + hn: NDArray[np.float64] = field(init=False) """Net radiation cross-over hour angle, degrees""" - rn_d: NDArray = field(init=False) + rn_d: NDArray[np.float64] = field(init=False) """Daytime net radiation, J/m^2""" - rnn_d: NDArray = field(init=False) + rnn_d: NDArray[np.float64] = field(init=False) """Nighttime net radiation (rnn_d), J/m^2""" def __post_init__( - self, lat: NDArray, elv: NDArray, sf: NDArray, tc: NDArray + self, + lat: NDArray[np.float64], + elv: NDArray[np.float64], + sf: NDArray[np.float64], + tc: NDArray[np.float64], ) -> None: """Populates key fluxes from input variables.""" diff --git a/pyrealm/splash/splash.py b/pyrealm/splash/splash.py index d7116609..79c0230f 100644 --- a/pyrealm/splash/splash.py +++ b/pyrealm/splash/splash.py @@ -57,13 +57,13 @@ class SplashModel: def __init__( self, - lat: NDArray, - elv: NDArray, - sf: NDArray, - tc: NDArray, - pn: NDArray, + lat: NDArray[np.float64], + elv: NDArray[np.float64], + sf: NDArray[np.float64], + tc: NDArray[np.float64], + pn: NDArray[np.float64], dates: Calendar, - kWm: NDArray = np.array([150.0]), + kWm: NDArray[np.float64] = np.array([150.0]), core_const: CoreConst = CoreConst(), ): # Check input sizes are congurent @@ -76,23 +76,31 @@ def __init__( if len(dates) != self.shape[0]: raise ValueError("Number of dates must match the first dimension of inputs") - self.elv: NDArray = elv + self.elv: NDArray[np.float64] = elv """The elevation of sites.""" - self.lat: NDArray = bounds_checker(lat, -90, 90, label="lat", unit="°") + self.lat: NDArray[np.float64] = bounds_checker( + lat, -90, 90, label="lat", unit="°" + ) """The latitude of sites.""" - self.sf: NDArray = bounds_checker(sf, 0, 1, label="sf") + self.sf: 
NDArray[np.float64] = bounds_checker(sf, 0, 1, label="sf") """The sunshine fraction (0-1) of daily observations.""" - self.tc: NDArray = bounds_checker(tc, -25, 80, label="tc", unit="°C") + self.tc: NDArray[np.float64] = bounds_checker( + tc, -25, 80, label="tc", unit="°C" + ) """The air temperature in °C of daily observations.""" - self.pn: NDArray = bounds_checker(pn, 0, 1e3, label="pn", unit="mm/day") + self.pn: NDArray[np.float64] = bounds_checker( + pn, 0, 1e3, label="pn", unit="mm/day" + ) """The precipitation in mm of daily observations.""" self.dates: Calendar = dates """The dates of observations along the first array axis.""" - self.kWm: NDArray = bounds_checker(kWm, 0, 1e4, label="kWm", unit="mm") + self.kWm: NDArray[np.float64] = bounds_checker( + kWm, 0, 1e4, label="kWm", unit="mm" + ) """The maximum soil water capacity for sites.""" # TODO - potentially allow _actual_ climatic pressure data as an input - self.pa: NDArray = calc_patm(elv, core_const=core_const) + self.pa: NDArray[np.float64] = calc_patm(elv, core_const=core_const) """The atmospheric pressure at sites, derived from elevation""" # Calculate the daily solar fluxes - these are invariant across the simulation @@ -109,12 +117,12 @@ def __init__( def estimate_initial_soil_moisture( # noqa: max-complexity=12 self, - wn_init: NDArray | None = None, + wn_init: NDArray[np.float64] | None = None, max_iter: int = 10, max_diff: float = 1.0, return_convergence: bool = False, verbose: bool = False, - ) -> NDArray: + ) -> NDArray[np.float64]: """Estimate initial soil moisture. This method uses the first year of data provided to a SplashModel instance to @@ -222,7 +230,7 @@ def estimate_initial_soil_moisture( # noqa: max-complexity=12 return wn_start def estimate_daily_water_balance( - self, previous_wn: NDArray, day_idx: int | None = None + self, previous_wn: NDArray[np.float64], day_idx: int | None = None ) -> tuple[NDArray, NDArray, NDArray]: r"""Estimate the daily water balance. @@ -286,7 +294,7 @@ def estimate_daily_water_balance( def calculate_soil_moisture( self, - wn_init: NDArray, + wn_init: NDArray[np.float64], ) -> tuple[NDArray, NDArray, NDArray]: """Calculate the soil moisture, AET and runoff from a SplashModel. diff --git a/pyrealm/tmodel.py b/pyrealm/tmodel.py index 35739d42..416c53c5 100644 --- a/pyrealm/tmodel.py +++ b/pyrealm/tmodel.py @@ -47,7 +47,7 @@ class TTree: def __init__( self, - diameters: NDArray, + diameters: NDArray[np.float64], traits: TModelTraits = TModelTraits(), ) -> None: self.traits: TModelTraits = traits @@ -55,33 +55,33 @@ def __init__( # The diameter is used to define all of the geometric scaling # based on the trait parameters. 
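Most of the edits in this part of the diff simply replace bare ``NDArray`` annotations with the parametrised ``NDArray[np.float64]`` form from ``numpy.typing``. The sketch below is purely illustrative (the ``scale`` helper is hypothetical and not part of ``pyrealm``) and only shows the convention being applied:

```python
import numpy as np
from numpy.typing import NDArray


def scale(values: NDArray[np.float64], factor: float = 2.0) -> NDArray[np.float64]:
    """Hypothetical helper used only to illustrate the annotation style."""
    return values * factor


# NDArray[np.float64] is shorthand for np.ndarray[Any, np.dtype[np.float64]], so
# the annotation documents the expected dtype and lets mypy check call sites.
kWm: NDArray[np.float64] = np.array([150.0])
scaled = scale(kWm)
```

The dataclass fields and method signatures edited in ``evap.py``, ``solar.py``, ``splash.py`` and ``tmodel.py`` all follow this same pattern.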
- self._diameter: NDArray - self._height: NDArray - self._crown_fraction: NDArray - self._crown_area: NDArray - self._mass_stm: NDArray - self._mass_fol: NDArray - self._mass_swd: NDArray + self._diameter: NDArray[np.float64] + self._height: NDArray[np.float64] + self._crown_fraction: NDArray[np.float64] + self._crown_area: NDArray[np.float64] + self._mass_stm: NDArray[np.float64] + self._mass_fol: NDArray[np.float64] + self._mass_swd: NDArray[np.float64] self.reset_diameters(diameters) # Growth is then applied by providing estimated gpp using the # calculate_growth() method, which populates the following: self.growth_calculated: bool = False - self._gpp_raw: NDArray - self._gpp_actual: NDArray - self._npp: NDArray - self._resp_swd: NDArray - self._resp_frt: NDArray - self._resp_fol: NDArray - self._turnover: NDArray - self._d_mass_s: NDArray - self._d_mass_fr: NDArray - self._delta_d: NDArray - self._delta_mass_stm: NDArray - self._delta_mass_frt: NDArray - - def _check_growth_calculated(self, property: str) -> NDArray: + self._gpp_raw: NDArray[np.float64] + self._gpp_actual: NDArray[np.float64] + self._npp: NDArray[np.float64] + self._resp_swd: NDArray[np.float64] + self._resp_frt: NDArray[np.float64] + self._resp_fol: NDArray[np.float64] + self._turnover: NDArray[np.float64] + self._d_mass_s: NDArray[np.float64] + self._d_mass_fr: NDArray[np.float64] + self._delta_d: NDArray[np.float64] + self._delta_mass_stm: NDArray[np.float64] + self._delta_mass_frt: NDArray[np.float64] + + def _check_growth_calculated(self, property: str) -> NDArray[np.float64]: """Helper function to return growth values if calculated. This acts as a gatekeeper to make sure that a growth property is not returned @@ -96,101 +96,101 @@ def _check_growth_calculated(self, property: str) -> NDArray: return getattr(self, property) @property - def diameter(self) -> NDArray: + def diameter(self) -> NDArray[np.float64]: """Individual diameter (m).""" return self._diameter @property - def height(self) -> NDArray: + def height(self) -> NDArray[np.float64]: """Individual height (m).""" return self._height @property - def crown_fraction(self) -> NDArray: + def crown_fraction(self) -> NDArray[np.float64]: """Individual crown fraction (unitless).""" return self._crown_fraction @property - def crown_area(self) -> NDArray: + def crown_area(self) -> NDArray[np.float64]: """Individual crown area (m2).""" return self._crown_area @property - def mass_swd(self) -> NDArray: + def mass_swd(self) -> NDArray[np.float64]: """Individual softwood mass (kg).""" return self._mass_swd @property - def mass_stm(self) -> NDArray: + def mass_stm(self) -> NDArray[np.float64]: """Individual stem mass (kg).""" return self._mass_stm @property - def mass_fol(self) -> NDArray: + def mass_fol(self) -> NDArray[np.float64]: """Individual foliage mass (kg).""" return self._mass_fol @property - def gpp_raw(self) -> NDArray: + def gpp_raw(self) -> NDArray[np.float64]: """Raw gross primary productivity.""" return self._check_growth_calculated("_gpp_raw") @property - def gpp_actual(self) -> NDArray: + def gpp_actual(self) -> NDArray[np.float64]: """Actual gross primary productivity.""" return self._check_growth_calculated("_gpp_actual") @property - def resp_swd(self) -> NDArray: + def resp_swd(self) -> NDArray[np.float64]: """Individual softwood respiration costs.""" return self._check_growth_calculated("_resp_swd") @property - def resp_frt(self) -> NDArray: + def resp_frt(self) -> NDArray[np.float64]: """Individual fine root respiration costs.""" return 
self._check_growth_calculated("_resp_frt") @property - def resp_fol(self) -> NDArray: + def resp_fol(self) -> NDArray[np.float64]: """Individual foliar respiration costs.""" return self._check_growth_calculated("_resp_fol") @property - def npp(self) -> NDArray: + def npp(self) -> NDArray[np.float64]: """Net primary productivity.""" return self._check_growth_calculated("_npp") @property - def turnover(self) -> NDArray: + def turnover(self) -> NDArray[np.float64]: """Plant turnover.""" return self._check_growth_calculated("_turnover") @property - def d_mass_s(self) -> NDArray: + def d_mass_s(self) -> NDArray[np.float64]: """Individual relative change in mass.""" return self._check_growth_calculated("_d_mass_s") @property - def d_mass_fr(self) -> NDArray: + def d_mass_fr(self) -> NDArray[np.float64]: """Individual relative change in fine root mass.""" return self._check_growth_calculated("_d_mass_fr") @property - def delta_d(self) -> NDArray: + def delta_d(self) -> NDArray[np.float64]: """Individual change in diameter.""" return self._check_growth_calculated("_delta_d") @property - def delta_mass_stm(self) -> NDArray: + def delta_mass_stm(self) -> NDArray[np.float64]: """Individual total change in stem mass.""" return self._check_growth_calculated("_delta_mass_stm") @property - def delta_mass_frt(self) -> NDArray: + def delta_mass_frt(self) -> NDArray[np.float64]: """Individual total change in fine root mass.""" return self._check_growth_calculated("_delta_mass_frt") - def reset_diameters(self, values: NDArray) -> None: + def reset_diameters(self, values: NDArray[np.float64]) -> None: """Reset the stem diameters for the T model. The set_diameter method can be used to reset the diameter values and then uses @@ -239,7 +239,7 @@ def reset_diameters(self, values: NDArray) -> None: # Flag any calculated growth values as outdated self.growth_calculated = False - def calculate_growth(self, gpp: NDArray) -> None: + def calculate_growth(self, gpp: NDArray[np.float64]) -> None: """Calculate growth predictions given a GPP estimate. This method updates the instance with predicted changes in tree geometry, mass @@ -318,8 +318,8 @@ def calculate_growth(self, gpp: NDArray) -> None: def grow_ttree( - gpp: NDArray, - d_init: NDArray, + gpp: NDArray[np.float64], + d_init: NDArray[np.float64], time_axis: int, traits: TModelTraits = TModelTraits(), outvars: tuple[str, ...] = ("diameter", "height", "crown_area", "delta_d"), diff --git a/pyrealm_build_data/__init__.py b/pyrealm_build_data/__init__.py index 2735eb94..1b5e9bc0 100644 --- a/pyrealm_build_data/__init__.py +++ b/pyrealm_build_data/__init__.py @@ -1,4 +1,21 @@ -"""The pyrealm_build_data package is an sdist only package used to store build data -shared between the docs and testing. Making it a package allows it to be accessed using -importlib.resources(). -""" # noqa: D205 +"""The ``pyrealm`` repository includes both the ``pyrealm`` package and the +``pyrealm_build_data`` package. The ``pyrealm_build_data`` package contains datasets +that are used in the ``pyrealm`` build and testing process. This includes: + +* Example datasets that are used in the package documentation, such as simple spatial + datasets for showing the use of the P Model. +* "Golden" datasets for regression testing ``pyrealm`` implementations against the + outputs of other implementations. These datasets will include a set of input data and + then output predictions from other implementations. 
+* Datasets for providing profiling of ``pyrealm`` code and for benchmarking new versions + of the package code against earlier implementations to check for performance issues. + +The package is organised into submodules that reflect the data use or previous +implementation. + +Note that ``pyrealm_build_data`` is a source distribution only (``sdist``) component of +``pyrealm``, so is not included in binary distributions (``wheel``) that are typically +installed by end users. This means that files in ``pyrealm_build_data`` are not +available if a user has simply used ``pip install pyrealm``: please *do not* use +``pyrealm_build_data`` within the main ``pyrealm`` code. +""" # noqa: D205, D415 diff --git a/pyrealm_build_data/bigleaf/__init__.py b/pyrealm_build_data/bigleaf/__init__.py index 9e3edcee..e0568665 100644 --- a/pyrealm_build_data/bigleaf/__init__.py +++ b/pyrealm_build_data/bigleaf/__init__.py @@ -1 +1,16 @@ -"""Validation data from the bigleaf package in R.""" +"""This submodule contains benchmark outputs from the ``bigleaf`` package in ``R``, +which has been used as the basis for core hygrometry functions. The +``bigleaf_conversions.R`` R script runs a set of test values through `bigleaf`. The +first part of the file prints out some simple test values that have been used in package +doctests and then the second part of the file generates more complex benchmarking inputs +that are saved, along with `bigleaf` outputs as `bigleaf_test_values.json`. + +Running ``bigleaf_conversions.R`` requires an installation of ``R`` along with the +``jsonlite`` and ``bigleaf`` packages, and the script can then be run from within the +submodule folder as: + +.. code:: sh + + Rscript bigleaf_conversions.R + +""" # noqa: D205 diff --git a/pyrealm_build_data/community/__init__.py b/pyrealm_build_data/community/__init__.py new file mode 100644 index 00000000..fc2675a9 --- /dev/null +++ b/pyrealm_build_data/community/__init__.py @@ -0,0 +1,5 @@ +"""The :mod:`pyrealm_build_data.community` submodule provides a set of input files for +the :mod:`pyrealm.demography` module that are used both in unit testing for the module +and as inputs for generating documentation of the module. The files provide definitions +of plant functional types and plant communities in a range of formats. +""" # noqa: D205 diff --git a/pyrealm_build_data/rpmodel/__init__.py b/pyrealm_build_data/rpmodel/__init__.py index 0500ea06..7ca00769 100644 --- a/pyrealm_build_data/rpmodel/__init__.py +++ b/pyrealm_build_data/rpmodel/__init__.py @@ -1 +1,58 @@ -"""Validation data from the rpmodel package in R.""" +"""This submodule contains benchmark outputs from the ``rpmodel`` package in ``R``, +which has been used as the basis for initial development of the standard P Model. + +Test inputs +=========== + +The ``generate_test_inputs.py`` file defines a set of constants for running P Model +calculations and then defines a set of scalar and array inputs for the forcing variables +required to run the P Model. The array inputs are set of 100 values sampled randomly +across the ranges of plausible forcing value inputs in order to benchmark the +calculations of the P Model implementation. All of these values are stored in the +``test_inputs.json`` file. + +It requires ``python`` and the ``numpy`` package and can be run as: + +.. 
code:: sh + + python generate_test_inputs.py + +Simple `rpmodel` benchmarking +============================= + +The ``test_outputs_rpmodel.R`` contains R code to run the test input data set, and store +the expected predictions from the ``rpmodel`` package as ``test_outputs_rpmodel.json``. +It requires an installation of ``R`` and the ``rpmodel`` package and can be run as: + +.. code:: sh + + Rscript test_outputs_rpmodel.R + +Global array test +================= + +The remaining files in the submodule are intended to provide a global test dataset for +benchmarking the use of ``rpmodel`` on a global time-series, so using 3 dimensional +arrays with latitude, longitude and time coordinates. It is currently not used in +testing because of issues with the ``rpmodel`` package in version 1.2.0. It may also be +replaced in testing with the ``uk_data`` submodule, which is used as an example dataset +in the documentation. + +The files are: + +* ``pmodel_global.nc``: An input global NetCDF file containing forcing variables at 0.5° + spatial resolution and for two time steps. +* ``test_global_array.R``: An R script to run ``rpmodel`` using the dataset. +* ``rpmodel_global_gpp_do_ftkphio.nc``: A NetCDF file containing ``rpmodel`` predictions + using corrections for temperature effects on the `kphio` parameter. +* ``rpmodel_global_gpp_no_ftkphio.nc``: A NetCDF file containing ``rpmodel`` predictions + with fixed ``kphio``. + +To generate the predicted outputs again requires an R installation with the ``rpmodel`` +package: + +.. code:: sh + + Rscript test_global_array.R + +""" # noqa: D205 diff --git a/pyrealm_build_data/sandoval_kphio/__init__.py b/pyrealm_build_data/sandoval_kphio/__init__.py new file mode 100644 index 00000000..e5a8cf09 --- /dev/null +++ b/pyrealm_build_data/sandoval_kphio/__init__.py @@ -0,0 +1,14 @@ +r"""This submodule contains benchmark outputs from the ``calc_phi0.R`` script, which is +an experimental approach to calculating the :math:`\phi_0` parameter for the P Model +with modulation from climatic aridity and growing degree days and the current +temperature. The calculation is implemented in ``pyrealm`` as +:class:`~pyrealm.pmodel.quantum_yield.QuantumYieldSandoval`. + +The files are: + +* ``calc_phi0.R``: The original implementation and parameterisation. +* ``create_test_inputs.R``: A script to run the original implementation with a range of + inputs and save a file of test values. +* ``sandoval_kphio.csv``: The resulting test values. + +""" # noqa: D205 diff --git a/pyrealm_build_data/splash/README.md b/pyrealm_build_data/splash/README.md deleted file mode 100644 index a1bebba8..00000000 --- a/pyrealm_build_data/splash/README.md +++ /dev/null @@ -1,91 +0,0 @@ -# SPLASH benchmark data - -This directory contains the code and inputs used to generate the SPLASH benchmark -datasets used in unit testing `pyrealm.splash` and in regression tests against the -original SPLASH v1 implementation. - -## Benchmark test data - -The `splash_make_flux_benchmark_inputs.py` script is used to generate 100 random -locations around the globe with random dates, initial soil moisture, preciptation, -cloud fraction and temperature (within reasonable bounds). This provides a robust test -of the calculations of various fluxes across a wide range of plausible value -combinations. 
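The benchmark files documented in these submodule docstrings (for example ``test_inputs.json`` or ``sandoval_kphio.csv``) ship inside the ``pyrealm_build_data`` sdist, so test and documentation code can locate them with ``importlib.resources`` rather than hard-coded paths. A minimal sketch of that pattern, assuming the file is included as package data in the installed source distribution:

```python
import json
from importlib import resources

# Illustrative only: locate a benchmark file inside the sdist-only
# pyrealm_build_data package and load it.
data_file = resources.files("pyrealm_build_data.rpmodel") / "test_inputs.json"
with data_file.open() as fh:
    test_inputs = json.load(fh)
```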
The input data is created using: - -```sh -python splash_make_flux_benchmark_inputs.py -o data/daily_flux_benchmark_inputs.csv -``` - -The `splash_run_calc_daily_fluxes.py` script can then be used to run the inputs through -the original SPLASH implementation provided in the `splash_py_version` module. - -```sh -python splash_run_calc_daily_fluxes.py \ - -i data/daily_flux_benchmark_inputs.csv \ - -o data/daily_flux_benchmark_outputs.csv -``` - -## Original time series - -The SPLASH v1.0 implementation provided a time series of inputs for a single location -around San Francisco in 2000, with precipitation and temperature taken from WFDEI and -sunshine fraction interpolated from CRU TS. The original source data is included as -`data/splash_sf_example_data.csv`. - -The original SPLASH `main.py` provides a simple example to run this code and output -water balance, which can be used as a direct benchmark without any wrapper scripts. With -the alterations to make the SPLASH code importable, the command below can be used to run -the code and capture the output: - -```sh -python -m splash_py_version.main > data/splash_sf_example_data_main_output.csv -``` - -Note that this command also generates `main.log`, which contains over 54K lines of -logging and takes up over 6 Mb. This is not included in the `pyrealm` repo. - -Because the `splash_sf_example_data_main_output.csv` file only contains predicted water -balance, the same input data is also run through a wrapper script to allow daily -calculations to be benchmarked in more detail. The first step is to use the -`splash_sf_example_to_netcdf.py` script to convert the CSV data into a properly -dimensioned NetCDF file: - -```sh -python splash_sf_example_to_netcdf.py -``` - -This creates the file `data/splash_sf_example_data.nc`, which can be run using the -original SPLASH components using script `splash_run_time_series_parallel.py`. - -```sh -python splash_run_time_series_parallel.py \ - -i "data/splash_sf_example_data.nc" \ - -o "data/splash_sf_example_data_details.nc" -``` - -## Gridded time series - -This is a 20 x 20 cell spatial grid covering 2 years of daily data that is used to -validate the spin up of the initial moisture and the calculation of SPLASH water balance -over a time series across a larger spatial extent. The dataset is generated using the -`splash_make_spatial_grid_data.py` script, which requires paths to local copies of the -`WFDE5_v2` dataset and a version of the `CRU TS` dataset. Note that the file paths below -are examples and these data **are not included in the `pyrealm` repo**. - -```sh -python splash_make_spatial_grid_data.py \ - -w "/rds/general/project/lemontree/live/source/wfde5/wfde5_v2/" \ - -c "/rds/general/project/lemontree/live/source/cru_ts/cru_ts_4.0.4/" \ - -o "data/splash_nw_us_grid_data.nc" -``` - -The resulting `splash_nw_us_grid_data.nc` dataset can then be analysed using the -original SPLASH implementation using the script `splash_run_time_series_parallel.py`. -This uses parallel processing to run multiple cells simultaneously and will output the -progress of the calculations. 
- -```sh -python splash_run_time_series_parallel.py \ - -i "data/splash_nw_us_grid_data.nc" \ - -o "data/splash_nw_us_grid_data_outputs.nc" -``` diff --git a/pyrealm_build_data/splash/__init__.py b/pyrealm_build_data/splash/__init__.py index f48e4a55..2ce2c510 100644 --- a/pyrealm_build_data/splash/__init__.py +++ b/pyrealm_build_data/splash/__init__.py @@ -1 +1,97 @@ -"""Validation data from the SPLASH V1 package in python.""" +r"""This module contains the code and inputs used to generate the SPLASH benchmark +datasets used in unit testing :mod:`~pyrealm.splash` and in regression tests against the +original SPLASH v1 implementation. + +Benchmark test data +=================== + +The ``splash_make_flux_benchmark_inputs.py`` script is used to generate 100 random +locations around the globe with random dates, initial soil moisture, precipitation, +cloud fraction and temperature (within reasonable bounds). This provides a robust test +of the calculations of various fluxes across a wide range of plausible value +combinations. The input data is created using: + +.. code:: sh + + python splash_make_flux_benchmark_inputs.py -o data/daily_flux_benchmark_inputs.csv + +The ``splash_run_calc_daily_fluxes.py`` script can then be used to run the inputs +through the original SPLASH implementation provided in the ``splash_py_version`` +directory. + +.. code:: sh + + python splash_run_calc_daily_fluxes.py \ + -i data/daily_flux_benchmark_inputs.csv \ + -o data/daily_flux_benchmark_outputs.csv + +Original time series +==================== + +The SPLASH v1.0 implementation provided a time series of inputs for a single location +around San Francisco in 2000, with precipitation and temperature taken from WFDEI and +sunshine fraction interpolated from CRU TS. The original source data is included as +``data/splash_sf_example_data.csv``. + +The original SPLASH ``main.py`` provides a simple example to run this code and output +water balance, which can be used as a direct benchmark without any wrapper scripts. With +the alterations to make the SPLASH code importable, the command below can be used to run +the code and capture the output: + +.. code:: sh + + python -m splash_py_version.main > data/splash_sf_example_data_main_output.csv + +Note that this command also generates ``main.log``, which contains over 54K lines of +logging and takes up over 6 Mb. This is not included in the ``pyrealm_build_data`` +package. + +Because the ``splash_sf_example_data_main_output.csv`` file only contains predicted +water balance, the same input data is also run through a wrapper script to allow daily +calculations to be benchmarked in more detail. The first step is to use the +``splash_sf_example_to_netcdf.py`` script to convert the CSV data into a properly +dimensioned NetCDF file: + +.. code:: sh + + python splash_sf_example_to_netcdf.py + + +This creates the file ``data/splash_sf_example_data.nc``, which can then be run through +the original SPLASH components using the script ``splash_run_time_series_parallel.py``: + +.. code:: sh + + python splash_run_time_series_parallel.py \ + -i "data/splash_sf_example_data.nc" \ + -o "data/splash_sf_example_data_details.nc" + +Gridded time series +=================== + +This is a 20 x 20 cell spatial grid covering 2 years of daily data that is used to +validate the spin up of the initial moisture and the calculation of SPLASH water balance +over a time series across a larger spatial extent.
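These SPLASH benchmarks are used to validate the ``SplashModel`` workflow shown earlier in this diff: spin up an initial soil moisture estimate and then calculate the daily water balance. A minimal, illustrative sketch of that workflow is shown below; the forcing values are made up and the ``Calendar`` import path is an assumption, so treat it as an outline rather than a recipe:

```python
import numpy as np

from pyrealm.core.calendar import Calendar  # assumed import path
from pyrealm.splash.splash import SplashModel

# Two years of daily observations for a single site (values are made up).
days = np.arange(np.datetime64("2000-01-01"), np.datetime64("2002-01-01"))
n_days = len(days)

splash = SplashModel(
    lat=np.full(n_days, 37.7),
    elv=np.full(n_days, 142.0),
    sf=np.full(n_days, 0.6),
    tc=np.full(n_days, 15.0),
    pn=np.full(n_days, 2.0),
    dates=Calendar(days),
)

# Spin up the initial soil moisture from the first year of data, then calculate
# daily soil moisture, AET and runoff across the whole time series.
wn_init = splash.estimate_initial_soil_moisture()
wn, aet, ro = splash.calculate_soil_moisture(wn_init)
```

The gridded dataset described next exercises exactly this spin-up and water balance logic over a 20 x 20 spatial grid.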
The dataset is generated using the +``splash_make_spatial_grid_data.py`` script, which requires paths to local copies of the +``WFDE5_v2`` dataset and a version of the ``CRU TS`` dataset. Note that the file paths +below are examples and these data are **not included** in the ``pyrealm_build_data`` +package. + +.. code:: sh + + python splash_make_spatial_grid_data.py \ + -w "/rds/general/project/lemontree/live/source/wfde5/wfde5_v2/" \ + -c "/rds/general/project/lemontree/live/source/cru_ts/cru_ts_4.0.4/" \ + -o "data/splash_nw_us_grid_data.nc" + +The resulting ``splash_nw_us_grid_data.nc`` dataset can then be analysed using the +original SPLASH implementation using the script ``splash_run_time_series_parallel.py``. +This uses parallel processing to run multiple cells simultaneously and will output the +progress of the calculations. + +.. code:: sh + + python splash_run_time_series_parallel.py \ + -i "data/splash_nw_us_grid_data.nc" \ + -o "data/splash_nw_us_grid_data_outputs.nc" +""" # noqa: D205 diff --git a/pyrealm_build_data/subdaily/__init__.py b/pyrealm_build_data/subdaily/__init__.py index 1a922b13..6e4a2e8b 100644 --- a/pyrealm_build_data/subdaily/__init__.py +++ b/pyrealm_build_data/subdaily/__init__.py @@ -1 +1,11 @@ -"""Validation data from the original R implementation of the subdaily model.""" +"""At present, this submodule only contains a single file containing the predictions for +the ``BE_Vie`` fluxnet site from the original implementation of the ``subdaily`` module, +published in :cite:`mengoli:2022a`. Generating these predictions requires an +installation of R and then code from the following repository: + +`https://github.com/GiuliaMengoli/P-model_subDaily `_ + +TODO - This submodule should be updated to include the required code along with the +settings files and a runner script to reproduce this code. Or possibly to checkout the +required code as part of a shell script. +""" # noqa: D205, D415 diff --git a/pyrealm_build_data/t_model/__init__.py b/pyrealm_build_data/t_model/__init__.py index 78415131..af9ae09a 100644 --- a/pyrealm_build_data/t_model/__init__.py +++ b/pyrealm_build_data/t_model/__init__.py @@ -1 +1,22 @@ -"""Validation data from the original implementation of the T model in R.""" +"""The `t_model` submodule provides reference data for testing the implementation of the +T model :cite:p:`Li:2014bc`. The file ``t_model.r`` contains the original implementation +in R. The ``rtmodel_test_outputs.r`` contains a slightly modified version of the +function that makes it easier to output test values and then runs the function for the +following scenarios: + +* A 100 year sequence of plant growth for each of three plant functional type (PFT) + definitions (``default``, ``alt_one`` and ``alt_two``). The parameterisations for the + three PFTs are in the file ``pft_definitions.csv`` and the resulting time series for + each PFT is written to ``rtmodel_output_xxx.csv``. + +* Single year predictions across a range of initial diameter at breast height values for + each of the three PFTs. These are saved as ``rtmodel_unit_testing.csv`` and are used + for simple validation of the main scaling functions. + +To generate the predicted outputs again requires an R installation + +.. 
code:: sh + + Rscript rtmodel_test_outputs.r + +""" # noqa: D205, D415 diff --git a/pyrealm_build_data/t_model/rtmodel_output.csv b/pyrealm_build_data/t_model/rtmodel_output.csv deleted file mode 100644 index 192908b5..00000000 --- a/pyrealm_build_data/t_model/rtmodel_output.csv +++ /dev/null @@ -1,101 +0,0 @@ -"dD","D","H","Ac","Wf","Ws","Wss","GPP","Rm1","Rm2","dWs","dWfr" -9.23535494468514,0.1,9.30685130731739,2.46024211324268,0.316316843131202,7.30958392378022,7.02392939699003,9.1978885806073,0.309052893467561,0.687337521113514,3.76476031902394,0.353180440335674 -9.41347975186052,0.11847070988937,10.6064694519639,3.3216732508429,0.427072275108373,11.6918358255143,11.0829530843886,12.418444631113,0.4876499357131,0.928002429473987,5.11540450334165,0.402164311708017 -9.56242271047402,0.137297669393091,11.8227317389103,4.29097588211766,0.551696899129414,17.5038687119995,16.341714741243,16.0422902499508,0.719035448614692,1.19880425999427,6.62787138562828,0.446337579848307 -9.69154513020616,0.156422514814039,12.9554153338523,5.35704971954974,0.68876353537068,24.8965832383358,22.8600457539045,20.0279257785112,1.0058420131718,1.49664183654837,8.28100480236048,0.485860611280906 -9.8067299345139,0.175805605074452,14.0065146815861,6.50935370650332,0.836916905121855,34.0005072801248,30.6655594427653,24.3359423049901,1.34928461548167,1.81857021981548,10.0553709356637,0.52100017161193 -9.9118179887506,0.19541906494348,14.9792591514052,7.73806437175455,0.994893990654156,44.9276887698395,39.7582456018626,28.9296136904005,1.74936280648195,2.16184494805204,11.9334707249518,0.552072706446046 -10.0093918091071,0.215242700920981,15.8775424513254,9.03413941905036,1.16153221102076,57.7736797915528,50.1151867893735,33.7751343052069,2.20506821873244,2.52393980261545,13.8997585704866,0.579412356468393 -10.1012253105873,0.235261484539195,16.7055797227729,10.389327995,1.33577074221428,72.619474012326,61.6950922673494,38.841657416978,2.71458405976337,2.90254967658711,15.9405589482595,0.603352606803862 -10.1885538666546,0.25546393516037,17.4676966162467,11.7961506682707,1.51664794306338,89.5333258349725,74.4424750457644,44.1012203403852,3.27546890201363,3.29558498140014,18.0439316139172,0.624215789412495 -10.2722432037438,0.275841042893679,18.1681979492976,13.2478625042661,1.70329660769136,108.572417149682,88.2913823468227,49.5286063877833,3.8848208232602,3.70116133071686,20.1995143511014,0.642307261965835 -10.3528989805214,0.296385529301166,18.8112859111281,14.7384072607093,1.89493807637692,129.784359269137,103.168645349644,55.1011736243075,4.53942039538435,4.11758674368246,22.3983600310141,0.65791243548126 -10.4309402999148,0.317091327262209,19.4010098996293,16.2623676083615,2.09087583536076,153.208530237407,118.996649730928,60.7986687624964,5.23585258816084,4.54334773768881,24.6327776139951,0.671295558231005 -10.5066505553506,0.337953207862039,19.9412369382857,17.8149143933709,2.2904889934334,178.877254866472,135.695650514095,66.6030374739461,5.97060862262016,4.97709515339117,26.8961824217297,0.682699582930953 -10.5802135953775,0.35896650897274,20.4356356570896,19.3917567830863,2.4932258721111,206.816838688539,153.185667373722,72.4982380038911,6.74016936444376,5.41763022654509,29.1829583714671,0.692346693223028 -10.6517401049225,0.380126936163495,20.8876692783712,20.9890943915628,2.69859785034379,237.048468789206,171.388002835312,78.4700621870874,7.54107212475373,5.86389121292604,31.4883332404618,0.70043921831005 
-10.7212872876908,0.40143041637334,21.300594588579,22.6035720021614,2.90617354313504,269.588994995487,190.226427898919,84.5059661732193,8.36996282755243,6.31494073881986,33.8082670397389,0.707160761026536 -10.788873837953,0.422872990948721,21.6774648674801,24.2322371960362,3.11557335377609,304.451604634413,209.628079003584,90.5949120162889,9.22363547615769,6.76995396335422,36.1393529904729,0.712677426777046 -10.8544915107154,0.444450738624627,22.0211353971244,25.8725009974224,3.32646441395431,341.646403373464,229.524107986928,96.7272205426512,10.0990607514249,7.22820758365789,38.4787302691996,0.717139081457436 -10.9181141692032,0.466159721646058,22.3342706083702,27.5221015217334,3.53855590993715,381.180913710614,249.850123500867,102.894435445385,10.9934054340381,7.68906967893882,40.8240075270072,0.720680593366289 -10.9797049112577,0.487995949984465,22.6193522183127,29.1790705359805,3.75159478319749,423.060501631756,270.546458704653,109.089198266706,11.9040441830047,8.15199036820116,43.1731961323058,0.723423031954346 -11.0392216947139,0.50995535980698,22.8786879147386,30.8417027966632,3.96536178814241,467.28874088081,291.558296287291,115.30513376426,12.8285650366408,8.61649324392616,45.5246520968697,0.725474808061432 -11.0966217616171,0.532033803196408,23.1144202839139,32.508528007077,4.17966788662418,513.867723244362,312.835678172426,121.536745070403,13.7647698395867,9.08216753756115,47.8770256941991,0.726932748015837 -11.1518650808786,0.554227046719642,23.3285357753348,34.1782852265282,4.39435095769649,562.798322268441,334.333423749197,127.779318018207,14.7106706449647,9.548660970017,50.2292178503936,0.727883098927698 -11.2049169750738,0.576530776881399,23.5228735647065,35.8498995631448,4.60927280097576,614.080416916261,356.010977216961,134.028834004941,15.6644829975463,10.0156732401523,52.5803424692092,0.72840246558233 -11.2557500607435,0.598940610831546,23.699134223315,37.5224609864079,4.82431641253815,667.713080849277,377.832201659479,140.281890780365,16.6246168730171,10.4829501054607,54.929693937393,0.728558681142435 -11.3043456069087,0.621452110953033,23.8588881343156,39.1952051028777,5.03938351322714,723.694742271028,399.765134781884,146.535630574648,17.5896659304029,10.9502780112318,57.2767191390989,0.728411614824685 -11.3506943996244,0.644060802166851,24.0035836186046,40.867495747327,5.25439231037061,782.023318611418,421.781718844873,152.787675013381,18.5583956291744,11.4174792268967,59.6209933863983,0.728013920123719 -11.3947971885715,0.6667621909661,24.1345547479324,42.5388092506586,5.46927547508467,842.696329743805,443.857515200855,159.036066301443,19.5297306688376,11.8844074508305,61.9621997450884,0.72741172722304 -11.4366647830844,0.689551785343243,24.2530288329452,44.2087202549852,5.6839783184981,905.710992912826,465.971411958392,165.279214191096,20.5027421261692,12.3509438473973,64.3001113004591,0.726645283101856 -11.4763178583598,0.712425114909411,24.3601335804589,45.8768889547655,5.89845715132699,971.064302101036,488.105331654886,171.515848281556,21.476634592815,12.8169934824045,66.6345759662775,0.72574954261762 -11.5137865270621,0.735377750626131,24.4569039185977,47.5430496507862,6.11267781224394,1038.75309417099,510.243944383874,177.744975226784,22.4507335528904,13.2824821253373,68.965503492165,0.72475471358056 -11.5491097265677,0.758405323680255,24.5442884912449,49.2070005110274,6.3266143514178,1108.77410378004,532.374390583779,183.965840455342,23.4244731856863,13.7473533887698,71.2928543701998,0.72368675857888 
-11.5823344673607,0.781503543133391,24.623155825123,50.8685944390853,6.54024785645383,1181.12400877234,554.486016631851,190.177894030974,24.3973847318015,14.2115661772028,73.6166303814841,0.722567856084471 -11.6135149834225,0.804668212068112,24.6943001741004,52.5277309569395,6.75356540874936,1255.79946750029,576.570125483616,196.380760305435,25.3690855212791,14.6750924192878,75.936866558147,0.721416823179015 -11.6427118207842,0.827895242034957,24.7584470462852,54.1843490145248,6.96655915901033,1332.797149312,598.619743838572,202.574211036269,26.3392687288971,15.1379150589799,78.253624366379,0.72024950209124 -11.6699908957385,0.851180665676526,24.8162584202463,55.8384206438972,7.1792255113582,1412.11375925615,620.62940668237,208.758141662209,27.3076938940243,15.6000262826507,80.5669859421713,0.719079112624556 -11.695422549592,0.874520647468003,24.8683376574133,57.4899453808399,7.39156440610798,1493.74605789843,642.594959540046,214.932550447731,28.274178219762,16.0614259606083,82.8770492339763,0.71791657247362 -11.719080622341,0.897911492567187,24.9152341183743,59.1389453816113,7.60357869192145,1577.69087700956,664.513378360821,221.097520226468,29.2385886478761,16.5221202828238,85.1839239259814,0.716770787371128 -11.7410415633662,0.921349653811869,24.9574474914475,60.7854611672287,7.81527357864369,1663.94513177103,686.382606629988,227.253202490747,30.2008346917195,16.982120569978,87.4877280325238,0.715648912964093 -11.7613835932232,0.944831736938601,24.995431842535,62.4295479322454,8.02665616271726,1752.50583004824,708.201409055604,233.39980359154,31.1608619984466,17.4414422422149,89.7885850687288,0.714556590285057 -11.7801859269303,0.968354504125048,25.0295993958697,64.071272359425,8.23773501764035,1843.37007919871,729.969240996173,239.53757282977,32.1186466038316,17.9001039292314,92.0866217150543,0.713498156651918 -11.7975280658584,0.991914875978908,25.0603240558164,65.7107098860491,8.44851984249203,1936.53509081381,751.68613267014,245.666792236099,33.0741898374862,18.3581267065446,94.38196590435,0.712476833795632 -11.8134891624399,1.01550993211063,25.0879446803725,67.3479423718062,8.65902116208937,2031.99818373354,773.352587109393,251.78776785207,34.0275138328133,18.8155334439505,96.6747452695209,0.711494894974698 -11.8281474594478,1.0391369104355,25.1127681174163,68.9830561222896,8.8692500728658,2129.7567856242,794.969490778912,257.900822340737,34.9786575942721,19.272348253333,98.9650858981251,0.710553812787041 -11.8415798035397,1.0627932053544,25.1350720150602,70.6161402260632,9.07921802906526,2229.80843336607,816.538035775549,264.006288769597,35.9276735741242,19.7285960240771,101.253111347416,0.7096543893328 -11.8538612311044,1.08647636496148,25.1551074176751,72.2472851670227,9.28893666433149,2332.15077246263,838.05965253416,270.104505422751,36.8746247115031,20.1843020353925,103.538941879587,0.708796870315698 -11.8650646231567,1.11018408742369,25.173101159261,73.8765816773654,9.4984176442327,2436.78155565152,859.535952003227,276.195811512602,37.819581888142,20.639491635859,105.822693882435,0.707981044596844 -11.8752604250679,1.13391421667,25.1892580658455,75.5041197998774,9.70767254569852,2543.6986408717,880.968676299715,282.280543674115,38.7626217571875,21.0941899814501,108.104479445418,0.707206330634246 -11.8845164262532,1.15766473752014,25.2037629785039,77.1299881314368,9.91671275975616,2652.89998871831,902.359656910159,288.359033136565,39.703824904047,21.5484218241846,110.384406065233,0.706471851155574 
-11.8928975945279,1.18143377037264,25.2167826084184,78.754273222612,10.1255494143358,2764.38365949757,923.710779568305,294.431603478872,40.6432743010054,22.0022113443869,112.662576458682,0.705776497322344 -11.9004659596413,1.2052195655617,25.2284672351395,80.3770591109958,10.3341933142709,2878.14780997727,945.023955006398,300.498568884916,41.5810540202815,22.4555820203118,114.939088463738,0.705118983552523 -11.9072805404736,1.22902049748098,25.2389522588894,81.9984269684694,10.5426548959461,2994.19068991414,966.301094845036,306.560232824797,42.5172481731816,22.9085565295971,117.214035012502,0.704497894076989 -11.9133973104918,1.25283505856193,25.2483596173681,83.6184548449223,10.7509441943472,3112.51063842675,987.544091953677,312.616887096702,43.4519400459618,23.3611566776647,119.487504162134,0.703911722214845 -11.9188691962787,1.27666185318291,25.2567990770969,85.2372174930867,10.9590708205397,3233.106080272,1008.75480467921,318.668811172028,44.3852114058853,23.8134033487836,121.759579171935,0.703358903264433 -11.9237461042401,1.30049959157547,25.2643694088729,86.8547862610741,11.1670439478524,3355.97552207376,1029.93504440227,324.71627179362,45.3171419536999,24.2653164760464,124.030338616569,0.702837841822017 -11.9280749709458,1.32434708378395,25.271159456425,88.4712290409399,11.3748723052637,3481.11754854429,1051.08656593975,330.759522783474,46.2478089013491,24.7169150269997,126.299856526992,0.702346934259345 -11.9318998329353,1.34820323372584,25.2772491068575,90.0866102631639,11.5825641766925,3608.53081873188,1072.21106036679,336.798805022098,47.1772866561388,25.1682170021022,128.568202552003,0.701884587015119 -11.9352619122133,1.37206703339171,25.2827101709614,91.7009909283261,11.7901274050705,3738.21406232222,1093.31014988192,342.834346566941,48.1056465948044,25.6192394435739,130.835442134521,0.701449231284342 -11.9381997140573,1.39593755721614,25.287607180965,93.3144286684936,11.9975694002349,3870.16607601566,1114.38538438539,348.866362881891,49.0329569129571,26.0699984525464,133.10163669769,0.701039334623581 -11.9407491341446,1.41981395664425,25.2919981127896,94.9269778319297,12.2048971498195,4004.38571999813,1135.43823948266,354.895057153972,49.959282537237,26.5205092127289,135.366843836778,0.700653409929674 -11.9429435723781,1.44369545491254,25.2959350393887,96.5386895856977,12.4121172324468,4140.87191451933,1156.47011566282,360.920620676935,50.8846850891641,26.9707860190731,137.631117513583,0.700290022194102 -11.9448140511425,1.4675813420573,25.2994647212688,98.1496120315762,12.6192358326312,4279.6236365888,1177.48233843573,366.943233284627,51.8092228911721,27.4208423101577,139.894508250683,0.699947793385115 -11.9463893360423,1.49147097015958,25.302629139833,99.7597903314401,12.8262587568994,4420.63991679732,1198.47615924184,372.963063819736,52.7329510066408,27.8706907032171,142.157063323418,0.699625405764469 -11.9476960574774,1.51536374883167,25.3054659787496,101.369266838901,13.0331914507158,4563.91983626886,1219.45275697546,378.980270625947,53.6559213069204,28.3203430309185,144.418826947912,0.699321603905093 -11.9487588316758,1.53925914094662,25.3080090581336,102.978081234555,13.2400390158714,4709.46252374619,1240.41323998599,384.995002053579,54.5781825593838,28.7698103791476,146.679840463869,0.699035195639795 -11.9496003800507,1.56315665860998,25.3102887259348,104.586270662666,13.446806228057,4857.26715281142,1261.35864844211,391.007396970588,55.499780531453,29.2191031251943,148.940142511157,0.698765052139036 
-11.9502416459605,1.58705585937008,25.3123322105577,106.193869867514,13.6534975543946,5007.33293924156,1282.28995696246,397.017585272329,56.4207581063483,29.6682309758462,151.199769199484,0.698510107287346 -11.9507019081418,1.610956342662,25.3141639383961,107.800911328006,13.8601171707436,5159.65913849779,1303.20807743181,403.025688384807,57.3411554069997,30.1172030049955,153.458754270681,0.698269356503016 -11.9509988902507,1.63485774647828,25.3158058196382,109.407425389423,14.06666897864,5314.24504334618,1324.11386193558,409.031819757225,58.2610099251653,30.5660276904461,155.717129253297,0.698041855123787 -11.9511488660899,1.65875974425878,25.317277505408,111.013440391445,14.2731566217573,5471.08998160699,1345.00810575727,415.036085340631,59.18035665332,31.0147129496812,157.974923609348,0.697826716462099 -11.9511667602222,1.68266204199096,25.3185966190244,112.618982791806,14.4795835018036,5630.19331402889,1365.89155039375,421.038584050197,60.0992282173251,31.4632661744092,160.23216487319,0.697623109616918 -11.9510662437754,1.70656437551141,25.3197789639113,114.224077285093,14.6859527937977,5791.55443228403,1386.76488655164,427.039408209385,61.0176550082721,31.9116942637547,162.488878782583,0.697430257114672 -11.95085982533,1.73046650799896,25.3208387104553,115.828746916384,14.892267460678,5955.17275707956,1407.6287570959,433.038643974783,61.9356653122196,32.3600036560057,164.745089402082,0.697247432439474 -11.9505589368557,1.75436822764962,25.3217885638921,117.433013189507,15.0985302672223,6121.04773638097,1428.48375992771,439.036371740836,62.8532854368194,32.8082003588581,167.000819238952,0.697073957502065 -11.9501740147204,1.77826934552333,25.3226399151073,119.036896169816,15.3047437932621,6289.17884374234,1449.33045077411,445.032666524105,63.770539834061,33.2562899781309,169.256089351856,0.696909200087836 -11.9497145758456,1.80216969355277,25.323402976057,120.64041458148,15.5109104461903,6459.56557673866,1470.16934587636,451.027598326976,64.68745121856,33.7042777449449,171.510919452591,0.696752571316395 -11.9491892891207,1.82606912270446,25.3240869013502,122.243585899309,15.7170324727683,6632.20745549527,1491.00092456774,457.021232480963,65.6040406809804,34.1521685413771,173.765328001152,0.696603523138593 -11.9486060422159,1.8499675012827,25.3246998973831,123.846426435221,15.9231119702428,6807.10402130943,1511.82563173442,463.013629969995,66.5203277963146,34.5999669246193,176.019332294468,0.696461545891287 -11.9479720039624,1.87386471336713,25.3252493202842,125.448951419498,16.1291508967926,6984.25483535922,1532.64388015593,469.004847734164,67.4363307268608,35.0476771496765,178.2729485491,0.696326165925379 -11.947293682477,1.89776065737506,25.3257417638004,127.05117507697,16.3351510813247,7163.65947749504,1553.45605272328,474.994938954582,68.3520663198244,35.4953031906537,180.526191978243,0.696196943318756 -11.9465769792282,1.92165524474001,25.3261831381463,128.653110698346,16.5411142326445,7345.31754510896,1574.26250453518,480.983953320044,69.2675501995478,35.9428487606825,182.779076863339,0.69607346968243 -11.9458272392406,1.94554839869847,25.3265787407367,130.25477070688,16.7470419480275,7529.22865207757,1595.06356487342,486.971937276264,70.1827968544307,36.3903173305468,185.031616620636,0.69595536606546 -11.9450492976437,1.96944005317695,25.3269333196293,131.85616672059,16.9529357212187,7715.39242777381,1615.85953906019,492.95893425849,71.0978197186482,36.837712146065,187.28382386298,0.695842280962021 
-11.9442475227673,1.99333015177224,25.3272511304224,133.457309610253,17.1587969498897,7903.80851614384,1636.65071020033,498.944984908336,72.0126312488145,37.2850362442934,189.535710457149,0.695733888422098 -11.9434258559878,2.01721864681777,25.327535987279,135.058209553405,17.3646269425807,8094.47657484475,1657.43734081274,504.930127275648,72.9272429957607,37.7322924686113,191.787287577026,0.695629886265887 -11.9425878485214,2.04110549852975,25.3277913086781,136.658876084556,17.5704269251573,8287.39627443939,1678.21967435503,510.914397006251,73.8416656716215,38.1794834827512,194.038565752866,0.69552999440074 -11.9417366953606,2.06499067422679,25.3280201584364,138.259318141853,17.7761980468097,8482.56729764473,1698.99793664625,516.897827516402,74.755909212435,38.6266117838347,196.289554916939,0.695433953238604 -11.9408752665385,2.08887414761751,25.3282252824847,139.859544110396,17.9819413856223,8679.98933863027,1719.77233719249,522.880450154729,75.6699828364695,39.0736797144742,198.540264445788,0.695341522211167 -11.9400061359016,2.11275589815059,25.3284091418387,141.459561862421,18.1876579537398,8879.66210236323,1740.54307042038,528.862294352461,76.5838950984967,39.5206894739994,200.790703199337,0.695252478379422 -11.9391316075632,2.13663591042239,25.3285739421546,143.059378794547,18.393348702156,9081.58530399742,1761.3103168234,534.843387762681,77.4976539402297,39.9676431288628,203.040879557085,0.695166615133919 -11.9382537402013,2.16051417363752,25.3287216602227,144.659001862283,18.5990145251506,9285.75866830297,1782.07424402603,540.823756389335,78.4112667371455,40.4145426222808,205.290801451581,0.695083740981745 -11.9373743693555,2.18439068111792,25.3288540677147,146.258437611976,18.8046562643969,9492.18192913409,1802.83500777056,546.803424706665,79.3247403419046,40.8613897831587,207.540476399384,0.695003678416091 -11.9364951278701,2.20826542985663,25.3289727524698,147.857692210376,19.0102747127626,9700.85482893232,1823.59275283131,552.782415769751,80.2380811245776,41.3081863343504,209.789911529694,0.694926262864179 -11.9356174646233,2.23213842011237,25.3290791375723,149.456771471983,19.2158706178263,9911.77711826289,1844.34761386094,558.760751316748,81.1512950098814,41.7549339002996,212.039113610826,0.694851341709288 -11.9347426616697,2.25600965504162,25.329174498451,151.055680884333,19.4214446851286,10124.9485553818,1865.09971617314,564.738451863424,82.0643875116182,42.2016340141033,214.288089074682,0.69477877338268 -11.9338718499205,2.27987914036496,25.3292599782041,152.654425631377,19.6269975811771,10340.3689058318,1885.84917646605,570.71553679055,82.9773637645061,42.648288124043,216.536844039384,0.694708426521255 -11.9330060234746,2.3037468840648,25.3293366013341,154.253010615075,19.8325299362239,10558.0379420646,1906.59610349039,576.692024424647,83.890228553577,43.0948975996184,218.785384330193,0.694640179186923 -11.9321460527067,2.32761289611175,25.3294052860573,155.851440475352,20.038042346831,10777.9554430886,1927.34059866618,582.667932112599,84.8029863413119,43.541463737123,221.03371549886,0.694573918143752 -11.9312926962124,2.35147718821716,25.329466855335,157.449719608536,20.2435353782403,11000.1211941389,1948.08275665167,588.643276290556,85.7156412926734,43.9879877647935,223.281842841516,0.694509538189161 -11.9304466117017,2.37533977360959,25.3295220467605,159.047852184374,20.4490095665623,11224.5349863698,1968.82266586788,594.618072547588,86.6281972981868,44.4344708475659,225.529771415216,0.694446941535517 diff --git a/pyrealm_build_data/two_leaf/__init__.py 
b/pyrealm_build_data/two_leaf/__init__.py new file mode 100644 index 00000000..2c2da074 --- /dev/null +++ b/pyrealm_build_data/two_leaf/__init__.py @@ -0,0 +1,7 @@ +r"""This submodule contains benchmark outputs from an R implementation of the two-leaf, +two-stream model. + +TODO - this module is currently in development and the files here need to be cleaned up +and documented once the implementation has been completed. + +""" # noqa: D205 diff --git a/pyrealm_build_data/uk_data/__init__.py b/pyrealm_build_data/uk_data/__init__.py index 0489a3c0..ff85bef2 100644 --- a/pyrealm_build_data/uk_data/__init__.py +++ b/pyrealm_build_data/uk_data/__init__.py @@ -1 +1,10 @@ -"""Exemplar UK 3D XYT dataset containing P Model forcings.""" +"""This submodule provides P Model forcings for the United Kingdom at 0.5° spatial +resolution and hourly temporal resolution over 2 months (1464 temporal observations). It +is used to demonstrate the use of the subdaily P Model. + +The Python script ``create_2D_uk_inputs.py`` is used to generate the NetCDF output file +``UK_WFDE5_FAPAR_2018_JuneJuly.nc``. The script is currently written with a hard-coded +set of paths to key source data: the WFDE5 v2 climate data and a separate source of +interpolated hourly fAPAR. This should probably be rewritten to generate reproducible +content from publicly available sources of these datasets. +""" # noqa: D205 diff --git a/tests/unit/demography/conftest.py b/tests/unit/demography/conftest.py index 5605db74..735cb4c1 100644 --- a/tests/unit/demography/conftest.py +++ b/tests/unit/demography/conftest.py @@ -63,6 +63,25 @@ def rtmodel_flora(): ) +@pytest.fixture +def fixture_community(): + """A fixture providing a simple community.""" + from pyrealm.demography.community import Community + from pyrealm.demography.flora import Flora, PlantFunctionalType + + # A simple community containing one sample stem, with an initial crown gap fraction + # of zero. + flora = Flora([PlantFunctionalType(name="test", f_g=0.0)]) + return Community( + cell_id=1, + cell_area=100, + flora=flora, + cohort_n_individuals=np.repeat([1], 4), + cohort_pft_names=np.repeat(["test"], 4), + cohort_dbh_values=np.array([0.2, 0.4, 0.6, 0.8]), + ) + + @pytest.fixture def rtmodel_data(): """Loads some simple predictions from the R implementation for testing.""" diff --git a/tests/unit/demography/test_canopy.py b/tests/unit/demography/test_canopy.py index 615f963a..a078fbfe 100644 --- a/tests/unit/demography/test_canopy.py +++ b/tests/unit/demography/test_canopy.py @@ -1,6 +1,7 @@ """Testing the Canopy object.""" import numpy as np +import pytest def test_Canopy__init__(): @@ -31,7 +32,7 @@ def test_Canopy__init__(): ) canopy_gap_fraction = 0.05 - canopy = Canopy(community, canopy_gap_fraction=canopy_gap_fraction) + canopy = Canopy(community, canopy_gap_fraction=canopy_gap_fraction, fit_ppa=True) # Simply check that the shape of the stem leaf area matrix is the right shape n_layers_from_crown_area = int( @@ -46,4 +47,43 @@ def test_Canopy__init__(): / community.cell_area ) ) - assert canopy.stem_leaf_area.shape == (n_layers_from_crown_area, canopy.n_cohorts) + assert canopy.stem_leaf_area.shape == ( + n_layers_from_crown_area, + canopy.n_cohorts, + ) + + +def test_solve_canopy_area_filling_height(fixture_community): + """Test solve_canopy_area_filling_height.
+ + The logic of this test is that given the cumulative sum of the crown areas in the + fixture from tallest to shortest as the target, providing the z_max of each stem as + the height _should_ always return zero, as this is exactly the height at which that + cumulative area would close: crown 1 closes at z_max 1, crown 1 + 2 closes at z_max + 2 and so on. + """ + + from pyrealm.demography.canopy import ( + solve_canopy_area_filling_height, + ) + + for ( + this_height, + this_target, + ) in zip( + np.flip(fixture_community.stem_allometry.crown_z_max), + np.cumsum(np.flip(fixture_community.stem_allometry.crown_area)), + ): + solved = solve_canopy_area_filling_height( + z=this_height, + stem_height=fixture_community.stem_allometry.stem_height, + crown_area=fixture_community.stem_allometry.crown_area, + n_individuals=fixture_community.cohort_data["n_individuals"], + m=fixture_community.stem_traits.m, + n=fixture_community.stem_traits.n, + q_m=fixture_community.stem_traits.q_m, + z_max=fixture_community.stem_allometry.crown_z_max, + target_area=this_target, + ) + + assert solved == pytest.approx(0) diff --git a/tests/unit/demography/test_crown.py b/tests/unit/demography/test_crown.py index 221eac2c..a7cd10ec 100644 --- a/tests/unit/demography/test_crown.py +++ b/tests/unit/demography/test_crown.py @@ -1,4 +1,4 @@ -"""test the functions in canopy_functions.py.""" +"""Test the functions in crown.py.""" from collections import namedtuple from contextlib import nullcontext as does_not_raise @@ -6,26 +6,6 @@ import numpy as np import pytest - -@pytest.fixture -def fixture_community(): - """A fixture providing a simple community.""" - from pyrealm.demography.community import Community - from pyrealm.demography.flora import Flora, PlantFunctionalType - - # A simple community containing one sample stem, with an initial crown gap fraction - # of zero. - flora = Flora([PlantFunctionalType(name="test", f_g=0.0)]) - return Community( - cell_id=1, - cell_area=100, - flora=flora, - cohort_n_individuals=np.repeat([1], 4), - cohort_pft_names=np.repeat(["test"], 4), - cohort_dbh_values=np.array([0.2, 0.4, 0.6, 0.8]), - ) - - ZQZInput = namedtuple( "ZQZInput", ["z", "stem", "more_stem", "q_z", "outcome", "excep_msg", "output_shape"], @@ -466,42 +446,6 @@ def test_calculate_stem_projected_crown_area_at_z_values( ) -def test_solve_community_projected_canopy_area(fixture_community): - """Test solve_community_projected_canopy_area. - - The logic of this test is that given the cumulative sum of the crown areas in the - fixture from tallest to shortest as the target, providing the z_max of each stem as - the height _should_ always return zero, as this is exactly the height at which that - cumulative area would close: crown 1 closes at z_max 1, crown 1 + 2 closes at z_max - 2 and so on. 
- """ - - from pyrealm.demography.crown import ( - solve_community_projected_canopy_area, - ) - - for ( - this_height, - this_target, - ) in zip( - np.flip(fixture_community.stem_allometry.crown_z_max), - np.cumsum(np.flip(fixture_community.stem_allometry.crown_area)), - ): - solved = solve_community_projected_canopy_area( - z=this_height, - stem_height=fixture_community.stem_allometry.stem_height, - crown_area=fixture_community.stem_allometry.crown_area, - n_individuals=fixture_community.cohort_data["n_individuals"], - m=fixture_community.stem_traits.m, - n=fixture_community.stem_traits.n, - q_m=fixture_community.stem_traits.q_m, - z_max=fixture_community.stem_allometry.crown_z_max, - target_area=this_target, - ) - - assert solved == pytest.approx(0) - - @pytest.mark.parametrize( argnames="fixture_z_qz_stem_properties", argvalues=[ diff --git a/tests/unit/pmodel/test_memory_effect.py b/tests/unit/pmodel/test_memory_effect.py index b4d49cc2..d5ab4402 100644 --- a/tests/unit/pmodel/test_memory_effect.py +++ b/tests/unit/pmodel/test_memory_effect.py @@ -54,6 +54,43 @@ def test_memory_effect(inputs, alpha): assert np.allclose(result, expected) +@pytest.mark.parametrize( + argnames="inputs_whole", + argvalues=[ + pytest.param(np.arange(0, 10), id="1D"), + pytest.param( + np.column_stack([np.arange(0, 10)] * 4) + np.arange(4), + id="2D", + ), + pytest.param( + np.dstack([np.column_stack([np.arange(0, 10)] * 4)] * 4) + + np.arange(16).reshape(4, 4), + id="3D", + ), + ], +) +@pytest.mark.parametrize(argnames="alpha", argvalues=(0.0, 0.5, 1.0)) +def test_memory_effect_chunked(inputs_whole, alpha): + """Test that the memory effect works when chunking the time series up. + + This compares the output of `test_memory_effect` with the output of + `memory_effect` which gets the two time chunks fed sequentially., + """ + from pyrealm.pmodel import memory_effect + + result_whole = memory_effect(inputs_whole, alpha=alpha) + + [inputs_chunk1, inputs_chunk2] = np.split(inputs_whole, [5], axis=0) + + result_chunk1 = memory_effect(inputs_chunk1, alpha=alpha) + + result_chunk2 = memory_effect( + inputs_chunk2, previous_values=result_chunk1[-1], alpha=alpha + ) + + assert np.allclose(result_whole[-1], result_chunk2[-1]) + + @pytest.mark.parametrize( argnames="inputs,allow_holdover,context_manager,expected", argvalues=[ diff --git a/tests/unit/pmodel/test_subdaily.py b/tests/unit/pmodel/test_subdaily.py index 0796086d..471f60dd 100644 --- a/tests/unit/pmodel/test_subdaily.py +++ b/tests/unit/pmodel/test_subdaily.py @@ -114,6 +114,90 @@ def test_FSPModel_corr(be_vie_data_components, data_args): assert np.all(r_vals > 0.995) +def test_SubdailyPModel_previous_realised(be_vie_data_components): + """Test the functionality that allows the subdaily model to restart in blocks.""" + + from pyrealm.pmodel import SubdailyScaler + from pyrealm.pmodel.subdaily import SubdailyPModel + + # Run all in one model + env, ppfd, fapar, datetime, _ = be_vie_data_components.get( + mode="crop", start=0, end=17520 + ) + + # Get the fast slow scaler and set window + fsscaler = SubdailyScaler(datetime) + fsscaler.set_window( + window_center=np.timedelta64(12, "h"), + half_width=np.timedelta64(30, "m"), + ) + + # Run as a subdaily model using the kphio used in the reference implementation. 
+ all_in_one_subdaily_pmodel = SubdailyPModel( + env=env, + ppfd=ppfd, + fapar=fapar, + reference_kphio=1 / 8, + fs_scaler=fsscaler, + allow_holdover=True, + ) + + # Run first half of year + env1, ppfd1, fapar1, datetime1, _ = be_vie_data_components.get( + mode="crop", start=0, end=182 * 48 + ) + + # Get the fast slow scaler and set window + fsscaler1 = SubdailyScaler(datetime1) + fsscaler1.set_window( + window_center=np.timedelta64(12, "h"), + half_width=np.timedelta64(30, "m"), + ) + + # Run as a subdaily model using the kphio used in the reference implementation. + part_1_subdaily_pmodel = SubdailyPModel( + env=env1, + ppfd=ppfd1, + fapar=fapar1, + reference_kphio=1 / 8, + fs_scaler=fsscaler1, + allow_holdover=True, + ) + + # Run second year + env2, ppfd2, fapar2, datetime2, _ = be_vie_data_components.get( + mode="crop", start=182 * 48, end=365 * 48 + ) + + # Get the fast slow scaler and set window + fsscaler2 = SubdailyScaler(datetime2) + fsscaler2.set_window( + window_center=np.timedelta64(12, "h"), + half_width=np.timedelta64(30, "m"), + ) + + # Run as a subdaily model using the kphio used in the reference implementation. + part_2_subdaily_pmodel = SubdailyPModel( + env=env2, + ppfd=ppfd2, + fapar=fapar2, + reference_kphio=1 / 8, + fs_scaler=fsscaler2, + allow_holdover=True, + previous_realised=( + part_1_subdaily_pmodel.optimal_chi.xi[-1], + part_1_subdaily_pmodel.vcmax25_real[-1], + part_1_subdaily_pmodel.jmax25_real[-1], + ), + ) + + assert np.allclose( + all_in_one_subdaily_pmodel.gpp, + np.concat([part_1_subdaily_pmodel.gpp, part_2_subdaily_pmodel.gpp]), + equal_nan=True, + ) + + @pytest.mark.parametrize("ndims", [2, 3, 4]) def test_FSPModel_dimensionality(be_vie_data, ndims): """Tests that the SubdailyPModel handles dimensions correctly. diff --git a/tests/unit/pmodel/test_fast_slow_scaler.py b/tests/unit/pmodel/test_subdailyscaler.py similarity index 79% rename from tests/unit/pmodel/test_fast_slow_scaler.py rename to tests/unit/pmodel/test_subdailyscaler.py index 1dfc1758..977095dc 100644 --- a/tests/unit/pmodel/test_fast_slow_scaler.py +++ b/tests/unit/pmodel/test_subdailyscaler.py @@ -11,7 +11,7 @@ @pytest.fixture -def fixture_FSS(): +def fixture_SubdailyScaler(): """A fixture providing a SubdailyScaler object.""" from pyrealm.pmodel import SubdailyScaler @@ -114,7 +114,7 @@ def fixture_FSS(): ), ], ) -def test_FSS_init(ctext_mngr, msg, datetimes): +def test_SubdailyScaler_init(ctext_mngr, msg, datetimes): """Test the SubdailyScaler init handling of date ranges.""" from pyrealm.pmodel import SubdailyScaler @@ -162,11 +162,13 @@ def test_FSS_init(ctext_mngr, msg, datetimes): ), ], ) -def test_FSS_set_window(fixture_FSS, ctext_mngr, msg, kwargs, samp_mean, samp_max): +def test_SubdailyScaler_set_window( + fixture_SubdailyScaler, ctext_mngr, msg, kwargs, samp_mean, samp_max +): """Test the SubdailyScaler set_window method.""" with ctext_mngr as cman: - fixture_FSS.set_window(**kwargs) + fixture_SubdailyScaler.set_window(**kwargs) if msg is not None: assert str(cman.value) == msg @@ -174,8 +176,8 @@ def test_FSS_set_window(fixture_FSS, ctext_mngr, msg, kwargs, samp_mean, samp_ma # Check that _set_times has run correctly. 
Can't use allclose directly on # datetimes and since these are integers under the hood, don't need float # testing - assert np.all(fixture_FSS.sample_datetimes_mean == samp_mean) - assert np.all(fixture_FSS.sample_datetimes_max == samp_max) + assert np.all(fixture_SubdailyScaler.sample_datetimes_mean == samp_mean) + assert np.all(fixture_SubdailyScaler.sample_datetimes_max == samp_max) @pytest.mark.parametrize( @@ -227,10 +229,12 @@ def test_FSS_set_window(fixture_FSS, ctext_mngr, msg, kwargs, samp_mean, samp_ma ), ], ) -def test_FSS_set_include(fixture_FSS, ctext_mngr, msg, include, samp_mean, samp_max): +def test_SubdailyScaler_set_include( + fixture_SubdailyScaler, ctext_mngr, msg, include, samp_mean, samp_max +): """Test the SubdailyScaler set_include method.""" with ctext_mngr as cman: - fixture_FSS.set_include(include) + fixture_SubdailyScaler.set_include(include) if msg is not None: assert str(cman.value) == msg @@ -239,8 +243,8 @@ def test_FSS_set_include(fixture_FSS, ctext_mngr, msg, include, samp_mean, samp_ # Check that _set_times has run correctly. Can't use allclose directly on # datetimes and since these are integers under the hood, don't need float # testing - assert np.all(fixture_FSS.sample_datetimes_mean == samp_mean) - assert np.all(fixture_FSS.sample_datetimes_max == samp_max) + assert np.all(fixture_SubdailyScaler.sample_datetimes_mean == samp_mean) + assert np.all(fixture_SubdailyScaler.sample_datetimes_max == samp_max) @pytest.mark.parametrize( @@ -290,10 +294,12 @@ def test_FSS_set_include(fixture_FSS, ctext_mngr, msg, include, samp_mean, samp_ ), ], ) -def test_FSS_set_nearest(fixture_FSS, ctext_mngr, msg, time, samp_mean, samp_max): +def test_SubdailyScaler_set_nearest( + fixture_SubdailyScaler, ctext_mngr, msg, time, samp_mean, samp_max +): """Test the SubdailyScaler set_nearest method.""" with ctext_mngr as cman: - fixture_FSS.set_nearest(time) + fixture_SubdailyScaler.set_nearest(time) if msg is not None: assert str(cman.value) == msg @@ -302,8 +308,8 @@ def test_FSS_set_nearest(fixture_FSS, ctext_mngr, msg, time, samp_mean, samp_max # Check that _set_times has run correctly. Can't use allclose directly on # datetimes and since these are integers under the hood, don't need float # testing - assert np.all(fixture_FSS.sample_datetimes_mean == samp_mean) - assert np.all(fixture_FSS.sample_datetimes_max == samp_max) + assert np.all(fixture_SubdailyScaler.sample_datetimes_mean == samp_mean) + assert np.all(fixture_SubdailyScaler.sample_datetimes_max == samp_max) @pytest.mark.parametrize( @@ -317,15 +323,15 @@ def test_FSS_set_nearest(fixture_FSS, ctext_mngr, msg, time, samp_mean, samp_max ), ], ) -def test_FSS_get_wv_errors(fixture_FSS, ctext_mngr, msg, values): +def test_SubdailyScaler_get_wv_errors(fixture_SubdailyScaler, ctext_mngr, msg, values): """Test errors arising in the SubdailyScaler get_window_value method.""" - fixture_FSS.set_window( + fixture_SubdailyScaler.set_window( window_center=np.timedelta64(12, "h"), half_width=np.timedelta64(2, "h"), ) with ctext_mngr as cman: - _ = fixture_FSS.get_window_values(values) + _ = fixture_SubdailyScaler.get_window_values(values) assert str(cman.value) == msg @@ -488,8 +494,8 @@ def test_FSS_get_wv_errors(fixture_FSS, ctext_mngr, msg, values): ), ], ) -class Test_FSS_get_vals_window_and_include: - """Test FSS get methods for set_window and set_include. +class Test_SubdailyScaler_get_vals_window_and_include: + """Test SubdailyScaler get methods for set_window and set_include. 
The daily values extracted using the set_window and set_include methods can be the same, by setting the window and the include to cover the same observations, so these @@ -509,30 +515,30 @@ class Test_FSS_get_vals_window_and_include: are np.nan - this should revert to setting np.nan in the first day. """ - def test_FSS_get_vals_window( - self, fixture_FSS, values, expected_means, allow_partial_data + def test_SubdailyScaler_get_vals_window( + self, fixture_SubdailyScaler, values, expected_means, allow_partial_data ): """Test a window.""" - fixture_FSS.set_window( + fixture_SubdailyScaler.set_window( window_center=np.timedelta64(12, "h"), half_width=np.timedelta64(2, "h"), ) - calculated_means = fixture_FSS.get_daily_means( + calculated_means = fixture_SubdailyScaler.get_daily_means( values, allow_partial_data=allow_partial_data ) assert np.allclose(calculated_means, expected_means, equal_nan=True) - def test_FSS_get_vals_include( - self, fixture_FSS, values, expected_means, allow_partial_data + def test_SubdailyScaler_get_vals_include( + self, fixture_SubdailyScaler, values, expected_means, allow_partial_data ): """Test include.""" # This duplicates the selection of the window test but using direct include inc = np.zeros(48, dtype=np.bool_) inc[20:29] = True - fixture_FSS.set_include(inc) - calculated_means = fixture_FSS.get_daily_means( + fixture_SubdailyScaler.set_include(inc) + calculated_means = fixture_SubdailyScaler.get_daily_means( values, allow_partial_data=allow_partial_data ) @@ -607,7 +613,9 @@ def test_FSS_get_vals_include( ), ], ) -def test_FSS_get_vals_nearest(fixture_FSS, values, expected_means): +def test_SubdailyScaler_get_vals_nearest( + fixture_SubdailyScaler, values, expected_means +): """Test get_daily_values. This tests the specific behaviour when set_nearest is used and a single observation @@ -616,8 +624,8 @@ def test_FSS_get_vals_nearest(fixture_FSS, values, expected_means): """ # Select the 11:30 observation, which is missing in PARTIAL_ONES and PARTIAL_VARYING - fixture_FSS.set_nearest(np.timedelta64(11 * 60 + 29, "m")) - calculated_means = fixture_FSS.get_daily_means(values) + fixture_SubdailyScaler.set_nearest(np.timedelta64(11 * 60 + 29, "m")) + calculated_means = fixture_SubdailyScaler.get_daily_means(values) assert np.allclose(calculated_means, expected_means, equal_nan=True) @@ -678,27 +686,37 @@ def test_FSS_get_vals_nearest(fixture_FSS, values, expected_means): ], ) @pytest.mark.parametrize( - argnames=["ctext_mngr", "msg", "input_values", "exp_values", "fill_from"], + argnames=["input_values", "exp_values", "fill_from", "previous_value"], argvalues=[ pytest.param( - does_not_raise(), - None, np.array([1, 2, 3]), np.repeat([np.nan, 1, 2, 3], (26, 48, 48, 22)), None, + None, id="1D test", ), pytest.param( - does_not_raise(), - None, np.array([1, 2, 3]), np.repeat([1, 2, 3], (48, 48, 48)), np.timedelta64(0, "h"), + None, id="1D test - fill from", ), pytest.param( - does_not_raise(), + np.array([1, 2, 3]), + np.repeat([0, 1, 2, 3], (26, 48, 48, 22)), + None, + np.array([0]), + id="1D test - previous value 1D", + ), + pytest.param( + np.array([1, 2, 3]), + np.repeat([0, 1, 2, 3], (26, 48, 48, 22)), None, + np.array(0), + id="1D test - previous value 0D", + ), + pytest.param( np.array([[[1, 4], [7, 10]], [[2, 5], [8, 11]], [[3, 6], [9, 12]]]), np.repeat( a=[ @@ -711,11 +729,10 @@ def test_FSS_get_vals_nearest(fixture_FSS, values, expected_means): axis=0, ), None, - id="2D test", + None, + id="3D test", ), pytest.param( - does_not_raise(), - None, 
np.array([[[1, 4], [7, 10]], [[2, 5], [8, 11]], [[3, 6], [9, 12]]]), np.repeat( a=[ @@ -728,11 +745,26 @@ def test_FSS_get_vals_nearest(fixture_FSS, values, expected_means): axis=0, ), np.timedelta64(2, "h"), - id="2D test - fill from", + None, + id="3D test - fill from", ), pytest.param( - does_not_raise(), + np.array([[[1, 4], [7, 10]], [[2, 5], [8, 11]], [[3, 6], [9, 12]]]), + np.repeat( + a=[ + [[0, 3], [6, 9]], + [[1, 4], [7, 10]], + [[2, 5], [8, 11]], + [[3, 6], [9, 12]], + ], + repeats=[26, 48, 48, 22], + axis=0, + ), None, + np.array([[0, 3], [6, 9]]), + id="3D test - previous value 2D", + ), + pytest.param( np.array([[1, 4], [2, 5], [3, 6]]), np.repeat( a=[[np.nan, np.nan], [1, 4], [2, 5], [3, 6]], @@ -740,37 +772,41 @@ def test_FSS_get_vals_nearest(fixture_FSS, values, expected_means): axis=0, ), None, - id="3D test", + None, + id="2D test", ), ], ) -def test_FSS_resample_subdaily( - fixture_FSS, +def test_SubdailyScaler_fill_daily_to_subdaily_previous( + fixture_SubdailyScaler, method_name, kwargs, update_point, - ctext_mngr, - msg, input_values, exp_values, fill_from, + previous_value, ): - """Test the calculation of subdaily samples using SubdailyScaler.""" + """Test fill_daily_to_subdaily using SubdailyScaler with method previous. + + The first parameterisation sets the exact same acclimation windows in several + different ways. The second parameterisation provides inputs with different + dimensionality. + """ # Set the included observations - the different parameterisations here and for # the update point should all select the same update point. - func = getattr(fixture_FSS, method_name) + func = getattr(fixture_SubdailyScaler, method_name) func(**kwargs) - with ctext_mngr as cman: - res = fixture_FSS.fill_daily_to_subdaily( - input_values, update_point=update_point, fill_from=fill_from - ) + res = fixture_SubdailyScaler.fill_daily_to_subdaily( + input_values, + update_point=update_point, + fill_from=fill_from, + previous_value=previous_value, + ) - if cman is not None: - assert str(cman.value) == msg - else: - assert np.allclose(res, exp_values, equal_nan=True) + assert np.allclose(res, exp_values, equal_nan=True) @pytest.mark.parametrize( @@ -829,21 +865,91 @@ def test_FSS_resample_subdaily( ), ], ) -def test_FSS_resample_subdaily_linear( - fixture_FSS, +def test_SubdailyScaler_fill_daily_to_subdaily_linear( + fixture_SubdailyScaler, update_point, input_values, exp_values, ): - """Test SubdailyScaler resampling to subdaily timescale by linear interpolation.""" + """Test fill_daily_to_subdaily using SubdailyScaler with method linear.""" # Set the included observations - fixture_FSS.set_window( + fixture_SubdailyScaler.set_window( window_center=np.timedelta64(13, "h"), half_width=np.timedelta64(1, "h") ) - res = fixture_FSS.fill_daily_to_subdaily( + res = fixture_SubdailyScaler.fill_daily_to_subdaily( input_values, update_point=update_point, kind="linear" ) assert np.allclose(res, exp_values, equal_nan=True) + + +@pytest.mark.parametrize( + argnames="inputs, outcome, msg", + argvalues=[ + pytest.param( + {"values": np.arange(12)}, + pytest.raises(ValueError), + "Values is not of length n_days on its first axis", + id="values wrong shape", + ), + pytest.param( + {"values": np.arange(3), "fill_from": 3}, + pytest.raises(ValueError), + "The fill_from argument must be a timedelta64 value", + id="fill_from not timedelta64", + ), + pytest.param( + {"values": np.arange(3), "fill_from": np.timedelta64(12, "D")}, + pytest.raises(ValueError), + "The fill_from argument is not >= 0
and < 24 hours", + id="fill_from too large", + ), + pytest.param( + {"values": np.arange(3), "fill_from": np.timedelta64(-1, "s")}, + pytest.raises(ValueError), + "The fill_from argument is not >= 0 and < 24 hours", + id="fill_from negative", + ), + pytest.param( + {"values": np.arange(3), "update_point": "noon"}, + pytest.raises(ValueError), + "Unknown update point", + id="unknown update point", + ), + pytest.param( + {"values": np.arange(3), "previous_value": np.array(1), "kind": "linear"}, + pytest.raises(NotImplementedError), + "Using previous value with kind='linear' is not implemented", + id="previous_value with linear", + ), + pytest.param( + {"values": np.arange(3), "previous_value": np.ones(4)}, + pytest.raises(ValueError), + "The input to previous_value is not congruent with " + "the shape of the observed data", + id="previous_value shape issue", + ), + pytest.param( + {"values": np.arange(3), "kind": "quadratic"}, + pytest.raises(ValueError), + "Unsupported interpolation option", + id="unsupported interpolation", + ), + ], +) +def test_SubdailyScaler_fill_daily_to_subdaily_failure_modes( + fixture_SubdailyScaler, inputs, outcome, msg +): + """Test fill_daily_to_subdaily using SubdailyScaler with method linear.""" + + # Set the included observations + fixture_SubdailyScaler.set_window( + window_center=np.timedelta64(13, "h"), half_width=np.timedelta64(1, "h") + ) + + with outcome as excep: + _ = fixture_SubdailyScaler.fill_daily_to_subdaily(**inputs) + + assert str(excep.value) == msg