diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 00000000..a08741e4 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 35dbdb2b27cad3442b8a3d7e176c0c97 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.doctrees/community/code-of-conduct.doctree b/.doctrees/community/code-of-conduct.doctree new file mode 100644 index 00000000..097631f8 Binary files /dev/null and b/.doctrees/community/code-of-conduct.doctree differ diff --git a/.doctrees/community/contributing.doctree b/.doctrees/community/contributing.doctree new file mode 100644 index 00000000..fa33d433 Binary files /dev/null and b/.doctrees/community/contributing.doctree differ diff --git a/.doctrees/community/faq.doctree b/.doctrees/community/faq.doctree new file mode 100644 index 00000000..5e3d4c62 Binary files /dev/null and b/.doctrees/community/faq.doctree differ diff --git a/.doctrees/community/index.doctree b/.doctrees/community/index.doctree new file mode 100644 index 00000000..88bf7090 Binary files /dev/null and b/.doctrees/community/index.doctree differ diff --git a/.doctrees/community/roadmap.doctree b/.doctrees/community/roadmap.doctree new file mode 100644 index 00000000..ae77b5ba Binary files /dev/null and b/.doctrees/community/roadmap.doctree differ diff --git a/.doctrees/contact.doctree b/.doctrees/contact.doctree new file mode 100644 index 00000000..40e68713 Binary files /dev/null and b/.doctrees/contact.doctree differ diff --git a/.doctrees/environment.pickle b/.doctrees/environment.pickle new file mode 100644 index 00000000..f007fb3d Binary files /dev/null and b/.doctrees/environment.pickle differ diff --git a/.doctrees/getting-started/extending.doctree b/.doctrees/getting-started/extending.doctree new file mode 100644 index 00000000..c86c1ad6 Binary files /dev/null and b/.doctrees/getting-started/extending.doctree differ diff --git a/.doctrees/getting-started/index.doctree b/.doctrees/getting-started/index.doctree new file mode 100644 index 00000000..e9bce49b Binary files /dev/null and b/.doctrees/getting-started/index.doctree differ diff --git a/.doctrees/getting-started/installation.doctree b/.doctrees/getting-started/installation.doctree new file mode 100644 index 00000000..aecfffbc Binary files /dev/null and b/.doctrees/getting-started/installation.doctree differ diff --git a/.doctrees/getting-started/tutorials/index.doctree b/.doctrees/getting-started/tutorials/index.doctree new file mode 100644 index 00000000..13666b00 Binary files /dev/null and b/.doctrees/getting-started/tutorials/index.doctree differ diff --git a/.doctrees/getting-started/tutorials/quickstart.doctree b/.doctrees/getting-started/tutorials/quickstart.doctree new file mode 100644 index 00000000..b1ea678a Binary files /dev/null and b/.doctrees/getting-started/tutorials/quickstart.doctree differ diff --git a/.doctrees/index.doctree b/.doctrees/index.doctree new file mode 100644 index 00000000..f32bc084 Binary files /dev/null and b/.doctrees/index.doctree differ diff --git a/.doctrees/reference/active_learning/acquisition_fns.doctree b/.doctrees/reference/active_learning/acquisition_fns.doctree new file mode 100644 index 00000000..478accb5 Binary files /dev/null and b/.doctrees/reference/active_learning/acquisition_fns.doctree differ diff --git a/.doctrees/reference/active_learning/algorithms.doctree b/.doctrees/reference/active_learning/algorithms.doctree new file mode 100644 
index 00000000..f1d1ecd4 Binary files /dev/null and b/.doctrees/reference/active_learning/algorithms.doctree differ diff --git a/.doctrees/reference/active_learning/index.doctree b/.doctrees/reference/active_learning/index.doctree new file mode 100644 index 00000000..17ccc370 Binary files /dev/null and b/.doctrees/reference/active_learning/index.doctree differ diff --git a/.doctrees/reference/data/index.doctree b/.doctrees/reference/data/index.doctree new file mode 100644 index 00000000..e96cdd51 Binary files /dev/null and b/.doctrees/reference/data/index.doctree differ diff --git a/.doctrees/reference/data/loader.doctree b/.doctrees/reference/data/loader.doctree new file mode 100644 index 00000000..4c6d7331 Binary files /dev/null and b/.doctrees/reference/data/loader.doctree differ diff --git a/.doctrees/reference/data/processor.doctree b/.doctrees/reference/data/processor.doctree new file mode 100644 index 00000000..71e3732f Binary files /dev/null and b/.doctrees/reference/data/processor.doctree differ diff --git a/.doctrees/reference/data/task.doctree b/.doctrees/reference/data/task.doctree new file mode 100644 index 00000000..46ead1f7 Binary files /dev/null and b/.doctrees/reference/data/task.doctree differ diff --git a/.doctrees/reference/data/utils.doctree b/.doctrees/reference/data/utils.doctree new file mode 100644 index 00000000..6422b78f Binary files /dev/null and b/.doctrees/reference/data/utils.doctree differ diff --git a/.doctrees/reference/index.doctree b/.doctrees/reference/index.doctree new file mode 100644 index 00000000..9d3fa0ac Binary files /dev/null and b/.doctrees/reference/index.doctree differ diff --git a/.doctrees/reference/model/convnp.doctree b/.doctrees/reference/model/convnp.doctree new file mode 100644 index 00000000..59e566b1 Binary files /dev/null and b/.doctrees/reference/model/convnp.doctree differ diff --git a/.doctrees/reference/model/defaults.doctree b/.doctrees/reference/model/defaults.doctree new file mode 100644 index 00000000..f47ae801 Binary files /dev/null and b/.doctrees/reference/model/defaults.doctree differ diff --git a/.doctrees/reference/model/index.doctree b/.doctrees/reference/model/index.doctree new file mode 100644 index 00000000..ccdd641f Binary files /dev/null and b/.doctrees/reference/model/index.doctree differ diff --git a/.doctrees/reference/model/model.doctree b/.doctrees/reference/model/model.doctree new file mode 100644 index 00000000..eb432ed6 Binary files /dev/null and b/.doctrees/reference/model/model.doctree differ diff --git a/.doctrees/reference/model/nps.doctree b/.doctrees/reference/model/nps.doctree new file mode 100644 index 00000000..22d9c459 Binary files /dev/null and b/.doctrees/reference/model/nps.doctree differ diff --git a/.doctrees/reference/plot.doctree b/.doctrees/reference/plot.doctree new file mode 100644 index 00000000..5f2854a7 Binary files /dev/null and b/.doctrees/reference/plot.doctree differ diff --git a/.doctrees/reference/tensorflow/index.doctree b/.doctrees/reference/tensorflow/index.doctree new file mode 100644 index 00000000..38cc178d Binary files /dev/null and b/.doctrees/reference/tensorflow/index.doctree differ diff --git a/.doctrees/reference/torch/index.doctree b/.doctrees/reference/torch/index.doctree new file mode 100644 index 00000000..18a04903 Binary files /dev/null and b/.doctrees/reference/torch/index.doctree differ diff --git a/.doctrees/reference/train/index.doctree b/.doctrees/reference/train/index.doctree new file mode 100644 index 00000000..2947ec05 Binary files /dev/null and 
b/.doctrees/reference/train/index.doctree differ diff --git a/.doctrees/reference/train/train.doctree b/.doctrees/reference/train/train.doctree new file mode 100644 index 00000000..2fccaed0 Binary files /dev/null and b/.doctrees/reference/train/train.doctree differ diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/_sources/community/code-of-conduct.rst.txt b/_sources/community/code-of-conduct.rst.txt new file mode 100644 index 00000000..bdb71d14 --- /dev/null +++ b/_sources/community/code-of-conduct.rst.txt @@ -0,0 +1,6 @@ +========================= +Developer Code of Conduct +========================= + +.. + TODO: write a code of conduct for DeepSensor/mirror the CODE-OF-CONDUCT.md document? diff --git a/_sources/community/contributing.rst.txt b/_sources/community/contributing.rst.txt new file mode 100644 index 00000000..c6b6575d --- /dev/null +++ b/_sources/community/contributing.rst.txt @@ -0,0 +1,6 @@ +========================== +Contributing to DeepSensor +========================== + +.. + TODO: write an intro to contributing to DeepSensor/mirror the CONTRIBUTE.md document? diff --git a/_sources/community/faq.rst.txt b/_sources/community/faq.rst.txt new file mode 100644 index 00000000..e3927c1b --- /dev/null +++ b/_sources/community/faq.rst.txt @@ -0,0 +1,104 @@ +============= +Community FAQ +============= + +This FAQ aims to answer common questions about the DeepSensor library. It is our way to streamline the onboarding process and clarify expectations. + +.. note:: + + If you have a question that is not answered here, please open an issue or submit a pull request. + +Questions +--------- + +**Q: What is the purpose of this project?** + +**Answer:** DeepSensor is an open-source Python package for modelling environmental data with neural processes. It is aimed at researchers and practitioners who want the flexibility of neural processes behind a simple ``xarray``/``pandas`` interface. + +--- + +**Q: How can I contribute?** + +**Answer:** There are many ways to contribute, from writing code and fixing bugs to improving documentation or translating content. Check our :doc:`contribution guide ` for detailed steps. + +--- + +**Q: Where can I find the contribution guidelines?** + +**Answer:** You can find our contribution guidelines in the CONTRIBUTING.md file in the root directory of the repository, or in the :doc:`contribution guide `. + +--- + +**Q: Do I need to sign a Contributor License Agreement (CLA)?** + +**Answer:** At the current time, we do not require a CLA from our contributors. + +--- + +**Q: How do I report a bug?** + +**Answer:** Please submit an issue in our GitHub repository. Make sure to provide detailed information, including steps to reproduce the bug and the expected outcome. + +--- + +**Q: How do I request a new feature?** + +**Answer:** Open a new issue on our GitHub repository and label it as a feature request. Describe the feature in detail and its potential benefits. + +--- + +**Q: How do I set up the development environment?** + +**Answer:** Follow the instructions in our developer documentation. If you run into issues, ask in our community chat (on Slack) or :doc:`contact the core group of maintainers directly `. + +--- + +**Q: Do you have a code of conduct?** + +**Answer:** Yes, we value a respectful and inclusive community. Please read our :doc:`Code of Conduct ` before contributing. + +--- + +**Q: How can I get in touch with other contributors or maintainers?** + +**Answer:** Join our Slack team to stay in touch with other contributors and maintainers.
We also have a standing community meeting, which is by invite, so :doc:`get in touch with the core group of maintainers ` to receive an invite. + +--- + +**Q: Can I contribute even if I'm not a coder?** + +**Answer:** Absolutely! Contributions can be made in the form of documentation, design, testing, and more. Everyone's skills are valuable. Join our Slack discussion to learn more. + +--- + +**Q: How do I claim an issue to work on?** + +**Answer:** Comment on the issue to express your interest in helping out. If the issue is unassigned, a maintainer will likely assign it to you. + +--- + +**Q: What's the process for proposing a significant change?** + +**Answer:** For significant changes, it's good practice to first open a discussion or issue to gather feedback. Once there's a consensus, you can proceed with a pull request. + +--- + +**Q: How can I get my pull request (PR) merged?** + +**Answer:** Ensure your PR follows the contribution guidelines, passes all tests, and has been reviewed by at least one maintainer. Address any feedback provided. + +--- + +**Q: What's the project's release cycle?** + +**Answer:** We release updates in a rolling manner, following our roadmap planning. Critical bug fixes might be released as needed. + +--- + +**Q: How is credit given to contributors?** + +**Answer:** Contributors are acknowledged in our release notes, and their contributions are forever recorded in the project's history. + + +.. + TODO: write more questions and answers here. diff --git a/_sources/community/index.rst.txt b/_sources/community/index.rst.txt new file mode 100644 index 00000000..e4816243 --- /dev/null +++ b/_sources/community/index.rst.txt @@ -0,0 +1,18 @@ +=========================================== +DeepSensor's user and contributor community +=========================================== + +The DeepSensor community is a group of users and contributors interested in the development of DeepSensor. It is open to anyone, and it is a place to ask questions, discuss ideas, and share your work. + +If you are interested in joining the community, please join our Slack channel at https://deepsensor.slack.com. You can request an invitation to the Slack channel at `https://ai4environment.slack.com/signup`. + +We welcome contributions from the community. If you are interested in contributing to DeepSensor, please read the :doc:`contributing` guide. + +.. toctree:: + :maxdepth: 1 + :caption: Table of contents: + + faq + contributing + code-of-conduct + roadmap diff --git a/_sources/community/roadmap.rst.txt b/_sources/community/roadmap.rst.txt new file mode 100644 index 00000000..e67c7eaf --- /dev/null +++ b/_sources/community/roadmap.rst.txt @@ -0,0 +1,6 @@ +================== +DeepSensor Roadmap +================== + +.. + TODO: write an intro to the roadmap for DeepSensor + link to the GitHub project management board diff --git a/_sources/contact.rst.txt b/_sources/contact.rst.txt new file mode 100644 index 00000000..6f6516ef --- /dev/null +++ b/_sources/contact.rst.txt @@ -0,0 +1,7 @@ +Contact the developers +====================== + +tomand@bas.ac.uk + +.. + TODO: Add contact information here... \ No newline at end of file diff --git a/_sources/getting-started/extending.rst.txt b/_sources/getting-started/extending.rst.txt new file mode 100644 index 00000000..5beef9f7 --- /dev/null +++ b/_sources/getting-started/extending.rst.txt @@ -0,0 +1,34 @@ +==================================== +Extending DeepSensor with new models +==================================== + +To extend DeepSensor with a new model, simply create a new class that inherits from ``deepsensor.model.DeepSensorModel`` and implement the low-level prediction methods defined in ``deepsensor.model.ProbabilisticModel``, such as ``.mean`` and ``.stddev``. + +In this example, we'll create a new model called ``ExampleModel``: + +.. code-block:: python + + import numpy as np + + from deepsensor.data.loader import TaskLoader + from deepsensor.data.processor import DataProcessor + from deepsensor.data.task import Task + from deepsensor.model.model import DeepSensorModel + + class ExampleModel(DeepSensorModel): + """ + A very naive model that predicts the mean of the first context set + with a fixed stddev. + """ + + def __init__(self, data_processor: DataProcessor, task_loader: TaskLoader): + # Initialise the parent class (DeepSensorModel) with the + # provided data processor and task loader: + super().__init__(data_processor, task_loader) + + def mean(self, task: Task): + """Compute mean at target locations""" + return np.mean(task["Y_c"][0]) + + def stddev(self, task: Task): + """Compute stddev at target locations""" + return 0.1 + + ... + +After creating ``ExampleModel`` in this way, it can be used in the same way as the built-in :class:`~deepsensor.model.convnp.ConvNP` model. +
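+Before wiring it into a larger workflow, you can sanity-check the new model end-to-end. A minimal sketch, assuming ``data_processor``, ``task_loader`` and ``ds_raw`` have been set up as in the quickstart tutorial:
+
+.. code-block:: python
+
+    # Instantiate the naive model and predict on a held-out task,
+    # exactly as you would with the built-in ConvNP
+    model = ExampleModel(data_processor, task_loader)
+    test_task = task_loader("2014-12-31", context_sampling=0.1)
+    mean_ds, std_ds = model.predict(test_task, X_t=ds_raw)
+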
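+Whichever method you choose (installing from source is described next), you can verify the install by importing DeepSensor with your chosen backend. A minimal smoke test — shown here for the PyTorch backend, assuming nothing beyond the install steps above:
+
+.. code-block:: python
+
+    import deepsensor.torch  # selects the PyTorch backend
+    # (swap in `import deepsensor.tensorflow` if you installed TensorFlow)
+
+``pip show deepsensor`` will report the installed version.
+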
Install from `source `_ --------------------------------------------------------------------- + +.. note:: + + You will want to use this method if you intend to contribute to the source code of DeepSensor. + +If you want to keep up with the latest changes to DeepSensor, or want/need easy access to the worked examples or the package's source code, we recommend installing from source. + +This method will create a ``DeepSensor`` directory on your machine which will contain all the source code, docs and worked examples. + +- Clone the repository: + + .. code-block:: bash + + git clone + +- Install ``deepsensor``: + + .. code-block:: bash + + pip install -v -e . + +- Install the backend of your choice: + + - Install ``tensorflow``: + + .. code-block:: bash + + pip install tensorflow + + - Install ``pytorch``: + + .. code-block:: bash + + pip install torch diff --git a/_sources/getting-started/tutorials/index.rst.txt b/_sources/getting-started/tutorials/index.rst.txt new file mode 100644 index 00000000..9bfe997a --- /dev/null +++ b/_sources/getting-started/tutorials/index.rst.txt @@ -0,0 +1,9 @@ +========= +Tutorials +========= + +.. toctree:: + :maxdepth: 2 + :caption: Tutorials: + + quickstart \ No newline at end of file diff --git a/_sources/getting-started/tutorials/quickstart.rst.txt b/_sources/getting-started/tutorials/quickstart.rst.txt new file mode 100644 index 00000000..2da7f8d2 --- /dev/null +++ b/_sources/getting-started/tutorials/quickstart.rst.txt @@ -0,0 +1,84 @@ +==================== +Tutorial: Quickstart +==================== + +Here we will demonstrate a simple example of training a convolutional conditional neural process (ConvCNP) to spatially interpolate ERA5 data. + +We can go from imports to predictions with a trained model in less than 30 lines of code! + +.. code-block:: python + + import deepsensor.torch + from deepsensor.data.loader import TaskLoader + from deepsensor.data.processor import DataProcessor + from deepsensor.model.convnp import ConvNP + from deepsensor.train.train import train_epoch + + import xarray as xr + import pandas as pd + import numpy as np + + # Load raw data + ds_raw = xr.tutorial.open_dataset("air_temperature") + + # Normalise data + data_processor = DataProcessor(x1_name="lat", x1_map=(15, 75), x2_name="lon", x2_map=(200, 330)) + ds = data_processor(ds_raw) + + # Set up task loader + task_loader = TaskLoader(context=ds, target=ds) + + # Set up model + model = ConvNP(data_processor, task_loader) + + # Generate training tasks with up to 10% of grid cells passed as context and all grid cells + # passed as targets + train_tasks = [] + for date in pd.date_range("2013-01-01", "2014-11-30")[::7]: + task = task_loader(date, context_sampling=np.random.uniform(0.0, 0.1), target_sampling="all") + train_tasks.append(task) + + # Train model + for epoch in range(10): + train_epoch(model, train_tasks, progress_bar=True) + + # Predict on new task with 10% of context data and a dense grid of target points + test_task = task_loader("2014-12-31", 0.1) + mean_ds, std_ds = model.predict(test_task, X_t=ds_raw) + +After training, the model can predict directly to ``xarray`` in your data's original units and coordinate system: + +.. code-block:: python + + >>> mean_ds + <xarray.Dataset> + Dimensions: (time: 1, lat: 25, lon: 53) + Coordinates: + * time (time) datetime64[ns] 2014-12-31 + * lat (lat) float32 75.0 72.5 70.0 67.5 65.0 ... 25.0 22.5 20.0 17.5 15.0 + * lon (lon) float32 200.0 202.5 205.0 207.5 ... 322.5 325.0 327.5 330.0 + Data variables: + air (time, lat, lon) float32 246.7 244.4 245.5 ... 290.2 289.8 289.4
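+Because these predictions are ordinary ``xarray`` objects, the usual ``xarray`` operations apply directly. For example, a rough check of the interpolation error against the raw field on the test date — a sketch using only the variables defined above, not part of the original tutorial:
+
+.. code-block:: python
+
+    # Mean absolute error between the predicted mean and the raw air
+    # temperature on the held-out date (plain xarray arithmetic;
+    # alignment keeps only the matching timestamp)
+    mae = np.abs(mean_ds["air"] - ds_raw["air"].sel(time="2014-12-31")).mean()
+    print(float(mae))
+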
We can also predict directly to ``pandas``, containing a timeseries of predictions at off-grid locations, by passing a ``numpy`` array of target locations to the ``X_t`` argument of ``.predict``: + +.. code-block:: python + + # Predict at two off-grid locations for each day in December 2014 + test_tasks = task_loader(pd.date_range("2014-12-01", "2014-12-31"), 0.1) + mean_df, std_df = model.predict(test_tasks, X_t=np.array([[50, 280], [40, 250]]).T) + +.. code-block:: python + + >>> mean_df + air + time lat lon + 2014-12-01 50.0 280.0 260.183056 + 40.0 250.0 277.947373 + 2014-12-02 50.0 280.0 261.08943 + 40.0 250.0 278.219599 + 2014-12-03 50.0 280.0 257.128185 + 40.0 250.0 278.444229 + ... + +This quickstart example is also `available as a Jupyter notebook `_ with added visualisations. diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt new file mode 100644 index 00000000..d0001c09 --- /dev/null +++ b/_sources/index.rst.txt @@ -0,0 +1,48 @@ +Welcome to DeepSensor's documentation! +====================================== + +DeepSensor is a Python package and open-source project for modelling environmental data with neural processes. + +DeepSensor aims to faithfully match the flexibility of neural processes with a simple and intuitive interface. DeepSensor wraps around the powerful `neuralprocesses package `_ for the core modelling functionality, while allowing users to stay in the familiar `xarray `_ and `pandas `_ world and avoid the murky depths of tensors! + +DeepSensor is also compatible with both `PyTorch `_ and `TensorFlow `_ for its machine learning abilities, thanks to the `backends package `_. Simply ``import deepsensor.torch`` or ``import deepsensor.tensorflow`` to choose between them! + +.. note:: + + This package is currently undergoing active development. If you are interested in using DeepSensor in production, please :doc:`get in touch `. + +Citing DeepSensor +----------------- + +If you use DeepSensor in your research, please consider citing the repository. You can generate a BibTeX entry by clicking the 'Cite this repository' button on the top right of this page. + +Quick installation +------------------ + +The easiest way to install the DeepSensor package is with pip, together with the backend of your choice. In this example we use the PyTorch backend: + +.. code-block:: bash + + $ pip install deepsensor torch + +To install the TensorFlow backend instead, simply replace ``torch`` with ``tensorflow`` in the above command. + + + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + getting-started/index + community/index + contact + reference/index + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/_sources/reference/active_learning/acquisition_fns.rst.txt b/_sources/reference/active_learning/acquisition_fns.rst.txt new file mode 100644 index 00000000..4261b490 --- /dev/null +++ b/_sources/reference/active_learning/acquisition_fns.rst.txt @@ -0,0 +1,83 @@ +``deepsensor.active_learning.acquisition_fns`` +============================================== + +.. autoclass:: deepsensor.active_learning.acquisition_fns.AcquisitionFunction + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.AcquisitionFunctionOracle + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.AcquisitionFunctionParallel + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.MeanStddev + :members: + :undoc-members: + :special-members: __init__, __call__ + +..
autoclass:: deepsensor.active_learning.acquisition_fns.MeanVariance + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.pNormStddev + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.JointEntropy + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleMAE + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleRMSE + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleMarginalNLL + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleJointNLL + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.Random + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.ContextDist + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.Stddev + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autoclass:: deepsensor.active_learning.acquisition_fns.ExpectedImprovement + :members: + :undoc-members: + :special-members: __init__, __call__ + diff --git a/_sources/reference/active_learning/algorithms.rst.txt b/_sources/reference/active_learning/algorithms.rst.txt new file mode 100644 index 00000000..a9ae8880 --- /dev/null +++ b/_sources/reference/active_learning/algorithms.rst.txt @@ -0,0 +1,7 @@ +``deepsensor.active_learning.algorithms`` +========================================= + +.. autoclass:: deepsensor.active_learning.algorithms.GreedyAlgorithm + :members: + :undoc-members: + :special-members: __init__, __call__ diff --git a/_sources/reference/active_learning/index.rst.txt b/_sources/reference/active_learning/index.rst.txt new file mode 100644 index 00000000..37bb249b --- /dev/null +++ b/_sources/reference/active_learning/index.rst.txt @@ -0,0 +1,9 @@ +``active_learning`` module +========================== + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + acquisition_fns + algorithms \ No newline at end of file diff --git a/_sources/reference/data/index.rst.txt b/_sources/reference/data/index.rst.txt new file mode 100644 index 00000000..1f534aa7 --- /dev/null +++ b/_sources/reference/data/index.rst.txt @@ -0,0 +1,11 @@ +``data`` module +=============== + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + loader + processor + task + utils \ No newline at end of file diff --git a/_sources/reference/data/loader.rst.txt b/_sources/reference/data/loader.rst.txt new file mode 100644 index 00000000..22cedb2d --- /dev/null +++ b/_sources/reference/data/loader.rst.txt @@ -0,0 +1,7 @@ +``deepsensor.data.loader`` +========================== + +..
autoclass:: deepsensor.data.loader.TaskLoader + :members: + :undoc-members: + :special-members: __init__, __call__ diff --git a/_sources/reference/data/processor.rst.txt b/_sources/reference/data/processor.rst.txt new file mode 100644 index 00000000..a4f2c9a9 --- /dev/null +++ b/_sources/reference/data/processor.rst.txt @@ -0,0 +1,15 @@ +``deepsensor.data.processor`` +============================= + +.. autoclass:: deepsensor.data.processor.DataProcessor + :members: + :undoc-members: + :special-members: __init__, __call__, __str__ + +.. autofunction:: deepsensor.data.processor.xarray_to_coord_array_normalised + +.. autofunction:: deepsensor.data.processor.mask_coord_array_normalised + +.. autofunction:: deepsensor.data.processor.da1_da2_same_grid + +.. autofunction:: deepsensor.data.processor.interp_da1_to_da2 diff --git a/_sources/reference/data/task.rst.txt b/_sources/reference/data/task.rst.txt new file mode 100644 index 00000000..07b9303f --- /dev/null +++ b/_sources/reference/data/task.rst.txt @@ -0,0 +1,15 @@ +``deepsensor.data.task`` +======================== + +.. autoclass:: deepsensor.data.task.Task + :members: + :undoc-members: + :special-members: __init__, __call__, __str__, __repr__ + +.. autofunction:: deepsensor.data.task.append_obs_to_task + +.. autofunction:: deepsensor.data.task.flatten_X + +.. autofunction:: deepsensor.data.task.flatten_Y + +.. autofunction:: deepsensor.data.task.flatten_gridded_data_in_task \ No newline at end of file diff --git a/_sources/reference/data/utils.rst.txt b/_sources/reference/data/utils.rst.txt new file mode 100644 index 00000000..0c3d41a1 --- /dev/null +++ b/_sources/reference/data/utils.rst.txt @@ -0,0 +1,10 @@ +``deepsensor.data.utils`` +========================= + +.. autofunction:: deepsensor.data.utils.construct_x1x2_ds + +.. autofunction:: deepsensor.data.utils.construct_circ_time_ds + +.. autofunction:: deepsensor.data.utils.compute_xarray_data_resolution + +.. autofunction:: deepsensor.data.utils.compute_pandas_data_resolution diff --git a/_sources/reference/index.rst.txt b/_sources/reference/index.rst.txt new file mode 100644 index 00000000..7d910b3e --- /dev/null +++ b/_sources/reference/index.rst.txt @@ -0,0 +1,17 @@ +API Reference +============= + +This part of the documentation contains the API reference for the package. It is structured by modules, and each module contains its respective classes, functions, and attributes. The API is designed to be as simple as possible while still allowing plenty of flexibility, and is divided into several submodules, described in the following sections. + + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + active_learning/index + data/index + model/index + tensorflow/index + torch/index + train/index + plot diff --git a/_sources/reference/model/convnp.rst.txt b/_sources/reference/model/convnp.rst.txt new file mode 100644 index 00000000..5f05acdc --- /dev/null +++ b/_sources/reference/model/convnp.rst.txt @@ -0,0 +1,9 @@ +``deepsensor.model.convnp`` +=========================== + +.. autoclass:: deepsensor.model.convnp.ConvNP + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autofunction:: deepsensor.model.convnp.concat_tasks diff --git a/_sources/reference/model/defaults.rst.txt b/_sources/reference/model/defaults.rst.txt new file mode 100644 index 00000000..7db5026e --- /dev/null +++ b/_sources/reference/model/defaults.rst.txt @@ -0,0 +1,8 @@ +``deepsensor.model.defaults`` +============================= + +..
autofunction:: deepsensor.model.defaults.gen_ppu + +.. autofunction:: deepsensor.model.defaults.gen_decoder_scale + +.. autofunction:: deepsensor.model.defaults.gen_encoder_scales diff --git a/_sources/reference/model/index.rst.txt b/_sources/reference/model/index.rst.txt new file mode 100644 index 00000000..53820582 --- /dev/null +++ b/_sources/reference/model/index.rst.txt @@ -0,0 +1,11 @@ +``model`` module +================ + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + convnp + defaults + model + nps \ No newline at end of file diff --git a/_sources/reference/model/model.rst.txt b/_sources/reference/model/model.rst.txt new file mode 100644 index 00000000..4b339e07 --- /dev/null +++ b/_sources/reference/model/model.rst.txt @@ -0,0 +1,20 @@ +``deepsensor.model.model`` +========================== + +.. autoclass:: deepsensor.model.model.DeepSensorModel + :members: + :undoc-members: + :show-inheritance: + :inherited-members: + :special-members: __init__ + +.. autoclass:: deepsensor.model.model.ProbabilisticModel + :members: + :undoc-members: + :show-inheritance: + :inherited-members: + :special-members: __init__ + +.. autofunction:: deepsensor.model.model.create_empty_spatiotemporal_xarray + +.. autofunction:: deepsensor.model.model.increase_spatial_resolution diff --git a/_sources/reference/model/nps.rst.txt b/_sources/reference/model/nps.rst.txt new file mode 100644 index 00000000..cc0daec0 --- /dev/null +++ b/_sources/reference/model/nps.rst.txt @@ -0,0 +1,12 @@ +``deepsensor.model.nps`` +======================== + +.. autofunction:: deepsensor.model.nps.convert_task_to_nps_args + +.. autofunction:: deepsensor.model.nps.run_nps_model + +.. autofunction:: deepsensor.model.nps.run_nps_model_ar + +.. autofunction:: deepsensor.model.nps.construct_neural_process + +.. autofunction:: deepsensor.model.nps.compute_encoding_tensor diff --git a/_sources/reference/plot.rst.txt b/_sources/reference/plot.rst.txt new file mode 100644 index 00000000..483af477 --- /dev/null +++ b/_sources/reference/plot.rst.txt @@ -0,0 +1,16 @@ +``deepsensor.plot`` module +========================== + +.. autofunction:: deepsensor.plot.acquisition_fn + +.. autofunction:: deepsensor.plot.context_encoding + +.. autofunction:: deepsensor.plot.feature_maps + +.. autofunction:: deepsensor.plot.offgrid_context + +.. autofunction:: deepsensor.plot.offgrid_context_observations + +.. autofunction:: deepsensor.plot.placements + +.. autofunction:: deepsensor.plot.receptive_field diff --git a/_sources/reference/tensorflow/index.rst.txt b/_sources/reference/tensorflow/index.rst.txt new file mode 100644 index 00000000..82aa5de8 --- /dev/null +++ b/_sources/reference/tensorflow/index.rst.txt @@ -0,0 +1,4 @@ +``tensorflow`` module +===================== + +... \ No newline at end of file diff --git a/_sources/reference/torch/index.rst.txt b/_sources/reference/torch/index.rst.txt new file mode 100644 index 00000000..e3ce8418 --- /dev/null +++ b/_sources/reference/torch/index.rst.txt @@ -0,0 +1,4 @@ +``torch`` module +================ + +... \ No newline at end of file diff --git a/_sources/reference/train/index.rst.txt b/_sources/reference/train/index.rst.txt new file mode 100644 index 00000000..17f1e345 --- /dev/null +++ b/_sources/reference/train/index.rst.txt @@ -0,0 +1,8 @@ +``train`` module +================ + +.. 
toctree:: + :maxdepth: 2 + :caption: Table of contents: + + train \ No newline at end of file diff --git a/_sources/reference/train/train.rst.txt b/_sources/reference/train/train.rst.txt new file mode 100644 index 00000000..1fd49b6c --- /dev/null +++ b/_sources/reference/train/train.rst.txt @@ -0,0 +1,6 @@ +``deepsensor.train.train`` +========================== + +.. autofunction:: deepsensor.train.train.set_gpu_default_device + +.. autofunction:: deepsensor.train.train.train_epoch diff --git a/_static/_sphinx_javascript_frameworks_compat.js b/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000..81415803 --- /dev/null +++ b/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,123 @@ +/* Compatability shim for jQuery and underscores.js. + * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 00000000..30fee9d0 --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 
8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + 
+dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} 
+ +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/css/badge_only.css b/_static/css/badge_only.css new file mode 100644 index 00000000..c718cee4 --- /dev/null +++ b/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd 
a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff b/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff2 b/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff b/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff2 b/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/_static/css/fonts/fontawesome-webfont.eot b/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/_static/css/fonts/fontawesome-webfont.svg b/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ [2,671 added lines: the Font Awesome SVG webfont. Only the FontForge metadata survives ("Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016", "By ,,,", "Copyright Dave Gandy 2016. All rights reserved."); the SVG glyph outline markup is not shown.] diff --git a/_static/css/fonts/fontawesome-webfont.ttf b/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/_static/css/fonts/fontawesome-webfont.woff b/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff differ diff --git a/_static/css/fonts/fontawesome-webfont.woff2 b/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/_static/css/fonts/lato-bold-italic.woff b/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff differ diff --git a/_static/css/fonts/lato-bold-italic.woff2 b/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/_static/css/fonts/lato-bold.woff b/_static/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/_static/css/fonts/lato-bold.woff differ diff --git a/_static/css/fonts/lato-bold.woff2 b/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/_static/css/fonts/lato-bold.woff2 differ diff --git a/_static/css/fonts/lato-normal-italic.woff b/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff differ diff --git a/_static/css/fonts/lato-normal-italic.woff2 b/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/_static/css/fonts/lato-normal.woff b/_static/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and
b/_static/css/fonts/lato-normal.woff differ diff --git a/_static/css/fonts/lato-normal.woff2 b/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/_static/css/fonts/lato-normal.woff2 differ diff --git a/_static/css/theme.css b/_static/css/theme.css new file mode 100644 index 00000000..19a446a0 --- /dev/null +++ b/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content 
.attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt 
.pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning 
.wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown 
.caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-elli
psis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li 
button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.f
a-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-trip
advisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:b
efore,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a 
.admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content 
table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content 
.fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download .btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s 
ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content 
.wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content 
.wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content 
.wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen 
and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline:none;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 .3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group 
.wy-form-halves input[type=url],.wy-control-group .wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch 
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 00000000..d06a71d7 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git 
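`doctools.js` above also doubles as the client-side gettext runtime: in translated builds Sphinx additionally ships a `translations.js` that feeds a message catalog to `Documentation.addTranslations()`, after which `gettext`/`ngettext` resolve UI strings through `TRANSLATIONS` and `PLURAL_EXPR`. A minimal sketch of that flow follows; the French catalog is hypothetical, only its shape (which is exactly what `addTranslations()` reads) is fixed:

    // Hypothetical catalog, shaped the way addTranslations() consumes it:
    // `messages` maps a source string to either a plain translation or a
    // (singular, plural) tuple; `plural_expr` must evaluate to a numeric
    // index into that tuple.
    Documentation.addTranslations({
      messages: {
        "Search Results": "Résultats de la recherche",
        "%s result": ["%s résultat", "%s résultats"],
      },
      plural_expr: "n === 1 ? 0 : 1",
      locale: "fr",
    });

    _("Search Results");                                  // "Résultats de la recherche"
    Documentation.ngettext("%s result", "%s results", 2); // "%s résultats"

The `_` alias is the one defined at the bottom of the file. Note also that `initOnKeyListeners()` only installs its handler when `DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS` or `ENABLE_SEARCH_SHORTCUTS` is set; both flags come from the `documentation_options.js` file added next.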
a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 00000000..808a4ac4 --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '0.2.5', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/jquery.js b/_static/jquery.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/_static/jquery.js @@ -0,0 +1,2 @@ +/*! jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var 
i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return 
t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return 
e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof 
a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/_static/js/html5shiv.min.js b/_static/js/html5shiv.min.js new file mode 100644 index 00000000..cd1c674f --- /dev/null +++ b/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/_static/js/theme.js b/_static/js/theme.js new file mode 100644 index 00000000..1fddb6ee --- /dev/null +++ b/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/_static/minus.png differ diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 00000000..84ab3030 --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 
5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #f8f8f8; } +.highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #008000; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #9C6500 } /* Comment.Preproc */ +.highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #E40000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #008400 } /* Generic.Inserted */ +.highlight .go { color: #717171 } /* Generic.Output */ +.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #008000 } /* Keyword.Pseudo */ +.highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #B00040 } /* Keyword.Type */ +.highlight .m { color: #666666 } /* Literal.Number */ +.highlight .s { color: #BA2121 } /* Literal.String */ +.highlight .na { color: #687822 } /* Name.Attribute */ +.highlight .nb { color: #008000 } /* Name.Builtin */ +.highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ +.highlight .no { color: #880000 } /* Name.Constant */ +.highlight .nd { color: #AA22FF } /* Name.Decorator */ +.highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #CB3F38; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #0000FF } /* Name.Function */ +.highlight .nl { color: #767600 } /* Name.Label */ +.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #19177C } /* Name.Variable */ +.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #666666 } /* Literal.Number.Bin */ +.highlight .mf { color: #666666 } /* Literal.Number.Float */ +.highlight .mh { color: #666666 } /* Literal.Number.Hex */ +.highlight .mi { color: #666666 } /* Literal.Number.Integer */ +.highlight .mo { color: #666666 } /* Literal.Number.Oct */ +.highlight .sa { color: #BA2121 } /* Literal.String.Affix */ +.highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ +.highlight .sc { color: #BA2121 } /* Literal.String.Char */ +.highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ +.highlight .sd { color: #BA2121; font-style: italic } /* 
Literal.String.Doc */ +.highlight .s2 { color: #BA2121 } /* Literal.String.Double */ +.highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ +.highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ +.highlight .sx { color: #008000 } /* Literal.String.Other */ +.highlight .sr { color: #A45A77 } /* Literal.String.Regex */ +.highlight .s1 { color: #BA2121 } /* Literal.String.Single */ +.highlight .ss { color: #19177C } /* Literal.String.Symbol */ +.highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #0000FF } /* Name.Function.Magic */ +.highlight .vc { color: #19177C } /* Name.Variable.Class */ +.highlight .vg { color: #19177C } /* Name.Variable.Global */ +.highlight .vi { color: #19177C } /* Name.Variable.Instance */ +.highlight .vm { color: #19177C } /* Name.Variable.Magic */ +.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 00000000..7918c3fa --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,574 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. 
+ * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && 
!terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 00000000..8a96c69a --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? 
divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/community/code-of-conduct.html b/community/code-of-conduct.html new file mode 100644 index 00000000..03f27e25 --- /dev/null +++ b/community/code-of-conduct.html @@ -0,0 +1,119 @@ + + + + + + + Developer Code of Conduct — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Developer Code of Conduct

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/community/contributing.html b/community/contributing.html new file mode 100644 index 00000000..066ea491 --- /dev/null +++ b/community/contributing.html @@ -0,0 +1,119 @@ + + + + + + + Contributing to DeepSensor — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Contributing to DeepSensor

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/community/faq.html b/community/faq.html new file mode 100644 index 00000000..8f1d4bca --- /dev/null +++ b/community/faq.html @@ -0,0 +1,174 @@ + + + + + + + Community FAQ — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

Community FAQ

+

This FAQ aims to answer common questions about the DeepSensor library. It is our way to streamline the onboarding process and clarify expectations.

+
+

Note

+

If you have a question that is not answered here, please open an issue or submit a pull request.

+
+
+

Questions

+

Q: What is the purpose of this project?

+

Answer: [Briefly describe the project, its objectives, and its intended audience.]

+

+

Q: How can I contribute?

+

Answer: There are many ways to contribute, from writing code and fixing bugs to improving documentation or translating content. Check our contribution guide for detailed steps.

+

+

Q: Where can I find the contribution guidelines?

+

Answer: You can find our contribution guidelines in the CONTRIBUTING.md file in the root directory of the repository, or in the contribution guide.

+

+

Q: Do I need to sign a Contributor License Agreement (CLA)?

+

Answer: At the current time, we do not require a CLA from our contributors.

+

+

Q: How do I report a bug?

+

Answer: Please submit an issue in our GitHub repository. Make sure to provide detailed information, including steps to reproduce the bug and the expected outcome.

+

+

Q: How do I request a new feature?

+

Answer: Open a new issue on our GitHub repository and label it as a feature request. Describe the feature in detail and its potential benefits.

+

+

Q: How do I set up the development environment?

+

Answer: Follow the instructions in our developer documentation. If you run into issues, ask in our community chat (on Slack) or contact the core group of maintainers directly.

+

+

Q: Do you have a code of conduct?

+

Answer: Yes, we value a respectful and inclusive community. Please read our Code of Conduct before contributing.

+

+

Q: How can I get in touch with other contributors or maintainers?

+

Answer: Join our Slack team to stay in touch with other contributors and maintainers. We also have a standing community meeting, which is invite-only, so get in touch with the core group of maintainers to receive an invite.

+

+

Q: Can I contribute even if I’m not a coder?

+

Answer: Absolutely! Contributions can be made in the form of documentation, design, testing, and more. Everyone’s skills are valuable. Join our Slack discussion to learn more.

+

+

Q: How do I claim an issue to work on?

+

Answer: Comment on the issue to express your interest in helping out. If the issue is unassigned, a maintainer will likely assign it to you.

+

+

Q: What’s the process for proposing a significant change?

+

Answer: For significant changes, it’s a good practice to first open a discussion or issue to gather feedback. Once there’s a consensus, you can proceed with a pull request.

+

+

Q: How can I get my pull request (PR) merged?

+

Answer: Ensure your PR follows the contribution guidelines, passes all tests, and has been reviewed by at least one maintainer. Address any feedback provided.

+

+

Q: What’s the project’s release cycle?

+

Answer: We release updates in a rolling manner, following our roadmap planning. Critical bug fixes might be released as needed.

+

+

Q: How is credit given to contributors?

+

Answer: Contributors are acknowledged in our release notes, and their contributions are forever recorded in the project’s history.

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/community/index.html b/community/index.html new file mode 100644 index 00000000..a7a1d0bd --- /dev/null +++ b/community/index.html @@ -0,0 +1,130 @@ + + + + + + + DeepSensor’s user and contributor community — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

DeepSensor’s user and contributor community

+

The DeepSensor community is a group of users and contributors who are interested in the development of DeepSensor. It is open to anyone with an interest in the package, and it is a place to ask questions, discuss ideas, and share your work.

+

If you are interested in joining the community, please join our Slack channel at https://deepsensor.slack.com. You can request an invitation to the Slack channel at https://ai4environment.slack.com/signup.

+

We welcome contributions from the community. If you are interested in contributing to DeepSensor, please read the Contributing to DeepSensor guide.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/community/roadmap.html b/community/roadmap.html new file mode 100644 index 00000000..99762592 --- /dev/null +++ b/community/roadmap.html @@ -0,0 +1,119 @@ + + + + + + + DeepSensor Roadmap — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

DeepSensor Roadmap

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/contact.html b/contact.html new file mode 100644 index 00000000..48fd058d --- /dev/null +++ b/contact.html @@ -0,0 +1,113 @@ + + + + + + + Contact the developers — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Contact the developers

+

tomand@bas.ac.uk

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/doctrees/community/code-of-conduct.doctree b/doctrees/community/code-of-conduct.doctree new file mode 100644 index 00000000..097631f8 Binary files /dev/null and b/doctrees/community/code-of-conduct.doctree differ diff --git a/doctrees/community/contributing.doctree b/doctrees/community/contributing.doctree new file mode 100644 index 00000000..fa33d433 Binary files /dev/null and b/doctrees/community/contributing.doctree differ diff --git a/doctrees/community/faq.doctree b/doctrees/community/faq.doctree new file mode 100644 index 00000000..5e3d4c62 Binary files /dev/null and b/doctrees/community/faq.doctree differ diff --git a/doctrees/community/index.doctree b/doctrees/community/index.doctree new file mode 100644 index 00000000..88bf7090 Binary files /dev/null and b/doctrees/community/index.doctree differ diff --git a/doctrees/community/roadmap.doctree b/doctrees/community/roadmap.doctree new file mode 100644 index 00000000..ae77b5ba Binary files /dev/null and b/doctrees/community/roadmap.doctree differ diff --git a/doctrees/contact.doctree b/doctrees/contact.doctree new file mode 100644 index 00000000..40e68713 Binary files /dev/null and b/doctrees/contact.doctree differ diff --git a/doctrees/environment.pickle b/doctrees/environment.pickle new file mode 100644 index 00000000..018e6e73 Binary files /dev/null and b/doctrees/environment.pickle differ diff --git a/doctrees/getting-started/extending.doctree b/doctrees/getting-started/extending.doctree new file mode 100644 index 00000000..c86c1ad6 Binary files /dev/null and b/doctrees/getting-started/extending.doctree differ diff --git a/doctrees/getting-started/index.doctree b/doctrees/getting-started/index.doctree new file mode 100644 index 00000000..e9bce49b Binary files /dev/null and b/doctrees/getting-started/index.doctree differ diff --git a/doctrees/getting-started/installation.doctree b/doctrees/getting-started/installation.doctree new file mode 100644 index 00000000..aecfffbc Binary files /dev/null and b/doctrees/getting-started/installation.doctree differ diff --git a/doctrees/getting-started/tutorials/index.doctree b/doctrees/getting-started/tutorials/index.doctree new file mode 100644 index 00000000..13666b00 Binary files /dev/null and b/doctrees/getting-started/tutorials/index.doctree differ diff --git a/doctrees/getting-started/tutorials/quickstart.doctree b/doctrees/getting-started/tutorials/quickstart.doctree new file mode 100644 index 00000000..b1ea678a Binary files /dev/null and b/doctrees/getting-started/tutorials/quickstart.doctree differ diff --git a/doctrees/index.doctree b/doctrees/index.doctree new file mode 100644 index 00000000..f32bc084 Binary files /dev/null and b/doctrees/index.doctree differ diff --git a/doctrees/reference/active_learning/acquisition_fns.doctree b/doctrees/reference/active_learning/acquisition_fns.doctree new file mode 100644 index 00000000..478accb5 Binary files /dev/null and b/doctrees/reference/active_learning/acquisition_fns.doctree differ diff --git a/doctrees/reference/active_learning/algorithms.doctree b/doctrees/reference/active_learning/algorithms.doctree new file mode 100644 index 00000000..f1d1ecd4 Binary files /dev/null and b/doctrees/reference/active_learning/algorithms.doctree differ diff --git a/doctrees/reference/active_learning/index.doctree b/doctrees/reference/active_learning/index.doctree new file mode 100644 index 00000000..17ccc370 Binary files /dev/null and b/doctrees/reference/active_learning/index.doctree differ diff --git 
a/doctrees/reference/data/index.doctree b/doctrees/reference/data/index.doctree new file mode 100644 index 00000000..e96cdd51 Binary files /dev/null and b/doctrees/reference/data/index.doctree differ diff --git a/doctrees/reference/data/loader.doctree b/doctrees/reference/data/loader.doctree new file mode 100644 index 00000000..4c6d7331 Binary files /dev/null and b/doctrees/reference/data/loader.doctree differ diff --git a/doctrees/reference/data/processor.doctree b/doctrees/reference/data/processor.doctree new file mode 100644 index 00000000..71e3732f Binary files /dev/null and b/doctrees/reference/data/processor.doctree differ diff --git a/doctrees/reference/data/task.doctree b/doctrees/reference/data/task.doctree new file mode 100644 index 00000000..46ead1f7 Binary files /dev/null and b/doctrees/reference/data/task.doctree differ diff --git a/doctrees/reference/data/utils.doctree b/doctrees/reference/data/utils.doctree new file mode 100644 index 00000000..6422b78f Binary files /dev/null and b/doctrees/reference/data/utils.doctree differ diff --git a/doctrees/reference/index.doctree b/doctrees/reference/index.doctree new file mode 100644 index 00000000..9d3fa0ac Binary files /dev/null and b/doctrees/reference/index.doctree differ diff --git a/doctrees/reference/model/convnp.doctree b/doctrees/reference/model/convnp.doctree new file mode 100644 index 00000000..59e566b1 Binary files /dev/null and b/doctrees/reference/model/convnp.doctree differ diff --git a/doctrees/reference/model/defaults.doctree b/doctrees/reference/model/defaults.doctree new file mode 100644 index 00000000..f47ae801 Binary files /dev/null and b/doctrees/reference/model/defaults.doctree differ diff --git a/doctrees/reference/model/index.doctree b/doctrees/reference/model/index.doctree new file mode 100644 index 00000000..ccdd641f Binary files /dev/null and b/doctrees/reference/model/index.doctree differ diff --git a/doctrees/reference/model/model.doctree b/doctrees/reference/model/model.doctree new file mode 100644 index 00000000..eb432ed6 Binary files /dev/null and b/doctrees/reference/model/model.doctree differ diff --git a/doctrees/reference/model/nps.doctree b/doctrees/reference/model/nps.doctree new file mode 100644 index 00000000..22d9c459 Binary files /dev/null and b/doctrees/reference/model/nps.doctree differ diff --git a/doctrees/reference/plot.doctree b/doctrees/reference/plot.doctree new file mode 100644 index 00000000..5f2854a7 Binary files /dev/null and b/doctrees/reference/plot.doctree differ diff --git a/doctrees/reference/tensorflow/index.doctree b/doctrees/reference/tensorflow/index.doctree new file mode 100644 index 00000000..38cc178d Binary files /dev/null and b/doctrees/reference/tensorflow/index.doctree differ diff --git a/doctrees/reference/torch/index.doctree b/doctrees/reference/torch/index.doctree new file mode 100644 index 00000000..18a04903 Binary files /dev/null and b/doctrees/reference/torch/index.doctree differ diff --git a/doctrees/reference/train/index.doctree b/doctrees/reference/train/index.doctree new file mode 100644 index 00000000..2947ec05 Binary files /dev/null and b/doctrees/reference/train/index.doctree differ diff --git a/doctrees/reference/train/train.doctree b/doctrees/reference/train/train.doctree new file mode 100644 index 00000000..2fccaed0 Binary files /dev/null and b/doctrees/reference/train/train.doctree differ diff --git a/genindex.html b/genindex.html new file mode 100644 index 00000000..16b50ec2 --- /dev/null +++ b/genindex.html @@ -0,0 +1,595 @@ + + + + + + Index — 
DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Index

+ +
+ _ + | A + | C + | D + | E + | F + | G + | I + | J + | L + | M + | O + | P + | R + | S + | T + | U + | V + | X + +
+

_

+ + + +
+ +

A

+ + + +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

I

+ + + +
+ +

J

+ + + +
+ +

L

+ + + +
+ +

M

+ + + +
+ +

O

+ + + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + +
+ +

V

+ + +
+ +

X

+ + +
+ + + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/getting-started/extending.html b/getting-started/extending.html new file mode 100644 index 00000000..7d0be597 --- /dev/null +++ b/getting-started/extending.html @@ -0,0 +1,144 @@ + + + + + + + Extending DeepSensor with new models — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Extending DeepSensor with new models

+

To extend DeepSensor with a new model, simply create a new class that inherits from deepsensor.model.DeepSensorModel and implement the low-level prediction methods defined in deepsensor.model.ProbabilisticModel, such as .mean and .stddev.

+

In this example, we’ll create a new model called ExampleModel:

+
# Imports assumed for this sketch (module paths as in the API reference):
+import numpy as np
+
+from deepsensor.data.loader import TaskLoader
+from deepsensor.data.processor import DataProcessor
+from deepsensor.data.task import Task
+from deepsensor.model.model import DeepSensorModel
+
+
+class ExampleModel(DeepSensorModel):
+    """
+    A very naive model that predicts the mean of the first context set
+    with a fixed stddev.
+    """
+
+    def __init__(self, data_processor: DataProcessor, task_loader: TaskLoader):
+        # Initialise the parent class (DeepSensorModel) with the
+        # provided data processor and task loader:
+        super().__init__(data_processor, task_loader)
+
+    def mean(self, task: Task):
+        """Compute mean at target locations"""
+        return np.mean(task["Y_c"][0])
+
+    def stddev(self, task: Task):
+        """Compute stddev at target locations"""
+        return 0.1
+
+    ...
+
+
+

After creating ExampleModel in this way, it can be used in the same way as the built-in ConvNP model.

+
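For example, a minimal usage sketch, assuming data_processor, task_loader and ds_raw have been set up as in the Quickstart tutorial:

+
# Sketch only: data_processor, task_loader and ds_raw are assumed to
+# come from the Quickstart tutorial.
+model = ExampleModel(data_processor, task_loader)
+
+# The high-level prediction API then works just as it does for ConvNP:
+test_task = task_loader("2014-12-31", 0.1)
+mean_ds, std_ds = model.predict(test_task, X_t=ds_raw)
+
+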

See this Jupyter notebook for more details.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/getting-started/index.html b/getting-started/index.html new file mode 100644 index 00000000..adf3feb3 --- /dev/null +++ b/getting-started/index.html @@ -0,0 +1,133 @@ + + + + + + + Getting started — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Getting started

+

In this part of the documentation, we will show you how to get started with DeepSensor: how to install it, how to extend it with new models, and how to work through some introductory tutorials.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/getting-started/installation.html b/getting-started/installation.html new file mode 100644 index 00000000..8bdfa892 --- /dev/null +++ b/getting-started/installation.html @@ -0,0 +1,184 @@ + + + + + + + Installation instructions — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Installation instructions

+

DeepSensor is a Python package that can be installed in a number of ways. This section describes the two main ones.

+
+

Install from PyPI

+

If you want to use the latest stable release of DeepSensor and do not want/need access to the worked examples or the package’s source code, we recommend installing from PyPI.

+

This is the easiest way to install DeepSensor.

+
    +
  • Install deepsensor:

    +
    pip install deepsensor
    +
    +
    +
  • +
  • Install the backend of your choice:

    +
      +
    • Install tensorflow:

      +
      pip install tensorflow
      +
      +
      +
    • +
    • Install pytorch:

      +
      pip install torch
      +
      +
      +
    • +
    +
  • +
+
+
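Once installed, you can confirm that pip can see the package with, for example:

+
pip show deepsensor
+
+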
+

Install from source

+
+

Note

+

You will want to use this method if you intend to contribute to the source code of DeepSensor.

+
+

If you want to keep up with the latest changes to DeepSensor, or want/need easy access to the worked examples or the package’s source code, we recommend installing from source.

+

This method will create a DeepSensor directory on your machine, which will contain all the source code, docs, and worked examples.

+
    +
  • Clone the repository:

    +
    git clone
    +
    +
    +
  • +
  • Install deepsensor:

    +
    pip install -v -e .
    +
    +
    +
  • +
  • Install the backend of your choice:

    +
      +
    • Install tensorflow:

      +
      pip install tensorflow
      +
      +
      +
    • +
    • Install pytorch:

      +
      pip install torch
      +
      +
      +
    • +
    +
  • +
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/getting-started/tutorials/index.html b/getting-started/tutorials/index.html new file mode 100644 index 00000000..178da6ff --- /dev/null +++ b/getting-started/tutorials/index.html @@ -0,0 +1,127 @@ + + + + + + + Tutorials — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Tutorials

+
+

Tutorials:

+ +
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/getting-started/tutorials/quickstart.html b/getting-started/tutorials/quickstart.html new file mode 100644 index 00000000..bf159b5f --- /dev/null +++ b/getting-started/tutorials/quickstart.html @@ -0,0 +1,194 @@ + + + + + + + Tutorial: Quickstart — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Tutorial: Quickstart

+

Here we will demonstrate a simple example of training a convolutional conditional neural process (ConvCNP) to spatially interpolate ERA5 data.

+

We can go from imports to predictions with a trained model in less than 30 lines of code!

+
import deepsensor.torch
+from deepsensor.data.loader import TaskLoader
+from deepsensor.data.processor import DataProcessor
+from deepsensor.model.convnp import ConvNP
+from deepsensor.train.train import train_epoch
+
+import xarray as xr
+import pandas as pd
+import numpy as np
+
+# Load raw data
+ds_raw = xr.tutorial.open_dataset("air_temperature")
+
+# Normalise data
+data_processor = DataProcessor(x1_name="lat", x1_map=(15, 75), x2_name="lon", x2_map=(200, 330))
+ds = data_processor(ds_raw)
+
+# Set up task loader
+task_loader = TaskLoader(context=ds, target=ds)
+
+# Set up model
+model = ConvNP(data_processor, task_loader)
+
+# Generate training tasks with up to 10% of grid cells passed as context and all grid cells
+# passed as targets
+train_tasks = []
+for date in pd.date_range("2013-01-01", "2014-11-30")[::7]:
+    task = task_loader(date, context_sampling=np.random.uniform(0.0, 0.1), target_sampling="all")
+    train_tasks.append(task)
+
+# Train model
+for epoch in range(10):
+    train_epoch(model, train_tasks, progress_bar=True)
+
+# Predict on new task with 10% of context data and a dense grid of target points
+test_task = task_loader("2014-12-31", 0.1)
+mean_ds, std_ds = model.predict(test_task, X_t=ds_raw)
+
+
+

After training, the model can predict directly to xarray in your data’s original units and coordinate system:

+
>>> mean_ds
+<xarray.Dataset>
+Dimensions:  (time: 1, lat: 25, lon: 53)
+Coordinates:
+  * time     (time) datetime64[ns] 2014-12-31
+  * lat      (lat) float32 75.0 72.5 70.0 67.5 65.0 ... 25.0 22.5 20.0 17.5 15.0
+  * lon      (lon) float32 200.0 202.5 205.0 207.5 ... 322.5 325.0 327.5 330.0
+Data variables:
+    air      (time, lat, lon) float32 246.7 244.4 245.5 ... 290.2 289.8 289.4
+
+
+

We can also predict directly to a pandas DataFrame containing a time series of predictions at off-grid locations +by passing a numpy array of target locations to the X_t argument of .predict:

+
# Predict at two off-grid locations for three days in December 2014
+test_tasks = task_loader(pd.date_range("2014-12-01", "2014-12-03"), 0.1)
+mean_df, std_df = model.predict(test_tasks, X_t=np.array([[50, 280], [40, 250]]).T)
+
+
+
>>> mean_df
+                            air
+time       lat  lon
+2014-12-01 50.0 280.0  260.183056
+           40.0 250.0  277.947373
+2014-12-02 50.0 280.0   261.08943
+           40.0 250.0  278.219599
+2014-12-03 50.0 280.0  257.128185
+           40.0 250.0  278.444229
+
+
+

This quickstart example is also available as a Jupyter notebook with added visualisations.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/.buildinfo b/html/.buildinfo new file mode 100644 index 00000000..a08741e4 --- /dev/null +++ b/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 35dbdb2b27cad3442b8a3d7e176c0c97 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/html/_sources/community/code-of-conduct.rst.txt b/html/_sources/community/code-of-conduct.rst.txt new file mode 100644 index 00000000..bdb71d14 --- /dev/null +++ b/html/_sources/community/code-of-conduct.rst.txt @@ -0,0 +1,6 @@ +========================= +Developer Code of Conduct +========================= + +.. + TODO: write a code of conduct for DeepSensor/mirror the CODE-OF-CONDUCT.md document? diff --git a/html/_sources/community/contributing.rst.txt b/html/_sources/community/contributing.rst.txt new file mode 100644 index 00000000..c6b6575d --- /dev/null +++ b/html/_sources/community/contributing.rst.txt @@ -0,0 +1,6 @@ +========================== +Contributing to DeepSensor +========================== + +.. + TODO: write a intro to contributing to DeepSensor/mirror the CONTRIBUTE.md document? diff --git a/html/_sources/community/faq.rst.txt b/html/_sources/community/faq.rst.txt new file mode 100644 index 00000000..e3927c1b --- /dev/null +++ b/html/_sources/community/faq.rst.txt @@ -0,0 +1,104 @@ +============= +Community FAQ +============= + +This FAQ aims to answer common questions about the DeepSensor library. It is our way to streamline the onboarding process and clarify expectations. + +.. note:: + + If you have a question that is not answered here, please open an issue or submit a pull request. + +Questions +--------- + +**Q: What is the purpose of this project?** + +**Answer:** [Briefly describe the project, its objectives, and its intended audience.] + +--- + +**Q: How can I contribute?** + +**Answer:** There are many ways to contribute, from writing code and fixing bugs to improving documentation or translating content. Check our :doc:`contribution guide ` for detailed steps. + +--- + +**Q: Where can I find the contribution guidelines?** + +**Answer:** You can find our contribution guidelines in the CONTRIBUTING.md file in the root directory of the repository, or in the :doc:`contribution guide `. + +--- + +**Q: Do I need to sign a Contributor License Agreement (CLA)?** + +**Answer:** At the current time, we do not require a CLA from our contributors. + +--- + +**Q: How do I report a bug?** + +**Answer:** Please submit an issue in our GitHub repository. Make sure to provide detailed information, including steps to reproduce the bug and the expected outcome. + +--- + +**Q: How do I request a new feature?** + +**Answer:** Open a new issue on our GitHub repository and label it as a feature request. Describe the feature in detail and its potential benefits. + +--- + +**Q: How do I set up the development environment?** + +**Answer:** Follow the instructions in our developer documentation. If you run into issues, ask in our community chat (on Slack) or :doc:`contact the core group of maintainers directly
`. + +--- + +**Q: Do you have a code of conduct?** + +**Answer:** Yes, we value a respectful and inclusive community. Please read our :doc:`Code of Conduct ` before contributing. + +--- + +**Q: How can I get in touch with other contributors or maintainers?** + +**Answer:** Join our Slack team to stay in touch with other contributors and maintainers. We also have a standing community meeting, which is by invite, so :doc:`get in touch with the core group of maintainers ` to receive an invite to them. + +--- + +**Q: Can I contribute even if I'm not a coder?** + +**Answer:** Absolutely! Contributions can be made in the form of documentation, design, testing, and more. Everyone's skills are valuable. Join our Slack discussion to learn more. + +--- + +**Q: How do I claim an issue to work on?** + +**Answer:** Comment on the issue expressing your interest to help out. If the issue is unassigned, a maintainer will likely assign it to you. + +--- + +**Q: What's the process for proposing a significant change?** + +**Answer:** For significant changes, it's a good practice to first open a discussion or issue to gather feedback. Once there's a consensus, you can proceed with a pull request. + +--- + +**Q: How can I get my pull request (PR) merged?** + +**Answer:** Ensure your PR follows the contribution guidelines, passes all tests, and has been reviewed by at least one maintainer. Address any feedback provided. + +--- + +**Q: What's the project's release cycle?** + +**Answer:** We release updates in a rolling manner, following our roadmap planning. Critical bug fixes might be released as needed. + +--- + +**Q: How is credit given to contributors?** + +**Answer:** Contributors are acknowledged in our release notes, and their contributions are forever recorded in the project's history. + + +.. + TODO: write more questions and answers here. diff --git a/html/_sources/community/index.rst.txt b/html/_sources/community/index.rst.txt new file mode 100644 index 00000000..e4816243 --- /dev/null +++ b/html/_sources/community/index.rst.txt @@ -0,0 +1,18 @@ +=========================================== +DeepSensor's user and contributor community +=========================================== + +The DeepSensor community is a group of users and contributors who are interested in the development of DeepSensor. The community is open to anyone who is interested in DeepSensor. The community is a place to ask questions, discuss ideas, and share your work. + +If you are interested in joining the community, please join our Slack channel at https://deepsensor.slack.com. You can request an invitation to the Slack channel at `https://ai4environment.slack.com/signup`. + +We welcome contributions from the community. If you are interested in contributing to DeepSensor, please read the :doc:`contributing` guide. + +.. toctree:: + :maxdepth: 1 + :caption: Table of contents: + + faq + contributing + code-of-conduct + roadmap diff --git a/html/_sources/community/roadmap.rst.txt b/html/_sources/community/roadmap.rst.txt new file mode 100644 index 00000000..e67c7eaf --- /dev/null +++ b/html/_sources/community/roadmap.rst.txt @@ -0,0 +1,6 @@ +================== +DeepSensor Roadmap +================== + +.. 
+ TODO: write an intro to the roadmap for DeepSensor + link to the GitHub project management board diff --git a/html/_sources/contact.rst.txt b/html/_sources/contact.rst.txt new file mode 100644 index 00000000..6f6516ef --- /dev/null +++ b/html/_sources/contact.rst.txt @@ -0,0 +1,7 @@ +Contact the developers +====================== + +tomand@bas.ac.uk + +.. + TODO: Add contact information here... \ No newline at end of file diff --git a/html/_sources/getting-started/extending.rst.txt b/html/_sources/getting-started/extending.rst.txt new file mode 100644 index 00000000..5beef9f7 --- /dev/null +++ b/html/_sources/getting-started/extending.rst.txt @@ -0,0 +1,34 @@ +==================================== +Extending DeepSensor with new models +==================================== + +To extend DeepSensor with a new model, simply create a new class that inherits from ``deepsensor.model.DeepSensorModel`` and implement the low-level prediction methods defined in ``deepsensor.model.ProbabilisticModel``, such as ``.mean`` and ``.stddev``. + +In this example, we'll create a new model called ``ExampleModel``: + +.. code-block:: python + + class ExampleModel(DeepSensorModel): + """ + A very naive model that predicts the mean of the first context set + with a fixed stddev. + """ + + def __init__(self, data_processor: DataProcessor, task_loader: TaskLoader): + # Initiate the parent class (DeepSensorModel) with the + # provided data processor and task loader: + super().__init__(data_processor, task_loader) + + def mean(self, task: Task): + """Compute mean at target locations""" + return np.mean(task["Y_c"][0]) + + def stddev(self, task: Task): + """Compute stddev at target locations""" + return 0.1 + + ... + +After creating ``ExampleModel`` in this way, it can be used in the same way as the built-in :class:`~deepsensor.model.convnp.ConvNP` model. + +See `this Jupyter notebook `_ for more details. diff --git a/html/_sources/getting-started/index.rst.txt b/html/_sources/getting-started/index.rst.txt new file mode 100644 index 00000000..59493711 --- /dev/null +++ b/html/_sources/getting-started/index.rst.txt @@ -0,0 +1,14 @@ +=============== +Getting started +=============== + +In this part of the documentation, we will show you how to get started with ``DeepSensor``. We will show you how to install ``DeepSensor`` and how to extend it. We will also provide you with some tutorials to get you started with ``DeepSensor``. + + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + installation + extending + tutorials/index diff --git a/html/_sources/getting-started/installation.rst.txt b/html/_sources/getting-started/installation.rst.txt new file mode 100644 index 00000000..2fc3497c --- /dev/null +++ b/html/_sources/getting-started/installation.rst.txt @@ -0,0 +1,68 @@ +Installation instructions +========================= + +DeepSensor is a Python package that can be installed in a number of ways. In this section we will describe the two main ways to install the package. + +Install from `PyPI `_ +----------------------------------------------------------- + +If you want to use the latest stable release of DeepSensor and do not want/need access to the worked examples or the package's source code, we recommend installing from PyPI. + +This is the easiest way to install DeepSensor. + +- Install ``deepsensor``: + + .. code-block:: bash + + pip install deepsensor + +- Install the backend of your choice: + + - Install ``tensorflow``: + + .. 
+
+See `this Jupyter notebook `_ for more details.
diff --git a/html/_sources/getting-started/index.rst.txt b/html/_sources/getting-started/index.rst.txt
new file mode 100644
index 00000000..59493711
--- /dev/null
+++ b/html/_sources/getting-started/index.rst.txt
@@ -0,0 +1,14 @@
+===============
+Getting started
+===============
+
+In this part of the documentation, we will show you how to get started with ``DeepSensor``: how to install it, how to extend it with your own models, and some tutorials to get you up and running.
+
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Table of contents:
+
+   installation
+   extending
+   tutorials/index
diff --git a/html/_sources/getting-started/installation.rst.txt b/html/_sources/getting-started/installation.rst.txt
new file mode 100644
index 00000000..2fc3497c
--- /dev/null
+++ b/html/_sources/getting-started/installation.rst.txt
@@ -0,0 +1,68 @@
+Installation instructions
+=========================
+
+DeepSensor is a Python package that can be installed in a number of ways. In this section we describe the two main ones.
+
+Install from `PyPI `_
+-----------------------------------------------------------
+
+If you want to use the latest stable release of DeepSensor and do not want/need access to the worked examples or the package's source code, we recommend installing from PyPI.
+
+This is the easiest way to install DeepSensor.
+
+- Install ``deepsensor``:
+
+  .. code-block:: bash
+
+    pip install deepsensor
+
+- Install the backend of your choice:
+
+  - Install ``tensorflow``:
+
+    .. code-block:: bash
+
+      pip install tensorflow
+
+  - Install ``pytorch``:
+
+    .. code-block:: bash
+
+      pip install torch
+
+Install from `source `_
+---------------------------------------------------------------------
+
+.. note::
+
+   You will want to use this method if you intend to contribute to the source code of DeepSensor.
+
+If you want to keep up with the latest changes to DeepSensor, or want/need easy access to the worked examples or the package's source code, we recommend installing from source.
+
+This method will create a ``DeepSensor`` directory on your machine, which will contain all the source code, docs and worked examples.
+
+- Clone the repository:
+
+  .. code-block:: bash
+
+    git clone
+
+- Install ``deepsensor``:
+
+  .. code-block:: bash
+
+    pip install -v -e .
+
+- Install the backend of your choice:
+
+  - Install ``tensorflow``:
+
+    .. code-block:: bash
+
+      pip install tensorflow
+
+  - Install ``pytorch``:
+
+    .. code-block:: bash
+
+      pip install torch
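+
+- Optionally, verify the install with a quick import check (a minimal sketch; it assumes ``deepsensor`` exposes a ``__version__`` attribute, as most Python packages do):
+
+  .. code-block:: bash
+
+    # Print the installed version to confirm the package imports correctly
+    python -c "import deepsensor; print(deepsensor.__version__)"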
diff --git a/html/_sources/getting-started/tutorials/index.rst.txt b/html/_sources/getting-started/tutorials/index.rst.txt
new file mode 100644
index 00000000..9bfe997a
--- /dev/null
+++ b/html/_sources/getting-started/tutorials/index.rst.txt
@@ -0,0 +1,9 @@
+=========
+Tutorials
+=========
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Tutorials:
+
+   quickstart
\ No newline at end of file
diff --git a/html/_sources/getting-started/tutorials/quickstart.rst.txt b/html/_sources/getting-started/tutorials/quickstart.rst.txt
new file mode 100644
index 00000000..2da7f8d2
--- /dev/null
+++ b/html/_sources/getting-started/tutorials/quickstart.rst.txt
@@ -0,0 +1,84 @@
+====================
+Tutorial: Quickstart
+====================
+
+Here we will demonstrate a simple example of training a convolutional conditional neural process (ConvCNP) to spatially interpolate ERA5 data.
+
+We can go from imports to predictions with a trained model in less than 30 lines of code!
+
+.. code-block:: python
+
+    import deepsensor.torch
+    from deepsensor.data.loader import TaskLoader
+    from deepsensor.data.processor import DataProcessor
+    from deepsensor.model.convnp import ConvNP
+    from deepsensor.train.train import train_epoch
+
+    import xarray as xr
+    import pandas as pd
+    import numpy as np
+
+    # Load raw data
+    ds_raw = xr.tutorial.open_dataset("air_temperature")
+
+    # Normalise data
+    data_processor = DataProcessor(x1_name="lat", x1_map=(15, 75), x2_name="lon", x2_map=(200, 330))
+    ds = data_processor(ds_raw)
+
+    # Set up task loader
+    task_loader = TaskLoader(context=ds, target=ds)
+
+    # Set up model
+    model = ConvNP(data_processor, task_loader)
+
+    # Generate training tasks with up to 10% of grid cells passed as context and all grid cells
+    # passed as targets
+    train_tasks = []
+    for date in pd.date_range("2013-01-01", "2014-11-30")[::7]:
+        task = task_loader(date, context_sampling=np.random.uniform(0.0, 0.1), target_sampling="all")
+        train_tasks.append(task)
+
+    # Train model
+    for epoch in range(10):
+        train_epoch(model, train_tasks, progress_bar=True)
+
+    # Predict on new task with 10% of context data and a dense grid of target points
+    test_task = task_loader("2014-12-31", 0.1)
+    mean_ds, std_ds = model.predict(test_task, X_t=ds_raw)
+
+After training, the model can predict directly to `xarray` in your data's original units and coordinate system:
+
+.. code-block:: python
+
+    >>> mean_ds
+    <xarray.Dataset>
+    Dimensions:  (time: 1, lat: 25, lon: 53)
+    Coordinates:
+      * time     (time) datetime64[ns] 2014-12-31
+      * lat      (lat) float32 75.0 72.5 70.0 67.5 65.0 ... 25.0 22.5 20.0 17.5 15.0
+      * lon      (lon) float32 200.0 202.5 205.0 207.5 ... 322.5 325.0 327.5 330.0
+    Data variables:
+        air      (time, lat, lon) float32 246.7 244.4 245.5 ... 290.2 289.8 289.4
+
+We can also predict directly to a `pandas` DataFrame containing a timeseries of predictions at off-grid locations
+by passing a `numpy` array of target locations to the `X_t` argument of `.predict`:
+
+.. code-block:: python
+
+    # Predict at two off-grid locations for each day in December 2014
+    test_tasks = task_loader(pd.date_range("2014-12-01", "2014-12-31"), 0.1)
+    mean_df, std_df = model.predict(test_tasks, X_t=np.array([[50, 280], [40, 250]]).T)
+
+.. code-block:: python
+
+    >>> mean_df
+                                 air
+    time       lat  lon
+    2014-12-01 50.0 280.0 260.183056
+               40.0 250.0 277.947373
+    2014-12-02 50.0 280.0  261.08943
+               40.0 250.0 278.219599
+    2014-12-03 50.0 280.0 257.128185
+               40.0 250.0 278.444229
+
+This quickstart example is also `available as a Jupyter notebook `_ with added visualisations.
diff --git a/html/_sources/index.rst.txt b/html/_sources/index.rst.txt
new file mode 100644
index 00000000..d0001c09
--- /dev/null
+++ b/html/_sources/index.rst.txt
@@ -0,0 +1,48 @@
+Welcome to DeepSensor's documentation!
+======================================
+
+DeepSensor is a Python package and open-source project for modelling environmental data with neural processes.
+
+DeepSensor aims to faithfully match the flexibility of neural processes with a simple and intuitive interface. DeepSensor wraps around the powerful `neuralprocesses package `_ for the core modelling functionality, while allowing users to stay in the familiar `xarray `_ and `pandas `_ world and avoid the murky depths of tensors!
+
+DeepSensor is also compatible with both `PyTorch `_ and `TensorFlow `_ for its machine learning abilities, thanks to the `backends package `_. Simply ``import deepsensor.torch`` or ``import deepsensor.tensorflow`` to choose between them!
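+
+For example, a minimal sketch of selecting the PyTorch backend (assuming ``torch`` is installed):
+
+.. code-block:: python
+
+    import deepsensor.torch  # selects the PyTorch backend; run before using any models
+
+    from deepsensor.model.convnp import ConvNP  # models can now be used as usual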
+.. note::
+
+   This package is currently undergoing active development. If you are interested in using DeepSensor in production, please :doc:`get in touch `.
+
+Citing DeepSensor
+-----------------
+
+If you use DeepSensor in your research, please consider citing the repository. You can generate a BibTeX entry by clicking the 'Cite this repository' button on the top right of this page.
+
+Quick installation
+------------------
+
+The easiest way to install the DeepSensor package is via pip, together with the backend of your choice. In this example we use the PyTorch backend:
+
+.. code-block:: bash
+
+   $ pip install deepsensor torch
+
+To install the TensorFlow backend instead, simply replace ``torch`` with ``tensorflow`` in the above command.
+
+
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   getting-started/index
+   community/index
+   contact
+   reference/index
+
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/html/_sources/reference/active_learning/acquisition_fns.rst.txt b/html/_sources/reference/active_learning/acquisition_fns.rst.txt
new file mode 100644
index 00000000..4261b490
--- /dev/null
+++ b/html/_sources/reference/active_learning/acquisition_fns.rst.txt
@@ -0,0 +1,83 @@
+``deepsensor.active_learning.acquisition_fns``
+==============================================
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.AcquisitionFunction
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.AcquisitionFunctionOracle
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.AcquisitionFunctionParallel
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.MeanStddev
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.MeanVariance
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.pNormStddev
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.JointEntropy
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleMAE
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleRMSE
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleMarginalNLL
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.OracleJointNLL
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.Random
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.ContextDist
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.Stddev
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
+.. autoclass:: deepsensor.active_learning.acquisition_fns.ExpectedImprovement
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
+
diff --git a/html/_sources/reference/active_learning/algorithms.rst.txt b/html/_sources/reference/active_learning/algorithms.rst.txt
new file mode 100644
index 00000000..a9ae8880
--- /dev/null
+++ b/html/_sources/reference/active_learning/algorithms.rst.txt
@@ -0,0 +1,7 @@
+``deepsensor.active_learning.algorithms``
+=========================================
+
+.. autoclass:: deepsensor.active_learning.algorithms.GreedyAlgorithm
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
diff --git a/html/_sources/reference/active_learning/index.rst.txt b/html/_sources/reference/active_learning/index.rst.txt
new file mode 100644
index 00000000..37bb249b
--- /dev/null
+++ b/html/_sources/reference/active_learning/index.rst.txt
@@ -0,0 +1,9 @@
+``active_learning`` module
+==========================
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Table of contents:
+
+   acquisition_fns
+   algorithms
\ No newline at end of file
diff --git a/html/_sources/reference/data/index.rst.txt b/html/_sources/reference/data/index.rst.txt
new file mode 100644
index 00000000..1f534aa7
--- /dev/null
+++ b/html/_sources/reference/data/index.rst.txt
@@ -0,0 +1,11 @@
+``data`` module
+===============
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Table of contents:
+
+   loader
+   processor
+   task
+   utils
\ No newline at end of file
diff --git a/html/_sources/reference/data/loader.rst.txt b/html/_sources/reference/data/loader.rst.txt
new file mode 100644
index 00000000..22cedb2d
--- /dev/null
+++ b/html/_sources/reference/data/loader.rst.txt
@@ -0,0 +1,7 @@
+``deepsensor.data.loader``
+==========================
+
+.. autoclass:: deepsensor.data.loader.TaskLoader
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__
diff --git a/html/_sources/reference/data/processor.rst.txt b/html/_sources/reference/data/processor.rst.txt
new file mode 100644
index 00000000..a4f2c9a9
--- /dev/null
+++ b/html/_sources/reference/data/processor.rst.txt
@@ -0,0 +1,15 @@
+``deepsensor.data.processor``
+=============================
+
+.. autoclass:: deepsensor.data.processor.DataProcessor
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__, __str__
+
+.. autofunction:: deepsensor.data.processor.xarray_to_coord_array_normalised
+
+.. autofunction:: deepsensor.data.processor.mask_coord_array_normalised
+
+.. autofunction:: deepsensor.data.processor.da1_da2_same_grid
+
+.. autofunction:: deepsensor.data.processor.interp_da1_to_da2
diff --git a/html/_sources/reference/data/task.rst.txt b/html/_sources/reference/data/task.rst.txt
new file mode 100644
index 00000000..07b9303f
--- /dev/null
+++ b/html/_sources/reference/data/task.rst.txt
@@ -0,0 +1,15 @@
+``deepsensor.data.task``
+========================
+
+.. autoclass:: deepsensor.data.task.Task
+   :members:
+   :undoc-members:
+   :special-members: __init__, __call__, __str__, __repr__
+
+.. autofunction:: deepsensor.data.task.append_obs_to_task
+
+.. autofunction:: deepsensor.data.task.flatten_X
+
+.. autofunction:: deepsensor.data.task.flatten_Y
+
+.. autofunction:: deepsensor.data.task.flatten_gridded_data_in_task
\ No newline at end of file
diff --git a/html/_sources/reference/data/utils.rst.txt b/html/_sources/reference/data/utils.rst.txt
new file mode 100644
index 00000000..0c3d41a1
--- /dev/null
+++ b/html/_sources/reference/data/utils.rst.txt
@@ -0,0 +1,10 @@
+``deepsensor.data.utils``
+=========================
+
+.. autofunction:: deepsensor.data.utils.construct_x1x2_ds
+
+.. autofunction:: deepsensor.data.utils.construct_circ_time_ds
+
+.. autofunction:: deepsensor.data.utils.compute_xarray_data_resolution
+
+.. autofunction:: deepsensor.data.utils.compute_pandas_data_resolution
diff --git a/html/_sources/reference/index.rst.txt b/html/_sources/reference/index.rst.txt
new file mode 100644
index 00000000..7d910b3e
--- /dev/null
+++ b/html/_sources/reference/index.rst.txt
@@ -0,0 +1,17 @@
+API Reference
+=============
+
+This part of the documentation contains the API reference for the package. It is structured by module, and each module's page documents its classes, functions, and attributes. The API is designed to be as simple as possible while still allowing plenty of flexibility, and it is divided into several submodules, described in the following sections.
+
+
+..
toctree:: + :maxdepth: 2 + :caption: Table of contents: + + active_learning/index + data/index + model/index + tensorflow/index + torch/index + train/index + plot diff --git a/html/_sources/reference/model/convnp.rst.txt b/html/_sources/reference/model/convnp.rst.txt new file mode 100644 index 00000000..5f05acdc --- /dev/null +++ b/html/_sources/reference/model/convnp.rst.txt @@ -0,0 +1,9 @@ +``deepsensor.model.convnp`` +=========================== + +.. autoclass:: deepsensor.model.convnp.ConvNP + :members: + :undoc-members: + :special-members: __init__, __call__ + +.. autofunction:: deepsensor.model.convnp.concat_tasks diff --git a/html/_sources/reference/model/defaults.rst.txt b/html/_sources/reference/model/defaults.rst.txt new file mode 100644 index 00000000..7db5026e --- /dev/null +++ b/html/_sources/reference/model/defaults.rst.txt @@ -0,0 +1,8 @@ +``deepsensor.model.defaults`` +============================= + +.. autofunction:: deepsensor.model.defaults.gen_ppu + +.. autofunction:: deepsensor.model.defaults.gen_decoder_scale + +.. autofunction:: deepsensor.model.defaults.gen_encoder_scales diff --git a/html/_sources/reference/model/index.rst.txt b/html/_sources/reference/model/index.rst.txt new file mode 100644 index 00000000..53820582 --- /dev/null +++ b/html/_sources/reference/model/index.rst.txt @@ -0,0 +1,11 @@ +``model`` module +================ + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + convnp + defaults + model + nps \ No newline at end of file diff --git a/html/_sources/reference/model/model.rst.txt b/html/_sources/reference/model/model.rst.txt new file mode 100644 index 00000000..4b339e07 --- /dev/null +++ b/html/_sources/reference/model/model.rst.txt @@ -0,0 +1,20 @@ +``deepsensor.model.model`` +========================== + +.. autoclass:: deepsensor.model.model.DeepSensorModel + :members: + :undoc-members: + :show-inheritance: + :inherited-members: + :special-members: __init__ + +.. autoclass:: deepsensor.model.model.ProbabilisticModel + :members: + :undoc-members: + :show-inheritance: + :inherited-members: + :special-members: __init__ + +.. autofunction:: deepsensor.model.model.create_empty_spatiotemporal_xarray + +.. autofunction:: deepsensor.model.model.increase_spatial_resolution diff --git a/html/_sources/reference/model/nps.rst.txt b/html/_sources/reference/model/nps.rst.txt new file mode 100644 index 00000000..cc0daec0 --- /dev/null +++ b/html/_sources/reference/model/nps.rst.txt @@ -0,0 +1,12 @@ +``deepsensor.model.nps`` +======================== + +.. autofunction:: deepsensor.model.nps.convert_task_to_nps_args + +.. autofunction:: deepsensor.model.nps.run_nps_model + +.. autofunction:: deepsensor.model.nps.run_nps_model_ar + +.. autofunction:: deepsensor.model.nps.construct_neural_process + +.. autofunction:: deepsensor.model.nps.compute_encoding_tensor diff --git a/html/_sources/reference/plot.rst.txt b/html/_sources/reference/plot.rst.txt new file mode 100644 index 00000000..483af477 --- /dev/null +++ b/html/_sources/reference/plot.rst.txt @@ -0,0 +1,16 @@ +``deepsensor.plot`` module +========================== + +.. autofunction:: deepsensor.plot.acquisition_fn + +.. autofunction:: deepsensor.plot.context_encoding + +.. autofunction:: deepsensor.plot.feature_maps + +.. autofunction:: deepsensor.plot.offgrid_context + +.. autofunction:: deepsensor.plot.offgrid_context_observations + +.. autofunction:: deepsensor.plot.placements + +.. 
autofunction:: deepsensor.plot.receptive_field diff --git a/html/_sources/reference/tensorflow/index.rst.txt b/html/_sources/reference/tensorflow/index.rst.txt new file mode 100644 index 00000000..82aa5de8 --- /dev/null +++ b/html/_sources/reference/tensorflow/index.rst.txt @@ -0,0 +1,4 @@ +``tensorflow`` module +===================== + +... \ No newline at end of file diff --git a/html/_sources/reference/torch/index.rst.txt b/html/_sources/reference/torch/index.rst.txt new file mode 100644 index 00000000..e3ce8418 --- /dev/null +++ b/html/_sources/reference/torch/index.rst.txt @@ -0,0 +1,4 @@ +``torch`` module +================ + +... \ No newline at end of file diff --git a/html/_sources/reference/train/index.rst.txt b/html/_sources/reference/train/index.rst.txt new file mode 100644 index 00000000..17f1e345 --- /dev/null +++ b/html/_sources/reference/train/index.rst.txt @@ -0,0 +1,8 @@ +``train`` module +================ + +.. toctree:: + :maxdepth: 2 + :caption: Table of contents: + + train \ No newline at end of file diff --git a/html/_sources/reference/train/train.rst.txt b/html/_sources/reference/train/train.rst.txt new file mode 100644 index 00000000..1fd49b6c --- /dev/null +++ b/html/_sources/reference/train/train.rst.txt @@ -0,0 +1,6 @@ +``deepsensor.train.train`` +========================== + +.. autofunction:: deepsensor.train.train.set_gpu_default_device + +.. autofunction:: deepsensor.train.train.train_epoch diff --git a/html/_static/_sphinx_javascript_frameworks_compat.js b/html/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000..81415803 --- /dev/null +++ b/html/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,123 @@ +/* Compatability shim for jQuery and underscores.js. + * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/html/_static/basic.css b/html/_static/basic.css new file mode 100644 index 00000000..30fee9d0 --- /dev/null +++ b/html/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 
8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + 
+dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} 
+ +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/html/_static/css/badge_only.css b/html/_static/css/badge_only.css new file mode 100644 index 00000000..c718cee4 --- /dev/null +++ b/html/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd 
a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/html/_static/css/fonts/Roboto-Slab-Bold.woff b/html/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/html/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/html/_static/css/fonts/Roboto-Slab-Bold.woff2 b/html/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/html/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/html/_static/css/fonts/Roboto-Slab-Regular.woff b/html/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/html/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/html/_static/css/fonts/Roboto-Slab-Regular.woff2 b/html/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/html/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/html/_static/css/fonts/fontawesome-webfont.eot b/html/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/html/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/html/_static/css/fonts/fontawesome-webfont.svg b/html/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/html/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. 
+ [SVG glyph data omitted] diff --git a/html/_static/css/fonts/fontawesome-webfont.ttf b/html/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/html/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/html/_static/css/fonts/fontawesome-webfont.woff b/html/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/html/_static/css/fonts/fontawesome-webfont.woff differ diff --git a/html/_static/css/fonts/fontawesome-webfont.woff2 b/html/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/html/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/html/_static/css/fonts/lato-bold-italic.woff b/html/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/html/_static/css/fonts/lato-bold-italic.woff differ diff --git a/html/_static/css/fonts/lato-bold-italic.woff2 b/html/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/html/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/html/_static/css/fonts/lato-bold.woff b/html/_static/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/html/_static/css/fonts/lato-bold.woff differ diff --git a/html/_static/css/fonts/lato-bold.woff2 b/html/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/html/_static/css/fonts/lato-bold.woff2 differ diff --git a/html/_static/css/fonts/lato-normal-italic.woff b/html/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/html/_static/css/fonts/lato-normal-italic.woff differ diff --git a/html/_static/css/fonts/lato-normal-italic.woff2 b/html/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/html/_static/css/fonts/lato-normal-italic.woff2 differ diff --git 
a/html/_static/css/fonts/lato-normal.woff b/html/_static/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/html/_static/css/fonts/lato-normal.woff differ diff --git a/html/_static/css/fonts/lato-normal.woff2 b/html/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/html/_static/css/fonts/lato-normal.woff2 differ diff --git a/html/_static/css/theme.css b/html/_static/css/theme.css new file mode 100644 index 00000000..19a446a0 --- /dev/null +++ b/html/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content 
.toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! 
+ * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 
.fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success 
.wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa
-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:
before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-
vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-b
ell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-register
ed:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{c
ontent:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li 
button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a .admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn 
.headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn 
button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content .fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download .btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption 
.btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content 
.wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content 
.wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content 
.wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container 
li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 
0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 .3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves 
input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group .wy-form-halves input[type=url],.wy-control-group .wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border 
.3s linear}input[type=datetime-local]{padding:.34375em .625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch 
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions 
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/html/_static/doctools.js b/html/_static/doctools.js new file mode 100644 index 00000000..d06a71d7 --- /dev/null +++ b/html/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git 
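// Example: how the Documentation i18n hooks above are meant to be fed.
// A minimal sketch, assuming a hypothetical French catalog in the
// {messages, plural_expr, locale} shape that addTranslations() consumes;
// none of these strings ship with this build.
Documentation.addTranslations({
  messages: {
    "Search Results": "Résultats de la recherche",
    "Searching": "Recherche en cours",
  },
  plural_expr: "n > 1 ? 1 : 0", // French-style rule, compiled via new Function("n", ...)
  locale: "fr",
});
console.log(Documentation.gettext("Search Results"));    // "Résultats de la recherche"
console.log(Documentation.gettext("Not in catalog"));    // unknown key: falls back to the input string
console.log(Documentation.ngettext("page", "pages", 2)); // "pages": no tuple entry, so English plural rule
// Related: with NAVIGATION_WITH_KEYS set to false in documentation_options.js
// (the next file in this diff), initOnKeyListeners() still installs because
// ENABLE_SEARCH_SHORTCUTS is true, but only the "/" search shortcut is active.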
a/html/_static/documentation_options.js b/html/_static/documentation_options.js new file mode 100644 index 00000000..808a4ac4 --- /dev/null +++ b/html/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '0.2.5', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/html/_static/file.png b/html/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/html/_static/file.png differ diff --git a/html/_static/jquery.js b/html/_static/jquery.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/html/_static/jquery.js @@ -0,0 +1,2 @@ +/*! jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function 
se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return 
t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return 
e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof 
a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/html/_static/js/html5shiv.min.js b/html/_static/js/html5shiv.min.js new file mode 100644 index 00000000..cd1c674f --- /dev/null +++ b/html/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/html/_static/js/theme.js b/html/_static/js/theme.js new file mode 100644 index 00000000..1fddb6ee --- /dev/null +++ b/html/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/html/_static/minus.png b/html/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/html/_static/minus.png differ diff --git a/html/_static/plus.png b/html/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/html/_static/plus.png differ diff --git a/html/_static/pygments.css b/html/_static/pygments.css new file mode 100644 index 00000000..84ab3030 --- /dev/null +++ b/html/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: 
#000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #f8f8f8; } +.highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #008000; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #9C6500 } /* Comment.Preproc */ +.highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #E40000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #008400 } /* Generic.Inserted */ +.highlight .go { color: #717171 } /* Generic.Output */ +.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #008000 } /* Keyword.Pseudo */ +.highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #B00040 } /* Keyword.Type */ +.highlight .m { color: #666666 } /* Literal.Number */ +.highlight .s { color: #BA2121 } /* Literal.String */ +.highlight .na { color: #687822 } /* Name.Attribute */ +.highlight .nb { color: #008000 } /* Name.Builtin */ +.highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ +.highlight .no { color: #880000 } /* Name.Constant */ +.highlight .nd { color: #AA22FF } /* Name.Decorator */ +.highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #CB3F38; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #0000FF } /* Name.Function */ +.highlight .nl { color: #767600 } /* Name.Label */ +.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #19177C } /* Name.Variable */ +.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #666666 } /* Literal.Number.Bin */ +.highlight .mf { color: #666666 } /* Literal.Number.Float */ +.highlight .mh { color: #666666 } /* Literal.Number.Hex */ +.highlight .mi { color: #666666 } /* Literal.Number.Integer */ +.highlight .mo { color: #666666 } /* Literal.Number.Oct */ +.highlight .sa { color: #BA2121 } /* Literal.String.Affix */ +.highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ +.highlight .sc { color: #BA2121 } /* Literal.String.Char */ +.highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ +.highlight .sd { 
color: #BA2121; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #BA2121 } /* Literal.String.Double */ +.highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ +.highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ +.highlight .sx { color: #008000 } /* Literal.String.Other */ +.highlight .sr { color: #A45A77 } /* Literal.String.Regex */ +.highlight .s1 { color: #BA2121 } /* Literal.String.Single */ +.highlight .ss { color: #19177C } /* Literal.String.Symbol */ +.highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #0000FF } /* Name.Function.Magic */ +.highlight .vc { color: #19177C } /* Name.Variable.Class */ +.highlight .vg { color: #19177C } /* Name.Variable.Global */ +.highlight .vi { color: #19177C } /* Name.Variable.Instance */ +.highlight .vm { color: #19177C } /* Name.Variable.Magic */ +.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/html/_static/searchtools.js b/html/_static/searchtools.js new file mode 100644 index 00000000..7918c3fa --- /dev/null +++ b/html/_static/searchtools.js @@ -0,0 +1,574 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. 
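// Example: the Scorer block above is deliberately overridable; searchtools.js
// only defines it "if (typeof Scorer === 'undefined')". A hedged sketch of a
// page-level override loaded before searchtools.js. The "api/" docname prefix
// is an assumption for illustration, not something this build defines.
var Scorer = {
  // result = [docname, title, anchor, descr, score, filename]
  score: (result) => {
    const [docname, , , , score] = result;
    return docname.startsWith("api/") ? score + 10 : score; // boost API pages
  },
  // keep the default weights so the rest of the ranking is unchanged
  objNameMatch: 11,
  objPartialMatch: 6,
  objPrio: { 0: 15, 1: 5, 2: -5 },
  objPrioDefault: 0,
  title: 15,
  partialTitle: 7,
  term: 5,
  partialTerm: 2,
};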
+ * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
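// Example: what the term preparation in Search.query() below actually yields.
// A hedged sketch using splitQuery and the Porter stemmer shown earlier in
// this diff (Stemmer and stopwords come from language_data.js); the outputs
// assume the English stemming rules.
const demoStemmer = new Stemmer();
console.log(demoStemmer.stemWord("running")); // "run": step 1b strips "ing", then trims the double consonant
console.log(demoStemmer.stemWord("ponies"));  // "poni": step 1a rewrites "ies" to "i"
console.log(splitQuery("active-learning loop"));
// -> ["active", "learning", "loop"]: splits on runs of non letter/number/underscore
// In query(), a leading "-" routes a stemmed term to excludedTerms rather than searchTerms.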
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && 
!terms[word])
+            arr.push({ files: terms[term], score: Scorer.partialTerm });
+        });
+        Object.keys(titleTerms).forEach((term) => {
+          if (term.match(escapedWord) && !titleTerms[word])
+            arr.push({ files: titleTerms[term], score: Scorer.partialTitle });
+        });
+      }
+
+      // no match but word was a required one
+      if (arr.every((record) => record.files === undefined)) return;
+
+      // found search word in contents
+      arr.forEach((record) => {
+        if (record.files === undefined) return;
+
+        let recordFiles = record.files;
+        if (recordFiles.length === undefined) recordFiles = [recordFiles];
+        files.push(...recordFiles);
+
+        // set score for the word in each file
+        recordFiles.forEach((file) => {
+          if (!scoreMap.has(file)) scoreMap.set(file, {});
+          scoreMap.get(file)[word] = record.score;
+        });
+      });
+
+      // create the mapping
+      files.forEach((file) => {
+        if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1)
+          fileMap.get(file).push(word);
+        else fileMap.set(file, [word]);
+      });
+    });
+
+    // now check if the files don't contain excluded terms
+    const results = [];
+    for (const [file, wordList] of fileMap) {
+      // check if all requirements are matched
+
+      // as search terms with length < 3 are discarded
+      const filteredTermCount = [...searchTerms].filter(
+        (term) => term.length > 2
+      ).length;
+      if (
+        wordList.length !== searchTerms.size &&
+        wordList.length !== filteredTermCount
+      )
+        continue;
+
+      // ensure that none of the excluded terms is in the search result;
+      // skip just this file (a `break` here would wrongly abort the whole loop)
+      if (
+        [...excludedTerms].some(
+          (term) =>
+            terms[term] === file ||
+            titleTerms[term] === file ||
+            (terms[term] || []).includes(file) ||
+            (titleTerms[term] || []).includes(file)
+        )
+      )
+        continue;
+
+      // select one (max) score for the file.
+      const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w]));
+      // add result to the result list
+      results.push([
+        docNames[file],
+        titles[file],
+        "",
+        null,
+        score,
+        filenames[file],
+      ]);
+    }
+    return results;
+  },
+
+  /**
+   * helper function to return a node containing the
+   * search summary for a given text. keywords is a list
+   * of stemmed words.
+   */
+  makeSearchSummary: (htmlText, keywords) => {
+    const text = Search.htmlToText(htmlText);
+    if (text === "") return null;
+
+    const textLower = text.toLowerCase();
+    const actualStartPosition = [...keywords]
+      .map((k) => textLower.indexOf(k.toLowerCase()))
+      .filter((i) => i > -1)
+      .slice(-1)[0];
+    const startWithContext = Math.max(actualStartPosition - 120, 0);
+
+    const top = startWithContext === 0 ? "" : "...";
+    const tail = startWithContext + 240 < text.length ? "..." : "";
+
+    let summary = document.createElement("p");
+    summary.classList.add("context");
+    summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
+
+    return summary;
+  },
+};
+
+_ready(Search.init);
diff --git a/html/_static/sphinx_highlight.js b/html/_static/sphinx_highlight.js
new file mode 100644
index 00000000..8a96c69a
--- /dev/null
+++ b/html/_static/sphinx_highlight.js
@@ -0,0 +1,154 @@
+/* Highlighting utilities for Sphinx HTML documentation. */
+"use strict";
+
+const SPHINX_HIGHLIGHT_ENABLED = true
+
+/**
+ * highlight a given string on a node by wrapping it in
+ * span elements with the given class name.
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? 
divBody[0] : document.querySelector("body");
+    window.setTimeout(() => {
+      terms.forEach((term) => _highlightText(body, term, "highlighted"));
+    }, 10);
+
+    const searchBox = document.getElementById("searchbox");
+    if (searchBox === null) return;
+    searchBox.appendChild(
+      document
+        .createRange()
+        .createContextualFragment(
+          '<p class="highlight-link">' +
+            '<a href="javascript:SphinxHighlight.hideSearchWords()">' +
+            _("Hide Search Matches") +
+            "</a></p>"
+        )
+    );
+  },
+
+  /**
+   * helper function to hide the search marks again
+   */
+  hideSearchWords: () => {
+    document
+      .querySelectorAll("#searchbox .highlight-link")
+      .forEach((el) => el.remove());
+    document
+      .querySelectorAll("span.highlighted")
+      .forEach((el) => el.classList.remove("highlighted"));
+    localStorage.removeItem("sphinx_highlight_terms")
+  },
+
+  initEscapeListener: () => {
+    // only install a listener if it is really needed
+    if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;
+
+    document.addEventListener("keydown", (event) => {
+      // bail for input elements
+      if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+      // bail with special keys
+      if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
+      if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
+        SphinxHighlight.hideSearchWords();
+        event.preventDefault();
+      }
+    });
+  },
+};
+
+_ready(() => {
+  /* Do not call highlightSearchWords() when we are on the search page.
+   * It will highlight words from the *previous* search query.
+   */
+  if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords();
+  SphinxHighlight.initEscapeListener();
+});
diff --git a/html/community/code-of-conduct.html b/html/community/code-of-conduct.html
new file mode 100644
index 00000000..03f27e25
--- /dev/null
+++ b/html/community/code-of-conduct.html
@@ -0,0 +1,119 @@
+ + + + + + + Developer Code of Conduct — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Developer Code of Conduct

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/community/contributing.html b/html/community/contributing.html new file mode 100644 index 00000000..066ea491 --- /dev/null +++ b/html/community/contributing.html @@ -0,0 +1,119 @@ + + + + + + + Contributing to DeepSensor — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Contributing to DeepSensor

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/community/faq.html b/html/community/faq.html new file mode 100644 index 00000000..8f1d4bca --- /dev/null +++ b/html/community/faq.html @@ -0,0 +1,174 @@ + + + + + + + Community FAQ — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

Community FAQ

+

This FAQ aims to answer common questions about the DeepSensor library. It is our way to streamline the onboarding process and clarify expectations.

+
+

Note

+

If you have a question that is not answered here, please open an issue or submit a pull request.

+
+
+

Questions

+

Q: What is the purpose of this project?

+

Answer: DeepSensor is an open-source Python package for modelling environmental data with neural processes. It aims to match the flexibility of neural processes with a simple and intuitive interface, and is intended for researchers and practitioners working with environmental data in xarray and pandas.

+

+

Q: How can I contribute?

+

Answer: There are many ways to contribute, from writing code and fixing bugs to improving documentation or translating content. Check our contribution guide for detailed steps.

+

+

Q: Where can I find the contribution guidelines?

+

Answer: You can find our contribution guidelines in the CONTRIBUTING.md file in the root directory of the repository, or in the contribution guide.

+

+

Q: Do I need to sign a Contributor License Agreement (CLA)?

+

Answer: At the current time, we do not require a CLA from our contributors.

+

+

Q: How do I report a bug?

+

Answer: Please submit an issue in our GitHub repository. Make sure to provide detailed information, including steps to reproduce the bug and the expected outcome.

+

+

Q: How do I request a new feature?

+

Answer: Open a new issue on our GitHub repository and label it as a feature request. Describe the feature in detail and its potential benefits.

+

+

Q: How do I set up the development environment?

+

Answer: Follow the instructions in our developer documentation. If you run into issues, ask in our community chat (on Slack) or contact the core group of maintainers directly.

+

+

Q: Do you have a code of conduct?

+

Answer: Yes, we value a respectful and inclusive community. Please read our Code of Conduct before contributing.

+

+

Q: How can I get in touch with other contributors or maintainers?

+

Answer: Join our Slack team to stay in touch with other contributors and maintainers. We also have a standing community meeting, which is by invite; get in touch with the core group of maintainers to receive one.

+

+

Q: Can I contribute even if I’m not a coder?

+

Answer: Absolutely! Contributions can be made in the form of documentation, design, testing, and more. Everyone’s skills are valuable. Join our Slack discussion to learn more.

+

+

Q: How do I claim an issue to work on?

+

Answer: Comment on the issue to express your interest in helping out. If the issue is unassigned, a maintainer will likely assign it to you.

+

+

Q: What’s the process for proposing a significant change?

+

Answer: For significant changes, it’s a good practice to first open a discussion or issue to gather feedback. Once there’s a consensus, you can proceed with a pull request.

+

+

Q: How can I get my pull request (PR) merged?

+

Answer: Ensure your PR follows the contribution guidelines, passes all tests, and has been reviewed by at least one maintainer. Address any feedback provided.

+

+

Q: What’s the project’s release cycle?

+

Answer: We release updates in a rolling manner, following our roadmap planning. Critical bug fixes might be released as needed.

+

+

Q: How is credit given to contributors?

+

Answer: Contributors are acknowledged in our release notes, and their contributions are forever recorded in the project’s history.

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/community/index.html b/html/community/index.html new file mode 100644 index 00000000..a7a1d0bd --- /dev/null +++ b/html/community/index.html @@ -0,0 +1,130 @@ + + + + + + + DeepSensor’s user and contributor community — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

DeepSensor’s user and contributor community

+

The DeepSensor community is a group of users and contributors interested in the development of DeepSensor. It is open to anyone interested in the project and is a place to ask questions, discuss ideas, and share your work.

+

If you are interested in joining the community, please join our Slack channel at https://deepsensor.slack.com. You can request an invitation to the Slack channel at https://ai4environment.slack.com/signup.

+

We welcome contributions from the community. If you are interested in contributing to DeepSensor, please read the Contributing to DeepSensor guide.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/community/roadmap.html b/html/community/roadmap.html new file mode 100644 index 00000000..99762592 --- /dev/null +++ b/html/community/roadmap.html @@ -0,0 +1,119 @@ + + + + + + + DeepSensor Roadmap — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

DeepSensor Roadmap

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/contact.html b/html/contact.html new file mode 100644 index 00000000..48fd058d --- /dev/null +++ b/html/contact.html @@ -0,0 +1,113 @@ + + + + + + + Contact the developers — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Contact the developers

+

tomand@bas.ac.uk

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/genindex.html b/html/genindex.html new file mode 100644 index 00000000..16b50ec2 --- /dev/null +++ b/html/genindex.html @@ -0,0 +1,595 @@ + + + + + + Index — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Index

+ +
+ _ + | A + | C + | D + | E + | F + | G + | I + | J + | L + | M + | O + | P + | R + | S + | T + | U + | V + | X + +
+

_

+ + + +
+ +

A

+ + + +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

I

+ + + +
+ +

J

+ + + +
+ +

L

+ + + +
+ +

M

+ + + +
+ +

O

+ + + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + +
+ +

V

+ + +
+ +

X

+ + +
+ + + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/getting-started/extending.html b/html/getting-started/extending.html new file mode 100644 index 00000000..7d0be597 --- /dev/null +++ b/html/getting-started/extending.html @@ -0,0 +1,144 @@ + + + + + + + Extending DeepSensor with new models — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Extending DeepSensor with new models

+

To extend DeepSensor with a new model, simply create a new class that inherits from deepsensor.model.DeepSensorModel and implement the low-level prediction methods defined in deepsensor.model.ProbabilisticModel, such as .mean and .stddev.

+

In this example, we’ll create a new model called ExampleModel:

+
# Imports needed for this example (DeepSensorModel lives in deepsensor.model,
# as noted above; Task, TaskLoader and DataProcessor are documented in the
# API reference):
import numpy as np

from deepsensor.data.loader import TaskLoader
from deepsensor.data.processor import DataProcessor
from deepsensor.data.task import Task
from deepsensor.model import DeepSensorModel


class ExampleModel(DeepSensorModel):
+    """
+    A very naive model that predicts the mean of the first context set
+    with a fixed stddev.
+    """
+
+    def __init__(self, data_processor: DataProcessor, task_loader: TaskLoader):
+        # Initiate the parent class (DeepSensorModel) with the
+        # provided data processor and task loader:
+        super().__init__(data_processor, task_loader)
+
+    def mean(self, task: Task):
+        """Compute mean at target locations"""
+        return np.mean(task["Y_c"][0])
+
+    def stddev(self, task: Task):
+        """Compute stddev at target locations"""
+        return 0.1
+
+    ...
+
+
+

After creating ExampleModel in this way, it can be used in the same way as the built-in ConvNP model.

+
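For instance, a minimal usage sketch (assuming data_processor, task_loader, a Task object task, and unnormalised target locations X_t are already defined, as in the Quickstart tutorial):

model = ExampleModel(data_processor, task_loader)
mean_ds, std_ds = model.predict(task, X_t=X_t)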

See this Jupyter notebook for more details.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/getting-started/index.html b/html/getting-started/index.html new file mode 100644 index 00000000..adf3feb3 --- /dev/null +++ b/html/getting-started/index.html @@ -0,0 +1,133 @@ + + + + + + + Getting started — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Getting started

+

This part of the documentation shows you how to get started with DeepSensor: how to install it, how to extend it with your own models, and some tutorials to get you up and running.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/getting-started/installation.html b/html/getting-started/installation.html new file mode 100644 index 00000000..8bdfa892 --- /dev/null +++ b/html/getting-started/installation.html @@ -0,0 +1,184 @@ + + + + + + + Installation instructions — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Installation instructions

+

DeepSensor is a Python package that can be installed in a number of ways. In this section we will describe the two main ways to install the package.

+
+

Install from PyPI

+

If you want to use the latest stable release of DeepSensor and do not want/need access to the worked examples or the package’s source code, we recommend installing from PyPI.

+

This is the easiest way to install DeepSensor.

+
    +
  • Install deepsensor:

    +
    pip install deepsensor
    +
    +
    +
  • +
  • Install the backend of your choice:

    +
      +
    • Install tensorflow:

      +
      pip install tensorflow
      +
      +
      +
    • +
    • Install pytorch:

      +
      pip install torch
      +
      +
      +
    • +
    +
  • +
+
+
+

Install from source

+
+

Note

+

You will want to use this method if you intend to contribute to the source code of DeepSensor.

+
+

If you want to keep up with the latest changes to DeepSensor, or want/need easy access to the worked examples or the package’s source code, we recommend installing from source.

+

This method will create a DeepSensor directory on your machine which will contain all the source code, docs and worked examples.

+
    +
  • Clone the repository:

    +
    git clone
    +
    +
    +
  • +
  • Install deepsensor:

    +
pip install -v -e .
    +
    +
    +
  • +
  • Install the backend of your choice:

    +
      +
    • Install tensorflow:

      +
      pip install tensorflow
      +
      +
      +
    • +
    • Install pytorch:

      +
      pip install torch
      +
      +
      +
    • +
    +
  • +
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/getting-started/tutorials/index.html b/html/getting-started/tutorials/index.html new file mode 100644 index 00000000..178da6ff --- /dev/null +++ b/html/getting-started/tutorials/index.html @@ -0,0 +1,127 @@ + + + + + + + Tutorials — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Tutorials

+
+

Tutorials:

+ +
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/getting-started/tutorials/quickstart.html b/html/getting-started/tutorials/quickstart.html new file mode 100644 index 00000000..bf159b5f --- /dev/null +++ b/html/getting-started/tutorials/quickstart.html @@ -0,0 +1,194 @@ + + + + + + + Tutorial: Quickstart — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Tutorial: Quickstart

+

Here we will demonstrate a simple example of training a convolutional conditional neural process (ConvCNP) to spatially interpolate ERA5 data.

+

We can go from imports to predictions with a trained model in less than 30 lines of code!

+
import deepsensor.torch
+from deepsensor.data.loader import TaskLoader
+from deepsensor.data.processor import DataProcessor
+from deepsensor.model.convnp import ConvNP
+from deepsensor.train.train import train_epoch
+
+import xarray as xr
+import pandas as pd
+import numpy as np
+
+# Load raw data
+ds_raw = xr.tutorial.open_dataset("air_temperature")
+
+# Normalise data
+data_processor = DataProcessor(x1_name="lat", x1_map=(15, 75), x2_name="lon", x2_map=(200, 330))
+ds = data_processor(ds_raw)
+
+# Set up task loader
+task_loader = TaskLoader(context=ds, target=ds)
+
+# Set up model
+model = ConvNP(data_processor, task_loader)
+
+# Generate training tasks with up to 10% of grid cells passed as context and all grid cells
+# passed as targets
+train_tasks = []
+for date in pd.date_range("2013-01-01", "2014-11-30")[::7]:
+    task = task_loader(date, context_sampling=np.random.uniform(0.0, 0.1), target_sampling="all")
+    train_tasks.append(task)
+
+# Train model
+for epoch in range(10):
+    train_epoch(model, train_tasks, progress_bar=True)
+
+# Predict on new task with 10% of context data and a dense grid of target points
+test_task = task_loader("2014-12-31", 0.1)
+mean_ds, std_ds = model.predict(test_task, X_t=ds_raw)
+
+
+

After training, the model can predict directly to xarray in your data’s original units and coordinate system:

+
>>> mean_ds
+<xarray.Dataset>
+Dimensions:  (time: 1, lat: 25, lon: 53)
+Coordinates:
+* time     (time) datetime64[ns] 2014-12-31
+* lat      (lat) float32 75.0 72.5 70.0 67.5 65.0 ... 25.0 22.5 20.0 17.5 15.0
+* lon      (lon) float32 200.0 202.5 205.0 207.5 ... 322.5 325.0 327.5 330.0
+Data variables:
+    air      (time, lat, lon) float32 246.7 244.4 245.5 ... 290.2 289.8 289.4
+
+
+

We can also predict directly to pandas containing a timeseries of predictions at off-grid locations +by passing a numpy array of target locations to the X_t argument of .predict:

+
# Predict at two off-grid locations for three days in December 2014
+test_tasks = task_loader(pd.date_range("2014-12-01", "2014-12-03"), 0.1)
+mean_df, std_df = model.predict(test_tasks, X_t=np.array([[50, 280], [40, 250]]).T)
+
+
+
>>> mean_df
+                            air
+time       lat  lon
+2014-12-01 50.0 280.0  260.183056
+           40.0 250.0  277.947373
+2014-12-02 50.0 280.0   261.08943
+           40.0 250.0  278.219599
+2014-12-03 50.0 280.0  257.128185
+           40.0 250.0  278.444229
+
+
+

This quickstart example is also available as a Jupyter notebook with added visualisations.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/index.html b/html/index.html new file mode 100644 index 00000000..5be5f58f --- /dev/null +++ b/html/index.html @@ -0,0 +1,166 @@ + + + + + + + Welcome to DeepSensor’s documentation! — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Welcome to DeepSensor’s documentation!

+

DeepSensor is a Python package and open-source project for modelling environmental data with neural processes.

+

DeepSensor aims to faithfully match the flexibility of neural processes with a simple and intuitive interface. DeepSensor wraps around the powerful neuralprocesses package for the core modelling functionality, while allowing users to stay in the familiar xarray and pandas world and avoid the murky depths of tensors!

+

DeepSensor is also compatible with both PyTorch and TensorFlow for its machine learning abilities, thanks to the backends package. Simply import deepsensor.torch or import deepsensor.tensorflow to choose between them!

+
+

Note

+

This package is currently undergoing active development. If you are interested in using DeepSensor in production, please get in touch.

+
+
+

Citing DeepSensor

+

If you use DeepSensor in your research, please consider citing the repository. You can generate a BiBTeX entry by clicking the ‘Cite this repository’ button on the top right of this page.

+
+
+

Quick installation

+

The easiest way to install the DeepSensor package is with pip, together with the backend of your choice. In this example we use the PyTorch backend:

+
$ pip install deepsensor torch
+
+
+

To install the TensorFlow backend instead, simply replace torch with tensorflow in the above command.
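For example:

$ pip install deepsensor tensorflow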

+ +
+
+
+

Indices and tables

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/objects.inv b/html/objects.inv new file mode 100644 index 00000000..40e085c6 Binary files /dev/null and b/html/objects.inv differ diff --git a/html/reference/active_learning/acquisition_fns.html b/html/reference/active_learning/acquisition_fns.html new file mode 100644 index 00000000..6c4f67b0 --- /dev/null +++ b/html/reference/active_learning/acquisition_fns.html @@ -0,0 +1,462 @@ + + + + + + + deepsensor.active_learning.acquisition_fns — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.active_learning.acquisition_fns

+
+
+class deepsensor.active_learning.acquisition_fns.AcquisitionFunction(model: ProbabilisticModel)
+

Parent class for acquisition functions.

+
+
+__call__(task: Task) ndarray
+

+
+

Parameters

+
+
taskTask

Task object containing context and target sets.

+
+
+
+
+

Returns

+
+
numpy.ndarray

Acquisition function value/s. Shape ().

+
+
+
+
+

Raises

+
+
NotImplementedError

Because this is an abstract method, it must be implemented by the +subclass.

+
+
+
+
+ +
+
+__init__(model: ProbabilisticModel)
+
+

Parameters

+
+
modelProbabilisticModel

+
+
context_set_idxint

Index of context set to add new observations to when computing the +acquisition function.

+
+
+
+
+ +
+ +
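For illustration, a minimal sketch of a custom acquisition function (hypothetical class name; assumes the parent __init__ stores the model on self.model, per the parameters documented above):

import numpy as np

from deepsensor.active_learning.acquisition_fns import AcquisitionFunction
from deepsensor.data.task import Task


class NegStddev(AcquisitionFunction):
    """Hypothetical example: negate the model's standard deviation so that
    low-uncertainty sites score highest."""

    def __call__(self, task: Task) -> np.ndarray:
        # self.model is the ProbabilisticModel passed to __init__ (assumed)
        return -self.model.stddev(task)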
+
+class deepsensor.active_learning.acquisition_fns.AcquisitionFunctionOracle(model: ProbabilisticModel)
+

Signifies that the acquisition function is computed using the true +target values.

+
+ +
+
+class deepsensor.active_learning.acquisition_fns.AcquisitionFunctionParallel(model: ProbabilisticModel)
+

Parent class for acquisition functions that are computed across all search +points in parallel.

+
+ +
+
+class deepsensor.active_learning.acquisition_fns.MeanStddev(model: ProbabilisticModel)
+

Mean of the marginal standard deviations.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.MeanVariance(model: ProbabilisticModel)
+

Mean of the marginal variances.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.pNormStddev(*args, p: int = 1, **kwargs)
+

p-norm of the vector of marginal standard deviations.

+
+
+__init__(*args, p: int = 1, **kwargs)
+

+
+

Parameters

+
+
pint, optional

…, by default 1

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy(model: ProbabilisticModel)
+

Mean of the entropies of the marginal predictive distributions.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.JointEntropy(model: ProbabilisticModel)
+

Joint entropy of the predictive distribution.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleMAE(model: ProbabilisticModel)
+

Oracle mean absolute error.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleRMSE(model: ProbabilisticModel)
+

Oracle root mean squared error.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleMarginalNLL(model: ProbabilisticModel)
+

Oracle marginal negative log-likelihood.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleJointNLL(model: ProbabilisticModel)
+

Oracle joint negative log-likelihood.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.Random(seed: int = 42)
+

Random acquisition function.

+
+
+__init__(seed: int = 42)
+

+
+

Parameters

+
+
seedint, optional

Random seed, by default 42.

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.ContextDist(context_set_idx: int = 0)
+

Distance to closest context point.

+
+
+__init__(context_set_idx: int = 0)
+

+
+

Parameters

+
+
context_set_idxint, optional

…, by default 0

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.Stddev(model: ProbabilisticModel)
+

Model standard deviation.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.ExpectedImprovement(model: ProbabilisticModel, context_set_idx: int = 0)
+

Expected improvement acquisition function.

+
+

Note

+

The current implementation of this acquisition function is only valid +for maximisation.

+
+
+
+__init__(model: ProbabilisticModel, context_set_idx: int = 0)
+
+

Parameters

+
+
modelProbabilisticModel

+
+
context_set_idxint

Index of context set to add new observations to when computing the +acquisition function.

+
+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/reference/active_learning/algorithms.html b/html/reference/active_learning/algorithms.html new file mode 100644 index 00000000..4c99c2ca --- /dev/null +++ b/html/reference/active_learning/algorithms.html @@ -0,0 +1,226 @@ + + + + + + + deepsensor.active_learning.algorithms — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.active_learning.algorithms

+
+
+class deepsensor.active_learning.algorithms.GreedyAlgorithm(model: DeepSensorModel, X_s: Dataset | DataArray | DataFrame | Series | Index, X_t: Dataset | DataArray | DataFrame | Series | Index, X_s_mask: DataArray | Dataset | None = None, X_t_mask: DataArray | Dataset | None = None, N_new_context: int = 1, X_normalised: bool = False, model_infill_method: str = 'mean', query_infill: DataArray | None = None, proposed_infill: DataArray | None = None, context_set_idx: int = 0, target_set_idx: int = 0, progress_bar: bool = False, task_loader: TaskLoader | None = None, verbose: bool = False)
+

Greedy algorithm for active learning

+
+
+__call__(acquisition_fn: AcquisitionFunction, tasks: List[Task] | Task, diff: bool = False) Tuple[DataFrame, Dataset]
+

Iteratively… docstring TODO

+

Returns a tensor of proposed new sensor locations (in greedy +iteration/priority order) and their corresponding list of indexes in +the search space.

+
+

Parameters

+
+
acquisition_fn: AcquisitionFunction

+
+
tasks: List[Task] | Task

+
+
+
+
+

Returns

+
+
X_new_df, acquisition_fn_ds: Tuple[pandas.DataFrame, xarray.Dataset]

+
+
+
+
+

Raises

+
+
ValueError

If acquisition_fn is an +AcquisitionFunctionOracle +and task_loader is None.

+
+
ValueError

If min_or_max is not "min" or "max".

+
+
ValueError

If Y_t_aux is in tasks but task_loader is None.

+
+
+
+
+ +
+
+__init__(model: DeepSensorModel, X_s: Dataset | DataArray | DataFrame | Series | Index, X_t: Dataset | DataArray | DataFrame | Series | Index, X_s_mask: DataArray | Dataset | None = None, X_t_mask: DataArray | Dataset | None = None, N_new_context: int = 1, X_normalised: bool = False, model_infill_method: str = 'mean', query_infill: DataArray | None = None, proposed_infill: DataArray | None = None, context_set_idx: int = 0, target_set_idx: int = 0, progress_bar: bool = False, task_loader: TaskLoader | None = None, verbose: bool = False)
+

+
+

Parameters

+
+
modelDeepSensorModel

Trained model to use for proposing new context points.

+
+
X_sxarray.Dataset | xarray.DataArray | pandas.DataFrame | pandas.Series | pandas.Index

Search coordinates.

+
+
X_txarray.Dataset | xarray.DataArray

Target coordinates.

+
+
X_s_maskxarray.Dataset | xarray.DataArray, optional

Mask for search coordinates. If provided, only points where mask +is True will be considered. Defaults to None.

+
+
X_t_maskxarray.Dataset | xarray.DataArray, optional

…, by default None.

+
+
N_new_contextint, optional

…, by default 1.

+
+
X_normalisedbool, optional

…, by default False.

+
+
model_infill_methodstr, optional

…, by default “mean”.

+
+
query_infillxarray.DataArray, optional

…, by default None.

+
+
proposed_infillxarray.DataArray, optional

…, by default None.

+
+
context_set_idxint, optional

…, by default 0.

+
+
target_set_idxint, optional

…, by default 0.

+
+
progress_barbool, optional

…, by default False.

+
+
min_or_maxstr, optional

…, by default “min”.

+
+
task_loaderTaskLoader, optional

…, by default None.

+
+
verbosebool, optional

…, by default False.

+
+
+
+
+

Raises

+
+
ValueError

If the model passed does not inherit from +DeepSensorModel.

+
+
+
+
+ +
+ +
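A minimal usage sketch (hypothetical variable names; assumes a trained DeepSensorModel model, xarray search/target coordinates X_s and X_t, and a list of Task objects tasks):

from deepsensor.active_learning.acquisition_fns import Stddev
from deepsensor.active_learning.algorithms import GreedyAlgorithm

alg = GreedyAlgorithm(model, X_s=X_s, X_t=X_t, N_new_context=3)
X_new_df, acquisition_fn_ds = alg(Stddev(model), tasks)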
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/reference/active_learning/index.html b/html/reference/active_learning/index.html new file mode 100644 index 00000000..52bba31c --- /dev/null +++ b/html/reference/active_learning/index.html @@ -0,0 +1,154 @@ + + + + + + + active_learning module — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/html/reference/data/index.html b/html/reference/data/index.html new file mode 100644 index 00000000..d88d6300 --- /dev/null +++ b/html/reference/data/index.html @@ -0,0 +1,160 @@ + + + + + + + data module — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/html/reference/data/loader.html b/html/reference/data/loader.html new file mode 100644 index 00000000..3a2ce8c1 --- /dev/null +++ b/html/reference/data/loader.html @@ -0,0 +1,427 @@ + + + + + + + deepsensor.data.loader — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.loader

+
+
+class deepsensor.data.loader.TaskLoader(task_loader_ID: str | None = None, context: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, target: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, aux_at_contexts: Tuple[int, DataArray | Dataset] | None = None, aux_at_targets: DataArray | Dataset | None = None, links: Tuple[int, int] | List[Tuple[int, int]] | None = None, context_delta_t: int | List[int] = 0, target_delta_t: int | List[int] = 0, time_freq: str = 'D', xarray_interp_method: str = 'linear', discrete_xarray_sampling: bool = False, dtype: object = 'float32')
+
+
+__call__(date, *args, **kwargs)
+

Generate a task for a given date.

+
+

Parameters

+
+
date

Date for which to generate the task.

+
+
+
+
+

Returns

+
+
task: Task | List[Task]

Task object or list of task objects for each date containing the +context and target data.

+
+
+
+
+ +
+
+__init__(task_loader_ID: str | None = None, context: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, target: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, aux_at_contexts: Tuple[int, DataArray | Dataset] | None = None, aux_at_targets: DataArray | Dataset | None = None, links: Tuple[int, int] | List[Tuple[int, int]] | None = None, context_delta_t: int | List[int] = 0, target_delta_t: int | List[int] = 0, time_freq: str = 'D', xarray_interp_method: str = 'linear', discrete_xarray_sampling: bool = False, dtype: object = 'float32') None
+

Initialise a TaskLoader object.

+

The behaviour is the following:
  • If all data is passed as paths, load the data and overwrite the paths with the loaded data.
  • Either all data is passed as paths, or all data is passed as loaded data (else a ValueError is raised).
  • If all data is passed as paths, the TaskLoader can be saved with the save method (using the config).

+
+

Parameters

+
+
task_loader_ID

If loading a TaskLoader from a config file, this is the folder the +TaskLoader was saved in (using .save). If this argument is passed, all other +arguments are ignored.

+
+
contextxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset, pandas.DataFrame]

Context data. Can be a single xarray.DataArray, +xarray.Dataset or pandas.DataFrame, or a +list/tuple of these.

+
+
targetxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset, pandas.DataFrame]

Target data. Can be a single xarray.DataArray, +xarray.Dataset or pandas.DataFrame, or a +list/tuple of these.

+
+
aux_at_contextsTuple[int, xarray.DataArray | xarray.Dataset], optional

Auxiliary data at context locations. Tuple of two elements, where +the first element is the index of the context set for which the +auxiliary data will be sampled at, and the second element is the +auxiliary data, which can be a single xarray.DataArray or +xarray.Dataset. Default: None.

+
+
aux_at_targetsxarray.DataArray | xarray.Dataset, optional

Auxiliary data at target locations. Can be a single +xarray.DataArray or xarray.Dataset. Default: +None.

+
+
linksTuple[int, int] | List[Tuple[int, int]], optional

Specifies links between context and target data. Each link is a +tuple of two integers, where the first integer is the index of the +context data and the second integer is the index of the target +data. Can be a single tuple in the case of a single link. If None, +no links are specified. Default: None.

+
+
context_delta_tint | List[int], optional

Time difference between context data and t=0 (task init time). Can +be a single int (same for all context data) or a list/tuple of +ints. Default is 0.

+
+
target_delta_tint | List[int], optional

Time difference between target data and t=0 (task init time). Can +be a single int (same for all target data) or a list/tuple of ints. +Default is 0.

+
+
time_freqstr, optional

Time frequency of the data. Default: 'D' (daily).

+
+
xarray_interp_methodstr, optional

Interpolation method to use when interpolating +xarray.DataArray. Default is 'linear'.

+
+
discrete_xarray_samplingbool, optional

When randomly sampling xarray variables, whether to sample at +discrete points defined at grid cell centres, or at continuous +points within the grid. Default is False.

+
+
dtypeobject, optional

Data type of the data. Used to cast the data to the specified +dtype. Default: 'float32'.

+
+
+
+
+ +
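A concrete sketch (assuming ds is a normalised xarray.Dataset produced by a DataProcessor, as in the Quickstart tutorial):

from deepsensor.data.loader import TaskLoader

task_loader = TaskLoader(context=ds, target=ds)
task = task_loader("2014-12-31", context_sampling=0.1, target_sampling="all")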
+
+config_fname = 'task_loader_config.json'
+
+ +
+
+count_context_and_target_data_dims()
+

Count the number of data dimensions in the context and target data.

+
+

Returns

+

context_dims : tuple. Tuple of data dimensions in the context data. +target_dims : tuple. Tuple of data dimensions in the target data.

+
+
+

Raises

+
+
ValueError

If the context/target data is not a tuple/list of +xarray.DataArray, xarray.Dataset or +pandas.DataFrame.

+
+
+
+
+ +
+
+infer_context_and_target_var_IDs()
+

Infer the variable IDs of the context and target data.

+
+

Returns

+

context_var_IDs : tuple. Tuple of variable IDs in the context data. +target_var_IDs : tuple. Tuple of variable IDs in the target data.

+
+
+

Raises

+
+
ValueError

If the context/target data is not a tuple/list of +xarray.DataArray, xarray.Dataset or +pandas.DataFrame.

+
+
+
+
+ +
+
+load_dask() None
+

Load any dask data into memory.

+
+

Returns

+

None.

+
+
+ +
+
+sample_da(da: ~xarray.core.dataarray.DataArray | ~xarray.core.dataset.Dataset, sampling_strat: str | int | float | ~numpy.ndarray, seed: int | None = None) -> (<class 'numpy.ndarray'>, <class 'numpy.ndarray'>)
+

Sample a DataArray according to a given strategy.

+
+

Parameters

+
+
daxarray.DataArray | xarray.Dataset

DataArray to sample, assumed to be sliced for the task already.

+
+
sampling_stratstr | int | float | numpy.ndarray

Sampling strategy, either “all” or an integer for random grid cell +sampling.

+
+
seedint, optional

Seed for random sampling. Default: None.

+
+
+
+
+

Returns

+
+
DataTuple[numpy.ndarray, numpy.ndarray]

Tuple of sampled target data and sampled context data.

+
+
+
+
+

Raises

+
+
InvalidSamplingStrategyError

If the sampling strategy is not valid.

+
+
InvalidSamplingStrategyError

If a numpy coordinate array is passed to sample an xarray object, +but the coordinates are out of bounds.

+
+
+
+
+ +
+
+sample_df(df: ~pandas.core.frame.DataFrame | ~pandas.core.series.Series, sampling_strat: str | int | float | ~numpy.ndarray, seed: int | None = None) -> (<class 'numpy.ndarray'>, <class 'numpy.ndarray'>)
+

Sample a DataFrame according to a given strategy.

+
+

Parameters

+
+
dfpandas.DataFrame | pandas.Series

DataFrame to sample, assumed to be time-sliced for the task already.

+
+
sampling_stratstr | int | float | numpy.ndarray

Sampling strategy, either “all” or an integer for random grid cell +sampling.

+
+
seedint, optional

Seed for random sampling. Default: None.

+
+
+
+
+

Returns

+
+
DataTuple[X_c, Y_c]

Tuple of sampled target data and sampled context data.

+
+
+
+
+

Raises

+
+
InvalidSamplingStrategyError

If the sampling strategy is not valid.

+
+
InvalidSamplingStrategyError

If a numpy coordinate array is passed to sample a pandas object, +but the DataFrame does not contain all the requested samples.

+
+
+
+
+ +
+
+sample_offgrid_aux(X_t: ndarray | Tuple[ndarray, ndarray], offgrid_aux: DataArray | Dataset) ndarray
+

Sample auxiliary data at off-grid locations.

+
+

Parameters

+
+
X_tnumpy.ndarray | Tuple[numpy.ndarray, numpy.ndarray]

Off-grid locations at which to sample the auxiliary data. Can be a +tuple of two numpy arrays, or a single numpy array.

+
+
offgrid_auxxarray.DataArray | xarray.Dataset

Auxiliary data at off-grid locations.

+
+
+
+
+

Returns

+
+
numpy.ndarray

+
+
+
+
+ +
+
+save(folder: str)
+

Save TaskLoader config to JSON in folder

+
+ +
+
+task_generation(date: Timestamp, context_sampling: str | int | float | ndarray | List[str | int | float | ndarray] = 'all', target_sampling: str | int | float | ndarray | List[str | int | float | ndarray] = 'all', split_frac: float = 0.5, datewise_deterministic: bool = False, seed_override: int | None = None) Task
+

Generate a task for a given date.

+

There are several sampling strategies available for the context and +target data:

+
+
    +
  • “all”: Sample all observations.

  • +
  • int: Sample N observations uniformly at random.

  • +
  • float: Sample a fraction of observations uniformly at random.

  • +
  • numpy.ndarray, shape (2, N): Sample N observations +at the given x1, x2 coordinates. Coords are assumed to be +unnormalised.

  • +
+
+
+

Parameters

+
+
datepandas.Timestamp

Date for which to generate the task.

+
+
context_samplingstr | int | float | numpy.ndarray | List[str | int | float | numpy.ndarray]

Sampling strategy for the context data, either a list of sampling +strategies for each context set, or a single strategy applied to +all context sets. Default is "all".

+
+
target_samplingstr | int | float | numpy.ndarray | List[str | int | float | numpy.ndarray]

Sampling strategy for the target data, either a list of sampling +strategies for each target set, or a single strategy applied to all +target sets. Default is "all".

+
+
split_fracfloat

The fraction of observations to use for the context set with the +“split” sampling strategy for linked context and target set pairs. +The remaining observations are used for the target set. Default is +0.5.

+
+
datewise_deterministicbool

Whether random sampling is datewise_deterministic based on the +date. Default is False.

+
+
seed_overrideOptional[int]

Override the seed for random sampling. This can be used to apply the same random sampling at different dates. Default is None.

+
+
+
+
+

Returns

+
+
taskTask

Task object containing the context and target data.

+
+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/reference/data/processor.html b/html/reference/data/processor.html new file mode 100644 index 00000000..7e942f1c --- /dev/null +++ b/html/reference/data/processor.html @@ -0,0 +1,557 @@ + + + + + + + deepsensor.data.processor — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.processor

+
+
+class deepsensor.data.processor.DataProcessor(folder: str | None = None, time_name: str = 'time', x1_name: str = 'x1', x2_name: str = 'x2', x1_map: tuple | None = None, x2_map: tuple | None = None, deepcopy: bool = True, verbose: bool = False)
+

Normalise xarray and pandas data for use in deepsensor models

+
+
+__call__(data: DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame], method: str = 'mean_std') DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame]
+

Normalise data.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Data to normalise.

+
+
methodstr, optional
+
Normalisation method. Defaults to “mean_std”. Options:
    +
  • “mean_std”: Normalise to mean=0 and std=1

  • +
  • “min_max”: Normalise to min=-1 and max=1

  • +
+
+
+
+
+
+
+

Returns

+
+
xarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Normalised data.

+
+
+
+
+ +
+
+__init__(folder: str | None = None, time_name: str = 'time', x1_name: str = 'x1', x2_name: str = 'x2', x1_map: tuple | None = None, x2_map: tuple | None = None, deepcopy: bool = True, verbose: bool = False)
+

Initialise a DataProcessor object.

+
+

Parameters

+
+
folderstr, optional

Folder to load normalisation params from. Defaults to None.

+
+
x1_namestr, optional

Name of first spatial coord (e.g. “lat”). Defaults to “x1”.

+
+
x2_namestr, optional

Name of second spatial coord (e.g. “lon”). Defaults to “x2”.

+
+
x1_maptuple, optional

2-tuple of raw x1 coords to linearly map to (0, 1), respectively. +Defaults to (0, 1) (i.e. no normalisation).

+
+
x2_maptuple, optional

2-tuple of raw x2 coords to linearly map to (0, 1), respectively. +Defaults to (0, 1) (i.e. no normalisation).

+
+
deepcopybool, optional

Whether to make a deepcopy of raw data to ensure it is not changed +by reference when normalising. Defaults to True.

+
+
verbosebool, optional

Whether to print verbose output. Defaults to False.

+
+
+
+
+ +
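A concrete sketch mirroring the Quickstart tutorial, where raw lat/lon coordinates are linearly mapped to (0, 1) (assumes ds_raw is an xarray object with lat/lon coords):

from deepsensor.data.processor import DataProcessor

data_processor = DataProcessor(x1_name="lat", x1_map=(15, 75), x2_name="lon", x2_map=(200, 330))
ds = data_processor(ds_raw)  # normalise values and coords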
+
+__str__()
+

Return str(self).

+
+ +
+
+add_to_config(var_ID, **kwargs)
+

Add kwargs to config dict for variable var_ID

+
+ +
+
+check_params_computed(var_ID, method) bool
+

Check if normalisation params computed for a given variable.

+
+

Parameters

+
+
var_ID

+
+
method

+
+
+
+
+

Returns

+
+
bool

Whether normalisation params are computed for a given variable.

+
+
+
+
+ +
+
+config_fname = 'data_processor_config.json'
+
+ +
+
+get_config(var_ID, data, method=None)
+

Get pre-computed normalisation params or compute them for variable +var_ID.

+
+

Parameters

+
+
var_ID

+
+
data

+
+
method…, optional

…, by default None.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+classmethod load_dask(data: DataArray | Dataset)
+

Load dask data into memory.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map(data: DataArray | Dataset | DataFrame | Series, method: str | None = None, add_offset: bool = True, unnorm: bool = False)
+

Normalise or unnormalise the data values and coords in an xarray or +pandas object.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | pandas.Series

+
+
methodstr, optional

…, by default None.

+
+
add_offsetbool, optional

…, by default True.

+
+
unnormbool, optional

…, by default False.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_array(data: DataArray | Dataset | DataFrame | Series | ndarray, var_ID: str, method: str | None = None, unnorm: bool = False, add_offset: bool = True)
+

Normalise or unnormalise the data values in an xarray, pandas, or +numpy object.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | pandas.Series | numpy.ndarray

+
+
var_IDstr

+
+
methodstr, optional

…, by default None.

+
+
unnormbool, optional

…, by default False.

+
+
add_offsetbool, optional

…, by default True.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_coord_array(coord_array: ndarray, unnorm: bool = False)
+

Normalise or unnormalise a coordinate array.

+
+

Parameters

+
+
coord_arraynumpy.ndarray

Array of shape (2, N) containing coords.

+
+
unnormbool, optional

Whether to unnormalise. Defaults to False.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_coords(data: DataArray | Dataset | DataFrame | Series, unnorm=False)
+

Normalise spatial coords in a pandas or xarray object.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | pandas.Series

+
+
unnormbool, optional

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_x1_and_x2(x1: ndarray, x2: ndarray, unnorm: bool = False)
+

Normalise or unnormalise spatial coords in an array.

+
+

Parameters

+
+
x1numpy.ndarray

Array of shape (N_x1,) containing spatial coords of x1.

+
+
x2numpy.ndarray

Array of shape (N_x2,) containing spatial coords of x2.

+
+
unnormbool, optional

Whether to unnormalise. Defaults to False.

+
+
+
+
+

Returns

+
+
Tuple[numpy.ndarray, numpy.ndarray]

Normalised or unnormalised spatial coords of x1 and x2.

+
+
+
+
+ +
+
+save(folder: str)
+

Save DataProcessor config to JSON in folder

+
+ +
+
+set_coord_params(time_name, x1_name, x1_map, x2_name, x2_map) None
+

Set coordinate normalisation params.

+
+

Parameters

+
+
time_name

+
+
x1_name

+
+
x1_map

+
+
x2_name

+
+
x2_map

+
+
+
+
+

Returns

+

None.

+
+
+ +
+
+unnormalise(data: DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame], add_offset: bool = True) DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame]
+

Unnormalise data.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Data to unnormalise.

+
+
add_offsetbool, optional

Whether to add the offset to the data when unnormalising. Set to +False to unnormalise uncertainty values (e.g. std dev). Defaults to +True.

+
+
+
+
+

Returns

+
+
xarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Unnormalised data.

+
+
+
+
+ +
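For example, a sketch of unnormalising predictions back to the original units (assumes mean_ds and std_ds are normalised predictions from a model, and data_processor is the DataProcessor used to normalise the raw data):

mean_ds_unnorm = data_processor.unnormalise(mean_ds)
std_ds_unnorm = data_processor.unnormalise(std_ds, add_offset=False)  # std devs: rescale only, no offset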
+ +
+
+deepsensor.data.processor.xarray_to_coord_array_normalised(da: Dataset | DataArray) ndarray
+

Convert xarray to normalised coordinate array.

+
+

Parameters

+
+
daxarray.Dataset | xarray.DataArray

+
+
+
+
+

Returns

+
+
numpy.ndarray

A normalised coordinate array of shape (2, N).

+
+
+
+
+ +
+
+deepsensor.data.processor.mask_coord_array_normalised(coord_arr: ndarray, mask_da: DataArray | Dataset | None)
+

Remove points from (2, N) numpy array that are outside gridded xarray boolean mask.

+

If coord_arr is shape (2, N), then mask_da is a shape (N,) boolean array +(True if point is inside mask, False if outside).

+
+

Parameters

+
+
coord_arr

+
+
mask_da

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+deepsensor.data.processor.da1_da2_same_grid(da1: DataArray, da2: DataArray) bool
+

Check if da1 and da2 are on the same grid.

+
+

Note

+

da1 and da2 are assumed normalised by DataProcessor.

+
+
+

Parameters

+
+
da1xarray.DataArray

+
+
da2xarray.DataArray

+
+
+
+
+

Returns

+
+
bool

Whether da1 and da2 are on the same grid.

+
+
+
+
+ +
+
+deepsensor.data.processor.interp_da1_to_da2(da1: DataArray, da2: DataArray) DataArray
+

Interpolate da1 to da2.

+
+

Note

+

da1 and da2 are assumed normalised by DataProcessor.

+
+
+

Parameters

+
+
da1xarray.DataArray

+
+
da2xarray.DataArray

+
+
+
+
+

Returns

+
+
xarray.DataArray

Interpolated xarray.

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/html/reference/data/task.html b/html/reference/data/task.html new file mode 100644 index 00000000..97e04dbb --- /dev/null +++ b/html/reference/data/task.html @@ -0,0 +1,330 @@ + + + + + + + deepsensor.data.task — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.task

+
+
+class deepsensor.data.task.Task(task_dict: dict)
+

Task dictionary class.

+

Inherits from dict and adds methods for printing and modifying the +data.

+
+
+__init__(task_dict: dict) None
+

Initialise a Task object.

+
+

Parameters

+
+
task_dictdict

Dictionary containing the task.

+
+
+
+
+ +
+
+__repr__()
+

Print a convenient summary of the task dictionary.

+

Print the type of each entry and if it is an array, print its shape, +otherwise print the value.

+
+ +
+
+__str__()
+

Print a convenient summary of the task dictionary.

+

For array entries, print their shape, otherwise print the value.

+
+ +
+
+add_batch_dim()
+

Add a batch dimension to the arrays in the task dictionary.

+
+

Returns

+

task : dict. Task dictionary with batch dimension added to the array elements.

+
+
+ +
+
+cast_to_float32()
+

Cast the arrays in the task dictionary to float32.

+
+

Returns

+

task : dict. Task dictionary with arrays cast to float32.

+
+
+ +
+
+convert_to_tensor()
+

Convert the arrays in the task dictionary to tensor objects of the active deep learning backend.

Returns

task : dict. Task dictionary with arrays converted to deep learning tensor objects.

+
+
+
+ +
+
+mask_nans_nps()
+
+ +
+
+mask_nans_numpy()
+

Replace NaNs with zeroes and set a mask to indicate where the NaNs were.

+
+

Returns

+

task : dict. Task with NaNs set to zeros and a mask indicating where the missing values are.

+
+
+ +
+
+op(f, op_flag=None)
+

Apply function f to the array elements of a task dictionary.

+

Useful for recasting to a different dtype or reshaping (e.g. adding a batch dimension).

+
+

Parameters

+
+
f : function

Function to apply to the array elements of the task.

task : dict

Task dictionary.

op_flag : str

Flag to set in the task dictionary’s ops key.

+
+
+
+
+

Returns

+
+
task : dict

Task dictionary with f applied to the array elements and op_flag set in the ops key.

+
+
+
+
+ +
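op underpins convenience methods such as add_batch_dim and cast_to_float32; a hedged sketch of direct usage (assumes task is a Task produced by a TaskLoader)::

    # Illustrative only: recast all array entries and record the op flag.
    task = task.op(lambda arr: arr.astype("float32"), op_flag="float32")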
+
+remove_nans_from_task_Y_t_if_present()
+

If NaNs are present in task[“Y_t”], remove them (and the corresponding entries of task[“X_t”]).

+
+ +
+
+classmethod summarise_repr(k, v)
+
+ +
+
+classmethod summarise_str(k, v)
+
+ +
+ +
+
+deepsensor.data.task.append_obs_to_task(task: Task, X_new: int | Dimension | int8 | int16 | int32 | int64 | uint8 | uint16 | uint32 | uint64 | bool | bool_ | float | float16 | float32 | float64 | longdouble | complex | complex64 | complex128 | clongdouble | ndarray | ModuleType[autograd.tracer.Box] | ModuleType[tensorflow.Tensor] | ModuleType[tensorflow.Variable] | ModuleType[tensorflow.IndexedSlices] | ModuleType[jaxlib.xla_extension.ArrayImpl] | ModuleType[jax.core.Tracer] | ModuleType[torch.Tensor], Y_new: int | Dimension | int8 | int16 | int32 | int64 | uint8 | uint16 | uint32 | uint64 | bool | bool_ | float | float16 | float32 | float64 | longdouble | complex | complex64 | complex128 | clongdouble | ndarray | ModuleType[autograd.tracer.Box] | ModuleType[tensorflow.Tensor] | ModuleType[tensorflow.Variable] | ModuleType[tensorflow.IndexedSlices] | ModuleType[jaxlib.xla_extension.ArrayImpl] | ModuleType[jax.core.Tracer] | ModuleType[torch.Tensor], context_set_idx: int)
+

Append a single observation to a context set in task.

+

Makes a deep copy of the data structure to avoid affecting the original object.

+
+ +
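A hedged sketch of appending one observation to the first context set (illustrative names; the (2, 1) and (1, 1) shapes are assumptions based on the coordinate and data array conventions used elsewhere in this module)::

    import numpy as np
    from deepsensor.data.task import append_obs_to_task

    X_new = np.array([[0.5], [0.5]])  # assumed (2, 1) normalised x1/x2 coordinates
    Y_new = np.array([[1.0]])         # assumed (1, 1) observed value
    task_with_obs = append_obs_to_task(task, X_new, Y_new, context_set_idx=0)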
+
+deepsensor.data.task.flatten_X(X: ndarray | Tuple[ndarray, ndarray]) ndarray
+

Convert tuple of gridded coords to (2, N) array if necessary.

+
+

Parameters

+
+
X : numpy.ndarray | Tuple[numpy.ndarray, numpy.ndarray]

+
+
+
+
+

Returns

+
+
numpy.ndarray

+
+
+
+
+ +
+
+deepsensor.data.task.flatten_Y(Y: ndarray | Tuple[ndarray, ndarray]) ndarray
+

Convert gridded data of shape (N_dim, N_x1, N_x2) to a (N_dim, N_x1 * N_x2) array if necessary.

+
+

Parameters

+
+
Y : numpy.ndarray | Tuple[numpy.ndarray, numpy.ndarray]

+
+
+
+
+

Returns

+
+
numpy.ndarray

+
+
+
+
+ +
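A hedged sketch of what the two flattening helpers do (the tuple-of-1D-coordinate-vectors form of gridded X is an assumption for illustration)::

    import numpy as np
    from deepsensor.data.task import flatten_X, flatten_Y

    x1 = np.linspace(0, 1, 10)
    x2 = np.linspace(0, 1, 20)
    Y_grid = np.random.rand(1, 10, 20)  # (N_dim, N_x1, N_x2)

    X_flat = flatten_X((x1, x2))  # gridded coords -> (2, 200)
    Y_flat = flatten_Y(Y_grid)    # -> (1, 200)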
+
+deepsensor.data.task.flatten_gridded_data_in_task(task: Task) Task
+

Convert any gridded data in Task to flattened arrays.

+

Necessary for AR sampling, which doesn’t yet permit gridded context sets.

+
+

Parameters

+
+
task : Task

+
+
+
+
+

Returns

+
+
Task

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/data/utils.html b/html/reference/data/utils.html
new file mode 100644
index 00000000..95f4c925
--- /dev/null
+++ b/html/reference/data/utils.html
@@ -0,0 +1,244 @@
deepsensor.data.utils — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.utils

+
+
+deepsensor.data.utils.construct_x1x2_ds(gridded_ds)
+

Construct an xarray.Dataset containing two vars, where each var is a 2D gridded channel whose values contain the x_1 and x_2 coordinate values, respectively.

+
+

Parameters

+
+
gridded_ds : xarray.Dataset

+
+
+
+
+

Returns

+
+
xarray.Dataset

+
+
+
+
+ +
+
+deepsensor.data.utils.construct_circ_time_ds(dates, freq)
+

Return an xarray.Dataset containing a circular variable for time. The freq entry dictates the frequency of cycling of the circular variable. E.g.:

  • 'H': cycles once per day at hourly intervals

  • 'D': cycles once per year at daily intervals

  • 'M': cycles once per year at monthly intervals
+
+

Parameters

+
+
dates: …

+
+
freq

+
+
+
+
+

Returns

+
+
xarray.Dataset

+
+
+
+
+ +
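A hedged construction sketch (the internal variable names of the returned Dataset are not documented here, so they are not accessed below)::

    import pandas as pd
    from deepsensor.data.utils import construct_circ_time_ds

    dates = pd.date_range("2013-01-01", "2014-12-31", freq="D")
    doy_ds = construct_circ_time_ds(dates, freq="D")  # cycles once per year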
+
+deepsensor.data.utils.compute_xarray_data_resolution(ds: DataArray | Dataset) float
+

Computes the resolution of an xarray object with coordinates x1 and x2.

+

The data resolution is the finer of the two coordinate resolutions (x1 and x2). For example, if x1 has a resolution of 0.1 degrees and x2 has a resolution of 0.2 degrees, the data resolution returned will be 0.1 degrees.

+
+

Parameters

+
+
ds : xarray.DataArray | xarray.Dataset

Xarray object with coordinates x1 and x2.

+
+
+
+
+

Returns

+
+
data_resolution : float

Resolution of the data (in spatial units, e.g. 0.1 degrees).

+
+
+
+
+ +
+
+deepsensor.data.utils.compute_pandas_data_resolution(df: DataFrame | Series, n_times: int = 1000, percentile: int = 5) float
+

Approximates the resolution of non-gridded pandas data with indexes time, x1, and x2.

+

The resolution is approximated as the Nth percentile of the distances between neighbouring observations, possibly using a subset of the dates in the data. The default is to use 1000 dates (or all dates if there are fewer than 1000) and the 5th percentile, i.e. the distance below which the closest 5% of neighbouring observations fall.

+
+

Parameters

+
+
df : pandas.DataFrame | pandas.Series

Dataframe or series with indexes time, x1, and x2.

n_times : int, optional

Number of dates to sample. Defaults to 1000. If “all”, all dates are used.

percentile : int, optional

Percentile of pairwise distances for computing the resolution. Defaults to 5.

+
+
+
+
+

Returns

+
+
data_resolution : float

Resolution of the data (in spatial units, e.g. 0.1 degrees).

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/index.html b/html/reference/index.html
new file mode 100644
index 00000000..df83215e
--- /dev/null
+++ b/html/reference/index.html
@@ -0,0 +1,162 @@
API Reference — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

API Reference

+

This part of the documentation contains the API reference for the package. It is organised into several submodules, each documenting its respective classes, functions, and attributes in the sections that follow. The API is designed to be as simple as possible while still allowing plenty of flexibility.

+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/model/convnp.html b/html/reference/model/convnp.html
new file mode 100644
index 00000000..d02b517f
--- /dev/null
+++ b/html/reference/model/convnp.html
@@ -0,0 +1,785 @@
deepsensor.model.convnp — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.convnp

+
+
+class deepsensor.model.convnp.ConvNP(*args, **kwargs)
+

A probabilistic ConvNP regression model.

+

Wraps around the neuralprocesses package to construct a ConvNP model. See: https://github.com/wesselb/neuralprocesses/blob/main/neuralprocesses/architectures/convgnp.py

+

Multiple dispatch is implemented using plum to allow for re-using the model’s forward prediction object when computing the logpdf, entropy, etc. Alternatively, the model can be run forwards with a Task object of data from the TaskLoader.

+

The ConvNP can optionally be instantiated with:

+
+
  • a DataProcessor object to auto-unnormalise the data at inference time with the .predict method.

  • a TaskLoader object to infer sensible default model parameters from the data.
+

These additional parameters can be passed to the __init__ method to customise the model, which will override any defaults inferred from a TaskLoader.

+
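A hedged construction sketch (assumes a fitted DataProcessor and TaskLoader as in the Quickstart; the keyword values are illustrative overrides, not recommendations)::

    import deepsensor.torch  # select a backend before constructing the model
    from deepsensor.model.convnp import ConvNP

    model = ConvNP(
        data_processor,
        task_loader,
        likelihood="cnp",               # or "gnp" / "cnp-spikes-beta"
        unet_channels=(64, 64, 64, 64),
        points_per_unit=100,
    )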
+

Parameters

+
+
points_per_unit : int, optional

Density of the internal discretisation. Defaults to 100.

likelihood : str, optional

Likelihood. Must be one of "cnp" (equivalently "het"), "gnp" (equivalently "lowrank"), or "cnp-spikes-beta" (equivalently "spikes-beta"). Defaults to "cnp".

dim_x : int, optional

Dimensionality of the inputs. Defaults to 1.

dim_y : int, optional

Dimensionality of the outputs. Defaults to 1.

dim_yc : int or tuple[int], optional

Dimensionality of the outputs of the context set. You should set this if the dimensionality of the outputs of the context set is not equal to the dimensionality of the outputs of the target set. You should also set this if you want to use multiple context sets. In that case, set this equal to a tuple of integers indicating the respective output dimensionalities.

dim_yt : int, optional

Dimensionality of the outputs of the target set. You should set this if the dimensionality of the outputs of the target set is not equal to the dimensionality of the outputs of the context set.

dim_aux_t : int, optional

Dimensionality of target-specific auxiliary variables.

conv_arch : str, optional

Convolutional architecture to use. Must be one of "unet[-res][-sep]" or "conv[-res][-sep]". Defaults to "unet".

unet_channels : tuple[int], optional

Channels of every layer of the UNet. Defaults to six layers each with 64 channels.

unet_kernels : int or tuple[int], optional

Sizes of the kernels in the UNet. Defaults to 5.

unet_resize_convs : bool, optional

Use resize convolutions rather than transposed convolutions in the UNet. Defaults to False.

unet_resize_conv_interp_method : str, optional

Interpolation method for the resize convolutions in the UNet. Can be set to "bilinear". Defaults to “bilinear”.

num_basis_functions : int, optional

Number of basis functions for the low-rank likelihood. Defaults to 64.

dim_lv : int, optional

Dimensionality of the latent variable. Setting to >0 constructs a latent neural process. Defaults to 0.

encoder_scales : float or tuple[float], optional

Initial value for the length scales of the set convolutions for the context set embeddings. Set to a tuple equal to the number of context sets to use different values for each set. Set to a single value to use the same value for all context sets. Defaults to 1 / points_per_unit.

encoder_scales_learnable : bool, optional

Whether the encoder SetConv length scale(s) are learnable. Defaults to False.

decoder_scale : float, optional

Initial value for the length scale of the set convolution in the decoder. Defaults to 1 / points_per_unit.

decoder_scale_learnable : bool, optional

Whether the decoder SetConv length scale(s) are learnable. Defaults to False.

aux_t_mlp_layers : tuple[int], optional

Widths of the layers of the MLP for the target-specific auxiliary variable. Defaults to three layers of width 128.

epsilon : float, optional

Epsilon added by the set convolutions before dividing by the density channel. Defaults to 1e-2.

dtype : dtype, optional

Data type.

+
+
+
+
+__call__(task, n_samples=10, requires_grad=False)
+

Compute ConvNP distribution.

+
+

Parameters

+
+
task : Task

n_samples : int, optional

Number of samples to draw from the distribution, by default 10.

requires_grad : bool, optional

Whether to compute gradients, by default False.

+
+
+
+
+

Returns

+
+

The ConvNP distribution.

+
+
+
+
+ +
+
+__init__(*args, **kwargs)
+

Generate a new model using nps.construct_convgnp with default or specified parameters.

+

This method does not take a TaskLoader or DataProcessor object, so the model will not auto-unnormalise predictions at inference time.

+
+
+__init__(self, data_processor: deepsensor.data.processor.DataProcessor, task_loader: deepsensor.data.loader.TaskLoader, *args, verbose: bool = True, **kwargs)
+
+ +

Instantiate model from TaskLoader, using data to infer model parameters (unless overridden).

+
+

Parameters

+
+
data_processor : DataProcessor

DataProcessor object.

task_loader : TaskLoader

TaskLoader object.

verbose : bool, optional

Whether to print inferred model parameters, by default True.

+
+
+
+
+__init__(self, data_processor: deepsensor.data.processor.DataProcessor, task_loader: deepsensor.data.loader.TaskLoader, neural_process: plum.type.ModuleType[tensorflow.keras.Model] | plum.type.ModuleType[torch.nn.Module])
+
+ +

Instantiate with a pre-defined neural process model.

+
+
+

Parameters

+
+
data_processor : DataProcessor

DataProcessor object.

task_loader : TaskLoader

TaskLoader object.

neural_process : TFModel | TorchModel

Pre-defined neural process model.

+
+
+
+
+__init__(self, model_ID: str)
+
+ +

Instantiate a model from a folder containing model weights and config.

+
+
+__init__(self, data_processor: deepsensor.data.processor.DataProcessor, task_loader: deepsensor.data.loader.TaskLoader, model_ID: str)
+
+ +

Instantiate a model from a folder containing model weights and config.

+
+
+ +
+
+ar_sample(task: Task, n_samples: int = 1, X_target_AR: ndarray | None = None, ar_subsample_factor: int = 1, fill_type: Literal['mean', 'sample'] = 'mean')
+

Autoregressive sampling from the model.

+

AR sampling with optional functionality to only draw AR samples over a subset of the target set and then infill the rest of the sample with the model mean or joint sample conditioned on the AR samples.

+
+

Note

+

AR sampling only works for the 0th context/target set.

+
+
+

Parameters

+
+
task : Task

The task to sample from.

n_samples : int, optional

The number of samples to draw from the distribution, by default 1.

X_target_AR : numpy.ndarray, optional

Locations to draw AR samples over. If None, AR samples will be drawn over the target locations in the task. Defaults to None.

ar_subsample_factor : int, optional

Subsample target locations to draw AR samples over. Defaults to 1.

fill_type : Literal[“mean”, “sample”], optional

How to infill the rest of the sample. Must be one of “mean” or “sample”. Defaults to “mean”.

+
+
+
+
+

Returns

+
+
numpy.ndarray

The samples.

+
+
+
+
+ +
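A hedged sketch of autoregressive sampling (assumes a trained model and a task with target locations)::

    samples = model.ar_sample(
        task,
        n_samples=5,
        ar_subsample_factor=4,  # AR-sample every 4th target point
        fill_type="mean",       # infill the remaining points with the model mean
    )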
+
+covariance(dist: AbstractDistribution)
+

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+covariance(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
task : Task

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+joint_entropy(dist: AbstractDistribution)
+

Model entropy over target points given context points.

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The model entropy.

+
+
+
+
+joint_entropy(self, task: deepsensor.data.task.Task)
+
+ +

Model entropy over target points given context points.

+
+
+

Parameters

+
+
task : Task

The task to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The model entropy.

+
+
+
+
+ +
+
+load(model_ID: str)
+

Load a model from a folder containing model weights and config.

+
+ +
+
+logpdf(dist: AbstractDistribution, task: Task)
+

Model outputs a joint distribution over all targets: concatenate targets along the observation dimension.

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to compute the logpdf of.

task : Task

The task to compute the logpdf of.

+
+
+
+
+

Returns

+
+
float

The logpdf.

+
+
+
+
+logpdf(self, task: deepsensor.data.task.Task)
+
+ +

Model outputs a joint distribution over all targets: concatenate targets along the observation dimension.

+
+
+

Parameters

+
+
task : Task

The task to compute the logpdf of.

+
+
+
+
+

Returns

+
+
float

The logpdf.

+
+
+
+
+ +
+
+loss_fn(task: Task, fix_noise=None, num_lv_samples: int = 8, normalise: bool = False)
+

Compute the loss of a task.

+
+

Parameters

+
+
task : Task

The task to compute the loss of.

fix_noise

Whether to fix the noise to the value specified in the model config.

num_lv_samples : int, optional

If latent variable model, number of lv samples for evaluating the loss, by default 8.

normalise : bool, optional

Whether to normalise the loss by the number of target points, by default False.

+
+
+
+
+

Returns

+
+
float

The loss.

+
+
+
+
+ +
+
+mean(dist: AbstractDistribution)
+

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+mean(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
task : Task

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+mean_marginal_entropy(dist: AbstractDistribution)
+

Mean marginal entropy over target points given context points.

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The mean marginal entropy.

+
+
+
+
+mean_marginal_entropy(self, task: deepsensor.data.task.Task)
+
+ +

Mean marginal entropy over target points given context points.

+
+
+

Parameters

+
+
task : Task

The task to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The mean marginal entropy.

+
+
+
+
+ +
+
+classmethod modify_task(task)
+

Cast numpy arrays to TensorFlow or PyTorch tensors, add a batch dim, and mask NaNs.

+
+

Parameters

+
+
task : Task

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+sample(dist: AbstractDistribution, n_samples: int = 1, noiseless: bool = True)
+

Create samples from a ConvNP distribution.

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to sample from.

n_samples : int, optional

The number of samples to draw from the distribution, by default 1.

noiseless : bool, optional

Whether to sample from the noiseless distribution, by default True.

+
+
+
+
+

Returns

+
+
numpy.ndarray | List[numpy.ndarray]

The samples as an array or list of arrays.

+
+
+
+
+sample(self, task: deepsensor.data.task.Task, n_samples: int = 1, noiseless: bool = True)
+
+ +

Create samples from a ConvNP distribution.

+
+
+

Parameters

+
+
task : Task

The task to sample from.

n_samples : int, optional

The number of samples to draw from the distribution, by default 1.

noiseless : bool, optional

Whether to sample from the noiseless distribution, by default True.

+
+
+
+
+

Returns

+
+
numpy.ndarray | List[numpy.ndarray]

The samples as an array or list of arrays.

+
+
+
+
+ +
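A hedged sampling sketch illustrating the multiple dispatch described above (assumes a trained model)::

    # Either sample directly from a task...
    samples = model.sample(task, n_samples=10)

    # ...or re-use one forward-prediction object for several quantities:
    dist = model(task)
    samples = model.sample(dist, n_samples=10)
    logpdf = model.logpdf(dist, task)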
+
+save(model_ID: str)
+

Save the model weights and config to a folder.

+
+ +
+
+slice_diag(task: Task)
+

Slice out the ConvCNP part of the ConvNP distribution.

+
+

Parameters

+
+
task : Task

The task to slice.

+
+
+
+
+

Returns

+
+

+
+
+
+
+slice_diag(self, dist: neuralprocesses.dist.dist.AbstractDistribution)
+
+ +

Slice out the ConvCNP part of the ConvNP distribution.

+
+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to slice.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+stddev(dist: AbstractDistribution)
+

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+stddev(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
task : Task

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+variance(dist: AbstractDistribution)
+

+
+

Parameters

+
+
dist : neuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+variance(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
task : Task

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+ +
+
+deepsensor.model.convnp.concat_tasks(tasks: List[Task], multiple: int = 1) Task
+
+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/model/defaults.html b/html/reference/model/defaults.html
new file mode 100644
index 00000000..b4f5e668
--- /dev/null
+++ b/html/reference/model/defaults.html
@@ -0,0 +1,219 @@
deepsensor.model.defaults — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.defaults

+
+
+deepsensor.model.defaults.gen_ppu(task_loader: TaskLoader) int
+

Computes a data-informed setting for the model’s internal grid density (ppu, points per unit).

+

Loops over all context and target variables in the TaskLoader and computes the data resolution for each. The model ppu is then set to the maximum data ppu.

+
+

Parameters

+
+
task_loader : TaskLoader

TaskLoader object containing context and target sets.

+
+
+
+
+

Returns

+
+
model_ppu : int

Model ppu (points per unit), i.e. the number of points per unit of input space.

+
+
+
+
+ +
+
+deepsensor.model.defaults.gen_decoder_scale(model_ppu: int) float
+

Computes informed setting for the decoder SetConv scale.

+

This sets the length scale of the Gaussian basis functions used to interpolate from the model’s internal grid to the target locations.

+

The decoder scale should be as small as possible given the model’s internal grid. The value chosen is 1 / model_ppu (i.e. the length scale is equal to the model’s internal grid spacing).

+
+

Parameters

+
+
model_ppu : int

Model ppu (points per unit), i.e. the number of points per unit of input space.

+
+
+
+
+

Returns

+
+
decoder_scale : float

Decoder scale.

+
+
+
+
+ +
+
+deepsensor.model.defaults.gen_encoder_scales(model_ppu: int, task_loader: TaskLoader) list[float]
+

Computes data-informed settings for the encoder SetConv scale for each context set.

This sets the length scale of the Gaussian basis functions used to encode the context sets.

For off-grid station data, the scale should be as small as possible given the model’s internal grid density (ppu, points per unit). The value chosen is 0.5 / model_ppu (i.e. half the model’s internal resolution).

For gridded data, the scale should be such that the functional representation smoothly interpolates the data. This is determined by computing the data resolution (the distance between the nearest two data points) for each context variable. The encoder scale is then set to 0.5 * data_resolution.

+
+

Parameters

+
+
model_ppu : int

Model ppu (points per unit), i.e. the number of points per unit of input space.

task_loader : TaskLoader

TaskLoader object containing context and target sets.

+
+
+
+
+

Returns

+
+
encoder_scales : list[float]

List of encoder scales for each context set.

+
+
+
+
+ +
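How these defaults fit together (a hedged sketch mirroring what ConvNP is described as doing when instantiated with a TaskLoader)::

    from deepsensor.model.defaults import (
        gen_ppu,
        gen_encoder_scales,
        gen_decoder_scale,
    )

    model_ppu = gen_ppu(task_loader)                             # int
    encoder_scales = gen_encoder_scales(model_ppu, task_loader)  # one per context set
    decoder_scale = gen_decoder_scale(model_ppu)                 # 1 / model_ppu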
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/model/index.html b/html/reference/model/index.html
new file mode 100644
index 00000000..84bf626d
--- /dev/null
+++ b/html/reference/model/index.html
@@ -0,0 +1,159 @@
model module — DeepSensor 0.2.5 documentation
\ No newline at end of file
diff --git a/html/reference/model/model.html b/html/reference/model/model.html
new file mode 100644
index 00000000..a7be7bdb
--- /dev/null
+++ b/html/reference/model/model.html
@@ -0,0 +1,747 @@
deepsensor.model.model — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.model

+
+
+class deepsensor.model.model.DeepSensorModel(data_processor: DataProcessor | None = None, task_loader: TaskLoader | None = None)
+

Bases: ProbabilisticModel

+

Implements DeepSensor prediction functionality of a ProbabilisticModel. Allows for outputting an xarray object containing on-grid predictions or a pandas object containing off-grid predictions.

+
+
+__init__(data_processor: DataProcessor | None = None, task_loader: TaskLoader | None = None)
+

Initialise DeepSensorModel.

+
+

Parameters

+
+
data_processor : DataProcessor

DataProcessor object, used to unnormalise predictions.

task_loader : TaskLoader

TaskLoader object, used to determine target variables for unnormalising.

+
+
+
+
+ +
+
+covariance(task: Task, *args, **kwargs)
+

Computes the model covariance matrix over target points based on given context data. Shape (N, N).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
cov : numpy.ndarray

Should return covariance matrix over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+joint_entropy(task: Task, *args, **kwargs)
+

Computes the model joint entropy over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
joint_entropy : float

Should return joint entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+logpdf(task: Task, *args, **kwargs)
+

Computes the joint model logpdf over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
logpdf : float

Should return joint logpdf over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+loss(task: Task, *args, **kwargs)
+

Computes the model loss over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
loss : float

Should return loss over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean(task: Task, *args, **kwargs)
+

Computes the model mean prediction over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
mean : numpy.ndarray

Should return mean prediction over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean_marginal_entropy(task: Task, *args, **kwargs)
+

Computes the mean marginal entropy over target points based on given context data.

+
+

Note

+

Getting a vector of marginal entropies would be useful too.

+
+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
mean_marginal_entropy : float

Should return mean marginal entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+predict(tasks: List[Task] | Task, X_t: Dataset | DataArray | DataFrame | Series | Index | ndarray, X_t_mask: Dataset | DataArray = None, X_t_is_normalised: bool = False, resolution_factor: int = 1, n_samples: int = 0, ar_sample: bool = False, ar_subsample_factor: int = 1, unnormalise: bool = True, seed: int = 0, append_indexes: dict = None, progress_bar: int = 0, verbose: bool = False)
+

Predict on a regular grid or at off-grid locations.

+
+

Parameters

+
+
tasks : List[Task] | Task

List of tasks containing context data.

X_t : xarray.Dataset | xarray.DataArray | pandas.DataFrame | pandas.Series | pandas.Index | numpy.ndarray

Target locations to predict at. Can be an xarray object containing on-grid locations or a pandas object containing off-grid locations.

X_t_is_normalised : bool

Whether the X_t coords are normalised. If False, will normalise the coords before passing to model. Default False.

resolution_factor : float

Optional factor to increase the resolution of the target grid by. E.g. 2 will double the target resolution, 0.5 will halve it. Applies to on-grid predictions only. Default 1.

n_samples : int

Number of joint samples to draw from the model. If 0, will not draw samples. Default 0.

ar_sample : bool

Whether to use autoregressive sampling. Default False.

unnormalise : bool

Whether to unnormalise the predictions. Only works if self has a data_processor and task_loader attribute. Default True.

seed : int

Random seed for deterministic sampling. Default 0.

append_indexes : dict

Dictionary of index metadata to append to pandas indexes in the off-grid case. Default None.

progress_bar : int

Whether to display a progress bar over tasks. Default 0.

verbose : bool

Whether to print time taken for prediction. Default False.

+
+
+
+
+

Returns

+
+
predictions : xarray.Dataset | xarray.DataArray | pandas.DataFrame | pandas.Series | pandas.Index

If X_t is a pandas object, returns pandas objects containing off-grid predictions.

If X_t is an xarray object, returns xarray object containing on-grid predictions.

If n_samples == 0, returns only mean and std predictions.

If n_samples > 0, returns mean, std and samples predictions.

+
+
+
+
+

Raises

+
+
ValueError

If X_t is not an xarray object and resolution_factor is not 1 or ar_subsample_factor is not 1.

+
+
ValueError

If X_t is not a pandas object and append_indexes is not None.

+
+
ValueError

If X_t is not an xarray, pandas or numpy object.

+
+
ValueError

If append_indexes are not all the same length as X_t.

+
+
+
+
+ +
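A hedged prediction sketch (assumes a trained model and the ds_raw and task objects from the Quickstart; station_df is an illustrative off-grid pandas object)::

    # On-grid prediction over the raw dataset's grid (n_samples == 0 -> mean and std only):
    mean_ds, std_ds = model.predict(task, X_t=ds_raw)

    # Off-grid prediction with joint samples:
    preds = model.predict(task, X_t=station_df, n_samples=5)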
+
+sample(task: Task, n_samples=1, *args, **kwargs)
+

Draws n_samples joint samples over target points based on given context data. Returned shape is (n_samples, n_target).

+
+

Parameters

+
+
task : Task

Task containing context data.

n_samples : int

Number of samples to draw.

+
+
+
+
+

Returns

+
+
samples : Tuple[numpy.ndarray, numpy.ndarray]

Should return joint samples over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+stddev(task: Task)
+

Model marginal standard deviation over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
std : numpy.ndarray

Should return marginal standard deviation over target points.

+
+
+
+
+ +
+
+variance(task: Task, *args, **kwargs)
+

Model marginal variance over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
var : numpy.ndarray

Should return marginal variance over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+ +
+
+class deepsensor.model.model.ProbabilisticModel
+

Bases: object

+

Base class for probabilistic models used by DeepSensor. Ensures that a set of methods required for DeepSensor is implemented by the specific model classes that inherit from it.

+
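A minimal extension sketch, adapted from the "Extending DeepSensor with new models" page (the prediction logic is deliberately naive and purely illustrative)::

    import numpy as np
    from deepsensor.model.model import DeepSensorModel

    class ExampleModel(DeepSensorModel):
        """A very naive model that predicts the mean of the first context set."""

        def __init__(self, data_processor, task_loader):
            super().__init__(data_processor, task_loader)

        def mean(self, task):
            return np.mean(task["Y_c"][0])

        def stddev(self, task):
            # Assumption for illustration: use the first context set's spread.
            return np.std(task["Y_c"][0])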
+
+covariance(task: Task, *args, **kwargs)
+

Computes the model covariance matrix over target points based on given context data. Shape (N, N).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
cov : numpy.ndarray

Should return covariance matrix over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+joint_entropy(task: Task, *args, **kwargs)
+

Computes the model joint entropy over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
joint_entropy : float

Should return joint entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+logpdf(task: Task, *args, **kwargs)
+

Computes the joint model logpdf over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
logpdf : float

Should return joint logpdf over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+loss(task: Task, *args, **kwargs)
+

Computes the model loss over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
loss : float

Should return loss over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean(task: Task, *args, **kwargs)
+

Computes the model mean prediction over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
mean : numpy.ndarray

Should return mean prediction over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean_marginal_entropy(task: Task, *args, **kwargs)
+

Computes the mean marginal entropy over target points based on given context data.

+
+

Note

+

Getting a vector of marginal entropies would be useful too.

+
+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
mean_marginal_entropy : float

Should return mean marginal entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+sample(task: Task, n_samples=1, *args, **kwargs)
+

Draws n_samples joint samples over target points based on given context data. Returned shape is (n_samples, n_target).

+
+

Parameters

+
+
task : Task

Task containing context data.

n_samples : int

Number of samples to draw.

+
+
+
+
+

Returns

+
+
samples : Tuple[numpy.ndarray, numpy.ndarray]

Should return joint samples over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+stddev(task: Task)
+

Model marginal standard deviation over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
std : numpy.ndarray

Should return marginal standard deviation over target points.

+
+
+
+
+ +
+
+variance(task: Task, *args, **kwargs)
+

Model marginal variance over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
var : numpy.ndarray

Should return marginal variance over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+ +
+
+deepsensor.model.model.create_empty_spatiotemporal_xarray(X: Dataset | DataArray, dates: List, coord_names: dict = {'x1': 'x1', 'x2': 'x2'}, data_vars: List = ['var'], prepend_dims: List[str] = None, prepend_coords: dict = None)
+
+ +
+
+deepsensor.model.model.increase_spatial_resolution(X_t_normalised, resolution_factor, coord_names: dict = {'x1': 'x1', 'x2': 'x2'})
+
+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/model/nps.html b/html/reference/model/nps.html
new file mode 100644
index 00000000..dd2397d9
--- /dev/null
+++ b/html/reference/model/nps.html
@@ -0,0 +1,323 @@
deepsensor.model.nps — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.nps

+
+
+deepsensor.model.nps.convert_task_to_nps_args(task: Task)
+

Infer & build model call signature from task dict.

+
+

Parameters

+
+
task : Task

Task object containing context and target sets.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+deepsensor.model.nps.run_nps_model(neural_process, task: Task, n_samples: int | None = None, requires_grad: bool = False)
+

Run neuralprocesses model.

+
+

Parameters

+
+
neural_process : neuralprocesses.Model

Neural process model.

task : Task

Task object containing context and target sets.

n_samples : int, optional

Number of samples to draw from the model. Defaults to None (single sample).

requires_grad : bool, optional

Whether to require gradients. Defaults to False.

+
+
+
+
+

Returns

+
+
dist : neuralprocesses.distributions.Distribution

Distribution object containing the model’s predictions.

+
+
+
+
+ +
+
+deepsensor.model.nps.run_nps_model_ar(neural_process, task: Task, num_samples: int = 1)
+

Run neural_process in AR mode.

+
+

Parameters

+
+
neural_process : neuralprocesses.Model

Neural process model.

task : Task

Task object containing context and target sets.

num_samples : int, optional

Number of samples to draw from the model. Defaults to 1.

+
+
+
+
+

Returns

+
+
Tuple[…, …, …, …]

Tuple of mean, variance, noiseless samples, and noisy samples.

+
+
+
+
+ +
+
+deepsensor.model.nps.construct_neural_process(dim_x: int = 2, dim_yc: int = 1, dim_yt: int = 1, dim_aux_t: int | None = None, dim_lv: int = 0, conv_arch: str = 'unet', unet_channels: Tuple[int, int, int, int] = (64, 64, 64, 64), unet_resize_convs: bool = True, unet_resize_conv_interp_method: Literal['bilinear'] = 'bilinear', aux_t_mlp_layers: Tuple[int] | None = None, likelihood: Literal['cnp', 'gnp', 'cnp-spikes-beta'] = 'cnp', unet_kernels: int = 5, points_per_unit: int = 100, encoder_scales: float = 0.01, encoder_scales_learnable: bool = False, decoder_scale: float = 0.01, decoder_scale_learnable: bool = False, num_basis_functions: int = 64, epsilon: float = 0.01)
+

Construct a neuralprocesses ConvNP model.

+

See: https://github.com/wesselb/neuralprocesses/blob/main/neuralprocesses/architectures/convgnp.py

+

Docstring below modified from neuralprocesses. If more kwargs are needed, they must be explicitly passed to the neuralprocesses constructor (it is not currently safe to use **kwargs here).

+
+

Parameters

+
+
dim_x : int, optional

Dimensionality of the inputs. Defaults to 1.

dim_y : int, optional

Dimensionality of the outputs. Defaults to 1.

dim_yc : int or tuple[int], optional

Dimensionality of the outputs of the context set. You should set this if the dimensionality of the outputs of the context set is not equal to the dimensionality of the outputs of the target set. You should also set this if you want to use multiple context sets. In that case, set this equal to a tuple of integers indicating the respective output dimensionalities.

dim_yt : int, optional

Dimensionality of the outputs of the target set. You should set this if the dimensionality of the outputs of the target set is not equal to the dimensionality of the outputs of the context set.

dim_aux_t : int, optional

Dimensionality of target-specific auxiliary variables.

points_per_unit : int, optional

Density of the internal discretisation. Defaults to 100.

likelihood : str, optional

Likelihood. Must be one of "cnp" (equivalently "het"), "gnp" (equivalently "lowrank"), or "cnp-spikes-beta" (equivalently "spikes-beta"). Defaults to "cnp".

conv_arch : str, optional

Convolutional architecture to use. Must be one of "unet[-res][-sep]" or "conv[-res][-sep]". Defaults to "unet".

unet_channels : tuple[int], optional

Channels of every layer of the UNet. Defaults to six layers each with 64 channels.

unet_kernels : int or tuple[int], optional

Sizes of the kernels in the UNet. Defaults to 5.

unet_resize_convs : bool, optional

Use resize convolutions rather than transposed convolutions in the UNet. Defaults to False.

unet_resize_conv_interp_method : str, optional

Interpolation method for the resize convolutions in the UNet. Can be set to "bilinear". Defaults to “bilinear”.

num_basis_functions : int, optional

Number of basis functions for the low-rank likelihood. Defaults to 64.

dim_lv : int, optional

Dimensionality of the latent variable. Setting to >0 constructs a latent neural process. Defaults to 0.

encoder_scales : float or tuple[float], optional

Initial value for the length scales of the set convolutions for the context set embeddings. Set to a tuple equal to the number of context sets to use different values for each set. Set to a single value to use the same value for all context sets. Defaults to 1 / points_per_unit.

encoder_scales_learnable : bool, optional

Whether the encoder SetConv length scale(s) are learnable. Defaults to False.

decoder_scale : float, optional

Initial value for the length scale of the set convolution in the decoder. Defaults to 1 / points_per_unit.

decoder_scale_learnable : bool, optional

Whether the decoder SetConv length scale(s) are learnable. Defaults to False.

aux_t_mlp_layers : tuple[int], optional

Widths of the layers of the MLP for the target-specific auxiliary variable. Defaults to three layers of width 128.

epsilon : float, optional

Epsilon added by the set convolutions before dividing by the density channel. Defaults to 1e-2.

+
+
+
+
+

Returns

+
+
model.Model:

ConvNP model.

+
+
+
+
+

Raises

+
+
NotImplementedError

If specified backend has no default dtype.

+
+
+
+
+ +
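A hedged construction sketch (values illustrative; ConvNP normally calls this for you with data-informed defaults)::

    import deepsensor.torch  # a backend must be selected first
    from deepsensor.model.nps import construct_neural_process

    neural_process = construct_neural_process(
        dim_x=2,
        dim_yc=(1, 1),   # two context sets with one output dimension each
        dim_yt=1,
        likelihood="cnp",
    )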
+
+deepsensor.model.nps.compute_encoding_tensor(model, task: Task)
+

Compute the encoding tensor for a given task.

+
+

Parameters

+
+
model

Model object.

+
+
task : Task

Task object containing context and target sets.

+
+
+
+
+

Returns

+
+
encodingnumpy.ndarray

Encoding tensor.

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/plot.html b/html/reference/plot.html
new file mode 100644
index 00000000..89e42b1c
--- /dev/null
+++ b/html/reference/plot.html
@@ -0,0 +1,393 @@
deepsensor.plot module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.plot module

+
+
+deepsensor.plot.acquisition_fn(task: Task, acquisition_fn_ds: ndarray, X_new_df: DataFrame, data_processor: DataProcessor, crs: Axes | PolarAxes | AitoffAxes | HammerAxes | LambertAxes | MollweideAxes | Axes3D, col_dim: str = 'iteration', cmap: str | Colormap = 'Greys_r', figsize: int = 3, add_colorbar: bool = True, max_ncol: int = 5) Figure
+
+
Args:
+
task (Task):

Task containing the context set used to compute the acquisition function.

+
+
acquisition_fn_ds (numpy.ndarray):

Acquisition function dataset.

+
+
X_new_df (pandas.DataFrame):

Dataframe containing the placement locations.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set and placement locations.

+
+
crs (matplotlib.axes.Axes | matplotlib.projections.polar.PolarAxes | matplotlib.projections.geo.AitoffAxes | matplotlib.projections.geo.HammerAxes | matplotlib.projections.geo.LambertAxes | matplotlib.projections.geo.MollweideAxes | mpl_toolkits.mplot3d.Axes3D):

Coordinate reference system for the plots.

+
+
col_dim (str, optional):

Column dimension to plot over, by default “iteration”.

+
+
cmap (str | matplotlib.colors.Colormap, optional):

Color map to use for the plots, by default “Greys_r”.

+
+
figsize (int, optional):

Figure size in inches, by default 3.

+
+
add_colorbar (bool, optional):

Whether to add a colorbar to the plots, by default True.

+
+
max_ncol (int, optional):

Maximum number of columns to use for the plots, by default 5.

+
+
+
+
Returns:
+
matplotlib.pyplot.Figure

A figure containing the acquisition function plots.

+
+
+
+
Raises:
+
ValueError:

If a column dimension is encountered that is not one of ["time", "sample"].

+
+
AssertionError:

If the number of columns in the acquisition function dataset is greater than max_ncol.

+
+
+
+
+
+ +
+
+deepsensor.plot.context_encoding(model, task: Task, task_loader: TaskLoader, batch_idx: int = 0, context_set_idxs: List[int] | int | None = None, land_idx: int | None = None, cbar: bool = True, clim: Tuple | None = None, cmap: str | Colormap = 'viridis', verbose_titles: bool = True, titles: dict | None = None, size: int = 3, return_axes: bool = False)
+

Plot the encoding of a context set in a task.

+
+
Args:
+
model (DeepSensorModel):

DeepSensor model.

+
+
task (Task):

Task containing context set to plot encoding of …

+
+
task_loader (TaskLoader):

DataLoader used to load the data, containing context set metadata used for plotting.

+
+
batch_idx (int, optional):

Batch index in encoding to plot, by default 0.

+
+
context_set_idxs (List[int] | int, optional):

Indices of context sets to plot, by default None (plots all context +sets).

+
+
land_idx (int, optional):

Index of the land mask in the encoding (used to overlay a land contour on plots), by default None.

+
+
cbar (bool, optional):

Whether to add a colorbar to the plots, by default True.

+
+
clim (tuple, optional):

Colorbar limits, by default None.

+
+
cmap (str | matplotlib.colors.Colormap, optional):

Color map to use for the plots, by default “viridis”.

+
+
verbose_titles (bool, optional):

Whether to include verbose titles for the variable IDs in the context set (including the time index), by default True.

+
+
titles (dict, optional):

Dict of titles to override for each subplot, by default None. If None, titles are generated from context set metadata.

+
+
size (int, optional):

Size of the figure in inches, by default 3.

+
+
return_axes (bool, optional):

Whether to return the axes of the figure, by default False.

+
+
+
+
Returns:
+
matplotlib.figure.Figure | Tuple[matplotlib.figure.Figure, matplotlib.pyplot.Axes]:

Either a figure containing the context set encoding plots, or a tuple containing the figure and the axes of the figure (if return_axes was set to True).

+
+
+
+
+
+ +
+
+deepsensor.plot.feature_maps(model, task: Task, n_features_per_layer: int = 1, seed: int | None = None, figsize: int = 3, add_colorbar: bool = False, cmap: str | Colormap = 'Greys') Figure
+

Plot the feature maps of a ConvNP model’s decoder layers after a forward pass with a Task.

+

Currently only plots feature maps for the downsampling path.

+
+
Args:
+
model (ConvNP):

+
+
task (Task):

+
+
n_features_per_layer (int, optional):

…, by default 1.

+
+
seed (int, optional):

…, by default None.

+
+
figsize (int, optional):

…, by default 3.

+
+
add_colorbar (bool, optional):

…, by default False.

+
+
cmap (str | matplotlib.colors.Colormap, optional):

…, by default “Greys”.

+
+
+
+
Returns:
+
matplotlib.figure.Figure:

A figure containing the feature maps.

+
+
+
+
Raises:
+
ValueError:

If the backend is not recognised.

+
+
+
+
+
+ +
+
+deepsensor.plot.offgrid_context(axes: ndarray | List[Axes] | Tuple[Axes], task: Task, data_processor: DataProcessor | None = None, task_loader: TaskLoader | None = None, plot_target: bool = False, add_legend: bool = True, context_set_idxs: List[int] | int | None = None, markers: str | None = None, colors: str | None = None, **scatter_kwargs) None
+

Plot the off-grid context points on axes.

+

Uses the DataProcessor to unnormalise the context coordinates if one is provided.

+
+
Args:
+
axes (numpy.ndarray | List[matplotlib.axes.Axes] | Tuple[matplotlib.axes.Axes]):

Axes to plot on.

+
+
task (Task):

Task containing the context set to plot.

+
+
data_processor (DataProcessor, optional):

Data processor used to unnormalise the context set, by default None.

+
+
task_loader (TaskLoader, optional):

Task loader used to load the data, containing context set metadata used for plotting, by default None.

+
+
plot_target (bool, optional):

Whether to plot the target set, by default False.

+
+
add_legend (bool, optional):

Whether to add a legend to the plot, by default True.

+
+
context_set_idxs (List[int] | int, optional):

Indices of context sets to plot, by default None (plots all context +sets).

+
+
markers (str, optional):

Marker styles to use for each context set, by default None.

+
+
colors (str, optional):

Colors to use for each context set, by default None.

+
+
scatter_kwargs:

Additional keyword arguments to pass to the scatter plot.

+
+
+
+
Returns:

None.

+
+
+
+ +
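A hedged plotting sketch (assumes the task, data_processor, and task_loader objects from earlier pages)::

    import matplotlib.pyplot as plt
    import deepsensor.plot

    fig, ax = plt.subplots()
    deepsensor.plot.offgrid_context(
        [ax], task, data_processor, task_loader, add_legend=True
    )
    plt.show()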
+
+deepsensor.plot.offgrid_context_observations(axes: ndarray | List[Axes] | Tuple[Axes], task: Task, data_processor: DataProcessor, task_loader: TaskLoader, context_set_idx: int, format_str: str | None = None, extent: Tuple[int, int, int, int] | None = None, color: str = 'black') None
+

Plot unnormalised context observation values.

+
+
Args:
+
axes (numpy.ndarray | List[matplotlib.axes.Axes] | Tuple[matplotlib.axes.Axes]):

Axes to plot on.

+
+
task (Task):

Task containing the context set to plot.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set.

+
+
task_loader (TaskLoader):

Task loader used to load the data, containing context set metadata used for plotting.

+
+
context_set_idx (int):

Index of the context set to plot.

+
+
format_str (str, optional):

Format string for the context observation values, by default None.

+
+
extent (Tuple[int, int, int, int], optional):

Extent of the plot, by default None.

+
+
color (str, optional):

Color of the text, by default “black”.

+
+
+
+
Returns:

None.

+
+
Raises:
+
AssertionError:

If the context set is gridded.

+
+
AssertionError:

If the context set is not 1D.

+
+
AssertionError:

If the task’s “Y_c” value for the context set ID is not 2D.

+
+
AssertionError:

If the task’s “Y_c” value for the context set ID does not have exactly one variable.

+
+
+
+
+
+ +
+
+deepsensor.plot.placements(task: Task, X_new_df: DataFrame, data_processor: DataProcessor, crs: Axes | PolarAxes | AitoffAxes | HammerAxes | LambertAxes | MollweideAxes | Axes3D, extent: Tuple[int, int, int, int] | str | None = None, figsize: int = 3, **scatter_kwargs) Figure
+

+
+
Args:
+
task (Task):

Task containing the context set used to compute the acquisition function.

+
+
X_new_df (pandas.DataFrame):

Dataframe containing the placement locations.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set and placement locations.

+
+
crs (matplotlib.axes.Axes | matplotlib.projections.polar.PolarAxes | matplotlib.projections.geo.AitoffAxes | matplotlib.projections.geo.HammerAxes | matplotlib.projections.geo.LambertAxes | matplotlib.projections.geo.MollweideAxes | mpl_toolkits.mplot3d.Axes3D):

Coordinate reference system for the plots.

+
+
extent (Tuple[int, int, int, int] | str, optional):

Extent of the plots, by default None.

+
+
figsize (int, optional):

Figure size in inches, by default 3.

+
+
+
+
Returns:
+
matplotlib.figure.Figure

A figure containing the placement plots.

+
+
+
+
+
+ +
+
+deepsensor.plot.receptive_field(receptive_field, data_processor: DataProcessor, crs: Axes | PolarAxes | AitoffAxes | HammerAxes | LambertAxes | MollweideAxes | Axes3D, extent: str = 'global') Figure
+

+
+
Args:
+
receptive_field (…):

Receptive field to plot.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set.

+
+
crs (matplotlib.axes.Axes | matplotlib.projections.polar.PolarAxes | matplotlib.projections.geo.AitoffAxes | matplotlib.projections.geo.HammerAxes | matplotlib.projections.geo.LambertAxes | matplotlib.projections.geo.MollweideAxes | mpl_toolkits.mplot3d.Axes3D):

Coordinate reference system for the plots.

+
+
extent (str, optional):

Extent of the plot, by default “global”.

+
+
+
+
Returns:

matplotlib.figure.Figure.

+
+
+
+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/tensorflow/index.html b/html/reference/tensorflow/index.html
new file mode 100644
index 00000000..fe6fc3b3
--- /dev/null
+++ b/html/reference/tensorflow/index.html
@@ -0,0 +1,123 @@
tensorflow module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

tensorflow module

+

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/torch/index.html b/html/reference/torch/index.html
new file mode 100644
index 00000000..eaf90672
--- /dev/null
+++ b/html/reference/torch/index.html
@@ -0,0 +1,123 @@
torch module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

torch module

+

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/train/index.html b/html/reference/train/index.html
new file mode 100644
index 00000000..cf7ac87a
--- /dev/null
+++ b/html/reference/train/index.html
@@ -0,0 +1,135 @@
train module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

train module

+ +
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/reference/train/train.html b/html/reference/train/train.html
new file mode 100644
index 00000000..180f8eb4
--- /dev/null
+++ b/html/reference/train/train.html
@@ -0,0 +1,184 @@
deepsensor.train.train — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.train.train

+
+
+deepsensor.train.train.set_gpu_default_device() None
+

Set default GPU device for the backend.

+
+

Raises

+
+
RuntimeError

If no GPU is available.

+
+
RuntimeError

If backend is not supported.

+
+
NotImplementedError

If backend is not supported.

+
+
+
+
+

Returns

+

None.

+
+
+ +
+
+deepsensor.train.train.train_epoch(model: ConvNP, tasks: List[Task], lr: float = 5e-05, batch_size: int = None, opt=None, progress_bar=False, tqdm_notebook=False) List[float]
+

Train model for one epoch.

+
+

Parameters

+
+
model : ConvNP

Model to train.

tasks : List[Task]

List of tasks to train on.

lr : float, optional

Learning rate, by default 5e-5.

batch_size : int, optional

Batch size. Defaults to None. If None, no batching is performed.

opt : Optimizer, optional

TF or Torch optimizer. Defaults to None. If None, tensorflow.keras.optimizer.Adam is used.

progress_bar : bool, optional

Whether to display a progress bar. Defaults to False.

tqdm_notebook : bool, optional

Whether to use a notebook progress bar. Defaults to False.

+
+
+
+
+

Returns

+
+
List[float]

List of losses for each task/batch.

+
+
+
+
+ +
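A hedged training-loop sketch (assumes the model and train_tasks objects from the Quickstart)::

    from deepsensor.train.train import set_gpu_default_device, train_epoch

    set_gpu_default_device()  # optional; raises RuntimeError if no GPU is available

    losses = []
    for epoch in range(10):
        batch_losses = train_epoch(model, train_tasks, lr=5e-05, progress_bar=True)
        losses.append(sum(batch_losses) / len(batch_losses))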
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/html/search.html b/html/search.html
new file mode 100644
index 00000000..bf50ab6e
--- /dev/null
+++ b/html/search.html
@@ -0,0 +1,123 @@
Search — DeepSensor 0.2.5 documentation
\ No newline at end of file
diff --git a/html/searchindex.js b/html/searchindex.js
new file mode 100644
index 00000000..db3e67aa
--- /dev/null
+++ b/html/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({…})  (auto-generated Sphinx search index; minified contents omitted)
"rang": 10, "progress_bar": [10, 13, 24, 30], "true": [10, 12, 13, 17, 21, 24, 25, 26], "dens": 10, "point": [10, 12, 13, 16, 17, 21, 22, 24, 26], "test_task": 10, "12": 10, "31": 10, "mean_d": 10, "std_d": 10, "x_t": [10, 13, 16, 18, 24], "origin": [10, 18], "unit": [10, 19, 22], "coordin": [10, 13, 16, 17, 19, 26], "system": [10, 26], "dataset": [10, 13, 16, 17, 19, 24, 26], "dimens": [10, 16, 18, 21, 26], "25": 10, "53": 10, "datetime64": 10, "n": [10, 16, 17, 18, 24], "float32": [10, 16, 18], "72": 10, "5": [10, 16, 19, 21, 22, 24, 25, 26, 30], "70": 10, "67": 10, "65": 10, "22": 10, "20": 10, "17": 10, "202": 10, "205": 10, "207": 10, "322": 10, "325": 10, "327": 10, "variabl": [10, 16, 17, 18, 19, 21, 22, 24, 25, 26], "air": 10, "246": 10, "244": 10, "4": 10, "245": 10, "290": 10, "2": [10, 16, 17, 18, 19, 21, 24, 25], "289": 10, "8": [10, 21], "timeseri": 10, "off": [10, 16, 22, 24, 26], "arrai": [10, 16, 17, 18, 21], "argument": [10, 16, 26], "three": [10, 21, 25], "dai": [10, 19], "decemb": 10, "mean_df": 10, "std_df": 10, "50": 10, "280": 10, "40": 10, "250": 10, "t": [10, 16, 18], "260": 10, "183056": 10, "277": 10, "947373": 10, "02": 10, "261": 10, "08943": 10, "278": 10, "219599": 10, "03": 10, "257": 10, "128185": 10, "444229": 10, "avail": [10, 16, 30], "ad": [10, 18, 21, 25], "visualis": 10, "environment": 11, "faithfulli": 11, "match": 11, "flexibl": [11, 20], "intuit": 11, "interfac": 11, "wrap": [11, 21], "around": [11, 21], "power": 11, "neuralprocessess": 11, "function": [11, 12, 18, 20, 21, 22, 24, 25, 26], "while": [11, 20], "allow": [11, 20, 21, 24], "user": 11, "familiar": 11, "world": 11, "avoid": [11, 18], "murki": 11, "depth": 11, "tensor": [11, 13, 18, 21, 25], "compat": 11, "both": 11, "abil": 11, "thank": 11, "choos": 11, "between": [11, 16, 19, 22], "undergo": 11, "activ": [11, 13], "product": 11, "research": 11, "consid": [11, 13], "bibtex": 11, "entri": [11, 18, 19], "click": 11, "button": 11, "top": 11, "right": 11, "page": 11, "togeth": 11, "instead": 11, "replac": [11, 18], "abov": 11, "command": 11, "start": 11, "commun": 11, "api": 11, "refer": [11, 17, 26], "active_learn": [11, 20], "modul": [11, 20, 21], "plot": [11, 20], "index": [11, 12, 13, 16, 19, 24, 26], "search": [11, 12, 13], "acquisitionfunct": [12, 13, 14], "acquisit": [12, 26], "__call__": [12, 13, 16, 17, 21], "ndarrai": [12, 16, 17, 18, 21, 24, 25, 26], "shape": [12, 16, 17, 18, 24], "notimplementederror": [12, 24, 25, 30], "becaus": 12, "abstract": 12, "must": [12, 21, 25], "subclass": 12, "context_set_idx": [12, 13, 18, 26], "int": [12, 13, 16, 18, 19, 21, 22, 24, 25, 26, 30], "add": [12, 17, 18, 21, 26], "observ": [12, 16, 18, 19, 21, 26], "when": [12, 16, 17, 21], "acquisitionfunctionoracl": [12, 13, 14], "signifi": 12, "acquisitionfunctionparallel": [12, 14], "across": 12, "parallel": 12, "meanstddev": [12, 14], "margin": [12, 21, 24], "varianc": [12, 21, 24, 25], "meanvari": [12, 14], "pnormstddev": [12, 14], "arg": [12, 16, 21, 24, 26], "p": 12, "kwarg": [12, 16, 17, 21, 24, 25], "norm": 12, "vector": [12, 24], "standard": [12, 24], "deviat": [12, 24], "option": [12, 13, 16, 17, 19, 21, 24, 25, 26, 30], "default": [12, 13, 16, 17, 19, 20, 21, 23, 24, 25, 26, 30], "meanmarginalentropi": [12, 14], "entropi": [12, 21, 24], "distribut": [12, 21, 25], "jointentropi": [12, 14], "joint": [12, 21, 24], "oraclema": [12, 14], "oracl": 12, "error": 12, "oraclerms": [12, 14], "squar": 12, "oraclemarginalnl": [12, 14], "neg": 12, "log": 12, "likelihood": [12, 21, 25], "oraclejointnl": [12, 
14], "seed": [12, 16, 24, 26], "42": 12, "contextdist": [12, 14], "distanc": [12, 19, 22], "closest": [12, 19], "expectedimprov": [12, 14], "onli": [12, 13, 21, 24, 26], "valid": [12, 16], "maximis": 12, "greedyalgorithm": [13, 14], "x_": 13, "dataarrai": [13, 16, 17, 19, 24], "datafram": [13, 16, 17, 19, 24, 26], "seri": [13, 16, 17, 19, 24], "x_s_mask": 13, "none": [13, 16, 17, 18, 21, 24, 25, 26, 30], "x_t_mask": [13, 24], "n_new_context": 13, "x_normalis": 13, "bool": [13, 16, 17, 18, 21, 24, 25, 26, 30], "fals": [13, 16, 17, 21, 24, 25, 26, 30], "model_infill_method": 13, "str": [13, 16, 17, 18, 21, 24, 25, 26], "query_infil": 13, "proposed_infil": 13, "target_set_idx": 13, "verbos": [13, 17, 21, 24, 26], "greedi": 13, "acquisition_fn": [13, 14, 20, 26], "list": [13, 16, 17, 21, 22, 24, 26, 30], "diff": 13, "tupl": [13, 16, 17, 18, 21, 24, 25, 26], "iter": [13, 26], "docstr": [13, 25], "todo": [13, 25], "sensor": 13, "prioriti": 13, "order": 13, "correspond": [13, 18], "space": [13, 22], "x_new_df": [13, 26], "acquisition_fn_d": [13, 26], "valueerror": [13, 16, 24, 26], "min_or_max": 13, "min": [13, 17], "max": [13, 17], "y_t_aux": 13, "mask": [13, 17, 18, 21, 26], "doe": [13, 16, 21, 26], "algorithm": [14, 20], "xarray_to_coord_array_normalis": [15, 17], "mask_coord_array_normalis": [15, 17], "da1_da2_same_grid": [15, 17], "interp_da1_to_da2": [15, 17], "append_obs_to_task": [15, 18], "flatten_x": [15, 18], "flatten_i": [15, 18], "flatten_gridded_data_in_task": [15, 18], "util": [15, 20], "construct_x1x2_d": [15, 19], "construct_circ_time_d": [15, 19], "compute_xarray_data_resolut": [15, 19], "compute_pandas_data_resolut": [15, 19], "task_loader_id": 16, "aux_at_context": 16, "aux_at_target": 16, "link": 16, "context_delta_t": 16, "target_delta_t": 16, "time_freq": 16, "xarray_interp_method": 16, "linear": 16, "discrete_xarray_sampl": 16, "dtype": [16, 18, 21, 25], "each": [16, 18, 19, 20, 21, 22, 25, 26, 30], "initialis": [16, 17, 18, 24], "behaviour": 16, "path": [16, 26], "overwrit": 16, "either": [16, 26], "els": 16, "save": [16, 17, 21], "config": [16, 17, 21], "folder": [16, 17, 21], "wa": [16, 26], "ignor": 16, "singl": [16, 18, 21, 25], "auxiliari": [16, 21, 25], "element": [16, 18], "sampl": [16, 18, 19, 21, 24, 25, 26], "second": [16, 17], "specifi": [16, 21, 25], "integ": [16, 21, 25], "case": [16, 21, 24, 25], "differ": [16, 18, 21, 25], "init": 16, "frequenc": [16, 19], "daili": [16, 19], "randomli": 16, "whether": [16, 17, 21, 24, 25, 26, 30], "discret": 16, "centr": 16, "continu": 16, "within": 16, "type": [16, 18, 21], "cast": [16, 18, 21], "config_fnam": [16, 17], "task_loader_config": 16, "json": [16, 17], "count_context_and_target_data_dim": 16, "count": 16, "context_dim": 16, "target_dim": 16, "infer_context_and_target_var_id": 16, "infer": [16, 21, 25], "id": [16, 26], "context_var_id": 16, "target_var_id": 16, "load_dask": [16, 17], "dask": [16, 17], "memori": [16, 17], "sample_da": 16, "da": [16, 17], "sampling_strat": 16, "float": [16, 18, 19, 21, 22, 24, 25, 30], "accord": 16, "strategi": 16, "assum": [16, 17], "slice": [16, 21], "alreadi": 16, "invalidsamplingstrategyerror": 16, "bound": 16, "sample_df": 16, "df": [16, 19], "frame": 16, "x_c": 16, "sample_offgrid_aux": 16, "offgrid_aux": 16, "task_gener": 16, "timestamp": 16, "split_frac": 16, "datewise_determinist": 16, "seed_overrid": 16, "sever": [16, 20], "uniformli": 16, "fraction": 16, "x1": [16, 17, 19, 24], "x2": [16, 17, 19, 24], "coord": [16, 17, 18, 24], "unnormalis": [16, 17, 21, 24, 26], 
"appli": [16, 18, 24], "split": 16, "pair": 16, "remain": 16, "base": [16, 18, 24], "overrid": [16, 21, 26], "time_nam": 17, "deepcopi": 17, "mean_std": 17, "std": [17, 24], "min_max": 17, "param": 17, "name": 17, "g": [17, 18, 19, 24], "linearli": 17, "map": [17, 26], "print": [17, 18, 21, 24], "output": [17, 21, 24, 25], "__str__": [17, 18], "add_to_config": 17, "var_id": 17, "dict": [17, 18, 24, 25, 26], "check_params_comput": 17, "data_processor_config": 17, "get_config": 17, "pre": [17, 21], "classmethod": [17, 18, 21], "add_offset": 17, "unnorm": 17, "map_arrai": 17, "map_coord_arrai": 17, "coord_arrai": 17, "map_coord": 17, "map_x1_and_x2": 17, "n_x1": [17, 18], "n_x2": [17, 18], "set_coord_param": 17, "offset": 17, "uncertainti": 17, "dev": 17, "convert": [17, 18], "coord_arr": 17, "mask_da": 17, "remov": [17, 18], "outsid": 17, "boolean": 17, "insid": 17, "da1": 17, "da2": 17, "task_dict": 18, "dictionari": [18, 24], "modifi": [18, 25], "__repr__": 18, "conveni": 18, "summari": 18, "otherwis": 18, "add_batch_dim": 18, "batch": [18, 21, 26, 30], "cast_to_float32": 18, "convert_to_tensor": 18, "deep": 18, "mask_nans_np": 18, "mask_nans_numpi": 18, "nan": [18, 21], "zero": 18, "indic": [18, 21, 25, 26], "were": 18, "miss": 18, "op": 18, "f": 18, "op_flag": 18, "recast": 18, "reshap": 18, "flag": 18, "kei": 18, "remove_nans_from_task_y_t_if_pres": 18, "present": 18, "y_t": 18, "summarise_repr": 18, "k": 18, "summarise_str": 18, "x_new": 18, "int8": 18, "int16": 18, "int32": 18, "int64": 18, "uint8": 18, "uint16": 18, "uint32": 18, "uint64": 18, "bool_": 18, "float16": 18, "float64": 18, "longdoubl": 18, "complex": 18, "complex64": 18, "complex128": 18, "clongdoubl": 18, "moduletyp": [18, 21], "autograd": 18, "tracer": 18, "box": 18, "indexedslic": 18, "jaxlib": 18, "xla_extens": 18, "arrayimpl": 18, "jax": 18, "y_new": 18, "copi": 18, "structur": [18, 20], "affect": 18, "x": [18, 24], "necessari": 18, "y": 18, "n_dim": 18, "flatten": 18, "doesn": 18, "yet": 18, "permit": 18, "gridded_d": 19, "construct": [19, 21, 25], "var": [19, 24], "2d": [19, 26], "whose": 19, "x_1": 19, "x_2": 19, "freq": 19, "circular": 19, "dictat": 19, "h": 19, "per": [19, 22], "hourli": 19, "interv": 19, "year": 19, "monthli": 19, "resolut": [19, 22, 24], "finer": 19, "degre": 19, "data_resolut": [19, 22], "n_time": 19, "1000": 19, "percentil": 19, "approxim": 19, "non": 19, "nth": 19, "neighbour": 19, "possibli": 19, "subset": [19, 21], "fewer": 19, "5th": 19, "pairwis": 19, "attribut": [20, 24], "possibl": [20, 22], "still": 20, "lot": 20, "divid": [20, 21, 25], "submodul": 20, "context_encod": [20, 26], "feature_map": [20, 26], "offgrid_context": [20, 26], "offgrid_context_observ": [20, 26], "placement": [20, 26], "receptive_field": [20, 26], "regress": 21, "probabilist": [21, 24], "neuralprocess": [21, 25], "wesselb": [21, 25], "blob": [21, 25], "architectur": [21, 25], "convgnp": [21, 25], "py": [21, 25], "multipl": [21, 25], "dispatch": 21, "plum": 21, "re": [21, 25], "forward": [21, 26], "logpdf": [21, 24], "etc": 21, "altern": 21, "instanti": 21, "auto": 21, "sensibl": 21, "These": 21, "addit": [21, 26], "customis": 21, "points_per_unit": [21, 25], "densiti": [21, 22, 25], "intern": [21, 22, 25], "discretis": [21, 25], "100": [21, 25], "cnp": [21, 25], "equival": [21, 25], "het": [21, 25], "gnp": [21, 25], "lowrank": [21, 25], "spike": [21, 25], "beta": [21, 25], "dim_x": [21, 25], "dimension": [21, 25], "input": [21, 22, 25], "dim_i": [21, 25], "dim_yc": [21, 25], "should": [21, 22, 24, 25], "equal": 
[21, 22, 25], "dim_yt": [21, 25], "dim_aux_t": [21, 25], "specif": [21, 24, 25], "conv_arch": [21, 25], "unet": [21, 25], "sep": [21, 25], "conv": [21, 25], "unet_channel": [21, 25], "everi": [21, 25], "layer": [21, 25, 26], "six": [21, 25], "64": [21, 25], "unet_kernel": [21, 25], "size": [21, 25, 26, 30], "kernel": [21, 25], "unet_resize_conv": [21, 25], "resiz": [21, 25], "rather": [21, 25], "transpos": [21, 25], "unet_resize_conv_interp_method": [21, 25], "bilinear": [21, 25], "num_basis_funct": [21, 25], "basi": [21, 22, 25], "rank": [21, 25], "dim_lv": [21, 25], "latent": [21, 25], "encoder_scal": [21, 22, 25], "length": [21, 22, 24, 25], "scale": [21, 22, 25], "embed": [21, 25], "encoder_scales_learn": [21, 25], "encod": [21, 22, 25, 26], "setconv": [21, 22, 25], "learnabl": [21, 25], "decoder_scal": [21, 22, 25], "decod": [21, 22, 25, 26], "decoder_scale_learn": [21, 25], "aux_t_mlp_lay": [21, 25], "width": [21, 25], "mlp": [21, 25], "128": [21, 25], "epsilon": [21, 25], "1e": [21, 25], "n_sampl": [21, 24, 25], "requires_grad": [21, 25], "draw": [21, 24, 25], "gradient": [21, 25], "construct_convgnp": 21, "take": 21, "unless": 21, "overridden": 21, "neural_process": [21, 25], "kera": [21, 30], "nn": 21, "tfmodel": 21, "torchmodel": 21, "model_id": 21, "weight": 21, "ar_sampl": [21, 24], "x_target_ar": 21, "ar_subsample_factor": [21, 24], "fill_typ": 21, "liter": [21, 25], "autoregress": [21, 24], "over": [21, 22, 24, 26], "infil": 21, "rest": 21, "0th": 21, "drawn": 21, "subsampl": 21, "covari": [21, 24], "dist": [21, 25], "abstractdistribut": 21, "abstractmultioutputdistribut": 21, "joint_entropi": [21, 24], "concat": 21, "along": 21, "loss_fn": 21, "fix_nois": 21, "num_lv_sampl": 21, "loss": [21, 24, 30], "nois": 21, "lv": 21, "evalu": 21, "mean_marginal_entropi": [21, 24], "modify_task": 21, "dim": 21, "noiseless": [21, 25], "slice_diag": 21, "concat_task": [21, 23], "gen_ppu": [22, 23], "ppu": 22, "loop": 22, "maximum": [22, 26], "model_ppu": 22, "gen_decoder_scal": [22, 23], "gaussian": 22, "small": 22, "chosen": 22, "gen_encoder_scal": [22, 23], "station": 22, "half": 22, "represent": 22, "smoothli": 22, "determin": [22, 24], "nearest": 22, "create_empty_spatiotemporal_xarrai": [23, 24], "increase_spatial_resolut": [23, 24], "convert_task_to_nps_arg": [23, 25], "run_nps_model": [23, 25], "run_nps_model_ar": [23, 25], "construct_neural_process": [23, 25], "compute_encoding_tensor": [23, 25], "matrix": 24, "cov": 24, "child": 24, "would": 24, "too": 24, "x_t_is_normalis": 24, "resolution_factor": 24, "append_index": 24, "regular": 24, "factor": 24, "increas": 24, "doubl": 24, "halv": 24, "determinist": 24, "metadata": [24, 26], "displai": [24, 30], "progress": [24, 30], "bar": [24, 30], "taken": 24, "n_target": 24, "coord_nam": 24, "data_var": 24, "prepend_dim": 24, "prepend_coord": 24, "x_t_normalis": 24, "build": 25, "signatur": 25, "num_sampl": 25, "mode": 25, "noisi": 25, "below": 25, "thei": 25, "explicitli": 25, "constructor": 25, "safe": 25, "cr": 26, "ax": 26, "polarax": 26, "aitoffax": 26, "hammerax": 26, "lambertax": 26, "mollweideax": 26, "axes3d": 26, "col_dim": 26, "cmap": 26, "colormap": 26, "greys_r": 26, "figsiz": 26, "3": 26, "add_colorbar": 26, "max_ncol": 26, "figur": 26, "matplotlib": 26, "polar": 26, "geo": 26, "mpl_toolkit": 26, "mplot3d": 26, "column": 26, "color": 26, "inch": 26, "colorbar": 26, "pyplot": 26, "rais": 26, "encount": 26, "assertionerror": 26, "greater": 26, "batch_idx": 26, "land_idx": 26, "cbar": 26, "clim": 26, "viridi": 26, 
"verbose_titl": 26, "titl": 26, "return_ax": 26, "dataload": 26, "land": 26, "overlai": 26, "contour": 26, "limit": 26, "subplot": 26, "n_features_per_lay": 26, "grei": 26, "downsampl": 26, "recognis": 26, "plot_target": 26, "add_legend": 26, "marker": 26, "scatter_kwarg": 26, "legend": 26, "style": 26, "keyword": 26, "scatter": 26, "format_str": 26, "extent": 26, "black": 26, "format": 26, "string": 26, "text": 26, "1d": 26, "exactli": 26, "global": 26, "recept": 26, "field": 26, "set_gpu_default_devic": [29, 30], "gpu": 30, "devic": 30, "runtimeerror": 30, "support": 30, "lr": 30, "5e": 30, "05": 30, "batch_siz": 30, "opt": 30, "tqdm_notebook": 30, "rate": 30, "perform": 30, "optim": 30, "tf": 30, "adam": 30}, "objects": {"deepsensor.active_learning.acquisition_fns": [[12, 0, 1, "", "AcquisitionFunction"], [12, 0, 1, "", "AcquisitionFunctionOracle"], [12, 0, 1, "", "AcquisitionFunctionParallel"], [12, 0, 1, "", "ContextDist"], [12, 0, 1, "", "ExpectedImprovement"], [12, 0, 1, "", "JointEntropy"], [12, 0, 1, "", "MeanMarginalEntropy"], [12, 0, 1, "", "MeanStddev"], [12, 0, 1, "", "MeanVariance"], [12, 0, 1, "", "OracleJointNLL"], [12, 0, 1, "", "OracleMAE"], [12, 0, 1, "", "OracleMarginalNLL"], [12, 0, 1, "", "OracleRMSE"], [12, 0, 1, "", "Random"], [12, 0, 1, "", "Stddev"], [12, 0, 1, "", "pNormStddev"]], "deepsensor.active_learning.acquisition_fns.AcquisitionFunction": [[12, 1, 1, "", "__call__"], [12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.ContextDist": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.ExpectedImprovement": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.JointEntropy": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.MeanStddev": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.MeanVariance": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleJointNLL": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleMAE": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleMarginalNLL": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleRMSE": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.Random": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.Stddev": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.pNormStddev": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.algorithms": [[13, 0, 1, "", "GreedyAlgorithm"]], "deepsensor.active_learning.algorithms.GreedyAlgorithm": [[13, 1, 1, "", "__call__"], [13, 1, 1, "", "__init__"]], "deepsensor.data.loader": [[16, 0, 1, "", "TaskLoader"]], "deepsensor.data.loader.TaskLoader": [[16, 1, 1, "", "__call__"], [16, 1, 1, "", "__init__"], [16, 2, 1, "", "config_fname"], [16, 1, 1, "", "count_context_and_target_data_dims"], [16, 1, 1, "", "infer_context_and_target_var_IDs"], [16, 1, 1, "", "load_dask"], [16, 1, 1, "", "sample_da"], [16, 1, 1, "", "sample_df"], [16, 1, 1, "", "sample_offgrid_aux"], [16, 1, 1, "", "save"], [16, 1, 1, "", "task_generation"]], "deepsensor.data.processor": [[17, 0, 1, "", "DataProcessor"], [17, 3, 1, "", "da1_da2_same_grid"], [17, 3, 1, "", "interp_da1_to_da2"], [17, 3, 1, "", "mask_coord_array_normalised"], [17, 3, 1, "", "xarray_to_coord_array_normalised"]], 
"deepsensor.data.processor.DataProcessor": [[17, 1, 1, "", "__call__"], [17, 1, 1, "", "__init__"], [17, 1, 1, "", "__str__"], [17, 1, 1, "", "add_to_config"], [17, 1, 1, "", "check_params_computed"], [17, 2, 1, "", "config_fname"], [17, 1, 1, "", "get_config"], [17, 1, 1, "", "load_dask"], [17, 1, 1, "", "map"], [17, 1, 1, "", "map_array"], [17, 1, 1, "", "map_coord_array"], [17, 1, 1, "", "map_coords"], [17, 1, 1, "", "map_x1_and_x2"], [17, 1, 1, "", "save"], [17, 1, 1, "", "set_coord_params"], [17, 1, 1, "", "unnormalise"]], "deepsensor.data.task": [[18, 0, 1, "", "Task"], [18, 3, 1, "", "append_obs_to_task"], [18, 3, 1, "", "flatten_X"], [18, 3, 1, "", "flatten_Y"], [18, 3, 1, "", "flatten_gridded_data_in_task"]], "deepsensor.data.task.Task": [[18, 1, 1, "", "__init__"], [18, 1, 1, "", "__repr__"], [18, 1, 1, "", "__str__"], [18, 1, 1, "", "add_batch_dim"], [18, 1, 1, "", "cast_to_float32"], [18, 1, 1, "", "convert_to_tensor"], [18, 1, 1, "", "mask_nans_nps"], [18, 1, 1, "", "mask_nans_numpy"], [18, 1, 1, "", "op"], [18, 1, 1, "", "remove_nans_from_task_Y_t_if_present"], [18, 1, 1, "", "summarise_repr"], [18, 1, 1, "", "summarise_str"]], "deepsensor.data.utils": [[19, 3, 1, "", "compute_pandas_data_resolution"], [19, 3, 1, "", "compute_xarray_data_resolution"], [19, 3, 1, "", "construct_circ_time_ds"], [19, 3, 1, "", "construct_x1x2_ds"]], "deepsensor.model.convnp": [[21, 0, 1, "", "ConvNP"], [21, 3, 1, "", "concat_tasks"]], "deepsensor.model.convnp.ConvNP": [[21, 1, 1, "", "__call__"], [21, 1, 1, "", "__init__"], [21, 1, 1, "", "ar_sample"], [21, 1, 1, "", "covariance"], [21, 1, 1, "", "joint_entropy"], [21, 1, 1, "", "load"], [21, 1, 1, "", "logpdf"], [21, 1, 1, "", "loss_fn"], [21, 1, 1, "", "mean"], [21, 1, 1, "", "mean_marginal_entropy"], [21, 1, 1, "", "modify_task"], [21, 1, 1, "", "sample"], [21, 1, 1, "", "save"], [21, 1, 1, "", "slice_diag"], [21, 1, 1, "", "stddev"], [21, 1, 1, "", "variance"]], "deepsensor.model.defaults": [[22, 3, 1, "", "gen_decoder_scale"], [22, 3, 1, "", "gen_encoder_scales"], [22, 3, 1, "", "gen_ppu"]], "deepsensor.model.model": [[24, 0, 1, "", "DeepSensorModel"], [24, 0, 1, "", "ProbabilisticModel"], [24, 3, 1, "", "create_empty_spatiotemporal_xarray"], [24, 3, 1, "", "increase_spatial_resolution"]], "deepsensor.model.model.DeepSensorModel": [[24, 1, 1, "", "__init__"], [24, 1, 1, "", "covariance"], [24, 1, 1, "", "joint_entropy"], [24, 1, 1, "", "logpdf"], [24, 1, 1, "", "loss"], [24, 1, 1, "", "mean"], [24, 1, 1, "", "mean_marginal_entropy"], [24, 1, 1, "", "predict"], [24, 1, 1, "", "sample"], [24, 1, 1, "", "stddev"], [24, 1, 1, "", "variance"]], "deepsensor.model.model.ProbabilisticModel": [[24, 1, 1, "", "covariance"], [24, 1, 1, "", "joint_entropy"], [24, 1, 1, "", "logpdf"], [24, 1, 1, "", "loss"], [24, 1, 1, "", "mean"], [24, 1, 1, "", "mean_marginal_entropy"], [24, 1, 1, "", "sample"], [24, 1, 1, "", "stddev"], [24, 1, 1, "", "variance"]], "deepsensor.model.nps": [[25, 3, 1, "", "compute_encoding_tensor"], [25, 3, 1, "", "construct_neural_process"], [25, 3, 1, "", "convert_task_to_nps_args"], [25, 3, 1, "", "run_nps_model"], [25, 3, 1, "", "run_nps_model_ar"]], "deepsensor.plot": [[26, 3, 1, "", "acquisition_fn"], [26, 3, 1, "", "context_encoding"], [26, 3, 1, "", "feature_maps"], [26, 3, 1, "", "offgrid_context"], [26, 3, 1, "", "offgrid_context_observations"], [26, 3, 1, "", "placements"], [26, 3, 1, "", "receptive_field"]], "deepsensor.train.train": [[30, 3, 1, "", "set_gpu_default_device"], [30, 3, 1, "", "train_epoch"]]}, "objtypes": 
{"0": "py:class", "1": "py:method", "2": "py:attribute", "3": "py:function"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "function", "Python function"]}, "titleterms": {"develop": [0, 5], "code": 0, "conduct": 0, "contribut": 1, "deepsensor": [1, 3, 4, 6, 11, 12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 26, 30], "commun": [2, 3], "faq": 2, "question": 2, "": [3, 11], "user": 3, "contributor": 3, "tabl": [3, 7, 11, 14, 15, 20, 23, 29], "content": [3, 7, 11, 14, 15, 20, 23, 29], "roadmap": 4, "contact": 5, "extend": 6, "new": 6, "model": [6, 21, 22, 23, 24, 25], "get": 7, "start": 7, "instal": [8, 11], "instruct": 8, "from": 8, "pypi": 8, "sourc": 8, "tutori": [9, 10], "quickstart": 10, "welcom": 11, "document": 11, "cite": 11, "quick": 11, "indic": 11, "active_learn": [12, 13, 14], "acquisition_fn": 12, "paramet": [12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 30], "return": [12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 30], "rais": [12, 13, 16, 24, 25, 30], "algorithm": 13, "modul": [14, 15, 23, 26, 27, 28, 29], "data": [15, 16, 17, 18, 19], "loader": 16, "processor": 17, "task": 18, "util": 19, "api": 20, "refer": 20, "convnp": 21, "default": 22, "np": 25, "plot": 26, "tensorflow": 27, "torch": 28, "train": [29, 30]}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 60}, "alltitles": {"Developer Code of Conduct": [[0, "developer-code-of-conduct"]], "Contributing to DeepSensor": [[1, "contributing-to-deepsensor"]], "Community FAQ": [[2, "community-faq"]], "Questions": [[2, "questions"]], "DeepSensor\u2019s user and contributor community": [[3, "deepsensor-s-user-and-contributor-community"]], "Table of contents:": [[3, null], [7, null], [14, null], [15, null], [20, null], [23, null], [29, null]], "DeepSensor Roadmap": [[4, "deepsensor-roadmap"]], "Contact the developers": [[5, "contact-the-developers"]], "Extending DeepSensor with new models": [[6, "extending-deepsensor-with-new-models"]], "Getting started": [[7, "getting-started"]], "Installation instructions": [[8, "installation-instructions"]], "Install from PyPI": [[8, "install-from-pypi"]], "Install from source": [[8, "install-from-source"]], "Tutorials": [[9, "tutorials"]], "Tutorials:": [[9, null]], "Tutorial: Quickstart": [[10, "tutorial-quickstart"]], "Welcome to DeepSensor\u2019s documentation!": [[11, "welcome-to-deepsensor-s-documentation"]], "Citing DeepSensor": [[11, "citing-deepsensor"]], "Quick installation": [[11, "quick-installation"]], "Contents:": [[11, null]], "Indices and tables": [[11, "indices-and-tables"]], "deepsensor.active_learning.acquisition_fns": [[12, "deepsensor-active-learning-acquisition-fns"]], "Parameters": [[12, "parameters"], [12, "id1"], [12, "id2"], [12, "id3"], [12, "id4"], [12, "id5"], [12, "id6"], [12, "id7"], [12, "id8"], [12, "id9"], [12, "id10"], [12, "id11"], [12, "id12"], [12, "id13"], [12, "id14"], [13, "parameters"], [13, "id1"], [16, "parameters"], [16, "id1"], [16, "id6"], [16, "id9"], [16, "id12"], [16, "id14"], [17, "parameters"], [17, "id1"], [17, "id2"], [17, "id4"], [17, "id6"], [17, "id8"], [17, "id10"], [17, "id12"], [17, "id14"], [17, "id16"], [17, "id18"], [17, "id20"], [17, "id22"], [17, "id24"], [17, "id26"], [17, 
"id28"], [18, "parameters"], [18, "id3"], [18, "id5"], [18, "id7"], [18, "id9"], [19, "parameters"], [19, "id1"], [19, "id3"], [19, "id5"], [21, "parameters"], [21, "id1"], [21, "id2"], [21, "id3"], [21, "id4"], [21, "id6"], [21, "id8"], [21, "id10"], [21, "id12"], [21, "id14"], [21, "id16"], [21, "id18"], [21, "id20"], [21, "id22"], [21, "id24"], [21, "id26"], [21, "id28"], [21, "id30"], [21, "id32"], [21, "id34"], [21, "id36"], [21, "id38"], [21, "id40"], [21, "id42"], [21, "id44"], [22, "parameters"], [22, "id1"], [22, "id3"], [24, "parameters"], [24, "id1"], [24, "id2"], [24, "id5"], [24, "id8"], [24, "id11"], [24, "id14"], [24, "id17"], [24, "id20"], [24, "id23"], [24, "id25"], [24, "id28"], [24, "id31"], [24, "id34"], [24, "id37"], [24, "id40"], [24, "id43"], [24, "id46"], [24, "id49"], [24, "id51"], [25, "parameters"], [25, "id1"], [25, "id3"], [25, "id5"], [25, "id7"], [30, "parameters"]], "Returns": [[12, "returns"], [13, "returns"], [16, "returns"], [16, "id2"], [16, "id3"], [16, "id5"], [16, "id7"], [16, "id10"], [16, "id13"], [16, "id15"], [17, "returns"], [17, "id3"], [17, "id5"], [17, "id7"], [17, "id9"], [17, "id11"], [17, "id13"], [17, "id15"], [17, "id17"], [17, "id19"], [17, "id21"], [17, "id23"], [17, "id25"], [17, "id27"], [17, "id29"], [18, "returns"], [18, "id1"], [18, "id2"], [18, "id4"], [18, "id6"], [18, "id8"], [18, "id10"], [19, "returns"], [19, "id2"], [19, "id4"], [19, "id6"], [21, "returns"], [21, "id5"], [21, "id7"], [21, "id9"], [21, "id11"], [21, "id13"], [21, "id15"], [21, "id17"], [21, "id19"], [21, "id21"], [21, "id23"], [21, "id25"], [21, "id27"], [21, "id29"], [21, "id31"], [21, "id33"], [21, "id35"], [21, "id37"], [21, "id39"], [21, "id41"], [21, "id43"], [21, "id45"], [22, "returns"], [22, "id2"], [22, "id4"], [24, "returns"], [24, "id3"], [24, "id6"], [24, "id9"], [24, "id12"], [24, "id15"], [24, "id18"], [24, "id21"], [24, "id24"], [24, "id26"], [24, "id29"], [24, "id32"], [24, "id35"], [24, "id38"], [24, "id41"], [24, "id44"], [24, "id47"], [24, "id50"], [24, "id52"], [25, "returns"], [25, "id2"], [25, "id4"], [25, "id6"], [25, "id8"], [30, "returns"], [30, "id1"]], "Raises": [[12, "raises"], [13, "raises"], [13, "id2"], [16, "raises"], [16, "id4"], [16, "id8"], [16, "id11"], [24, "raises"], [24, "id4"], [24, "id7"], [24, "id10"], [24, "id13"], [24, "id16"], [24, "id19"], [24, "id22"], [24, "id27"], [24, "id30"], [24, "id33"], [24, "id36"], [24, "id39"], [24, "id42"], [24, "id45"], [24, "id48"], [24, "id53"], [25, "raises"], [30, "raises"]], "deepsensor.active_learning.algorithms": [[13, "deepsensor-active-learning-algorithms"]], "active_learning module": [[14, "active-learning-module"]], "data module": [[15, "data-module"]], "deepsensor.data.loader": [[16, "deepsensor-data-loader"]], "deepsensor.data.processor": [[17, "deepsensor-data-processor"]], "deepsensor.data.task": [[18, "deepsensor-data-task"]], "deepsensor.data.utils": [[19, "deepsensor-data-utils"]], "API Reference": [[20, "api-reference"]], "deepsensor.model.convnp": [[21, "deepsensor-model-convnp"]], "deepsensor.model.defaults": [[22, "deepsensor-model-defaults"]], "model module": [[23, "model-module"]], "deepsensor.model.model": [[24, "deepsensor-model-model"]], "deepsensor.model.nps": [[25, "deepsensor-model-nps"]], "deepsensor.plot module": [[26, "deepsensor-plot-module"]], "tensorflow module": [[27, "tensorflow-module"]], "torch module": [[28, "torch-module"]], "train module": [[29, "train-module"]], "deepsensor.train.train": [[30, "deepsensor-train-train"]]}, "indexentries": 
{"acquisitionfunction (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunction"]], "acquisitionfunctionoracle (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunctionOracle"]], "acquisitionfunctionparallel (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunctionParallel"]], "contextdist (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.ContextDist"]], "expectedimprovement (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.ExpectedImprovement"]], "jointentropy (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.JointEntropy"]], "meanmarginalentropy (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy"]], "meanstddev (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.MeanStddev"]], "meanvariance (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.MeanVariance"]], "oraclejointnll (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleJointNLL"]], "oraclemae (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMAE"]], "oraclemarginalnll (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMarginalNLL"]], "oraclermse (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleRMSE"]], "random (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.Random"]], "stddev (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.Stddev"]], "__call__() (deepsensor.active_learning.acquisition_fns.acquisitionfunction method)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunction.__call__"]], "__init__() (deepsensor.active_learning.acquisition_fns.acquisitionfunction method)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunction.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.contextdist method)": [[12, "deepsensor.active_learning.acquisition_fns.ContextDist.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.expectedimprovement method)": [[12, "deepsensor.active_learning.acquisition_fns.ExpectedImprovement.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.jointentropy method)": [[12, "deepsensor.active_learning.acquisition_fns.JointEntropy.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.meanmarginalentropy method)": [[12, "deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.meanstddev method)": [[12, "deepsensor.active_learning.acquisition_fns.MeanStddev.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.meanvariance method)": [[12, "deepsensor.active_learning.acquisition_fns.MeanVariance.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclejointnll method)": [[12, 
"deepsensor.active_learning.acquisition_fns.OracleJointNLL.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclemae method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMAE.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclemarginalnll method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMarginalNLL.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclermse method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleRMSE.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.random method)": [[12, "deepsensor.active_learning.acquisition_fns.Random.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.stddev method)": [[12, "deepsensor.active_learning.acquisition_fns.Stddev.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.pnormstddev method)": [[12, "deepsensor.active_learning.acquisition_fns.pNormStddev.__init__"]], "pnormstddev (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.pNormStddev"]], "greedyalgorithm (class in deepsensor.active_learning.algorithms)": [[13, "deepsensor.active_learning.algorithms.GreedyAlgorithm"]], "__call__() (deepsensor.active_learning.algorithms.greedyalgorithm method)": [[13, "deepsensor.active_learning.algorithms.GreedyAlgorithm.__call__"]], "__init__() (deepsensor.active_learning.algorithms.greedyalgorithm method)": [[13, "deepsensor.active_learning.algorithms.GreedyAlgorithm.__init__"]], "taskloader (class in deepsensor.data.loader)": [[16, "deepsensor.data.loader.TaskLoader"]], "__call__() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.__call__"]], "__init__() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.__init__"]], "config_fname (deepsensor.data.loader.taskloader attribute)": [[16, "deepsensor.data.loader.TaskLoader.config_fname"]], "count_context_and_target_data_dims() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.count_context_and_target_data_dims"]], "infer_context_and_target_var_ids() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.infer_context_and_target_var_IDs"]], "load_dask() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.load_dask"]], "sample_da() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.sample_da"]], "sample_df() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.sample_df"]], "sample_offgrid_aux() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.sample_offgrid_aux"]], "save() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.save"]], "task_generation() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.task_generation"]], "dataprocessor (class in deepsensor.data.processor)": [[17, "deepsensor.data.processor.DataProcessor"]], "__call__() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.__call__"]], "__init__() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.__init__"]], "__str__() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.__str__"]], "add_to_config() (deepsensor.data.processor.dataprocessor method)": [[17, 
"deepsensor.data.processor.DataProcessor.add_to_config"]], "check_params_computed() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.check_params_computed"]], "config_fname (deepsensor.data.processor.dataprocessor attribute)": [[17, "deepsensor.data.processor.DataProcessor.config_fname"]], "da1_da2_same_grid() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.da1_da2_same_grid"]], "get_config() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.get_config"]], "interp_da1_to_da2() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.interp_da1_to_da2"]], "load_dask() (deepsensor.data.processor.dataprocessor class method)": [[17, "deepsensor.data.processor.DataProcessor.load_dask"]], "map() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map"]], "map_array() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_array"]], "map_coord_array() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_coord_array"]], "map_coords() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_coords"]], "map_x1_and_x2() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_x1_and_x2"]], "mask_coord_array_normalised() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.mask_coord_array_normalised"]], "save() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.save"]], "set_coord_params() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.set_coord_params"]], "unnormalise() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.unnormalise"]], "xarray_to_coord_array_normalised() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.xarray_to_coord_array_normalised"]], "task (class in deepsensor.data.task)": [[18, "deepsensor.data.task.Task"]], "__init__() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.__init__"]], "__repr__() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.__repr__"]], "__str__() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.__str__"]], "add_batch_dim() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.add_batch_dim"]], "append_obs_to_task() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.append_obs_to_task"]], "cast_to_float32() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.cast_to_float32"]], "convert_to_tensor() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.convert_to_tensor"]], "flatten_x() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.flatten_X"]], "flatten_y() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.flatten_Y"]], "flatten_gridded_data_in_task() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.flatten_gridded_data_in_task"]], "mask_nans_nps() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.mask_nans_nps"]], "mask_nans_numpy() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.mask_nans_numpy"]], "op() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.op"]], 
"remove_nans_from_task_y_t_if_present() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.remove_nans_from_task_Y_t_if_present"]], "summarise_repr() (deepsensor.data.task.task class method)": [[18, "deepsensor.data.task.Task.summarise_repr"]], "summarise_str() (deepsensor.data.task.task class method)": [[18, "deepsensor.data.task.Task.summarise_str"]], "compute_pandas_data_resolution() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.compute_pandas_data_resolution"]], "compute_xarray_data_resolution() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.compute_xarray_data_resolution"]], "construct_circ_time_ds() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.construct_circ_time_ds"]], "construct_x1x2_ds() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.construct_x1x2_ds"]], "convnp (class in deepsensor.model.convnp)": [[21, "deepsensor.model.convnp.ConvNP"]], "__call__() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.__call__"]], "__init__() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.__init__"]], "ar_sample() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.ar_sample"]], "concat_tasks() (in module deepsensor.model.convnp)": [[21, "deepsensor.model.convnp.concat_tasks"]], "covariance() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.covariance"]], "joint_entropy() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.joint_entropy"]], "load() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.load"]], "logpdf() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.logpdf"]], "loss_fn() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.loss_fn"]], "mean() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.mean"]], "mean_marginal_entropy() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.mean_marginal_entropy"]], "modify_task() (deepsensor.model.convnp.convnp class method)": [[21, "deepsensor.model.convnp.ConvNP.modify_task"]], "sample() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.sample"]], "save() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.save"]], "slice_diag() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.slice_diag"]], "stddev() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.stddev"]], "variance() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.variance"]], "gen_decoder_scale() (in module deepsensor.model.defaults)": [[22, "deepsensor.model.defaults.gen_decoder_scale"]], "gen_encoder_scales() (in module deepsensor.model.defaults)": [[22, "deepsensor.model.defaults.gen_encoder_scales"]], "gen_ppu() (in module deepsensor.model.defaults)": [[22, "deepsensor.model.defaults.gen_ppu"]], "deepsensormodel (class in deepsensor.model.model)": [[24, "deepsensor.model.model.DeepSensorModel"]], "probabilisticmodel (class in deepsensor.model.model)": [[24, "deepsensor.model.model.ProbabilisticModel"]], "__init__() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.__init__"]], "covariance() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.covariance"]], 
"covariance() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.covariance"]], "create_empty_spatiotemporal_xarray() (in module deepsensor.model.model)": [[24, "deepsensor.model.model.create_empty_spatiotemporal_xarray"]], "increase_spatial_resolution() (in module deepsensor.model.model)": [[24, "deepsensor.model.model.increase_spatial_resolution"]], "joint_entropy() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.joint_entropy"]], "joint_entropy() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.joint_entropy"]], "logpdf() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.logpdf"]], "logpdf() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.logpdf"]], "loss() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.loss"]], "loss() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.loss"]], "mean() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.mean"]], "mean() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.mean"]], "mean_marginal_entropy() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.mean_marginal_entropy"]], "mean_marginal_entropy() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.mean_marginal_entropy"]], "predict() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.predict"]], "sample() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.sample"]], "sample() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.sample"]], "stddev() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.stddev"]], "stddev() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.stddev"]], "variance() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.variance"]], "variance() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.variance"]], "compute_encoding_tensor() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.compute_encoding_tensor"]], "construct_neural_process() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.construct_neural_process"]], "convert_task_to_nps_args() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.convert_task_to_nps_args"]], "run_nps_model() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.run_nps_model"]], "run_nps_model_ar() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.run_nps_model_ar"]], "acquisition_fn() (in module deepsensor.plot)": [[26, "deepsensor.plot.acquisition_fn"]], "context_encoding() (in module deepsensor.plot)": [[26, "deepsensor.plot.context_encoding"]], "feature_maps() (in module deepsensor.plot)": [[26, "deepsensor.plot.feature_maps"]], "offgrid_context() (in module deepsensor.plot)": [[26, "deepsensor.plot.offgrid_context"]], "offgrid_context_observations() (in module deepsensor.plot)": [[26, 
"deepsensor.plot.offgrid_context_observations"]], "placements() (in module deepsensor.plot)": [[26, "deepsensor.plot.placements"]], "receptive_field() (in module deepsensor.plot)": [[26, "deepsensor.plot.receptive_field"]], "set_gpu_default_device() (in module deepsensor.train.train)": [[30, "deepsensor.train.train.set_gpu_default_device"]], "train_epoch() (in module deepsensor.train.train)": [[30, "deepsensor.train.train.train_epoch"]]}}) \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 00000000..5be5f58f --- /dev/null +++ b/index.html @@ -0,0 +1,166 @@ + + + + + + + Welcome to DeepSensor’s documentation! — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Welcome to DeepSensor’s documentation!

+

DeepSensor is a Python package and open-source project for modelling environmental data with neural processes.

+

DeepSensor aims to faithfully match the flexibility of neural processes with a simple and intuitive interface. DeepSensor wraps around the powerful neuralprocesses package for the core modelling functionality, while allowing users to stay in the familiar xarray and pandas world and avoid the murky depths of tensors!

+

DeepSensor is also compatible with both PyTorch and TensorFlow for its machine learning abilities, thanks to the backends package. Simply import deepsensor.torch or import deepsensor.tensorflow to choose between them!
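For example, a minimal sketch of choosing a backend (the variable names are hypothetical; a full walkthrough is in the Quickstart tutorial):

import deepsensor.torch  # or: import deepsensor.tensorflow

from deepsensor.model.convnp import ConvNP

# data_processor and task_loader are assumed to be a pre-built DataProcessor
# and TaskLoader, as constructed in the Quickstart tutorial.
model = ConvNP(data_processor, task_loader)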

+
+

Note

+

This package is currently undergoing active development. If you are interested in using DeepSensor in production, please get in touch.

+
+
+

Citing DeepSensor

+

If you use DeepSensor in your research, please consider citing the repository. You can generate a BibTeX entry by clicking the ‘Cite this repository’ button on the top right of this page.

+
+
+

Quick installation

+

The easiest way to install the DeepSensor package is with pip, together with the backend of your choice. In this example we use the PyTorch backend:

+
$ pip install deepsensor torch
+
+
+

To install the TensorFlow backend instead, simply replace torch with tensorflow in the above command.
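For example, the equivalent command with the backend swapped:

$ pip install deepsensor tensorflow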

+ +
+
+
+

Indices and tables

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/objects.inv b/objects.inv new file mode 100644 index 00000000..40e085c6 Binary files /dev/null and b/objects.inv differ diff --git a/reference/active_learning/acquisition_fns.html b/reference/active_learning/acquisition_fns.html new file mode 100644 index 00000000..6c4f67b0 --- /dev/null +++ b/reference/active_learning/acquisition_fns.html @@ -0,0 +1,462 @@ + + + + + + + deepsensor.active_learning.acquisition_fns — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.active_learning.acquisition_fns

+
+
+class deepsensor.active_learning.acquisition_fns.AcquisitionFunction(model: ProbabilisticModel)
+

Parent class for acquisition functions.

+
+
+__call__(task: Task) ndarray
+

+
+

Parameters

+
+
taskTask

Task object containing context and target sets.

+
+
+
+
+

Returns

+
+
numpy.ndarray

Acquisition function value(s). Shape ().

+
+
+
+
+

Raises

+
+
NotImplementedError

Because this is an abstract method, it must be implemented by the +subclass.

+
+
+
+
+ +
+
+__init__(model: ProbabilisticModel)
+
+

Parameters

+
+
modelProbabilisticModel

+
+
context_set_idxint

Index of context set to add new observations to when computing the +acquisition function.

+
+
+
+
+ +
+ +
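As an illustration of the interface above, here is a minimal sketch of a custom acquisition function. The signatures follow the documentation above; the class name and body are hypothetical, and the model attribute is assumed to be stored by the parent __init__:

import numpy as np

from deepsensor.active_learning.acquisition_fns import AcquisitionFunction
from deepsensor.data.task import Task


class MeanStddevSketch(AcquisitionFunction):
    """Hypothetical example: mean of the marginal standard deviations."""

    def __call__(self, task: Task) -> np.ndarray:
        # self.model is assumed to be the ProbabilisticModel passed to __init__.
        return np.mean(self.model.stddev(task))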
+
+class deepsensor.active_learning.acquisition_fns.AcquisitionFunctionOracle(model: ProbabilisticModel)
+

Signifies that the acquisition function is computed using the true +target values.

+
+ +
+
+class deepsensor.active_learning.acquisition_fns.AcquisitionFunctionParallel(model: ProbabilisticModel)
+

Parent class for acquisition functions that are computed across all search +points in parallel.

+
+ +
+
+class deepsensor.active_learning.acquisition_fns.MeanStddev(model: ProbabilisticModel)
+

Mean of the marginal standard deviations.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.MeanVariance(model: ProbabilisticModel)
+

Mean of the marginal variances.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.pNormStddev(*args, p: int = 1, **kwargs)
+

p-norm of the vector of marginal standard deviations.

+
+
+__init__(*args, p: int = 1, **kwargs)
+

+
+

Parameters

+
+
pint, optional

…, by default 1

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy(model: ProbabilisticModel)
+

Mean of the entropies of the marginal predictive distributions.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.JointEntropy(model: ProbabilisticModel)
+

Joint entropy of the predictive distribution.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleMAE(model: ProbabilisticModel)
+

Oracle mean absolute error.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleRMSE(model: ProbabilisticModel)
+

Oracle root mean squared error.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleMarginalNLL(model: ProbabilisticModel)
+

Oracle marginal negative log-likelihood.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.OracleJointNLL(model: ProbabilisticModel)
+

Oracle joint negative log-likelihood.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.Random(seed: int = 42)
+

Random acquisition function.

+
+
+__init__(seed: int = 42)
+

+
+

Parameters

+
+
seedint, optional

Random seed, by default 42.

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.ContextDist(context_set_idx: int = 0)
+

Distance to closest context point.

+
+
+__init__(context_set_idx: int = 0)
+

+
+

Parameters

+
+
context_set_idxint, optional

…, by default 0

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.Stddev(model: ProbabilisticModel)
+

Model standard deviation.

+
+
+__init__(model: ProbabilisticModel)
+

+
+

Parameters

+
+
modelProbabilisticModel

+
+
+
+
+ +
+ +
+
+class deepsensor.active_learning.acquisition_fns.ExpectedImprovement(model: ProbabilisticModel, context_set_idx: int = 0)
+

Expected improvement acquisition function.

+
+

Note

+

The current implementation of this acquisition function is only valid +for maximisation.

+
+
+
+__init__(model: ProbabilisticModel, context_set_idx: int = 0)
+
+

Parameters

+
+
modelProbabilisticModel

+
+
context_set_idxint

Index of context set to add new observations to when computing the +acquisition function.

+
+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/active_learning/algorithms.html b/reference/active_learning/algorithms.html new file mode 100644 index 00000000..4c99c2ca --- /dev/null +++ b/reference/active_learning/algorithms.html @@ -0,0 +1,226 @@ + + + + + + + deepsensor.active_learning.algorithms — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.active_learning.algorithms

+
+
+class deepsensor.active_learning.algorithms.GreedyAlgorithm(model: DeepSensorModel, X_s: Dataset | DataArray | DataFrame | Series | Index, X_t: Dataset | DataArray | DataFrame | Series | Index, X_s_mask: DataArray | Dataset | None = None, X_t_mask: DataArray | Dataset | None = None, N_new_context: int = 1, X_normalised: bool = False, model_infill_method: str = 'mean', query_infill: DataArray | None = None, proposed_infill: DataArray | None = None, context_set_idx: int = 0, target_set_idx: int = 0, progress_bar: bool = False, task_loader: TaskLoader | None = None, verbose: bool = False)
+

Greedy algorithm for active learning

+
+
+__call__(acquisition_fn: AcquisitionFunction, tasks: List[Task] | Task, diff: bool = False) Tuple[DataFrame, Dataset]
+

Iteratively propose new context (sensor) locations by greedily optimising the acquisition function over the search space.

+

Returns the proposed new sensor locations (in greedy +iteration/priority order) as a pandas.DataFrame, together with an xarray.Dataset of +acquisition function values over the search space.

+
+

Parameters

+
+
acquisition_fn: AcquisitionFunction

+
+
tasks: List[Task] | Task

+
+
+
+
+

Returns

+
+
X_new_df, acquisition_fn_ds: Tuple[pandas.DataFrame, xarray.Dataset]

+
+
+
+
+

Raises

+
+
ValueError

If acquisition_fn is an +AcquisitionFunctionOracle +and task_loader is None.

+
+
ValueError

If min_or_max is not "min" or "max".

+
+
ValueError

If Y_t_aux is in tasks but task_loader is None.

+
+
+
+
+ +
+
+__init__(model: DeepSensorModel, X_s: Dataset | DataArray | DataFrame | Series | Index, X_t: Dataset | DataArray | DataFrame | Series | Index, X_s_mask: DataArray | Dataset | None = None, X_t_mask: DataArray | Dataset | None = None, N_new_context: int = 1, X_normalised: bool = False, model_infill_method: str = 'mean', query_infill: DataArray | None = None, proposed_infill: DataArray | None = None, context_set_idx: int = 0, target_set_idx: int = 0, progress_bar: bool = False, task_loader: TaskLoader | None = None, verbose: bool = False)
+

+
+

Parameters

+
+
modelDeepSensorModel

Trained model to use for proposing new context points.

+
+
X_sxarray.Dataset | xarray.DataArray | pandas.DataFrame | pandas.Series | pandas.Index

Search coordinates.

+
+
X_txarray.Dataset | xarray.DataArray

Target coordinates.

+
+
X_s_maskxarray.Dataset | xarray.DataArray, optional

Mask for search coordinates. If provided, only points where mask +is True will be considered. Defaults to None.

+
+
X_t_maskxarray.Dataset | xarray.DataArray, optional

…, by default None.

+
+
N_new_contextint, optional

…, by default 1.

+
+
X_normalisedbool, optional

…, by default False.

+
+
model_infill_methodstr, optional

…, by default “mean”.

+
+
query_infillxarray.DataArray, optional

…, by default None.

+
+
proposed_infillxarray.DataArray, optional

…, by default None.

+
+
context_set_idxint, optional

…, by default 0.

+
+
target_set_idxint, optional

…, by default 0.

+
+
progress_barbool, optional

…, by default False.

+
+
min_or_maxstr, optional

…, by default “min”.

+
+
task_loaderTaskLoader, optional

…, by default None.

+
+
verbosebool, optional

…, by default False.

+
+
+
+
+

Raises

+
+
ValueError

If the model passed does not inherit from +DeepSensorModel.

+
+
+
+
+ +
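A minimal usage sketch, assuming a trained DeepSensorModel model, xarray objects X_s and X_t for the search and target coordinates, and one or more tasks (all hypothetical names):

from deepsensor.active_learning.algorithms import GreedyAlgorithm
from deepsensor.active_learning.acquisition_fns import Stddev

alg = GreedyAlgorithm(model, X_s=X_s, X_t=X_t, N_new_context=3)
# Returns proposed sensor locations and the acquisition function values over the search space
X_new_df, acquisition_fn_ds = alg(Stddev(model), tasks)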
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/active_learning/index.html b/reference/active_learning/index.html new file mode 100644 index 00000000..52bba31c --- /dev/null +++ b/reference/active_learning/index.html @@ -0,0 +1,154 @@ + + + + + + + active_learning module — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/reference/data/index.html b/reference/data/index.html new file mode 100644 index 00000000..d88d6300 --- /dev/null +++ b/reference/data/index.html @@ -0,0 +1,160 @@ + + + + + + + data module — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/reference/data/loader.html b/reference/data/loader.html new file mode 100644 index 00000000..3a2ce8c1 --- /dev/null +++ b/reference/data/loader.html @@ -0,0 +1,427 @@ + + + + + + + deepsensor.data.loader — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.loader

+
+
+class deepsensor.data.loader.TaskLoader(task_loader_ID: str | None = None, context: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, target: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, aux_at_contexts: Tuple[int, DataArray | Dataset] | None = None, aux_at_targets: DataArray | Dataset | None = None, links: Tuple[int, int] | List[Tuple[int, int]] | None = None, context_delta_t: int | List[int] = 0, target_delta_t: int | List[int] = 0, time_freq: str = 'D', xarray_interp_method: str = 'linear', discrete_xarray_sampling: bool = False, dtype: object = 'float32')
+
+
+__call__(date, *args, **kwargs)
+

Generate a task for a given date.

+
+

Parameters

+
+
date

Date for which to generate the task.

+
+
+
+
+

Returns

+
+
task: Task | List[Task]

Task object or list of task objects for each date containing the +context and target data.

+
+
+
+
+ +
+
+__init__(task_loader_ID: str | None = None, context: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, target: DataArray | Dataset | DataFrame | str | List[DataArray | Dataset | DataFrame | str] = None, aux_at_contexts: Tuple[int, DataArray | Dataset] | None = None, aux_at_targets: DataArray | Dataset | None = None, links: Tuple[int, int] | List[Tuple[int, int]] | None = None, context_delta_t: int | List[int] = 0, target_delta_t: int | List[int] = 0, time_freq: str = 'D', xarray_interp_method: str = 'linear', discrete_xarray_sampling: bool = False, dtype: object = 'float32') None
+

Initialise a TaskLoader object.

+

The behaviour is as follows: +- If all data is passed as paths, the data is loaded and the paths are overwritten with the loaded data. +- Either all data is passed as paths, or all data is passed as loaded data (otherwise a ValueError is raised). +- If all data is passed as paths, the TaskLoader can be saved with the save method (using the config).

+
+

Parameters

+
+
task_loader_ID

If loading a TaskLoader from a config file, this is the folder the +TaskLoader was saved in (using .save). If this argument is passed, all other +arguments are ignored.

+
+
contextxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset, pandas.DataFrame]

Context data. Can be a single xarray.DataArray, +xarray.Dataset or pandas.DataFrame, or a +list/tuple of these.

+
+
targetxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset, pandas.DataFrame]

Target data. Can be a single xarray.DataArray, +xarray.Dataset or pandas.DataFrame, or a +list/tuple of these.

+
+
aux_at_contextsTuple[int, xarray.DataArray | xarray.Dataset], optional

Auxiliary data at context locations. Tuple of two elements, where +the first element is the index of the context set for which the +auxiliary data will be sampled at, and the second element is the +auxiliary data, which can be a single xarray.DataArray or +xarray.Dataset. Default: None.

+
+
aux_at_targetsxarray.DataArray | xarray.Dataset, optional

Auxiliary data at target locations. Can be a single +xarray.DataArray or xarray.Dataset. Default: +None.

+
+
linksTuple[int, int] | List[Tuple[int, int]], optional

Specifies links between context and target data. Each link is a +tuple of two integers, where the first integer is the index of the +context data and the second integer is the index of the target +data. Can be a single tuple in the case of a single link. If None, +no links are specified. Default: None.

+
+
context_delta_tint | List[int], optional

Time difference between context data and t=0 (task init time). Can +be a single int (same for all context data) or a list/tuple of +ints. Default is 0.

+
+
target_delta_tint | List[int], optional

Time difference between target data and t=0 (task init time). Can +be a single int (same for all target data) or a list/tuple of ints. +Default is 0.

+
+
time_freqstr, optional

Time frequency of the data. Default: 'D' (daily).

+
+
xarray_interp_methodstr, optional

Interpolation method to use when interpolating +xarray.DataArray. Default is 'linear'.

+
+
discrete_xarray_samplingbool, optional

When randomly sampling xarray variables, whether to sample at +discrete points defined at grid cell centres, or at continuous +points within the grid. Default is False.

+
+
dtypeobject, optional

Data type of the data. Used to cast the data to the specified +dtype. Default: 'float32'.

+
+
+
+
+ +
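For example, a TaskLoader might be constructed from a gridded xarray context set and an off-grid pandas target set, then called with a date and sampling strategies. A sketch, where era5_ds and station_df are hypothetical, pre-normalised inputs:

from deepsensor.data.loader import TaskLoader

task_loader = TaskLoader(context=era5_ds, target=station_df)
# Sample all context observations and 100 random target observations for this date
task = task_loader("2020-01-01", context_sampling="all", target_sampling=100)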
+
+config_fname = 'task_loader_config.json'
+
+ +
+
+count_context_and_target_data_dims()
+

Count the number of data dimensions in the context and target data.

+
+

Returns

+

context_dims : tuple. Tuple of data dimensions in the context data. +target_dims : tuple. Tuple of data dimensions in the target data.

+
+
+

Raises

+
+
ValueError

If the context/target data is not a tuple/list of +xarray.DataArray, xarray.Dataset or +pandas.DataFrame.

+
+
+
+
+ +
+
+infer_context_and_target_var_IDs()
+

Infer the variable IDs of the context and target data.

+
+

Returns

+

context_var_IDs : tuple. Tuple of variable IDs in the context data. +target_var_IDs : tuple. Tuple of variable IDs in the target data.

+
+
+

Raises

+
+
ValueError

If the context/target data is not a tuple/list of +xarray.DataArray, xarray.Dataset or +pandas.DataFrame.

+
+
+
+
+ +
+
+load_dask() None
+

Load any dask data into memory.

+
+

Returns

+

None.

+
+
+ +
+
+sample_da(da: ~xarray.core.dataarray.DataArray | ~xarray.core.dataset.Dataset, sampling_strat: str | int | float | ~numpy.ndarray, seed: int | None = None) -> (<class 'numpy.ndarray'>, <class 'numpy.ndarray'>)
+

Sample a DataArray according to a given strategy.

+
+

Parameters

+
+
daxarray.DataArray | xarray.Dataset

DataArray to sample, assumed to be sliced for the task already.

+
+
sampling_stratstr | int | float | numpy.ndarray

Sampling strategy, either “all” or an integer for random grid cell +sampling.

+
+
seedint, optional

Seed for random sampling. Default: None.

+
+
+
+
+

Returns

+
+
DataTuple[numpy.ndarray, numpy.ndarray]

Tuple of sampled target data and sampled context data.

+
+
+
+
+

Raises

+
+
InvalidSamplingStrategyError

If the sampling strategy is not valid.

+
+
InvalidSamplingStrategyError

If a numpy coordinate array is passed to sample an xarray object, +but the coordinates are out of bounds.

+
+
+
+
+ +
+
+sample_df(df: ~pandas.core.frame.DataFrame | ~pandas.core.series.Series, sampling_strat: str | int | float | ~numpy.ndarray, seed: int | None = None) -> (<class 'numpy.ndarray'>, <class 'numpy.ndarray'>)
+

Sample a DataFrame or Series according to a given strategy.

+
+

Parameters

+
+
dfpandas.DataFrame | pandas.Series

DataFrame or Series to sample, assumed to be time-sliced for the task +already.

+
+
sampling_stratstr | int | float | numpy.ndarray

Sampling strategy, either “all” or an integer for random grid cell +sampling.

+
+
seedint, optional

Seed for random sampling. Default: None.

+
+
+
+
+

Returns

+
+
DataTuple[X_c, Y_c]

Tuple of sampled target data and sampled context data.

+
+
+
+
+

Raises

+
+
InvalidSamplingStrategyError

If the sampling strategy is not valid.

+
+
InvalidSamplingStrategyError

If a numpy coordinate array is passed to sample a pandas object, +but the DataFrame does not contain all the requested samples.

+
+
+
+
+ +
+
+sample_offgrid_aux(X_t: ndarray | Tuple[ndarray, ndarray], offgrid_aux: DataArray | Dataset) ndarray
+

Sample auxiliary data at off-grid locations.

+
+

Parameters

+
+
X_tnumpy.ndarray | Tuple[numpy.ndarray, numpy.ndarray]

Off-grid locations at which to sample the auxiliary data. Can be a +tuple of two numpy arrays, or a single numpy array.

+
+
offgrid_auxxarray.DataArray | xarray.Dataset

Auxiliary data at off-grid locations.

+
+
+
+
+

Returns

+
+
numpy.ndarray

+
+
+
+
+ +
+
+save(folder: str)
+

Save TaskLoader config to JSON in folder

+
+ +
+
+task_generation(date: Timestamp, context_sampling: str | int | float | ndarray | List[str | int | float | ndarray] = 'all', target_sampling: str | int | float | ndarray | List[str | int | float | ndarray] = 'all', split_frac: float = 0.5, datewise_deterministic: bool = False, seed_override: int | None = None) Task
+

Generate a task for a given date.

+

There are several sampling strategies available for the context and +target data:

+
+
    +
  • “all”: Sample all observations.

  • +
  • int: Sample N observations uniformly at random.

  • +
  • float: Sample a fraction of observations uniformly at random.

  • +
  • numpy.ndarray, shape (2, N): Sample N observations +at the given x1, x2 coordinates. Coords are assumed to be +unnormalised.

  • +
+
+
+

Parameters

+
+
datepandas.Timestamp

Date for which to generate the task.

+
+
context_samplingstr | int | float | numpy.ndarray | List[str | int | float | numpy.ndarray]

Sampling strategy for the context data, either a list of sampling +strategies for each context set, or a single strategy applied to +all context sets. Default is "all".

+
+
target_samplingstr | int | float | numpy.ndarray | List[str | int | float | numpy.ndarray]

Sampling strategy for the target data, either a list of sampling +strategies for each target set, or a single strategy applied to all +target sets. Default is "all".

+
+
split_fracfloat

The fraction of observations to use for the context set with the +“split” sampling strategy for linked context and target set pairs. +The remaining observations are used for the target set. Default is +0.5.

+
+
datewise_deterministicbool

Whether random sampling is datewise_deterministic based on the +date. Default is False.

+
+
seed_overrideOptional[int]

Override the seed for random sampling. This can be used to use the +same random sampling at different dates. Default is None.

+
+
+
+
+

Returns

+
+
taskTask

Task object containing the context and target data.

+
+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/data/processor.html b/reference/data/processor.html new file mode 100644 index 00000000..7e942f1c --- /dev/null +++ b/reference/data/processor.html @@ -0,0 +1,557 @@ + + + + + + + deepsensor.data.processor — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.processor

+
+
+class deepsensor.data.processor.DataProcessor(folder: str | None = None, time_name: str = 'time', x1_name: str = 'x1', x2_name: str = 'x2', x1_map: tuple | None = None, x2_map: tuple | None = None, deepcopy: bool = True, verbose: bool = False)
+

Normalise xarray and pandas data for use in deepsensor models

+
+
+__call__(data: DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame], method: str = 'mean_std') DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame]
+

Normalise data.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Data to normalise.

+
+
methodstr, optional
+
Normalisation method. Defaults to “mean_std”. Options:
    +
  • “mean_std”: Normalise to mean=0 and std=1

  • +
  • “min_max”: Normalise to min=-1 and max=1

  • +
+
+
+
+
+
+
+

Returns

+
+
xarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Normalised data.

+
+
+
+
+ +
+
+__init__(folder: str | None = None, time_name: str = 'time', x1_name: str = 'x1', x2_name: str = 'x2', x1_map: tuple | None = None, x2_map: tuple | None = None, deepcopy: bool = True, verbose: bool = False)
+

Initialise a DataProcessor object.

+
+

Parameters

+
+
folderstr, optional

Folder to load normalisation params from. Defaults to None.

+
+
x1_namestr, optional

Name of first spatial coord (e.g. “lat”). Defaults to “x1”.

+
+
x2_namestr, optional

Name of second spatial coord (e.g. “lon”). Defaults to “x2”.

+
+
x1_maptuple, optional

2-tuple of raw x1 coords to linearly map to (0, 1), respectively. +Defaults to (0, 1) (i.e. no normalisation).

+
+
x2_maptuple, optional

2-tuple of raw x2 coords to linearly map to (0, 1), respectively. +Defaults to (0, 1) (i.e. no normalisation).

+
+
deepcopybool, optional

Whether to make a deepcopy of raw data to ensure it is not changed +by reference when normalising. Defaults to True.

+
+
verbosebool, optional

Whether to print verbose output. Defaults to False.

+
+
+
+
+ +
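A minimal sketch of the normalise/unnormalise round trip, assuming a raw xarray.Dataset raw_ds with "lat"/"lon" coordinates (hypothetical data):

from deepsensor.data.processor import DataProcessor

data_processor = DataProcessor(x1_name="lat", x2_name="lon")
ds_norm = data_processor(raw_ds)                 # normalise (default "mean_std" method)
ds_unnorm = data_processor.unnormalise(ds_norm)  # invert the normalisation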
+
+__str__()
+

Return str(self).

+
+ +
+
+add_to_config(var_ID, **kwargs)
+

Add kwargs to config dict for variable var_ID

+
+ +
+
+check_params_computed(var_ID, method) bool
+

Check if normalisation params computed for a given variable.

+
+

Parameters

+
+
var_ID

+
+
method

+
+
+
+
+

Returns

+
+
bool

Whether normalisation params are computed for a given variable.

+
+
+
+
+ +
+
+config_fname = 'data_processor_config.json'
+
+ +
+
+get_config(var_ID, data, method=None)
+

Get pre-computed normalisation params or compute them for variable +var_ID.

+
+

Parameters

+
+
var_ID

+
+
data

+
+
method…, optional

…, by default None.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+classmethod load_dask(data: DataArray | Dataset)
+

Load dask data into memory.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map(data: DataArray | Dataset | DataFrame | Series, method: str | None = None, add_offset: bool = True, unnorm: bool = False)
+

Normalise or unnormalise the data values and coords in an xarray or +pandas object.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | pandas.Series

+
+
methodstr, optional

…, by default None.

+
+
add_offsetbool, optional

…, by default True.

+
+
unnormbool, optional

…, by default False.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_array(data: DataArray | Dataset | DataFrame | Series | ndarray, var_ID: str, method: str | None = None, unnorm: bool = False, add_offset: bool = True)
+

Normalise or unnormalise the data values in an xarray, pandas, or +numpy object.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | pandas.Series | numpy.ndarray

+
+
var_IDstr

+
+
methodstr, optional

…, by default None.

+
+
unnormbool, optional

…, by default False.

+
+
add_offsetbool, optional

…, by default True.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_coord_array(coord_array: ndarray, unnorm: bool = False)
+

Normalise or unnormalise a coordinate array.

+
+

Parameters

+
+
coord_arraynumpy.ndarray

Array of shape (2, N) containing coords.

+
+
unnormbool, optional

Whether to unnormalise. Defaults to False.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_coords(data: DataArray | Dataset | DataFrame | Series, unnorm=False)
+

Normalise spatial coords in a pandas or xarray object.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | pandas.Series

+
+
unnormbool, optional

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+map_x1_and_x2(x1: ndarray, x2: ndarray, unnorm: bool = False)
+

Normalise or unnormalise spatial coords in an array.

+
+

Parameters

+
+
x1numpy.ndarray

Array of shape (N_x1,) containing spatial coords of x1.

+
+
x2numpy.ndarray

Array of shape (N_x2,) containing spatial coords of x2.

+
+
unnormbool, optional

Whether to unnormalise. Defaults to False.

+
+
+
+
+

Returns

+
+
Tuple[numpy.ndarray, numpy.ndarray]

Normalised or unnormalised spatial coords of x1 and x2.

+
+
+
+
+ +
+
+save(folder: str)
+

Save DataProcessor config to JSON in folder

+
+ +
+
+set_coord_params(time_name, x1_name, x1_map, x2_name, x2_map) None
+

Set coordinate normalisation params.

+
+

Parameters

+
+
time_name

+
+
x1_name

+
+
x1_map

+
+
x2_name

+
+
x2_map

+
+
+
+
+

Returns

+

None.

+
+
+ +
+
+unnormalise(data: DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame], add_offset: bool = True) DataArray | Dataset | DataFrame | List[DataArray | Dataset | DataFrame]
+

Unnormalise data.

+
+

Parameters

+
+
dataxarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Data to unnormalise.

+
+
add_offsetbool, optional

Whether to add the offset to the data when unnormalising. Set to +False to unnormalise uncertainty values (e.g. std dev). Defaults to +True.

+
+
+
+
+

Returns

+
+
xarray.DataArray | xarray.Dataset | pandas.DataFrame | List[xarray.DataArray | xarray.Dataset | pandas.DataFrame]

Unnormalised data.

+
+
+
+
+ +
+ +
+
+deepsensor.data.processor.xarray_to_coord_array_normalised(da: Dataset | DataArray) ndarray
+

Convert xarray to normalised coordinate array.

+
+

Parameters

+
+
daxarray.Dataset | xarray.DataArray

+
+
+
+
+

Returns

+
+
numpy.ndarray

A normalised coordinate array of shape (2, N).

+
+
+
+
+ +
+
+deepsensor.data.processor.mask_coord_array_normalised(coord_arr: ndarray, mask_da: DataArray | Dataset | None)
+

Remove points from a (2, N) numpy array that are outside a gridded xarray boolean mask.

+

If coord_arr is shape (2, N), then mask_da is a shape (N,) boolean array +(True if point is inside mask, False if outside).

+
+

Parameters

+
+
coord_arr

+
+
mask_da

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
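The two helpers above compose naturally. A sketch, assuming a normalised xarray object ds_norm and a gridded boolean mask_da (hypothetical names):

from deepsensor.data.processor import (
    xarray_to_coord_array_normalised,
    mask_coord_array_normalised,
)

coord_arr = xarray_to_coord_array_normalised(ds_norm)        # shape (2, N)
coord_arr = mask_coord_array_normalised(coord_arr, mask_da)  # keep points inside the mask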
+
+deepsensor.data.processor.da1_da2_same_grid(da1: DataArray, da2: DataArray) bool
+

Check if da1 and da2 are on the same grid.

+
+

Note

+

da1 and da2 are assumed normalised by DataProcessor.

+
+
+

Parameters

+
+
da1xarray.DataArray

+
+
da2xarray.DataArray

+
+
+
+
+

Returns

+
+
bool

Whether da1 and da2 are on the same grid.

+
+
+
+
+ +
+
+deepsensor.data.processor.interp_da1_to_da2(da1: DataArray, da2: DataArray) DataArray
+

Interpolate da1 to da2.

+
+

Note

+

da1 and da2 are assumed normalised by DataProcessor.

+
+
+

Parameters

+
+
da1xarray.DataArray

+
+
da2xarray.DataArray

+
+
+
+
+

Returns

+
+
xarray.DataArray

Interpolated xarray.

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/data/task.html b/reference/data/task.html new file mode 100644 index 00000000..97e04dbb --- /dev/null +++ b/reference/data/task.html @@ -0,0 +1,330 @@ + + + + + + + deepsensor.data.task — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.task

+
+
+class deepsensor.data.task.Task(task_dict: dict)
+

Task dictionary class.

+

Inherits from dict and adds methods for printing and modifying the +data.

+
+
+__init__(task_dict: dict) None
+

Initialise a Task object.

+
+

Parameters

+
+
task_dictdict

Dictionary containing the task.

+
+
+
+
+ +
+
+__repr__()
+

Print a convenient summary of the task dictionary.

+

Print the type of each entry and if it is an array, print its shape, +otherwise print the value.

+
+ +
+
+__str__()
+

Print a convenient summary of the task dictionary.

+

For array entries, print their shape, otherwise print the value.

+
+ +
+
+add_batch_dim()
+

Add a batch dimension to the arrays in the task dictionary.

+
+

Returns

+

task : dict. Task dictionary with batch dimension added to the array elements.

+
+
+ +
+
+cast_to_float32()
+

Cast the arrays in the task dictionary to float32.

+
+

Returns

+

task : dict. Task dictionary with arrays cast to float32.

+
+
+ +
+
+convert_to_tensor()
+

Convert arrays to tensor objects based on the deep learning backend.

+
+
Returns

task: dict. Task dictionary with arrays converted to deep learning tensor objects.

+
+
+
+ +
+
+mask_nans_nps()
+
+ +
+
+mask_nans_numpy()
+

Replace NaNs with zeroes and set a mask to indicate where the NaNs were.

+
+

Returns

+

task : dict. Task with NaNs set to zeros and a mask indicating where the missing values are.

+
+
+ +
+
+op(f, op_flag=None)
+

Apply function f to the array elements of a task dictionary.

+

Useful for recasting to a different dtype or reshaping (e.g. adding a +batch dimension).

+
+

Parameters

+
+
ffunction

Function to apply to the array elements of the task.

+
+
taskdict

Task dictionary.

+
+
op_flagstr

Flag to set in the task dictionary’s ops key.

+
+
+
+
+

Returns

+
+
taskdict.

Task dictionary with f applied to the array elements and +op_flag set in the ops key.

+
+
+
+
+ +
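For instance, op can recast all array entries of a task. A sketch; the lambda and flag value are illustrative:

import numpy as np

# task: a Task produced by a TaskLoader (hypothetical)
task = task.op(lambda arr: arr.astype(np.float64), op_flag="float64")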
+
+remove_nans_from_task_Y_t_if_present()
+

If NaNs are present in task[“Y_t”], remove them (and corresponding task[“X_t”])

+
+ +
+
+classmethod summarise_repr(k, v)
+
+ +
+
+classmethod summarise_str(k, v)
+
+ +
+ +
+
+deepsensor.data.task.append_obs_to_task(task: Task, X_new: int | Dimension | int8 | int16 | int32 | int64 | uint8 | uint16 | uint32 | uint64 | bool | bool_ | float | float16 | float32 | float64 | longdouble | complex | complex64 | complex128 | clongdouble | ndarray | ModuleType[autograd.tracer.Box] | ModuleType[tensorflow.Tensor] | ModuleType[tensorflow.Variable] | ModuleType[tensorflow.IndexedSlices] | ModuleType[jaxlib.xla_extension.ArrayImpl] | ModuleType[jax.core.Tracer] | ModuleType[torch.Tensor], Y_new: int | Dimension | int8 | int16 | int32 | int64 | uint8 | uint16 | uint32 | uint64 | bool | bool_ | float | float16 | float32 | float64 | longdouble | complex | complex64 | complex128 | clongdouble | ndarray | ModuleType[autograd.tracer.Box] | ModuleType[tensorflow.Tensor] | ModuleType[tensorflow.Variable] | ModuleType[tensorflow.IndexedSlices] | ModuleType[jaxlib.xla_extension.ArrayImpl] | ModuleType[jax.core.Tracer] | ModuleType[torch.Tensor], context_set_idx: int)
+

Append a single observation to a context set in task.

+

Makes a deep copy of the data structure to avoid affecting the original +object.

+
+ +
+
+deepsensor.data.task.flatten_X(X: ndarray | Tuple[ndarray, ndarray]) ndarray
+

Convert tuple of gridded coords to (2, N) array if necessary.

+
+

Parameters

+
+
Xnumpy.ndarray | Tuple[numpy.ndarray, numpy.ndarray]

+
+
+
+
+

Returns

+
+
numpy.ndarray

+
+
+
+
+ +
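A sketch of the intended behaviour, assuming the tuple holds the per-axis coordinate vectors of a grid:

import numpy as np
from deepsensor.data.task import flatten_X

x1 = np.linspace(0, 1, 3)
x2 = np.linspace(0, 1, 4)
X_flat = flatten_X((x1, x2))  # -> (2, 12) array enumerating all grid points
X_flat = flatten_X(X_flat)    # already flat, so returned unchanged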
+
+deepsensor.data.task.flatten_Y(Y: ndarray | Tuple[ndarray, ndarray]) ndarray
+

Convert gridded data of shape (N_dim, N_x1, N_x2) to (N_dim, N_x1 * N_x2) +array if necessary.

+
+

Parameters

+
+
Ynumpy.ndarray | Tuple[numpy.ndarray, numpy.ndarray]

+
+
+
+
+

Returns

+
+
numpy.ndarray

+
+
+
+
+ +
+
+deepsensor.data.task.flatten_gridded_data_in_task(task: Task) Task
+

Convert any gridded data in Task to flattened arrays.

+

Necessary for AR sampling, which doesn’t yet permit gridded context sets.

+
+

Parameters

+
+
taskTask

+
+
+
+
+

Returns

+
+
Task

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/data/utils.html b/reference/data/utils.html new file mode 100644 index 00000000..95f4c925 --- /dev/null +++ b/reference/data/utils.html @@ -0,0 +1,244 @@ + + + + + + + deepsensor.data.utils — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.data.utils

+
+
+deepsensor.data.utils.construct_x1x2_ds(gridded_ds)
+

Construct an xarray.Dataset containing two vars, where each var is +a 2D gridded channel whose values contain the x_1 and x_2 coordinate +values, respectively.

+
+

Parameters

+
+
gridded_dsxarray.Dataset

+
+
+
+
+

Returns

+
+
xarray.Dataset

+
+
+
+
+ +
+
+deepsensor.data.utils.construct_circ_time_ds(dates, freq)
+

Return an xarray.Dataset containing a circular variable for time. +The freq entry dictates the frequency of cycling of the circular +variable. E.g.:

+
+
    +
  • 'H': cycles once per day at hourly intervals

  • +
  • 'D': cycles once per year at daily intervals

  • +
  • 'M': cycles once per year at monthly intervals

  • +
+
+
+

Parameters

+
+
dates: …

+
+
freq

+
+
+
+
+

Returns

+
+
xarray.Dataset

+
+
+
+
+ +
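For example, a daily-frequency circular encoding of day-of-year might be constructed as follows (the interpretation of the returned variables as cos/sin channels is an assumption):

import pandas as pd
from deepsensor.data.utils import construct_circ_time_ds

dates = pd.date_range("2020-01-01", "2020-12-31", freq="D")
doy_ds = construct_circ_time_ds(dates, freq="D")  # cycles once per year at daily intervals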
+
+deepsensor.data.utils.compute_xarray_data_resolution(ds: DataArray | Dataset) float
+

Computes the resolution of an xarray object with coordinates x1 and x2.

+

The data resolution is the finer of the two coordinate resolutions (x1 and +x2). For example, if x1 has a resolution of 0.1 degrees and x2 has a +resolution of 0.2 degrees, the data resolution returned will be 0.1 +degrees.

+
+

Parameters

+
+
dsxarray.DataArray | xarray.Dataset

Xarray object with coordinates x1 and x2.

+
+
+
+
+

Returns

+
+
data_resolutionfloat

Resolution of the data (in spatial units, e.g. 0.1 degrees).

+
+
+
+
+ +
+
+deepsensor.data.utils.compute_pandas_data_resolution(df: DataFrame | Series, n_times: int = 1000, percentile: int = 5) float
+

Approximates the resolution of non-gridded pandas data with indexes time, +x1, and x2.

+

The resolution is approximated as the Nth percentile of the distances +between neighbouring observations, possibly using a subset of the dates in +the data. The default is to use 1000 dates (or all dates if there are fewer +than 1000) and to use the 5th percentile. This means that the resolution is +the distance between the closest 5% of neighbouring observations.

+
+

Parameters

+
+
dfpandas.DataFrame | pandas.Series

Dataframe or series with indexes time, x1, and x2.

+
+
n_timesint, optional

Number of dates to sample. Defaults to 1000. If “all”, all dates are +used.

+
+
percentileint, optional

Percentile of pairwise distances for computing the resolution. +Defaults to 5.

+
+
+
+
+

Returns

+
+
data_resolutionfloat

Resolution of the data (in spatial units, e.g. 0.1 degrees).

+
+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/index.html b/reference/index.html new file mode 100644 index 00000000..df83215e --- /dev/null +++ b/reference/index.html @@ -0,0 +1,162 @@ + + + + + + + API Reference — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

API Reference

+

This part of the documentation contains the API reference for the package. It is structured by modules, and each module contains its respective classes, functions, and attributes. The API is designed to be as simple as possible while still allowing for a lot of flexibility. The API is divided into several submodules, which are described in the following sections.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/model/convnp.html b/reference/model/convnp.html new file mode 100644 index 00000000..d02b517f --- /dev/null +++ b/reference/model/convnp.html @@ -0,0 +1,785 @@ + + + + + + + deepsensor.model.convnp — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.convnp

+
+
+class deepsensor.model.convnp.ConvNP(*args, **kwargs)
+

A ConvNP regression probabilistic model.

+

Wraps around the neuralprocesses package to construct a ConvNP model. +See: https://github.com/wesselb/neuralprocesses/blob/main/neuralprocesses/architectures/convgnp.py

+

Multiple dispatch is implemented using plum to allow for re-using the +model’s forward prediction object when computing the logpdf, entropy, etc. +Alternatively, the model can be run forwards with a Task object of data +from the TaskLoader.

+

The ConvNP can optionally be instantiated with:

+
+
    +
  • a DataProcessor object to auto-unnormalise the data at inference +time with the .predict method.

  • +
  • a TaskLoader object to infer sensible default model parameters +from the data.

  • +
+
+

These additional parameters can be passed to the __init__ method to +customise the model, which will override any defaults inferred from a +TaskLoader.

+
+

Parameters

+
+
points_per_unitint, optional

Density of the internal discretisation. Defaults to 100.

+
+
likelihoodstr, optional

Likelihood. Must be one of "cnp" (equivalently "het"), +"gnp" (equivalently "lowrank"), or "cnp-spikes-beta" +(equivalently "spikes-beta"). Defaults to "cnp".

+
+
dim_xint, optional

Dimensionality of the inputs. Defaults to 1.

+
+
dim_yint, optional

Dimensionality of the outputs. Defaults to 1.

+
+
dim_ycint or tuple[int], optional

Dimensionality of the outputs of the context set. You should set this +if the dimensionality of the outputs of the context set is not equal +to the dimensionality of the outputs of the target set. You should +also set this if you want to use multiple context sets. In that case, +set this equal to a tuple of integers indicating the respective output +dimensionalities.

+
+
dim_ytint, optional

Dimensionality of the outputs of the target set. You should set this +if the dimensionality of the outputs of the target set is not equal to +the dimensionality of the outputs of the context set.

+
+
dim_aux_tint, optional

Dimensionality of target-specific auxiliary variables.

+
+
conv_archstr, optional

Convolutional architecture to use. Must be one of +"unet[-res][-sep]" or "conv[-res][-sep]". Defaults to +"unet".

+
+
unet_channelstuple[int], optional

Channels of every layer of the UNet. Defaults to six layers each with +64 channels.

+
+
unet_kernelsint or tuple[int], optional

Sizes of the kernels in the UNet. Defaults to 5.

+
+
unet_resize_convsbool, optional

Use resize convolutions rather than transposed convolutions in the +UNet. Defaults to False.

+
+
unet_resize_conv_interp_methodstr, optional

Interpolation method for the resize convolutions in the UNet. Can be +set to "bilinear". Defaults to “bilinear”.

+
+
num_basis_functionsint, optional

Number of basis functions for the low-rank likelihood. Defaults to +64.

+
+
dim_lvint, optional

Dimensionality of the latent variable. Setting to >0 constructs a +latent neural process. Defaults to 0.

+
+
encoder_scalesfloat or tuple[float], optional

Initial value for the length scales of the set convolutions for the +context sets embeddings. Set to a tuple equal to the number of context +sets to use different values for each set. Set to a single value to use +the same value for all context sets. Defaults to +1 / points_per_unit.

+
+
encoder_scales_learnablebool, optional

Whether the encoder SetConv length scale(s) are learnable. Defaults to +False.

+
+
decoder_scalefloat, optional

Initial value for the length scale of the set convolution in the +decoder. Defaults to 1 / points_per_unit.

+
+
decoder_scale_learnablebool, optional

Whether the decoder SetConv length scale(s) are learnable. Defaults to +False.

+
+
aux_t_mlp_layerstuple[int], optional

Widths of the layers of the MLP for the target-specific auxiliary +variable. Defaults to three layers of width 128.

+
+
epsilonfloat, optional

Epsilon added by the set convolutions before dividing by the density +channel. Defaults to 1e-2.

+
+
dtypedtype, optional

Data type.

+
+
+
+
+__call__(task, n_samples=10, requires_grad=False)
+

Compute ConvNP distribution.

+
+

Parameters

+
+
taskTask

+
+
n_samplesint, optional

Number of samples to draw from the distribution, by default 10.

+
+
requires_gradbool, optional

Whether to compute gradients, by default False.

+
+
+
+
+

Returns

+
+

The ConvNP distribution.

+
+
+
+
+ +
+
+__init__(*args, **kwargs)
+

Generate a new model using nps.construct_convgnp with default or +specified parameters.

+

This method does not take a TaskLoader or DataProcessor object, +so the model will not auto-unnormalise predictions at inference time.

+
+
+__init__(self, data_processor: deepsensor.data.processor.DataProcessor, task_loader: deepsensor.data.loader.TaskLoader, *args, verbose: bool = True, **kwargs)
+
+ +

Instantiate model from TaskLoader, using data to infer model parameters +(unless overridden).

+
+

Parameters

+
+
data_processorDataProcessor

DataProcessor object.

+
+
task_loaderTaskLoader

TaskLoader object.

+
+
verbosebool, optional

Whether to print inferred model parameters, by default True.

+
+
+
+
+__init__(self, data_processor: deepsensor.data.processor.DataProcessor, task_loader: deepsensor.data.loader.TaskLoader, neural_process: plum.type.ModuleType[tensorflow.keras.Model] | plum.type.ModuleType[torch.nn.Module])
+
+ +

Instantiate with a pre-defined neural process model.

+
+
+

Parameters

+
+
data_processorDataProcessor

DataProcessor object.

+
+
task_loaderTaskLoader

TaskLoader object.

+
+
neural_processTFModel | TorchModel

Pre-defined neural process model.

+
+
+
+
+__init__(self, model_ID: str)
+
+ +

Instantiate a model from a folder containing model weights and config.

+
+
+__init__(self, data_processor: deepsensor.data.processor.DataProcessor, task_loader: deepsensor.data.loader.TaskLoader, model_ID: str)
+
+ +

Instantiate a model from a folder containing model weights and config.

+
+
+ +
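Putting the pieces together, the most common instantiation pattern is from a DataProcessor and TaskLoader, after which the model can be run forwards on a Task. A sketch, with data_processor, task_loader and task as in the earlier examples:

import deepsensor.torch  # select a backend before constructing the model
from deepsensor.model.convnp import ConvNP

model = ConvNP(data_processor, task_loader, verbose=True)
dist = model(task, n_samples=10)  # forward pass returning the ConvNP distribution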
+
+ar_sample(task: Task, n_samples: int = 1, X_target_AR: ndarray | None = None, ar_subsample_factor: int = 1, fill_type: Literal['mean', 'sample'] = 'mean')
+

Autoregressive sampling from the model.

+

AR sampling with optional functionality to only draw AR samples over a +subset of the target set and then infill the rest of the sample with +the model mean or joint sample conditioned on the AR samples.

+
+

Note

+

AR sampling only works for the 0th context/target set

+
+
+

Parameters

+
+
taskTask

The task to sample from.

+
+
n_samplesint, optional

The number of samples to draw from the distribution, by default 1.

+
+
X_target_ARnumpy.ndarray, optional

Locations to draw AR samples over. If None, AR samples will be +drawn over the target locations in the task. Defaults to None.

+
+
ar_subsample_factorint, optional

Subsample target locations to draw AR samples over. Defaults to 1.

+
+
fill_typeLiteral[“mean”, “sample”], optional

How to infill the rest of the sample. Must be one of “mean” or +“sample”. Defaults to “mean”.

+
+
+
+
+

Returns

+
+
numpy.ndarray

The samples.

+
+
+
+
+ +
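A usage sketch, continuing the instantiation example above: draw AR samples over every 4th target location and infill the rest with the model mean.

# model and task as in the instantiation sketch above (hypothetical)
samples = model.ar_sample(task, n_samples=5, ar_subsample_factor=4, fill_type="mean")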
+
+covariance(dist: AbstractDistribution)
+

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+covariance(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
taskTask

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+joint_entropy(dist: AbstractDistribution)
+

Model entropy over target points given context points.

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The model entropy.

+
+
+
+
+joint_entropy(self, task: deepsensor.data.task.Task)
+
+ +

Model entropy over target points given context points.

+
+
+

Parameters

+
+
taskTask

The task to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The model entropy.

+
+
+
+
+ +
+
+load(model_ID: str)
+

Load a model from a folder containing model weights and config.

+
+ +
+
+logpdf(dist: AbstractDistribution, task: Task)
+

The model outputs a joint distribution over all targets, concatenating targets along the +observation dimension.

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to compute the logpdf of.

+
+
taskTask

The task to compute the logpdf of.

+
+
+
+
+

Returns

+
+
float

The logpdf.

+
+
+
+
+logpdf(self, task: deepsensor.data.task.Task)
+
+ +

The model outputs a joint distribution over all targets, concatenating targets along the +observation dimension.

+
+
+

Parameters

+
+
taskTask

The task to compute the logpdf of.

+
+
+
+
+

Returns

+
+
float

The logpdf.

+
+
+
+
+ +
+
+loss_fn(task: Task, fix_noise=None, num_lv_samples: int = 8, normalise: bool = False)
+

Compute the loss of a task.

+
+

Parameters

+
+
taskTask

The task to compute the loss of.

+
+
fix_noise

Whether to fix the noise to the value specified in the model +config.

+
+
num_lv_samplesint, optional

If latent variable model, number of lv samples for evaluating the +loss, by default 8.

+
+
normalisebool, optional

Whether to normalise the loss by the number of target points, by +default False.

+
+
+
+
+

Returns

+
+
float

The loss.

+
+
+
+
+ +
+
+mean(dist: AbstractDistribution)
+

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+mean(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
taskTask

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+mean_marginal_entropy(dist: AbstractDistribution)
+

Mean marginal entropy over target points given context points.

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The mean marginal entropy.

+
+
+
+
+mean_marginal_entropy(self, task: deepsensor.data.task.Task)
+
+ +

Mean marginal entropy over target points given context points.

+
+
+

Parameters

+
+
taskTask

The task to compute the entropy of.

+
+
+
+
+

Returns

+
+
float

The mean marginal entropy.

+
+
+
+
+ +
+
+classmethod modify_task(task)
+

Cast numpy arrays to TensorFlow or PyTorch tensors, add batch dim, and +mask NaNs.

+
+

Parameters

+
+
taskTask

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+sample(dist: AbstractDistribution, n_samples: int = 1, noiseless: bool = True)
+

Create samples from a ConvNP distribution.

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to sample from.

+
+
n_samplesint, optional

The number of samples to draw from the distribution, by default 1.

+
+
noiselessbool, optional

Whether to sample from the noiseless distribution, by default True.

+
+
+
+
+

Returns

+
+
numpy.ndarray | List[numpy.ndarray]

The samples as an array or list of arrays.

+
+
+
+
+sample(self, task: deepsensor.data.task.Task, n_samples: int = 1, noiseless: bool = True)
+
+ +

Create samples from a ConvNP distribution.

+
+
+

Parameters

+
+
taskTask

The task to sample from.

+
+
n_samplesint, optional

The number of samples to draw from the distribution, by default 1.

+
+
noiselessbool, optional

Whether to sample from the noiseless distribution, by default True.

+
+
+
+
+

Returns

+
+
numpy.ndarray | List[numpy.ndarray]

The samples as an array or list of arrays.

+
+
+
+
+ +
+
+save(model_ID: str)
+

Save the model weights and config to a folder.

+
+ +
+
+slice_diag(task: Task)
+

Slice out the ConvCNP part of the ConvNP distribution.

+
+

Parameters

+
+
taskTask

The task to slice.

+
+
+
+
+

Returns

+
+

+
+
+
+
+slice_diag(self, dist: neuralprocesses.dist.dist.AbstractDistribution)
+
+ +

Slice out the ConvCNP part of the ConvNP distribution.

+
+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

The distribution to slice.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+stddev(dist: AbstractDistribution)
+

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+stddev(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
taskTask

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+variance(dist: AbstractDistribution)
+

+
+

Parameters

+
+
distneuralprocesses.dist.AbstractMultiOutputDistribution

+
+
+
+
+

Returns

+
+

+
+
+
+
+variance(self, task: deepsensor.data.task.Task)
+
+ +

+
+
+

Parameters

+
+
taskTask

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+ +
+
+deepsensor.model.convnp.concat_tasks(tasks: List[Task], multiple: int = 1) Task
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/model/defaults.html b/reference/model/defaults.html new file mode 100644 index 00000000..b4f5e668 --- /dev/null +++ b/reference/model/defaults.html @@ -0,0 +1,219 @@ + + + + + + + deepsensor.model.defaults — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.defaults

+
+
+deepsensor.model.defaults.gen_ppu(task_loader: TaskLoader) int
+

Computes data-informed settings for the model’s internal grid density (ppu, +points per unit).

+

Loops over all context and target variables in the TaskLoader and +computes the data resolution for each. The model ppu is then set to the +maximum data ppu.

+
+

Parameters

+
+
task_loaderTaskLoader

TaskLoader object containing context and target sets.

+
+
+
+
+

Returns

+
+
model_ppuint

Model ppu (points per unit), i.e. the number of points per unit of +input space.

+
+
+
+
+ +
+
+deepsensor.model.defaults.gen_decoder_scale(model_ppu: int) float
+

Computes an informed setting for the decoder SetConv scale.

+

This sets the length scale of the Gaussian basis functions used to interpolate +from the model’s internal grid to the target locations.

+

The decoder scale should be as small as possible given the model’s +internal grid. The value chosen is 1 / model_ppu (i.e. the length scale is +equal to the model’s internal grid spacing).

+
+

Parameters

+
+
model_ppuint

Model ppu (points per unit), i.e. the number of points per unit of +input space.

+
+
+
+
+

Returns

+
+
decoder_scalefloat

Decoder scale.

+
+
+
+
+ +
+
+deepsensor.model.defaults.gen_encoder_scales(model_ppu: int, task_loader: TaskLoader) list[float]
+

Computes data-informed settings for the encoder SetConv scale for each +context set.

+

This sets the length scale of the Gaussian basis functions used to encode +the context sets.

+

For off-grid station data, the scale should be as small as possible given +the model’s internal grid density (ppu, points per unit). The value chosen +is 0.5 / model_ppu (i.e. half the model’s internal resolution).

+

For gridded data, the scale should be such that the functional +representation smoothly interpolates the data. This is determined by +computing the data resolution (the distance between the nearest two data +points) for each context variable. The encoder scale is then set to 0.5 * +data_resolution.

+
+

Parameters

+
+
model_ppuint

Model ppu (points per unit), i.e. the number of points per unit of +input space.

+
+
task_loaderTaskLoader

TaskLoader object containing context and target sets.

+
+
+
+
+

Returns

+
+
encoder_scaleslist[float]

List of encoder scales for each context set.

+
+
+
+
+ +
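These defaults chain together as follows (task_loader as in the earlier sketches):

from deepsensor.model.defaults import gen_ppu, gen_decoder_scale, gen_encoder_scales

model_ppu = gen_ppu(task_loader)                             # max data ppu over all variables
decoder_scale = gen_decoder_scale(model_ppu)                 # 1 / model_ppu
encoder_scales = gen_encoder_scales(model_ppu, task_loader)  # one scale per context set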
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/model/index.html b/reference/model/index.html new file mode 100644 index 00000000..84bf626d --- /dev/null +++ b/reference/model/index.html @@ -0,0 +1,159 @@ + + + + + + + model module — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/reference/model/model.html b/reference/model/model.html new file mode 100644 index 00000000..a7be7bdb --- /dev/null +++ b/reference/model/model.html @@ -0,0 +1,747 @@ + + + + + + + deepsensor.model.model — DeepSensor 0.2.5 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.model

+
+
+class deepsensor.model.model.DeepSensorModel(data_processor: DataProcessor | None = None, task_loader: TaskLoader | None = None)
+

Bases: ProbabilisticModel

+

Implements DeepSensor prediction functionality of a ProbabilisticModel. +Allows for outputting an xarray object containing on-grid predictions or a +pandas object containing off-grid predictions.

+
+
+__init__(data_processor: DataProcessor | None = None, task_loader: TaskLoader | None = None)
+

Initialise DeepSensorModel.

+
+

Parameters

+
+
data_processorDataProcessor

DataProcessor object, used to unnormalise predictions.

+
+
task_loaderTaskLoader

TaskLoader object, used to determine target variables for +unnormalising.

+
+
+
+
+ +
+
+covariance(task: Task, *args, **kwargs)
+

Computes the model covariance matrix over target points based on given +context data. Shape (N, N).

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
covnumpy.ndarray

Should return covariance matrix over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+joint_entropy(task: Task, *args, **kwargs)
+

Computes the model joint entropy over target points based on given +context data.

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
joint_entropyfloat

Should return joint entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+logpdf(task: Task, *args, **kwargs)
+

Computes the joint model logpdf over target points based on given +context data.

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
logpdffloat

Should return joint logpdf over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+loss(task: Task, *args, **kwargs)
+

Computes the model loss over target points based on given context data.

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
lossfloat

Should return loss over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean(task: Task, *args, **kwargs)
+

Computes the model mean prediction over target points based on given context +data.

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
meannumpy.ndarray

Should return mean prediction over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean_marginal_entropy(task: Task, *args, **kwargs)
+

Computes the mean marginal entropy over target points based on given +context data.

+
+

Note

+

Getting a vector of marginal entropies would be useful too.

+
+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
mean_marginal_entropyfloat

Should return mean marginal entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+predict(tasks: List[Task] | Task, X_t: Dataset | DataArray | DataFrame | Series | Index | ndarray, X_t_mask: Dataset | DataArray = None, X_t_is_normalised: bool = False, resolution_factor: int = 1, n_samples: int = 0, ar_sample: bool = False, ar_subsample_factor: int = 1, unnormalise: bool = True, seed: int = 0, append_indexes: dict = None, progress_bar: int = 0, verbose: bool = False)
+

Predict on a regular grid or at off-grid locations.

+
+

Parameters

+
+
tasksList[Task] | Task

List of tasks containing context data.

+
+
X_txarray.Dataset | xarray.DataArray | pandas.DataFrame | pandas.Series | pandas.Index | numpy.ndarray

Target locations to predict at. Can be an xarray object containing +on-grid locations or a pandas object containing off-grid locations.

+
+
X_t_is_normalisedbool

Whether the X_t coords are normalised. If False, will normalise +the coords before passing to model. Default False.

+
+
resolution_factorfloat

Optional factor to increase the resolution of the target grid by. +E.g. 2 will double the target resolution, 0.5 will halve it. +Applies to on-grid predictions only. Default 1.

+
+
n_samplesint

Number of joint samples to draw from the model. If 0, will not +draw samples. Default 0.

+
+
ar_samplebool

Whether to use autoregressive sampling. Default False.

+
+
unnormalisebool

Whether to unnormalise the predictions. Only works if self has +a data_processor and task_loader attribute. Default +True.

+
+
seedint

Random seed for deterministic sampling. Default 0.

+
+
append_indexesdict

Dictionary of index metadata to append to pandas indexes in the +off-grid case. Default None.

+
+
progress_barint

Whether to display a progress bar over tasks. Default 0.

+
+
verbosebool

Whether to print time taken for prediction. Default False.

+
+
+
+
+

Returns

+
+
predictionsxarray.Dataset | xarray.DataArray | pandas.DataFrame | pandas.Series | pandas.Index

If X_t is a pandas object, returns pandas objects containing +off-grid predictions.

+

If X_t is an xarray object, returns xarray object containing +on-grid predictions.

+

If n_samples == 0, returns only mean and std predictions.

+

If n_samples > 0, returns mean, std and samples predictions.

+
+
+
+
+

Raises

+
+
ValueError

If X_t is not an xarray object and +resolution_factor is not 1 or ar_subsample_factor is not 1.

+
+
ValueError

If X_t is not a pandas object and append_indexes is not +None.

+
+
ValueError

If X_t is not an xarray, pandas or numpy object.

+
+
ValueError

If append_indexes are not all the same length as X_t.

+
+
+
+
+ +
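A sketch of on-grid prediction at doubled resolution, assuming (per the Returns section for n_samples == 0) that a mean and std pair is returned; model and task are as in the earlier sketches and hires_ds is a hypothetical xarray object of target locations:

mean_ds, std_ds = model.predict(task, X_t=hires_ds, resolution_factor=2)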
+
+sample(task: Task, n_samples=1, *args, **kwargs)
+

Draws n_samples joint samples over target points based on given +context data. Returned shape is (n_samples, n_target).

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
n_samplesint

Number of samples to draw.

+
+
+
+
+

Returns

+
+
samplesTuple[numpy.ndarray, numpy.ndarray]

Should return joint samples over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+stddev(task: Task)
+

Model marginal standard deviation over target points given context +points. Shape (N,).

+
+

Parameters

+
+
taskTask

Task containing context data.

+
+
+
+
+

Returns

+
+
stdnumpy.ndarray

Should return marginal standard deviation over target points.

+
+
+
+
+ +
+
+variance(task: Task, *args, **kwargs)
+

Model marginal variance over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
var : numpy.ndarray

Should return marginal variance over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+ +
+
+class deepsensor.model.model.ProbabilisticModel
+

Bases: object

+

Base class for probabilistic models used by DeepSensor. Ensures a set of methods required by DeepSensor are implemented by the specific model classes that inherit from it.

+
+
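As a minimal sketch of the intended usage (hypothetical class and values, mirroring the extending-DeepSensor guide; a real model should return arrays of shape (N,) over the target points, per the method docs below):

    import numpy as np

    from deepsensor.model.model import DeepSensorModel


    class ExampleModel(DeepSensorModel):
        """A naive model that predicts the mean of the first context set."""

        def __init__(self, data_processor, task_loader):
            super().__init__(data_processor, task_loader)

        def mean(self, task):
            # Mean prediction at the target locations.
            return np.mean(task["Y_c"][0])

        def stddev(self, task):
            # Fixed marginal standard deviation at the target locations.
            return 0.1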
+covariance(task: Task, *args, **kwargs)
+

Computes the model covariance matrix over target points based on given context data. Shape (N, N).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
cov : numpy.ndarray

Should return covariance matrix over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+joint_entropy(task: Task, *args, **kwargs)
+

Computes the model joint entropy over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
joint_entropy : float

Should return joint entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+logpdf(task: Task, *args, **kwargs)
+

Computes the joint model logpdf over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
logpdf : float

Should return joint logpdf over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+loss(task: Task, *args, **kwargs)
+

Computes the model loss over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
loss : float

Should return loss over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean(task: Task, *args, **kwargs)
+

Computes the model mean prediction over target points based on given context data.

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
mean : numpy.ndarray

Should return mean prediction over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+mean_marginal_entropy(task: Task, *args, **kwargs)
+

Computes the mean marginal entropy over target points based on given context data.

+
+

Note

+

Getting a vector of marginal entropies would be useful too.

+
+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
mean_marginal_entropy : float

Should return mean marginal entropy over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+sample(task: Task, n_samples=1, *args, **kwargs)
+

Draws n_samples joint samples over target points based on given context data. Returned shape is (n_samples, n_target).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
n_samples : int

Number of samples to draw.

+
+
+
+
+

Returns

+
+
samples : Tuple[numpy.ndarray, numpy.ndarray]

Should return joint samples over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+
+stddev(task: Task)
+

Model marginal standard deviation over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
std : numpy.ndarray

Should return marginal standard deviation over target points.

+
+
+
+
+ +
+
+variance(task: Task, *args, **kwargs)
+

Model marginal variance over target points given context points. Shape (N,).

+
+

Parameters

+
+
task : Task

Task containing context data.

+
+
+
+
+

Returns

+
+
var : numpy.ndarray

Should return marginal variance over target points.

+
+
+
+
+

Raises

+
+
NotImplementedError

If not implemented by child class.

+
+
+
+
+ +
+ +
+
+deepsensor.model.model.create_empty_spatiotemporal_xarray(X: Dataset | DataArray, dates: List, coord_names: dict = {'x1': 'x1', 'x2': 'x2'}, data_vars: List = ['var'], prepend_dims: List[str] = None, prepend_coords: dict = None)
+
+ +
+
+deepsensor.model.model.increase_spatial_resolution(X_t_normalised, resolution_factor, coord_names: dict = {'x1': 'x1', 'x2': 'x2'})
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/model/nps.html b/reference/model/nps.html new file mode 100644 index 00000000..dd2397d9 --- /dev/null +++ b/reference/model/nps.html @@ -0,0 +1,323 @@ deepsensor.model.nps — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.model.nps

+
+
+deepsensor.model.nps.convert_task_to_nps_args(task: Task)
+

Infer and build the model call signature from the task dict.

+
+

Parameters

+
+
task : Task

Task object containing context and target sets.

+
+
+
+
+

Returns

+
+

+
+
+
+
+ +
+
+deepsensor.model.nps.run_nps_model(neural_process, task: Task, n_samples: int | None = None, requires_grad: bool = False)
+

Run the neuralprocesses model.

+
+

Parameters

+
+
neural_process : neuralprocesses.Model

Neural process model.

+
+
task : Task

Task object containing context and target sets.

+
+
n_samples : int, optional

Number of samples to draw from the model. Defaults to None (single sample).

+
+
requires_grad : bool, optional

Whether to require gradients. Defaults to False.

+
+
+
+
+

Returns

+
+
dist : neuralprocesses.distributions.Distribution

Distribution object containing the model’s predictions.

+
+
+
+
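A usage sketch (hedged: it assumes a trained ConvNP whose underlying neuralprocesses model is exposed as model.model, and that the returned distribution exposes .mean and .var, as ConvNP's mean/variance methods assume):

    import lab as B  # backend-agnostic array library used by neuralprocesses

    from deepsensor.model.nps import run_nps_model

    dist = run_nps_model(model.model, task)
    mean = B.to_numpy(dist.mean)  # marginal means over the target points
    var = B.to_numpy(dist.var)    # marginal variances over the target points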
+ +
+
+deepsensor.model.nps.run_nps_model_ar(neural_process, task: Task, num_samples: int = 1)
+

Run neural_process in AR mode.

+
+

Parameters

+
+
neural_process : neuralprocesses.Model

Neural process model.

+
+
task : Task

Task object containing context and target sets.

+
+
num_samples : int, optional

Number of samples to draw from the model. Defaults to 1.

+
+
+
+
+

Returns

+
+
Tuple[…, …, …, …]

Tuple of mean, variance, noiseless samples, and noisy samples.

+
+
+
+
+ +
+
+deepsensor.model.nps.construct_neural_process(dim_x: int = 2, dim_yc: int = 1, dim_yt: int = 1, dim_aux_t: int | None = None, dim_lv: int = 0, conv_arch: str = 'unet', unet_channels: Tuple[int, int, int, int] = (64, 64, 64, 64), unet_resize_convs: bool = True, unet_resize_conv_interp_method: Literal['bilinear'] = 'bilinear', aux_t_mlp_layers: Tuple[int] | None = None, likelihood: Literal['cnp', 'gnp', 'cnp-spikes-beta'] = 'cnp', unet_kernels: int = 5, points_per_unit: int = 100, encoder_scales: float = 0.01, encoder_scales_learnable: bool = False, decoder_scale: float = 0.01, decoder_scale_learnable: bool = False, num_basis_functions: int = 64, epsilon: float = 0.01)
+

Construct a neuralprocesses ConvNP model.

+

See: https://github.com/wesselb/neuralprocesses/blob/main/neuralprocesses/architectures/convgnp.py

+

The docstring below is adapted from neuralprocesses. If more kwargs are needed, they must be added explicitly to this constructor and passed on to the neuralprocesses constructor (it is not currently safe to forward arbitrary **kwargs here).

+
+

Parameters

+
+
dim_x : int, optional

Dimensionality of the inputs. Defaults to 2.

+
+
dim_y : int, optional

Dimensionality of the outputs. Defaults to 1.

+
+
dim_yc : int or tuple[int], optional

Dimensionality of the outputs of the context set. You should set this if the dimensionality of the outputs of the context set is not equal to the dimensionality of the outputs of the target set. You should also set this if you want to use multiple context sets. In that case, set this equal to a tuple of integers indicating the respective output dimensionalities.

+
+
dim_yt : int, optional

Dimensionality of the outputs of the target set. You should set this if the dimensionality of the outputs of the target set is not equal to the dimensionality of the outputs of the context set.

+
+
dim_aux_t : int, optional

Dimensionality of target-specific auxiliary variables.

+
+
points_per_unit : int, optional

Density of the internal discretisation. Defaults to 100.

+
+
likelihood : str, optional

Likelihood. Must be one of "cnp" (equivalently "het"), "gnp" (equivalently "lowrank"), or "cnp-spikes-beta" (equivalently "spikes-beta"). Defaults to "cnp".

+
+
conv_arch : str, optional

Convolutional architecture to use. Must be one of "unet[-res][-sep]" or "conv[-res][-sep]". Defaults to "unet".

+
+
unet_channels : tuple[int], optional

Channels of every layer of the UNet. Defaults to four layers each with 64 channels.

+
+
unet_kernels : int or tuple[int], optional

Sizes of the kernels in the UNet. Defaults to 5.

+
+
unet_resize_convs : bool, optional

Use resize convolutions rather than transposed convolutions in the UNet. Defaults to True.

+
+
unet_resize_conv_interp_method : str, optional

Interpolation method for the resize convolutions in the UNet. Currently only "bilinear" is supported. Defaults to "bilinear".

+
+
num_basis_functions : int, optional

Number of basis functions for the low-rank likelihood. Defaults to 64.

+
+
dim_lv : int, optional

Dimensionality of the latent variable. Setting to >0 constructs a latent neural process. Defaults to 0.

+
+
encoder_scales : float or tuple[float], optional

Initial value for the length scales of the set convolutions for the context sets' embeddings. Set to a tuple equal to the number of context sets to use different values for each set. Set to a single value to use the same value for all context sets. Defaults to 1 / points_per_unit.

+
+
encoder_scales_learnable : bool, optional

Whether the encoder SetConv length scale(s) are learnable. Defaults to False.

+
+
decoder_scale : float, optional

Initial value for the length scale of the set convolution in the decoder. Defaults to 1 / points_per_unit.

+
+
decoder_scale_learnable : bool, optional

Whether the decoder SetConv length scale(s) are learnable. Defaults to False.

+
+
aux_t_mlp_layers : tuple[int], optional

Widths of the layers of the MLP for the target-specific auxiliary variable. Defaults to None, in which case three layers of width 128 are used.

+
+
epsilon : float, optional

Epsilon added by the set convolutions before dividing by the density channel. Defaults to 1e-2.

+
+
+
+
+

Returns

+
+
model : neuralprocesses.Model

ConvNP model.

+
+
+
+
+

Raises

+
+
NotImplementedError

If specified backend has no default dtype.

+
+
+
+
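A construction sketch (hedged: the kwargs mirror the parameters documented above; two single-output context sets and a low-rank likelihood are assumed purely for illustration):

    import deepsensor.torch  # select a backend before constructing the model

    from deepsensor.model.nps import construct_neural_process

    neural_process = construct_neural_process(
        dim_x=2,
        dim_yc=(1, 1),  # two context sets, each with one output dimension
        dim_yt=1,
        likelihood="gnp",  # low-rank Gaussian likelihood
        points_per_unit=100,
    )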
+ +
+
+deepsensor.model.nps.compute_encoding_tensor(model, task: Task)
+

Compute the encoding tensor for a given task.

+
+

Parameters

+
+
model

Model object.

+
+
taskTask

Task object containing context and target sets.

+
+
+
+
+

Returns

+
+
encoding : numpy.ndarray

Encoding tensor for the given task.

+
+
+
+
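A sketch for inspecting the encoding (hedged: the (batch, channel, height, width) layout and a density channel at index 0 are assumptions about the SetConv encoder output):

    import matplotlib.pyplot as plt

    from deepsensor.model.nps import compute_encoding_tensor

    encoding = compute_encoding_tensor(model, task)
    plt.imshow(encoding[0, 0])  # first batch element, first encoding channel
    plt.show()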
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/plot.html b/reference/plot.html new file mode 100644 index 00000000..89e42b1c --- /dev/null +++ b/reference/plot.html @@ -0,0 +1,393 @@ deepsensor.plot module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.plot module

+
+
+deepsensor.plot.acquisition_fn(task: Task, acquisition_fn_ds: ndarray, X_new_df: DataFrame, data_processor: DataProcessor, crs: Axes | PolarAxes | AitoffAxes | HammerAxes | LambertAxes | MollweideAxes | Axes3D, col_dim: str = 'iteration', cmap: str | Colormap = 'Greys_r', figsize: int = 3, add_colorbar: bool = True, max_ncol: int = 5) Figure
+
+
Args:
+
task (Task):

Task containing the context set used to compute the acquisition function.

+
+
acquisition_fn_ds (numpy.ndarray):

Acquisition function dataset.

+
+
X_new_df (pandas.DataFrame):

Dataframe containing the placement locations.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set and placement locations.

+
+
crs (matplotlib.axes.Axes | matplotlib.projections.polar.PolarAxes | matplotlib.projections.geo.AitoffAxes | matplotlib.projections.geo.HammerAxes | matplotlib.projections.geo.LambertAxes | matplotlib.projections.geo.MollweideAxes | mpl_toolkits.mplot3d.Axes3D):

Coordinate reference system for the plots.

+
+
col_dim (str, optional):

Column dimension to plot over, by default “iteration”.

+
+
cmap (str | matplotlib.colors.Colormap, optional):

Color map to use for the plots, by default “Greys_r”.

+
+
figsize (int, optional):

Figure size in inches, by default 3.

+
+
add_colorbar (bool, optional):

Whether to add a colorbar to the plots, by default True.

+
+
max_ncol (int, optional):

Maximum number of columns to use for the plots, by default 5.

+
+
+
+
Returns:
+
matplotlib.pyplot.Figure

A figure containing the acquisition function plots.

+
+
+
+
Raises:
+
ValueError:

If a column dimension is encountered that is not one of ["time", "sample"].

+
+
AssertionError:

If the number of columns in the acquisition function dataset is greater than max_ncol.

+
+
+
+
+
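A plotting sketch (hedged: X_new_df and acquisition_fn_ds are assumed to come from an active-learning run such as GreedyAlgorithm, and a cartopy projection is assumed to be acceptable for crs):

    import cartopy.crs as ccrs
    import matplotlib.pyplot as plt

    import deepsensor.plot

    fig = deepsensor.plot.acquisition_fn(
        task, acquisition_fn_ds, X_new_df, data_processor, crs=ccrs.PlateCarree()
    )
    plt.show()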
+ +
+
+deepsensor.plot.context_encoding(model, task: Task, task_loader: TaskLoader, batch_idx: int = 0, context_set_idxs: List[int] | int | None = None, land_idx: int | None = None, cbar: bool = True, clim: Tuple | None = None, cmap: str | Colormap = 'viridis', verbose_titles: bool = True, titles: dict | None = None, size: int = 3, return_axes: bool = False)
+

Plot the encoding of a context set in a task.

+
+
Args:
+
model (DeepSensorModel):

DeepSensor model.

+
+
task (Task):

Task containing the context set whose encoding will be plotted.

+
+
task_loader (TaskLoader):

TaskLoader used to load the data, containing context set metadata used for plotting.

+
+
batch_idx (int, optional):

Batch index in encoding to plot, by default 0.

+
+
context_set_idxs (List[int] | int, optional):

Indices of context sets to plot, by default None (plots all context sets).

+
+
land_idx (int, optional):

Index of the land mask in the encoding (used to overlay land contour on plots), by default None.

+
+
cbar (bool, optional):

Whether to add a colorbar to the plots, by default True.

+
+
clim (tuple, optional):

Colorbar limits, by default None.

+
+
cmap (str | matplotlib.colors.Colormap, optional):

Color map to use for the plots, by default “viridis”.

+
+
verbose_titles (bool, optional):

Whether to include verbose titles for the variable IDs in the context set (including the time index), by default True.

+
+
titles (dict, optional):

Dict of titles to override for each subplot, by default None. If None, titles are generated from context set metadata.

+
+
size (int, optional):

Size of the figure in inches, by default 3.

+
+
return_axes (bool, optional):

Whether to return the axes of the figure, by default False.

+
+
+
+
Returns:
+
matplotlib.figure.Figure | Tuple[matplotlib.figure.Figure, matplotlib.pyplot.Axes]:

Either a figure containing the context set encoding plots, or a tuple containing the figure and the axes of the figure (if return_axes was set to True).

+
+
+
+
+
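A quick usage sketch (hedged: model, task and task_loader are assumed to come from the quickstart tutorial):

    import matplotlib.pyplot as plt

    import deepsensor.plot

    fig = deepsensor.plot.context_encoding(model, task, task_loader)
    plt.show()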
+ +
+
+deepsensor.plot.feature_maps(model, task: Task, n_features_per_layer: int = 1, seed: int | None = None, figsize: int = 3, add_colorbar: bool = False, cmap: str | Colormap = 'Greys') Figure
+

Plot the feature maps of a ConvNP model’s decoder layers after a forward pass with a Task.

+

Currently only plots feature maps for the downsampling path.

+
+
Args:
+
model (ConvNP):

+
+
task (Task):

+
+
n_features_per_layer (int, optional):

…, by default 1.

+
+
seed (int, optional):

…, by default None.

+
+
figsize (int, optional):

…, by default 3.

+
+
add_colorbar (bool, optional):

…, by default False.

+
+
cmap (str | matplotlib.colors.Colormap, optional):

…, by default “Greys”.

+
+
+
+
Returns:
+
matplotlib.figure.Figure:

A figure containing the feature maps.

+
+
+
+
Raises:
+
ValueError:

If the backend is not recognised.

+
+
+
+
+
+ +
+
+deepsensor.plot.offgrid_context(axes: ndarray | List[Axes] | Tuple[Axes], task: Task, data_processor: DataProcessor | None = None, task_loader: TaskLoader | None = None, plot_target: bool = False, add_legend: bool = True, context_set_idxs: List[int] | int | None = None, markers: str | None = None, colors: str | None = None, **scatter_kwargs) None
+

Plot the off-grid context points on axes.

+

Uses the DataProcessor to unnormalise the context coordinates, if one is provided.

+
+
Args:
+
axes (numpy.ndarray | List[matplotlib.axes.Axes] | Tuple[matplotlib.axes.Axes]):

Axes to plot on.

+
+
task (Task):

Task containing the context set to plot.

+
+
data_processor (DataProcessor, optional):

Data processor used to unnormalise the context set, by default None.

+
+
task_loader (TaskLoader, optional):

Task loader used to load the data, containing context set metadata used for plotting, by default None.

+
+
plot_target (bool, optional):

Whether to plot the target set, by default False.

+
+
add_legend (bool, optional):

Whether to add a legend to the plot, by default True.

+
+
context_set_idxs (List[int] | int, optional):

Indices of context sets to plot, by default None (plots all context sets).

+
+
markers (str, optional):

Marker styles to use for each context set, by default None.

+
+
colors (str, optional):

Colors to use for each context set, by default None.

+
+
scatter_kwargs:

Additional keyword arguments to pass to the scatter plot.

+
+
+
+
Returns:

None.

+
+
+
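A sketch overlaying context points on a prediction plot (hedged: mean_ds is assumed to be an on-grid prediction with an "air" variable, as in the quickstart tutorial):

    import matplotlib.pyplot as plt

    import deepsensor.plot

    fig, ax = plt.subplots()
    mean_ds["air"].isel(time=0).plot(ax=ax)
    deepsensor.plot.offgrid_context([ax], task, data_processor, task_loader)
    plt.show()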
+ +
+
+deepsensor.plot.offgrid_context_observations(axes: ndarray | List[Axes] | Tuple[Axes], task: Task, data_processor: DataProcessor, task_loader: TaskLoader, context_set_idx: int, format_str: str | None = None, extent: Tuple[int, int, int, int] | None = None, color: str = 'black') None
+

Plot unnormalised context observation values.

+
+
Args:
+
axes (numpy.ndarray | List[matplotlib.axes.Axes] | Tuple[matplotlib.axes.Axes]):

Axes to plot on.

+
+
task (Task):

Task containing the context set to plot.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set.

+
+
task_loader (TaskLoader):

Task loader used to load the data, containing context set metadata used for plotting.

+
+
context_set_idx (int):

Index of the context set to plot.

+
+
format_str (str, optional):

Format string for the context observation values, by default None.

+
+
extent (Tuple[int, int, int, int], optional):

Extent of the plot, by default None.

+
+
color (str, optional):

Color of the text, by default “black”.

+
+
+
+
Returns:

None.

+
+
Raises:
+
AssertionError:

If the context set is gridded.

+
+
AssertionError:

If the context set is not 1D.

+
+
AssertionError:

If the task’s “Y_c” value for the context set ID is not 2D.

+
+
AssertionError:

If the task’s “Y_c” value for the context set ID does not have exactly one variable.

+
+
+
+
+
+ +
+
+deepsensor.plot.placements(task: Task, X_new_df: DataFrame, data_processor: DataProcessor, crs: Axes | PolarAxes | AitoffAxes | HammerAxes | LambertAxes | MollweideAxes | Axes3D, extent: Tuple[int, int, int, int] | str | None = None, figsize: int = 3, **scatter_kwargs) Figure
+

+
+
Args:
+
task (Task):

Task containing the context set used to compute the acquisition function.

+
+
X_new_df (pandas.DataFrame):

Dataframe containing the placement locations.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set and placement locations.

+
+
crs (matplotlib.axes.Axes | matplotlib.projections.polar.PolarAxes | matplotlib.projections.geo.AitoffAxes | matplotlib.projections.geo.HammerAxes | matplotlib.projections.geo.LambertAxes | matplotlib.projections.geo.MollweideAxes | mpl_toolkits.mplot3d.Axes3D):

Coordinate reference system for the plots.

+
+
extent (Tuple[int, int, int, int] | str, optional):

Extent of the plots, by default None.

+
+
figsize (int, optional):

Figure size in inches, by default 3.

+
+
+
+
Returns:
+
matplotlib.figure.Figure

A figure containing the placement plots.

+
+
+
+
+
+ +
+
+deepsensor.plot.receptive_field(receptive_field, data_processor: DataProcessor, crs: Axes | PolarAxes | AitoffAxes | HammerAxes | LambertAxes | MollweideAxes | Axes3D, extent: str = 'global') Figure
+

+
+
Args:
+
receptive_field (…):

Receptive field to plot.

+
+
data_processor (DataProcessor):

Data processor used to unnormalise the context set.

+
+
crs (matplotlib.axes.Axes | matplotlib.projections.polar.PolarAxes | matplotlib.projections.geo.AitoffAxes | matplotlib.projections.geo.HammerAxes | matplotlib.projections.geo.LambertAxes | matplotlib.projections.geo.MollweideAxes | mpl_toolkits.mplot3d.Axes3D):

Coordinate reference system for the plots.

+
+
extent (str, optional):

Extent of the plot, by default “global”.

+
+
+
+
Returns:

matplotlib.figure.Figure: A figure containing the receptive field plot.

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/tensorflow/index.html b/reference/tensorflow/index.html new file mode 100644 index 00000000..fe6fc3b3 --- /dev/null +++ b/reference/tensorflow/index.html @@ -0,0 +1,123 @@ tensorflow module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

tensorflow module

+

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/torch/index.html b/reference/torch/index.html new file mode 100644 index 00000000..eaf90672 --- /dev/null +++ b/reference/torch/index.html @@ -0,0 +1,123 @@ torch module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

torch module

+

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/train/index.html b/reference/train/index.html new file mode 100644 index 00000000..cf7ac87a --- /dev/null +++ b/reference/train/index.html @@ -0,0 +1,135 @@ train module — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

train module

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/reference/train/train.html b/reference/train/train.html new file mode 100644 index 00000000..180f8eb4 --- /dev/null +++ b/reference/train/train.html @@ -0,0 +1,184 @@ deepsensor.train.train — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

deepsensor.train.train

+
+
+deepsensor.train.train.set_gpu_default_device() None
+

Set default GPU device for the backend.

+
+

Raises

+
+
RuntimeError

If no GPU is available.


+
+
NotImplementedError

If backend is not supported.

+
+
+
+
+

Returns

+

None.

+
+
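A usage sketch (hedged: the backend import must come first, and a supported GPU is assumed to be available):

    import deepsensor.torch  # select a backend first

    from deepsensor.train.train import set_gpu_default_device

    set_gpu_default_device()  # subsequent model and tensor allocations default to the GPU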
+ +
+
+deepsensor.train.train.train_epoch(model: ConvNP, tasks: List[Task], lr: float = 5e-05, batch_size: int = None, opt=None, progress_bar=False, tqdm_notebook=False) List[float]
+

Train model for one epoch.

+
+

Parameters

+
+
model : ConvNP

Model to train.

+
+
tasks : List[Task]

List of tasks to train on.

+
+
lr : float, optional

Learning rate, by default 5e-5.

+
+
batch_size : int, optional

Batch size. Defaults to None. If None, no batching is performed.

+
+
opt : Optimizer, optional

TF or Torch optimizer. Defaults to None. If None, tensorflow.keras.optimizers.Adam is used.

+
+
progress_bar : bool, optional

Whether to display a progress bar. Defaults to False.

+
+
tqdm_notebook : bool, optional

Whether to use a notebook progress bar. Defaults to False.

+
+
+
+
+

Returns

+
+
List[float]

List of losses for each task/batch.

+
+
+
+
+ +
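A minimal training-loop sketch (hedged: model is assumed to be a ConvNP and train_tasks a list of Tasks from a TaskLoader, as in the quickstart tutorial):

    from deepsensor.train.train import train_epoch

    for epoch in range(10):
        batch_losses = train_epoch(model, train_tasks, progress_bar=True)
        epoch_loss = sum(batch_losses) / len(batch_losses)  # mean loss this epoch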
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/search.html b/search.html new file mode 100644 index 00000000..bf50ab6e --- /dev/null +++ b/search.html @@ -0,0 +1,123 @@ Search — DeepSensor 0.2.5 documentation
+ + +
+ +
+
+
+
    +
+
+
+
+
+ + + + +
+ +
+ +
+
+ +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/searchindex.js b/searchindex.js new file mode 100644 index 00000000..db3e67aa --- /dev/null +++ b/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"docnames": ["community/code-of-conduct", "community/contributing", "community/faq", "community/index", "community/roadmap", "contact", "getting-started/extending", "getting-started/index", "getting-started/installation", "getting-started/tutorials/index", "getting-started/tutorials/quickstart", "index", "reference/active_learning/acquisition_fns", "reference/active_learning/algorithms", "reference/active_learning/index", "reference/data/index", "reference/data/loader", "reference/data/processor", "reference/data/task", "reference/data/utils", "reference/index", "reference/model/convnp", "reference/model/defaults", "reference/model/index", "reference/model/model", "reference/model/nps", "reference/plot", "reference/tensorflow/index", "reference/torch/index", "reference/train/index", "reference/train/train"], "filenames": ["community/code-of-conduct.rst", "community/contributing.rst", "community/faq.rst", "community/index.rst", "community/roadmap.rst", "contact.rst", "getting-started/extending.rst", "getting-started/index.rst", "getting-started/installation.rst", "getting-started/tutorials/index.rst", "getting-started/tutorials/quickstart.rst", "index.rst", "reference/active_learning/acquisition_fns.rst", "reference/active_learning/algorithms.rst", "reference/active_learning/index.rst", "reference/data/index.rst", "reference/data/loader.rst", "reference/data/processor.rst", "reference/data/task.rst", "reference/data/utils.rst", "reference/index.rst", "reference/model/convnp.rst", "reference/model/defaults.rst", "reference/model/index.rst", "reference/model/model.rst", "reference/model/nps.rst", "reference/plot.rst", "reference/tensorflow/index.rst", "reference/torch/index.rst", "reference/train/index.rst", "reference/train/train.rst"], "titles": ["Developer Code of Conduct", "Contributing to DeepSensor", "Community FAQ", "DeepSensor\u2019s user and contributor community", "DeepSensor Roadmap", "Contact the developers", "Extending DeepSensor with new models", "Getting started", "Installation instructions", "Tutorials", "Tutorial: Quickstart", "Welcome to DeepSensor\u2019s documentation!", "deepsensor.active_learning.acquisition_fns", "deepsensor.active_learning.algorithms", "active_learning module", "data module", "deepsensor.data.loader", "deepsensor.data.processor", "deepsensor.data.task", "deepsensor.data.utils", "API Reference", "deepsensor.model.convnp", "deepsensor.model.defaults", "model module", "deepsensor.model.model", "deepsensor.model.nps", "deepsensor.plot module", "tensorflow module", "torch module", "train module", "deepsensor.train.train"], "terms": {"thi": [2, 6, 7, 8, 10, 11, 12, 16, 19, 20, 21, 22, 25], "aim": [2, 11], "answer": 2, "common": 2, "about": 2, "deepsensor": [2, 7, 8, 10, 14, 15, 20, 23, 29], "librari": 2, "It": [2, 20], "i": [2, 3, 8, 10, 11, 12, 13, 16, 17, 18, 19, 20, 21, 22, 24, 25, 26, 30], "our": [2, 3], "wai": [2, 6, 8], "streamlin": 2, "onboard": 2, "process": [2, 10, 11, 21, 25], "clarifi": 2, "expect": [2, 12], "If": [2, 3, 8, 11, 13, 16, 17, 18, 19, 21, 24, 25, 26, 30], "you": [2, 3, 7, 8, 11, 21, 25], "have": [2, 26], "here": [2, 10, 25], "pleas": [2, 3, 11], "open": [2, 3, 11], "an": [2, 3, 12, 13, 16, 17, 18, 19, 21, 24], "issu": 2, "submit": 2, "pull": 2, "request": [2, 3, 16], "q": 2, "what": 2, "purpos": 2, "project": [2, 11, 26], "briefli": 2, 
"describ": [2, 8, 20], "its": [2, 11, 18, 20], "object": [2, 12, 16, 17, 18, 19, 21, 22, 24, 25], "intend": [2, 8], "audienc": 2, "how": [2, 7, 21], "can": [2, 3, 6, 8, 10, 11, 16, 21, 24, 25], "contribut": [2, 3, 8, 11], "There": [2, 16], "ar": [2, 3, 11, 12, 16, 17, 18, 19, 20, 21, 24, 25, 26], "mani": 2, "from": [2, 3, 6, 7, 10, 13, 16, 17, 18, 21, 22, 24, 25, 26], "write": 2, "code": [2, 3, 8, 10, 11], "fix": [2, 6, 21], "bug": 2, "improv": [2, 12], "document": [2, 7, 20], "translat": 2, "content": 2, "check": [2, 17], "guid": [2, 3], "detail": [2, 6], "step": 2, "where": [2, 13, 16, 18, 19], "find": 2, "guidelin": 2, "md": 2, "file": [2, 16], "root": [2, 12], "directori": [2, 8], "repositori": [2, 8, 11], "do": [2, 8], "need": [2, 8, 25], "sign": 2, "contributor": [2, 11], "licens": 2, "agreement": 2, "cla": 2, "At": 2, "current": [2, 11, 12, 25, 26], "time": [2, 10, 16, 17, 19, 21, 24, 26], "we": [2, 3, 6, 7, 8, 10, 11], "requir": [2, 24, 25], "report": 2, "github": [2, 21, 25], "make": [2, 17, 18], "sure": 2, "provid": [2, 6, 7, 13, 26], "inform": [2, 22], "includ": [2, 26], "reproduc": 2, "outcom": 2, "new": [2, 7, 10, 11, 12, 13, 21], "featur": [2, 26], "label": 2, "potenti": 2, "benefit": 2, "set": [2, 6, 10, 12, 16, 17, 18, 21, 22, 24, 25, 26, 30], "up": [2, 8, 10], "develop": [2, 3, 11], "environ": 2, "follow": [2, 16, 20], "instruct": [2, 7, 11], "run": [2, 21, 25], "ask": [2, 3], "chat": 2, "slack": [2, 3], "contact": [2, 11], "core": [2, 11, 16, 18], "group": [2, 3], "maintain": 2, "directli": [2, 10], "conduct": [2, 3, 11], "ye": 2, "valu": [2, 12, 17, 18, 19, 21, 22, 25, 26], "respect": [2, 17, 19, 20, 21, 25], "inclus": 2, "read": [2, 3], "befor": [2, 21, 24, 25], "get": [2, 11, 17, 24], "touch": [2, 11], "other": [2, 16], "join": [2, 3], "team": 2, "stai": [2, 11], "also": [2, 7, 10, 11, 21, 25], "stand": 2, "meet": 2, "which": [2, 8, 16, 18, 20, 21], "invit": [2, 3], "so": [2, 21], "receiv": 2, "them": [2, 11, 17, 18], "even": 2, "m": [2, 19], "coder": 2, "absolut": [2, 12], "made": 2, "form": 2, "design": [2, 20], "test": 2, "more": [2, 6, 25], "everyon": 2, "": [2, 8, 10, 12, 18, 21, 22, 25, 26], "skill": 2, "valuabl": 2, "discuss": [2, 3], "learn": [2, 11, 13, 18, 30], "claim": 2, "work": [2, 3, 8, 21, 24], "comment": 2, "express": 2, "your": [2, 3, 8, 10, 11], "interest": [2, 3, 11], "help": 2, "out": [2, 16, 21], "unassign": 2, "like": 2, "assign": 2, "propos": [2, 13], "signific": 2, "chang": [2, 8, 17], "For": [2, 18, 19, 22], "good": 2, "practic": 2, "first": [2, 6, 16, 17], "gather": 2, "feedback": 2, "onc": [2, 19], "consensu": 2, "proce": 2, "my": 2, "pr": 2, "merg": 2, "ensur": [2, 17, 24], "pass": [2, 10, 13, 16, 21, 24, 25, 26], "all": [2, 8, 10, 12, 16, 19, 21, 22, 24, 25, 26], "ha": [2, 19, 24, 25], "been": 2, "review": 2, "least": 2, "one": [2, 21, 25, 26, 30], "address": 2, "ani": [2, 16, 18, 21], "releas": [2, 8], "cycl": [2, 19], "updat": 2, "roll": 2, "manner": 2, "roadmap": [2, 3, 11], "plan": 2, "critic": 2, "might": 2, "credit": 2, "given": [2, 16, 17, 21, 22, 24, 25], "acknowledg": 2, "note": [2, 24], "forev": 2, "record": 2, "histori": 2, "The": [3, 11, 12, 16, 19, 20, 21, 22], "who": 3, "anyon": 3, "place": 3, "question": 3, "idea": 3, "share": 3, "channel": [3, 19, 21, 25], "http": [3, 21, 25], "com": [3, 21, 25], "ai4environ": 3, "signup": 3, "welcom": 3, "faq": [3, 11], "tomand": 5, "ba": 5, "ac": 5, "uk": 5, "To": [6, 11], "simpli": [6, 11], "creat": [6, 8, 21], "class": [6, 12, 13, 16, 17, 18, 20, 21, 24], "inherit": [6, 13, 18, 24], 
"deepsensormodel": [6, 13, 23, 24, 26], "implement": [6, 12, 21, 24], "low": [6, 21, 25], "level": 6, "predict": [6, 10, 12, 21, 24, 25], "method": [6, 8, 12, 16, 17, 18, 21, 24, 25], "defin": [6, 16, 21], "probabilisticmodel": [6, 12, 23, 24], "mean": [6, 12, 13, 17, 19, 21, 24, 25], "stddev": [6, 12, 14, 21, 24], "In": [6, 7, 8, 11, 21, 25], "exampl": [6, 8, 10, 11, 19], "ll": 6, "call": [6, 25], "examplemodel": 6, "A": [6, 17, 21, 26], "veri": 6, "naiv": 6, "context": [6, 10, 12, 13, 16, 18, 21, 22, 24, 25, 26], "def": 6, "__init__": [6, 12, 13, 16, 17, 18, 21, 24], "self": [6, 17, 21, 24], "data_processor": [6, 10, 21, 24, 26], "dataprocessor": [6, 10, 15, 17, 21, 24, 26], "task_load": [6, 10, 13, 21, 22, 24, 26], "taskload": [6, 10, 13, 15, 16, 21, 22, 24, 26], "initi": [6, 21, 25], "parent": [6, 12], "data": [6, 10, 11, 20, 21, 22, 24, 26], "processor": [6, 10, 15, 20, 21, 26], "task": [6, 10, 12, 13, 15, 16, 20, 21, 24, 25, 26, 30], "loader": [6, 10, 15, 20, 21, 26], "super": 6, "comput": [6, 12, 17, 19, 21, 22, 24, 25, 26], "target": [6, 10, 12, 13, 16, 21, 22, 24, 25, 26], "locat": [6, 10, 13, 16, 21, 22, 24, 26], "return": [6, 26], "np": [6, 10, 20, 21, 23], "y_c": [6, 16, 26], "0": [6, 10, 12, 13, 16, 17, 19, 21, 22, 24, 25, 26], "1": [6, 10, 12, 13, 17, 19, 21, 22, 24, 25, 26], "after": [6, 10, 26], "us": [6, 8, 11, 12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 26, 30], "same": [6, 16, 17, 21, 24, 25], "built": 6, "convnp": [6, 10, 20, 23, 25, 26, 30], "see": [6, 21, 25], "jupyt": [6, 10], "notebook": [6, 10, 30], "part": [7, 20, 21], "show": 7, "instal": 7, "extend": [7, 11], "some": 7, "tutori": [7, 11], "pypi": 7, "sourc": [7, 11], "model": [7, 10, 11, 12, 13, 17, 20, 26, 30], "quickstart": [7, 9], "python": [8, 11], "packag": [8, 11, 20, 21], "number": [8, 16, 19, 21, 22, 24, 25, 26], "section": [8, 20], "two": [8, 10, 16, 19, 22], "main": [8, 21, 25], "want": [8, 21, 25], "latest": 8, "stabl": 8, "access": 8, "recommend": 8, "easiest": [8, 11], "pip": [8, 11], "backend": [8, 11, 18, 25, 26, 30], "choic": [8, 11], "tensorflow": [8, 11, 18, 20, 21, 30], "pytorch": [8, 11, 21], "torch": [8, 10, 11, 18, 20, 21, 30], "keep": 8, "easi": 8, "machin": [8, 11], "contain": [8, 10, 12, 16, 17, 18, 19, 20, 21, 22, 24, 25, 26], "doc": 8, "clone": 8, "git": 8, "e": [8, 17, 18, 19, 22, 24], "v": [8, 18], "demonstr": 10, "simpl": [10, 11, 20], "train": [10, 11, 13, 20], "convolut": [10, 21, 25], "condit": [10, 21], "neural": [10, 11, 21, 25], "convcnp": [10, 21], "spatial": [10, 17, 19], "interpol": [10, 16, 17, 21, 22, 25], "era5": 10, "go": 10, "import": [10, 11], "less": 10, "than": [10, 19, 21, 25, 26], "30": 10, "line": 10, "train_epoch": [10, 29, 30], "xarrai": [10, 11, 13, 16, 17, 19, 24], "xr": 10, "panda": [10, 11, 13, 16, 17, 19, 24, 26], "pd": 10, "numpi": [10, 12, 16, 17, 18, 21, 24, 25, 26], "load": [10, 16, 17, 21, 26], "raw": [10, 17], "ds_raw": 10, "open_dataset": 10, "air_temperatur": 10, "normalis": [10, 17, 21, 24], "x1_name": [10, 17], "lat": [10, 17], "x1_map": [10, 17], "15": 10, "75": 10, "x2_name": [10, 17], "lon": [10, 17], "x2_map": [10, 17], "200": 10, "330": 10, "d": [10, 16, 19], "gener": [10, 11, 16, 21, 26], "10": [10, 21], "grid": [10, 16, 17, 18, 19, 22, 24, 26], "cell": [10, 16], "train_task": 10, "date": [10, 16, 19, 24], "date_rang": 10, "2013": 10, "01": [10, 25], "2014": 10, "11": 10, "7": 10, "context_sampl": [10, 16], "random": [10, 12, 14, 16, 24], "uniform": 10, "target_sampl": [10, 16], "append": [10, 18, 24], "epoch": [10, 30], "rang": 10, 
"progress_bar": [10, 13, 24, 30], "true": [10, 12, 13, 17, 21, 24, 25, 26], "dens": 10, "point": [10, 12, 13, 16, 17, 21, 22, 24, 26], "test_task": 10, "12": 10, "31": 10, "mean_d": 10, "std_d": 10, "x_t": [10, 13, 16, 18, 24], "origin": [10, 18], "unit": [10, 19, 22], "coordin": [10, 13, 16, 17, 19, 26], "system": [10, 26], "dataset": [10, 13, 16, 17, 19, 24, 26], "dimens": [10, 16, 18, 21, 26], "25": 10, "53": 10, "datetime64": 10, "n": [10, 16, 17, 18, 24], "float32": [10, 16, 18], "72": 10, "5": [10, 16, 19, 21, 22, 24, 25, 26, 30], "70": 10, "67": 10, "65": 10, "22": 10, "20": 10, "17": 10, "202": 10, "205": 10, "207": 10, "322": 10, "325": 10, "327": 10, "variabl": [10, 16, 17, 18, 19, 21, 22, 24, 25, 26], "air": 10, "246": 10, "244": 10, "4": 10, "245": 10, "290": 10, "2": [10, 16, 17, 18, 19, 21, 24, 25], "289": 10, "8": [10, 21], "timeseri": 10, "off": [10, 16, 22, 24, 26], "arrai": [10, 16, 17, 18, 21], "argument": [10, 16, 26], "three": [10, 21, 25], "dai": [10, 19], "decemb": 10, "mean_df": 10, "std_df": 10, "50": 10, "280": 10, "40": 10, "250": 10, "t": [10, 16, 18], "260": 10, "183056": 10, "277": 10, "947373": 10, "02": 10, "261": 10, "08943": 10, "278": 10, "219599": 10, "03": 10, "257": 10, "128185": 10, "444229": 10, "avail": [10, 16, 30], "ad": [10, 18, 21, 25], "visualis": 10, "environment": 11, "faithfulli": 11, "match": 11, "flexibl": [11, 20], "intuit": 11, "interfac": 11, "wrap": [11, 21], "around": [11, 21], "power": 11, "neuralprocessess": 11, "function": [11, 12, 18, 20, 21, 22, 24, 25, 26], "while": [11, 20], "allow": [11, 20, 21, 24], "user": 11, "familiar": 11, "world": 11, "avoid": [11, 18], "murki": 11, "depth": 11, "tensor": [11, 13, 18, 21, 25], "compat": 11, "both": 11, "abil": 11, "thank": 11, "choos": 11, "between": [11, 16, 19, 22], "undergo": 11, "activ": [11, 13], "product": 11, "research": 11, "consid": [11, 13], "bibtex": 11, "entri": [11, 18, 19], "click": 11, "button": 11, "top": 11, "right": 11, "page": 11, "togeth": 11, "instead": 11, "replac": [11, 18], "abov": 11, "command": 11, "start": 11, "commun": 11, "api": 11, "refer": [11, 17, 26], "active_learn": [11, 20], "modul": [11, 20, 21], "plot": [11, 20], "index": [11, 12, 13, 16, 19, 24, 26], "search": [11, 12, 13], "acquisitionfunct": [12, 13, 14], "acquisit": [12, 26], "__call__": [12, 13, 16, 17, 21], "ndarrai": [12, 16, 17, 18, 21, 24, 25, 26], "shape": [12, 16, 17, 18, 24], "notimplementederror": [12, 24, 25, 30], "becaus": 12, "abstract": 12, "must": [12, 21, 25], "subclass": 12, "context_set_idx": [12, 13, 18, 26], "int": [12, 13, 16, 18, 19, 21, 22, 24, 25, 26, 30], "add": [12, 17, 18, 21, 26], "observ": [12, 16, 18, 19, 21, 26], "when": [12, 16, 17, 21], "acquisitionfunctionoracl": [12, 13, 14], "signifi": 12, "acquisitionfunctionparallel": [12, 14], "across": 12, "parallel": 12, "meanstddev": [12, 14], "margin": [12, 21, 24], "varianc": [12, 21, 24, 25], "meanvari": [12, 14], "pnormstddev": [12, 14], "arg": [12, 16, 21, 24, 26], "p": 12, "kwarg": [12, 16, 17, 21, 24, 25], "norm": 12, "vector": [12, 24], "standard": [12, 24], "deviat": [12, 24], "option": [12, 13, 16, 17, 19, 21, 24, 25, 26, 30], "default": [12, 13, 16, 17, 19, 20, 21, 23, 24, 25, 26, 30], "meanmarginalentropi": [12, 14], "entropi": [12, 21, 24], "distribut": [12, 21, 25], "jointentropi": [12, 14], "joint": [12, 21, 24], "oraclema": [12, 14], "oracl": 12, "error": 12, "oraclerms": [12, 14], "squar": 12, "oraclemarginalnl": [12, 14], "neg": 12, "log": 12, "likelihood": [12, 21, 25], "oraclejointnl": [12, 14], "seed": 
[12, 16, 24, 26], "42": 12, "contextdist": [12, 14], "distanc": [12, 19, 22], "closest": [12, 19], "expectedimprov": [12, 14], "onli": [12, 13, 21, 24, 26], "valid": [12, 16], "maximis": 12, "greedyalgorithm": [13, 14], "x_": 13, "dataarrai": [13, 16, 17, 19, 24], "datafram": [13, 16, 17, 19, 24, 26], "seri": [13, 16, 17, 19, 24], "x_s_mask": 13, "none": [13, 16, 17, 18, 21, 24, 25, 26, 30], "x_t_mask": [13, 24], "n_new_context": 13, "x_normalis": 13, "bool": [13, 16, 17, 18, 21, 24, 25, 26, 30], "fals": [13, 16, 17, 21, 24, 25, 26, 30], "model_infill_method": 13, "str": [13, 16, 17, 18, 21, 24, 25, 26], "query_infil": 13, "proposed_infil": 13, "target_set_idx": 13, "verbos": [13, 17, 21, 24, 26], "greedi": 13, "acquisition_fn": [13, 14, 20, 26], "list": [13, 16, 17, 21, 22, 24, 26, 30], "diff": 13, "tupl": [13, 16, 17, 18, 21, 24, 25, 26], "iter": [13, 26], "docstr": [13, 25], "todo": [13, 25], "sensor": 13, "prioriti": 13, "order": 13, "correspond": [13, 18], "space": [13, 22], "x_new_df": [13, 26], "acquisition_fn_d": [13, 26], "valueerror": [13, 16, 24, 26], "min_or_max": 13, "min": [13, 17], "max": [13, 17], "y_t_aux": 13, "mask": [13, 17, 18, 21, 26], "doe": [13, 16, 21, 26], "algorithm": [14, 20], "xarray_to_coord_array_normalis": [15, 17], "mask_coord_array_normalis": [15, 17], "da1_da2_same_grid": [15, 17], "interp_da1_to_da2": [15, 17], "append_obs_to_task": [15, 18], "flatten_x": [15, 18], "flatten_i": [15, 18], "flatten_gridded_data_in_task": [15, 18], "util": [15, 20], "construct_x1x2_d": [15, 19], "construct_circ_time_d": [15, 19], "compute_xarray_data_resolut": [15, 19], "compute_pandas_data_resolut": [15, 19], "task_loader_id": 16, "aux_at_context": 16, "aux_at_target": 16, "link": 16, "context_delta_t": 16, "target_delta_t": 16, "time_freq": 16, "xarray_interp_method": 16, "linear": 16, "discrete_xarray_sampl": 16, "dtype": [16, 18, 21, 25], "each": [16, 18, 19, 20, 21, 22, 25, 26, 30], "initialis": [16, 17, 18, 24], "behaviour": 16, "path": [16, 26], "overwrit": 16, "either": [16, 26], "els": 16, "save": [16, 17, 21], "config": [16, 17, 21], "folder": [16, 17, 21], "wa": [16, 26], "ignor": 16, "singl": [16, 18, 21, 25], "auxiliari": [16, 21, 25], "element": [16, 18], "sampl": [16, 18, 19, 21, 24, 25, 26], "second": [16, 17], "specifi": [16, 21, 25], "integ": [16, 21, 25], "case": [16, 21, 24, 25], "differ": [16, 18, 21, 25], "init": 16, "frequenc": [16, 19], "daili": [16, 19], "randomli": 16, "whether": [16, 17, 21, 24, 25, 26, 30], "discret": 16, "centr": 16, "continu": 16, "within": 16, "type": [16, 18, 21], "cast": [16, 18, 21], "config_fnam": [16, 17], "task_loader_config": 16, "json": [16, 17], "count_context_and_target_data_dim": 16, "count": 16, "context_dim": 16, "target_dim": 16, "infer_context_and_target_var_id": 16, "infer": [16, 21, 25], "id": [16, 26], "context_var_id": 16, "target_var_id": 16, "load_dask": [16, 17], "dask": [16, 17], "memori": [16, 17], "sample_da": 16, "da": [16, 17], "sampling_strat": 16, "float": [16, 18, 19, 21, 22, 24, 25, 30], "accord": 16, "strategi": 16, "assum": [16, 17], "slice": [16, 21], "alreadi": 16, "invalidsamplingstrategyerror": 16, "bound": 16, "sample_df": 16, "df": [16, 19], "frame": 16, "x_c": 16, "sample_offgrid_aux": 16, "offgrid_aux": 16, "task_gener": 16, "timestamp": 16, "split_frac": 16, "datewise_determinist": 16, "seed_overrid": 16, "sever": [16, 20], "uniformli": 16, "fraction": 16, "x1": [16, 17, 19, 24], "x2": [16, 17, 19, 24], "coord": [16, 17, 18, 24], "unnormalis": [16, 17, 21, 24, 26], "appli": [16, 18, 
24], "split": 16, "pair": 16, "remain": 16, "base": [16, 18, 24], "overrid": [16, 21, 26], "time_nam": 17, "deepcopi": 17, "mean_std": 17, "std": [17, 24], "min_max": 17, "param": 17, "name": 17, "g": [17, 18, 19, 24], "linearli": 17, "map": [17, 26], "print": [17, 18, 21, 24], "output": [17, 21, 24, 25], "__str__": [17, 18], "add_to_config": 17, "var_id": 17, "dict": [17, 18, 24, 25, 26], "check_params_comput": 17, "data_processor_config": 17, "get_config": 17, "pre": [17, 21], "classmethod": [17, 18, 21], "add_offset": 17, "unnorm": 17, "map_arrai": 17, "map_coord_arrai": 17, "coord_arrai": 17, "map_coord": 17, "map_x1_and_x2": 17, "n_x1": [17, 18], "n_x2": [17, 18], "set_coord_param": 17, "offset": 17, "uncertainti": 17, "dev": 17, "convert": [17, 18], "coord_arr": 17, "mask_da": 17, "remov": [17, 18], "outsid": 17, "boolean": 17, "insid": 17, "da1": 17, "da2": 17, "task_dict": 18, "dictionari": [18, 24], "modifi": [18, 25], "__repr__": 18, "conveni": 18, "summari": 18, "otherwis": 18, "add_batch_dim": 18, "batch": [18, 21, 26, 30], "cast_to_float32": 18, "convert_to_tensor": 18, "deep": 18, "mask_nans_np": 18, "mask_nans_numpi": 18, "nan": [18, 21], "zero": 18, "indic": [18, 21, 25, 26], "were": 18, "miss": 18, "op": 18, "f": 18, "op_flag": 18, "recast": 18, "reshap": 18, "flag": 18, "kei": 18, "remove_nans_from_task_y_t_if_pres": 18, "present": 18, "y_t": 18, "summarise_repr": 18, "k": 18, "summarise_str": 18, "x_new": 18, "int8": 18, "int16": 18, "int32": 18, "int64": 18, "uint8": 18, "uint16": 18, "uint32": 18, "uint64": 18, "bool_": 18, "float16": 18, "float64": 18, "longdoubl": 18, "complex": 18, "complex64": 18, "complex128": 18, "clongdoubl": 18, "moduletyp": [18, 21], "autograd": 18, "tracer": 18, "box": 18, "indexedslic": 18, "jaxlib": 18, "xla_extens": 18, "arrayimpl": 18, "jax": 18, "y_new": 18, "copi": 18, "structur": [18, 20], "affect": 18, "x": [18, 24], "necessari": 18, "y": 18, "n_dim": 18, "flatten": 18, "doesn": 18, "yet": 18, "permit": 18, "gridded_d": 19, "construct": [19, 21, 25], "var": [19, 24], "2d": [19, 26], "whose": 19, "x_1": 19, "x_2": 19, "freq": 19, "circular": 19, "dictat": 19, "h": 19, "per": [19, 22], "hourli": 19, "interv": 19, "year": 19, "monthli": 19, "resolut": [19, 22, 24], "finer": 19, "degre": 19, "data_resolut": [19, 22], "n_time": 19, "1000": 19, "percentil": 19, "approxim": 19, "non": 19, "nth": 19, "neighbour": 19, "possibli": 19, "subset": [19, 21], "fewer": 19, "5th": 19, "pairwis": 19, "attribut": [20, 24], "possibl": [20, 22], "still": 20, "lot": 20, "divid": [20, 21, 25], "submodul": 20, "context_encod": [20, 26], "feature_map": [20, 26], "offgrid_context": [20, 26], "offgrid_context_observ": [20, 26], "placement": [20, 26], "receptive_field": [20, 26], "regress": 21, "probabilist": [21, 24], "neuralprocess": [21, 25], "wesselb": [21, 25], "blob": [21, 25], "architectur": [21, 25], "convgnp": [21, 25], "py": [21, 25], "multipl": [21, 25], "dispatch": 21, "plum": 21, "re": [21, 25], "forward": [21, 26], "logpdf": [21, 24], "etc": 21, "altern": 21, "instanti": 21, "auto": 21, "sensibl": 21, "These": 21, "addit": [21, 26], "customis": 21, "points_per_unit": [21, 25], "densiti": [21, 22, 25], "intern": [21, 22, 25], "discretis": [21, 25], "100": [21, 25], "cnp": [21, 25], "equival": [21, 25], "het": [21, 25], "gnp": [21, 25], "lowrank": [21, 25], "spike": [21, 25], "beta": [21, 25], "dim_x": [21, 25], "dimension": [21, 25], "input": [21, 22, 25], "dim_i": [21, 25], "dim_yc": [21, 25], "should": [21, 22, 24, 25], "equal": [21, 22, 25], 
"dim_yt": [21, 25], "dim_aux_t": [21, 25], "specif": [21, 24, 25], "conv_arch": [21, 25], "unet": [21, 25], "sep": [21, 25], "conv": [21, 25], "unet_channel": [21, 25], "everi": [21, 25], "layer": [21, 25, 26], "six": [21, 25], "64": [21, 25], "unet_kernel": [21, 25], "size": [21, 25, 26, 30], "kernel": [21, 25], "unet_resize_conv": [21, 25], "resiz": [21, 25], "rather": [21, 25], "transpos": [21, 25], "unet_resize_conv_interp_method": [21, 25], "bilinear": [21, 25], "num_basis_funct": [21, 25], "basi": [21, 22, 25], "rank": [21, 25], "dim_lv": [21, 25], "latent": [21, 25], "encoder_scal": [21, 22, 25], "length": [21, 22, 24, 25], "scale": [21, 22, 25], "embed": [21, 25], "encoder_scales_learn": [21, 25], "encod": [21, 22, 25, 26], "setconv": [21, 22, 25], "learnabl": [21, 25], "decoder_scal": [21, 22, 25], "decod": [21, 22, 25, 26], "decoder_scale_learn": [21, 25], "aux_t_mlp_lay": [21, 25], "width": [21, 25], "mlp": [21, 25], "128": [21, 25], "epsilon": [21, 25], "1e": [21, 25], "n_sampl": [21, 24, 25], "requires_grad": [21, 25], "draw": [21, 24, 25], "gradient": [21, 25], "construct_convgnp": 21, "take": 21, "unless": 21, "overridden": 21, "neural_process": [21, 25], "kera": [21, 30], "nn": 21, "tfmodel": 21, "torchmodel": 21, "model_id": 21, "weight": 21, "ar_sampl": [21, 24], "x_target_ar": 21, "ar_subsample_factor": [21, 24], "fill_typ": 21, "liter": [21, 25], "autoregress": [21, 24], "over": [21, 22, 24, 26], "infil": 21, "rest": 21, "0th": 21, "drawn": 21, "subsampl": 21, "covari": [21, 24], "dist": [21, 25], "abstractdistribut": 21, "abstractmultioutputdistribut": 21, "joint_entropi": [21, 24], "concat": 21, "along": 21, "loss_fn": 21, "fix_nois": 21, "num_lv_sampl": 21, "loss": [21, 24, 30], "nois": 21, "lv": 21, "evalu": 21, "mean_marginal_entropi": [21, 24], "modify_task": 21, "dim": 21, "noiseless": [21, 25], "slice_diag": 21, "concat_task": [21, 23], "gen_ppu": [22, 23], "ppu": 22, "loop": 22, "maximum": [22, 26], "model_ppu": 22, "gen_decoder_scal": [22, 23], "gaussian": 22, "small": 22, "chosen": 22, "gen_encoder_scal": [22, 23], "station": 22, "half": 22, "represent": 22, "smoothli": 22, "determin": [22, 24], "nearest": 22, "create_empty_spatiotemporal_xarrai": [23, 24], "increase_spatial_resolut": [23, 24], "convert_task_to_nps_arg": [23, 25], "run_nps_model": [23, 25], "run_nps_model_ar": [23, 25], "construct_neural_process": [23, 25], "compute_encoding_tensor": [23, 25], "matrix": 24, "cov": 24, "child": 24, "would": 24, "too": 24, "x_t_is_normalis": 24, "resolution_factor": 24, "append_index": 24, "regular": 24, "factor": 24, "increas": 24, "doubl": 24, "halv": 24, "determinist": 24, "metadata": [24, 26], "displai": [24, 30], "progress": [24, 30], "bar": [24, 30], "taken": 24, "n_target": 24, "coord_nam": 24, "data_var": 24, "prepend_dim": 24, "prepend_coord": 24, "x_t_normalis": 24, "build": 25, "signatur": 25, "num_sampl": 25, "mode": 25, "noisi": 25, "below": 25, "thei": 25, "explicitli": 25, "constructor": 25, "safe": 25, "cr": 26, "ax": 26, "polarax": 26, "aitoffax": 26, "hammerax": 26, "lambertax": 26, "mollweideax": 26, "axes3d": 26, "col_dim": 26, "cmap": 26, "colormap": 26, "greys_r": 26, "figsiz": 26, "3": 26, "add_colorbar": 26, "max_ncol": 26, "figur": 26, "matplotlib": 26, "polar": 26, "geo": 26, "mpl_toolkit": 26, "mplot3d": 26, "column": 26, "color": 26, "inch": 26, "colorbar": 26, "pyplot": 26, "rais": 26, "encount": 26, "assertionerror": 26, "greater": 26, "batch_idx": 26, "land_idx": 26, "cbar": 26, "clim": 26, "viridi": 26, "verbose_titl": 26, 
"titl": 26, "return_ax": 26, "dataload": 26, "land": 26, "overlai": 26, "contour": 26, "limit": 26, "subplot": 26, "n_features_per_lay": 26, "grei": 26, "downsampl": 26, "recognis": 26, "plot_target": 26, "add_legend": 26, "marker": 26, "scatter_kwarg": 26, "legend": 26, "style": 26, "keyword": 26, "scatter": 26, "format_str": 26, "extent": 26, "black": 26, "format": 26, "string": 26, "text": 26, "1d": 26, "exactli": 26, "global": 26, "recept": 26, "field": 26, "set_gpu_default_devic": [29, 30], "gpu": 30, "devic": 30, "runtimeerror": 30, "support": 30, "lr": 30, "5e": 30, "05": 30, "batch_siz": 30, "opt": 30, "tqdm_notebook": 30, "rate": 30, "perform": 30, "optim": 30, "tf": 30, "adam": 30}, "objects": {"deepsensor.active_learning.acquisition_fns": [[12, 0, 1, "", "AcquisitionFunction"], [12, 0, 1, "", "AcquisitionFunctionOracle"], [12, 0, 1, "", "AcquisitionFunctionParallel"], [12, 0, 1, "", "ContextDist"], [12, 0, 1, "", "ExpectedImprovement"], [12, 0, 1, "", "JointEntropy"], [12, 0, 1, "", "MeanMarginalEntropy"], [12, 0, 1, "", "MeanStddev"], [12, 0, 1, "", "MeanVariance"], [12, 0, 1, "", "OracleJointNLL"], [12, 0, 1, "", "OracleMAE"], [12, 0, 1, "", "OracleMarginalNLL"], [12, 0, 1, "", "OracleRMSE"], [12, 0, 1, "", "Random"], [12, 0, 1, "", "Stddev"], [12, 0, 1, "", "pNormStddev"]], "deepsensor.active_learning.acquisition_fns.AcquisitionFunction": [[12, 1, 1, "", "__call__"], [12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.ContextDist": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.ExpectedImprovement": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.JointEntropy": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.MeanStddev": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.MeanVariance": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleJointNLL": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleMAE": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleMarginalNLL": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.OracleRMSE": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.Random": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.Stddev": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.acquisition_fns.pNormStddev": [[12, 1, 1, "", "__init__"]], "deepsensor.active_learning.algorithms": [[13, 0, 1, "", "GreedyAlgorithm"]], "deepsensor.active_learning.algorithms.GreedyAlgorithm": [[13, 1, 1, "", "__call__"], [13, 1, 1, "", "__init__"]], "deepsensor.data.loader": [[16, 0, 1, "", "TaskLoader"]], "deepsensor.data.loader.TaskLoader": [[16, 1, 1, "", "__call__"], [16, 1, 1, "", "__init__"], [16, 2, 1, "", "config_fname"], [16, 1, 1, "", "count_context_and_target_data_dims"], [16, 1, 1, "", "infer_context_and_target_var_IDs"], [16, 1, 1, "", "load_dask"], [16, 1, 1, "", "sample_da"], [16, 1, 1, "", "sample_df"], [16, 1, 1, "", "sample_offgrid_aux"], [16, 1, 1, "", "save"], [16, 1, 1, "", "task_generation"]], "deepsensor.data.processor": [[17, 0, 1, "", "DataProcessor"], [17, 3, 1, "", "da1_da2_same_grid"], [17, 3, 1, "", "interp_da1_to_da2"], [17, 3, 1, "", "mask_coord_array_normalised"], [17, 3, 1, "", "xarray_to_coord_array_normalised"]], "deepsensor.data.processor.DataProcessor": [[17, 1, 1, "", 
"__call__"], [17, 1, 1, "", "__init__"], [17, 1, 1, "", "__str__"], [17, 1, 1, "", "add_to_config"], [17, 1, 1, "", "check_params_computed"], [17, 2, 1, "", "config_fname"], [17, 1, 1, "", "get_config"], [17, 1, 1, "", "load_dask"], [17, 1, 1, "", "map"], [17, 1, 1, "", "map_array"], [17, 1, 1, "", "map_coord_array"], [17, 1, 1, "", "map_coords"], [17, 1, 1, "", "map_x1_and_x2"], [17, 1, 1, "", "save"], [17, 1, 1, "", "set_coord_params"], [17, 1, 1, "", "unnormalise"]], "deepsensor.data.task": [[18, 0, 1, "", "Task"], [18, 3, 1, "", "append_obs_to_task"], [18, 3, 1, "", "flatten_X"], [18, 3, 1, "", "flatten_Y"], [18, 3, 1, "", "flatten_gridded_data_in_task"]], "deepsensor.data.task.Task": [[18, 1, 1, "", "__init__"], [18, 1, 1, "", "__repr__"], [18, 1, 1, "", "__str__"], [18, 1, 1, "", "add_batch_dim"], [18, 1, 1, "", "cast_to_float32"], [18, 1, 1, "", "convert_to_tensor"], [18, 1, 1, "", "mask_nans_nps"], [18, 1, 1, "", "mask_nans_numpy"], [18, 1, 1, "", "op"], [18, 1, 1, "", "remove_nans_from_task_Y_t_if_present"], [18, 1, 1, "", "summarise_repr"], [18, 1, 1, "", "summarise_str"]], "deepsensor.data.utils": [[19, 3, 1, "", "compute_pandas_data_resolution"], [19, 3, 1, "", "compute_xarray_data_resolution"], [19, 3, 1, "", "construct_circ_time_ds"], [19, 3, 1, "", "construct_x1x2_ds"]], "deepsensor.model.convnp": [[21, 0, 1, "", "ConvNP"], [21, 3, 1, "", "concat_tasks"]], "deepsensor.model.convnp.ConvNP": [[21, 1, 1, "", "__call__"], [21, 1, 1, "", "__init__"], [21, 1, 1, "", "ar_sample"], [21, 1, 1, "", "covariance"], [21, 1, 1, "", "joint_entropy"], [21, 1, 1, "", "load"], [21, 1, 1, "", "logpdf"], [21, 1, 1, "", "loss_fn"], [21, 1, 1, "", "mean"], [21, 1, 1, "", "mean_marginal_entropy"], [21, 1, 1, "", "modify_task"], [21, 1, 1, "", "sample"], [21, 1, 1, "", "save"], [21, 1, 1, "", "slice_diag"], [21, 1, 1, "", "stddev"], [21, 1, 1, "", "variance"]], "deepsensor.model.defaults": [[22, 3, 1, "", "gen_decoder_scale"], [22, 3, 1, "", "gen_encoder_scales"], [22, 3, 1, "", "gen_ppu"]], "deepsensor.model.model": [[24, 0, 1, "", "DeepSensorModel"], [24, 0, 1, "", "ProbabilisticModel"], [24, 3, 1, "", "create_empty_spatiotemporal_xarray"], [24, 3, 1, "", "increase_spatial_resolution"]], "deepsensor.model.model.DeepSensorModel": [[24, 1, 1, "", "__init__"], [24, 1, 1, "", "covariance"], [24, 1, 1, "", "joint_entropy"], [24, 1, 1, "", "logpdf"], [24, 1, 1, "", "loss"], [24, 1, 1, "", "mean"], [24, 1, 1, "", "mean_marginal_entropy"], [24, 1, 1, "", "predict"], [24, 1, 1, "", "sample"], [24, 1, 1, "", "stddev"], [24, 1, 1, "", "variance"]], "deepsensor.model.model.ProbabilisticModel": [[24, 1, 1, "", "covariance"], [24, 1, 1, "", "joint_entropy"], [24, 1, 1, "", "logpdf"], [24, 1, 1, "", "loss"], [24, 1, 1, "", "mean"], [24, 1, 1, "", "mean_marginal_entropy"], [24, 1, 1, "", "sample"], [24, 1, 1, "", "stddev"], [24, 1, 1, "", "variance"]], "deepsensor.model.nps": [[25, 3, 1, "", "compute_encoding_tensor"], [25, 3, 1, "", "construct_neural_process"], [25, 3, 1, "", "convert_task_to_nps_args"], [25, 3, 1, "", "run_nps_model"], [25, 3, 1, "", "run_nps_model_ar"]], "deepsensor.plot": [[26, 3, 1, "", "acquisition_fn"], [26, 3, 1, "", "context_encoding"], [26, 3, 1, "", "feature_maps"], [26, 3, 1, "", "offgrid_context"], [26, 3, 1, "", "offgrid_context_observations"], [26, 3, 1, "", "placements"], [26, 3, 1, "", "receptive_field"]], "deepsensor.train.train": [[30, 3, 1, "", "set_gpu_default_device"], [30, 3, 1, "", "train_epoch"]]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:attribute", "3": 
"py:function"}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "function", "Python function"]}, "titleterms": {"develop": [0, 5], "code": 0, "conduct": 0, "contribut": 1, "deepsensor": [1, 3, 4, 6, 11, 12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 26, 30], "commun": [2, 3], "faq": 2, "question": 2, "": [3, 11], "user": 3, "contributor": 3, "tabl": [3, 7, 11, 14, 15, 20, 23, 29], "content": [3, 7, 11, 14, 15, 20, 23, 29], "roadmap": 4, "contact": 5, "extend": 6, "new": 6, "model": [6, 21, 22, 23, 24, 25], "get": 7, "start": 7, "instal": [8, 11], "instruct": 8, "from": 8, "pypi": 8, "sourc": 8, "tutori": [9, 10], "quickstart": 10, "welcom": 11, "document": 11, "cite": 11, "quick": 11, "indic": 11, "active_learn": [12, 13, 14], "acquisition_fn": 12, "paramet": [12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 30], "return": [12, 13, 16, 17, 18, 19, 21, 22, 24, 25, 30], "rais": [12, 13, 16, 24, 25, 30], "algorithm": 13, "modul": [14, 15, 23, 26, 27, 28, 29], "data": [15, 16, 17, 18, 19], "loader": 16, "processor": 17, "task": 18, "util": 19, "api": 20, "refer": 20, "convnp": 21, "default": 22, "np": 25, "plot": 26, "tensorflow": 27, "torch": 28, "train": [29, 30]}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 60}, "alltitles": {"Developer Code of Conduct": [[0, "developer-code-of-conduct"]], "Contributing to DeepSensor": [[1, "contributing-to-deepsensor"]], "Community FAQ": [[2, "community-faq"]], "Questions": [[2, "questions"]], "DeepSensor\u2019s user and contributor community": [[3, "deepsensor-s-user-and-contributor-community"]], "Table of contents:": [[3, null], [7, null], [14, null], [15, null], [20, null], [23, null], [29, null]], "DeepSensor Roadmap": [[4, "deepsensor-roadmap"]], "Contact the developers": [[5, "contact-the-developers"]], "Extending DeepSensor with new models": [[6, "extending-deepsensor-with-new-models"]], "Getting started": [[7, "getting-started"]], "Installation instructions": [[8, "installation-instructions"]], "Install from PyPI": [[8, "install-from-pypi"]], "Install from source": [[8, "install-from-source"]], "Tutorials": [[9, "tutorials"]], "Tutorials:": [[9, null]], "Tutorial: Quickstart": [[10, "tutorial-quickstart"]], "Welcome to DeepSensor\u2019s documentation!": [[11, "welcome-to-deepsensor-s-documentation"]], "Citing DeepSensor": [[11, "citing-deepsensor"]], "Quick installation": [[11, "quick-installation"]], "Contents:": [[11, null]], "Indices and tables": [[11, "indices-and-tables"]], "deepsensor.active_learning.acquisition_fns": [[12, "deepsensor-active-learning-acquisition-fns"]], "Parameters": [[12, "parameters"], [12, "id1"], [12, "id2"], [12, "id3"], [12, "id4"], [12, "id5"], [12, "id6"], [12, "id7"], [12, "id8"], [12, "id9"], [12, "id10"], [12, "id11"], [12, "id12"], [12, "id13"], [12, "id14"], [13, "parameters"], [13, "id1"], [16, "parameters"], [16, "id1"], [16, "id6"], [16, "id9"], [16, "id12"], [16, "id14"], [17, "parameters"], [17, "id1"], [17, "id2"], [17, "id4"], [17, "id6"], [17, "id8"], [17, "id10"], [17, "id12"], [17, "id14"], [17, "id16"], [17, "id18"], [17, "id20"], [17, "id22"], [17, "id24"], [17, "id26"], [17, "id28"], [18, "parameters"], [18, "id3"], [18, "id5"], [18, 
"id7"], [18, "id9"], [19, "parameters"], [19, "id1"], [19, "id3"], [19, "id5"], [21, "parameters"], [21, "id1"], [21, "id2"], [21, "id3"], [21, "id4"], [21, "id6"], [21, "id8"], [21, "id10"], [21, "id12"], [21, "id14"], [21, "id16"], [21, "id18"], [21, "id20"], [21, "id22"], [21, "id24"], [21, "id26"], [21, "id28"], [21, "id30"], [21, "id32"], [21, "id34"], [21, "id36"], [21, "id38"], [21, "id40"], [21, "id42"], [21, "id44"], [22, "parameters"], [22, "id1"], [22, "id3"], [24, "parameters"], [24, "id1"], [24, "id2"], [24, "id5"], [24, "id8"], [24, "id11"], [24, "id14"], [24, "id17"], [24, "id20"], [24, "id23"], [24, "id25"], [24, "id28"], [24, "id31"], [24, "id34"], [24, "id37"], [24, "id40"], [24, "id43"], [24, "id46"], [24, "id49"], [24, "id51"], [25, "parameters"], [25, "id1"], [25, "id3"], [25, "id5"], [25, "id7"], [30, "parameters"]], "Returns": [[12, "returns"], [13, "returns"], [16, "returns"], [16, "id2"], [16, "id3"], [16, "id5"], [16, "id7"], [16, "id10"], [16, "id13"], [16, "id15"], [17, "returns"], [17, "id3"], [17, "id5"], [17, "id7"], [17, "id9"], [17, "id11"], [17, "id13"], [17, "id15"], [17, "id17"], [17, "id19"], [17, "id21"], [17, "id23"], [17, "id25"], [17, "id27"], [17, "id29"], [18, "returns"], [18, "id1"], [18, "id2"], [18, "id4"], [18, "id6"], [18, "id8"], [18, "id10"], [19, "returns"], [19, "id2"], [19, "id4"], [19, "id6"], [21, "returns"], [21, "id5"], [21, "id7"], [21, "id9"], [21, "id11"], [21, "id13"], [21, "id15"], [21, "id17"], [21, "id19"], [21, "id21"], [21, "id23"], [21, "id25"], [21, "id27"], [21, "id29"], [21, "id31"], [21, "id33"], [21, "id35"], [21, "id37"], [21, "id39"], [21, "id41"], [21, "id43"], [21, "id45"], [22, "returns"], [22, "id2"], [22, "id4"], [24, "returns"], [24, "id3"], [24, "id6"], [24, "id9"], [24, "id12"], [24, "id15"], [24, "id18"], [24, "id21"], [24, "id24"], [24, "id26"], [24, "id29"], [24, "id32"], [24, "id35"], [24, "id38"], [24, "id41"], [24, "id44"], [24, "id47"], [24, "id50"], [24, "id52"], [25, "returns"], [25, "id2"], [25, "id4"], [25, "id6"], [25, "id8"], [30, "returns"], [30, "id1"]], "Raises": [[12, "raises"], [13, "raises"], [13, "id2"], [16, "raises"], [16, "id4"], [16, "id8"], [16, "id11"], [24, "raises"], [24, "id4"], [24, "id7"], [24, "id10"], [24, "id13"], [24, "id16"], [24, "id19"], [24, "id22"], [24, "id27"], [24, "id30"], [24, "id33"], [24, "id36"], [24, "id39"], [24, "id42"], [24, "id45"], [24, "id48"], [24, "id53"], [25, "raises"], [30, "raises"]], "deepsensor.active_learning.algorithms": [[13, "deepsensor-active-learning-algorithms"]], "active_learning module": [[14, "active-learning-module"]], "data module": [[15, "data-module"]], "deepsensor.data.loader": [[16, "deepsensor-data-loader"]], "deepsensor.data.processor": [[17, "deepsensor-data-processor"]], "deepsensor.data.task": [[18, "deepsensor-data-task"]], "deepsensor.data.utils": [[19, "deepsensor-data-utils"]], "API Reference": [[20, "api-reference"]], "deepsensor.model.convnp": [[21, "deepsensor-model-convnp"]], "deepsensor.model.defaults": [[22, "deepsensor-model-defaults"]], "model module": [[23, "model-module"]], "deepsensor.model.model": [[24, "deepsensor-model-model"]], "deepsensor.model.nps": [[25, "deepsensor-model-nps"]], "deepsensor.plot module": [[26, "deepsensor-plot-module"]], "tensorflow module": [[27, "tensorflow-module"]], "torch module": [[28, "torch-module"]], "train module": [[29, "train-module"]], "deepsensor.train.train": [[30, "deepsensor-train-train"]]}, "indexentries": {"acquisitionfunction (class in 
deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunction"]], "acquisitionfunctionoracle (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunctionOracle"]], "acquisitionfunctionparallel (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunctionParallel"]], "contextdist (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.ContextDist"]], "expectedimprovement (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.ExpectedImprovement"]], "jointentropy (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.JointEntropy"]], "meanmarginalentropy (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy"]], "meanstddev (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.MeanStddev"]], "meanvariance (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.MeanVariance"]], "oraclejointnll (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleJointNLL"]], "oraclemae (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMAE"]], "oraclemarginalnll (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMarginalNLL"]], "oraclermse (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.OracleRMSE"]], "random (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.Random"]], "stddev (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.Stddev"]], "__call__() (deepsensor.active_learning.acquisition_fns.acquisitionfunction method)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunction.__call__"]], "__init__() (deepsensor.active_learning.acquisition_fns.acquisitionfunction method)": [[12, "deepsensor.active_learning.acquisition_fns.AcquisitionFunction.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.contextdist method)": [[12, "deepsensor.active_learning.acquisition_fns.ContextDist.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.expectedimprovement method)": [[12, "deepsensor.active_learning.acquisition_fns.ExpectedImprovement.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.jointentropy method)": [[12, "deepsensor.active_learning.acquisition_fns.JointEntropy.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.meanmarginalentropy method)": [[12, "deepsensor.active_learning.acquisition_fns.MeanMarginalEntropy.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.meanstddev method)": [[12, "deepsensor.active_learning.acquisition_fns.MeanStddev.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.meanvariance method)": [[12, "deepsensor.active_learning.acquisition_fns.MeanVariance.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclejointnll method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleJointNLL.__init__"]], 
"__init__() (deepsensor.active_learning.acquisition_fns.oraclemae method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMAE.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclemarginalnll method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleMarginalNLL.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.oraclermse method)": [[12, "deepsensor.active_learning.acquisition_fns.OracleRMSE.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.random method)": [[12, "deepsensor.active_learning.acquisition_fns.Random.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.stddev method)": [[12, "deepsensor.active_learning.acquisition_fns.Stddev.__init__"]], "__init__() (deepsensor.active_learning.acquisition_fns.pnormstddev method)": [[12, "deepsensor.active_learning.acquisition_fns.pNormStddev.__init__"]], "pnormstddev (class in deepsensor.active_learning.acquisition_fns)": [[12, "deepsensor.active_learning.acquisition_fns.pNormStddev"]], "greedyalgorithm (class in deepsensor.active_learning.algorithms)": [[13, "deepsensor.active_learning.algorithms.GreedyAlgorithm"]], "__call__() (deepsensor.active_learning.algorithms.greedyalgorithm method)": [[13, "deepsensor.active_learning.algorithms.GreedyAlgorithm.__call__"]], "__init__() (deepsensor.active_learning.algorithms.greedyalgorithm method)": [[13, "deepsensor.active_learning.algorithms.GreedyAlgorithm.__init__"]], "taskloader (class in deepsensor.data.loader)": [[16, "deepsensor.data.loader.TaskLoader"]], "__call__() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.__call__"]], "__init__() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.__init__"]], "config_fname (deepsensor.data.loader.taskloader attribute)": [[16, "deepsensor.data.loader.TaskLoader.config_fname"]], "count_context_and_target_data_dims() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.count_context_and_target_data_dims"]], "infer_context_and_target_var_ids() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.infer_context_and_target_var_IDs"]], "load_dask() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.load_dask"]], "sample_da() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.sample_da"]], "sample_df() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.sample_df"]], "sample_offgrid_aux() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.sample_offgrid_aux"]], "save() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.save"]], "task_generation() (deepsensor.data.loader.taskloader method)": [[16, "deepsensor.data.loader.TaskLoader.task_generation"]], "dataprocessor (class in deepsensor.data.processor)": [[17, "deepsensor.data.processor.DataProcessor"]], "__call__() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.__call__"]], "__init__() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.__init__"]], "__str__() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.__str__"]], "add_to_config() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.add_to_config"]], 
"check_params_computed() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.check_params_computed"]], "config_fname (deepsensor.data.processor.dataprocessor attribute)": [[17, "deepsensor.data.processor.DataProcessor.config_fname"]], "da1_da2_same_grid() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.da1_da2_same_grid"]], "get_config() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.get_config"]], "interp_da1_to_da2() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.interp_da1_to_da2"]], "load_dask() (deepsensor.data.processor.dataprocessor class method)": [[17, "deepsensor.data.processor.DataProcessor.load_dask"]], "map() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map"]], "map_array() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_array"]], "map_coord_array() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_coord_array"]], "map_coords() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_coords"]], "map_x1_and_x2() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.map_x1_and_x2"]], "mask_coord_array_normalised() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.mask_coord_array_normalised"]], "save() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.save"]], "set_coord_params() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.set_coord_params"]], "unnormalise() (deepsensor.data.processor.dataprocessor method)": [[17, "deepsensor.data.processor.DataProcessor.unnormalise"]], "xarray_to_coord_array_normalised() (in module deepsensor.data.processor)": [[17, "deepsensor.data.processor.xarray_to_coord_array_normalised"]], "task (class in deepsensor.data.task)": [[18, "deepsensor.data.task.Task"]], "__init__() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.__init__"]], "__repr__() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.__repr__"]], "__str__() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.__str__"]], "add_batch_dim() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.add_batch_dim"]], "append_obs_to_task() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.append_obs_to_task"]], "cast_to_float32() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.cast_to_float32"]], "convert_to_tensor() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.convert_to_tensor"]], "flatten_x() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.flatten_X"]], "flatten_y() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.flatten_Y"]], "flatten_gridded_data_in_task() (in module deepsensor.data.task)": [[18, "deepsensor.data.task.flatten_gridded_data_in_task"]], "mask_nans_nps() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.mask_nans_nps"]], "mask_nans_numpy() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.mask_nans_numpy"]], "op() (deepsensor.data.task.task method)": [[18, "deepsensor.data.task.Task.op"]], "remove_nans_from_task_y_t_if_present() (deepsensor.data.task.task method)": [[18, 
"deepsensor.data.task.Task.remove_nans_from_task_Y_t_if_present"]], "summarise_repr() (deepsensor.data.task.task class method)": [[18, "deepsensor.data.task.Task.summarise_repr"]], "summarise_str() (deepsensor.data.task.task class method)": [[18, "deepsensor.data.task.Task.summarise_str"]], "compute_pandas_data_resolution() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.compute_pandas_data_resolution"]], "compute_xarray_data_resolution() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.compute_xarray_data_resolution"]], "construct_circ_time_ds() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.construct_circ_time_ds"]], "construct_x1x2_ds() (in module deepsensor.data.utils)": [[19, "deepsensor.data.utils.construct_x1x2_ds"]], "convnp (class in deepsensor.model.convnp)": [[21, "deepsensor.model.convnp.ConvNP"]], "__call__() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.__call__"]], "__init__() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.__init__"]], "ar_sample() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.ar_sample"]], "concat_tasks() (in module deepsensor.model.convnp)": [[21, "deepsensor.model.convnp.concat_tasks"]], "covariance() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.covariance"]], "joint_entropy() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.joint_entropy"]], "load() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.load"]], "logpdf() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.logpdf"]], "loss_fn() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.loss_fn"]], "mean() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.mean"]], "mean_marginal_entropy() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.mean_marginal_entropy"]], "modify_task() (deepsensor.model.convnp.convnp class method)": [[21, "deepsensor.model.convnp.ConvNP.modify_task"]], "sample() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.sample"]], "save() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.save"]], "slice_diag() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.slice_diag"]], "stddev() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.stddev"]], "variance() (deepsensor.model.convnp.convnp method)": [[21, "deepsensor.model.convnp.ConvNP.variance"]], "gen_decoder_scale() (in module deepsensor.model.defaults)": [[22, "deepsensor.model.defaults.gen_decoder_scale"]], "gen_encoder_scales() (in module deepsensor.model.defaults)": [[22, "deepsensor.model.defaults.gen_encoder_scales"]], "gen_ppu() (in module deepsensor.model.defaults)": [[22, "deepsensor.model.defaults.gen_ppu"]], "deepsensormodel (class in deepsensor.model.model)": [[24, "deepsensor.model.model.DeepSensorModel"]], "probabilisticmodel (class in deepsensor.model.model)": [[24, "deepsensor.model.model.ProbabilisticModel"]], "__init__() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.__init__"]], "covariance() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.covariance"]], "covariance() (deepsensor.model.model.probabilisticmodel method)": [[24, 
"deepsensor.model.model.ProbabilisticModel.covariance"]], "create_empty_spatiotemporal_xarray() (in module deepsensor.model.model)": [[24, "deepsensor.model.model.create_empty_spatiotemporal_xarray"]], "increase_spatial_resolution() (in module deepsensor.model.model)": [[24, "deepsensor.model.model.increase_spatial_resolution"]], "joint_entropy() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.joint_entropy"]], "joint_entropy() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.joint_entropy"]], "logpdf() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.logpdf"]], "logpdf() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.logpdf"]], "loss() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.loss"]], "loss() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.loss"]], "mean() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.mean"]], "mean() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.mean"]], "mean_marginal_entropy() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.mean_marginal_entropy"]], "mean_marginal_entropy() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.mean_marginal_entropy"]], "predict() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.predict"]], "sample() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.sample"]], "sample() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.sample"]], "stddev() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.stddev"]], "stddev() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.stddev"]], "variance() (deepsensor.model.model.deepsensormodel method)": [[24, "deepsensor.model.model.DeepSensorModel.variance"]], "variance() (deepsensor.model.model.probabilisticmodel method)": [[24, "deepsensor.model.model.ProbabilisticModel.variance"]], "compute_encoding_tensor() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.compute_encoding_tensor"]], "construct_neural_process() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.construct_neural_process"]], "convert_task_to_nps_args() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.convert_task_to_nps_args"]], "run_nps_model() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.run_nps_model"]], "run_nps_model_ar() (in module deepsensor.model.nps)": [[25, "deepsensor.model.nps.run_nps_model_ar"]], "acquisition_fn() (in module deepsensor.plot)": [[26, "deepsensor.plot.acquisition_fn"]], "context_encoding() (in module deepsensor.plot)": [[26, "deepsensor.plot.context_encoding"]], "feature_maps() (in module deepsensor.plot)": [[26, "deepsensor.plot.feature_maps"]], "offgrid_context() (in module deepsensor.plot)": [[26, "deepsensor.plot.offgrid_context"]], "offgrid_context_observations() (in module deepsensor.plot)": [[26, "deepsensor.plot.offgrid_context_observations"]], "placements() (in module deepsensor.plot)": [[26, 
"deepsensor.plot.placements"]], "receptive_field() (in module deepsensor.plot)": [[26, "deepsensor.plot.receptive_field"]], "set_gpu_default_device() (in module deepsensor.train.train)": [[30, "deepsensor.train.train.set_gpu_default_device"]], "train_epoch() (in module deepsensor.train.train)": [[30, "deepsensor.train.train.train_epoch"]]}}) \ No newline at end of file