diff --git a/.github/workflows/pyleco_CI.yml b/.github/workflows/pyleco_CI.yml index c2cb3f3c3..20ec80adb 100644 --- a/.github/workflows/pyleco_CI.yml +++ b/.github/workflows/pyleco_CI.yml @@ -13,13 +13,14 @@ jobs: run: shell: bash -l {0} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Install pyleco requirements uses: mamba-org/setup-micromamba@v1 with: environment-file: environment.yml + create-args: python=3.10 cache-environment-key: pylatest-ubuntu-latest-mamba-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }} cache-downloads: false - name: Python and Mamba version @@ -27,8 +28,9 @@ jobs: python --version micromamba info - name: Lint with ruff - uses: chartboost/ruff-action@v1 + uses: astral-sh/ruff-action@v1 with: + version: 0.4.10 # ruff-action@v1 is broken in regard to ruff 0.5.0 args: --extend-select=E9,F63,F7,F82 --show-source - uses: ammaraskar/sphinx-problem-matcher@master - name: Generate docs @@ -43,6 +45,7 @@ jobs: run: | echo "::add-matcher::.github/sphinx.json" make doctest SPHINXOPTS="-W --keep-going" + type_checking: name: Static Type Checking runs-on: "ubuntu-latest" @@ -50,7 +53,7 @@ jobs: run: shell: bash -l {0} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Install pyleco requirements @@ -65,14 +68,15 @@ jobs: run: pip install mypy - name: Run mypy run: mypy . 
+ test_coverage: - name: Code Coverage + name: Code Coverage on codecov runs-on: "ubuntu-latest" defaults: run: shell: bash -l {0} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Install pyleco requirements @@ -87,49 +91,17 @@ jobs: # If the pytest problem matcher stops working because of bad paths, do an editable install run: pip install -e .[dev] # editable for covtest - name: Test for Coverage - run: pytest --junitxml=pytest.xml --cov-report=term-missing:skip-covered --cov=pyleco | tee pytest-coverage.txt - - name: Pytest Coverage Comment - id: coverageComment - uses: MishaKav/pytest-coverage-comment@main + run: pytest --cov=pyleco --cov-report=xml + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 with: - pytest-coverage-path: ./pytest-coverage.txt - title: Coverage Report - badge-title: PyLECO Coverage - hide-badge: false - hide-report: false - create-new-comment: false - hide-comment: false - report-only-changed-files: false - remove-link-from-badge: false - unique-id-for-comment: python3 - junitxml-path: ./pytest.xml - junitxml-title: Coverage Summary - - name: Check output coverage - run: | - echo "Coverage Percantage - ${{ steps.coverageComment.outputs.coverage }}" - echo "Coverage Color - ${{ steps.coverageComment.outputs.color }}" - echo "Coverage Html - ${{ steps.coverageComment.outputs.coverageHtml }}" - - echo "Coverage Warnings - ${{ steps.coverageComment.outputs.warnings }}" - - echo "Coverage Errors - ${{ steps.coverageComment.outputs.errors }}" - echo "Coverage Failures - ${{ steps.coverageComment.outputs.failures }}" - echo "Coverage Skipped - ${{ steps.coverageComment.outputs.skipped }}" - echo "Coverage Tests - ${{ steps.coverageComment.outputs.tests }}" - echo "Coverage Time - ${{ steps.coverageComment.outputs.time }}" + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true + files: ./coverage.xml,!./cache + flags: unittests + name: codecov-umbrella + verbose: true - 
echo "Not Success Test Info - ${{ steps.coverageComment.outputs.notSuccessTestInfo }}" - - - name: Create the Badge - uses: schneegans/dynamic-badges-action@v1.6.0 - with: - auth: ${{ secrets.pyleco_coverage_gist_secret }} - gistID: 7a8a7b874b62ed803eb56ca04830bede - filename: pyleco-coverage.json - label: Coverage Report - message: ${{ steps.coverageComment.outputs.coverage }} - color: ${{ steps.coverageComment.outputs.color }} - namedLogo: python test: name: Python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -140,9 +112,9 @@ jobs: fail-fast: true matrix: os: ["ubuntu-latest", "macos-latest", "windows-latest"] - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Install pyleco requirements @@ -159,13 +131,18 @@ jobs: run: pip install .[dev] - name: Pyleco version run: python -c "import pyleco;print(pyleco.__version__)" - - name: Run pytest with xvfb + - name: Run pytest under Linux with xvfb if: runner.os == 'Linux' run: | echo "::add-matcher::.github/pytest.json" xvfb-run -a pytest - - name: Run pytest - if: runner.os != 'Linux' + - name: Run pytest under Windows + if: runner.os == 'Windows' run: | echo "::add-matcher::.github/pytest.json" pytest + - name: Run pytest under Mac without acceptance tests + if: runner.os == 'macOS' + run: | + echo "::add-matcher::.github/pytest.json" + pytest --ignore=tests/acceptance_tests diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml new file mode 100644 index 000000000..409abc9cf --- /dev/null +++ b/.github/workflows/python-publish.yml @@ -0,0 +1,31 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries + +name: Build and 
Upload Python Package + +on: + release: + types: [published] + +jobs: + deploy: + + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing + + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@v1.8.11 diff --git a/.gitignore b/.gitignore index b6e47617d..5ae2e731b 100644 --- a/.gitignore +++ b/.gitignore @@ -114,6 +114,9 @@ venv.bak/ .spyderproject .spyproject +# VS code project settings +.vscode/* + # Rope project settings .ropeproject diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..537ed1402 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,180 @@ +# CHANGELOG + +## [Unreleased] + +### Changed + +* Openrpc is optional (available via `openrpc` option) for Python 3.13 onwards. 
+ +### Added + +* Support for Python 3.13 + + +## [0.4.0] 2024-06-19 + +_Binary data handling facilitated with utility functions._ + +### Changed +* Proxy_server stops if binding failed ([#86](https://github.com/pymeasure/pyleco/pull/86)) + +### Added + +* Add convenience functions for using additional frames for binary payload ([#82](https://github.com/pymeasure/pyleco/pull/82)) +* Improve getting_started and readme documentation ([#80](https://github.com/pymeasure/pyleco/pull/80)) +* Add RELEASE.md ([#79](https://github.com/pymeasure/pyleco/pull/79)) +* Add locking actor ([#84](https://github.com/pymeasure/pyleco/pull/84)) + +### Fixed + +* Fix listener documentation ([#88](https://github.com/pymeasure/pyleco/pull/88)) + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.3.2...v0.4.0 + + +## [0.3.2] 2024-05-07 + +### Fixed + +* Fix dependency on outdated UUIDv7 generating library to `uuid6` package ([#75](https://github.com/pymeasure/pyleco/pull/75)) +* Fix codecov CI ([#73](https://github.com/pymeasure/pyleco/pull/73)) + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.3.1...v0.3.2 + + +## [0.3.1] 2024-04-12 + +### Fixed + +* Fix `Coordinator` to not use period in hostname as namespace ([#69](https://github.com/pymeasure/pyleco/pull/69)) +* Fix `DataLogger` timer ([#70](https://github.com/pymeasure/pyleco/pull/70)) + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.3.0...v0.3.1 + + +## [0.3.0] 2024-03-13 + +_Use self defined objects instead of jsonrpc2-objects and jsonrpc2-pyclient._ + +### Changed + +- Rename `cls` parameter to `device_class` in `Actor` and `TransparentDirector`. 
+- Substitute `jsonrpc2-objects` and `jsonrpc2-pyclient` by self written objects ([#65](https://github.com/pymeasure/pyleco/pull/65)) +- Move error definitions from `pyleco.errors` to `pyleco.json_utils.errors` ([#63](https://github.com/pymeasure/pyleco/pull/63)) +- Move `pyleco.errors.CommunicationError` to `pyleco.json_utils.errors` ([#63](https://github.com/pymeasure/pyleco/pull/63)) +- Deprecate `generate_error_with_data` in favor of `DataError.from_error` class method ([#63](https://github.com/pymeasure/pyleco/pull/63)) +- Python requirement lowered to Python 3.8 ([#64](https://github.com/pymeasure/pyleco/pull/64)) +- Rework the message buffer in the base communicator and harmonize with pipe handler's buffer ([#66](https://github.com/pymeasure/pyleco/pull/66)) +- Bump CI actions versions for node.js 20 ([#62](https://github.com/pymeasure/pyleco/pull/62)) + +### Added + +- Add `__future__.annotations` to all files, which need it for annotations for Python 3.7/3.8. +- Add self written `RPCServer` as alternative to openrpc package. + +### Deprecated + +- Deprecate `pyleco.errors` in favor of `json_utils.errors` and `json_utils.json_objects`. +- Deprecate to use `CommunicatorPipe.buffer`, use `message_buffer` instead. 
+ +### Fixed + +- Fix Listener's communicator did not know when listening stopped ([#67](https://github.com/pymeasure/pyleco/pull/67)) + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.2.2...v0.3.0 + + +## [0.2.2] - 2024-02-14 + +### Fixed + +- Fix Communicator to distinguish correctly different json rpc messages ([#57](https://github.com/pymeasure/pyleco/issues/57)) +- Fix MessageHandler not distinguish correctly batch requests ([#56](https://github.com/pymeasure/pyleco/issues/56)) +- Bump setup-python action version to v5 + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.2.1...v0.2.2 + + +## [0.2.1] - 2024-02-13 + +### Fixed + +- Fix BaseCommunicator to hand over message, if it is an error message (#55) + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.2.0...v0.2.1 + + +## [0.2.0] - 2024-02-13 + +_Several deprecated parts are removed and inner workings are changed._ + +### Changed + +- **Breaking:** change `MessageHandler.handle_commands` to `handle_message` ([#44](https://github.com/pymeasure/pyleco/pull/44)) +- **Breaking:** change PipeHandler inner workings of handling messages ([#44](https://github.com/pymeasure/pyleco/pull/44)) +- Add `BaseCommunicator` as a base class for Communicator and MessageHandler ([#48](https://github.com/pymeasure/pyleco/pull/48)) +- Refactor the Coordinator `handle_commands` ([#50](https://github.com/pymeasure/pyleco/pull/50)) + +### Added + +- Add the `Coordinator`, the `proxy_server`, and the `starter` as scripts to the command line ([#53](https://github.com/pymeasure/pyleco/pull/53)) + +### Removed + +- **Breaking:** remove `Coordinator.ask_raw` (#48) +- **Breaking:** remove legacy subscription messages from extended message handler (#48) + +### Fixed + +- Fix DataLogger to start a timer, even if not specified explicitly ([#51](https://github.com/pymeasure/pyleco/pull/51)) + +**Full Changelog**: https://github.com/pymeasure/pyleco/compare/v0.1.0...v0.2.0 + + +## 
[0.1.0] - 2024-02-01 + +### Changed + +- Change message and protocols according to LECO change ([`9d74731da`](https://github.com/pymeasure/pyleco/commit/9d74731da06d147b1773f1f411bd943a36b4a83d)) (@BenediktBurger) +- Change Coordinator's `fname` to `full_name` ([`f3564c0`](https://github.com/pymeasure/pyleco/commit/f3564c08f04ed63bbab5b1100560e7b50239d83c)) (@BenediktBurger) + +### Added + +- Add compatibility with Python 3.9 ([`18abb87`](https://github.com/pymeasure/pyleco/commit/18abb87fea259f9e87411d88cca92a886bbd62b4)) (@BenediktBurger) +- Add compatibility with Python 3.12 ([#22](https://github.com/pymeasure/pyleco/pull/22)) (@BenediktBurger) +- Add more tests. +- Add more functionality to internal protocol and test suite ([`42e107c5cb90`](https://github.com/pymeasure/pyleco/commit/42e107c5cb90704dbb99ef1c5a50be739f3acf85)) (@BenediktBurger) +- Add Communicator functionality to the MessageHandler by distinguishing messages. (`9b0cc42`, `45913a5`, `97d902b`) (@BenediktBurger) +- Add CI for testing ([#22](https://github.com/pymeasure/pyleco/pull/22), [#7](https://github.com/pymeasure/pyleco/pull/7), #34, #29, #26) (@BenediktBurger) +- Add codecov code coverage calculation to CI ([#32](https://github.com/pymeasure/pyleco/pull/32)) (@BenediktBurger) +- Add `GETTING_STARTED.md` with a tutorial ([`000245b`](https://github.com/pymeasure/pyleco/commit/000245b7d693336a36b3f8bb5b0d0fe13a1bd6a7)) ([#24](https://github.com/pymeasure/pyleco/pull/24)) (@BenediktBurger, @bklebel) + +### Removed + +- **Breaking:** remove deprecated `Publisher` (use `DataPublisher` instead); move `Republisher` and `ExtendedPublisher` to pyleco-extras package ([#33](https://github.com/pymeasure/pyleco/pull/33)) (@BenediktBurger) +- **Breaking:** remove deprecated `call_method_rpc` and `call_method_rpc_async` + +### Fixed + +- Fix typos, also in variable / method names + + +## [alpha-0.0.1] - 2023-12-12 + +_Initial alpha version, complies with [LECO protocol 
alpha-0.0.1](https://github.com/pymeasure/leco-protocol/releases/tag/alpha-0.0.1)_ + +### New Contributors + +@BenediktBurger, @bilderbuchi, @bklebel + + +[unreleased]: https://github.com/pymeasure/pyleco/compare/v0.4.0...HEAD +[0.4.0]: https://github.com/pymeasure/pyleco/releases/tag/v0.4.0 +[0.3.2]: https://github.com/pymeasure/pyleco/releases/tag/v0.3.2 +[0.3.1]: https://github.com/pymeasure/pyleco/releases/tag/v0.3.1 +[0.3.0]: https://github.com/pymeasure/pyleco/releases/tag/v0.3.0 +[0.2.2]: https://github.com/pymeasure/pyleco/releases/tag/v0.2.2 +[0.2.1]: https://github.com/pymeasure/pyleco/releases/tag/v0.2.1 +[0.2.0]: https://github.com/pymeasure/pyleco/releases/tag/v0.2.0 +[0.1.0]: https://github.com/pymeasure/pyleco/releases/tag/v0.1.0 +[alpha-0.0.1]: https://github.com/pymeasure/pyleco/releases/tag/alpha-0.0.1 diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 000000000..5efa56919 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,26 @@ +cff-version: 1.2.0 +title: "PyLECO" +message: >- + If you use this software, please cite it using the + metadata from this file. +type: software +authors: + - family-names: Burger + given-names: Benedikt + orcid: "https://orcid.org/0000-0003-3302-3674" + - family-names: Klebel-Knobloch + given-names: Benjamin + - family-names: Buchner + given-names: Christoph +identifiers: + - type: doi + value: 10.5281/zenodo.10837366 +doi: 10.5281/zenodo.10837366 +repository-code: 'https://github.com/pymeasure/pyleco' +abstract: >- + Python implementation of the Laboratory Experiment COntrol + (LECO) protocol. +publisher: + - name: Zenodo +version: 0.4.0 +date-released: 2024-06-19 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..c2ad872c5 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,60 @@ +# How to Contribute to PyLECO + +You are welcome to contribute to PyLECO. 
+ +There are many ways to contribute: +- Share your experience with PyLECO, +- ask questions, +- suggest improvements, +- report bugs, +- improve the documentation, +- fix bugs, +- add new features +- ... + + +## Suggestions / Questions / Bugs + +If you have a suggestion, a question, or found a bug, please open an `issue` on our [github issues](https://github.com/pymeasure/pyleco/issues) page or on the discussions page. + +### Bug reports + +For technical suggestions, questions, or bug reports, please try to be as descriptive as possible in order to allow others to help you better. + +For a bug report, the following information is a good start: +- Which PyLECO version do you use? installed from PyPI / conda-forge or from the current main branch from github? +- Python version +- Operating system + +If you are able, you can try to fix the bug and open a pull request, see below. + +### Show and Tell + +You are especially welcome to share how you use PyLECO in the [show-and-tell discussions](https://github.com/pymeasure/pyleco/discussions/categories/show-and-tell). + + +## New Features / Bug Fixes + +If you want to add a new feature, please open an issue first in order to discuss the feature and ideas of its implementation. + +### Development + +Once the general idea is tied down, you can open a pull request (towards the `main` branch) with your code. +We encourage you to open a pull request early on, to incorporate review comments from the beginning. + +For development, we recommend _test driven development_, that is writing tests and the features at the same time supporting each other. + +For example for a bug fix: +1. Write a test for the expected behaviour, which will fail (as there is a bug), +2. fix the code, such that the bug is fixed and the test succeeds, +3. refactor the code. + +### Test Framework + +We use pytest as our test framework. +All tests are in the `tests` folder. 
+Each module has its own file with unit tests in a similarly named structure, for example `pyleco/core/message.py` has the `tests/core/test_message.py` test file. + +There is a special folder, `tests/acceptance_tests` with acceptance tests, that are tests, which test several modules working together. + +The module [`pymeasure.test`](pyleco/test.py) offers fake classes, e.g. a fake zmq Socket, in order to facilitate writing tests. diff --git a/GETTING_STARTED.md b/GETTING_STARTED.md new file mode 100644 index 000000000..96c94d48c --- /dev/null +++ b/GETTING_STARTED.md @@ -0,0 +1,456 @@ +# Getting started + +This tutorial gives an overview how to start a LECO setup in Python. + +## Installation + +Install PyLECO as described in the README. + +Eventually, you will be able to install PyLECO via pip or conda: +```pip install pyleco``` + +## Setup the Infrastructure + +### The Coordinators + +The core of the infrastructure are communication servers, called Coordinators. +As LECO consists in two parts, the control protocol and the data protocol, there are two servers: + +1. The _Coordinator_ in [`coordinator.py`](pyleco/coordinators/coordinator.py) is the server of the control protocol. +2. [`proxy_server.py`](pyleco/coordinators/proxy_server.py) contains the server of the data protocol. + +In order to start these Coordinators, execute `coordinator` and `proxy_server` in a terminal. +Alternatively, execute the corresponding files with python: +for example, change directory in the folder of this getting started file and execute `python3 pyleco/coordinators/coordinator.py` under linux or `py pyleco/coordinators/coordinator.py` under Windows with the Windows Launcher installed. + +If you need settings which are different from the defaults, you can use command line parameters. +The command line parameter `-h` or `--help` gives an overview of all available parameters. 
+For example `python3 pyleco/coordinators/coordinator.py -h` gives the information, while `python3 pyleco/coordinators/coordinator.py --port 12345` makes the Coordinator listen on tcp port 12345 instead of the default one. + +### The Starter + +LECO allows to have many small parts working together instead of one monolithic program. +For convenience, there is an additional server, the [`Starter`](pyleco/management/starter.py), which can be used to start a bigger number of small parts all at once. +With the starter, we can sidestep having to start all these small parts individually in their own terminal window. + +The starter scans a directory (given as argument) for python files. +It will start, if told to do so, the method `task` of a given file name in a separate thread. +That allows to specify several different tasks, for example each one controlling one measurement instrument, and to start them by sending a command to the starter. +How this works exactly, is described below. + +In order to start the starter itself, execute `starter --directory ~/tasks` in a terminal with the tasks being in the subfolder `tasks` of the home directory. +Alternatively, execute its file with the path to the directory, for example `python3 pyleco/management/starter.py --directory ~/tasks`. + +#### Define a Task File + +The example file [`pymeasure_actor.py`](examples/pymeasure_actor.py) contains an example, how a task file in that directory could look like: + +The first docstring of the file should be the description of that task. +The starter offers that docstring, if you query it for its available tasks. + +You have to specify a `task` method with exactly one parameter, the `stop_event`. +The starter will give a `threading.Event` instance as an argument in order to tell the task when to stop. +You could write any `task` method, which checks regularly the `stop_event` and stops, if that event is set. +PyLECO offers helper methods to do most of the work for controlling an instrument. 
+ +If you have a python class you want to control remotely, for example a [pymeasure](https://pymeasure.readthedocs.io) instrument driver for a fiber laser called `YAR`, you can use the [`Actor`](pyleco/actors/actor.py). + +In this case, this will be your task file `YAR_controller.py`: + +```python +"""Example scheme for an Actor for pymeasure instruments. 'fiberAmp'""" + +from pyleco.actors.actor import Actor +from pymeasure.instruments.ipgphotonics import YAR + +# Parameters +adapter = "ASRL5"  # VISA resource string, for example serial port COM5 + +def task(stop_event) -> None: +    """The task which is run by the starter.""" +    with Actor( +        name="fiberAmp",  # you can access it under this name +        device_class=YAR,  # the class to instantiate later on +    ) as actor: +        actor.connect(adapter)  # create an instance `actor.device = YAR(adapter)` + +        actor.listen(stop_event=stop_event)  # listen for commands +``` + +The `Actor` will listen under the name `fiberAmp` for incoming commands and handle them until the `stop_event` is set. +For example you can get or set properties of the `YAR` instance, or you can call methods. + +#### Start / Stop a Task + +Now we have our three servers (Coordinator, proxy_server, Starter) up and running, but the task is not yet started. +In order to start a task, we have to tell the Starter to do so. +The easiest way is to use the [`StarterDirector`](pyleco/directors/starter_director.py) (found in the `directors` directory) in a python console or script: +```python +from pyleco.directors.starter_director import StarterDirector +director = StarterDirector(actor="starter") +director.start_tasks(["YAR_controller"]) +``` + +The `actor` parameter tells the Director the name of the `Starter` instance which defaults to `"starter"`. +As the task file is named `YAR_controller.py`, the task name is `"YAR_controller"`. +You can give a list of tasks to start. 
+ +The starter director also has methods to stop a task, give a list of available tasks, and to give a list of the state of the known tasks (running, stopped, crashed). + + +## Control an Experiment + +### Control the Instrument: The Director + +Now we have our servers running and also the task controlling our fiber amplifier is running. +In order to remotely control the fiber amplifier, we have to send messages to it. +Again we have a director to direct the actions of the fiber amplifier. + +This time, the director is a more generic one, the [`TransparentDirector`](pyleco/directors/transparent_director.py) (also in the directors directory). +If you read or write any property of the TransparentDirector's `device`, it will read or write to the remotely controlled instrument. +For example +```python +from pyleco.directors.transparent_director import TransparentDirector + +director = TransparentDirector(actor="fiberAmp") + +if not director.device.emission_enabled: +    director.device.emission_enabled = True + +``` +will read whether the emission of the fiber amplifier is enabled; if not, it will be enabled. + +Behind the scenes, the transparent director sends a request to the Actor named `"fiberAmp"` (remember, that is the name we gave to the actor in the task file) to read the `emission_enabled` property of the `YAR` instance. +The Actor will read the `device.emission_enabled` property and return it to the director. + +Alternatively, you could get/set the parameters manually. +The following lines are equivalent to the previous code: +```python +if not director.get_parameters(parameters=["emission_enabled"])["emission_enabled"]: +    director.set_parameters({"emission_enabled": True}) +``` +The get/set parameters methods allow to get/set more than one parameter in one message. + +If you want to call the method `clear` of the `YAR` instance, you can use `director.call_action(action="clear")`. 
+Any positional or keyword arguments you give to the `call_action` method will be given to the `clear` method. +For example `director.call_action(action="set_power", power=5)` will cause the actor to call `device.set_power(power=5)`. + + +### Different Computers + +LECO is not limited to a single computer, you can use it within a wider network. + +#### Single Coordinator Setup + +Each Component (any program participating in LECO) needs to connect to a Coordinator. +By default, the PyLECO components look for a Coordinator on the same computer (localhost). +If the Coordinator is on another computer or listens on another port, you can specify these values as arguments. +For example: `Actor(host="myserver.com", port=12345)` will connect to the Coordinator on a computer available (via DNS) as `myserver.com` which listens on the port `12345`. +You can also specify the IP address instead of URLs. + +#### Multi Coordinator Setup + +##### Setup of the Coordinators + +You can have more than one Coordinator, even on the same computer. +Each Coordinator needs its own combination of host name and port number (a single socket). +That means, they have at least to reside on different computers or have different port numbers. + +You can tell a Coordinator about other Coordinators, either by specifying the `--coordinators` command line argument (a comma separated list of coordinator addresses) or by using the `CoordinatorDirector`'s `add_nodes({namespace: address})` command. +If you tell one Coordinator about a second one, it will connect to all other Coordinators, to which the second one is connected. +In this way, the network is established automatically, and Components connected to a single Coordinator, can get access to all other Components as soon as this single Coordinator is connected to the wider network. + +##### The namespace + +Each Coordinator establishes a _Namespace_, which defaults to the name of the computer it resides on. 
+Each Component connected to that Coordinator belongs to that namespace. +The _full name_ of the Component consists of the namespace and its own name, joined by a period. +For example, let's say the Coordinator of our setup in the beginning has the namespace `N1`, then the fiber amplifier would be available under the name `N1.fiberAmp`. +If you do not specify the namespace, LECO assumes that the recipient's namespace is the same as the sender's namespace, similar to the area code of a phone number. + +You can send from any Component connected to any Coordinator (which has to be connected to `N1` Coordinator or be `N1` Coordinator itself) a message to `N1.fiberAmp` and it will arrive there. +The response will always return as well, as you must specify the sender's namespace. + +All Components (rectangular elements) on different computers can communicate with each other in the following graph. +Note that all Components connected to a certain Coordinator (circle) share the same _Namespace_, e.g. `N1`. + +```mermaid +flowchart TD + subgraph PC1[PC 1] + C1(("N1.COORDINATOR + (default port)")) + ca[N1.ComponentA]<-->C1 + CX(("NX.COORDINATOR + (different port)")) + cxz[NX.ComponentZ] + cxz <--> CX + end + + subgraph PC2[PC 2] + C2((N2.COORDINATOR)) + cb[N2.ComponentB]<-->C2 + c2a[N2.ComponentA]<-->C2 + c1d[N1.ComponentD] + end + + subgraph PC3[PC 3] + cc[N1.ComponentC] + end + + C1 <--> C2 + cc <--> C1 + C1 <--> CX + C2 <--> CX + + c1d <--> C1 +``` + + +### Collect Data + +If you're doing an experiment, you typically want to collect data as well. + +You can publish data via the data protocol. +As a helper class, you can use the [`DataPublisher`](pyleco/utils/data_publisher.py): +```python +from pyleco.utils.data_publisher import DataPublisher + +publisher = DataPublisher(full_name="N1.abc") +publisher.send_data("def") +``` +That will publish the data `"def"` from the sender `"N1.abc"`. + +Anyone connected to the same proxy_server can listen to that published data. 
+You have to subscribe to the sender `"N1.abc"` first, though. + +The [`DataLogger`](pyleco/management/data_logger.py) (in the `management` directory) collects these data snippets (if they are in the form of dictionaries: `{variable: value}` with a variable name and an associated value) and creates datapoints. +Afterwards you can save the collected datapoints. + + +## Include LECO in your own Programs + +### LECO Basics + +Some basic information about LECO and its python implementation. + +#### JSON-RPC and Remote Procedure Calls + +Default messages are based on Remote Procedure Calls (RPC) according to the [JSON-RPC](https://www.jsonrpc.org/specification) protocol. +Basically you request to execute a procedure call on a remote station and receive the response of that call. + +#### Communicator + +There are several tools in the `utils` directory, which offer some convenience methods to send RPC requests via the LECO protocol and to decipher the responses. +For interchangeability, they all follow a consistent API defined in the `CommunicatorProtocol` and can be used as an argument for any `communicator` parameter. + +For example `communicator.ask_rpc(receiver="N1.fiberAmp", method="get_parameters", parameters=["emission_enabled"])` would call the `get_parameters` method of the actor called `"N1.fiberAmp"` with the keyword argument `parameters=["emission_enabled"]`. +The result of that method is the content, for example `{"emission_enabled": True}`. +This example is the spelled out code behind the call of the director example shown above. + +The simplest util implementing the `CommunicatorProtocol` is the [`Communicator`](pyleco/utils/communicator.py) (in `utils` directory). +It allows to send messages and read the answer. +However, it does not listen continuously for incoming messages. +It is great for scripts as it does not require additional threads. +_Directors_ create such a `Communicator`, unless they are given one. 
+ + +### Daemon Type + +For a program, which runs happily in the background listening for commands and executing them, you can base your code on the [`MessageHandler`](pyleco/utils/message_handler.py) (in `utils` directory). +The MessageHandler will listen to incoming messages and handle them in a continuous loop. +The `listen` method has three parts: + +1. The method `_listen_setup`, where you can specify, what to do at the start of listening, +1. the method `_listen_loop_element`, which is executed in the loop until the `stop_event` is set. + Typically, it listens for incoming messages and handles them, +1. the method `_listen_close`, which finishes after having been told to stop. + + +If you want to make a method available for RPC calls, you can use the `register_rpc_method` method. +That allows any Component to use that method via `ask_rpc`. +For example `register_rpc_method(do_something)` would register the method `do_something`, such that someone else could do `ask_rpc(method="do_something")` calling that method. + +The message handler (and its subclasses) have to be in their listening loop to do their work: +```python +from pyleco.utils.message_handler import MessageHandler + +message_handler = MessageHandler("my_name") +message_handler.listen() # infinity loop +``` + +The [`ExtendedMessageHandler`](pyleco/utils/extended_message_handler.py) is a subclass, which can subscribe to the data protocol and act on received data messages. + + +### In an Additional Thread + +Sometimes, you want to have continuous LECO communication, but do not want to block the thread, for example in a GUI. 
+
+The [`Listener`](pyleco/utils/listener.py) (in `utils` directory) starts a special form of the `MessageHandler` in another thread and offers a communicator:
+
+```python
+from pyleco.utils.listener import Listener
+from pyleco.directors.transparent_director import TransparentDirector
+
+listener = Listener("my_name")
+listener.start_listen()
+communicator = listener.get_communicator()
+
+listener.register_rpc_method(my_method)
+
+director = TransparentDirector(communicator=communicator)
+```
+
+In this example, the message handler will offer the `my_method` method via RPC (via the listener's `register_rpc_method`).
+However, that method will be executed in the message handler thread!
+You can send messages via the message handler by using the listener's `communicator`.
+Here the director uses this communicator instead of creating its own one with its own name.
+
+The following graph shows how the `Listener` is used in a (multi-threaded) application.
+Rounded elements run in a continuous loop.
+ +```mermaid +flowchart TD + Proxy(((proxy_server))) + C((COORDINATOR)) + + subgraph Application + subgraph MainThread[Main Thread] + main([main])-.->|creates|Listener + main -.->|calls|sl + + subgraph Listener + sl[[start_listen]]-.-> + gc[[get_communicator]] + rrm[[register_rpc_method]] + end + + gc-.->|generates|LC[Listener Communicator] + main <-->LC + main <-->Director <-->LC + main -.->|calls|rrm + end + sl-.->|"starts"|ListenerThread + main-.->|starts|AnotherThread + rrm-.->|registers method|ListenerEMH + + subgraph ListenerThread[Listener Thread] + ListenerEMH([ExtendedMessageHandler]) + end + + subgraph AnotherThread[Another Thread] + loop([loop])<-->Director6 + loop<-->LC2 + Director6[Director]<-->LC2[Listener Communicator] + end + loop-.->|calls|gc + LC2<-->ListenerEMH + gc-.->|generates|LC2 + + LC<-->ListenerEMH + end + + ListenerEMH <---> C + Proxy ==> ListenerEMH + +``` +The `main` method creates a `Listener` and calls its `start_listen` method, which creates a new thread (`Listener Thread`) with an `ExtendedMessageHandler`. +It can use the `Listener Communicator` (generated via `get_communicator`) to communicate to the LECO network, either directly or via one or several `Director`s. +Similarly, another thread can use (its own) `Listener Communicator` to communicate with the LECO network. + + + +## Graphical overview + +Here a graphical overview over a single computer experimental setup. +Solid lines indicate data flow, dashed lines actions. +Rounded fields elements run in a continuous loop. +Circles are Coordinators. +Note that the control protocol (with the _COORDINATOR_) is symmetric, while the data protocol (_proxy_server_, bold lines) is one way only. 
+ +```mermaid +flowchart TD + COM2 <--> C(("COORDINATOR + (control protocol)")) + + PUB[DataPublisher] ==> Proxy + Proxy((("proxy_server + (data protocol)"))) ==> SUB[Subscriber] + Proxy ==>SUBD + + subgraph DataLogger + SUBD([Subscriber])-->STO[(Storage)] + end + + + subgraph Jupyter Notebook + Script <--> Director + Director -.->|creates|COM2[Communicator] + Director <-->COM2 + Script{{Script}} <--> Director2 <--> COM2 + COM2 + subgraph Director2 + end + end + + + subgraph "`Actor`" + MessageHandler([MessageHandler]) <--> InstrumentDriver[Instrument Driver] + MessageHandler --> PUB2[DataPublisher] + end + C <--> MessageHandler + InstrumentDriver <--> Instrument{{Measurement Instrument}} + PUB2 ==> Proxy + + + subgraph GUI-App + subgraph Main Thread + Listener-.->|generates|ListenerCOM[Listener Communicator] + GUI{{GUI}} + Director5 + end + + subgraph ListenerThread[Listener Thread] + ListenerEMH([ExtendedMessageHandler]) + end + + Listener-.->|starts|ListenerThread + ListenerCOM<-->ListenerEMH + GUI<-->ListenerCOM + Director5[Director]<-->ListenerCOM + GUI<-->Director5 + ListenerEMH-->|notifies|GUI + GUI-.->|starts|Listener + end + ListenerEMH <---> C + Proxy ==> ListenerEMH + + subgraph starter_p[Starter process] + starter([Starter]) + t1([task 1]) + t2([task 2]) + t3([task 3]) + starter -.->|start/stop thread|t1 + starter -.->|start/stop thread|t2 + starter -.->|start/stop thread|t3 + end + starter <-->C + t1 <--> C + t2 <--> C + t2 ==> Proxy + t3 ==> Proxy + +``` +The `Actor` contains a `MessageHandler` handling incoming messages in order to control a hardware measurement instrument via a _Driver_. It also has a `DataPublisher` to publish measurement data regularly. + +A script uses _Directors_ to send messages via a `Communicator` (generated by the first _Director_) to that measurement instrument. + +The `Starter` controls several _tasks_ in their own threads. +These tasks may communicate via the LECO protocol individually. 
+They can be, for example, instrument actors. + +The GUI-App has a `Listener` in its main thread, which starts an `ExtendedMessageHandler` in another thread and offers its _Listener Communicator_ in order to communicate. +The GUI can then communicate via that Communicator to the network, either directly or using _Directors_. +It can send commands to the measurement instrument via the _Actor_. +It can also subscribe to the data published by the _Actor_. + +The `DataLogger` collects the data published via the data protocol (`proxy_server`). diff --git a/LICENSE b/LICENSE index a926372d0..b167ec601 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 PyMeasure Developers +Copyright (c) 2023-2024 PyLECO Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 5a78a8941..bea949af7 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,130 @@ # PyLECO -Python reference implementation of the Laboratory Experiment COntrol (LECO) protocol (https://github.com/pymeasure/leco-protocol). -The [main branch](https://github.com/pymeasure/pyleco/tree/main) contains reviewed code, which does not yet contain all necessary modules and classes. -The most recent development is in the [development branch -](https://github.com/pymeasure/pyleco/tree/development), which might contain commits with PyLECO being in a broken state. -The [stable-development branch](https://github.com/pymeasure/pyleco/tree/stable-development) contains a stable, working version of PyLECO. -Follow that branch to have always working code (as far as possible), but following the ongoing development. 
+![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pyleco) +[![codecov](https://codecov.io/gh/pymeasure/pyleco/graph/badge.svg?token=9OB3GWDLRB)](https://codecov.io/gh/pymeasure/pyleco) +[![pypi release](https://img.shields.io/pypi/v/pyleco.svg)](https://pypi.org/project/pyleco/) +[![conda-forge release](https://anaconda.org/conda-forge/pyleco/badges/version.svg)](https://anaconda.org/conda-forge/pyleco) +[![DOI](https://zenodo.org/badge/594982645.svg)](https://zenodo.org/doi/10.5281/zenodo.10837366) +[![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org) -Note: LECO is still under development, such that the code and API might change. +Python reference implementation of the [Laboratory Experiment COntrol (LECO) protocol](https://github.com/pymeasure/leco-protocol). -![badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/BenediktBurger/7a8a7b874b62ed803eb56ca04830bede/raw/pyleco-coverage.json) +**Note**: LECO is still under development, such that the code and API might change. +The LECO protocol branch [pyleco-state](https://github.com/pymeasure/leco-protocol/tree/pyleco-state) contains the assumptions used in this project, which are not yet accepted into the LECO main branch. +See this [documentation](https://leco-laboratory-experiment-control-protocol--69.org.readthedocs.build/en/69/) for the LECO definitions including these assumptions. +These things might change, if LECO defines them differently. +For a tutorial on how to get started, see [GETTING_STARTED.md](https://github.com/pymeasure/pyleco/blob/main/GETTING_STARTED.md). + +You are welcome to contribute, for more information see [CONTRIBUTING.md](https://github.com/pymeasure/pyleco/blob/main/CONTRIBUTING.md). + + +## Quick Start + +1. Install Python, +2. install PyLECO with `pip install pyleco` or `conda install conda-forge::pyleco`, +3. import the package `pyleco` in your python scripts, +4. and use it as desired. 
+
+
+## LECO Overview
+
+### Network Topology
+
+PyLECO is an implementation of LECO, for the full protocol specifications please visit https://github.com/pymeasure/leco-protocol.
+LECO offers a protocol for data exchange, for example for laboratory experimental control.
+
+There exist two different communication protocols in LECO.
+1. The control protocol allows to exchange messages between any two _Components_ in a LECO network, which is useful for controlling devices.
+2. The data protocol is a broadcasting protocol to send information to all those, who want to receive it, which is useful for regular measurement data or for log entries.
+
+A LECO network needs at least one _Coordinator_ (server), which routes the messages among the connected Components.
+
+Each _Component_ has a name unique in the network.
+This name consists of the name of the _Coordinator_ they are connected to and their own name.
+For example `N1.component1` is the full name of `component1` connected to the _Coordinator_ of the _Namespace_ `N1`.
+That _Coordinator_ itself is called `N1.COORDINATOR`, as _Coordinators_ are always called `COORDINATOR`.
+
+### Remote Procedure Calls
+
+The default messaging content of the control protocol are _remote procedure calls_ (RPC) according to [JSON-RPC](https://www.jsonrpc.org/specification).
+RPC means that you execute a method (or procedure) on a remote _Component_.
+For example, you have an Actor, which is a Component controlling a measurement instrument.
+In order to set the output of that measurement instrument, you want to call the `set_output` method of that instrument.
+For that purpose, you send a message which encodes exactly that (via jsonrpc): the method to call and the parameters of that method.
+ + +## Usage of the Control Protocol + +### Minimum Setup + +For a minimum setup, you need: +* a _Coordinator_ (just execute `coordinator` in your terminal or run the `coordinator.py` file with your Python interpreter), +* one _Component_. + +For example, you can use a `Communicator` instance to send/receive messages via LECO protocol. +The following example requests the list of _Components_ connected currently to the _Coordinator_: + +```python +from pyleco.utils.communicator import Communicator + +c = Communicator(name="TestCommunicator") +connected_components = c.ask_rpc(method="send_local_components") +print(connected_components) +``` + +### Instrument Control + +Let's say you have an instrument with a pymeasure driver `Driver`, which you want to control. + +You need to start (in different threads): +* a _Coordinator_ (as described above), +* an `Actor` instance listening to commands and controlling the instrument: `actor = Actor(name="inst_actor", cls=Driver)`. + For an example see the `pymeasure_actor.py` in the examples folder, +* a `TransparentDirector`: `director=TransparentDirector(actor="inst_actor")`. The `actor` parameter has to match the Actor's `name` parameter. + For an example of a measurement script see `measurement_script.py` in the examples folder. + +If you want to set some property of the instrument (e.g. `instrument.voltage = 5`), you can just use the `director` transparently: `director.device.voltage = 5`. +In the background, the TransparentDirector, which does not have a `device`, sends a message to the Actor to set that parameter. +The Actor in turn sets that parameter of the instrument driver, which in turn will send some command to the device to take an appropriate action (e.g. setting the voltage to 5 V). + +Currently you cannot call methods in a similar, transparent way, without manual intervention. +You can add `RemoteCall` descriptor (in transparent_director module) to the `director` for each method call you want to use. 
+Afterwards you can use these methods transparently similar to the property shown above.
+
+
+## Overview of Offered Packages and Modules
+
+PyLECO offers the following subpackages and modules.
+For more information and for examples see the docstrings of the relevant methods and classes.
+
+* The `core` subpackage contains elements necessary for implementing LECO and for interacting with PyLECO, for example:
+  * The `Message` and `DataMessage` class help to create and interpret LECO messages for the control and broadcasting protocol, respectively.
+  * The `leco_protocols` module contains _Protocol_ classes for the different LECO _Components_, in order to test, whether a _Component_ satisfies the LECO standard for communicating with other programs.
+  * The `internal_protocols` module contains _Protocol_ classes which define the API access to PyLECO.
+* The `utils` subpackage contains modules useful for creating LECO Components.
+  * The `Communicator` can send and receive messages, but neither blocks (just for a short time waiting for an answer) nor requires an extra thread.
+    It satisfies the `CommunicatorProtocol` and is useful in scripts.
+  * The `MessageHandler` also satisfies the `CommunicatorProtocol`, but handles incoming messages in a continuous loop (blocking until stopped).
+    It is useful for creating standalone scripts, like tasks for the _Starter_.
+  * The `ExtendedMessageHandler` adds the capability to subscribe and receive data protocol messages.
+  * The `Listener` offers an interface according to the `CommunicatorProtocol`, but listens at the same time in an extra thread for incoming messages (with an `ExtendedMessageHandler`).
+    It is useful if you want to react to incoming messages (via data or control protocol) and if you want to send messages of your own accord, for example for GUI applications.
+* The `coordinators` subpackage contains the different _Coordinators_.
+ * `Coordinator` is the _Coordinator_ for the control protocol (exchanging messages). + * `proxy_server` is the _Coordinator_ for the data protocol (broadcasting). +* The `actors` subpackage contains _Actor_ classes to control devices. +* The `management` subpackage contains _Components_ useful for experiment management. + * The `Starter` can execute tasks in separate threads. + A task could be an _Actor_ controlling some _Device_. + * The `DataLogger` listens to published data (via the data protocol) and collects them. +* The `directors` subpackage contains Directors, which facilitate controlling actors or management utilities. + * The `Director` is a base _Director_. + It can communicate via any util, which implements the `CommunicatorProtocol`. + * For example the `CoordinatorDirector` has a method for getting _Coordinators_ and _Components_ connected to a _Coordinator_. + * The `TransparentDirector` reads / writes all messages to the remote actor, such that you use the director's `device` as if it were the instrument itself. + +### PyLECO extras + +The [pyleco-extras](https://github.com/BenediktBurger/pyleco-extras) package contains additional modules. +Among them are GUIs controlling the `DataLogger` and the `Starter`. diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 000000000..f84f7ae0f --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,25 @@ +# Release workflow + +## PyPI + +1. Draft a new release on github with a new tag (create on publish) in the style `vMajor.Minor.Patch`, where `Major`, `Minor`, and `Patch` are integers according to semnatic versioning +1. Use the autogenerated changelog to update `CHANGELOG.md` in main +1. Update the version and release date in `CITATION.cff` +1. Commit the aforementioned changes +1. Copy the modified changelog from the file to the github release draft +1. Release the new version on github +1. Approve the release run in the actions tab +1. Verify that the upload to PyPI succeeded + + +## Conda-forge + +1. 
Do the PyPI release first +1. Rebase the `conda-forge-release` branch on the latest release +1. Ensure correct `pyproject.toml`: + * State the version explicitly (e.g. `version = "1.2.3"` instead of `dynamic = ["version"]`) + * Comment out the `openrpc` dependency + * Comment out the section (including header!) `[tool.setuptools_scm]` +1. Create a tag with `vMajor.Minor.Patch-cf` at the head of that branch +1. Update the conda-forge repository with the version and checksum of the tar.gz archive of that tag: + * On windows use `certUtil -hashfile pyleco-1.2.3-cf.tar.gz SHA256` (adjust the version) to generate the SHA256 hash diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..3c86c0a6a --- /dev/null +++ b/codecov.yml @@ -0,0 +1,7 @@ +coverage: + status: + project: + default: + # basic + target: auto + threshold: 0.1% # allows a small drop of coverage for flaky tests diff --git a/environment.yml b/environment.yml index 29eca0a7c..dd786f897 100644 --- a/environment.yml +++ b/environment.yml @@ -2,12 +2,11 @@ name: pyleco channels: - conda-forge dependencies: - - pyzmq=25.1.0 + - pyzmq #=25.1.2 don't pin version for python<3.9 + - uuid6=2024.1.12 - pip # don't pin, to gain newest conda compatibility fixes - - pip: - - jsonrpc2-pyclient==4.3.0 - - openrpc==8.1.0 - - uuid7==0.1.0 + # - pip: + # - openrpc==8.1.0 don't pin presence for python<3.9 # Development dependencies below - pytest=7.2.0 - pytest-cov=4.1.0 diff --git a/examples/measurement_script.py b/examples/measurement_script.py new file mode 100644 index 000000000..e2ab89969 --- /dev/null +++ b/examples/measurement_script.py @@ -0,0 +1,70 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from time import sleep + +from pyleco.directors.transparent_director import TransparentDevice, TransparentDirector, RemoteCall + + +class YARTransparentDevice(TransparentDevice): + """Transparent device with method calls of the ipg photonics YAR from pymeasure.""" + + clear = RemoteCall() + + +def setup_director(actor: str) -> TransparentDirector: + return TransparentDirector(actor=actor, cls=YARTransparentDevice) + + +def start_laser(director: TransparentDirector, target: float) -> None: + director.device.clear() # allowed due to the `RemoteCall` above + if not director.device.emission_enabled: + director.device.emission_enabled = True + sleep(5) + current = director.device.power_setpoint + while current != target: + difference = target - current + director.device.power_setpoint = current + max(difference, 1) + sleep(5) + current = director.device.power_setpoint + + +def stop_laser(director: TransparentDirector) -> None: + director.device.power_setpoint = 0 + sleep(5) + director.device.emission_enabled = False + + +def main() -> None: + """Do some experiment.""" + director = setup_director(actor="Namespace.YAR_actor") + start_laser(director=director, target=10) + for _ in range(10): + print(director.device.power) + sleep(5) + stop_laser(director) + + +if __name__ == "__main__": + main() diff --git a/examples/pymeasure_actor.py b/examples/pymeasure_actor.py new file mode 100644 index 000000000..e86f82f78 --- /dev/null +++ b/examples/pymeasure_actor.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" +Example scheme for an Actor for pymeasure instruments. 'pymeasure_actor' +""" +# This first docstring is shown in the GUI corresponding to the starter, such that it may be +# identified more easily. 
+ + +import logging + +from pyleco.actors.actor import Actor +from pyleco.utils.data_publisher import DataPublisher +from pymeasure.instruments.ipgphotonics import YAR # type:ignore[import-not-found] + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +# Parameters +interval = 0.05 # Readout interval in s +adapter = "COM15" # pymeasure adapter string + + +def readout(device: YAR, publisher: DataPublisher) -> None: + """This method is executed every `interval`. + + :param device: The device driver managed by the Actor. + :param publisher: The :class:`DataPublisher` instance of the Actor to publish data. + """ + publisher.send_data(data={'power': device.power}) + + +def task(stop_event) -> None: + """The task which is run by the starter.""" + # Initialize + with Actor(name="pymeasure_actor", device_class=YAR, periodic_reading=interval) as actor: + actor.read_publish = readout # define the regular readout function + actor.connect(adapter) # connect to the device + + # Continuous loop + actor.listen(stop_event=stop_event) # listen for commands and do the regular readouts + + # Finish + # in listen and __exit__ included + + +if __name__ == "__main__": + """Run the task if the module is executed.""" + log.addHandler(logging.StreamHandler()) + log.setLevel(logging.INFO) + + class Signal: + def is_set(self): + return False + try: + task(Signal()) + except KeyboardInterrupt: + pass diff --git a/pyleco/__init__.py b/pyleco/__init__.py index a5c01b1f8..7f0868c75 100644 --- a/pyleco/__init__.py +++ b/pyleco/__init__.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. 
# -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/pyleco/actors/__init__.py b/pyleco/actors/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyleco/actors/actor.py b/pyleco/actors/actor.py new file mode 100644 index 000000000..046706e59 --- /dev/null +++ b/pyleco/actors/actor.py @@ -0,0 +1,273 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# +from __future__ import annotations + +from typing import Any, Callable, Generic, Optional, Sequence, TypeVar, Union +from warnings import warn + +import zmq + +from ..utils.message_handler import MessageHandler +from ..utils.data_publisher import DataPublisher +from ..utils.timers import RepeatingTimer + + +Device = TypeVar("Device") + + +class Actor(MessageHandler, Generic[Device]): + """Control an instrument listening to zmq messages and regularly read some values. + + .. code:: + + a = Actor("testing", TestClass) + # define some function `readout(device: Device, publisher: DataPublisher)` + a.read_publish = readout + a.connect("COM5") # connect to device + # in listen everything happens until told to stop from elsewhere + a.listen(stop_event) + a.disconnect() + + Like the :class:`MessageHandler`, this class can be used as a context manager disconnecting at + the end of the context. + + The (via RPC available) methods :meth:`get_parameters`, :meth:`set_parameters`, and + :meth:`call_action` get/set parameters of the device or call an action of the device. + You can also register device methods with :meth:`register_device_method`, such that this method + is available via RPC as well. + + :param str name: Name to listen to and to publish values with. + :param class device_class: Instrument class. + :param int port: Port number to connect to. + :param periodic_reading: Interval between periodic readouts in s. + :param dict auto_connect: Kwargs to automatically connect to the device. + :param class cls: See :code:`device_class`. + + .. deprecated:: 0.3 + Deprecated, use :code:`device_class` instead. + + :param \\**kwargs: Keywoard arguments for the general message handling. 
+ """ + + device: Device + + def __init__( + self, + name: str, + device_class: Optional[type[Device]] = None, + periodic_reading: float = -1, + auto_connect: Optional[dict] = None, + context: Optional[zmq.Context] = None, + cls: Optional[type[Device]] = None, + **kwargs, + ): + context = context or zmq.Context.instance() + super().__init__(name=name, context=context, **kwargs) + if cls is not None: + warn("Parameter `cls` is deprecated, use `device_class` instead.", FutureWarning) + device_class = cls + if device_class is None: + # Keep this check as long as device_class is optional due to deprecated cls parameter + raise ValueError("You have to specify a `device_class`!") + self.device_class = device_class + + # Pipe for the periodic readout timer + self.pipe: zmq.Socket = context.socket(zmq.PAIR) + self.pipe.set_hwm(1) + pipe_port = self.pipe.bind_to_random_port("inproc://listenerPipe", min_port=12345) + self.pipeL: zmq.Socket = context.socket(zmq.PAIR) + self.pipeL.set_hwm(1) + self.pipeL.connect(f"inproc://listenerPipe:{pipe_port}") + + self.timer = RepeatingTimer(interval=periodic_reading, function=self.queue_readout) + self.publisher = DataPublisher(full_name=name, log=self.root_logger) + + if auto_connect: + self.connect(**auto_connect) + self.log.info(f"Actor '{name}' initialized.") + + def register_rpc_methods(self) -> None: + super().register_rpc_methods() + self.register_rpc_method(self.get_parameters) + self.register_rpc_method(self.set_parameters) + self.register_rpc_method(self.call_action) + self.register_rpc_method(self.start_polling) + self.register_rpc_method(self.stop_polling) + self.register_rpc_method(self.get_polling_interval) + self.register_rpc_method(self.set_polling_interval) + self.register_rpc_method(self.connect) + self.register_rpc_method(self.disconnect) + + def register_device_method(self, method: Callable) -> None: + """Make a device method available via RPC. 
The method name is prefixed with `device.`.""" + # TODO TBD how to call a device method? + name = method.__name__ + self.register_rpc_method(method=method, name="device." + name) + + def __del__(self) -> None: + self.disconnect() + + def __exit__(self, *args, **kwargs) -> None: + super().__exit__(*args, **kwargs) + self.disconnect() + + def set_full_name(self, full_name: str) -> None: + super().set_full_name(full_name=full_name) + self.publisher.full_name = full_name + + def _listen_setup(self) -> zmq.Poller: + """Setup for listening.""" + poller = super()._listen_setup() + poller.register(self.pipeL, zmq.POLLIN) + return poller + + def _listen_loop_element(self, poller: zmq.Poller, waiting_time: Optional[int] + ) -> dict[zmq.Socket, int]: + """Check the socks for incoming messages and handle them. + + :param waiting_time: Timeout of the poller in ms. + """ + socks = super()._listen_loop_element(poller, waiting_time) + if self.pipeL in socks: + self.pipeL.recv() + self.readout() + del socks[self.pipeL] + return socks + + def queue_readout(self) -> None: + self.pipe.send(b"") + + def read_publish(self, device: Device, publisher: DataPublisher) -> None: + """Read the device and publish the results. + + Defaults to doing nothing. Implement in a subclass. + """ + self.log.warning("No 'read_publish' method defined, periodic readout does nothing.") + + def readout(self) -> None: + """Do periodic readout of the instrument and publish the data. + + Defaults to calling :meth:`read_publish` with the device and publisher as arguments. + """ + self.read_publish(device=self.device, publisher=self.publisher) + + def start_timer(self, interval: Optional[float] = None) -> None: + """Start the readout timer. + + :param interval: Readout interval in s. If None, use the last value. 
+ """ + if interval is not None: + self.timer.interval = interval + if self.timer.interval < 0: + return + try: + self.timer.start() + except RuntimeError: + self.timer = RepeatingTimer(interval=self.timer.interval, function=self.queue_readout) + self.timer.start() + + def stop_timer(self) -> None: + """Stop the readout timer.""" + try: + self.timer.cancel() + except AttributeError: + pass + + def start_polling(self, polling_interval: Optional[float] = None) -> None: + self.start_timer(interval=polling_interval) + + def stop_polling(self) -> None: + self.stop_timer() + + @property + def polling_interval(self) -> float: + """Timeout interval of the readout timer in s.""" + return self.timer.interval + + @polling_interval.setter + def polling_interval(self, value: float) -> None: + self.timer.interval = value + + def get_polling_interval(self) -> float: + return self.polling_interval + + def set_polling_interval(self, polling_interval: float) -> None: + self.polling_interval = polling_interval + + def connect(self, *args, **kwargs) -> None: + """Connect to the device with the given arguments and keyword arguments.""" + # TODO read auto_connect? + self.log.info("Connecting") + self.device = self.device_class(*args, **kwargs) + self.start_timer() + + def disconnect(self) -> None: + """Disconnect the device.""" + self.log.info("Disconnecting.") + self.stop_timer() + try: + # Assumes a pymeasure instrument + self.device.adapter.close() # type: ignore + except AttributeError: + pass + try: + del self.device + except AttributeError: + pass + + def get_parameters(self, parameters: Union[list[str], tuple[str, ...]]) -> dict[str, Any]: + """Get device properties from the list `properties`.""" + # `parameters` should be `Iterable[str]`, however, openrpc does not like that. 
+ data = {} + for key in parameters: + path = key.split(".") + v = self.device + for attr in path: + v = getattr(v, attr) + if callable(v): + raise TypeError(f"Attribute '{key}' is a callable!") + data[key] = v + return data + + def set_parameters(self, parameters: dict[str, Any]) -> None: + """Set device properties from a dictionary.""" + for key, value in parameters.items(): + path = key.split(".") + obj = self.device + for attr in path[:-1]: + obj = getattr(obj, attr) + setattr(obj, path[-1], value) + + def call_action(self, action: str, args: Optional[Sequence] = None, + kwargs: Optional[dict[str, Any]] = None) -> Any: + """Call a device action with positional ``args`` and keyword arguments ``kwargs``.""" + if args is None: + args = () + if kwargs is None: + kwargs = {} + path = action.split(".") + obj = self.device + for attr in path[:-1]: + obj = getattr(obj, attr) + return getattr(obj, path[-1])(*args, **kwargs) diff --git a/pyleco/actors/locking_actor.py b/pyleco/actors/locking_actor.py new file mode 100644 index 000000000..fcf1da055 --- /dev/null +++ b/pyleco/actors/locking_actor.py @@ -0,0 +1,115 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +from typing import Any, Generic, Optional, Sequence, TypeVar, Union + +from zmq import Context + +from .actor import Actor + + +Device = TypeVar("Device") + + +class AccessDeniedError(BaseException): + pass + + +class LockingActor(Actor, Generic[Device]): + """An Actor which allows to lock the device or parts of it.""" + + def __init__( + self, + name: str, + device_class: Optional[type[Device]] = None, + periodic_reading: float = -1, + auto_connect: Optional[dict] = None, + context: Optional[Context] = None, + **kwargs, + ): + super().__init__(name, device_class, periodic_reading, auto_connect, context, **kwargs) + self._locks: dict[Optional[str], bytes] = {} + + def register_rpc_methods(self) -> None: + super().register_rpc_methods() + self.register_rpc_method(self.lock) + self.register_rpc_method(self.unlock) + self.register_rpc_method(self.force_unlock) + + # RPC methods for locking + def lock(self, resource: Optional[str] = None) -> bool: + """Lock the controlled device or one of its resources and return the success state.""" + if self.check_access_rights(resource): + self._locks[resource] = self.current_message.sender + return True + else: + return False + + def unlock(self, resource: Optional[str] = None) -> None: + """Unlock the controlled device or one of its resources. + + Only the locking Component may unlock. 
+ """ + if self.check_access_rights(resource): + self._locks.pop(resource, None) + + def force_unlock(self, resource: Optional[str] = None) -> None: + """Unlock the controlled device or one of its resources even if someone else locked it.""" + self._locks.pop(resource, None) + + # modified methods for device access + def get_parameters(self, parameters: Union[list[str], tuple[str, ...]]) -> dict[str, Any]: + # `parameters` should be `Iterable[str]`, however, openrpc does not like that. + for parameter in parameters: + self._check_access_rights_raising(parameter) + return super().get_parameters(parameters=parameters) + + def set_parameters(self, parameters: dict[str, Any]) -> None: + for parameter in parameters.keys(): + self._check_access_rights_raising(parameter) + return super().set_parameters(parameters=parameters) + + def call_action( + self, action: str, args: Optional[Sequence] = None, kwargs: Optional[dict[str, Any]] = None + ) -> Any: + self._check_access_rights_raising(action) + return super().call_action(action=action, args=args, kwargs=kwargs) + + # helper methods + def check_access_rights(self, resource: Optional[str]) -> bool: + requester = self.current_message.sender + if resource is None: + elements = [] + else: + elements = resource.split(".") + for i in range(-1, len(elements)): + local_owner = self._locks.get(".".join(elements[:i + 1])) if i >= 0 else None + if local_owner is not None and requester != local_owner: + return False + return True + + def _check_access_rights_raising(self, resource: str) -> None: + if self.check_access_rights(resource=resource) is False: + raise AccessDeniedError(f"Resource '{resource}' is locked by someone else.") diff --git a/pyleco/coordinators/__init__.py b/pyleco/coordinators/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyleco/coordinators/coordinator.py b/pyleco/coordinators/coordinator.py new file mode 100644 index 000000000..bbeb21c54 --- /dev/null +++ 
b/pyleco/coordinators/coordinator.py @@ -0,0 +1,524 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +from json import JSONDecodeError +import logging +from socket import gethostname +from typing import Any, Optional, Union + +import zmq + +if __name__ != "__main__": + from ..core import COORDINATOR_PORT + from ..utils.coordinator_utils import CommunicationError, Directory, ZmqNode, ZmqMultiSocket,\ + MultiSocket + from ..core.message import Message, MessageTypes + from ..core.serialization import get_json_content_type, JsonContentTypes + from ..json_utils.errors import NODE_UNKNOWN, RECEIVER_UNKNOWN + from ..json_utils.json_objects import ErrorResponse, Request, ParamsRequest, DataError + from ..json_utils.rpc_server import RPCServer + from ..utils.timers import RepeatingTimer + from ..utils.zmq_log_handler import ZmqLogHandler + from ..utils.events import Event, SimpleEvent + from ..utils.log_levels import PythonLogLevels +else: # pragma: no cover + from pyleco.core import COORDINATOR_PORT + from pyleco.utils.coordinator_utils import CommunicationError, Directory, ZmqNode,\ + ZmqMultiSocket, MultiSocket + from pyleco.core.message import Message, MessageTypes + from pyleco.core.serialization import get_json_content_type, JsonContentTypes + from pyleco.json_utils.errors import NODE_UNKNOWN, RECEIVER_UNKNOWN + from pyleco.json_utils.json_objects import ErrorResponse, Request, ParamsRequest, DataError + from pyleco.json_utils.rpc_server import RPCServer + from pyleco.utils.timers import RepeatingTimer + from pyleco.utils.zmq_log_handler import ZmqLogHandler + from pyleco.utils.events import Event, SimpleEvent + from pyleco.utils.log_levels import PythonLogLevels + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class Coordinator: + """A Coordinator program, routing messages among connected peers. + + .. code:: + + with Coordinator() as coordinator: + coordinator.routing() + + :param str namespace: Name of the node. Defaults to hostname. 
+ :param str host: Hostname of the system of this Coordinator, that others may connect to it. + :param int port: Port to listen to. + :param timeout: Timeout waiting for messages in ms. + :param cleaning_interval: Interval between two addresses cleaning runs in s. + :param expiration_time: Time, when a stored address expires in s. + :param context: ZMQ context or similar. + """ + + current_message: Message + current_identity: bytes + closed: bool = False + + def __init__( + self, + namespace: Optional[Union[bytes, str]] = None, + host: Optional[str] = None, + port: int = COORDINATOR_PORT, + timeout: int = 50, + cleaning_interval: float = 5, + expiration_time: float = 15, + context: Optional[zmq.Context] = None, + multi_socket: Optional[MultiSocket] = None, + **kwargs, + ) -> None: + if namespace is None: + self.namespace = gethostname().split(".")[0].encode() + elif isinstance(namespace, str): + self.namespace = namespace.encode() + elif isinstance(namespace, bytes): + self.namespace = namespace + else: + raise ValueError("`namespace` must be str or bytes or None.") + self.full_name = self.namespace + b".COORDINATOR" + log.info(f"Start Coordinator of node {self.namespace!r} at port '{port}'.") + self.address = f"{host or gethostname()}:{port}" + self.directory = Directory( + namespace=self.namespace, full_name=self.full_name, address=self.address + ) + self.global_directory: dict[bytes, list[str]] = {} # All Components + self.timeout = timeout + self.cleaner = RepeatingTimer( + interval=cleaning_interval, + function=self.remove_expired_addresses, + args=(expiration_time,), + ) + + self.cleaner.start() + + context = context or zmq.Context.instance() + self.sock = multi_socket or ZmqMultiSocket(context=context) + self.context = context + self.sock.bind(port=port) + + self.register_methods() + + super().__init__(**kwargs) + + def register_methods(self): + """Add methods to the OpenRPC register and change the name.""" + self.rpc = rpc = RPCServer(title="COORDINATOR", 
debug=True) + rpc.title = self.full_name.decode() + # Component + rpc.method()(self.pong) + # Extended Component + rpc.method()(self.set_log_level) + rpc.method()(self.shut_down) + # Coordinator proper + rpc.method()(self.sign_in) + rpc.method()(self.sign_out) + rpc.method()(self.coordinator_sign_in) + rpc.method()(self.coordinator_sign_out) + rpc.method()(self.add_nodes) + rpc.method()(self.send_nodes) + rpc.method()(self.record_components) + rpc.method()(self.send_local_components) + rpc.method()(self.send_global_components) + rpc.method(description=self.remove_expired_addresses.__doc__)(self.remove_expired_addresses) + + def __del__(self) -> None: + try: + self.close() + except AttributeError: + pass # if creation failed, closing may fail during deletion. + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_traceback) -> None: + self.close() + + def close(self) -> None: + """Sign out and close the sockets.""" + log.debug("Closing Coordinator.") + if not self.closed: + self.shut_down() + self.sock.close(timeout=1) + self.cleaner.cancel() + log.info(f"Coordinator {self.full_name!r} closed.") + self.closed = True + + def create_message( + self, receiver: bytes, data: Optional[Union[bytes, str, object]] = None, **kwargs + ) -> Message: + return Message(receiver=receiver, sender=self.full_name, data=data, **kwargs) + + def send_message(self, receiver: bytes, data: Optional[object] = None, **kwargs) -> None: + """Send a message with any socket, including routing. + + :param receiver: Receiver name + :param data: Object to send. + :param \\**kwargs: Keyword arguments for the header. 
+ """ + self.deliver_message( + sender_identity=b"", + message=self.create_message(receiver=receiver, data=data, **kwargs), + ) + + def send_main_sock_reply( + self, + sender_identity: bytes, + original_message: Message, + data: Optional[Union[bytes, str, object]] = None, + message_type: Optional[Union[bytes, int, MessageTypes]] = None, + ) -> None: + response = self.create_message( + receiver=original_message.sender, + conversation_id=original_message.conversation_id, + data=data, + message_type=message_type, + ) + self.sock.send_message(sender_identity, response) + + def remove_expired_addresses(self, expiration_time: float) -> None: + """Remove all expired addresses from the directory. + + :param float expiration_time: Expiration limit in s. + """ + log.debug("Cleaning addresses.") + self._clean_components(expiration_time=expiration_time) + self.directory.find_expired_nodes(expiration_time=expiration_time) + + def _clean_components(self, expiration_time: float) -> None: + to_admonish = self.directory.find_expired_components(expiration_time=expiration_time) + for identity, name in to_admonish: + message = self.create_message( + receiver=b".".join((self.namespace, name)), + message_type=MessageTypes.JSON, + data=Request(id=0, method="pong"), + ) + self.sock.send_message(identity, message) + self.publish_directory_update() + + def routing( + self, coordinators: Optional[list[str]] = None, stop_event: Optional[Event] = None + ) -> None: + """Route all messages. + + Connect to Coordinators at the beginning. + + :param list coordinators: list of coordinator addresses. + """ + # Connect to Coordinators. + if coordinators is not None: + for coordinator in coordinators: + self.directory.add_node_sender( + node=ZmqNode(context=self.context), address=coordinator, namespace=b"" + ) + # Route messages until told to stop. 
+ self.stop_event = stop_event or SimpleEvent() + while not self.stop_event.is_set(): + if self.sock.message_received(self.timeout): + self.read_and_route() + self.directory.check_unfinished_node_connections() + # Cleanup + log.info("Coordinator routing stopped.") + + def read_and_route(self) -> None: + """Do the routing of one message.""" + try: + sender_identity, message = self.sock.read_message() + except TypeError as exc: + log.exception("Not enough frames read.", exc_info=exc) + return + else: + # Handle different communication cases. + self.deliver_message(sender_identity=sender_identity, message=message) + + def deliver_message(self, sender_identity: bytes, message: Message) -> None: + """Deliver a message `message` from some `sender_identity` to some recipient. + + Messages from this Coordinator must have :code:`sender_identity = b""`. + """ + log.debug( + f"From identity {sender_identity!r}, from {message.sender!r}, to {message.receiver!r}," + f" header {message.header!r}, cid {message.conversation_id!r}, '{message.payload}'." 
+ ) + # Update heartbeat + if sender_identity: + try: + self.directory.update_heartbeat(sender_identity=sender_identity, message=message) + except CommunicationError as exc: + log.error(f"Updating heartbeat of {message.sender!r} failed due to '{exc}'.") + self.send_main_sock_reply( + sender_identity=sender_identity, + original_message=message, + message_type=MessageTypes.JSON, + data=exc.error_payload, + ) + return + # Route the message + receiver_namespace, receiver_name = message.receiver_elements + if message.receiver == b"COORDINATOR" or message.receiver == self.full_name: + self.handle_commands(sender_identity=sender_identity, message=message) + elif receiver_namespace == self.namespace or receiver_namespace == b"": + self._deliver_locally(message=message, receiver_name=receiver_name) + else: + self._deliver_remotely(message=message, receiver_namespace=receiver_namespace) + + def _deliver_locally(self, message: Message, receiver_name: bytes) -> None: + try: + receiver_identity = self.directory.get_component_id(name=receiver_name) + except ValueError: + log.error(f"Receiver '{message.receiver!r}' is not in the addresses list.") + error = DataError.from_error(RECEIVER_UNKNOWN, data=message.receiver.decode()) + self.send_message( + receiver=message.sender, + conversation_id=message.conversation_id, + message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=error), + ) + else: + self.sock.send_message(receiver_identity, message) + + def _deliver_remotely(self, message: Message, receiver_namespace: bytes) -> None: + try: + self.directory.send_node_message(namespace=receiver_namespace, message=message) + except ValueError: + error = DataError.from_error(NODE_UNKNOWN, data=receiver_namespace.decode()) + self.send_message( + receiver=message.sender, + conversation_id=message.conversation_id, + message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=error), + ) + + def handle_commands(self, sender_identity: bytes, message: Message) -> None: + 
"""Handle commands for the Coordinator itself. + + :param bytes sender_identity: Identity of the original sender. + :param Message message: The message object. + """ + if not message.payload: + return # Empty payload, just heartbeat. + self.current_message = message + self.current_identity = sender_identity + if message.header_elements.message_type == MessageTypes.JSON: + self.handle_json_commands(message=message) + else: + log.error( + f"Message from {message.sender!r} of unknown type received: {message.payload[0]!r}" + ) + + def handle_json_commands(self, message: Message) -> None: + try: + data: Union[list[dict[str, Any]], dict[str, Any]] = message.data # type: ignore + except JSONDecodeError: + log.error( + f"Invalid JSON message from {message.sender!r} received: {message.payload[0]!r}" + ) + return + json_type = get_json_content_type(data) + if JsonContentTypes.REQUEST in json_type: + try: + self.handle_rpc_call(message=message) + except Exception as exc: + log.exception( + f"Invalid JSON-RPC message from {message.sender!r} received: {data}", + exc_info=exc, + ) + elif JsonContentTypes.RESULT_RESPONSE == json_type: + if data.get("result", False) is not None: # type: ignore + log.info(f"Unexpeced result received: {data}") + elif JsonContentTypes.ERROR in json_type: + log.error(f"Error from {message.sender!r} received: {data}.") + elif JsonContentTypes.RESULT in json_type: + for element in data: + if element.get("result", False) is not None: # type: ignore + log.info(f"Unexpeced result received: {data}") + else: + log.error( + f"Invalid JSON RPC message from {message.sender!r} received: {message.payload[0]!r}" + ) # noqa + + def handle_rpc_call(self, message: Message) -> None: + reply = self.rpc.process_request(message.payload[0]) + sender_namespace = message.sender_elements.namespace + log.debug(f"Reply {reply!r} to {message.sender!r} at node {sender_namespace!r}.") + if sender_namespace == self.namespace or sender_namespace == b"": + self.send_main_sock_reply( 
+ sender_identity=self.current_identity, + original_message=message, + data=reply, + message_type=MessageTypes.JSON, + ) + else: + self.send_message( + receiver=message.sender, + conversation_id=message.conversation_id, + message_type=MessageTypes.JSON, + data=reply, + ) + + # Component procedures + def pong(self) -> None: + """Respond in order to test the connection""" + pass + + @staticmethod + def set_log_level(level: str) -> None: + plevel = PythonLogLevels[level] + log.setLevel(plevel) + + def shut_down(self) -> None: + self.sign_out_from_all_coordinators() + try: + self.stop_event.set() + except AttributeError: # pragma: no cover + pass + + # Coordinator procedures + def sign_in(self) -> None: + message = self.current_message + sender_identity = self.current_identity + sender_name = message.sender_elements.name + self.directory.add_component(name=sender_name, identity=sender_identity) + log.info(f"New Component {sender_name!r} at {sender_identity!r}.") + self.publish_directory_update() + + def sign_out(self) -> None: + message = self.current_message + sender_identity = self.current_identity + sender_name = message.sender_elements.name + self.directory.remove_component(name=sender_name, identity=sender_identity) + log.info(f"Component {sender_name!r} signed out.") + self.publish_directory_update() + + def coordinator_sign_in(self) -> None: + message = self.current_message + sender_identity = self.current_identity + sender_namespace, sender_name = message.sender_elements + message.sender = sender_name # remove namespace in order to respond via main socket + self.directory.add_node_receiver(identity=sender_identity, namespace=sender_namespace) + + def coordinator_sign_out(self) -> None: + message = self.current_message + sender_identity = self.current_identity + sender_namespace, sender_name = message.sender_elements + assert sender_name == b"COORDINATOR", "Only Coordinators may use coordinator sign out." 
+ node = self.directory.get_node(namespace=sender_namespace) + self.directory.remove_node(namespace=sender_namespace, identity=sender_identity) + node.send_message( + Message( + receiver=sender_namespace + b".COORDINATOR", + sender=self.full_name, + conversation_id=message.conversation_id, + message_type=MessageTypes.JSON, + data=Request(id=100, method="coordinator_sign_out"), + ) + ) + + def add_nodes(self, nodes: dict) -> None: # : dict[str, str] + for node, address in nodes.items(): + node = node.encode() + try: + self.directory.add_node_sender( + ZmqNode(context=self.context), address=address, namespace=node + ) + except ValueError: + pass # already connected + + def record_components(self, components: list[str]) -> None: + """Record Components of another Coordinator.""" + message = self.current_message + self.global_directory[message.sender_elements.namespace] = components + + def send_nodes(self) -> dict[str, str]: + return self.directory.get_nodes_str_dict() + + def send_local_components(self) -> list[str]: + """Send the names of locally connected Components.""" + return self.directory.get_component_names() + + def send_global_components(self) -> dict[str, list[str]]: + """Send the names of all Components in this LECO network.""" + data = {ns.decode(): components for ns, components in self.global_directory.items()} + data[self.namespace.decode()] = self.send_local_components() + return data + + # Additional procedures + def sign_out_from_all_coordinators(self) -> None: + """Sign out from other Coordinators.""" + self.directory.sign_out_from_all_nodes() + + def publish_directory_update(self) -> None: + """Send a directory update to the other coordinators.""" + # TODO TBD whether to send the whole directory or only a diff. 
+ nodes = self.directory.get_nodes_str_dict() + components = self.directory.get_component_names() + for node in self.directory.get_nodes().keys(): + self.send_message( + receiver=b".".join((node, b"COORDINATOR")), + message_type=MessageTypes.JSON, + data=[ + ParamsRequest(id=5, method="add_nodes", params={"nodes": nodes}).model_dump(), + ParamsRequest( + id=6, method="record_components", params={"components": components} + ).model_dump(), + ], + ) + + +def main() -> None: + # Absolute imports if the file is executed. + from pyleco.utils.parser import parser, parse_command_line_parameters # noqa: F811 + + # Define parser + parser.add_argument( + "-c", + "--coordinators", + default="", + help="connect to this comma separated list of coordinators", + ) + parser.add_argument("--namespace", help="set the Node's namespace") + parser.add_argument("-p", "--port", type=int, help="port number to bind to") + + # Parse and interpret command line parameters + gLog = logging.getLogger() + kwargs = parse_command_line_parameters(logger=gLog, parser=parser, logging_default=logging.INFO) + if len(log.handlers) <= 1: + log.addHandler(logging.StreamHandler()) + cos = kwargs.pop("coordinators", "") + coordinators = cos.replace(" ", "").split(",") + + # Run the Coordinator + with Coordinator(**kwargs) as c: + handler = ZmqLogHandler(full_name=c.full_name.decode()) + gLog.addHandler(handler) + c.routing(coordinators=coordinators) + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/pyleco/coordinators/proxy_server.py b/pyleco/coordinators/proxy_server.py new file mode 100644 index 000000000..6af3f5c1e --- /dev/null +++ b/pyleco/coordinators/proxy_server.py @@ -0,0 +1,214 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +""" +Zero MQ Proxy server for data exchange. + +methods +------- +pub_sub_proxy + Run a publisher subscriber proxy in the current thread (blocking). +start_proxy + Start a proxy server, either local or remote, in its own thread. + + +Execute this module to start a proxy server. If no remote connection given, a +local proxy is created (necessary for remote proxies). 
+command line arguments: + -v show all the data passing through the proxy + -s NAME/IP Subscribe to the local proxy of some other computer + -p NAME/IP Publish to the local proxy of some other computer + +Created on Mon Jun 27 09:57:05 2022 by Benedikt Burger +""" + +from __future__ import annotations +import logging +import threading +from typing import Optional + +import zmq + +if __name__ == "__main__": + from pyleco.core import PROXY_RECEIVING_PORT +else: + from ..core import PROXY_RECEIVING_PORT + + +log = logging.Logger(__name__) + +port = PROXY_RECEIVING_PORT + + +# Technical method to start the proxy server. Use `start_proxy` instead. +def pub_sub_proxy( + context: zmq.Context, + captured: bool = False, + sub: str = "localhost", + pub: str = "localhost", + offset: int = 0, + event: Optional[threading.Event] = None, +) -> None: + """Run a publisher subscriber proxy in the current thread (blocking).""" + s: zmq.Socket = context.socket(zmq.XSUB) + p: zmq.Socket = context.socket(zmq.XPUB) + _port = port - 2 * offset + if sub == "localhost" and pub == "localhost": + log.info(f"Start local proxy server: listening on {_port}, publishing on {_port - 1}.") + s.bind(f"tcp://*:{_port}") + p.bind(f"tcp://*:{_port - 1}") + else: + log.info( + f"Start remote proxy server subsribing to {sub}:{_port - 1} and publishing to " + f"{pub}:{_port}." 
+ ) + s.connect(f"tcp://{sub}:{port -1 - 2 * offset}") + p.connect(f"tcp://{pub}:{port - 2 * offset}") + + if captured: + log.info("Capturing all messages.") + c: zmq.Socket = context.socket(zmq.PUB) + c.bind("inproc://capture") + else: + c = None # type: ignore + if event is not None: + event.set() + try: + zmq.proxy_steerable(p, s, capture=c) + except zmq.ContextTerminated: + log.info("Proxy context terminated.") + except Exception as exc: + log.exception("Some other exception on proxy happened.", exc) + + +def start_proxy( + context: Optional[zmq.Context] = None, + captured: bool = False, + sub: str = "localhost", + pub: str = "localhost", + offset: int = 0, +) -> zmq.Context: + """Start a proxy server, either local or remote, in its own thread. + + Examples: + + .. code-block:: python + + # Between software on the local computer, necessary on every computer: + c = start_proxy() + # Get the data from a to localhost: + c = start_proxy(sub="a.domain.com") + # Send local data to b: + c = start_proxy(pub="b.domain.com") + # Send from a to b, can be executed on some third computer: + c = start_proxy(sub="a.domain.com", + pub="b.domain.com") + # Stop the proxy: + c.destroy() + + :param context: The zmq context. + :param bool captured: Print the captured messages. + :param str sub: Name or IP Address of the server to subscribe to. + :param str pub: Name or IP Address of the server to publish to. + :param offset: How many servers (pairs of ports) to offset from the base one. + :return: The zmq context. To stop, call `context.destroy()`. 
+ """ + context = context or zmq.Context.instance() + event = threading.Event() + thread = threading.Thread( + target=pub_sub_proxy, args=(context, captured, sub, pub, offset, event) + ) + thread.daemon = True + thread.start() + started = event.wait(1) + if not started: + raise TimeoutError("Starting of proxy server failed.") + log.info("Proxy thread started.") + return context + + +def main( + arguments: Optional[list[str]] = None, stop_event: Optional[threading.Event] = None +) -> None: + from pyleco.utils.parser import ArgumentParser, parse_command_line_parameters + + parser = ArgumentParser(prog="Proxy server") + parser.add_argument( + "-s", "--sub", help="set the host name to subscribe to", default="localhost" + ) + parser.add_argument("-p", "--pub", help="set the host name to publish to", default="localhost") + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="increase the logging level by one, may be used more than once", + ) + parser.add_argument( + "-c", + "--captured", + action="store_true", + default=False, + help="log all messages sent through the proxy", + ) + parser.add_argument("-o", "--offset", help="shifting the port numbers.", default=0, type=int) + kwargs = parse_command_line_parameters( + parser=parser, logger=log, arguments=arguments, logging_default=logging.INFO + ) + + log.addHandler(logging.StreamHandler()) + if kwargs.get("captured"): + log.setLevel(logging.DEBUG) + merely_local = kwargs.get("pub") == "localhost" and kwargs.get("sub") == "localhost" + + if not merely_local: + log.info( + f"Remote proxy from {kwargs.get('sub', 'localhost')} " + f"to {kwargs.get('pub', 'localhost')}." + ) + else: + log.info( + "This data broker manages the data between measurement software, " + f"which publishes on port {port}, and all the consumers of data " + f" (DataLogger, Beamprofiler etc.), which subscribe on port {port - 1}." 
+ ) + context = zmq.Context() + start_proxy(context=context, **kwargs) + if merely_local: + start_proxy(context=context, offset=1) # for log entries + reader = context.socket(zmq.SUB) + reader.connect("inproc://capture") + reader.subscribe(b"") + poller = zmq.Poller() + poller.register(reader, zmq.POLLIN) + while stop_event is None or not stop_event.is_set(): + if socks := dict(poller.poll(1)): + if reader in socks: + received = reader.recv_multipart() + log.debug(f"Message brokered: {received}") + context.term() + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/pyleco/core/__init__.py b/pyleco/core/__init__.py index 51f96f556..e87dfcd81 100644 --- a/pyleco/core/__init__.py +++ b/pyleco/core/__init__.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal diff --git a/pyleco/core/data_message.py b/pyleco/core/data_message.py new file mode 100644 index 000000000..3737bf0dc --- /dev/null +++ b/pyleco/core/data_message.py @@ -0,0 +1,115 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +from json import JSONDecodeError +from typing import Any, Iterable, Optional, Union + +from .serialization import deserialize_data, generate_conversation_id, serialize_data, MessageTypes + + +class DataMessage: + """A message of the data protocol.""" + + topic: bytes + header: bytes + payload: list[bytes] + + def __init__(self, + topic: Union[bytes, str], + header: Optional[bytes] = None, + data: Optional[Union[bytes, str, Any]] = None, + conversation_id: Optional[bytes] = None, + message_type: Union[MessageTypes, int] = MessageTypes.NOT_DEFINED, + additional_payload: Optional[Iterable[bytes]] = None, + **kwargs) -> None: + super().__init__(**kwargs) + self.topic = topic.encode() if isinstance(topic, str) else topic + if header and (conversation_id or message_type != MessageTypes.NOT_DEFINED): + raise ValueError( + "You may not specify the header and some header element at the same time!") + if header is None: + cid = generate_conversation_id() if conversation_id is None else conversation_id + self.header = cid + message_type.to_bytes(length=1, byteorder="big") + else: + self.header = header + if isinstance(data, bytes): + self.payload = [data] + elif isinstance(data, str): + self.payload = [data.encode()] + elif data is None: + self.payload = [] + else: + self.payload = [serialize_data(data)] + if additional_payload is not None: + self.payload.extend(additional_payload) + + @classmethod + def from_frames(cls, topic: bytes, header: bytes, *payload: 
bytes): + """Create a message from a frames list, for example after reading from a socket. + + .. code:: + + frames = socket.recv_multipart() + message = DataMessage.from_frames(*frames) + """ + message = cls(topic=topic, header=header, additional_payload=payload) + return message + + def to_frames(self) -> list[bytes]: + return [self.topic, self.header] + self.payload + + @property + def conversation_id(self) -> bytes: + return self.header[:-1] + + @property + def message_type(self) -> int: + return self.header[-1] + + @property + def data(self) -> object: + return deserialize_data(self.payload[0]) if self.payload else None + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DataMessage): + return NotImplemented + partial_comparison = ( + self.topic == other.topic + and self.header == other.header + ) + try: + # Try to compare the data (python objects) instead of their bytes representation. + my_data = self.data + other_data = other.data + except JSONDecodeError: + # Maybe the payload is binary, compare the raw payload + return partial_comparison and self.payload == other.payload + else: + return (partial_comparison and my_data == other_data + and self.payload[1:] == other.payload[1:]) + + def __repr__(self) -> str: + list_of_frames_strings = [str(frame) for frame in self.to_frames()] + return f"DataMessage.from_frames({', '.join(list_of_frames_strings)})" diff --git a/pyleco/core/internal_protocols.py b/pyleco/core/internal_protocols.py index 10c211a6f..7c6ae246c 100644 --- a/pyleco/core/internal_protocols.py +++ b/pyleco/core/internal_protocols.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. 
# -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -27,14 +27,15 @@ They are not defined by LECO itself as it does not touch the message transfer. -Any Component could use these tools in order to send and read messsages. +Any Component could use these tools in order to send and read messages. For example a Director might use these tools to direct an Actor. """ +from __future__ import annotations from typing import Any, Optional, Protocol, Iterable, Union from .message import Message, MessageTypes -from .rpc_generator import RPCGenerator +from ..json_utils.rpc_generator import RPCGenerator class CommunicatorProtocol(Protocol): @@ -48,46 +49,86 @@ class CommunicatorProtocol(Protocol): rpc_generator: RPCGenerator timeout: float = 1 # default reading timeout in seconds + @property + def full_name(self) -> str: + return self.name if self.namespace is None else ".".join((self.namespace, self.name)) + def sign_in(self) -> None: ... # pragma: no cover def sign_out(self) -> None: ... # pragma: no cover def send_message(self, message: Message) -> None: ... # pragma: no cover - def read_message(self, conversation_id: Optional[bytes], timeout: Optional[float] = None - ) -> Message: ... # pragma: no cover + def read_message( + self, conversation_id: Optional[bytes], timeout: Optional[float] = None + ) -> Message: ... # pragma: no cover - def ask_message(self, message: Message, timeout: Optional[float] = None - ) -> Message: ... # pragma: no cover + def ask_message( + self, message: Message, timeout: Optional[float] = None + ) -> Message: ... # pragma: no cover def close(self) -> None: ... 
# pragma: no cover # Utilities - def send(self, - receiver: Union[bytes, str], - conversation_id: Optional[bytes] = None, - data: Optional[Any] = None, - **kwargs) -> None: + def send( + self, + receiver: Union[bytes, str], + conversation_id: Optional[bytes] = None, + data: Optional[Any] = None, + **kwargs, + ) -> None: """Send a message based on kwargs.""" - self.send_message(message=Message( - receiver=receiver, conversation_id=conversation_id, data=data, **kwargs - )) - - def ask(self, receiver: Union[bytes, str], conversation_id: Optional[bytes] = None, - data: Optional[Any] = None, - timeout: Optional[float] = None, - **kwargs) -> Message: + self.send_message( + message=Message(receiver=receiver, conversation_id=conversation_id, data=data, **kwargs) + ) + + def ask( + self, + receiver: Union[bytes, str], + conversation_id: Optional[bytes] = None, + data: Optional[Any] = None, + timeout: Optional[float] = None, + **kwargs, + ) -> Message: """Send a message based on kwargs and retrieve the response.""" - return self.ask_message(message=Message( - receiver=receiver, conversation_id=conversation_id, data=data, **kwargs), - timeout=timeout) - - def ask_rpc(self, receiver: Union[bytes, str], method: str, timeout: Optional[float] = None, - **kwargs) -> Any: + return self.ask_message( + message=Message( + receiver=receiver, conversation_id=conversation_id, data=data, **kwargs + ), + timeout=timeout, + ) + + def interpret_rpc_response( + self, response_message: Message, extract_additional_payload: bool = False + ) -> Union[Any, tuple[Any, list[bytes]]]: + """Retrieve the return value of a RPC response and optionally the additional payload.""" + result = self.rpc_generator.get_result_from_response(response_message.payload[0]) + if extract_additional_payload: + return result, response_message.payload[1:] + else: + return result + + def ask_rpc( + self, + receiver: Union[bytes, str], + method: str, + timeout: Optional[float] = None, + additional_payload: 
Optional[Iterable[bytes]] = None, + extract_additional_payload: bool = False, + **kwargs, + ) -> Any: + """Send a JSON-RPC request (with method \\**kwargs) and return the response value.""" string = self.rpc_generator.build_request_str(method=method, **kwargs) - response = self.ask(receiver=receiver, data=string, message_type=MessageTypes.JSON, - timeout=timeout) - return self.rpc_generator.get_result_from_response(response.payload[0]) + response = self.ask( + receiver=receiver, + data=string, + message_type=MessageTypes.JSON, + additional_payload=additional_payload, + timeout=timeout, + ) + return self.interpret_rpc_response( + response, extract_additional_payload=extract_additional_payload + ) class SubscriberProtocol(Protocol): @@ -97,6 +138,8 @@ def subscribe_single(self, topic: bytes) -> None: ... # pragma: no cover def unsubscribe_single(self, topic: bytes) -> None: ... # pragma: no cover + def unsubscribe_all(self) -> None: ... # pragma: no cover + def subscribe(self, topics: Union[str, Iterable[str]]) -> None: """Subscribe to a topic or list of topics.""" if isinstance(topics, str): diff --git a/pyleco/core/leco_protocols.py b/pyleco/core/leco_protocols.py index afb5ffc17..788217d8b 100644 --- a/pyleco/core/leco_protocols.py +++ b/pyleco/core/leco_protocols.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -66,8 +66,9 @@ def test_method_is_available(component_methods, method): raise AssertionError(f"Method {method} is not available.") """ +from __future__ import annotations try: - from enum import StrEnum + from enum import StrEnum # type: ignore except ImportError: # For python<3.11 from enum import Enum @@ -104,7 +105,7 @@ def shut_down(self) -> None: ... 
class CoordinatorProtocol(ComponentProtocol, Protocol): - """A command protocol Coordinator""" + """A command protocol Coordinator.""" def sign_in(self) -> None: ... diff --git a/pyleco/core/message.py b/pyleco/core/message.py index 5c6977d20..1bcb8036e 100644 --- a/pyleco/core/message.py +++ b/pyleco/core/message.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -22,8 +22,9 @@ # THE SOFTWARE. # +from __future__ import annotations from json import JSONDecodeError -from typing import Any, Optional, Union +from typing import Any, Iterable, Optional, Union from . import VERSION_B @@ -63,6 +64,7 @@ def __init__(self, conversation_id: Optional[bytes] = None, message_id: Optional[bytes] = None, message_type: Union[MessageTypes, int] = MessageTypes.NOT_DEFINED, + additional_payload: Optional[Iterable[bytes]] = None, ) -> None: self.receiver = receiver.encode() if isinstance(receiver, str) else receiver self.sender = sender.encode() if isinstance(sender, str) else sender @@ -80,6 +82,8 @@ def __init__(self, self.payload = [] else: self.payload = [serialize_data(data)] + if additional_payload is not None: + self.payload.extend(additional_payload) @classmethod def from_frames(cls, version: bytes, receiver: bytes, sender: bytes, header: bytes, @@ -91,9 +95,8 @@ def from_frames(cls, version: bytes, receiver: bytes, sender: bytes, header: byt frames = socket.recv_multipart() message = Message.from_frames(*frames) """ - inst = cls(receiver, sender, header=header) + inst = cls(receiver, sender, header=header, additional_payload=payload) inst.version = version - inst.payload = list(payload) return inst def to_frames(self) -> list[bytes]: @@ -147,5 +150,6 @@ def __eq__(self, other: Any) -> bool: and self.payload[1:] 
== other.payload[1:]) def __repr__(self) -> str: - list_of_frames_strings = [str(frame) for frame in self.to_frames()] + list_of_frames_strings = [ + str(frame) for frame in self._to_frames_without_sender_check()] return f"Message.from_frames({', '.join(list_of_frames_strings)})" diff --git a/pyleco/core/rpc_generator.py b/pyleco/core/rpc_generator.py deleted file mode 100644 index 7bf806fe0..000000000 --- a/pyleco/core/rpc_generator.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# This file is part of the PyLECO package. -# -# Copyright (c) 2023-2023 PyLECO Developers -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# - -from typing import Any, Union - -from jsonrpc2pyclient._irpcclient import IRPCClient # type: ignore -from jsonrpcobjects.objects import Error - - -# according to error raised by IRPCClient if decoding fails. 
-INVALID_SERVER_RESPONSE = Error(code=-32000, message="Invalid response from server.") - - -class RPCGenerator(IRPCClient): - """Builds and interprets json rpc messages.""" - - # TODO it stores an always growing list of "id"s, if you do not call "get_result". - - def build_request_str(self, method: str, *args, **kwargs) -> str: - if args and kwargs: - raise ValueError( - "You may not specify list of positional arguments " - "and give additional keyword arguments at the same time.") - return self._build_request(method=method, params=kwargs or list(args) or None - ).model_dump_json() - - def get_result_from_response(self, data: Union[bytes, str]) -> Any: - """Get the result of that object or raise an error.""" - return self._get_result_from_response(data=data) - - def clear_id_list(self) -> None: - """Reset the list of created ids.""" - self._ids: dict[int, int] = {} diff --git a/pyleco/core/serialization.py b/pyleco/core/serialization.py index a2007cc1f..94fc86f77 100644 --- a/pyleco/core/serialization.py +++ b/pyleco/core/serialization.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -22,18 +22,22 @@ # THE SOFTWARE. 
# -from enum import IntEnum +from __future__ import annotations +import datetime +from enum import IntEnum, IntFlag import json from typing import Any, Optional, NamedTuple, Union -from uuid_extensions import uuid7 # type: ignore # as long as uuid does not yet support UUIDv7 -from jsonrpcobjects.objects import (Request, - ParamsRequest, - ResultResponse, - ErrorResponse, - Notification, - ParamsNotification, - ) +# as long as uuid does not yet support UUIDv7 use uuid6 +from uuid6 import uuid7 +from ..json_utils.json_objects import ( + Request, + ParamsRequest, + ResultResponse, + ErrorResponse, + Notification, + ParamsNotification, +) json_objects = ( @@ -68,6 +72,18 @@ class MessageTypes(IntEnum): JSON = 1 +class JsonContentTypes(IntFlag): + """Type of the JSON content.""" + INVALID = 0 + REQUEST = 1 + RESPONSE = 2 + RESULT = 4 + ERROR = 8 + BATCH = 16 + RESULT_RESPONSE = RESPONSE + RESULT + ERROR_RESPONSE = RESPONSE + ERROR + + def create_header_frame(conversation_id: Optional[bytes] = None, message_id: Optional[Union[bytes, int]] = 0, message_type: Union[bytes, int, MessageTypes] = MessageTypes.NOT_DEFINED, @@ -138,4 +154,34 @@ def deserialize_data(content: bytes) -> Any: def generate_conversation_id() -> bytes: """Generate a conversation_id.""" - return uuid7(as_type="bytes") # type: ignore + return uuid7().bytes + + +def conversation_id_to_datetime(conversation_id: bytes) -> datetime.datetime: + seconds_since_epoch = int.from_bytes(conversation_id[:6], byteorder="big", signed=False) / 1000 + return datetime.datetime.fromtimestamp(seconds_since_epoch, tz=datetime.timezone.utc) + + +def _get_json_object_type(data: dict[str, Any]) -> JsonContentTypes: + if isinstance(data, dict): + if "method" in data.keys(): + return JsonContentTypes.REQUEST + elif "result" in data.keys(): + return JsonContentTypes.RESULT_RESPONSE + elif "error" in data.keys(): + return JsonContentTypes.ERROR_RESPONSE + return JsonContentTypes.INVALID + + +def get_json_content_type(data: Any) -> 
JsonContentTypes: + if isinstance(data, list): + content = JsonContentTypes.BATCH if data else JsonContentTypes.INVALID + for element in data: + element_typ = _get_json_object_type(element) + if element_typ == JsonContentTypes.INVALID: + return JsonContentTypes.INVALID + else: + content |= element_typ + return content + else: + return _get_json_object_type(data) diff --git a/pyleco/directors/__init__.py b/pyleco/directors/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyleco/directors/coordinator_director.py b/pyleco/directors/coordinator_director.py new file mode 100644 index 000000000..572267d0a --- /dev/null +++ b/pyleco/directors/coordinator_director.py @@ -0,0 +1,55 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+#
+
+from __future__ import annotations
+import logging
+
+from .director import Director
+
+
+log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler())
+
+
+class CoordinatorDirector(Director):
+    """Direct a Coordinator."""
+
+    def __init__(self, actor="COORDINATOR", **kwargs) -> None:
+        super().__init__(actor=actor, **kwargs)
+
+    def get_local_components(self) -> list[str]:
+        """Get the directory of local Components."""
+        return self.ask_rpc(method="send_local_components")
+
+    def get_global_components(self) -> dict[str, list[str]]:
+        """Get the directory of global Components."""
+        return self.ask_rpc(method="send_global_components")
+
+    def get_nodes(self) -> dict[str, str]:
+        """Get all known nodes."""
+        return self.ask_rpc(method="send_nodes")
+
+    def add_nodes(self, coordinators: dict[str, str]) -> None:
+        """Tell the Coordinator about other coordinators (dict)."""
+        return self.ask_rpc(method="add_nodes", nodes=coordinators)
diff --git a/pyleco/directors/data_logger_director.py b/pyleco/directors/data_logger_director.py
new file mode 100644
index 000000000..59ab2d91f
--- /dev/null
+++ b/pyleco/directors/data_logger_director.py
@@ -0,0 +1,84 @@
+#
+# This file is part of the PyLECO package.
+#
+# Copyright (c) 2023-2024 PyLECO Developers
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +import logging +from typing import Any, Optional + +from .director import Director +from ..management.data_logger import ValuingModes, TriggerTypes + + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class DataLoggerDirector(Director): + """Director for the DataLogger. + + :param actor: Name of the actor to direct. + """ + + def __init__(self, actor: str = "dataLogger", **kwargs) -> None: + super().__init__(actor=actor, **kwargs) + + def start_collecting(self, *, + variables: Optional[list[str]] = None, + units: Optional[dict[str, Any]] = None, + trigger_type: Optional[TriggerTypes] = None, + trigger_timeout: Optional[float] = None, + trigger_variable: Optional[str] = None, + valuing_mode: Optional[ValuingModes] = None, + value_repeating: Optional[bool] = None, + ) -> None: + self.ask_rpc(method="start_collecting", + trigger_type=trigger_type, + trigger_timeout=trigger_timeout, + trigger_variable=trigger_variable, + variables=variables, + units=units, + valuing_mode=valuing_mode, + value_repeating=value_repeating, + ) + + def get_last_datapoint(self) -> dict[str, Any]: + """Read the last datapoint.""" + return self.ask_rpc("get_last_datapoint") + + def save_data(self) -> str: + """Save the data and return the name of the file.""" + # increase the timeout as saving might take longer than usual requests + tmo = self.communicator.timeout + self.communicator.timeout = 1000 + name = self.ask_rpc("save_data") + 
self.communicator.timeout = tmo + return name + + def save_data_async(self) -> bytes: + """Save the data asynchronously.""" + return self.ask_rpc_async("save_data") + + def stop_collecting(self) -> None: + """Stop the data acquisition.""" + self.ask_rpc(method="stop_collecting") diff --git a/pyleco/directors/director.py b/pyleco/directors/director.py new file mode 100644 index 000000000..8e6f9fc16 --- /dev/null +++ b/pyleco/directors/director.py @@ -0,0 +1,257 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+#
+
+from __future__ import annotations
+import logging
+from typing import Any, Iterable, Optional, Sequence, Union
+
+from ..core.internal_protocols import CommunicatorProtocol
+from ..utils.communicator import Communicator
+from ..utils.log_levels import get_leco_log_level
+from ..core.serialization import generate_conversation_id
+from ..core.message import Message, MessageTypes
+
+
+log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler())
+
+
+class Director:
+    """Basic director handling.
+
+    A Director can be used as a context manager:
+    .. code::
+
+        with Director() as d:
+            d.get_properties(["property1", "property2"])
+
+    :param actor: Default name of the Actor to communicate with. Stored as :attr:`actor`.
+    :param communicator: A Communicator class to communicate with the actor.
+        If None, create a new Communicator instance.
+    :param name: The name of this Director.
+    """
+
+    def __init__(self, actor: Optional[Union[bytes, str]] = None,
+                 communicator: Optional[CommunicatorProtocol] = None,
+                 name: str = "Director",
+                 **kwargs) -> None:
+        self.actor = actor
+        if communicator is None:
+            communicator = Communicator(name=name, **kwargs)
+            try:
+                communicator.sign_in()
+            except TimeoutError:
+                log.error("Signing in timed out!")
+            kwargs = {}
+            self._own_communicator = True  # whether to sign out or not.
+ else: + self._own_communicator = False + self.communicator = communicator + self.generator = communicator.rpc_generator + super().__init__(**kwargs) + + def close(self) -> None: + if self._own_communicator: + self.communicator.close() + + def sign_out(self) -> None: + """Sign the communicator out.""" + self.communicator.sign_out() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_traceback) -> None: + self.close() + + # Message handling + def ask_message(self, actor: Optional[Union[bytes, str]] = None, + data: Optional[Any] = None, **kwargs) -> Message: + actor = self._actor_check(actor) + log.debug(f"Asking {actor!r} with message '{data}'.") + response = self.communicator.ask(actor, data=data, **kwargs) + log.debug(f"Data '{response.data}' received.") + return response + + def _actor_check(self, actor: Optional[Union[bytes, str]]) -> Union[bytes, str]: + actor = actor or self.actor + if actor is None: + raise ValueError("Some actor has to be specified.") + return actor + + # Helper methods + def _prepare_call_action_params(self, args: tuple[Any, ...], + kwargs: dict[str, Any]) -> dict[str, Any]: + """Generate a params dictionary for the call action method.""" + params: dict[str, Any] = {} + if args: + params["args"] = args + if kwargs: + params["kwargs"] = kwargs + return params + + # Remote control synced + def ask_rpc( + self, + method: str, + actor: Optional[Union[bytes, str]] = None, + additional_payload: Optional[Iterable[bytes]] = None, + extract_additional_payload: bool = False, + **kwargs, + ) -> Any: + """Remotely call the `method` procedure on the `actor` and return the return value.""" + receiver = self._actor_check(actor) + return self.communicator.ask_rpc( + receiver=receiver, + method=method, + additional_payload=additional_payload, + extract_additional_payload=extract_additional_payload, + **kwargs, + ) + + # Component + def get_rpc_capabilities(self, actor: Optional[Union[bytes, str]] = None) -> dict: + """Get 
a list of the remotely callable procedures of the actor.""" + return self.ask_rpc(method="rpc.discover", actor=actor) + + def shut_down_actor(self, actor: Optional[Union[bytes, str]] = None) -> None: + """Stop the actor.""" + self.ask_rpc(method="shut_down", actor=actor) + + def set_actor_log_level(self, level: Union[str, int], actor: Optional[Union[bytes, str]] = None + ) -> None: + """Set the log level of the actor.""" + if isinstance(level, int): + level = get_leco_log_level(level).value + self.ask_rpc("set_log_level", level=level, actor=actor) + + # Actor + def get_parameters(self, parameters: Union[str, Sequence[str]], + actor: Optional[Union[bytes, str]] = None) -> dict[str, Any]: + """Get the values of these `properties` (list, tuple).""" + if isinstance(parameters, str): + parameters = (parameters,) + response = self.ask_rpc(method="get_parameters", parameters=parameters, actor=actor) + if not isinstance(response, dict): + raise ConnectionError(f"{response} returned, but dict expected.") + return response + + def set_parameters(self, parameters: dict[str, Any], + actor: Optional[Union[bytes, str]] = None) -> None: + """Set the `properties` dictionary.""" + self.ask_rpc(method="set_parameters", parameters=parameters, actor=actor) + + def call_action(self, action: str, *args, actor: Optional[Union[bytes, str]] = None, + **kwargs) -> Any: + """Call an action remotely and return its return value. + + :param str action: Name of the action to call. If you have positional arguments, this + parameter has to be the first positional argument + :param \\*args: Arguments for the action to call. + :param str actor: Name of the actor to execute the action. + Defaults to the stored actor name. + :param \\**kwargs: Keyword arguments for the action to call. + """ + params = self._prepare_call_action_params(args, kwargs) + return self.ask_rpc("call_action", action=action, actor=actor, **params) + + # Async methods: Just send, read later. 
+ def send( + self, + actor: Optional[Union[bytes, str]] = None, + data=None, + additional_payload: Optional[Iterable[bytes]] = None, + **kwargs, + ) -> bytes: + """Send a request and return the conversation_id.""" + actor = self._actor_check(actor) + cid0 = generate_conversation_id() + self.communicator.send( + actor, conversation_id=cid0, data=data, additional_payload=additional_payload, **kwargs + ) + return cid0 + + def ask_rpc_async( + self, + method: str, + actor: Optional[Union[bytes, str]] = None, + additional_payload: Optional[Iterable[bytes]] = None, + **kwargs, + ) -> bytes: + """Send a rpc request, the response can be read later with :meth:`read_rpc_response`.""" + string = self.generator.build_request_str(method=method, **kwargs) + return self.send( + actor=actor, + data=string, + message_type=MessageTypes.JSON, + additional_payload=additional_payload, + ) + + def read_rpc_response( + self, + conversation_id: Optional[bytes] = None, + extract_additional_payload: bool = False, + **kwargs, + ) -> Any: + """Read the response value corresponding to a request with a certain `conversation_id`.""" + response_message = self.communicator.read_message(conversation_id=conversation_id, **kwargs) + return self.communicator.interpret_rpc_response( + response_message=response_message, extract_additional_payload=extract_additional_payload + ) + + # Actor + def get_parameters_async(self, parameters: Union[str, Sequence[str]], + actor: Optional[Union[bytes, str]] = None) -> bytes: + """Request the values of these `properties` (list, tuple) and return the conversation_id. + + You can use :meth:`read_rpc_response` to read the response. 
+ """ + if isinstance(parameters, str): + parameters = (parameters,) + # return self.send(data=[[Commands.GET, properties]]) + return self.ask_rpc_async(method="get_parameters", parameters=parameters, actor=actor) + + def set_parameters_async(self, parameters: dict[str, Any], + actor: Optional[Union[bytes, str]] = None) -> bytes: + """Set the `properties` dictionary and return the conversation_id. + + You can use :meth:`read_rpc_response` to read the response. + """ + # return self.send(data=[[Commands.SET, properties]]) + return self.ask_rpc_async(method="set_parameters", parameters=parameters, actor=actor) + + def call_action_async(self, action: str, *args, actor: Optional[Union[bytes, str]] = None, + **kwargs) -> bytes: + """Call a method remotely and return the conversation_id. + + You can use :meth:`read_rpc_response` to read the response. + + :param str action: Name of the action to call. + :param \\*args: Arguments for the action to call. + :param str actor: Name of the actor to execute the action. + Defaults to the stored actor name. + :param \\**kwargs: Keyword arguments for the action to call. + """ + params = self._prepare_call_action_params(args, kwargs) + return self.ask_rpc_async(method="call_action", action=action, actor=actor, **params) diff --git a/pyleco/directors/locking_director.py b/pyleco/directors/locking_director.py new file mode 100644 index 000000000..0fd79ffed --- /dev/null +++ b/pyleco/directors/locking_director.py @@ -0,0 +1,39 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from typing import Optional + +from .director import Director + + +class LockingDirector(Director): + + def lock(self, resource: Optional[str] = None) -> bool: + return self.ask_rpc("lock", resource=resource) + + def unlock(self, resource: Optional[str] = None) -> None: + return self.ask_rpc("unlock", resource=resource) + + def force_unlock(self, resource: Optional[str] = None) -> None: + return self.ask_rpc("force_unlock", resource=resource) diff --git a/pyleco/directors/starter_director.py b/pyleco/directors/starter_director.py new file mode 100644 index 000000000..68adb6e04 --- /dev/null +++ b/pyleco/directors/starter_director.py @@ -0,0 +1,102 @@ +# +# This file is part of the PyLECO package. 
+#
+# Copyright (c) 2023-2024 PyLECO Developers
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+
+from __future__ import annotations
+import logging
+from typing import Optional, Union
+
+from .director import Director
+
+
+log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler())
+
+
+class StarterDirector(Director):
+    """Director for the Starter.
+
+    :param actor: Name of the actor to direct.
+    """
+
+    def __init__(self, actor: str = "starter", **kwargs) -> None:
+        super().__init__(actor=actor, **kwargs)
+
+    def start_tasks(self, names: Union[list[str], str], actor: Optional[Union[bytes, str]] = None
+                    ) -> None:
+        """Start the task or tasks.
+
+        :param names: Single task name or list of task names to start.
+        :param actor: Name of the starter to communicate with.
+ """ + if isinstance(names, str): + names = [names] + self.ask_rpc(method="start_tasks", names=names, actor=actor) + + def restart_tasks(self, names: Union[list[str], str], actor: Optional[Union[bytes, str]] = None + ) -> None: + """Restart the task or tasks. + + :param names: Single task name or list of task names to restart. + :param name: Name of the starter to communicate with. + """ + if isinstance(names, str): + names = [names] + self.ask_rpc(method="restart_tasks", names=names, actor=actor) + + def stop_tasks(self, names: Union[list[str], str], actor: Optional[Union[bytes, str]] = None + ) -> None: + """Stop the task or tasks. + + :param names: Single task name or list of task names to stop. + :param name: Name of the starter to communicate with. + """ + if isinstance(names, str): + names = [names] + self.ask_rpc(method="stop_tasks", names=names, actor=actor) + + def install_tasks(self, names: Union[list[str], str], actor: Optional[Union[bytes, str]] = None + ) -> None: + """Install the tasks. + + :param names: Single task name or list of task names to install. + :param name: Name of the starter to communicate with. + """ + if isinstance(names, str): + names = [names] + self.ask_rpc(method="install_tasks", names=names, actor=actor) + + def status_tasks(self, names: Optional[Union[list[str], str]] = None, + actor: Optional[Union[bytes, str]] = None) -> dict[str, int]: + """Query the status of these tasks and all running ones. + + :param names: List of task names to ask for. + :param name: Name of the starter to communicate with. 
+ """ + if isinstance(names, str): + names = [names] + return self.ask_rpc(method="status_tasks", names=names, actor=actor) + + def list_tasks(self, actor: Optional[Union[bytes, str]] = None) -> list[dict[str, str]]: + """List all available tasks with name and tooltip.""" + return self.ask_rpc(method="list_tasks", actor=actor) diff --git a/pyleco/directors/transparent_director.py b/pyleco/directors/transparent_director.py new file mode 100644 index 000000000..677032fe1 --- /dev/null +++ b/pyleco/directors/transparent_director.py @@ -0,0 +1,144 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+#
+
+from __future__ import annotations
+import logging
+from typing import Generic, Optional, TypeVar, Union
+from warnings import warn
+
+from .director import Director
+
+
+log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler())
+
+
+Device = TypeVar("Device")
+
+
+class RemoteCall:
+    """Descriptor for remotely calling methods.
+
+    You can add methods by simply adding this Descriptor.
+    Whenever this instance is called, it executes :code:`call_method`
+    with the attribute name as `method` parameter. For example:
+
+    .. code::
+
+        class XYZ(BaseDirector):
+            method = RemoteCall("Docstring for that method.")  # add a RemoteCall instance as attr.
+        director = XYZ()
+        director.method(*some_args, **kwargs)  # execute this instance.
+        # equivalent to:
+        director.call_method("method", *some_args, **kwargs)
+
+    :param str name: Name of the method, only necessary if the RemoteCall is added after class
+        creation.
+    :param str doc: Docstring for the method. {name} is replaced by the attribute name of the
+        instance of RemoteCall, in the example by 'method'.
+    """
+
+    def __init__(self, name: str = "", doc: Optional[str] = None, **kwargs) -> None:
+        self._name = name
+        if doc is None:
+            doc = "Call '{name}' at the remote driver."
+        self._doc = doc
+        super().__init__(**kwargs)
+
+    def __set_name__(self, owner, name) -> None:
+        self._name = name
+        self._doc = self._doc.format(name=self._name)
+
+    def __get__(self, obj: Director, objtype=None):
+        if obj is None:
+            return self
+
+        def remote_call(*args, **kwargs):
+            obj.call_action(self._name, *args, **kwargs)
+
+        remote_call.__doc__ = self._doc
+        return remote_call
+
+
+class TransparentDevice:
+    """For all property access, the remote device is called.
+
+    If you want to call methods, you can add them with :class:`RemoteCall` to a subclass of this
+    instrument.
+ """ + + director: Director + + def __init__(self, director: Director): + self.director = director + + def call_action(self, action: str, *args, **kwargs): + self.director.call_action(action, *args, **kwargs) + + def __getattr__(self, name): + if name in dir(self): + return super().__getattribute__(name) + else: + return self.director.get_parameters(parameters=(name,)).get(name) + + def __setattr__(self, name, value) -> None: + if name in dir(self) or name.startswith("_") or name in ("director"): + super().__setattr__(name, value) + else: + self.director.set_parameters(parameters={name: value}) + + # TODO generate a list of capabilities of the actor and return these capabilities during a call + # to __dir__. That enables autocompletion etc. + + +class TransparentDirector(Director, Generic[Device]): + """Director getting/setting all properties remotely. + + It has a :attr:`device` attribute. Whenever you get/set an attribute of `device`, the Director + will call the Actor and try to get/set the corresponding attribute of the Actor's device. + If you want to add method calls, you might use the :class:`RemoteCall` Descriptor to add methods + to a subclass of :class:`TransparentDevice` and give that class to the `device_class` parameter. + For example :code:`method = RemoteCall()` in the class definition will make sure, + that :code:`device.method(*args, **kwargs)` will be executed remotely. + + :param actor: Name of the actor to direct. + :param device_class: Subclass of :class:`TransparentDevice` to use as a device dummy. + :param cls: see :code:`device_class`. + + .. deprecated:: 0.3 + Use :code:`device_class` instead. 
+ """ + + def __init__( + self, + actor: Optional[Union[bytes, str]] = None, + device_class: type[Device] = TransparentDevice, # type: ignore[assignment] + cls: Optional[type[Device]] = None, + **kwargs, + ): + super().__init__(actor=actor, **kwargs) + if cls is not None: + warn("Parameter `cls` is deprecated, use `device_class` instead.", FutureWarning) + device_class = cls + self.device = device_class(director=self) # type: ignore[call-arg] diff --git a/pyleco/errors.py b/pyleco/errors.py index c34e6592c..0cb685dd0 100644 --- a/pyleco/errors.py +++ b/pyleco/errors.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -23,27 +23,32 @@ # from typing import Any +from warnings import warn -from jsonrpcobjects.objects import Error, DataError, ErrorResponse +from .json_utils.json_objects import Error, DataError, ErrorResponse -# JSON specification: -# -32000 to -32099 Server error reserved for implementation-defined server-errors +from .json_utils.errors import NOT_SIGNED_IN, DUPLICATE_NAME, NODE_UNKNOWN, RECEIVER_UNKNOWN # noqa -# TODO define valid error codes: Proposal: -# general error: -32000 -# Routing errors (Coordinator) between -32090 and -32099 -NOT_SIGNED_IN = Error(code=-32090, message="You did not sign in!") -DUPLICATE_NAME = Error(code=-32091, message="The name is already taken.") -NODE_UNKNOWN = Error(code=-32092, message="Node is not known.") -RECEIVER_UNKNOWN = Error(code=-32093, message="Receiver is not in addresses list.") + +warn("The `pyleco.errors` module is deprecated, use the objects from the `pyleco.json_utils` " + "subpackage instead.", FutureWarning) def generate_error_with_data(error: Error, data: Any) -> DataError: - return DataError(code=error.code, message=error.message, 
data=data) + """Generate a DataError from an Error. + + .. deprecated:: 0.3 + Use `DataError.from_error` instead. + """ + return DataError.from_error(error=error, data=data) class CommunicationError(ConnectionError): - """Something went wrong, send an `error_msg` to the recipient.""" + """Something went wrong, send an `error_msg` to the recipient. + + .. deprecated:: 0.3 + Use the definition in `communicator_utils` module instead. + """ def __init__(self, text: str, error_payload: ErrorResponse, *args: Any) -> None: super().__init__(text, *args) diff --git a/pyleco/json_utils/__init__.py b/pyleco/json_utils/__init__.py new file mode 100644 index 000000000..c0acfaca7 --- /dev/null +++ b/pyleco/json_utils/__init__.py @@ -0,0 +1,23 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# diff --git a/pyleco/json_utils/errors.py b/pyleco/json_utils/errors.py new file mode 100644 index 000000000..4f3080b0e --- /dev/null +++ b/pyleco/json_utils/errors.py @@ -0,0 +1,128 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +""" +Based on jsonrpc2-objects + +This module provides exceptions for each JSON-RPC 2.0 error. + +There is one Exception defined for each pre-defined JSON-RPC 2.0 error. +Additionally, there is a ServerError for implementation-defined errors. + +Each exception extends a base exception JSONRPCError. 
+""" + + +from typing import Optional, Type + +from .json_objects import DataError, Error, ErrorType + +# JSONRPC 2.0 defined errors +INVALID_REQUEST = Error(code=-32600, message="Invalid Request") +METHOD_NOT_FOUND = Error(code=-32601, message="Method not found") +INVALID_PARAMS = Error(code=-32602, message="Invalid params") +INTERNAL_ERROR = Error(code=-32603, message="Internal error") +PARSE_ERROR = Error(code=-32700, message="Parse error") + +# -32000 to -32099 Server error reserved for implementation-defined server-errors +# general error: -32000 +SERVER_ERROR = Error(code=-32000, message="Server error") + +# LECO defined errors +# Routing errors (Coordinator) between -32090 and -32099 +NOT_SIGNED_IN = Error(code=-32090, message="You did not sign in!") +DUPLICATE_NAME = Error(code=-32091, message="The name is already taken.") +NODE_UNKNOWN = Error(code=-32092, message="Node is not known.") +RECEIVER_UNKNOWN = Error(code=-32093, message="Receiver is not in addresses list.") + +# Error during deserialization error of the server's response +INVALID_SERVER_RESPONSE = Error(code=-32000, message="Invalid response from server.") + + +class JSONRPCError(Exception): + """Base error that all JSON RPC exceptions extend.""" + + def __init__(self, error: ErrorType) -> None: + msg = f"{error.code}: {error.message}" + self.rpc_error = error + if isinstance(error, DataError): + msg += f"\nError Data: {error.data}" + super().__init__(msg) + + +class ParseError(JSONRPCError): + """Error raised when invalid JSON was received by the server.""" + + def __init__(self, error: Optional[ErrorType] = None) -> None: + super().__init__(error or PARSE_ERROR) + + +class InvalidRequest(JSONRPCError): + """Error raised when the JSON sent is not a valid Request object.""" + + def __init__(self, error: Optional[ErrorType] = None) -> None: + super().__init__(error or INVALID_REQUEST) + + +class MethodNotFound(JSONRPCError): + """Error raised when the method does not exist / is not available.""" 
+ + def __init__(self, error: Optional[ErrorType] = None) -> None: + super().__init__(error or METHOD_NOT_FOUND) + + +class InvalidParams(JSONRPCError): + """Error raised when invalid method parameter(s) are supplied.""" + + def __init__(self, error: Optional[ErrorType] = None) -> None: + super().__init__(error or INVALID_PARAMS) + + +class InternalError(JSONRPCError): + """Error raised when there is an internal JSON-RPC error.""" + + def __init__(self, error: Optional[ErrorType] = None) -> None: + super().__init__(error or INTERNAL_ERROR) + + +class ServerError(JSONRPCError): + """Error raised when a server error occurs.""" + + def __init__(self, error: ErrorType) -> None: + super().__init__(error) + + +def get_exception_by_code(code: int) -> Optional[Type[JSONRPCError]]: + """Get the JSON-RPC error corresponding to an error code. + + :param code: The JSON-RPC error code. + :return: JSON RPC error object or None. + """ + return { + -32600: InvalidRequest, + -32601: MethodNotFound, + -32602: InvalidParams, + -32603: InternalError, + -32700: ParseError, + }.get(code) diff --git a/pyleco/json_utils/json_objects.py b/pyleco/json_utils/json_objects.py new file mode 100644 index 000000000..000dd9e22 --- /dev/null +++ b/pyleco/json_utils/json_objects.py @@ -0,0 +1,158 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +""" +Names based on the classes of jsonrpc2-objects. + +As jsonrpc2-objects uses pydantic models, these objects offer the `dump_model` and `dump_model_json` +methods. +""" + +from __future__ import annotations +from dataclasses import asdict, dataclass +import json +from typing import Any, List, Optional, TypeVar, Union + +ErrorType = Union["DataError", "Error"] +NotificationType = Union["Notification", "ParamsNotification"] +RequestType = Union["ParamsRequest", "Request"] +ResponseType = Union["ErrorResponse", "ResultResponse"] + + +@dataclass +class JsonObject: + def model_dump(self) -> dict[str, Any]: + """Create a dictionary of the attributes.""" + return asdict(self) + + def model_dump_json(self) -> str: + """Create a json representation.""" + return json.dumps(self.model_dump(), separators=(",", ":")) + + +@dataclass +class Request(JsonObject): + """Request the result of a remote call.""" + + id: Union[int, str] + method: str + jsonrpc: str = "2.0" + + +@dataclass +class ParamsRequest(JsonObject): + """Request the result of a remote call with parameters.""" + + id: Union[int, str] + method: str + params: Union[list, dict] + jsonrpc: str = "2.0" + + +@dataclass +class Notification(JsonObject): + """Do a remote call without requesting a response.""" + + method: str + jsonrpc: str = "2.0" + + +@dataclass +class ParamsNotification(JsonObject): + """Do a remote call with parameters without requesting a response.""" + + method: str + params: Union[list, dict] + jsonrpc: str = "2.0" + 
+ +@dataclass +class ResultResponse(JsonObject): + """A response containing a result.""" + + id: Union[int, str] + result: Any + jsonrpc: str = "2.0" + + +@dataclass +class Error(JsonObject): + """An error to be sent via an :class:`ErrorResponse`.""" + + code: int + message: str + + +@dataclass +class DataError(JsonObject): + """An error with data, to be sent via an :class:`ErrorResponse`.""" + + code: int + message: str + data: Any + + @classmethod + def from_error(cls, error: Error, data: Any) -> DataError: + return cls(code=error.code, message=error.message, data=data) + + +@dataclass +class ErrorResponse(JsonObject): + """A response containing an error.""" + + id: Optional[Union[int, str]] + error: ErrorType + jsonrpc: str = "2.0" + + def model_dump(self) -> dict[str, Any]: + pre_dict = asdict(self) + pre_dict["error"] = asdict(self.error) + return pre_dict + +""" +Batch Handling. + +Not included in jsonrpc2-objects, but defined by JSONRPC 2.0 +""" +BatchType = TypeVar("BatchType", RequestType, ResponseType) + + +class BatchObject(List[BatchType]): + """A batch of JSONRPC message objects. + + It works like a list of appropriate message objects and offers the possibility to dump + this batch object to a plain python object or to JSON. + """ + # Parent class is typing.List, as Python<3.9 does not like list[BatchType] + # Not defined by jsonrpc2-objects + + def model_dump(self) -> list[dict[str, Any]]: + return [obj.model_dump() for obj in self] + + def model_dump_json(self) -> str: + return json.dumps(self.model_dump(), separators=(",", ":")) + + +RequestBatch = BatchObject[RequestType] +ResponseBatch = BatchObject[ResponseType] diff --git a/pyleco/json_utils/rpc_generator.py b/pyleco/json_utils/rpc_generator.py new file mode 100644 index 000000000..377ab2e0f --- /dev/null +++ b/pyleco/json_utils/rpc_generator.py @@ -0,0 +1,97 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +import json +import logging +from typing import Any, Union + +from .json_objects import Request, ParamsRequest, DataError, Error, ResultResponse +from .errors import ServerError, get_exception_by_code, JSONRPCError, INVALID_SERVER_RESPONSE + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class RPCGenerator: + """This class can generate a JSONRPC request string and interpret the result string.""" + + _id_counter: int = 1 + + def build_request_str(self, method: str, *args, **kwargs) -> str: + if args and kwargs: + raise ValueError( + "You may not specify list of positional arguments " + "and give additional keyword arguments at the same time." 
+ ) + id = self._id_counter + self._id_counter += 1 + r: Union[Request, ParamsRequest] + if args or kwargs: + r = ParamsRequest(id=id, method=method, params=kwargs or list(args)) + else: + r = Request(id=id, method=method) + return r.model_dump_json() + + def get_result_from_response(self, data: Union[bytes, str, dict]) -> Any: + """Get the result of that object or raise an error.""" + # copied from jsonrpc2-pyclient and modified + try: + # Parse string to JSON. + if not isinstance(data, dict): + json_data = json.loads(data) + else: + json_data = data + + # Raise error if JSON RPC error response. + if error_content := json_data.get("error"): + error: Union[Error, DataError] + if error_content.get("data"): + error = DataError(**error_content) + else: + error = Error(**error_content) + exc = get_exception_by_code(error.code) or ServerError + raise exc(error) + + # Return result if JSON RPC result response. + if "result" in json_data.keys(): + return ResultResponse(**json_data).result + + # Not valid JSON RPC response if it has no error or result. + raise JSONRPCError( + DataError( + code=INVALID_SERVER_RESPONSE.code, + message=INVALID_SERVER_RESPONSE.message, + data=json_data, + ) + ) + except (json.JSONDecodeError, TypeError, AttributeError) as exc: + log.exception(f"{type(exc).__name__}:") + raise JSONRPCError( + DataError( + code=INVALID_SERVER_RESPONSE.code, + message=INVALID_SERVER_RESPONSE.message, + data=data, + ) + ) from exc diff --git a/pyleco/json_utils/rpc_server.py b/pyleco/json_utils/rpc_server.py new file mode 100644 index 000000000..b1e83e625 --- /dev/null +++ b/pyleco/json_utils/rpc_server.py @@ -0,0 +1,28 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +try: + from openrpc import RPCServer # type: ignore # noqa: F401 +except ModuleNotFoundError: + from .rpc_server_definition import RPCServer # type: ignore # noqa: F401 diff --git a/pyleco/json_utils/rpc_server_definition.py b/pyleco/json_utils/rpc_server_definition.py new file mode 100644 index 000000000..8440f90a8 --- /dev/null +++ b/pyleco/json_utils/rpc_server_definition.py @@ -0,0 +1,140 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +import json +import logging +from typing import Any, Callable, Optional, Union + +from .errors import INTERNAL_ERROR, SERVER_ERROR, INVALID_REQUEST +from .json_objects import ResultResponse, ErrorResponse, DataError, ResponseType, ResponseBatch + + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class RPCServer: + def __init__( + self, + title: Optional[str] = None, + version: Optional[str] = None, + debug: bool = False, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.title = title or "RPC Server" + self._version = version or "0.1.0" + self._rpc_methods: dict[str, Callable] = {} + self.method(name="rpc.discover")(self.discover) + + def method(self, name: Optional[str] = None, **kwargs) -> Callable[[Callable], None]: + def method_registrar(method: Callable) -> None: + return self._register_method(name=name or method.__name__, method=method) + return method_registrar + + def _register_method(self, name: str, method: Callable) -> None: + self._rpc_methods[name] = method + + def process_request(self, data: Union[bytes, str]) -> Optional[str]: + try: + json_data = json.loads(data) + result = self.process_request_object(json_data=json_data) + return result.model_dump_json() if result else None + except Exception as exc: + log.exception(f"{type(exc).__name__}:", exc_info=exc) + return ErrorResponse(id=None, error=INTERNAL_ERROR).model_dump_json() + + def process_request_object( + self, json_data: object + ) -> Optional[Union[ResponseType, ResponseBatch]]: + result: Optional[Union[ResponseType, ResponseBatch]] + if isinstance(json_data, list): + result = ResponseBatch() + for element in json_data: + result_element = self._process_single_request(element) + if result_element is not None: + result.append(result_element) + elif isinstance(json_data, dict): + result = self._process_single_request(json_data) + else: + result = ErrorResponse( + id=None, + 
error=DataError.from_error(INVALID_REQUEST, json_data), + ) + if result: + return result + else: + return None + + def _process_single_request( + self, request: dict[str, Any] + ) -> Union[ResultResponse, ErrorResponse, None]: + id_ = None + try: + id_ = request.get("id") + method_name = request.get("method") + if method_name is None: + return ErrorResponse( + id=id_, error=DataError.from_error(INVALID_REQUEST, data=request) + ) + params = request.get("params") + method = self._rpc_methods[method_name] + if isinstance(params, dict): + result = method(**params) + elif isinstance( + params, + list, + ): + result = method(*params) + else: + result = method() + if id_ is not None: + return ResultResponse(id=id_, result=result) + else: + return None + except Exception as exc: + log.exception(f"{type(exc).__name__}:", exc_info=exc) + return ErrorResponse(id=id_, error=SERVER_ERROR) + + def discover(self) -> dict[str, Any]: + """list all the capabilities of the server.""" + result: dict[str, Any] = {"openrpc": "1.2.6"} + result["info"] = {"title": self.title, "version": self._version} + methods: list[dict[str, Any]] = [] + for name, method in self._rpc_methods.items(): + if name == "rpc.discover": + # do not list it + continue + method_dict = {"name": name} + if method.__doc__: + lines = method.__doc__.split("\n") + method_dict["summary"] = lines[0] + if lines[1:]: + method_dict["description"] = " ".join( + line.strip() for line in lines[1:] if line + ).strip() + methods.append(method_dict) + result["methods"] = methods + return result diff --git a/pyleco/management/__init__.py b/pyleco/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyleco/management/data_logger.py b/pyleco/management/data_logger.py new file mode 100644 index 000000000..794f228e4 --- /dev/null +++ b/pyleco/management/data_logger.py @@ -0,0 +1,407 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +from typing import Union, Sequence + +import datetime +try: + from enum import StrEnum # type: ignore +except ImportError: # pragma: no cover + # For python<3.11 + from enum import Enum + + class StrEnum(str, Enum): # type: ignore + pass +import json +import logging +from threading import Lock +from typing import Any, Callable, Optional, Iterable + +try: + import numpy as np # type: ignore[import-not-found] +except ModuleNotFoundError: + def average(values: Sequence[Union[float, int]]) -> float: + return sum(values) / len(values) +else: + average = np.average # type: ignore + +if __name__ == "__main__": # pragma: no cover + from pyleco.utils.timers import RepeatingTimer + from pyleco.utils.extended_message_handler import ExtendedMessageHandler, DataMessage + from pyleco.utils.parser import parser, parse_command_line_parameters + from pyleco.utils.data_publisher import DataPublisher +else: + from ..utils.timers import RepeatingTimer + from ..utils.extended_message_handler import ExtendedMessageHandler, DataMessage + from ..utils.parser import parser, parse_command_line_parameters + from ..utils.data_publisher import DataPublisher + + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) +StrFormatter = logging.Formatter("%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s") + +nan = float("nan") + + +class TriggerTypes(StrEnum): + TIMER = "timer" + VARIABLE = "variable" + NONE = "none" + + +class ValuingModes(StrEnum): + LAST = "last" + AVERAGE = "average" + + +class DataLogger(ExtendedMessageHandler): + """Collect data and save it to the disk, if required. + + The data logger listens to commands via the control protocol and to data via the data protocol. + Whenever triggered, either by a timer or by receiving certain data via data protocol, it + generates a data point. + Each new datapoint is published via the data protocol, such that a user might follow the data + acquisition. 
+ The data point contains values for each variable. The value is either the last one received + since the last data point, or the average of all values received sind the last data point, or + `float("nan")`. + + If desired, the datalogger may save all datapoints to disk. + + .. code:: + + datalogger = DataLogger() + datalogger.listen() # listen until a shutdown signal is received. + # Now you may send a "start_collecting" message to start a measurement. + """ + + # TODO names + tmp: dict[str, list[Any]] # contains all values since last datapoint + lists: dict[str, list[Any]] # contains datapoints. + units: dict[str, Any] # contains the units of the variables TODO TBD what the value is. + last_datapoint: dict[str, Any] + last_save_name: str = "" + + # configuration variables + trigger_type: TriggerTypes = TriggerTypes.NONE + _last_trigger_type: TriggerTypes = TriggerTypes.NONE + trigger_timeout: float = 1 + trigger_variable: str = "" + value_repeating: bool = False + valuing: Callable[[list], Any] + + def __init__(self, name: str = "DataLoggerN", directory: str = ".", **kwargs) -> None: + super().__init__(name=name, **kwargs) + self.directory = directory + self.publisher = DataPublisher(full_name=name) + self.valuing = average + # Initialize values + self.list_lock = Lock() + self.reset_data_storage() + self.units = {} + # TODO add auto_save functionality? + + def register_rpc_methods(self) -> None: + super().register_rpc_methods() + self.register_rpc_method(self.set_valuing_mode) # TODO offer during a measurement? 
+ self.register_rpc_method(self.start_collecting) + self.register_rpc_method(self.save_data) + self.register_rpc_method(self.stop_collecting) + self.register_rpc_method(self.get_last_datapoint) + self.register_rpc_method(self.get_list_length) + self.register_rpc_method(self.get_last_save_name) + self.register_rpc_method(self.get_configuration) + + def __del__(self) -> None: + self.stop_collecting() + + def _listen_setup(self, start_data: Optional[dict[str, Any]] = None, # type: ignore[override] + **kwargs): + poller = super()._listen_setup(**kwargs) + if start_data is not None: + self.start_collecting(**start_data) + return poller + + def _listen_close(self, waiting_time: Optional[int] = None) -> None: + self.stop_collecting() + super()._listen_close(waiting_time=waiting_time) + + def set_full_name(self, full_name: str) -> None: + super().set_full_name(full_name=full_name) + self.publisher.full_name = full_name + + # Data management + def handle_subscription_message(self, data_message: DataMessage) -> None: + sender = data_message.topic.decode() + try: + content: dict[str, Any] = data_message.data # type: ignore + modified_dict = {".".join((sender, k)): v for k, v in content.items()} + except Exception: + log.exception(f"Could not decode message {data_message}.") + else: + self.handle_subscription_data(modified_dict) + + def handle_subscription_data(self, data: dict[str, Any]) -> None: + """Store `data` dict in `tmp`""" + with self.list_lock: + for key, value in data.items(): + try: + self.tmp[key].append(value) + except KeyError: + log.debug("Got value for '%s', but no list present.", key) + if self.trigger_type == TriggerTypes.VARIABLE and self.trigger_variable in data.keys(): + self.make_datapoint() + + def make_datapoint(self) -> dict[str, Any]: + """Store a datapoint.""" + datapoint = self.calculate_data() + self.last_datapoint = datapoint + if self.namespace is not None: + self.publisher.send_data(data=self.last_datapoint) + return datapoint + + def 
calculate_data(self) -> dict[str, Any]: + """Calculate data for a data point and return the data point.""" + datapoint = {} + with self.list_lock: + if 'time' in self.lists.keys(): + now = datetime.datetime.now(datetime.timezone.utc) + today = datetime.datetime.combine( + self.today, datetime.time(), datetime.timezone.utc + ) + time = (now - today).total_seconds() + self.tmp['time'].append(time) + for variable, datalist in self.lists.items(): + value = datapoint[variable] = self.calculate_single_data( + variable, self.tmp[variable] + ) + datalist.append(value) + for key in self.tmp.keys(): + self.tmp[key].clear() + return datapoint + + def calculate_single_data(self, variable: str, tmp: list): + if tmp: + value = self.valuing(tmp) + elif self.value_repeating: + try: + # no lock, as this method is called in in a locked environment! + value = self.lists[variable][-1] + except (KeyError, IndexError): # No last value present. + value = nan + else: + value = nan + return value + + @staticmethod + def last(data: list[Any]) -> Any: + """Return the last value of an iterable with error handling.""" + try: + return data[-1] + except TypeError: + return data + except IndexError: + # empty list + return nan + + # Control + def start_collecting(self, *, + variables: Optional[list[str]] = None, + units: Optional[dict[str, Any]] = None, + trigger_type: Optional[TriggerTypes] = None, + trigger_timeout: Optional[float] = None, + trigger_variable: Optional[str] = None, + valuing_mode: Optional[ValuingModes] = None, + value_repeating: Optional[bool] = None, + ) -> None: + """Start collecting data. + + If you do not give a specific parameter, the value of the last measurement is used again. + """ + self.stop_collecting() + log.info(f"Start collecting data. 
Trigger: {trigger_type}, {trigger_timeout}, " + f"{trigger_variable}; subscriptions: {variables}") + self.today = datetime.datetime.now(datetime.timezone.utc).date() + self.trigger_type = trigger_type or self._last_trigger_type + self._last_trigger_type = self.trigger_type + if trigger_timeout is not None: + self.trigger_timeout = trigger_timeout + if trigger_variable is not None: + self.trigger_variable = trigger_variable + if value_repeating is not None: + self.value_repeating = value_repeating + if self.trigger_type == TriggerTypes.TIMER: + self.start_timer_trigger(timeout=self.trigger_timeout) + self.set_valuing_mode(valuing_mode=valuing_mode) + self.setup_variables(self.lists.keys() if variables is None else variables) + self.units = units if units else {} + + def setup_variables(self, variables: Iterable[str]) -> None: + """Subscribe to the variables.""" + self.reset_data_storage() + subscriptions: set[str] = set() + for variable in variables: + if "." in variable: + # this is the new style: topic is sender name, data is in content + parts = variable.split(".") + if len(parts) == 2: + # assume to be in the same namespace + if self.namespace is None: + log.error(f"Cannot subscribe to '{variable}' as the namespace is not known.") # noqa + continue + parts.insert(0, self.namespace) + variable = ".".join(parts) + subscriptions.add(".".join(parts[:2])) + else: + # old style: topic is variable name + subscriptions.add(variable) + with self.list_lock: + self.lists[variable] = [] + self.tmp[variable] = [] + self.subscribe(topics=subscriptions) + + def reset_data_storage(self) -> None: + """Reset the data storage.""" + with self.list_lock: + self.tmp = {} + self.lists = {} + self.last_datapoint = {} + + def start_timer_trigger(self, timeout: float) -> None: + self.timer = RepeatingTimer(timeout, self.make_datapoint) + self.timer.start() + + def set_valuing_mode(self, valuing_mode: Optional[ValuingModes]) -> None: + if valuing_mode == ValuingModes.LAST: + self.valuing 
= self.last + elif valuing_mode == ValuingModes.AVERAGE: + self.valuing = average + elif valuing_mode is None: + pass # already setup + + def save_data(self, meta: Optional[dict] = None, suffix: str = "", header: str = "") -> str: + """Save the data. + + :param addr: Reply address for the filename. + :param dict meta: The meta data to save. Use e.g. in subclass + Protected keys: units, today, name, configuration, user. + :param str suffix: Suffix to append to the filename. + :return str: Name of the saved file. + """ + # Preparation. + if meta is None: + meta = {} + folder = self.directory + # Pickle the header and lists. + file_name = datetime.datetime.now().strftime("%Y_%m_%dT%H_%M_%S") + suffix + meta.update({ + 'units': self.units, + 'today': self.today.isoformat(), + 'file_name': file_name, + 'logger_name': self.full_name, + 'configuration': self.get_configuration(), + # 'user': self.user_data, # user stored meta data + }) + try: + with self.list_lock: + with open(f"{folder}/{file_name}.json", 'w') as file: + json.dump(obj=(header, self.lists, meta), fp=file) + except TypeError as exc: + log.exception("Some type error during saving occurred.", exc_info=exc) + raise + except PermissionError as exc: + log.exception(f"Writing permission denied for '{folder}'.", exc_info=exc) + raise + else: + # Indicate the name. 
+ log.info(f"Saved data to '{folder}/{file_name}'.") + self.last_save_name = file_name + return file_name + + def stop_collecting(self) -> None: + """Stop the data acquisition.""" + log.info("Stopping to collect data.") + self.trigger_type = TriggerTypes.NONE + self.unsubscribe_all() + try: + self.timer.cancel() + del self.timer + except AttributeError: + pass + + def get_configuration(self) -> dict[str, Any]: + """Get the currently used configuration as a dictionary.""" + config: dict[str, Any] = {} + # Trigger + config['trigger_type'] = self.trigger_type.value + config['trigger_timeout'] = self.trigger_timeout + config['trigger_variable'] = self.trigger_variable + # Value + vm = ValuingModes.LAST if self.valuing == self.last else ValuingModes.AVERAGE + config['valuing_mode'] = vm.value + config['value_repeating'] = self.value_repeating + # Header and Variables. + with self.list_lock: + config['variables'] = list(self.lists.keys()) + config['units'] = self.units + # config['autoSave'] = self.actionAutoSave.isChecked() + return config + + def get_last_datapoint(self) -> dict[str, Any]: + """Read the last datapoint.""" + return self.last_datapoint + + def get_last_save_name(self) -> Union[str, None]: + """Return the name of the last save.""" + return self.last_save_name + + def get_list_length(self) -> int: + """Return the length of the lists.""" + with self.list_lock: + length = len(self.lists[list(self.lists.keys())[0]]) if self.lists else 0 + return length + + +def main() -> None: + """Start a datalogger at script execution.""" + parser.description = "Log data." + parser.add_argument("-d", "--directory", + help="set the directory to save the data to") + + gLog = logging.getLogger() # print all log entries! 
+ kwargs = parse_command_line_parameters(parser=parser, parser_description="Log data.", + logger=gLog) + if not gLog.handlers: + handler = logging.StreamHandler() + handler.setFormatter(StrFormatter) + gLog.addHandler(handler) + + datalogger = DataLogger(log=gLog, **kwargs) + datalogger.listen() + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/pyleco/management/starter.py b/pyleco/management/starter.py new file mode 100644 index 000000000..9c32521cf --- /dev/null +++ b/pyleco/management/starter.py @@ -0,0 +1,300 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +from enum import IntFlag +from importlib import import_module, reload +import logging +import os +from os import path +import sys +import threading +from typing import Any, Optional, Union + +if __name__ != "__main__": + from ..utils.message_handler import MessageHandler + from ..utils.parser import parser, parse_command_line_parameters +else: # pragma: no cover + from pyleco.utils.message_handler import MessageHandler + from pyleco.utils.parser import parser, parse_command_line_parameters + + +log = logging.getLogger("starter") +StrFormatter = logging.Formatter("%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s") + + +modules: dict[str, Any] = {} # A dictionary of the task modules + + +def sanitize_tasks( + tasks: Optional[Union[list[str], tuple[str, ...], str]], +) -> Union[tuple[str, ...], list[str]]: + """Ensure that the tasks are a list of tasks.""" + if tasks is None: + return () + if not isinstance(tasks, (list, tuple)): + tasks = (tasks,) + for task in tasks: + if not isinstance(task, str): + log.error(f"Invalid task name '{task}' received.") + return () + return tasks + + +class Status(IntFlag): + STOPPED = 0 + RUNNING = 1 # currently running + STARTED = 2 # has been started and should be running + INSTALLED = 4 # check regularly, whether it is running, restart if not running anymore + + +class Starter(MessageHandler): + """Listen to communication and start tasks as required. + + The Starter can start functions called `task` with the following signature + ``task(stop_event: threading.Event) -> None: ...``. + Whenever a task should be started, the Starter looks in :attr:`directory` for a module with that + name and loads it. + If the module has been loaded already, it is reloaded to get the newest version. + Attention: dependencies are not reloaded! + Then it starts the method `task` of that module in a separate thread. + + When the Starter stops a task, it sets the corresponding `threading.Event`. 
+ + The first str line of a module is the modules description, available from :meth:`list_tasks`. + + If you write your own task modules, make sure, that you react in a reasonable time to a stop + event. + You can use `while stop_event.wait(timeout)` to execute something regularly. + Pyleco Actors should be able to consume a `threading.Event` in order to be compatible. + + .. code:: + starter = Starter("starter") + starter.listen() + + :param str directory: Absolute path to the directory with the tasks modules. + :param tasks: List of task names to execute on startup. + """ + + def __init__( + self, + name: str = "starter", + directory: Optional[str] = None, + tasks: Optional[list[str]] = None, + **kwargs, + ) -> None: + super().__init__(name=name, **kwargs) + self.threads: dict[str, threading.Thread] = {} # List of threads + self.events: dict[str, threading.Event] = {} # Events to stop the threads. + self.started_tasks: dict[str, int | Status] = {} # A list of all tasks started + if directory is not None: + self.directory = path.normpath(directory) + head, tail = path.split(self.directory) + sys.path.append(head) + self.folder_name = tail + else: + # TODO remove? 
+ self.directory = "test_tasks" + self.folder_name = "test_tasks" + + log.info(f"Starter started with tasks in folder '{self.directory}'.") + self.start_tasks(tasks or ()) + + def register_rpc_methods(self) -> None: + super().register_rpc_methods() + self.register_rpc_method(self.start_tasks) + self.register_rpc_method(self.stop_tasks) + self.register_rpc_method(self.restart_tasks) + self.register_rpc_method(self.install_tasks) + self.register_rpc_method(self.list_tasks) + self.register_rpc_method(self.status_tasks) + self.register_rpc_method(self.uninstall_tasks) + + def _listen_close(self, waiting_time: Optional[int] = None) -> None: + """Close the listening loop.""" + super()._listen_close(waiting_time=waiting_time) + self.stop_all_tasks() + log.info("Starter stopped.") + + def stop_all_tasks(self) -> None: + self.started_tasks = {} + keys = list(self.threads.keys()) + for name in keys: + # set all stop signals + self.events[name].set() + for name in keys: + self.wait_for_stopped_thread(name) + + def heartbeat(self) -> None: + """Check installed tasks at heartbeating.""" + super().heartbeat() + self.check_installed_tasks() + + def start_tasks(self, names: Union[list[str], tuple[str, ...]]) -> None: + for name in sanitize_tasks(names): + self.start_task(name) + + def start_task(self, name: str) -> None: + """Start the `Task` object in a script with `name` in a separate thread.""" + if name in self.threads.keys() and self.threads[name].is_alive(): + log.error(f"Task '{name}' is already running.") + self.started_tasks[name] |= Status.RUNNING + else: + log.info(f"Starting task '{name}'.") + self.started_tasks[name] = self.started_tasks.get(name, 0) | Status.STARTED + try: + if name in modules.keys(): + modules[name] = script = reload(modules[name]) + else: + modules[name] = script = import_module(f"{self.folder_name}.{name}") + except Exception as exc: + log.exception(f"Loading task '{name}' failed.", exc_info=exc) + return + self.events[name] = threading.Event() + 
try: + self.threads[name] = thread = threading.Thread( + target=script.task, args=(self.events[name],), daemon=True + ) + except Exception as exc: + log.exception(f"Creation of task '{name}' failed.", exc_info=exc) + return + thread.start() + + def stop_tasks(self, names: Union[list[str], tuple[str, ...]]) -> None: + for name in sanitize_tasks(names): + self.stop_task(name) + + def stop_task(self, name: str) -> None: + """Stop a task and don't restart it, if it was installed.""" + try: + del self.started_tasks[name] + except KeyError: + pass # Not present + if name not in self.threads.keys(): + return + log.info(f"Stopping task '{name}'.") + self.events[name].set() + self.wait_for_stopped_thread(name) + + def wait_for_stopped_thread(self, name: str) -> None: + thread = self.threads[name] + thread.join(timeout=2) + if thread.is_alive(): + log.warning(f"Task '{name}' did not stop in time!") + # TODO add possibility to stop thread otherwise. + try: + del self.threads[name] + except Exception as exc: + log.exception(f"Deleting task '{name}' failed", exc_info=exc) + + def restart_tasks(self, names: Union[list[str], tuple[str, ...]]) -> None: + for name in sanitize_tasks(names): + self.stop_task(name) + self.start_task(name) + + def install_tasks(self, names: Union[list[str], tuple[str, ...]]) -> None: + for name in sanitize_tasks(names): + self.install_task(name) + + def install_task(self, name: str) -> None: + """Add tasks to the installed list.""" + log.info(f"Install task '{name}'.") + self.started_tasks[name] = self.started_tasks.get(name, 0) | Status.INSTALLED + + def uninstall_tasks(self, names: Union[list[str], tuple[str, ...]]) -> None: + for name in sanitize_tasks(names): + self.uninstall_task(name) + + def uninstall_task(self, name: str) -> None: + """Uninstalls a task without stopping it, if it is already running.""" + self.started_tasks[name] = self.started_tasks.get(name, 0) & ~Status.INSTALLED + + def status_tasks(self, names: Optional[list[str]] = None) 
-> dict[str, Status]: + """Enumerate the status of the started/running tasks and keep the records clean. + + :param list names: List of tasks to look for. + """ + ret_data = {} if names is None else {key: Status.STOPPED for key in names} + for key in list(self.threads.keys()): + if self.threads[key].is_alive(): + self.started_tasks[key] |= Status.RUNNING + else: + self.started_tasks[key] = self.started_tasks.get(key, 0) & ~Status.RUNNING + del self.threads[key] + log.warning(f"Thread '{key}' stopped unexpectedly.") + ret_data.update(self.started_tasks) # type: ignore + return ret_data + + def list_tasks(self) -> list[dict[str, str]]: + """List all tasks (with name and tooltip) available in the folder.""" + try: + filenames = os.listdir(self.directory) + except FileNotFoundError: + log.error(f"Task folder '{self.directory}' not found.") + return [] + tasks = [] + for name in filenames: + if name.endswith(".py") and not name == "__init__.py": + with open(f"{self.directory}/{name}", "r") as file: + # Search for the first line with triple quotes + for i in range(10): + if file.readline().strip() == '"""': + break + tooltip = file.readline() # first line after line with triple quotes + tasks.append({"name": name.replace(".py", ""), "tooltip": tooltip}) + log.debug(f"Tasks found: {tasks}.") + return tasks + + def check_installed_tasks(self) -> None: + """Check whether installed tasks are running.""" + self.status_tasks() + for task, s in self.started_tasks.items(): + if s & Status.INSTALLED and not s & Status.RUNNING: + log.info(f"Starting installed task '{task}' with status {s}.") + self.start_task(task) + + +def main() -> None: + parser.add_argument("tasks", nargs="*", help="Tasks to execute at startup.") + parser.add_argument( + "-d", + "--directory", + help="set the directory to search for tasks, do not add a trailing slash", + ) + + gLog = logging.getLogger() # print all log entries! 
+ if not gLog.handlers: + handler = logging.StreamHandler() + handler.setFormatter(StrFormatter) + gLog.addHandler(handler) + kwargs = parse_command_line_parameters( + parser=parser, parser_description="Start tasks as required.", logger=gLog + ) + + starter = Starter(log=gLog, **kwargs) + starter.listen() + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/pyleco/management/test_tasks/failing_task.py b/pyleco/management/test_tasks/failing_task.py new file mode 100644 index 000000000..aa7e2aab9 --- /dev/null +++ b/pyleco/management/test_tasks/failing_task.py @@ -0,0 +1,3 @@ +# it shall fail. + +raise ValueError("I fail for testing.") diff --git a/pyleco/management/test_tasks/no_task.py b/pyleco/management/test_tasks/no_task.py new file mode 100644 index 000000000..1ed1cbd28 --- /dev/null +++ b/pyleco/management/test_tasks/no_task.py @@ -0,0 +1,3 @@ +""" +Task which can be imported, but not started as method `task` is missing. +""" diff --git a/pyleco/management/test_tasks/test_task.py b/pyleco/management/test_tasks/test_task.py new file mode 100644 index 000000000..62f9fe7fb --- /dev/null +++ b/pyleco/management/test_tasks/test_task.py @@ -0,0 +1,50 @@ +""" +Example scheme for an Actor for pymeasure instruments. 
'test_task' +""" + +from threading import Event +from time import sleep + +from pyleco.actors.actor import Actor + + +class FakeInstrument: # pragma: no cover + _prop1 = 5 + + def __init__(self): + pass + + def connect(self): + pass + + @property + def constant(self): + return 7 + + @property + def prop1(self): + return self._prop1 + + @prop1.setter + def prop1(self, value): + self._prop1 = value + + def triple(self, factor: float = 1) -> float: + return factor * 3 + + +def task(stop_event: Event) -> None: + """The task which is run by the starter.""" + # Initialize + while stop_event.wait(.5): + sleep(.1) + return + with Actor(name="pymeasure_actor", device_class=FakeInstrument, + periodic_reading=-1) as actor: + actor.connect() # connect to the device + + # Continuous loop + actor.listen(stop_event=stop_event) # listen for commands and do the regular readouts + + # Finish + # in listen and __exit__ included diff --git a/pyleco/test.py b/pyleco/test.py index fc13990a7..edbce43ce 100644 --- a/pyleco/test.py +++ b/pyleco/test.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -22,11 +22,12 @@ # THE SOFTWARE. 
# -from typing import Any, Optional, Sequence, Union +from __future__ import annotations +from typing import Any, Iterable, Optional, Sequence, Union from .core.message import Message from .core.internal_protocols import CommunicatorProtocol -from .core.rpc_generator import RPCGenerator +from .json_utils.rpc_generator import RPCGenerator class FakeContext: @@ -125,7 +126,6 @@ def close(self, linger: Optional[int] = None) -> None: class FakePoller: """A fake zmq poller.""" - def __init__(self) -> None: self._sockets: list[FakeSocket] = [] @@ -219,14 +219,26 @@ def __init__(self, remote_class, **kwargs): super().__init__(**kwargs) self.remote_class = remote_class - def ask_rpc(self, method: str, actor: Optional[Union[bytes, str]] = None, **kwargs) -> Any: + def ask_rpc( + self, + method: str, + actor: Optional[Union[bytes, str]] = None, + additional_payload: Optional[Iterable[bytes]] = None, + extract_additional_payload: bool = False, + **kwargs, + ) -> Any: assert hasattr(self.remote_class, method), f"Remote class does not have method '{method}'." self.method = method self.kwargs = kwargs return self.return_value - def ask_rpc_async(self, method: str, actor: Optional[Union[bytes, str]] = None, - **kwargs) -> bytes: + def ask_rpc_async( + self, + method: str, + actor: Optional[Union[bytes, str]] = None, + additional_payload: Optional[Iterable[bytes]] = None, + **kwargs, + ) -> bytes: assert hasattr(self.remote_class, method), f"Remote class does not have method '{method}'." self.method = method self.kwargs = kwargs diff --git a/pyleco/utils/__init__.py b/pyleco/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyleco/utils/base_communicator.py b/pyleco/utils/base_communicator.py new file mode 100644 index 000000000..f164d2c18 --- /dev/null +++ b/pyleco/utils/base_communicator.py @@ -0,0 +1,209 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +import logging +from time import perf_counter +from typing import Optional, Protocol + +import zmq + +from ..core.internal_protocols import CommunicatorProtocol +from ..core.message import Message, MessageTypes +from ..json_utils.errors import JSONRPCError, DUPLICATE_NAME, NOT_SIGNED_IN + + +NOT_SIGNED_IN_ERROR_CODE = str(NOT_SIGNED_IN.code).encode() + + +class MessageBuffer: + _messages: list[Message] + _requested_ids: set[bytes] + + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self._messages = [] + self._requested_ids = set() + + def add_conversation_id(self, conversation_id: bytes) -> None: + """Add a conversation_id such that its response has to be recalled by name.""" + self._requested_ids.add(conversation_id) + + def remove_conversation_id(self, conversation_id: bytes) -> None: + """Remove a conversation_id from the requested ids.""" + self._requested_ids.discard(conversation_id) + + def is_conversation_id_requested(self, conversation_id: bytes) -> bool: + """Check whether this conversation_id is requested by someone.""" + return conversation_id in self._requested_ids + + def add_message(self, message: Message): + """Add a message to the buffer.""" + self._messages.append(message) + + def retrieve_message(self, conversation_id: Optional[bytes] = None) -> Optional[Message]: + """Retrieve the requested message or the next free one for `conversation_id=None`.""" + for i, msg in enumerate(self._messages): + cid = msg.conversation_id + if conversation_id == cid: + self._requested_ids.discard(cid) + return self._messages.pop(i) + elif cid not in self._requested_ids and conversation_id is None: + return self._messages.pop(i) + return None + + def __len__(self): + return len(self._messages) + + +class BaseCommunicator(CommunicatorProtocol, Protocol): + """Abstract class of a Communicator with some logic. 
+ """ + + socket: zmq.Socket + log: logging.Logger + namespace: Optional[str] + message_buffer: MessageBuffer + + # Setup methods for call in init + def setup_message_buffer(self) -> None: + """Create the message buffer variables.""" + self.message_buffer = MessageBuffer() + + def close(self) -> None: + """Close the connection.""" + self.socket.close(1) + + # Context manager + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_traceback) -> None: + self.close() + + # Base communication + def _send_socket_message(self, message: Message) -> None: + self.socket.send_multipart(message.to_frames()) + + def send_message(self, message: Message) -> None: + """Send a message, supplying sender information.""" + if not message.sender: + message.sender = self.full_name.encode() + self.log.debug(f"Sending {message}") + self._send_socket_message(message=message) + + def sign_in(self) -> None: + string = self.rpc_generator.build_request_str(method="sign_in") + try: + msg = self.ask(b"COORDINATOR", data=string, message_type=MessageTypes.JSON) + self.interpret_rpc_response(msg) + except JSONRPCError as exc: + json_error = exc.rpc_error + if json_error.code == DUPLICATE_NAME.code: + self.log.warning("Sign in failed, the name is already used.") + else: + self.log.warning(f"Sign in failed, unknown error '{json_error}'.") + except TimeoutError: + self.log.error("Signing in timed out.") + else: + self.finish_sign_in(msg) + + def finish_sign_in(self, response_message: Message) -> None: + self.namespace = response_message.sender_elements.namespace.decode() + self.log.info(f"Signed in to Node '{self.namespace}'.") + + def heartbeat(self) -> None: + """Send a heartbeat to the router.""" + self.log.debug("heartbeat") + self.send_message(Message(b"COORDINATOR")) + + def sign_out(self) -> None: + try: + self.ask_rpc(b"COORDINATOR", method="sign_out") + except TimeoutError: + self.log.warning("Waiting for sign out response timed out.") + except Exception as exc: 
+ self.log.exception("Signing out failed.", exc_info=exc) + else: + self.finish_sign_out() + + def finish_sign_out(self) -> None: + self.log.info(f"Signed out from Node '{self.namespace}'.") + self.namespace = None + + # Reading messages with buffer + def _read_socket_message(self, timeout: Optional[float] = None) -> Message: + """Read the next message from the socket, without further processing.""" + if self.socket.poll(int((timeout if timeout is not None else self.timeout) * 1000)): + return Message.from_frames(*self.socket.recv_multipart()) + raise TimeoutError("Reading timed out") + + def _find_socket_message(self, conversation_id: Optional[bytes] = None, + timeout: Optional[float] = None, + ) -> Message: + """Find a specific message among socket messages, storing the other ones in the buffer. + + :param conversation_id: Conversation ID to filter for, or next free message if None. + """ + stop = perf_counter() + (timeout if timeout is not None else self.timeout) + while True: + msg = self._read_socket_message(timeout) + self.check_for_not_signed_in_error(message=msg) + cid = msg.conversation_id + if conversation_id == cid: + self.message_buffer.remove_conversation_id(conversation_id=cid) + return msg + elif self.message_buffer.is_conversation_id_requested(conversation_id=cid): + self.message_buffer.add_message(msg) + elif conversation_id is None: + return msg + else: + self.message_buffer.add_message(msg) + if perf_counter() > stop: + # inside the loop to do it at least once, even if timeout is 0 + break + raise TimeoutError("Message not found.") + + def check_for_not_signed_in_error(self, message: Message) -> None: + if (message.sender_elements.name == b"COORDINATOR" + and message.payload + and b"error" in message.payload[0] + and NOT_SIGNED_IN_ERROR_CODE in message.payload[0]): + self.handle_not_signed_in() + + def read_message(self, conversation_id: Optional[bytes] = None, + timeout: Optional[float] = None) -> Message: + message = 
self.message_buffer.retrieve_message(conversation_id=conversation_id) + if message is None: + message = self._find_socket_message(conversation_id=conversation_id, timeout=timeout) + return message + + def handle_not_signed_in(self) -> None: + self.namespace = None + self.sign_in() + self.log.warning("I was not signed in, signing in.") + + def ask_message(self, message: Message, timeout: Optional[float] = None) -> Message: + self.send_message(message=message) + return self.read_message(conversation_id=message.conversation_id, timeout=timeout) diff --git a/pyleco/utils/communicator.py b/pyleco/utils/communicator.py new file mode 100644 index 000000000..b04fa2288 --- /dev/null +++ b/pyleco/utils/communicator.py @@ -0,0 +1,168 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +import logging +from time import perf_counter +from typing import Optional, Union + +import zmq + +from ..core import COORDINATOR_PORT +from ..core.message import Message, MessageTypes +from ..json_utils.rpc_generator import RPCGenerator +from ..json_utils.errors import NOT_SIGNED_IN +from .base_communicator import BaseCommunicator + + +class Communicator(BaseCommunicator): + """A simple Communicator, which sends requests and reads the answer. + + This Communicator does not listen for incoming messages. It only handles messages, whenever + you try to read one. It is intended for sending messages and reading the answer without + implementing threading. + + The communicator can be used in a context, which ensures sign-in and sign-out: + + .. code:: + + with Communicator(name="test") as com: + com.send("receiver") + + :param str host: Hostname + :param int port: Port to connect to. + :param str name: Name to send messages as. + :param int timeout: Timeout in s. + :param bool auto_open: Open automatically a connection upon instantiation. + :param str protocol: Protocol name to use. + :param bool standalone: Whether to bind to the port in standalone mode. 
+ """ + + def __init__( + self, + name: str, + host: str = "localhost", + port: Optional[int] = COORDINATOR_PORT, + timeout: float = 0.1, + auto_open: bool = True, + protocol: str = "tcp", + standalone: bool = False, + **kwargs, + ) -> None: + self.log = logging.getLogger(f"{__name__}.Communicator") + self.host = host + self.port = port + self._conn_details = protocol, standalone + self.timeout = timeout + self.log.info(f"Communicator initialized on {host}:{port}.") + if auto_open: + self.open() + self.name = name + self.namespace = None + self._last_beat: float = 0 + self.rpc_generator = RPCGenerator() + super().__init__(**kwargs) + self.setup_message_buffer() + + def open(self, context: Optional[zmq.Context] = None) -> None: + """Open the connection.""" + context = context or zmq.Context.instance() + self.socket: zmq.Socket = context.socket(zmq.DEALER) + protocol, standalone = self._conn_details + if standalone: + self.socket.bind(f"{protocol}://*:{self.port}") + else: + self.socket.connect(f"{protocol}://{self.host}:{self.port}") + + def close(self) -> None: + """Close the connection.""" + if (not hasattr(self, "socket")) or self.socket.closed: + return + try: + self.sign_out() + except TimeoutError: + self.log.warning("Closing, the sign out failed with a timeout.") + except ConnectionRefusedError: + self.log.warning("Closing, the sign out failed with a refused connection.") + finally: + super().close() + + def reset(self) -> None: + """Reset socket""" + self.close() + self.open() + + def __del__(self) -> None: + self.close() + + def __enter__(self): # -> typing.Self for py>=3.11 + """Called with `with` keyword, returns the Director.""" + if not hasattr(self, "socket"): + self.open() + self.sign_in() + return self + + def send_message(self, message: Message) -> None: + now = perf_counter() + if now > self._last_beat + 15 and message.payload and b"sign_in" not in message.payload[0]: + self.sign_in() + self._last_beat = now + super().send_message(message=message) 
+ + def poll(self, timeout: Optional[float] = None) -> int: + """Check how many messages arrived.""" + if timeout is None: + timeout = self.timeout + return self.socket.poll(timeout=int(timeout * 1000)) # in ms + + def handle_not_signed_in(self): + super().handle_not_signed_in() + raise ConnectionResetError("Have not been signed in, signing in.") + + def ask_message(self, message: Message, timeout: Optional[float] = None) -> Message: + """Send and read the answer, signing in if necessary.""" + for _ in range(2): + try: + return super().ask_message(message=message, timeout=timeout) + except ConnectionResetError: + pass # sign in required, retry + raise ConnectionRefusedError(NOT_SIGNED_IN.message) + + def ask_json(self, receiver: Union[bytes, str], json_string: str, + timeout: Optional[float] = None + ) -> bytes: + message = Message(receiver=receiver, data=json_string, message_type=MessageTypes.JSON) + response = self.ask_message(message=message, timeout=timeout) + return response.payload[0] + + # Messages + def sign_in(self) -> None: + """Sign in to the Coordinator and return the node.""" + self._last_beat = perf_counter() # to not sign in again... + super().sign_in() + if self.namespace is None: + raise ConnectionRefusedError("Sign in failed.") + + def get_capabilities(self, receiver: Union[bytes, str]) -> dict: + return self.ask_rpc(receiver=receiver, method="rpc.discover") diff --git a/pyleco/utils/coordinator_utils.py b/pyleco/utils/coordinator_utils.py new file mode 100644 index 000000000..56e42e28b --- /dev/null +++ b/pyleco/utils/coordinator_utils.py @@ -0,0 +1,535 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +from abc import abstractmethod +from dataclasses import dataclass +import logging +from time import perf_counter +from typing import Any, Protocol, Optional, Union + +import zmq + +from ..core import COORDINATOR_PORT +from ..core.message import Message, MessageTypes +from ..core.serialization import deserialize_data +from ..json_utils.errors import NOT_SIGNED_IN, DUPLICATE_NAME +from ..json_utils.rpc_generator import RPCGenerator +from ..json_utils.json_objects import ErrorResponse, Request + + +log = logging.getLogger(__name__) +log.addHandler(logging.NullHandler()) + + +class CommunicationError(ConnectionError): + """Something went wrong, send an `error_msg` to the recipient.""" + + def __init__(self, text: str, error_payload: ErrorResponse, *args: Any) -> None: + super().__init__(text, *args) + self.error_payload = error_payload + + +class MultiSocket(Protocol): + """Represents a socket with multiple connections.""" + + closed: bool = False + + @abstractmethod + def bind(self, host: str = "", port: Union[int, str] = 0) -> None: ... # pragma: no cover + + @abstractmethod + def unbind(self) -> None: ... # pragma: no cover + + @abstractmethod + def close(self, timeout: int) -> None: ... # pragma: no cover + + @abstractmethod + def send_message(self, identity: bytes, message: Message) -> None: ... # pragma: no cover + + @abstractmethod + def message_received(self, timeout: int = 0) -> bool: ... # pragma: no cover + + @abstractmethod + def read_message(self) -> tuple[bytes, Message]: ... 
# pragma: no cover + + +class ZmqMultiSocket(MultiSocket): + """A MultiSocket using a zmq ROUTER socket.""" + + def __init__(self, context: Optional[zmq.Context] = None, *args, **kwargs) -> None: + context = zmq.Context.instance() if context is None else context + self._sock: zmq.Socket = context.socket(zmq.ROUTER) + super().__init__(*args, **kwargs) + + @property + def closed(self) -> bool: # type: ignore[override] + return self._sock.closed # type: ignore + + def bind(self, host: str = "*", port: Union[str, int] = COORDINATOR_PORT) -> None: + self._sock.bind(f"tcp://{host}:{port}") + + def unbind(self) -> None: + # TODO add the right address + self._sock.unbind("") + + def close(self, timeout: int = 0) -> None: + self._sock.close(linger=timeout) + + def send_message(self, identity: bytes, message: Message) -> None: + self._sock.send_multipart((identity, *message.to_frames())) + + def message_received(self, timeout: int = 0) -> bool: + return bool(self._sock.poll(timeout=timeout)) + + def read_message(self) -> tuple[bytes, Message]: + identity, *response = self._sock.recv_multipart() + return identity, Message.from_frames(*response) + + +class FakeMultiSocket(MultiSocket): + def __init__(self, *args, **kwargs) -> None: + self._messages_read: list[tuple[bytes, Message]] = [] + self._messages_sent: list[tuple[bytes, Message]] = [] + super().__init__(*args, **kwargs) + + def bind(self, host: str = "*", port: Union[int, str] = 5) -> None: + pass + + def unbind(self) -> None: + pass # pragma: no cover + + def close(self, timeout: int) -> None: + self.closed = True # pragma: no cover + + def send_message(self, identity: bytes, message: Message) -> None: + self._messages_sent.append((identity, message)) + + def message_received(self, timeout: int = 0) -> bool: + return len(self._messages_read) > 0 # pragma: no cover + + def read_message(self) -> tuple[bytes, Message]: + return self._messages_read.pop(0) + + +@dataclass +class Component: + """A component connected to the 
Coordinator.""" + + identity: bytes + heartbeat: float + + +class Node: + """Represents a connection to another Node.""" + + def __init__(self, **kwargs) -> None: + self.address: str = "" + self.namespace: bytes = b"" + self.heartbeat: float = -1 + super().__init__(**kwargs) + + def connect(self, address: str) -> None: + self.address = address + + def disconnect(self, closing_time=None) -> None: + raise NotImplementedError("Implement in subclass") # pragma: no cover + + def is_connected(self) -> bool: + return False + + def send_message(self, message: Message) -> None: + raise NotImplementedError("Implement in subclass") # pragma: no cover + + def message_received(self, timeout: int = 0) -> bool: + raise NotImplementedError("Implement in subclass") # pragma: no cover + + def read_message(self, timeout: int = 0) -> Message: + raise NotImplementedError("Implement in subclass") # pragma: no cover + + +class ZmqNode(Node): + """Represents a zmq connection to another node.""" + + def __init__(self, context: Optional[zmq.Context] = None, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._context = context or zmq.Context.instance() + + def connect(self, address: str) -> None: + """Connect to a Coordinator at address.""" + super().connect(address) + self._dealer = self._context.socket(zmq.DEALER) + self._dealer.connect(f"tcp://{address}") + + def disconnect(self, closing_time=None) -> None: + """Close the connection to the Coordinator.""" + try: + self._dealer.close(linger=closing_time) + del self._dealer + except AttributeError: + pass # already deleted. 
+ + def is_connected(self) -> bool: + try: + return not self._dealer.closed + except AttributeError: + return False + + def send_message(self, message: Message) -> None: + """Send a multipart message to the Coordinator.""" + self._dealer.send_multipart(message.to_frames()) + + def message_received(self, timeout: int = 0) -> bool: + return bool(self._dealer.poll(timeout=timeout)) + + def read_message(self, timeout: int = 0) -> Message: + return Message.from_frames(*self._dealer.recv_multipart()) + + +class FakeNode(Node): + def __init__(self, messages_read: Optional[list[Message]] = None, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._messages_sent: list[Message] = [] + self._messages_read: list[Message] = [] if messages_read is None else messages_read + + def connect(self, address) -> None: + super().connect(address) + self._connected = True + + def disconnect(self, closing_time=None) -> None: + self._connected = False + + def is_connected(self) -> bool: + return self._connected + + def send_message(self, message: Message) -> None: + self._messages_sent.append(message) + + def message_received(self, timeout: float = 0) -> bool: + return bool(len(self._messages_read)) + + def read_message(self, timeout: int = 0) -> Message: + return self._messages_read.pop(0) + + +class Directory: + """Maintains the directory with all the connected Components and Coordinators.""" + + def __init__(self, namespace: bytes, full_name: bytes, address: str) -> None: + self._components: dict[bytes, Component] = {} + self._nodes: dict[bytes, Node] = {} # resolution from the namespace + self._node_ids: dict[bytes, Node] = {} # resolution from the id + self._waiting_nodes: dict[str, Node] = {} + self.namespace = namespace + self.full_name = full_name + self._address = address + self.rpc_generator = RPCGenerator() + + def add_component(self, name: bytes, identity: bytes) -> None: + if (component := self._components.get(name)): + if component.identity == identity: + 
component.heartbeat = perf_counter() + else: + log.error(f"Cannot add component {name!r} as the name is already taken.") + raise ValueError(DUPLICATE_NAME.message) + self._components[name] = Component(identity=identity, heartbeat=perf_counter()) + + def remove_component(self, name: bytes, identity: Optional[bytes]) -> None: + component = self._components.get(name) + if component is None: + return # already removed. + elif identity and component.identity != identity: + raise ValueError("Identities do not match.") + del self._components[name] + + def add_node_sender(self, node: Node, address: str, namespace: bytes) -> None: + """Add an sending connection to that node, unless already connected to that namespace.""" + if ":" not in address: + address = f"{address}:{COORDINATOR_PORT}" + if namespace == self.namespace or address == self._address: + raise ValueError("Cannot connect to myself.") + if namespace in self._nodes.keys(): + raise ValueError("Already connected.") + if address in self._waiting_nodes.keys(): + raise ValueError("Already trying to connect.") + log.info(f"Signing in to remote node ad '{address}'.") + node.heartbeat = perf_counter() + node.connect(address) + # node.send_message(Message(receiver=b"COORDINATOR", sender=self.full_name, + # data=[[Commands.CO_SIGNIN]])) + node.send_message( + message=Message( + receiver=b"COORDINATOR", + sender=self.full_name, + message_type=MessageTypes.JSON, + data=self.rpc_generator.build_request_str(method="coordinator_sign_in"), + ) + ) + self._waiting_nodes[address] = node + + def add_node_receiver(self, identity: bytes, namespace: bytes) -> None: + """Add a receiving connection to the node.""" + node = self._nodes.get(namespace) + if node is None: + node = Node() + node.namespace = namespace + elif node in self._node_ids.values(): + raise ValueError("Another Coordinator is known!") + node.heartbeat = perf_counter() + self._node_ids[identity] = node + + def check_unfinished_node_connections(self) -> None: + for key, 
node in list(self._waiting_nodes.items()): + if node.message_received(): + try: + response = node.read_message() + except TypeError as exc: + log.exception("Message decoding failed.", exc_info=exc) + continue + self._handle_node_message(key=key, message=response) + + def _handle_node_message(self, key: str, message: Message) -> None: + data = deserialize_data(content=message.payload[0]) + if isinstance(data, dict) and data.get("result", False) is None: + self._finish_sign_in_to_remote(key=key, message=message) + elif isinstance(data, dict) and (error := data.get("error") is not None): + log.error(f"Coordinator sign in to node {message.sender_elements.namespace!r} failed with '{error}'.") # noqa: E501 + self._remove_waiting_node(key=key) + else: + log.warning( + f"Unknown message {message.payload!r} from {message.sender!r} at DEALER socket '{key}'.") # noqa: E501 + + def _finish_sign_in_to_remote(self, key: str, message: Message) -> None: + node = self._waiting_nodes.pop(key) + sender_namespace = message.sender_elements.namespace + log.info(f"Renaming DEALER socket from temporary '{key}' to {sender_namespace!r}.") + self._nodes[sender_namespace] = node + node.namespace = sender_namespace + self._combine_sender_and_receiver_nodes(node=node) + node.send_message( + Message( + receiver=message.sender, + sender=self.full_name, + message_type=MessageTypes.JSON, + data=( + "[" + + self.rpc_generator.build_request_str( + method="add_nodes", nodes=self.get_nodes_str_dict() + ) + + ", " + + self.rpc_generator.build_request_str( + method="record_components", components=self.get_component_names() + ) + + "]" + ), + ) + ) + + def _combine_sender_and_receiver_nodes(self, node: Node) -> None: + for identity, receiver_node in self._node_ids.items(): + if not receiver_node.is_connected() and receiver_node.namespace == node.namespace: + node.heartbeat = receiver_node.heartbeat + self._node_ids[identity] = node + log.debug(f"Combining the receiver information to node 
{node.namespace!r}.") + break + + def remove_node(self, namespace: bytes, identity: bytes) -> None: + node = self._node_ids.get(identity) + if node and node.namespace == namespace: + self._remove_node_without_checks(namespace=namespace) + else: + raise ValueError("Identities do not match: You are not you!") + + def _remove_node_without_checks(self, namespace: bytes) -> None: + node = self._nodes.get(namespace) + if node is None: + for key, node in list(self._node_ids.items()): + if node.namespace == namespace: + del self._node_ids[key] + break + else: + del self._nodes[namespace] + self._remove_value_from_dict(value=node, dictionary=self._node_ids) + + def _remove_value_from_dict(self, value, dictionary: dict) -> None: + for key, v in list(dictionary.items()): + if value == v: + del dictionary[key] + + def _remove_waiting_node(self, key: str) -> None: + del self._waiting_nodes[key] + + def update_heartbeat(self, sender_identity: bytes, message: Message) -> None: + sender = message.sender_elements + if sender.namespace == b"" or sender.namespace == self.namespace: + self._update_local_sender_heartbeat(sender_identity=sender_identity, message=message) + elif sender_identity in self._node_ids.keys(): + # Message from another Coordinator's DEALER socket + self._node_ids[sender_identity].heartbeat = perf_counter() + elif ( + sender.name == b"COORDINATOR" + and message.payload + and b"coordinator_sign_" in message.payload[0] # "method": " + ): + pass # Coordinator signing in/out, no heartbeat yet + else: + # Either a Component communicates with the wrong namespace setting or + # the other Coordinator is not known yet (reconnection) + raise CommunicationError( + f"Message payload '{message.payload}' from not signed in Component {message.sender!r} or node.", # noqa: E501 + error_payload=ErrorResponse(id=None, error=NOT_SIGNED_IN)) + + def _update_local_sender_heartbeat(self, sender_identity: bytes, message: Message) -> None: + component = 
self._components.get(message.sender_elements.name) + if component: + if sender_identity == component.identity: + component.heartbeat = perf_counter() + else: + raise CommunicationError( + DUPLICATE_NAME.message, + error_payload=ErrorResponse(id=None, error=DUPLICATE_NAME) + ) + elif message.payload and (b'"sign_in"' in message.payload[0] + or b'"sign_out"' in message.payload[0]): + pass # Signing in, no heartbeat yet + else: + raise CommunicationError( + f"Message payload '{message.payload}' from not signed in Component {message.sender!r}.", # noqa: E501 + error_payload=ErrorResponse(id=None, error=NOT_SIGNED_IN)) + + def find_expired_components(self, expiration_time: float) -> list[tuple[bytes, bytes]]: + """Find expired components, return those to admonish, and remove those too old.""" + now = perf_counter() + to_admonish = [] + for name, component in list(self._components.items()): + if now > component.heartbeat + 3 * expiration_time: + self.remove_component(name=name, identity=None) + elif now > component.heartbeat + expiration_time: + to_admonish.append((component.identity, name)) + return to_admonish + + def find_expired_nodes(self, expiration_time: float) -> None: + """Find expired nodes, admonish or remove them.""" + self._find_expired_connected_nodes(expiration_time) + self._find_expired_waiting_nodes(expiration_time) + + def _find_expired_waiting_nodes(self, expiration_time: float) -> None: + now = perf_counter() + for key, node in list(self._waiting_nodes.items()): + if now > node.heartbeat + 3 * expiration_time: + log.info(f"Removing unresponsive node at address '{key}'.") + self._remove_waiting_node(key=key) + + def _find_expired_connected_nodes(self, expiration_time: float) -> None: + now = perf_counter() + for identity, node in list(self._node_ids.items()): + self._check_node_expiration(expiration_time, now, node=node, identity=identity) + + def _check_node_expiration( + self, + expiration_time: float, + now: float, + node: Node, + identity: bytes = 
b"", + ) -> None: + if now > node.heartbeat + 3 * expiration_time: + log.info(f"Node {node.namespace!r} at {identity!r} is unresponsive, removing.") + self._remove_node_without_checks(namespace=node.namespace) + elif now > node.heartbeat + expiration_time: + if node.is_connected(): + log.debug(f"Node {node.namespace!r} expired with identity {identity!r}, pinging.") + node.send_message( + Message( + receiver=node.namespace + b".COORDINATOR", + sender=self.full_name, + message_type=MessageTypes.JSON, + data=Request(id=0, method="pong"), + ) + ) + + def get_components(self) -> dict[bytes, Component]: + return self._components + + def get_component_names(self) -> list[str]: + return [key.decode() for key in self._components.keys()] + + def get_component_id(self, name: bytes) -> bytes: + try: + return self._components[name].identity + except KeyError: + raise ValueError(f"Component {name!r} is not known.") + + def get_node(self, namespace: bytes) -> Node: + try: + return self._nodes[namespace] + except KeyError: + raise ValueError("Node not known.") + + def get_node_id(self, namespace: bytes) -> bytes: + for id, node in self._node_ids.items(): + if node.namespace == namespace: + return id + raise ValueError(f"No receiving connection to namespace {namespace!r} found.") + + def get_nodes(self) -> dict[bytes, Node]: + return self._nodes + + def get_nodes_str_dict(self) -> dict[str, str]: + nodes = {self.namespace.decode(): self._address} + for key, node in self._nodes.items(): + nodes[key.decode()] = node.address + return nodes + + def get_node_ids(self) -> dict[bytes, Node]: + return self._node_ids + + def send_node_message(self, namespace: bytes, message: Message) -> None: + try: + node = self._nodes[namespace] + except KeyError: + raise ValueError(f"Node {namespace!r} is not known.") + else: + node.send_message(message) + + def sign_out_from_node(self, namespace: bytes) -> None: + try: + node = self._nodes[namespace] + except KeyError: + raise ValueError("Node is not 
known.") + node.send_message( + Message( + receiver=b".".join((namespace, b"COORDINATOR")), + sender=self.full_name, + message_type=MessageTypes.JSON, + data=self.rpc_generator.build_request_str(method="coordinator_sign_out"), + ) + ) + node.disconnect() + self._remove_node_without_checks(namespace) + + def sign_out_from_all_nodes(self) -> None: + nodes = list(self._nodes.keys()) + log.info(f"Signing out from fellow Coordinators: {', '.join([n.decode() for n in nodes])}.") + for namespace in nodes: + self.sign_out_from_node(namespace=namespace) diff --git a/pyleco/utils/data_publisher.py b/pyleco/utils/data_publisher.py new file mode 100644 index 000000000..04f726c96 --- /dev/null +++ b/pyleco/utils/data_publisher.py @@ -0,0 +1,128 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +import logging +import pickle +from typing import Any, Iterable, Optional, Union + +import zmq + +from ..core import PROXY_RECEIVING_PORT +from ..core.data_message import DataMessage, MessageTypes + + +class DataPublisher: + """ + Publishing data via the LECO data protocol. + + :param str full_name: Name of the publishing Component + :param str address: Address of the server, default is localhost. + :param int port: Port of the server, defaults to 11100, default proxy. + :param log: Logger to log to. + + Sending :class:`DataMessage` via the data protocol. + + Quantities may be expressed as a (magnitude number, units str) tuple. + """ + + full_name: str + + def __init__( + self, + full_name: str, + host: str = "localhost", + port: int = PROXY_RECEIVING_PORT, + log: Optional[logging.Logger] = None, + context: Optional[zmq.Context] = None, + **kwargs, + ) -> None: + if log is None: + self.log = logging.getLogger(f"{__name__}.Publisher") + else: + self.log = log.getChild("Publisher") + self.log.info(f"Publisher started at {host}:{port}.") + context = context or zmq.Context.instance() + self.socket: zmq.Socket = context.socket(zmq.PUB) + self.socket.connect(f"tcp://{host}:{port}") + self.full_name = full_name + super().__init__(**kwargs) + + def __del__(self) -> None: + self.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_traceback) -> None: + self.close() + + def close(self) -> None: + self.socket.close(1) + + def __call__(self, data: Any) -> None: + """Publish `data`.""" + self.send_data(data=data) + + def send_message(self, message: DataMessage) -> None: + """Send a data protocol message.""" + self.socket.send_multipart(message.to_frames()) + + def send_data( + self, + data: Any, + topic: Optional[Union[bytes, str]] = None, + conversation_id: Optional[bytes] = None, + message_type: Union[MessageTypes, int] = MessageTypes.NOT_DEFINED, + additional_payload: 
Optional[Iterable[bytes]] = None, + ) -> None: + """Send the `data` via the data protocol.""" + message = DataMessage( + topic=topic or self.full_name, + data=data, + conversation_id=conversation_id, + message_type=message_type, + additional_payload=additional_payload, + ) + self.send_message(message) + + def send_legacy(self, data: dict[str, Any]) -> None: + for key, value in data.items(): + # 234 is message type for legacy pickle: publish variable name as topic and pickle it + self.send_data(topic=key, data=pickle.dumps(value), message_type=234) + + def set_full_name(self, full_name: str) -> None: + """Set the full name of the data publisher. + + This method is useful for the listener's handler. That way a change of the listener's + name or namespace is transferred easily to the publisher as well. + + .. code:: + + listener = Listener() + publisher = data_publisher(full_name=listener.full_name) + listener.message_handler.register_on_name_change_method(publisher.set_full_name) + + """ + self.full_name = full_name diff --git a/pyleco/utils/events.py b/pyleco/utils/events.py new file mode 100644 index 000000000..38e6d3873 --- /dev/null +++ b/pyleco/utils/events.py @@ -0,0 +1,44 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from typing import Protocol + + +class Event(Protocol): + """Check compatibility with threading.Event.""" + def is_set(self) -> bool: ... # pragma: no cover + + def set(self) -> None: ... # pragma: no cover + + +class SimpleEvent(Event): + """A simple Event if the one from `threading` module is not necessary.""" + def __init__(self) -> None: + self._flag = False + + def is_set(self) -> bool: + return self._flag + + def set(self) -> None: + self._flag = True diff --git a/pyleco/utils/extended_message_handler.py b/pyleco/utils/extended_message_handler.py new file mode 100644 index 000000000..02817d16a --- /dev/null +++ b/pyleco/utils/extended_message_handler.py @@ -0,0 +1,128 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
from __future__ import annotations
import json
import pickle
from typing import Optional

import zmq

from .message_handler import MessageHandler
from ..core import PROXY_SENDING_PORT
from ..core.data_message import DataMessage
from ..core.internal_protocols import SubscriberProtocol


class ExtendedMessageHandler(MessageHandler, SubscriberProtocol):
    """Message handler, which handles also data protocol messages.

    :param name: Name to listen under for control commands.
    :param context: zmq context to use; defaults to the global instance.
    :param host: Host name (or IP address) of the Coordinator.
    :param data_host: Host name of the data proxy; defaults to `host`.
    :param data_port: Port of the data proxy's publishing socket.
    """

    def __init__(self,
                 name: str,
                 context: Optional[zmq.Context] = None,
                 host: str = "localhost",
                 data_host: Optional[str] = None,
                 data_port: int = PROXY_SENDING_PORT,
                 **kwargs) -> None:
        if context is None:
            context = zmq.Context.instance()
        super().__init__(name=name, context=context, host=host, **kwargs)
        self._subscriptions: list[bytes] = []  # List of all subscriptions
        self.subscriber: zmq.Socket = context.socket(zmq.SUB)
        if data_host is None:
            data_host = host
        self.subscriber.connect(f"tcp://{data_host}:{data_port}")

    def register_rpc_methods(self) -> None:
        super().register_rpc_methods()
        self.register_rpc_method(self.subscribe)
        self.register_rpc_method(self.unsubscribe)
        self.register_rpc_method(self.unsubscribe_all)

    def close(self) -> None:
        """Close the subscriber socket as well as the base sockets."""
        self.subscriber.close(1)
        return super().close()

    def _listen_setup(self, **kwargs) -> zmq.Poller:
        # Additionally poll the subscriber socket for incoming data messages.
        poller = super()._listen_setup(**kwargs)
        poller.register(self.subscriber, zmq.POLLIN)
        return poller

    def _listen_loop_element(self, poller: zmq.Poller, waiting_time: Optional[int]
                             ) -> dict[zmq.Socket, int]:
        socks = super()._listen_loop_element(poller=poller, waiting_time=waiting_time)
        if self.subscriber in socks:
            self.read_subscription_message()
            del socks[self.subscriber]
        return socks

    def read_subscription_message(self) -> None:
        """Read a message from the data protocol."""
        try:
            message = DataMessage.from_frames(*self.subscriber.recv_multipart())
        except Exception as exc:
            # BUG FIX: `exc` used to be passed as a %-interpolation argument for
            # a message without placeholders, which trips the logging machinery;
            # pass it as `exc_info` instead (consistent with MessageHandler.send).
            self.log.exception("Invalid data", exc_info=exc)
            return
        if message.message_type > 200:
            # legacy style: topic is a variable name!
            self.handle_full_legacy_subscription_message(message)
        else:
            self.handle_subscription_message(message)

    def handle_subscription_message(self, message: DataMessage) -> None:
        """Handle a message read from the data protocol and handle it."""
        raise NotImplementedError

    def handle_full_legacy_subscription_message(self, message: DataMessage) -> None:
        """Handle an illegal subscription message (topic is variable name)."""
        if message.message_type == 234:
            # 234: pickled value
            value = pickle.loads(message.payload[0])
        elif message.message_type == 235:
            # 235: json encoded value
            value = json.loads(message.payload[0])
        else:
            raise ValueError("Legacy long message cannot be handled")
        self.handle_subscription_data({message.topic.decode(): value})

    def handle_subscription_data(self, data: dict) -> None:
        # TODO deprecated
        raise NotImplementedError

    def subscribe_single(self, topic: bytes) -> None:
        """Subscribe to a single topic, unless already subscribed."""
        if topic not in self._subscriptions:
            self.log.debug(f"Subscribing to {topic!r}.")
            self._subscriptions.append(topic)
            self.subscriber.subscribe(topic)
        else:
            self.log.info(f"Already subscribed to {topic!r}.")

    def unsubscribe_single(self, topic: bytes) -> None:
        """Unsubscribe from a single topic."""
        self.log.debug(f"Unsubscribing from {topic!r}.")
        self.subscriber.unsubscribe(topic)
        if topic in self._subscriptions:
            self._subscriptions.remove(topic)

    def unsubscribe_all(self) -> None:
        """Unsubscribe from all subscriptions."""
        while self._subscriptions:
            self.unsubscribe_single(self._subscriptions.pop())
+ On the other side, it offers the :meth:`get_communicator` method, which returns a + :class:`Communicator`, offering communication to the network. + + Call :meth:`.start_listen()` to actually listen. + + ..code:: + + listener = Listener() + listener.start_listen() # starts a message handler in another thread + communicator = listener.get_communicator() # get a Communicator endpoint for this thread + response = communicator.ask_message(some_message_object) + + :param name: Name to listen under for control commands. + :param int data_port: Port number for the data protocol. + :param logger: Logger instance whose logs should be published. Defaults to "__main__". + """ + + communicator: CommunicatorPipe + message_handler: PipeHandler + + def __init__(self, + name: str, + host: str = "localhost", + port: int = COORDINATOR_PORT, + data_host: Optional[str] = None, + data_port: int = PROXY_SENDING_PORT, + logger: Optional[logging.Logger] = None, + timeout: float = 1, + **kwargs) -> None: + super().__init__(**kwargs) + log.info(f"Start Listener for '{name}'.") + + self.name = name + self.logger = logger + self.timeout = timeout + + self.coordinator_address = host, port + self.data_address = data_host or host, data_port + + def close(self) -> None: + """Close everything.""" + self.stop_listen() + + @property + def name(self) -> str: + try: + return self.communicator.name + except AttributeError: + return self._name + + @name.setter + def name(self, value: str) -> None: + self._name = value + try: + self.communicator.name = value + except AttributeError: + pass + + # Methods to control the Listener + def start_listen(self) -> None: + """Start to listen in a thread.""" + self.stop_listen() + self.stop_event = Event() + self.thread = Thread( + target=self._listen, + args=( + self.name, + self.stop_event, + self.coordinator_address[0], + self.coordinator_address[1], + self.data_address[0], + self.data_address[1], + )) + self.thread.daemon = True + self.thread.start() + for _ in 
range(10): + sleep(0.05) + try: + self.communicator: CommunicatorPipe = self.message_handler.get_communicator( + timeout=self.timeout) + except AttributeError: + pass + else: + log.addHandler(self.message_handler.log_handler) + if self.logger is not None: + self.logger.addHandler(self.message_handler.log_handler) + return + raise TimeoutError("PipeHandler has not started after 0.5 s.") + + def get_communicator(self, **kwargs) -> CommunicatorPipe: + """Get the communicator for the calling thread, creating one if necessary.""" + kwargs.setdefault("timeout", self.timeout) + return self.message_handler.get_communicator(**kwargs) + + def register_rpc_method(self, method: Callable[..., Any], **kwargs) -> None: + """Register a method for calling with the current message handler. + + If you restart the listening, you have to register the method anew. + """ + self.message_handler.register_rpc_method(method=method, **kwargs) + + def register_binary_rpc_method( + self, + method: Callable[..., Union[Any, tuple[Any, list[bytes]]]], + accept_binary_input: bool = False, + return_binary_output: bool = False, + **kwargs, + ) -> None: + """Register a binary method for calling with the current message handler. + + If you restart the listening, you have to register the method anew. + """ + self.message_handler.register_binary_rpc_method( + method=method, + accept_binary_input=accept_binary_input, + return_binary_output=return_binary_output, + **kwargs, + ) + + def stop_listen(self) -> None: + """Stop the listener Thread.""" + try: + if self.thread.is_alive(): + log.debug("Stopping listener thread.") + self.stop_event.set() + self.thread.join() + self.message_handler.close() + log.removeHandler(self.message_handler.log_handler) + if self.logger is not None: + self.logger.removeHandler(self.message_handler.log_handler) + except AttributeError: + pass + + """ + Methods below are executed in the thread, DO NOT CALL DIRECTLY! 
+ """ + + def _listen(self, name: str, stop_event: Event, coordinator_host: str, coordinator_port: int, + data_host: str, data_port: int) -> None: + """Start a PipeHandler, which has to be executed in a separate thread.""" + self.message_handler = PipeHandler(name, host=coordinator_host, port=coordinator_port, + data_host=data_host, data_port=data_port) + self.message_handler.listen(stop_event=stop_event) diff --git a/pyleco/utils/log_levels.py b/pyleco/utils/log_levels.py new file mode 100644 index 000000000..66734c2e9 --- /dev/null +++ b/pyleco/utils/log_levels.py @@ -0,0 +1,43 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
from enum import IntEnum
import logging


class PythonLogLevels(IntEnum):
    """Attribution of Python log levels to the LECO ones."""

    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    CRITICAL = logging.CRITICAL


def get_leco_log_level(log_level: int) -> "LogLevels":
    """Get the LECO log level from an integer (or python log level) if possible.

    :raises ValueError: if `log_level` is not one of the standard Python levels.
    """
    # Deferred import keeps this module importable without the core package.
    from ..core.leco_protocols import LogLevels
    return LogLevels[PythonLogLevels(log_level).name]
from __future__ import annotations
from functools import wraps
from json import JSONDecodeError
import logging
import time
from typing import Any, Callable, Optional, Union, TypeVar

import zmq

from ..core import COORDINATOR_PORT
from ..core.leco_protocols import ExtendedComponentProtocol
from ..core.message import Message, MessageTypes
from ..core.serialization import JsonContentTypes, get_json_content_type
from ..json_utils.errors import JSONRPCError
from ..json_utils.rpc_generator import RPCGenerator
from ..json_utils.rpc_server import RPCServer
from .base_communicator import BaseCommunicator
from .log_levels import PythonLogLevels
from .zmq_log_handler import ZmqLogHandler
from .events import Event, SimpleEvent


# Parameters
heartbeat_interval = 10  # s


ReturnValue = TypeVar("ReturnValue")


class MessageHandler(BaseCommunicator, ExtendedComponentProtocol):
    """Maintain connection to the Coordinator and listen to incoming messages.

    This class is intended to run in a thread, maintain the connection to the coordinator
    with heartbeats and timely responses. If a message arrives which is not connected to the control
    protocol itself (e.g. ping message), another method is called.
    You may subclass this class in order to handle these messages as desired.

    You may use it as a context manager.

    :param str name: Name to listen to and to publish values with.
    :param str host: Host name (or IP address) of the Coordinator to connect to.
    :param int port: Port number of the Coordinator to connect to.
    :param protocol: Connection protocol.
    :param log: Logger instance whose logs should be published. Defaults to `getLogger("__main__")`.
    """

    name: str

    # Message currently being processed; available to rpc methods.
    current_message: Message
    # Extra binary frames appended to the next response, set by binary rpc methods.
    additional_response_payload: Optional[list[bytes]] = None

    def __init__(
        self,
        name: str,
        host: str = "localhost",
        port: int = COORDINATOR_PORT,
        protocol: str = "tcp",
        log: Optional[logging.Logger] = None,
        context: Optional[zmq.Context] = None,
        **kwargs,
    ):
        self.name = name
        self._namespace: Union[str, None] = None
        self._full_name: str = name
        self.rpc = RPCServer(title=name)
        self.rpc_generator = RPCGenerator()
        self.register_rpc_methods()

        self.setup_logging(log=log)
        self.setup_socket(
            host=host, port=port, protocol=protocol, context=context or zmq.Context.instance()
        )

        super().__init__(**kwargs)
        self.setup_message_buffer()

    @property
    def namespace(self) -> Union[str, None]:
        return self._namespace

    @namespace.setter
    def namespace(self, value: Union[str, None]) -> None:
        self._namespace = value
        full_name = self.name if value is None else ".".join((value, self.name))
        self.set_full_name(full_name=full_name)

    def set_full_name(self, full_name: str) -> None:
        """Store the full name and propagate it to the rpc server and log handler."""
        self._full_name = full_name
        self.rpc.title = full_name
        self.log_handler.full_name = full_name

    @property
    def full_name(self) -> str:
        return self._full_name

    def setup_logging(self, log: Optional[logging.Logger]) -> None:
        """Configure logging: publish logs of `log` (default "__main__") via zmq."""
        if log is None:
            log = logging.getLogger("__main__")
        # Add the ZmqLogHandler to the logger, unless it has already a Handler.
        first_pub_handler = True  # we expect to be the first ZmqLogHandler
        for h in log.handlers:
            if isinstance(h, ZmqLogHandler):
                first_pub_handler = False
                self.log_handler = h
                break
        if first_pub_handler:
            self.log_handler = ZmqLogHandler()
            log.addHandler(self.log_handler)
        self.root_logger = log
        self.log = self.root_logger.getChild("MessageHandler")  # for cooperation

    def setup_socket(self, host: str, port: int, protocol: str, context: zmq.Context) -> None:
        """Create the DEALER socket and connect it to the Coordinator."""
        self.socket: zmq.Socket = context.socket(zmq.DEALER)
        self.log.info(f"MessageHandler connecting to {host}:{port}")
        self.socket.connect(f"{protocol}://{host}:{port}")

    def register_rpc_method(self, method: Callable[..., Any], **kwargs) -> None:
        """Register a method to be available via rpc calls."""
        self.rpc.method(**kwargs)(method)

    def _handle_binary_return_value(
        self, return_value: tuple[ReturnValue, list[bytes]]
    ) -> ReturnValue:
        # Stash the binary frames for the response and return only the JSON part.
        self.additional_response_payload = return_value[1]
        return return_value[0]

    @staticmethod
    def _pass_through(return_value: ReturnValue) -> ReturnValue:
        return return_value

    def _generate_binary_capable_method(
        self,
        method: Callable[..., Union[ReturnValue, tuple[ReturnValue, list[bytes]]]],
        accept_binary_input: bool = False,
        return_binary_output: bool = False,
    ) -> Callable[..., ReturnValue]:
        """Wrap `method` to feed it binary payload frames and/or extract binary results."""
        returner = self._handle_binary_return_value if return_binary_output else self._pass_through
        if accept_binary_input is True:

            @wraps(method)
            def modified_method(*args, **kwargs) -> ReturnValue:  # type: ignore
                # Inject the extra payload frames as the last positional argument
                # (replacing an explicit None) or as `additional_payload` kwarg.
                if args:
                    args_l = list(args)
                    if args_l[-1] is None:
                        args_l[-1] = self.current_message.payload[1:]
                    else:
                        args_l.append(self.current_message.payload[1:])
                    args = args_l  # type: ignore[assignment]
                else:
                    kwargs["additional_payload"] = self.current_message.payload[1:]
                return_value = method(
                    *args, **kwargs
                )
                return returner(return_value=return_value)  # type: ignore
        else:

            @wraps(method)
            def modified_method(*args, **kwargs) -> ReturnValue:
                return_value = method(*args, **kwargs)
                return returner(return_value=return_value)  # type: ignore

        doc_addition = (
            f"(binary{' input' * accept_binary_input}{' output' * return_binary_output} method)"
        )
        try:
            modified_method.__doc__ += "\n" + doc_addition  # type: ignore[operator]
        except TypeError:
            modified_method.__doc__ = doc_addition
        return modified_method  # type: ignore

    def register_binary_rpc_method(
        self,
        method: Callable[..., Union[Any, tuple[Any, list[bytes]]]],
        accept_binary_input: bool = False,
        return_binary_output: bool = False,
        **kwargs,
    ) -> None:
        """Register a method which accepts binary input and/or returns binary values.

        :param accept_binary_input: the method must accept the additional payload as an
            `additional_payload=None` parameter (default value must be present as `None`!).
        :param return_binary_output: the method must return a tuple of a JSON-able python object
            (e.g. `None`) and of a list of bytes objects, to be sent as additional payload.
        """
        modified_method = self._generate_binary_capable_method(
            method=method,
            accept_binary_input=accept_binary_input,
            return_binary_output=return_binary_output,
        )
        self.register_rpc_method(modified_method, **kwargs)

    def register_rpc_methods(self) -> None:
        """Register methods for RPC."""
        self.register_rpc_method(self.shut_down)
        self.register_rpc_method(self.set_log_level)
        self.register_rpc_method(self.pong)

    # Base communication
    def send(
        self,
        receiver: Union[bytes, str],
        conversation_id: Optional[bytes] = None,
        data: Optional[Any] = None,
        **kwargs,
    ) -> None:
        """Send a message to a receiver with serializable `data`."""
        try:
            super().send(receiver=receiver, conversation_id=conversation_id, data=data, **kwargs)
        except Exception as exc:
            self.log.exception(f"Composing message with data {data} failed.", exc_info=exc)
            # TODO send an error message to the receiver?

    # Continuous listening and message handling
    def listen(
        self, stop_event: Optional[Event] = None, waiting_time: int = 100, **kwargs
    ) -> None:
        """Listen for zmq communication until `stop_event` is set or until KeyboardInterrupt.

        :param stop_event: Event to stop the listening loop. A fresh
            :class:`SimpleEvent` is created if none is given.
        :param waiting_time: Time to wait for a readout signal in ms.
        """
        # BUG FIX: the default used to be `stop_event: Event = SimpleEvent()`, a
        # mutable default created once at import time and shared by all calls and
        # instances; after a single `shut_down` every later default listener
        # would stop immediately. Create a fresh event per call instead.
        if stop_event is None:
            stop_event = SimpleEvent()
        self.stop_event = stop_event
        poller = self._listen_setup(**kwargs)
        # Loop
        try:
            while not stop_event.is_set():
                self._listen_loop_element(poller=poller, waiting_time=waiting_time)
        except KeyboardInterrupt:
            pass  # User stops the loop
        finally:
            # Close
            self._listen_close(waiting_time=waiting_time)

    def _listen_setup(self) -> zmq.Poller:
        """Setup for listening.

        If you add your own sockets, remember to poll only for incoming messages.
        """
        self.log.info(f"Start to listen as '{self.name}'.")
        # Prepare
        poller = zmq.Poller()
        poller.register(self.socket, zmq.POLLIN)

        # open communication
        self.sign_in()
        self.next_beat = time.perf_counter() + heartbeat_interval
        return poller

    def _listen_loop_element(
        self, poller: zmq.Poller, waiting_time: Optional[int]
    ) -> dict[zmq.Socket, int]:
        """Check the socks for incoming messages and handle them.

        :param waiting_time: Timeout of the poller in ms.
        """
        socks = dict(poller.poll(waiting_time))
        if self.socket in socks:
            self.read_and_handle_message()
            del socks[self.socket]
        elif (now := time.perf_counter()) > self.next_beat:
            # No traffic and the heartbeat is due: remind the Coordinator we are alive.
            self.heartbeat()
            self.next_beat = now + heartbeat_interval
        return socks

    def _listen_close(self, waiting_time: Optional[int] = None) -> None:
        """Close the listening loop."""
        self.log.info(f"Stop listen as '{self.name}'.")
        self.sign_out()

    # Message handling in loop
    def read_and_handle_message(self) -> None:
        """Interpret incoming message, which have not been requested."""
        try:
            message = self.read_message(timeout=0)
        except (TimeoutError, JSONRPCError):
            # only responses / errors arrived.
            return
        self.log.debug(f"Handling message {message}")
        if not message.payload:
            return  # no payload, that means just a heartbeat
        self.handle_message(message=message)

    def handle_message(self, message: Message) -> None:
        """Dispatch a message to the appropriate handler according to its type."""
        if message.header_elements.message_type == MessageTypes.JSON:
            self.handle_json_message(message=message)
        else:
            self.handle_unknown_message_type(message=message)

    def handle_json_message(self, message: Message) -> None:
        """Dispatch a JSON message according to its content (request/error/result)."""
        try:
            data: dict[str, Any] = message.data  # type: ignore
        except JSONDecodeError as exc:
            self.log.exception(f"Could not decode json message {message}", exc_info=exc)
            return
        content = get_json_content_type(data)
        if JsonContentTypes.REQUEST in content:
            self.handle_json_request(message=message)
        elif JsonContentTypes.ERROR in content:
            self.handle_json_error(message=message)
        elif JsonContentTypes.RESULT in content:
            self.handle_json_result(message)
        else:
            self.log.error(f"Invalid JSON message received: {message}")

    def handle_json_request(self, message: Message) -> None:
        """Process a JSON-RPC request and send the response."""
        response = self.process_json_message(message=message)
        self.send_message(response)

    def process_json_message(self, message: Message) -> Message:
        """Process a JSON-RPC request via the rpc server and build the response message."""
        self.current_message = message
        self.additional_response_payload = None
        self.log.info(f"Handling commands of {message}.")
        reply = self.rpc.process_request(message.payload[0])
        response = Message(
            message.sender,
            conversation_id=message.conversation_id,
            message_type=MessageTypes.JSON,
            data=reply,
            additional_payload=self.additional_response_payload
        )
        return response

    def handle_json_error(self, message: Message) -> None:
        self.log.warning(f"Error message from {message.sender!r} received: {message}")

    def handle_json_result(self, message: Message) -> None:
        self.log.warning(f"Unsolicited message from {message.sender!r} received: '{message}'")

    def handle_unknown_message_type(self, message: Message) -> None:
        self.log.warning(
            f"Message from {message.sender!r} with unknown message type "
            f"{message.header_elements.message_type} received: '{message.data}', "
            f"{message.payload!r}."
        )

    # Methods offered via RPC
    def set_log_level(self, level: str) -> None:
        """Set the log level."""
        plevel = PythonLogLevels[level]
        self.root_logger.setLevel(plevel)

    def shut_down(self) -> None:
        """Stop the listening loop."""
        self.stop_event.set()
from argparse import ArgumentParser
import logging
from typing import Optional


parser = ArgumentParser()
parser.add_argument("-r", "--host", help="set the host name of this Node's Coordinator")
parser.add_argument("-n", "--name", help="set the application name")
parser.add_argument("-q", "--quiet", action="count", default=0,
                    help="decrease the logging level by one, may be used more than once")
parser.add_argument("-v", "--verbose", action="count", default=0,
                    help="increase the logging level by one, may be used more than once")


def parse_command_line_parameters(parser: ArgumentParser = parser,
                                  logger: Optional[logging.Logger] = None,
                                  arguments: Optional[list[str]] = None,
                                  parser_description: Optional[str] = None,
                                  logging_default: int = logging.WARNING,
                                  ) -> dict:
    """Parse the command line parameters and return a dictionary for GUIs.

    :param parser: parser to use, for example with more settings.
    :param logger: The logger whose log level to set. Defaults to "__main__" logger.
    :param list arguments: Arguments for the parser to parse. Per default, take it from `sys.argv`.
    :param str parser_description: Override the parsers program description.
    :param int logging_default: Default level for logging.
    :return: Dictionary with keyword arguments parsed from the command line parameters.
    """
    if parser_description is not None:
        parser.description = parser_description
    parsed = vars(parser.parse_args(arguments))
    # Each -q raises the level by 10 (fewer logs), each -v lowers it (more logs).
    level = logging_default + 10 * (parsed.pop("quiet", 0) - parsed.pop("verbose", 0))
    target_logger = logger if logger is not None else logging.getLogger("__main__")
    target_logger.setLevel(level)
    # Drop options the user did not supply, so they do not override defaults downstream.
    return {key: value for key, value in parsed.items() if value is not None}
+# + +from __future__ import annotations +from enum import Enum +from threading import get_ident, Condition +from typing import Any, Callable, Optional, Union +from warnings import warn + +import zmq + +from .extended_message_handler import ExtendedMessageHandler +from .base_communicator import MessageBuffer +from ..core.message import Message, MessageTypes +from ..core.internal_protocols import CommunicatorProtocol, SubscriberProtocol +from ..core.serialization import generate_conversation_id + + +class PipeCommands(bytes, Enum): + SUBSCRIBE = b"SUB" + UNSUBSCRIBE = b"UNSUB" + UNSUBSCRIBE_ALL = b"UNSUBALL" + SEND = b"SND" + RENAME = b"REN" + LOCAL_COMMAND = b"LOC" + + +class LockedMessageBuffer(MessageBuffer): + """Buffer messages thread safe for later reading by the application. + + With the method :meth:`add_conversation_id` a conversation_id is stored to indicate, that the + corresponding response should be stored in the buffer instead of handling it in the + message_handler. + The message_handler uses :meth:`add_response_message` to add a message to the buffer, if it is a + response, i.e. its conversation_id is in the list of expected responses. + The main application thread uses :meth:`retrieve_message` to get the response message with a + specific conversation_id. + If the response is in the buffer, it is returned immediately. + If the response is not yet in the buffer, it waits until a new message is added to the buffer to + check, whether that message fits the conversation_id. + This is repeated until the suiting response is found or a limit is reached. 
+ """ + + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self._buffer_lock = Condition() + + def add_conversation_id(self, conversation_id: bytes) -> None: + """Add the conversation_id of a sent message in order to buffer the response.""" + with self._buffer_lock: + super().add_conversation_id(conversation_id=conversation_id) + + def remove_conversation_id(self, conversation_id: bytes) -> None: + """Remove a conversation_id from the requested ids.""" + with self._buffer_lock: + super().remove_conversation_id(conversation_id=conversation_id) + + def add_message(self, message: Message): + """Add a message to the buffer.""" + with self._buffer_lock: + super().add_message(message) + self._buffer_lock.notify_all() + + def add_response_message(self, message: Message) -> bool: + """Add a message to the buffer, if it is a requested response. + + .. deprecated:: 0.3 + Use :meth:`add_message` instead. + + :return: whether the message was added to the buffer. + """ + warn("`add_response_message` is deprecated, use `add_message` instead.", FutureWarning) + if self.is_conversation_id_requested(message.conversation_id): + self.add_message(message) + return True + else: + return False + + def retrieve_message(self, conversation_id: Optional[bytes] = None) -> Optional[Message]: + """Retrieve the requested message or the next free one for `conversation_id=None`.""" + with self._buffer_lock: + return super().retrieve_message(conversation_id=conversation_id) + + def _retrieve_message_without_lock(self, conversation_id: Optional[bytes]) -> Optional[Message]: + return super().retrieve_message(conversation_id=conversation_id) + + def _predicate_generator(self, conversation_id: bytes) -> Callable[[], Optional[Message]]: + def check_message_in_buffer() -> Optional[Message]: + return self._retrieve_message_without_lock( + conversation_id=conversation_id) + return check_message_in_buffer + + def wait_for_message(self, conversation_id: bytes, timeout: float = 1) -> 
Message: + """Retrieve a message with a certain `conversation_id` waiting `timeout` seconds. + + :param conversation_id: Conversation_id of the message to retrieve. + :param timeout: Timeout in seconds. + """ + with self._buffer_lock: + result = self._buffer_lock.wait_for( + self._predicate_generator(conversation_id=conversation_id), + timeout=timeout) + if result: + return result + # No result found: + raise TimeoutError("Reading timed out.") + + +class CommunicatorPipe(CommunicatorProtocol, SubscriberProtocol): + """A pipe endpoint satisfying the communicator protocol. + + You can create this endpoint in any thread you like and use it there. + """ + + def __init__(self, + handler: ExtendedMessageHandler, + pipe_port: int, + message_buffer: LockedMessageBuffer, + context: Optional[zmq.Context] = None, + timeout: float = 1, + **kwargs): + super().__init__(**kwargs) + self.handler = handler + context = context or zmq.Context.instance() + self.socket: zmq.Socket = context.socket(zmq.PAIR) + self.socket.connect(f"inproc://listenerPipe:{pipe_port}") + self.rpc_generator = handler.rpc_generator + self.message_buffer = message_buffer + self.buffer = self.message_buffer # for backward compatibility + self.timeout = timeout + + # CommunicatorProtocol + @property + def name(self) -> str: + return self.handler.name + + @name.setter + def name(self, value: Union[bytes, str]) -> None: + if isinstance(value, str): + value = value.encode() + self._send_pipe_message(PipeCommands.RENAME, value) + + @property + def namespace(self) -> Union[str, None]: # type: ignore[override] + return self.handler.namespace + + @property + def full_name(self) -> str: + return self.handler.full_name + + def _send_pipe_message(self, typ: PipeCommands, *content: bytes) -> None: + try: + self.socket.send_multipart((typ, *content)) + except zmq.ZMQError as exc: + raise ConnectionRefusedError(f"Connection to the handler refused with '{exc}', " + "probably the handler stopped.") + + def send_message(self, 
message: Message) -> None: + if not message.sender: + message.sender = self.full_name.encode() + self._send_pipe_message(PipeCommands.SEND, *message.to_frames()) + + def read_message(self, conversation_id: Optional[bytes], timeout: Optional[float] = None + ) -> Message: + if conversation_id is None: + raise ValueError("You have to request a message with its conversation_id.") + return self.message_buffer.wait_for_message( + conversation_id=conversation_id, + timeout=self.timeout if timeout is None else timeout, + ) + + def ask_message(self, message: Message, timeout: Optional[float] = None) -> Message: + self.message_buffer.add_conversation_id(message.conversation_id) + self.send_message(message=message) + return self.read_message(conversation_id=message.conversation_id, timeout=timeout) + + def sign_in(self) -> None: + raise NotImplementedError("Managed in the PipeHandler itself.") + + def sign_out(self) -> None: + raise NotImplementedError("Managed in the PipeHandler itself.") + + def close(self) -> None: + self.socket.close(1) + + # methods for the data protocol + def subscribe_single(self, topic: bytes) -> None: + self._send_pipe_message(PipeCommands.SUBSCRIBE, topic) + + def unsubscribe_single(self, topic: bytes) -> None: + self._send_pipe_message(PipeCommands.UNSUBSCRIBE, topic) + + def unsubscribe_all(self) -> None: + self._send_pipe_message(PipeCommands.UNSUBSCRIBE_ALL) + + # methods for local access + def _send_handler(self, method: str, **kwargs) -> bytes: + cid = generate_conversation_id() + message_string = self.rpc_generator.build_request_str(method=method, **kwargs) + self.message_buffer.add_conversation_id(cid) + self._send_pipe_message(PipeCommands.LOCAL_COMMAND, cid, message_string.encode()) + return cid + + def _read_handler(self, cid: bytes, timeout: float = 1) -> Any: + response_message = self.read_message(conversation_id=cid, timeout=timeout) + return self.interpret_rpc_response(response_message=response_message) + + def ask_handler(self, 
method: str, timeout: float = 1, **kwargs) -> Any:
+        """Ask the associated message handler."""
+        cid = self._send_handler(method=method, timeout=timeout, **kwargs)
+        return self._read_handler(cid, timeout=timeout)
+
+    # Utility methods
+    def register_rpc_method(self, method: Callable, **kwargs) -> None:
+        """Register a method with the message handler to make it available via RPC."""
+        self.handler.register_rpc_method(method=method, **kwargs)
+
+
+class PipeHandler(ExtendedMessageHandler):
+    """A message handler which offers thread-safe methods for sending/reading messages.
+
+    This message handler offers the thread-safe :meth:`get_communicator` method to create a
+    communicator in a thread different to the handlers thread.
+    These communicator instances (in different threads) can communicate with the single message
+    handler safely.
+    The normal usage is to have the PipeHandler in some background thread listening (:meth:`listen`)
+    while the "active" threads have each a Communicator.
+
+    :attr name_changing_methods: List of methods which are called, whenever the full_name changes.
+ """ + message_buffer: LockedMessageBuffer + _communicators: dict[int, CommunicatorPipe] + _on_name_change_methods: set[Callable[[str], None]] = set() + + def __init__(self, name: str, context: Optional[zmq.Context] = None, **kwargs) -> None: + context = context or zmq.Context.instance() + super().__init__(name=name, context=context, **kwargs) + self.internal_pipe: zmq.Socket = context.socket(zmq.PULL) + self.pipe_port = self.internal_pipe.bind_to_random_port("inproc://listenerPipe", + min_port=12345) + self._communicators = {} + + def setup_message_buffer(self) -> None: + self.message_buffer = LockedMessageBuffer() + + def close(self) -> None: + self.internal_pipe.close(1) + self.close_all_communicators() + super().close() + + def set_full_name(self, full_name: str) -> None: + super().set_full_name(full_name=full_name) + for method in self._on_name_change_methods: + try: + method(full_name) + except Exception as exc: + self.log.exception("Setting the name with a registered method failed.", + exc_info=exc) + + def register_on_name_change_method(self, method: Callable[[str], None]) -> None: + """Register a method (accepting a string) to be called whenever the full name changes.""" + self._on_name_change_methods.add(method) + + def unregister_on_name_change_method(self, method: Callable[[str], None]) -> None: + self._on_name_change_methods.discard(method) + + def _listen_setup(self, **kwargs) -> zmq.Poller: + poller = super()._listen_setup(**kwargs) + poller.register(self.internal_pipe, zmq.POLLIN) + return poller + + def _listen_loop_element(self, poller: zmq.Poller, waiting_time: Optional[int] + ) -> dict[zmq.Socket, int]: + socks = super()._listen_loop_element(poller=poller, waiting_time=waiting_time) + if self.internal_pipe in socks: + self.read_and_handle_pipe_message() + del socks[self.internal_pipe] + return socks + + def read_and_handle_pipe_message(self) -> None: + msg = self.internal_pipe.recv_multipart() + self.handle_pipe_message(msg) + + def 
handle_pipe_message(self, msg: list[bytes]) -> None: + cmd = msg[0] + if cmd == PipeCommands.SUBSCRIBE: + self.subscribe_single(topic=msg[1]) + elif cmd == PipeCommands.UNSUBSCRIBE: + self.unsubscribe_single(topic=msg[1]) + elif cmd == PipeCommands.UNSUBSCRIBE_ALL: + self.unsubscribe_all() + elif cmd == PipeCommands.SEND: + self._send_frames(frames=msg[1:]) + elif cmd == PipeCommands.RENAME: + self.rename_handler(msg[1].decode()) + elif cmd == PipeCommands.LOCAL_COMMAND: + self.handle_local_request(conversation_id=msg[1], rpc=msg[2]) + else: + self.log.debug(f"Received unknown '{msg}'.") + + def rename_handler(self, name): + self.sign_out() + self.name = name + self.namespace = None # to update the full_name + self.sign_in() + + # Control protocol + def _send_frames(self, frames: list[bytes]) -> None: + """Send frames over the connection.""" + self.log.debug(f"Sending {frames}") + self.socket.send_multipart(frames) + + # Local messages + def handle_local_request(self, conversation_id: bytes, rpc: bytes) -> None: + result = self.rpc.process_request(data=rpc) + self.message_buffer.add_message( + Message( + "comm", + sender="ego", + data=result, + message_type=MessageTypes.JSON, + conversation_id=conversation_id, + ) + ) + + # Thread safe methods for access from other threads + def create_communicator(self, **kwargs) -> CommunicatorPipe: + """Create a communicator wherever you want to access the pipe handler.""" + com = CommunicatorPipe(message_buffer=self.message_buffer, pipe_port=self.pipe_port, + handler=self, + **kwargs) + self._communicators[get_ident()] = com + return com + + def get_communicator(self, **kwargs) -> CommunicatorPipe: + """Get the communicator for this thread, creating one if necessary.""" + com = self._communicators.get(get_ident()) + if com is None or com.socket.closed is True: + return self.create_communicator(**kwargs) + else: + return com + + def close_all_communicators(self) -> None: + for communicator in self._communicators.values(): + 
communicator.close() diff --git a/pyleco/utils/qt_listener.py b/pyleco/utils/qt_listener.py new file mode 100644 index 000000000..32d4036df --- /dev/null +++ b/pyleco/utils/qt_listener.py @@ -0,0 +1,119 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from typing import Optional + +from qtpy.QtCore import QObject, Signal # type: ignore +from zmq import Context # type: ignore + +from ..core.message import Message +from ..core.data_message import DataMessage +from .listener import Listener, PipeHandler + + +class ListenerSignals(QObject): + """Signals for the Listener.""" + # General + name_changed = Signal(str) + # Control protocol + json_request_message = Signal(Message) + json_error_message = Signal(Message) + json_result_message = Signal(Message) + message = Signal(Message) # emitted in the same cases as above messages. 
+ # Data Protocol + dataReady = Signal(dict) + data_message = Signal(DataMessage) + + +class QtPipeHandler(PipeHandler): + + local_methods = ["pong", "set_log_level"] + + def __init__(self, name: str, signals: ListenerSignals, context: Optional[Context] = None, + **kwargs) -> None: + self.signals = signals + super().__init__(name, context, **kwargs) + + def handle_json_request(self, message: Message) -> None: + try: + method = message.data.get("method") # type: ignore + except AttributeError: + pass + else: + if method in self.local_methods: + super().handle_json_request(message=message) + return + # in all other cases: + self.signals.message.emit(message) + self.signals.json_request_message.emit(message) + + def handle_json_error(self, message: Message) -> None: + self.signals.message.emit(message) + self.signals.json_error_message.emit(message) + + def handle_json_result(self, message: Message) -> None: + self.signals.message.emit(message) + self.signals.json_result_message.emit(message) + + def handle_unknown_message_type(self, message: Message) -> None: + self.signals.message.emit(message) + + def handle_subscription_data(self, data: dict) -> None: + """Handle incoming subscription data.""" + # old style + self.signals.dataReady.emit(data) + + def handle_subscription_message(self, message: DataMessage) -> None: + """Handle an incoming subscription message.""" + # new style + self.signals.data_message.emit(message) + + +class QtListener(Listener): + """Listening on incoming messages in a separate thread - PyQt version. + + On one side it handles incoming messages (data and control protocol) in another thread. + On the other side, it offers the :meth:`get_communicator` method, which returns a + :class:`Communicator`, offering communication to the network. + + Call :meth:`.start_listen()` to actually listen. + + It emits signals from :attr:`signals` if a control or data message arrives. 
+ It also emits the `signals.name_changed` signal, whenever the Communicator changes its name. + + :param int data_port: Configure the port to be used for configuration. + :param logger: Logger instance whose logs should be published. Defaults to "__main__". + """ + + def __init__(self, name: str, host: str = "localhost", **kwargs) -> None: + super().__init__(name=name, host=host, **kwargs) + self.signals = ListenerSignals() + + def _listen(self, name: str, stop_event, coordinator_host: str, coordinator_port: int, + data_host: str, data_port: int) -> None: + self.message_handler = QtPipeHandler(name, signals=self.signals, + host=coordinator_host, port=coordinator_port, + data_host=data_host, data_port=data_port,) + self.message_handler.register_on_name_change_method(self.signals.name_changed.emit) + self.message_handler.listen(stop_event=stop_event) diff --git a/pyleco/utils/timers.py b/pyleco/utils/timers.py new file mode 100644 index 000000000..1f53f7609 --- /dev/null +++ b/pyleco/utils/timers.py @@ -0,0 +1,60 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + + +from threading import Event, Timer + + +class RepeatingTimer(Timer): + """A timer timing out several times instead of just once. + + Note that the next time is called after the function has finished! + + :param float interval: Interval between readouts in s. + """ + + def __init__(self, interval, function, args=None, kwargs=None): + super().__init__(interval, function, args, kwargs) + self.daemon = True + + def run(self): + while not self.finished.wait(self.interval): + self.function(*self.args, **self.kwargs) + + +class SignallingTimer(RepeatingTimer): + """Repeating timer that sets an Event (:attr:`signal`) at timeout and continues counting. + + :param float interval: Interval in s. + """ + + def __init__(self, interval): + self.signal = Event() + super().__init__(interval, self._timeout, args=(self.signal,)) + + @staticmethod + def _timeout(signal): + """Set and clear the signal event.""" + signal.set() + signal.clear() diff --git a/pyleco/utils/zmq_log_handler.py b/pyleco/utils/zmq_log_handler.py new file mode 100644 index 000000000..a5db4c3c3 --- /dev/null +++ b/pyleco/utils/zmq_log_handler.py @@ -0,0 +1,79 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +import logging +from logging.handlers import QueueHandler +import time +from typing import Any, Optional + +import zmq + +from ..core import LOG_RECEIVING_PORT +from ..core.data_message import DataMessage +from ..utils.data_publisher import DataPublisher + + +class ZmqLogHandler(QueueHandler): + """Handle log entries publishing them. + + You have to set the :attr:`full_name` in order to publish logs. + + :attr full_name: Full name of the Component. 
+ """ + + full_name: str + + def __init__(self, context: Optional[zmq.Context] = None, host: str = "localhost", + port: int = LOG_RECEIVING_PORT, full_name: str = "") -> None: + publisher = DataPublisher(full_name=full_name, host=host, port=port, context=context) + super().__init__(publisher) # type: ignore + self.full_name = full_name + + def prepare(self, record: logging.LogRecord) -> list[str]: + """Prepare a json serializable message from the record in order to send it.""" + record.message = record.getMessage() + record.asctime = time.strftime('%Y-%m-%d %H:%M:%S') + tmp = [record.asctime, str(record.levelname), str(record.name)] + s = self.format(record) + if record.exc_info: + # Cache the traceback text to avoid converting it multiple times + # (it's constant anyway) + if not record.exc_text: + record.exc_text = logging.Formatter.formatException(self, record.exc_info) # type: ignore # noqa: E501 + if record.exc_text: + if s[-1:] != "\n": + s = s + "\n" + s = s + record.exc_text + if record.stack_info: + if s[-1:] != "\n": + s = s + "\n" + s = s + record.stack_info + tmp.append(s) + return tmp + + def enqueue(self, record: Any) -> None: + """Enqueue a message prepared by :meth:`prepare`, if the fullname is given.""" + message = DataMessage(topic=self.full_name.encode(), data=record) + self.queue.send_message(message) # type: ignore diff --git a/pyproject.toml b/pyproject.toml index 65b9c6aba..c33579ac5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,42 +1,58 @@ [project] name = "pyleco" -version = "0.0.1.dev" authors = [ { name="PyLECO Developers" }, ] +dynamic = ["version"] + description = "Python reference implementation of the Laboratory Experiment COntrol (LECO) protocol" readme = "README.md" license = {file = "LICENSE"} classifiers = [ "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + 
"Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", - "Development Status :: 1 - Planning", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering", + "Topic :: System :: Networking", ] -requires-python = ">=3.9" +requires-python = ">=3.8" dependencies = [ "pyzmq >= 22.3.0", - "openrpc >= 8.1.0", - "jsonrpc2-pyclient >= 4.3.0", - "jsonrpc2-objects >= 3.0.0", - "uuid7 >= 0.1.0", + "openrpc >= 8.1.0; python_version >= '3.9' and python_version < '3.13'", + "uuid6 >= 2024.1.12", ] [project.optional-dependencies] dev = [ - 'pytest', - 'pytest-cov', - 'sphinx', - 'sphinx_rtd_theme', + "pytest", + "pytest-cov", + "sphinx", + "sphinx_rtd_theme", +] +openrpc = [ + "openrpc >= 8.1.0; python_version >= '3.9'", ] [project.urls] "Homepage" = "https://github.com/pymeasure/pyleco" "Bug Tracker" = "https://github.com/pymeasure/pyleco/issues" +[project.scripts] +coordinator = "pyleco.coordinators.coordinator:main" +proxy_server = "pyleco.coordinators.proxy_server:main" +starter = "pyleco.management.starter:main" + [build-system] -requires = ["setuptools>=61.0", "wheel", "setuptools_scm>=7.0"] +requires = ["setuptools>=61.0", "wheel", "setuptools_scm>=8.1.0"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] @@ -72,4 +88,8 @@ omit = [ # Omit LECO definitions "pyleco/errors.py", "pyleco/core/leco_protocols.py", + # omit import file + "pyleco/json_utils/rpc_server.py", + # omit files for testing only + "pyleco/management/test_tasks/*", ] diff --git a/tests/acceptance_tests/test_coordinator_live.py b/tests/acceptance_tests/test_coordinator_live.py new file mode 100644 index 000000000..79ef399c1 --- /dev/null +++ b/tests/acceptance_tests/test_coordinator_live.py @@ -0,0 +1,162 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import logging +from socket import gethostname +from time import sleep +import threading + +import pytest + +from pyleco.core.message import Message, MessageTypes +from pyleco.utils.listener import Listener +from pyleco.utils.communicator import Communicator + +# Test the Coordinator and its Director in a live test +from pyleco.directors.coordinator_director import CoordinatorDirector +from pyleco.coordinators.coordinator import Coordinator + + +# Constants +PORT = 60001 +PORT2 = PORT + 1 +PORT3 = PORT + 2 +TALKING_TIME = 0.5 # s +TIMEOUT = 2 # s + + +hostname = gethostname() +testlevel = 30 +# pytest.skip("Takes too long.", allow_module_level=True) + + +def start_coordinator(namespace: str, port: int, coordinators=None, stop_event=None, **kwargs): + with Coordinator(namespace=namespace, port=port, **kwargs) as coordinator: + coordinator.routing(coordinators=coordinators, stop_event=stop_event) + + +@pytest.fixture(scope="module") +def leco(): + """A leco setup.""" + glog = logging.getLogger() + glog.setLevel(logging.DEBUG) + # glog.addHandler(logging.StreamHandler()) + log = logging.getLogger("test") + threads = [] + stop_events = [threading.Event(), threading.Event(), threading.Event()] + threads.append(threading.Thread(target=start_coordinator, + kwargs=dict(namespace="N1", port=PORT, + stop_event=stop_events[0]))) + threads.append(threading.Thread(target=start_coordinator, + kwargs=dict(namespace="N2", port=PORT2, + stop_event=stop_events[1]))) + threads.append(threading.Thread(target=start_coordinator, + kwargs=dict(namespace="N3", port=PORT3, + stop_event=stop_events[2]))) + for thread in threads: + thread.daemon = True + thread.start() + listener = Listener(name="Controller", port=PORT, timeout=TIMEOUT) + listener.start_listen() + sleep(TALKING_TIME) # time for setup + yield listener.get_communicator() + log.info("Tearing down") + listener.stop_listen() + for event in stop_events: + event.set() + for thread in threads: + thread.join(0.5) + + 
+@pytest.mark.skipif(testlevel < 0, reason="reduce load") +def test_startup(leco: Communicator): + with CoordinatorDirector(communicator=leco) as d: + assert d.get_local_components() == ["Controller"] + assert d.get_nodes() == {"N1": f"{hostname}:{PORT}"} + + +@pytest.mark.skipif(testlevel < 1, reason="reduce load") +def test_connect_N1_to_N2(leco: Communicator): + with CoordinatorDirector(communicator=leco) as d: + d.add_nodes({"N2": f"localhost:{PORT2}"}) + sleep(TALKING_TIME) # time for coordinators to talk + # assert that the N1.COORDINATOR knows about N2 + assert d.get_nodes() == {"N1": f"{hostname}:{PORT}", "N2": f"localhost:{PORT2}"} + # assert that the listener can contact N2.COORDINATOR + assert d.ask_rpc(actor="N2.COORDINATOR", method="pong") is None + + +@pytest.mark.skipif(testlevel < 2, reason="reduce load") +def test_Component_to_Component_via_1_Coordinator(leco: Communicator): + with Communicator(name="whatever", port=PORT) as c: + assert c.ask_rpc("N1.Controller", method="pong") is None + + +@pytest.mark.skipif(testlevel < 2, reason="reduce load") +def test_Component_to_Component_via_2_Coordinators(leco: Communicator): + with Communicator(name="whatever", port=PORT2) as c: + response = c.ask("N1.Controller", data={"id": 1, "method": "pong", "jsonrpc": "2.0"}, + message_type=MessageTypes.JSON) + assert response == Message( + b'N2.whatever', b'N1.Controller', data={"id": 1, "result": None, "jsonrpc": "2.0"}, + header=response.header) + + +@pytest.mark.skipif(testlevel < 2, reason="reduce load") +def test_Component_lists_propgate_through_Coordinators(leco: Communicator): + """Test that Component lists are propagated from one Coordinator to another.""" + with CoordinatorDirector(actor="N2.COORDINATOR", name="whatever", port=PORT2) as d: + assert d.get_global_components() == {"N1": ["Controller"], "N2": ["whatever"]} + + +@pytest.mark.skipif(testlevel < 2, reason="reduce load") +def test_Component_to_second_coordinator(leco: Communicator): + assert 
leco.ask_rpc("N2.COORDINATOR", method="pong") is None + + +def test_sign_in_rejected_for_duplicate_name(leco: Communicator): + with pytest.raises(ConnectionRefusedError): + with Communicator(name="Controller", port=PORT): + pass + + +@pytest.mark.skipif(testlevel < 3, reason="reduce load") +def test_connect_N3_to_N2(leco: Communicator): + with CoordinatorDirector(name="whatever", port=PORT3) as d1: + d1.add_nodes({"N2": f"localhost:{PORT2}"}) + + sleep(TALKING_TIME) # time for coordinators to talk + with CoordinatorDirector(actor="COORDINATOR", communicator=leco) as d2: + assert d2.get_nodes() == {"N1": f"{hostname}:{PORT}", "N2": f"localhost:{PORT2}", + "N3": f"{hostname}:{PORT3}"} + + +@pytest.mark.skipif(testlevel < 4, reason="reduce load") +def test_shutdown_N3(leco: Communicator): + with CoordinatorDirector(actor="N3.COORDINATOR", name="whatever", port=PORT3) as d1: + d1.shut_down_actor() + + sleep(TALKING_TIME) # time for coordinators to talk + with CoordinatorDirector(actor="COORDINATOR", communicator=leco) as d2: + assert d2.get_nodes() == {"N1": f"{hostname}:{PORT}", "N2": f"localhost:{PORT2}"} diff --git a/tests/acceptance_tests/test_director_actor.py b/tests/acceptance_tests/test_director_actor.py new file mode 100644 index 000000000..94c39315c --- /dev/null +++ b/tests/acceptance_tests/test_director_actor.py @@ -0,0 +1,167 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +import logging +import threading +from time import sleep + +import pytest + +from pyleco.coordinators.coordinator import Coordinator +from pyleco.actors.actor import Actor +from pyleco.directors.director import Director + + +# Constants +PORT = 60004 + + +def start_coordinator(namespace: str, port: int, coordinators=None, **kwargs): + with Coordinator(namespace=namespace, port=port, **kwargs) as coordinator: + coordinator.routing(coordinators=coordinators) + + +class FakeInstrument: + _prop1 = 5 + + def __init__(self): + pass + + def connect(self): + pass + + @property + def constant(self): + return 7 + + @property + def prop1(self): + return self._prop1 + + @prop1.setter + def prop1(self, value): + self._prop1 = value + + def triple(self, factor: float = 1, factor2: float = 1) -> float: + return factor * factor2 * 3 + + +def start_actor(event: threading.Event): + actor = Actor("actor", FakeInstrument, port=PORT) + + def binary_method_manually() -> None: + """Receive binary data and return it. Do all the binary things manually.""" + payload = actor.current_message.payload[1:] + try: + actor.additional_response_payload = [payload[0] * 2] + except IndexError: + pass + + def binary_method_created(additional_payload: list[bytes]) -> tuple[None, list[bytes]]: + """Receive binary data and return it. 
Create binary method by registering it.""" + return None, [additional_payload[0] * 2] + + actor.register_rpc_method(binary_method_manually) + actor.register_binary_rpc_method( + binary_method_created, accept_binary_input=True, return_binary_output=True + ) + actor.connect() + actor.rpc.method()(actor.device.triple) + actor.register_device_method(actor.device.triple) + actor.listen(event) + actor.disconnect() + + +@pytest.fixture(scope="module") +def director(): + """A leco setup.""" + glog = logging.getLogger() + glog.setLevel(logging.DEBUG) + # glog.addHandler(logging.StreamHandler()) + log = logging.getLogger("test") + stop_event = threading.Event() + threads = [] + threads.append(threading.Thread(target=start_coordinator, + kwargs=dict(namespace="N1", port=PORT))) + threads.append(threading.Thread(target=start_actor, kwargs=dict(event=stop_event))) + for thread in threads: + thread.daemon = True + thread.start() + sleep(1) + director = Director(actor="actor", port=PORT) + yield director + log.info("Tearing down") + stop_event.set() + director.shut_down_actor(actor="COORDINATOR") + for thread in threads: + thread.join(0.5) + + +def test_get_property(director: Director): + assert director.get_parameters("constant") == {"constant": 7} + + +def test_change_property(director: Director): + start = director.get_parameters(["prop1"])["prop1"] + director.set_parameters({"prop1": start + 3}) + assert director.get_parameters(["prop1"])["prop1"] == start + 3 + + +def test_call_action_arg(director: Director): + assert director.call_action("triple", 5) == 15 + + +def test_call_action_kwarg(director: Director): + assert director.call_action(action="triple", factor=5) == 15 + + +def test_call_action_arg_and_kwarg(director: Director): + assert director.call_action("triple", 2, factor2=5) == 30 + + +def test_method_via_rpc(director: Director): + assert director.ask_rpc(method="triple", factor=5) == 15 + + +def test_method_via_rpc2(director: Director): + assert 
director.ask_rpc(method="triple", factor=2, factor2=5) == 30 + + +def test_device_method_via_rpc(director: Director): + assert director.ask_rpc(method="device.triple", factor=5) == 15 + + +def test_binary_data_transfer(director: Director): + assert director.ask_rpc( + method="binary_method_manually", + additional_payload=[b"123"], + extract_additional_payload=True, + ) == (None, [b"123123"]) + + +def test_binary_data_transfer_created(director: Director): + assert director.ask_rpc( + method="binary_method_created", additional_payload=[b"123"], extract_additional_payload=True + ) == (None, [b"123123"]) diff --git a/tests/acceptance_tests/test_proxy_server_live.py b/tests/acceptance_tests/test_proxy_server_live.py new file mode 100644 index 000000000..e0c7cafea --- /dev/null +++ b/tests/acceptance_tests/test_proxy_server_live.py @@ -0,0 +1,73 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +import pytest +from time import sleep + +from pyleco.utils.data_publisher import DataPublisher +from pyleco.utils.listener import Listener +from pyleco.core import PROXY_SENDING_PORT + +from pyleco.coordinators.proxy_server import start_proxy, port + + +pytestmark = pytest.mark.skip("Hangs on teardown in CI") + + +# Parameters +offset = 100 + + +class ModListener(Listener): + def __init__(self, name: str, host: str = "localhost", data_port: int = PROXY_SENDING_PORT, + **kwargs) -> None: + super().__init__(name=name, host=host, data_port=data_port, **kwargs) + self._data: list[dict] = [] + + def handle_subscription_data(self, data: dict) -> None: + self._data.append(data) + + +@pytest.fixture(scope="module") +def publisher() -> DataPublisher: + return DataPublisher(full_name="abc", port=port - 2 * offset) + + +@pytest.fixture(scope="module") +def listener(publisher): + context = start_proxy(offset=offset) + listener = ModListener(name="listener", data_port=port - 1 - 2 * offset) + listener.start_listen() + listener.communicator.subscribe("") + sleep(.5) # due to slow joiner: Allow time for connections. + yield listener # type: ignore + listener.close() + context.destroy() # in order to stop the proxy + + +def test_publishing(publisher: DataPublisher, listener: ModListener): + publisher.send_data(topic="topic", data="value") + sleep(.1) + assert listener._data == [{"topic": "value"}] diff --git a/tests/acceptance_tests/test_starter_live.py b/tests/acceptance_tests/test_starter_live.py new file mode 100644 index 000000000..cb6481c7c --- /dev/null +++ b/tests/acceptance_tests/test_starter_live.py @@ -0,0 +1,121 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import logging +import os +import threading +from time import sleep + +import pytest + +from pyleco.coordinators.coordinator import Coordinator +from pyleco.management.starter import Starter, Status +from pyleco.directors.starter_director import StarterDirector +from pyleco.directors.coordinator_director import CoordinatorDirector + +from pyleco.management.test_tasks import test_task + +# Constants +PORT = 60005 + + +def start_coordinator(namespace: str, port: int, coordinators=None, **kwargs): + with Coordinator(namespace=namespace, port=port, **kwargs) as coordinator: + coordinator.routing(coordinators=coordinators) + + +def start_starter(event: threading.Event): + path = os.path.dirname(test_task.__file__) + starter = Starter(directory=path, port=PORT) + starter.listen(event) + + +@pytest.fixture(scope="module") +def director(): + """A leco setup.""" + glog = logging.getLogger() + glog.setLevel(logging.DEBUG) + # glog.addHandler(logging.StreamHandler()) + log = logging.getLogger("test") + stop_event = threading.Event() + threads = [] + threads.append(threading.Thread(target=start_coordinator, + kwargs=dict(namespace="N1", port=PORT))) + threads.append(threading.Thread(target=start_starter, kwargs=dict(event=stop_event))) + for thread in threads: + thread.daemon = True + thread.start() + sleep(1) + director = StarterDirector(actor="starter", port=PORT, timeout=5) + yield director + log.info("Tearing down") + stop_event.set() + director.shut_down_actor(actor="COORDINATOR") + for thread in threads: + thread.join(0.5) + + +def test_sign_in(director: StarterDirector): + d2 = CoordinatorDirector(communicator=director.communicator) + assert "starter" in d2.get_local_components() # type: ignore + + +def test_tasks_listing(director: StarterDirector): + tasks = director.list_tasks() + expected_tasks = [ + {"name": "failing_task", "tooltip": ""}, + { + "name": "no_task", + "tooltip": "Task which can be imported, but not started as method `task` is missing.\n", + }, + 
{ + "name": "test_task", + "tooltip": "Example scheme for an Actor for pymeasure instruments. 'test_task'\n", + }, + ] + for t in expected_tasks: + assert t in tasks + assert len(tasks) == len(expected_tasks), "More tasks present than expected." + + +def test_start_task(director: StarterDirector): + director.start_tasks("test_task") + status = Status(director.status_tasks("test_task")["test_task"]) + assert Status.STARTED in status + assert Status.RUNNING in status + + +def test_stop_task(director: StarterDirector): + director.stop_tasks("test_task") + status = Status(director.status_tasks("test_task").get("test_task", 0)) + assert Status.STARTED not in status + assert Status.RUNNING not in status + + +def test_start_task_again(director: StarterDirector): + director.start_tasks(["test_task", "failing_task", "no_task"]) + status = Status(director.status_tasks("test_task")["test_task"]) + assert Status.STARTED in status + assert Status.RUNNING in status + director.stop_tasks(["test_task", "no_task"]) diff --git a/tests/actors/test_actor.py b/tests/actors/test_actor.py new file mode 100644 index 000000000..44d3a626a --- /dev/null +++ b/tests/actors/test_actor.py @@ -0,0 +1,282 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import logging +from sys import version_info +import time + +from unittest.mock import MagicMock + +import pytest + +from pyleco.test import FakePoller +from pyleco.actors.actor import Actor +from pyleco.core.leco_protocols import PollingActorProtocol, ExtendedComponentProtocol, Protocol + + +class FantasyChannel: + def __init__(self) -> None: + self._prop = -1 + + @property + def channel_property(self): + return self._prop + + @channel_property.setter + def channel_property(self, value): + self._prop = value + + def channel_method(self, value): + return 2 * value + + +class FantasyInstrument: + + def __init__(self, adapter, name="FantasyInstrument", *args, **kwargs): + self.name = name + self.adapter = adapter + super().__init__() + self._prop = 5 + self._prop2 = 7 + self.channel = FantasyChannel() + self.channel.trace = FantasyChannel() # type: ignore + + @property + def prop(self): + return self._prop + + @prop.setter + def prop(self, value): + self._prop = value + + @property + def prop2(self): + return self._prop2 + + @prop2.setter + def prop2(self, value): + self._prop2 = value + + def silent_method(self, value): + self._method_value = value + + def returning_method(self, value): + return value ** 2 + + @property + def long(self): + time.sleep(0.5) + return 7 + + def connect(self, *args): + pass + + def disconnect(self, *args): + pass + + +class FakeActor(Actor): + + def queue_readout(self): + logging.getLogger().info(f"queue: {time.perf_counter()}") + super().queue_readout() + 
+ def heartbeat(self): + logging.getLogger().info("beating") + super().heartbeat() + + +class ExtendedActorProtocol(ExtendedComponentProtocol, PollingActorProtocol, Protocol): + pass + + +@pytest.fixture() +def actor() -> FakeActor: + actor = FakeActor("test", FantasyInstrument, auto_connect={'adapter': MagicMock()}, + port=1234, + protocol="inproc") + actor.next_beat = float("inf") + return actor + + +class TestProtocolImplemented: + protocol_methods = [m for m in dir(ExtendedActorProtocol) if not m.startswith("_")] + + def static_test_methods_are_present(self): + def testing(component: ExtendedActorProtocol): + pass + testing(FakeActor(name="test", device_class=FantasyInstrument)) + + @pytest.fixture + def component_methods(self, actor: Actor): + response = actor.rpc.process_request( + '{"id": 1, "method": "rpc.discover", "jsonrpc": "2.0"}') + result = actor.rpc_generator.get_result_from_response(response) # type: ignore + return result.get('methods') + + @pytest.mark.parametrize("method", protocol_methods) + def test_method_is_available(self, component_methods, method): + for m in component_methods: + if m.get('name') == method: + return + raise AssertionError(f"Method {method} is not available.") + + +@pytest.mark.skipif(version_info.minor < 9, + reason="It is deprecated, because it does not work for python<3.9.") +def test_deprecated_cls_argument(): + with pytest.warns(FutureWarning, match="`cls` is deprecated"): + actor = FakeActor("test", cls=FantasyInstrument, auto_connect={'adapter': MagicMock()}, + port=1234, + protocol="inproc") + assert actor.device_class == FantasyInstrument + + +def test_device_class_or_cls_is_necessary(): + with pytest.raises(ValueError, match="`device_class`"): + FakeActor("test", protocol="inproc") + + +def test_get_properties(actor: Actor): + assert actor.get_parameters(['prop']) == {'prop': 5} + + +def test_get_channel_properties(actor: Actor): + assert actor.get_parameters(["channel.channel_property"]) == { + 
"channel.channel_property": -1} + + +def test_get_nested_channel_properties(actor: Actor): + assert actor.get_parameters(["channel.trace.channel_property"]) == { + "channel.trace.channel_property": -1} + + +def test_set_properties(actor: Actor): + actor.set_parameters({'prop2': 10}) + assert actor.device.prop2 == 10 + + +def test_set_channel_properties(actor: Actor): + actor.set_parameters(parameters={'channel.channel_property': 10}) + assert actor.device.channel.channel_property == 10 + + +def test_set_nested_channel_properties(actor: Actor): + actor.set_parameters(parameters={'channel.trace.channel_property': 10}) + assert actor.device.channel.trace.channel_property == 10 # type: ignore + + +def test_call_silent_method(actor: Actor): + assert actor.call_action("silent_method", kwargs=dict(value=7)) is None + assert actor.device._method_value == 7 + + +def test_returning_method(actor: Actor): + assert actor.call_action('returning_method', kwargs=dict(value=2)) == 4 + + +def test_channel_method(actor: Actor): + assert actor.call_action("channel.channel_method", args=(7,)) == 14 + + +def test_nested_channel_method(actor: Actor): + assert actor.call_action("channel.trace.channel_method", args=(7,)) == 14 + + +def test_register_device_method(actor: Actor): + actor.register_device_method(actor.device.returning_method) + response = actor.rpc.process_request( + '{"id": 1, "method": "device.returning_method", "params": [5], "jsonrpc": "2.0"}') + result = actor.rpc_generator.get_result_from_response(response) # type: ignore + assert result == 25 + + +class Test_disconnect: + @pytest.fixture + def disconnected_actor(self): + actor = FakeActor("name", device_class=FantasyInstrument, + auto_connect={"adapter": MagicMock()}) + actor._device = actor.device # type: ignore + actor.device.adapter.close = MagicMock() + actor.disconnect() + return actor + + def test_device_deleted(self, disconnected_actor: Actor): + assert not hasattr(disconnected_actor, "device") + + def 
test_timer_canceled(self, disconnected_actor: Actor): + assert disconnected_actor.timer.finished.is_set() is True + + def test_device_closed(self, disconnected_actor: Actor): + disconnected_actor._device.adapter.close.assert_called_once() # type: ignore + + +def test_exit_calls_disconnect(): + with FakeActor("name", device_class=FantasyInstrument) as actor: + actor.disconnect = MagicMock() + actor.disconnect.assert_called_once() + + +class Test_listen_loop_element: + @pytest.fixture + def looped_actor(self, actor: Actor): + """Check a loop with a value in the pipe""" + poller = FakePoller() + poller.register(actor.pipeL) + actor.queue_readout() # enqueue a readout + actor.readout = MagicMock() # type: ignore + # act + socks = actor._listen_loop_element(poller=poller, # type: ignore + waiting_time=None) + actor._socks = socks # type: ignore # for assertions + return actor + + def test_socks_empty(self, looped_actor: Actor): + assert looped_actor._socks == {} # type: ignore + + def test_readout_called(self, looped_actor: Actor): + looped_actor.readout.assert_called_once() # type: ignore + + def test_no_readout_queued(self, actor: Actor): + + poller = FakePoller() + poller.register(actor.pipeL) + actor.readout = MagicMock() # type: ignore + # act + actor._listen_loop_element(poller=poller, # type: ignore + waiting_time=0) + actor.readout.assert_not_called() + + +def test_timer_enqueues_heartbeat(actor: Actor): + actor.start_timer(0.0000001) # s + assert actor.pipeL.poll(timeout=50) == 1 # ms + + +def test_restart_stopped_timer(actor: Actor): + """Starting a stopped timer is impossible, ensure, that it works as expected (new timer).""" + actor.start_timer(10) # s + actor.stop_timer() + actor.start_timer(0.0000001) # s + assert actor.pipeL.poll(timeout=50) == 1 # ms diff --git a/tests/actors/test_locking_actor.py b/tests/actors/test_locking_actor.py new file mode 100644 index 000000000..31458ca9a --- /dev/null +++ b/tests/actors/test_locking_actor.py @@ -0,0 +1,320 @@ 
+# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import logging +import time + +from unittest.mock import MagicMock + +import pytest + +from pyleco.core.message import Message +from pyleco.actors.locking_actor import LockingActor, AccessDeniedError +from pyleco.core.leco_protocols import LockingActorProtocol + + +class FantasyChannel: + def __init__(self) -> None: + self._prop = -1 + + @property + def channel_property(self): + return self._prop + + @channel_property.setter + def channel_property(self, value): + self._prop = value + + @property + def l_c_prop(self): + return self._prop + + @l_c_prop.setter + def l_c_prop(self, value): + self._prop = value + + def channel_method(self, value): + return 2 * value + + +class FantasyInstrument: + """Some instrument to be controlled. + + The prefix "l" indicates properties etc. which should be locked. 
+ """ + def __init__(self, adapter, name="FantasyInstrument", *args, **kwargs): + self.name = name + self.adapter = adapter + super().__init__() + self.l_channel = FantasyChannel() + self.l_channel.trace = FantasyChannel() # type: ignore + self.o_channel = FantasyChannel() + self._l_prop = 5 + self._o_prop = 6 + + @property + def l_prop(self): + return self._l_prop + + @l_prop.setter + def l_prop(self, value): + self._l_prop = value + + @property + def o_prop(self): + return self._o_prop + + @o_prop.setter + def o_prop(self, value): + self._o_prop = value + + def l_method(self, value): + self._method_value = value + + def o_method(self, value): + return value**2 + + # methods for Instrument simulation + def connect(self, *args): + pass + + def disconnect(self, *args): + pass + + +class FakeActor(LockingActor): + def queue_readout(self): + logging.getLogger().info(f"queue: {time.perf_counter()}") + super().queue_readout() + + def heartbeat(self): + logging.getLogger().info("beating") + super().heartbeat() + + +@pytest.fixture() +def actor() -> FakeActor: + actor = FakeActor( + "test", + FantasyInstrument, + auto_connect={"adapter": MagicMock()}, + port=1234, + protocol="inproc", + ) + actor.next_beat = float("inf") + return actor + + +resources = ( + "l_prop", # a property + "l_method", # a method + "l_channel", # a channel + "l_channel.l_c_prop", # property of a channel + "o_channel.l_c_prop", # property of a channel +) + + +@pytest.fixture +def locked_actor(actor: LockingActor) -> LockingActor: + actor.current_message = Message("rec", "owner") + for r in resources: + actor.lock(r) + actor.current_message = Message("rec", "requester") + return actor + + +class TestProtocolImplemented: + protocol_methods = [m for m in dir(LockingActorProtocol) if not m.startswith("_")] + + def static_test_methods_are_present(self): + def testing(component: LockingActorProtocol): + pass + + testing(FakeActor(name="test", device_class=FantasyInstrument)) + + @pytest.fixture + def 
component_methods(self, actor: LockingActor): + response = actor.rpc.process_request( + '{"id": 1, "method": "rpc.discover", "jsonrpc": "2.0"}' + ) + result = actor.rpc_generator.get_result_from_response(response) # type: ignore + return result.get("methods") + + @pytest.mark.parametrize("method", protocol_methods) + def test_method_is_available(self, component_methods, method): + for m in component_methods: + if m.get("name") == method: + return + raise AssertionError(f"Method {method} is not available.") + +class Test_check_access_rights: + @pytest.mark.parametrize("resource", resources) + def test_owner(self, locked_actor: LockingActor, resource): + locked_actor.current_message = Message("rec", "owner") + assert locked_actor.check_access_rights(resource) is True + + @pytest.mark.parametrize("resource", ("l_channel.channel_property", "l_channel.trace")) + def test_owner_of_parent(self, locked_actor: LockingActor, resource): + locked_actor.current_message = Message("rec", "owner") + assert locked_actor.check_access_rights(resource) is True + + @pytest.mark.parametrize("resource", (None, *resources)) + def test_owner_of_device(self, actor: LockingActor, resource): + """Only the device itself is locked, test access to parts.""" + actor.current_message = Message("rec", "owner") + actor.lock(None) + # act and assert + assert actor.check_access_rights(resource) is True + + @pytest.mark.parametrize( + "resource", (None, "o_prop", "o_method", "o_channel", "o_channel.channel_property") + ) + def test_requester_True(self, locked_actor: LockingActor, resource): + """Test that another requester may access unlocked resources.""" + locked_actor.current_message = Message("rec", "requester") + assert locked_actor.check_access_rights(resource) is True + + @pytest.mark.parametrize( + "resource", ("l_channel", "l_channel.channel_property", "l_prop", "o_channel.l_c_prop") + ) + def test_requester_False(self, locked_actor: LockingActor, resource): + # arrange + 
locked_actor.force_unlock(None) + # act + locked_actor.current_message = Message("rec", "requester") + assert locked_actor.check_access_rights(resource) is False + + @pytest.mark.parametrize("resource", (None, *resources)) + def test_not_owner_of_device(self, actor: LockingActor, resource): + """Only the device itself is locked, test access to parts of it.""" + actor.current_message = Message("rec", "owner") + actor.lock(None) + actor.current_message = Message("rec", "requester") + # act and assert + assert actor.check_access_rights(resource) is True + + +@pytest.mark.parametrize( + "resource", + resources, +) +def test_lock_unlocked(actor: LockingActor, resource): + actor.current_message = Message("rec", "owner") + assert actor.lock(resource) is True + assert actor._locks[resource] == b"owner" + + +@pytest.mark.parametrize( + "resource", + resources, +) +def test_lock_already_locked(locked_actor: LockingActor, resource): + locked_actor.current_message = Message("rec", "owner") + assert locked_actor.lock(resource) is True + assert locked_actor._locks[resource] == b"owner" + + +@pytest.mark.parametrize( + "resource", + resources, +) +def test_lock_fail_as_already_locked(locked_actor: LockingActor, resource): + locked_actor.current_message = Message("rec", "requester") + assert locked_actor.lock(resource) is False + assert locked_actor._locks[resource] == b"owner" + + +@pytest.mark.parametrize( + "resource", + resources, +) +def test_unlock_locked(locked_actor: LockingActor, resource): + locked_actor.current_message = Message("rec", "owner") + locked_actor.unlock(resource) + assert resource not in locked_actor._locks + + +@pytest.mark.parametrize("resource", (None, "prop")) +def test_unlock_already_unlocked(actor: LockingActor, resource): + actor.current_message = Message("rec", "requester") + actor.unlock(resource) + # assert no error is raised + + +@pytest.mark.parametrize( + "resource", + resources, +) +def test_unlock_fail_as_different_user(locked_actor: 
LockingActor, resource): + locked_actor.current_message = Message("rec", "requester") + # with pytest.raises(AccessDeniedError, match=resource): + locked_actor.unlock(resource) + assert locked_actor._locks[resource] == b"owner" + + +@pytest.mark.parametrize( + "resource", + ("l_channel.channel_method", "l_channel.trace"), +) +def test_lock_fail_for_child_of_locked_resource(locked_actor: LockingActor, resource): + """If the parent is locked (e.g. the device), no child may be locked.""" + locked_actor.current_message = Message("rec", "requester") + assert locked_actor.lock(resource) is False + + +# test device access +def test_get_parameters_successfully(locked_actor: LockingActor): + locked_actor.current_message = Message("rec", "owner") + locked_actor.get_parameters(["l_prop", "l_channel.channel_property", "o_prop"]) + # assert that no error is raised + + +def test_get_parameters_unsuccessfully(locked_actor: LockingActor): + locked_actor.current_message = Message("rec", "requester") + with pytest.raises(AccessDeniedError, match="'l_prop'"): + locked_actor.get_parameters(["o_prop", "l_prop"]) + + +def test_set_parameters_successfully(locked_actor: LockingActor): + locked_actor.current_message = Message("rec", "owner") + locked_actor.set_parameters({"l_prop": 5, "l_channel.channel_property": 6}) + # assert that no error is raised + + +def test_set_parameters_unsuccessfully(locked_actor: LockingActor): + locked_actor.current_message = Message("rec", "requester") + with pytest.raises(AccessDeniedError, match="'l_prop'"): + locked_actor.set_parameters({"o_prop": 5, "l_prop": 6}) + + +def test_call_action_successfully(locked_actor: LockingActor): + locked_actor.current_message = Message("rec", "owner") + locked_actor.call_action("l_method", [5]) + # assert that no error is raised + + +def test_call_action_unsuccessfully(locked_actor: LockingActor): + locked_actor.current_message = Message("rec", "requester") + with pytest.raises(AccessDeniedError, match="'l_method'"): + 
locked_actor.call_action("l_method", [5]) diff --git a/tests/coordinators/test_coordinator.py b/tests/coordinators/test_coordinator.py new file mode 100644 index 000000000..2dc61ffd7 --- /dev/null +++ b/tests/coordinators/test_coordinator.py @@ -0,0 +1,695 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +from unittest.mock import MagicMock + +import pytest + +from pyleco.json_utils.json_objects import Request, ErrorResponse, DataError +from pyleco.json_utils.errors import NODE_UNKNOWN, NOT_SIGNED_IN, DUPLICATE_NAME, RECEIVER_UNKNOWN +from pyleco.core import VERSION_B +from pyleco.core.message import Message, MessageTypes +from pyleco.core.leco_protocols import ExtendedComponentProtocol, Protocol, CoordinatorProtocol +from pyleco.utils.coordinator_utils import FakeMultiSocket, FakeNode +from pyleco.json_utils.rpc_generator import RPCGenerator +from pyleco.test import FakeContext +from pyleco.utils.events import SimpleEvent + +from pyleco.coordinators.coordinator import Coordinator +from pyleco.coordinators import coordinator as coordinator_module # type: ignore + + +@pytest.fixture +def coordinator() -> Coordinator: + coordinator = Coordinator(namespace="N1", host="N1host", cleaning_interval=1e5, + context=FakeContext(), # type: ignore + multi_socket=FakeMultiSocket() + ) + d = coordinator.directory + d.add_component(b"send", b"321") + d.add_component(b"rec", b"123") + d.add_node_sender(FakeNode(), "N2host:12300", namespace=b"N2") + d._nodes[b"N2"] = d._waiting_nodes.pop("N2host:12300") + d._nodes[b"N2"].namespace = b"N2" + d._waiting_nodes = {} + d.add_node_receiver(b"n2", b"N2") + n2 = coordinator.directory.get_node(b"N2") + n2._messages_sent = [] # type: ignore # reset dealer sock._socket. + n2.heartbeat = -1 + coordinator.sock._messages_sent = [] # type: ignore # reset router sock._socket: + return coordinator + + +def fake_perf_counter() -> float: + return 0. 
+ + +@pytest.fixture() +def fake_counting(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr("pyleco.utils.coordinator_utils.perf_counter", fake_perf_counter) + + +cid = b"conversation_id;" + + +def fake_generate_cid() -> bytes: + return cid + + +@pytest.fixture(autouse=True) +def fake_cid_generation(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr("pyleco.core.serialization.generate_conversation_id", fake_generate_cid) + + +class ExtendedCoordinator(CoordinatorProtocol, ExtendedComponentProtocol, Protocol): + pass + + +class TestCoordinatorImplementsProtocol: + protocol_methods = [m for m in dir(ExtendedCoordinator) if not m.startswith("_")] + + def static_test_methods_are_present(self): + def testing(component: ExtendedCoordinator): + pass + testing(Coordinator()) + + @pytest.fixture + def component_methods(self, coordinator: Coordinator): + response = coordinator.rpc.process_request( + '{"id": 1, "method": "rpc.discover", "jsonrpc": "2.0"}') + result = RPCGenerator().get_result_from_response(response) # type: ignore + return result.get('methods') + + @pytest.mark.parametrize("method", protocol_methods) + def test_method_is_available(self, component_methods, method): + for m in component_methods: + if m.get('name') == method: + return + raise AssertionError(f"Method {method} is not available.") + + +class Test_coordinator_set_namespace_from_hostname: + @pytest.fixture + def namespace(self) -> bytes: + coordinator = Coordinator(context=FakeContext()) # type: ignore + return coordinator.namespace + + def test_namespace_is_bytes(self, namespace): + assert isinstance(namespace, bytes) + + def test_namespace_without_periods(self, namespace): + assert b"." 
not in namespace + + def test_namespace_from_hostname_with_periods(self, monkeypatch: pytest.MonkeyPatch): + def fake_gethostname() -> str: + return "hostname.domain.tld" + monkeypatch.setattr(coordinator_module, "gethostname", fake_gethostname) + coordinator = Coordinator(context=FakeContext()) # type: ignore + assert coordinator.namespace == b"hostname" + + +def test_coordinator_set_namespace_bytes(): + coordinator = Coordinator(namespace=b"test", context=FakeContext()) # type: ignore + assert coordinator.namespace == b"test" + + +def test_coordinator_set_namespace_invalid(): + with pytest.raises(ValueError, match="namespace"): + Coordinator(namespace=1234, context=FakeContext()) # type: ignore + + +def test_set_address_from_hostname(): + coordinator = Coordinator(context=FakeContext()) # type: ignore + assert isinstance(coordinator.address, str) + + +def test_set_address_manually(): + host = "host" + coordinator = Coordinator(host=host, context=FakeContext()) # type: ignore + assert coordinator.address == f"{host}:12300" + + +class TestClose: + @pytest.fixture + def coordinator_closed(self, coordinator: Coordinator): + coordinator.shut_down = MagicMock() # type: ignore[method-assign] + coordinator.close() + return coordinator + + def test_call_shutdown(self, coordinator_closed: Coordinator): + coordinator_closed.shut_down.assert_called_once() # type: ignore + + def test_close_socket(self, coordinator_closed: Coordinator): + assert coordinator_closed.sock.closed is True + + +def test_context_manager_calls_close(): + with Coordinator(multi_socket=FakeMultiSocket()) as c: + c.close = MagicMock() # type: ignore[method-assign] + c.close.assert_called_once() + + +class Test_clean_addresses: + def test_expired_component(self, coordinator: Coordinator, fake_counting): + coordinator.directory.get_components()[b"send"].heartbeat = -3.5 + coordinator.remove_expired_addresses(1) + assert b"send" not in coordinator.directory.get_component_names() + + def 
test_expired_component_updates_directory(self, coordinator: Coordinator, fake_counting): + coordinator.publish_directory_update = MagicMock() # type: ignore + coordinator.directory.get_components()[b"send"].heartbeat = -3.5 + coordinator.remove_expired_addresses(1) + coordinator.publish_directory_update.assert_called() + + def test_warn_component(self, coordinator: Coordinator, fake_counting): + # TODO implement heartbeat request + coordinator.directory.get_components()[b"send"].heartbeat = -1.5 + coordinator.remove_expired_addresses(1) + assert coordinator.sock._messages_sent == [(b"321", Message( # type: ignore + b"N1.send", b"N1.COORDINATOR", + message_type=MessageTypes.JSON, + data=Request(id=0, method="pong")))] + + def test_active_Component_remains_in_directory(self, coordinator: Coordinator, fake_counting): + coordinator.directory.get_components()[b"send"].heartbeat = -0.5 + coordinator.remove_expired_addresses(1) + assert coordinator.sock._messages_sent == [] # type: ignore + assert b"send" in coordinator.directory.get_components() + + def test_expired_Coordinator(self, coordinator: Coordinator, fake_counting): + coordinator.directory.get_node_ids()[b"n2"].heartbeat = -3.5 + coordinator.remove_expired_addresses(1) + assert b"n2" not in coordinator.directory.get_node_ids() + # further removal tests in :class:`Test_remove_coordinator` + + def test_warn_Coordinator(self, coordinator: Coordinator, fake_counting): + coordinator.publish_directory_update = MagicMock() # type: ignore + coordinator.directory.get_node_ids()[b"n2"].heartbeat = -1.5 + coordinator.remove_expired_addresses(1) + assert coordinator.directory.get_node_ids()[b"n2"]._messages_sent == [ # type: ignore + Message(b"N2.COORDINATOR", b"N1.COORDINATOR", + message_type=MessageTypes.JSON, + data=Request(id=0, method="pong")), + ] + + def test_active_Coordinator_remains_in_directory(self, coordinator: Coordinator, fake_counting): + coordinator.directory.get_node_ids()[b"n2"].heartbeat = -0.5 + 
coordinator.remove_expired_addresses(1) + assert b"n2" in coordinator.directory.get_node_ids() + + +def test_heartbeat_local(fake_counting, coordinator: Coordinator): + coordinator.sock._messages_read = [ # type: ignore + [b"321", Message(b"COORDINATOR", b"send")]] + coordinator.read_and_route() + assert coordinator.directory.get_components()[b"send"].heartbeat == 0 + + +def test_routing_connects_to_coordinators(coordinator: Coordinator): + event = SimpleEvent() + event.set() + coordinator.directory.add_node_sender = MagicMock() # type: ignore + coordinator.routing(["abc"], stop_event=event) + coordinator.directory.add_node_sender.assert_called_once + + +@pytest.mark.parametrize("i, o", ( + ([b"321", VERSION_B, b"COORDINATOR", b"send", b";", b""], None), # test heartbeat alone + ([b"321", VERSION_B, b"rec", b"send", b";", b"1"], + [b"123", VERSION_B, b"rec", b"send", b";", b"1"]), # receiver known, sender known. +)) +def test_routing_successful(coordinator: Coordinator, i, o): + """Test whether some incoming message `i` is sent as `o`. Here: successful routing.""" + coordinator.sock._messages_read = [ # type: ignore + (i[0], Message.from_frames(*i[1:]))] + coordinator.read_and_route() + if o is None: + assert coordinator.sock._messages_sent == [] # type: ignore + else: + assert coordinator.sock._messages_sent == [ # type: ignore + (o[0], Message.from_frames(*o[1:]))] + + +def test_reading_fails(coordinator: Coordinator, caplog: pytest.LogCaptureFixture): + def read_message() -> tuple[bytes, Message]: + return b"", Message.from_frames(*[b"frame 1", b"frame 2"]) # less frames than needed. + coordinator.sock.read_message = read_message # type: ignore + coordinator.read_and_route() + assert caplog.records[-1].msg == "Not enough frames read." 
+ + +@pytest.mark.parametrize("i, o", ( + # receiver unknown, return to sender: + ([b"321", VERSION_B, b"x", b"send", b"conversation_id;mid0", b""], + [b"321", VERSION_B, b"send", b"N1.COORDINATOR", b"conversation_id;\x00\x00\x00\x01", + ErrorResponse(id=None, + error=DataError.from_error(RECEIVER_UNKNOWN, + "x")).model_dump_json().encode()]), + # unknown receiver node: + ([b"321", VERSION_B, b"N3.CB", b"N1.send", b"conversation_id;mid0"], + [b"321", VERSION_B, b"N1.send", b"N1.COORDINATOR", b"conversation_id;\x00\x00\x00\x01", + ErrorResponse(id=None, + error=DataError.from_error(NODE_UNKNOWN, + "N3")).model_dump_json().encode()]), + # sender (without namespace) did not sign in: + ([b"1", VERSION_B, b"rec", b"unknownSender", b"conversation_id;mid0"], + [b"1", VERSION_B, b"unknownSender", b"N1.COORDINATOR", b"conversation_id;\x00\x00\x00\x01", + ErrorResponse(id=None, error=NOT_SIGNED_IN).model_dump_json().encode()]), + # sender (with given Namespace) did not sign in: + ([b"1", VERSION_B, b"rec", b"N1.unknownSender", b"conversation_id;mid0"], + [b"1", VERSION_B, b"N1.unknownSender", b"N1.COORDINATOR", b"conversation_id;\x00\x00\x00\x01", + ErrorResponse(id=None, error=NOT_SIGNED_IN).model_dump_json().encode()]), + # unknown sender with a rogue node name: + ([b"1", VERSION_B, b"rec", b"N2.unknownSender", b"conversation_id;mid0"], + [b"1", VERSION_B, b"N2.unknownSender", b"N1.COORDINATOR", b"conversation_id;\x00\x00\x00\x01", + ErrorResponse(id=None, error=NOT_SIGNED_IN).model_dump_json().encode()]), +)) +def test_routing_error_messages(coordinator: Coordinator, i, o): + """Test whether some incoming message `i` is sent as `o`. 
Here: Error messages.""" + coordinator.sock._messages_read = [ # type: ignore + (i[0], Message.from_frames(*i[1:]))] + coordinator.read_and_route() + if o is None: + assert coordinator.sock._messages_sent == [] # type: ignore + else: + assert coordinator.sock._messages_sent == [ # type: ignore + (o[0], Message.from_frames(*o[1:]))] + + +def test_remote_routing(coordinator: Coordinator): + coordinator.sock._messages_read = [ # type: ignore + [b"321", Message(b"N2.CB", b"N1.send")]] + coordinator.read_and_route() + assert coordinator.directory.get_node(b"N2")._messages_sent == [ # type: ignore + Message(b"N2.CB", b"N1.send")] + + +@pytest.mark.parametrize("sender", (b"N2.CB", b"N2.COORDINATOR")) +def test_remote_heartbeat(coordinator: Coordinator, fake_counting, sender): + coordinator.sock._messages_read = [ # type: ignore + [b"n2", Message(b"N3.CA", sender)]] + assert coordinator.directory.get_node_ids()[b"n2"].heartbeat != 0 + coordinator.read_and_route() + assert coordinator.directory.get_node_ids()[b"n2"].heartbeat == 0 + + +class Test_handle_commands: + class SpecialCoordinator(Coordinator): + def handle_rpc_call(self, message: Message) -> None: + self._rpc = message + + @pytest.fixture + def coordinator_hc(self) -> Coordinator: + return self.SpecialCoordinator( + namespace="N1", host="N1host", cleaning_interval=1e5, + context=FakeContext(), # type: ignore + multi_socket=FakeMultiSocket()) + + def test_store_message(self, coordinator_hc: Coordinator): + msg = Message(b"receiver", b"sender", header=b"header", data=b"data") + coordinator_hc.handle_commands(b"identity", msg) + assert coordinator_hc.current_message == msg + + def test_store_identity(self, coordinator_hc: Coordinator): + msg = Message(b"receiver", b"sender", header=b"header", data=b"data") + coordinator_hc.handle_commands(b"identity", msg) + assert coordinator_hc.current_identity == b"identity" + + @pytest.mark.parametrize("identity, message", ( + (b"3", Message(b"", message_type=MessageTypes.JSON, + 
data={"jsonrpc": "2.0", "method": "some"})), + )) + def test_call_handle_rpc_call(self, coordinator_hc: Coordinator, identity, message): + coordinator_hc.handle_commands(identity, message) + assert coordinator_hc._rpc == message # type: ignore + + def test_log_error_response(self, coordinator_hc: Coordinator): + pass # TODO + + def test_pass_at_null_result(self, coordinator_hc: Coordinator): + coordinator_hc.handle_commands(b"", + Message(b"", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "result": None})) + assert not hasattr(coordinator_hc, "_rpc") + # assert no error log entry. TODO + + def test_log_at_non_null_result(self, coordinator_hc: Coordinator, + caplog: pytest.LogCaptureFixture): + caplog.set_level(10) + coordinator_hc.handle_commands(b"", + Message(b"", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "result": 5})) + assert not hasattr(coordinator_hc, "_rpc") + # assert no error log entry. TODO + caplog.records[-1].msg.startswith("Unexpected result") + + def test_pass_at_batch_of_null_results(self, coordinator_hc: Coordinator): + coordinator_hc.handle_commands(b"", + Message(b"", + message_type=MessageTypes.JSON, + data=[{"jsonrpc": "2.0", "result": None, "id": 1}, + {"jsonrpc": "2.0", "result": None, "id": 2}] + )) + assert not hasattr(coordinator_hc, "_rpc") + # assert no error log entry. 
TODO + + def test_log_at_batch_of_non_null_results(self, coordinator_hc: Coordinator, + caplog: pytest.LogCaptureFixture): + caplog.set_level(10) + coordinator_hc.handle_commands(b"", + Message(b"", + message_type=MessageTypes.JSON, + data=[{"jsonrpc": "2.0", "result": None, "id": 1}, + {"jsonrpc": "2.0", "result": 5, "id": 2}] + )) + assert not hasattr(coordinator_hc, "_rpc") + caplog.records[-1].msg.startswith("Unexpected result") + + @pytest.mark.parametrize("data", ( + {"jsonrpc": "2.0", "no method": 7}, + ["jsonrpc", "2.0", "no method", 7], # not a dict + )) + def test_invalid_json_does_not_raise_exception(self, coordinator_hc: Coordinator, data): + coordinator_hc.handle_commands(b"", + Message(receiver=b"COORDINATOR", sender=b"send", + data=data, message_type=MessageTypes.JSON,)) + # assert that no error is raised + + def test_invalid_json_message_raises_log(self, coordinator_hc: Coordinator, + caplog: pytest.LogCaptureFixture): + data = "funny stuff" + coordinator_hc.handle_commands(b"", + Message(receiver=b"COORDINATOR", sender=b"send", + data=data, message_type=MessageTypes.JSON,)) + assert caplog.records[-1].msg.startswith("Invalid JSON message") + + +class Test_sign_in: + def test_signin(self, coordinator: Coordinator): + coordinator.sock._messages_read = [ # type: ignore + [b'cb', Message(b"COORDINATOR", b"CB", + data=Request(id=7, method="sign_in"), + message_type=MessageTypes.JSON, + conversation_id=cid, + )]] + # read_and_route needs to start at routing, to check that the messages passes the heartbeats + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [ # type: ignore + (b"cb", Message(b"CB", b"N1.COORDINATOR", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 7, "result": None, "jsonrpc": "2.0"}))] + + def test_signin_sends_directory_update(self, coordinator: Coordinator): + coordinator.publish_directory_update = MagicMock() # type: ignore + coordinator.sock._messages_read = [ # type: ignore + [b'cb', 
Message(b"COORDINATOR", b"CB", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "sign_in", "id": 7}, + )]] + # read_and_route needs to start at routing, to check that the messages passes the heartbeats + coordinator.read_and_route() + coordinator.publish_directory_update.assert_any_call() + + def test_signin_rejected(self, coordinator: Coordinator): + coordinator.sock._messages_read = [ # type: ignore + [b'cb', Message(b"COORDINATOR", b"send", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 8, "method": "sign_in", "jsonrpc": "2.0"}, + )]] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [(b"cb", Message( # type: ignore + b"send", b"N1.COORDINATOR", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": None, "error": {"code": DUPLICATE_NAME.code, + "message": DUPLICATE_NAME.message}, + "jsonrpc": "2.0"} + ))] + + +class Test_sign_out_successful: + @pytest.fixture + def coordinator_signed_out(self, coordinator: Coordinator): + sign_out_message = Message(receiver=b"N1.COORDINATOR", sender=b"rec", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "sign_out", "id": 10}) + coordinator.publish_directory_update = MagicMock() # type: ignore + coordinator.sock._messages_read = [[b"123", sign_out_message]] # type: ignore + coordinator.read_and_route() + return coordinator + + def test_address_cleared(self, coordinator_signed_out: Coordinator): + assert b"rec" not in coordinator_signed_out.directory.get_components().keys() + + def test_acknowledgement_sent(self, coordinator_signed_out: Coordinator): + assert coordinator_signed_out.sock._messages_sent == [ # type: ignore + (b"123", Message(b"rec", b"N1.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 10, "result": None, "jsonrpc": "2.0"}))] + + def test_directory_update_sent(self, coordinator_signed_out: Coordinator): + coordinator_signed_out.publish_directory_update.assert_any_call() # 
type: ignore + + def test_requires_new_sign_in(self, coordinator_signed_out): + coordinator = coordinator_signed_out + coordinator.sock._messages_sent = [] # type: ignore + coordinator.sock._messages_read = [[b'123', Message( # type: ignore + b"N1.COORDINATOR", b"rec", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "result": None, "id": 11})]] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [(b"123", Message( # type: ignore + b"rec", b"N1.COORDINATOR", message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=NOT_SIGNED_IN)))] + + +def test_sign_out_clears_address_explicit_namespace(coordinator: Coordinator): + coordinator.sock._messages_read = [[b'123', Message( # type: ignore + b"N1.COORDINATOR", b"N1.rec", message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "sign_out", "id": 10})]] + coordinator.read_and_route() + assert b"rec" not in coordinator.directory.get_components().keys() + assert coordinator.sock._messages_sent == [ # type: ignore + (b"123", Message(b"N1.rec", b"N1.COORDINATOR", message_type=MessageTypes.JSON, + data={"id": 10, "result": None, "jsonrpc": "2.0"}))] + + +def test_sign_out_of_not_signed_in_generates_acknowledgment_nonetheless(coordinator: Coordinator): + coordinator.sock._messages_read = [[b'584', Message( # type: ignore + b"N1.COORDINATOR", b"rec584", message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "sign_out", "id": 10})]] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [ # type: ignore + (b"584", Message(b"rec584", b"N1.COORDINATOR", message_type=MessageTypes.JSON, + data={"id": 10, "result": None, "jsonrpc": "2.0"}))] + + +class Test_coordinator_sign_in: + def test_co_signin_unknown_coordinator_successful(self, coordinator: Coordinator): + """Test that an unknown Coordinator may sign in.""" + coordinator.sock._messages_read = [ # type: ignore + [b'n3', Message(b"COORDINATOR", b"N3.COORDINATOR", + 
message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "coordinator_sign_in", "id": 15}, + conversation_id=cid)]] + coordinator.read_and_route() + assert b'n3' in coordinator.directory.get_node_ids().keys() + assert coordinator.sock._messages_sent == [ # type: ignore + (b'n3', Message(b"COORDINATOR", b"N1.COORDINATOR", + message_type=MessageTypes.JSON, + conversation_id=cid, data={"id": 15, "result": None, + "jsonrpc": "2.0"}))] + + def test_co_signin_known_coordinator_successful(self, fake_counting, coordinator: Coordinator): + """Test that a Coordinator may sign in as a response to N1's sign in.""" + + coordinator.directory.add_node_sender(FakeNode(), "N3host:12345", namespace=b"N3") + coordinator.directory.get_nodes()[b"N3"] = coordinator.directory._waiting_nodes.pop( + "N3host:12345") + coordinator.directory.get_nodes()[b"N3"].namespace = b"N3" + + coordinator.sock._messages_read = [ # type: ignore + [b'n3', Message(b"COORDINATOR", b"N3.COORDINATOR", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "coordinator_sign_in", "id": 15},)]] + coordinator.read_and_route() + assert b'n3' in coordinator.directory.get_node_ids().keys() + assert coordinator.sock._messages_sent == [(b'n3', Message( # type: ignore + b"COORDINATOR", b"N1.COORDINATOR", message_type=MessageTypes.JSON, conversation_id=cid, + data={"id": 15, "result": None, "jsonrpc": "2.0"}))] + + @pytest.mark.xfail(True, reason="Additional error data is added") + def test_co_signin_rejected(self, coordinator: Coordinator): + """Coordinator sign in rejected due to already connected Coordinator.""" + coordinator.sock._messages_read = [ # type: ignore + [b'n3', Message(b"COORDINATOR", b"N2.COORDINATOR", + data={"jsonrpc": "2.0", "method": "coordinator_sign_in", "id": 15}, + message_type=MessageTypes.JSON, + conversation_id=cid)]] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [(b"n3", Message( # type: ignore + b"COORDINATOR", 
b"N1.COORDINATOR", + data={"id": 15, "error": {"code": -32000, "message": "Server error", + "data": "ValueError: Another Coordinator is known!"}, + "jsonrpc": "2.0"}, + message_type=MessageTypes.JSON, + conversation_id=cid))] + + def test_coordinator_sign_in_fails_at_duplicate_name(self, coordinator: Coordinator): + coordinator.current_message = Message( + b"COORDINATOR", b"N2.COORDINATOR", + data={"jsonrpc": "2.0", "method": "coordinator_sign_in", "id": 15}, + message_type=MessageTypes.JSON, + conversation_id=cid) + coordinator.current_identity = b"n3" + with pytest.raises(ValueError, match="Another Coordinator is known!"): + coordinator.coordinator_sign_in() + + def test_co_signin_of_self_rejected(self, coordinator: Coordinator): + """Coordinator sign in rejected because it is the same coordinator.""" + coordinator.sock._messages_read = [ # type: ignore + [b'n3', Message(b"COORDINATOR", b"N1.COORDINATOR", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "coordinator_sign_in", "id": 15})]] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [ # type: ignore + (b'n3', Message(b"N1.COORDINATOR", b"N1.COORDINATOR", conversation_id=cid, + message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=NOT_SIGNED_IN)))] + + +class Test_coordinator_sign_out: + def test_co_signout_successful(self, coordinator: Coordinator): + coordinator.sock._messages_read = [ # type: ignore + [b'n2', Message(b"COORDINATOR", b"N2.COORDINATOR", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 10, "method": "coordinator_sign_out", "jsonrpc": "2.0"})]] + node = coordinator.directory.get_node(b"N2") + coordinator.read_and_route() + assert b"n2" not in coordinator.directory.get_node_ids() + assert node._messages_sent == [Message( # type: ignore + b"N2.COORDINATOR", b"N1.COORDINATOR", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 100, "method": "coordinator_sign_out", "jsonrpc": 
"2.0"})] + + @pytest.mark.xfail(True, reason="Not yet defined.") + def test_co_signout_rejected_due_to_different_identity(self, coordinator: Coordinator): + """TODO TBD how to handle it""" + coordinator.set_log_level("DEBUG") + coordinator.sock._messages_read = [ # type: ignore + [b'n4', Message( + receiver=b"COORDINATOR", sender=b"N2.COORDINATOR", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 10, "method": "coordinator_sign_out", "jsonrpc": "2.0"})]] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [ # type: ignore + (b"n4", Message( + receiver=b"N2.COORDINATOR", sender=b"N1.COORDINATOR", conversation_id=cid, + message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=NOT_SIGNED_IN)))] + + def test_co_signout_of_not_signed_in_coordinator(self, coordinator: Coordinator): + """TODO TBD whether to reject or to ignore.""" + coordinator.sock._messages_read = [ # type: ignore + (b"n4", Message(b"COORDINATOR", b"N4.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 10, "method": "coordinator_sign_out", "jsonrpc": "2.0"}))] + coordinator.read_and_route() + assert coordinator.sock._messages_sent == [] # type: ignore + + +class Test_shutdown: + @pytest.fixture + def shutdown_coordinator(self, coordinator: Coordinator) -> Coordinator: + self.n2 = coordinator.directory.get_node(b"N2") + coordinator.stop_event = SimpleEvent() + coordinator.shut_down() + return coordinator + + def test_sign_out_message_to_other_coordinators_sent(self, shutdown_coordinator: Coordinator): + assert self.n2._messages_sent == [ # type: ignore + Message(b"N2.COORDINATOR", b"N1.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 2, "method": "coordinator_sign_out", "jsonrpc": "2.0"})] + + def test_event_set(self, shutdown_coordinator: Coordinator): + assert shutdown_coordinator.stop_event.is_set() is True + + +def test_send_nodes(coordinator: Coordinator): + data = coordinator.send_nodes() + assert data == {"N1": 
"N1host:12300", "N2": "N2host:12300"} + + +def test_send_local_components(coordinator: Coordinator): + data = coordinator.send_local_components() + assert data == ["send", "rec"] + + +def test_send_global_components(coordinator: Coordinator): + # Arrange + coordinator.global_directory[b"N5"] = ["some", "coordinator"] + # Act + data = coordinator.send_global_components() + assert data == { + "N5": ["some", "coordinator"], + "N1": ["send", "rec"]} + + +class Test_add_nodes: + def test_add_nodes(self, coordinator: Coordinator, fake_counting): + coordinator.add_nodes({"N1": "N1host:12300", "N2": "wrong_host:-7", "N3": "N3host:12300"}) + assert coordinator.directory.get_node(b"N2").address == "N2host:12300" # not changed + assert "N3host:12300" in coordinator.directory._waiting_nodes.keys() # newly created + + +class Test_record_components: + def test_set(self, coordinator: Coordinator): + coordinator.current_message = Message(b"", sender="N2.COORDINATOR") + coordinator.record_components(["send", "rec"]) + assert coordinator.global_directory == {b"N2": ["send", "rec"]} + + +def test_publish_directory_updates(coordinator: Coordinator): + # TODO TBD in LECO + coordinator.publish_directory_update() + assert coordinator.directory.get_node_ids()[b"n2"]._messages_sent == [ # type: ignore + Message( + b'N2.COORDINATOR', b'N1.COORDINATOR', + message_type=MessageTypes.JSON, + data=[ + {"id": 5, "method": "add_nodes", + "params": {"nodes": {"N1": "N1host:12300", "N2": "N2host:12300"}}, + "jsonrpc": "2.0"}, + {"id": 6, "method": "record_components", + "params": {"components": ["send", "rec"]}, + "jsonrpc": "2.0"}] + ), + ] diff --git a/tests/coordinators/test_proxy_server.py b/tests/coordinators/test_proxy_server.py new file mode 100644 index 000000000..cfee33e2a --- /dev/null +++ b/tests/coordinators/test_proxy_server.py @@ -0,0 +1,108 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +"""Hangs infinitely on import on github CI.""" + +# from __future__ import annotations +# import threading +# from typing import Optional + +# import pytest +# import zmq + +# from pyleco.core import ( +# PROXY_RECEIVING_PORT, +# PROXY_SENDING_PORT, +# ) +# from pyleco.test import FakeContext, FakeSocket + +# from pyleco.coordinators.proxy_server import pub_sub_proxy, start_proxy + +# parameters: tuple[FakeSocket, FakeSocket, Optional[FakeSocket]] + + +# @pytest.fixture +# def fake_proxy_steerable(monkeypatch: pytest.MonkeyPatch) -> None: +# def _fake_proxy_steerable( +# frontend: FakeSocket, backend: FakeSocket, capture: Optional[FakeSocket] = None +# ): +# global parameters +# parameters = frontend, backend, capture +# raise zmq.ContextTerminated + +# monkeypatch.setattr("zmq.proxy_steerable", _fake_proxy_steerable) + + +# class Test_pub_sub_proxy: +# def test_default_config(self, fake_proxy_steerable): +# pub_sub_proxy(FakeContext()) # type: ignore +# global parameters +# f, b, c = parameters +# assert f.addr == f"tcp://*:{PROXY_SENDING_PORT}" +# assert b.addr == f"tcp://*:{PROXY_RECEIVING_PORT}" +# assert c is None + +# def test_event_set_for_successful_binding(self, fake_proxy_steerable): +# event = threading.Event() +# pub_sub_proxy(FakeContext(), event=event) # type: ignore +# assert event.is_set() + +# @pytest.mark.parametrize( +# "pub, sub", +# ( +# ("localhost", "remote"), +# ("remote", "localhost"), +# ("a", "b"), +# ), +# ) +# def test_remote_configuration(self, pub: str, sub: str, fake_proxy_steerable): +# pub_sub_proxy(FakeContext(), sub=sub, pub=pub) # type: ignore +# global parameters +# f, b, c = parameters +# assert f.addr == f"tcp://{pub}:{PROXY_RECEIVING_PORT}" +# assert b.addr == f"tcp://{sub}:{PROXY_SENDING_PORT}" + +# def test_capture(self, fake_proxy_steerable): +# pub_sub_proxy(context=FakeContext(), captured=True) # type: ignore +# global parameters +# f, b, c = parameters # type: ignore +# c: FakeSocket +# assert c.addr == 
"inproc://capture" +# assert c.socket_type == zmq.PUB + + +# def test_start_proxy(): +# context = start_proxy() +# # assert no error is raised +# context.term() + + +# @pytest.mark.skip(reason="Hangs infinitely") +# def test_start_proxy_fails_if_already_started(): +# # arrange +# context = start_proxy() +# with pytest.raises(TimeoutError): +# start_proxy() +# # assert no error is raised +# context.term() diff --git a/tests/core/test_data_message.py b/tests/core/test_data_message.py new file mode 100644 index 000000000..d443f7819 --- /dev/null +++ b/tests/core/test_data_message.py @@ -0,0 +1,144 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.core.data_message import DataMessage + +topic = b"N1.CA" +cid = b"conversation_id;" +message_type = 0 + + +@pytest.fixture +def message(): + message = DataMessage(topic, conversation_id=cid, data=[4, 5]) + return message + + +class TestDataMessageInit: + def test_topic(self, message: DataMessage): + assert message.topic == topic + + def test_header(self, message: DataMessage): + assert message.header == cid + b"\x00" + + def test_data(self, message: DataMessage): + assert message.data == [4, 5] + + @pytest.mark.parametrize("key, value", (("conversation_id", b"content"), + ("message_type", 7), + )) + def test_header_param_incompatible_with_header_element_params(self, key, value): + with pytest.raises(ValueError, match="header"): + DataMessage(topic="topic", header=b"whatever", **{key: value}) + + def test_additional_payload(self): + message = DataMessage("topic", data=b"0", additional_payload=[b"1", b"2"]) + assert message.payload == [b"0", b"1", b"2"] + + def test_additional_payload_without_data(self): + message = DataMessage("topic", additional_payload=[b"1", b"2"]) + assert message.payload == [b"1", b"2"] + + +def test_data_message_str_topic(): + assert DataMessage(topic="topic").topic == b"topic" + + +def test_data_message_message_type(): + message = DataMessage(topic=b"topic", message_type=7) + assert message.message_type == 7 + + +def test_converation_id(message: DataMessage): + assert message.conversation_id == cid + + +def test_message_type(message: DataMessage): + assert message.message_type == 0 + + +class TestDataMessageData: + def test_bytes_payload(self): + message = DataMessage(b"", data=b"some data stuff") + assert message.payload == [b"some data stuff"] + + def test_str_payload(self): + message = DataMessage(b"", data="some data stuff") + assert message.payload == [b"some data stuff"] + + def test_no_payload(self): + message = DataMessage(b"") + assert message.payload == [] + + +class TestFromFrames: + 
@pytest.fixture + def from_frames_message(self): + message = DataMessage.from_frames(b"frame0", b"frame1", b"frame2", b"frame3") + return message + + def test_topic(self, from_frames_message: DataMessage): + assert from_frames_message.topic == b"frame0" + + def test_header(self, from_frames_message: DataMessage): + assert from_frames_message.header == b"frame1" + + def test_payload(self, from_frames_message: DataMessage): + assert from_frames_message.payload == [b"frame2", b"frame3"] + + +def test_to_frames(): + message = DataMessage(b"topic", conversation_id=cid, data=b"data") + assert message.to_frames() == [b"topic", b"conversation_id;\x00", b"data"] + + +class TestComparison: + def test_message_comparison(self): + frames = [b"topic", b"conversation_id;\x00", b'[["GET", [1, 2]]'] + m1 = DataMessage.from_frames(*frames) + m2 = DataMessage.from_frames(*frames) + assert m1 == m2 + + def test_dictionary_order_is_irrelevant(self): + m1 = DataMessage(b"topic", conversation_id=cid, data={"a": 1, "b": 2}) + m2 = DataMessage(b"topic", conversation_id=cid, data={"b": 2, "a": 1}) + assert m1 == m2 + + def test_distinguish_empty_payload_frame(self): + m1 = DataMessage("r", conversation_id=b"conversation_id;") + m1.payload = [b""] + m2 = DataMessage("r", conversation_id=b"conversation_id;") + assert m2.payload == [] # verify that it does not have a payload + assert m1 != m2 + + @pytest.mark.parametrize("other", (5, 3.4, [64, 3], (5, "string"), "string")) + def test_comparison_with_something_else_fails(self, message, other): + assert message != other + + +def test_repr(): + message = DataMessage.from_frames(b'topic', b'conversation_id;\x00', b'data') + assert repr(message) == r"DataMessage.from_frames(b'topic', b'conversation_id;\x00', b'data')" diff --git a/tests/core/test_internal_protocols.py b/tests/core/test_internal_protocols.py new file mode 100644 index 000000000..acd4c74e6 --- /dev/null +++ b/tests/core/test_internal_protocols.py @@ -0,0 +1,183 @@ +# +# This file is 
part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.core.message import Message, MessageTypes +from pyleco.core.internal_protocols import CommunicatorProtocol +from pyleco.test import FakeCommunicator +from pyleco.json_utils.errors import JSONRPCError + +cid = b"conversation_id;" + +# Test the utility methods of the CommunicatorProtocol + + +@pytest.fixture +def communicator() -> CommunicatorProtocol: + return FakeCommunicator(name="communicator") + + +def test_full_name_without_namespace(communicator: FakeCommunicator): + communicator.namespace = None + assert communicator.full_name == "communicator" + + +def test_full_name_with_namespace(communicator: FakeCommunicator): + communicator.namespace = "N1" + assert communicator.full_name == "N1.communicator" + + +def test_send(communicator: FakeCommunicator): + kwargs = dict(receiver="rec", message_type=MessageTypes.JSON, data=[4, 5], conversation_id=cid) + communicator.send(**kwargs) # type: ignore + assert communicator._s[0] == Message(sender="communicator", **kwargs) # type: ignore + + +class Test_ask: + response = Message(receiver="communicator", sender="rec", conversation_id=cid) + + @pytest.fixture + def communicator_asked(self, communicator: FakeCommunicator): + communicator._r = [self.response] + return communicator + + def test_sent(self, communicator_asked: FakeCommunicator): + communicator_asked.ask(receiver="rec", conversation_id=cid) + assert communicator_asked._s == [Message(receiver="rec", sender="communicator", + conversation_id=cid)] + + def test_read(self, communicator_asked: FakeCommunicator): + response = communicator_asked.ask(receiver="rec", conversation_id=cid) + assert response == self.response + + +class Test_interpret_rpc_response: + def test_valid_message(self, communicator: FakeCommunicator): + message = Message(receiver="rec", data={"jsonrpc": "2.0", "result": 6.0, "id": 7}) + assert communicator.interpret_rpc_response(message) == 6.0 + + def test_error(self, communicator: FakeCommunicator): + message = 
Message(receiver="rec", data={"jsonrpc": "2.0", + "error": {"code": -1, "message": "abc"}, "id": 7}) + with pytest.raises(JSONRPCError): + communicator.interpret_rpc_response(message) + + def test_json_binary_response(self, communicator: FakeCommunicator): + message = Message( + receiver="rec", + data={"jsonrpc": "2.0", "result": None, "id": 7}, + additional_payload=[b"abcd", b"efgh"], + ) + assert communicator.interpret_rpc_response(message, extract_additional_payload=True) == ( + None, + [ + b"abcd", + b"efgh", + ], + ) + + def test_ignore_additional_payload_if_not_desired(self, communicator: FakeCommunicator): + message = Message( + receiver="rec", + data={"jsonrpc": "2.0", "result": None, "id": 7}, + additional_payload=[b"abcd"], + ) + assert ( + communicator.interpret_rpc_response(message, extract_additional_payload=False) is None + ) + + def test_without_additional_payload_return_empty_list(self, communicator: FakeCommunicator): + message = Message( + receiver="rec", + data={"jsonrpc": "2.0", "result": None, "id": 7}, + ) + assert communicator.interpret_rpc_response(message, extract_additional_payload=True) == ( + None, + [], + ) + + def test_json_value_and_binary_payload(self, communicator: FakeCommunicator): + message = Message( + receiver="rec", + data={"jsonrpc": "2.0", "result": 6, "id": 7}, + additional_payload=[b"abcd"], + ) + assert communicator.interpret_rpc_response(message, extract_additional_payload=True) == ( + 6, + [b"abcd"], + ) + + +class Test_ask_rpc: + response = Message(receiver="communicator", sender="rec", conversation_id=cid, + message_type=MessageTypes.JSON, + data={ + "jsonrpc": "2.0", + "result": 5, + "id": 1, + }) + + @pytest.fixture + def communicator_asked(self, communicator: FakeCommunicator): + communicator._r = [self.response] + return communicator + + def test_sent(self, communicator_asked: FakeCommunicator): + communicator_asked.ask_rpc(receiver="rec", method="test_method", par1=5) + sent = communicator_asked._s[0] + assert 
communicator_asked._s == [Message(receiver="rec", sender="communicator", + conversation_id=sent.conversation_id, + message_type=MessageTypes.JSON, + data={ + "jsonrpc": "2.0", + "method": "test_method", + "id": 1, + "params": {'par1': 5}, + })] + + def test_sent_with_additional_payload(self, communicator_asked: FakeCommunicator): + communicator_asked.ask_rpc( + receiver="rec", method="test_method", par1=5, additional_payload=[b"12345"] + ) + sent = communicator_asked._s[0] + assert communicator_asked._s == [ + Message( + receiver="rec", + sender="communicator", + conversation_id=sent.conversation_id, + message_type=MessageTypes.JSON, + data={ + "jsonrpc": "2.0", + "method": "test_method", + "id": 1, + "params": {"par1": 5}, + }, + additional_payload=[b"12345"], + ) + ] + + def test_read(self, communicator_asked: FakeCommunicator): + result = communicator_asked.ask_rpc(receiver="rec", method="test_method", par1=5) + assert result == 5 diff --git a/tests/core/test_message.py b/tests/core/test_message.py index fc3504236..17a940e4a 100644 --- a/tests/core/test_message.py +++ b/tests/core/test_message.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. 
# -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -84,6 +84,14 @@ def test_message_data_str_to_binary_data(self): message = Message(b"rec", data="some string") assert message.payload[0] == b"some string" + def test_additional_binary_data(self): + message = Message(b"rec", data=b"0", additional_payload=[b"1", b"2"]) + assert message.payload == [b"0", b"1", b"2"] + + def test_additional_payload_without_data(self): + message = Message(b"rec", additional_payload=[b"1", b"2"]) + assert message.payload == [b"1", b"2"] + @pytest.mark.parametrize("key, value", (("conversation_id", b"content"), ("message_id", b"mid"), ("message_type", 7), @@ -213,5 +221,10 @@ def test_repr(): assert repr(message) == r"Message.from_frames(b'V', b'rec', b'send', b'cid;mid', b'data')" +def test_repr_without_sender(): + message = Message.from_frames(b'V', b'rec', b'', b'cid;mid', b'data') + assert repr(message) == r"Message.from_frames(b'V', b'rec', b'', b'cid;mid', b'data')" + + def test_conversation_id_getter(message: Message): assert message.conversation_id == cid diff --git a/tests/core/test_serialization.py b/tests/core/test_serialization.py index eeff97f22..5682e1b3c 100644 --- a/tests/core/test_serialization.py +++ b/tests/core/test_serialization.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. # -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -22,10 +22,16 @@ # THE SOFTWARE. 
# +from __future__ import annotations +import datetime +import uuid +from typing import Any, Optional, Union + import pytest -from jsonrpcobjects.objects import Request +from pyleco.json_utils.json_objects import Request from pyleco.core import serialization +from pyleco.core.serialization import JsonContentTypes, get_json_content_type class Test_create_header_frame: @@ -103,3 +109,72 @@ def test_UUID_version(self, conversation_id): def test_variant(self, conversation_id): assert conversation_id[8] >> 6 == 0b10 + + def test_correct_timestamp(self, conversation_id): + ts = serialization.conversation_id_to_datetime(conversation_id=conversation_id) + assert abs(ts - datetime.datetime.now(datetime.timezone.utc)) < datetime.timedelta(hours=1) + + +def test_conversation_id_to_datetime_according_to_uuid_example(): + """According to the draft https://datatracker.ietf.org/doc/draft-ietf-uuidrev-rfc4122bis/14/ + 017F22E2-79B0-7CC3-98C4-DC0C0C07398F should be a timestamp of + Tuesday, February 22, 2022 2:22:22.00 PM GMT-05:00, represented as 1645557742000 + """ + cid = uuid.UUID("017F22E2-79B0-7CC3-98C4-DC0C0C07398F").bytes + reference = datetime.datetime( + 2022, 2, 22, 14, 22, 22, tzinfo=datetime.timezone(datetime.timedelta(hours=-5)) + ) + ts = serialization.conversation_id_to_datetime(cid) + assert reference - ts == datetime.timedelta(0) + + +def test_json_type_result_is_response(): + assert JsonContentTypes.RESPONSE in JsonContentTypes.RESULT_RESPONSE + assert JsonContentTypes.RESULT in JsonContentTypes.RESULT_RESPONSE + + +def test_json_type_error_is_response(): + assert JsonContentTypes.RESPONSE in JsonContentTypes.ERROR_RESPONSE + assert JsonContentTypes.ERROR in JsonContentTypes.ERROR_RESPONSE + + +# Methods for get_json_content_type +def create_request(method: str, params: Optional[Union[list, dict]] = None, id: int = 1 + ) -> dict[str, Any]: + return {"jsonrpc": "2.0", "id": id, "method": method, "params": params} + + +def create_result(result: Any, id: int = 1) -> 
dict[str, Any]: + return {"jsonrpc": "2.0", "result": result, "id": id} + + +def create_error(error_code: int, error_message: str, id: int = 1) -> dict[str, Any]: + return {"jsonrpc": "2.0", "id": id, "error": {"code": error_code, "message": error_message}} + + +class Test_get_json_content_type: + @pytest.mark.parametrize("data, type", ( + (create_request("abc"), JsonContentTypes.REQUEST), + ([create_request(method="abc")] * 2, JsonContentTypes.REQUEST | JsonContentTypes.BATCH), + (create_result(None), JsonContentTypes.RESULT_RESPONSE), + ([create_result(None), create_result(5, 7)], + JsonContentTypes.RESULT_RESPONSE | JsonContentTypes.BATCH), + (create_error(89, "whatever"), JsonContentTypes.ERROR_RESPONSE), + ([create_error(89, "xy")] * 2, + JsonContentTypes.ERROR_RESPONSE | JsonContentTypes.BATCH), + ([create_result(4), create_error(32, "xy")], # batch of result and error + JsonContentTypes.RESULT_RESPONSE | JsonContentTypes.BATCH | JsonContentTypes.ERROR), + )) + def test_data_is_valid_type(self, data, type): + assert get_json_content_type(data) == type + + @pytest.mark.parametrize("data", ( + {}, + [], + [{}], + {"some": "thing"}, + 5.6, + "adsfasdf", + )) + def test_invalid_data(self, data): + assert get_json_content_type(data) == JsonContentTypes.INVALID diff --git a/tests/directors/test_coordinator_director.py b/tests/directors/test_coordinator_director.py new file mode 100644 index 000000000..29e2e0369 --- /dev/null +++ b/tests/directors/test_coordinator_director.py @@ -0,0 +1,48 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.test import FakeDirector +from pyleco.coordinators.coordinator import Coordinator +from pyleco.directors.coordinator_director import CoordinatorDirector + + +class FakeCoordinatorDirector(FakeDirector, CoordinatorDirector): + """Replaces the ask_rpc method.""" + + +@pytest.fixture +def coordinator_director(): + data_logger_director = FakeCoordinatorDirector(remote_class=Coordinator) + return data_logger_director + + +@pytest.mark.parametrize("method", ("get_local_components", "get_global_components", "get_nodes", + )) +def test_method_call_existing_remote_methods(coordinator_director: FakeCoordinatorDirector, method): + """Test that the remote method exists.""" + coordinator_director.return_value = None + getattr(coordinator_director, method)() + # asserts that no error is raised in the "ask_rpc" method diff --git a/tests/directors/test_data_logger_director.py b/tests/directors/test_data_logger_director.py new file mode 100644 index 000000000..b8ea437ca --- /dev/null +++ b/tests/directors/test_data_logger_director.py @@ -0,0 +1,56 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from inspect import getfullargspec + +import pytest + +from pyleco.test import FakeDirector +from pyleco.management.data_logger import DataLogger +from pyleco.directors.data_logger_director import DataLoggerDirector + + +class FakeDataLoggerDirector(FakeDirector, DataLoggerDirector): + """Replaces the ask_rpc method.""" + + +@pytest.fixture +def data_logger_director(): + data_logger_director = FakeDataLoggerDirector(remote_class=DataLogger) + return data_logger_director + + +def test_start_collecting_signature(): + orig_spec = getfullargspec(DataLogger.start_collecting) + dir_spec = getfullargspec(DataLoggerDirector.start_collecting) + assert orig_spec == dir_spec + + +@pytest.mark.parametrize("method", ("save_data", "start_collecting", "stop_collecting", + "save_data_async", "get_last_datapoint" + )) +def test_method_call_existing_remote_methods(data_logger_director: FakeDataLoggerDirector, method): + data_logger_director.return_value = None + getattr(data_logger_director, method)() + # asserts that no error is raised in the "ask_rpc" method diff --git a/tests/directors/test_director.py b/tests/directors/test_director.py new file mode 100644 index 000000000..e0a2b826b --- /dev/null +++ b/tests/directors/test_director.py @@ -0,0 +1,236 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.directors.director import Director +from pyleco.test import FakeCommunicator +from pyleco.core.message import Message, MessageTypes + + +cid = b"conversation_id;" + + +def fake_generate_conversation_id(): + return cid + + +@pytest.fixture +def director(monkeypatch): + monkeypatch.setattr("pyleco.directors.director.generate_conversation_id", + fake_generate_conversation_id) + monkeypatch.setattr("pyleco.core.serialization.generate_conversation_id", + fake_generate_conversation_id) + return Director(actor="actor", communicator=FakeCommunicator(name="director")) + + +def test_sign_out(director: Director): + director.sign_out() + assert director.communicator._signed_in is False # type: ignore + + +def test_close(director: Director): + director._own_communicator = True + director.close() + assert director.communicator._closed is True # type: ignore + + +def test_context_manager(): + with Director(communicator=FakeCommunicator(name="director")) as director: + communicator = director.communicator + director._own_communicator = True + communicator._closed = False # type: ignore + assert communicator._closed is True # type: ignore + + +class Test_actor_check: + def test_invalid_actor(self, director: Director): + director.actor = None + with pytest.raises(ValueError): + director._actor_check(actor=None) + + def test_given_actor(self, director: Director): + assert director._actor_check("another_actor") == "another_actor" + + def test_default_actor(self, director: Director): + assert director._actor_check("") == "actor" + + +def test_ask_message(director: Director): + rec = Message("director", "actor", conversation_id=cid) + director.communicator._r = [rec] # type: ignore + result = director.ask_message() + assert result == rec + sent = director.communicator._s[0] # type: ignore + assert sent == Message( + "actor", + "director", + conversation_id=cid, + ) + + +def test_get_rpc_capabilities(director: Director): + data = {"name": "actor", 
"methods": []} + director.communicator._r = [ # type: ignore + Message("director", "actor", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "result": data, "jsonrpc": "2.0" + })] + result = director.get_rpc_capabilities() + assert director.communicator._s == [ # type: ignore + Message("actor", "director", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "method": "rpc.discover", "jsonrpc": "2.0" + })] + assert result == data + + +def test_shutdown_actor(director: Director): + director.communicator._r = [ # type: ignore + Message("director", "actor", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "result": None, "jsonrpc": "2.0" + })] + director.shut_down_actor() + assert director.communicator._s == [ # type: ignore + Message("actor", "director", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "method": "shut_down", "jsonrpc": "2.0" + })] + + +def test_set_actor_log_level(director: Director): + director.communicator._r = [ # type: ignore + Message("director", "actor", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "result": None, "jsonrpc": "2.0" + })] + director.set_actor_log_level(30) + assert director.communicator._s == [ # type: ignore + Message("actor", "director", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "method": "set_log_level", "jsonrpc": "2.0", "params": {"level": "WARNING"} + })] + + +def test_read_rpc_response(director: Director): + director.communicator._r = [ # type: ignore + Message("director", "actor", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "result": 7.5, "jsonrpc": "2.0" + })] + assert director.read_rpc_response(conversation_id=cid) == 7.5 + + +def test_read_binary_rpc_response(director: Director): + director.communicator._r = [ # type: ignore + Message( + "director", + "actor", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 1, "result": None, 
"jsonrpc": "2.0"}, + additional_payload=[b"123"], + ) + ] + assert director.read_rpc_response(conversation_id=cid, extract_additional_payload=True) == ( + None, + [b"123"], + ) + + +def test_get_properties_async(director: Director): + properties = ["a", "some"] + cid = director.get_parameters_async(parameters=properties) + assert director.communicator._s == [Message( # type: ignore + receiver="actor", sender="director", conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "method": "get_parameters", "params": {"parameters": properties}, + "jsonrpc": "2.0"} + )] + + +def test_get_properties_async_string(director: Director): + properties = ["some"] + cid = director.get_parameters_async(parameters=properties[0]) + assert director.communicator._s == [Message( # type: ignore + receiver="actor", sender="director", conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "method": "get_parameters", "params": {"parameters": properties}, + "jsonrpc": "2.0"} + )] + + +def test_set_properties_async(director: Director): + properties = {"a": 5, "some": 7.3} + cid = director.set_parameters_async(parameters=properties) + assert director.communicator._s == [Message( # type: ignore + receiver="actor", sender="director", conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "method": "set_parameters", "params": {"parameters": properties}, + "jsonrpc": "2.0"} + )] + + +def test_call_action_async_with_args_and_kwargs(director: Director): + cid = director.call_action_async("action_name", "arg1", key1=1) + assert director.communicator._s == [Message( # type: ignore + receiver="actor", sender="director", conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "method": "call_action", "params": {"action": "action_name", + "args": ["arg1"], "kwargs": {"key1": 1}}, + "jsonrpc": "2.0"} + )] + + +def test_call_action_async_with_args_only(director: Director): + cid = director.call_action_async("action_name", "arg1", 5) + assert 
director.communicator._s == [Message( # type: ignore + receiver="actor", sender="director", conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "method": "call_action", "params": {"action": "action_name", + "args": ["arg1", 5]}, + "jsonrpc": "2.0"} + )] + + +def test_call_action_async_with_kwargs_only(director: Director): + cid = director.call_action_async("action_name", arg1=1, arg2="abc") + assert director.communicator._s == [Message( # type: ignore + receiver="actor", sender="director", conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "method": "call_action", "params": {"action": "action_name", + "kwargs": {"arg1": 1, "arg2": "abc"}}, + "jsonrpc": "2.0"} + )] + + +class Test_get_properties: + properties = ["a", "some"] + expected_result = {"a": 5, "some": 7} + + @pytest.fixture + def director_gp(self, director: Director): + director.communicator._r = [ # type: ignore + Message("director", "actor", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "result": self.expected_result, "jsonrpc": "2.0" + })] + self.result = director.get_parameters(parameters=self.properties) + return director + + def test_message_sent(self, director_gp): + assert director_gp.communicator._s == [Message( # type: ignore + "actor", "director", conversation_id=cid, message_type=MessageTypes.JSON, data={ + "id": 1, "method": "get_parameters", "params": {"parameters": self.properties}, + "jsonrpc": "2.0"})] + + def test_result(self, director_gp): + assert self.result == self.expected_result diff --git a/tests/directors/test_locking_director.py b/tests/directors/test_locking_director.py new file mode 100644 index 000000000..ac5d4f84d --- /dev/null +++ b/tests/directors/test_locking_director.py @@ -0,0 +1,46 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.test import FakeDirector +from pyleco.actors.locking_actor import LockingActor +from pyleco.directors.locking_director import LockingDirector + + +class FakeLockingDirector(FakeDirector, LockingDirector): + """Replaces the ask_rpc method.""" + + +@pytest.fixture +def locking_director() -> LockingDirector: + data_logger_director = FakeLockingDirector(remote_class=LockingActor) + return data_logger_director + + +@pytest.mark.parametrize("method", ("lock", "unlock", "force_unlock")) +def test_method_call_existing_remote_methods(locking_director: FakeLockingDirector, method): + locking_director.return_value = None + getattr(locking_director, method)("task_name") + # asserts that no error is raised in the "ask_rpc" method diff --git a/tests/directors/test_starter_director.py b/tests/directors/test_starter_director.py new file mode 100644 index 000000000..c8bd1f09b --- /dev/null +++ b/tests/directors/test_starter_director.py @@ -0,0 +1,48 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import pytest + +from pyleco.test import FakeDirector +from pyleco.management.starter import Starter +from pyleco.directors.starter_director import StarterDirector + + +class FakeStarterDirector(FakeDirector, StarterDirector): + """Replaces the ask_rpc method.""" + + +@pytest.fixture +def starter_director() -> StarterDirector: + data_logger_director = FakeStarterDirector(remote_class=Starter) + return data_logger_director + + +@pytest.mark.parametrize("method", ("start_tasks", "restart_tasks", "stop_tasks", "install_tasks", + "status_tasks", "list_tasks", + )) +def test_method_call_existing_remote_methods(starter_director: FakeStarterDirector, method): + starter_director.return_value = None + getattr(starter_director, method)("task_name") + # asserts that no error is raised in the "ask_rpc" method diff --git a/tests/directors/test_transparent_director.py b/tests/directors/test_transparent_director.py new file mode 100644 index 000000000..c6c6bac7a --- /dev/null +++ b/tests/directors/test_transparent_director.py @@ -0,0 +1,73 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from unittest.mock import MagicMock + +import pytest + +from pyleco.test import FakeCommunicator +from pyleco.directors.transparent_director import TransparentDevice, TransparentDirector, RemoteCall + + +def get_parameters_fake(parameters): + pars = {} + for i, par in enumerate(parameters): + pars[par] = i + return pars + + +class FakeDirector(TransparentDirector): + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.call_action = MagicMock() + self.get_parameters = MagicMock() + self.get_parameters.side_effect = get_parameters_fake + self.set_parameters = MagicMock() + + +class FantasyDevice(TransparentDevice): + method = RemoteCall() + + +@pytest.fixture +def director() -> TransparentDirector: + director = FakeDirector(device_class=FantasyDevice, + communicator=FakeCommunicator(name="Communicator")) # type: ignore + return director + + +def test_get_parameters(director: TransparentDirector): + assert director.device.getter == 0 + director.get_parameters.assert_called_once_with(parameters=("getter",)) # type: ignore + + +def test_set_parameters(director: TransparentDirector): + director.device.setter = 5 + director.set_parameters.assert_called_once_with(parameters={"setter": 5}) # type: ignore + + +def test_method(director: TransparentDirector): + director.device.method(5, kwarg=7) + director.call_action.assert_called_once_with("method", 5, kwarg=7) # type: ignore diff --git a/tests/json_utils/test_errors.py b/tests/json_utils/test_errors.py new file mode 100644 index 000000000..e28156098 --- /dev/null +++ b/tests/json_utils/test_errors.py @@ -0,0 +1,33 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from pyleco.json_utils.json_objects import Error +from pyleco.json_utils.errors import JSONRPCError + + +def test_JsonRPCError(): + error = Error(5, "abc") + exc = JSONRPCError(error) + assert exc.rpc_error == error + assert exc.args[0] == "5: abc" diff --git a/tests/json_utils/test_json_objects.py b/tests/json_utils/test_json_objects.py new file mode 100644 index 000000000..59474fe43 --- /dev/null +++ b/tests/json_utils/test_json_objects.py @@ -0,0 +1,123 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.json_utils import json_objects + + +def test_request(): + request = json_objects.Request(id=5, method="call") + assert request.model_dump() == { + "id": 5, + "jsonrpc": "2.0", + "method": "call", + } + + +def test_request_with_params(): + request = json_objects.ParamsRequest(id=5, method="call", params=[1, 5]) + assert request.model_dump() == { + "id": 5, + "jsonrpc": "2.0", + "method": "call", + "params": [1, 5], + } + + +def test_notification(): + request = json_objects.Notification(method="call") + assert request.model_dump() == { + "jsonrpc": "2.0", + "method": "call", + } + + +def test_notification_with_params(): + request = json_objects.ParamsNotification(method="call", params=[1, 5]) + assert request.model_dump() == { + "jsonrpc": "2.0", + "method": "call", + "params": [1, 5], + } + + +def test_result(): + result = json_objects.ResultResponse(id=5, result=7) + assert result.model_dump() == { + "id": 5, + "jsonrpc": "2.0", + "result": 7, + } + + +def test_error_with_data(): + """Test that the Error object is json serializable.""" + data_error = json_objects.DataError(code=5, message="whatever", data="abc") + error_response = json_objects.ErrorResponse(id=7, error=data_error) + assert error_response.model_dump_json() == '{"id":7,"error":{"code":5,"message":"whatever","data":"abc"},"jsonrpc":"2.0"}' # noqa + + +def test_generate_data_error_from_error(): + error = json_objects.Error(code=5, message="abc") + data_error = json_objects.DataError.from_error(error, "data") + assert data_error.code == error.code + assert data_error.message == error.message + assert data_error.data == "data" + + +class Test_BatchObject: + element = json_objects.Request(5, "start") + + @pytest.fixture + def batch_obj(self): + return json_objects.BatchObject([self.element]) + + def test_init_with_value(self): + obj = json_objects.BatchObject([self.element]) + assert obj == [self.element] + + def test_init_with_values(self): + obj = 
json_objects.BatchObject([self.element, self.element]) + assert obj == [self.element, self.element] + + def test_bool_value_with_element(self): + obj = json_objects.BatchObject([self.element]) + assert bool(obj) is True + + def test_bool_value_without_element(self): + obj = json_objects.BatchObject() + assert bool(obj) is False + + def test_append(self, batch_obj: json_objects.BatchObject): + el2 = json_objects.Request(5, "start") + batch_obj.append(el2) + assert batch_obj[-1] == el2 + + def test_model_dump(self, batch_obj: json_objects.BatchObject): + assert batch_obj.model_dump() == [self.element.model_dump()] + + def test_model_dump_json(self, batch_obj: json_objects.BatchObject): + result = '[{"id":5,"method":"start","jsonrpc":"2.0"}]' + assert batch_obj.model_dump_json() == result diff --git a/tests/core/test_rpc_generator.py b/tests/json_utils/test_rpc_generator.py similarity index 88% rename from tests/core/test_rpc_generator.py rename to tests/json_utils/test_rpc_generator.py index 7f8215e98..45d63da5c 100644 --- a/tests/core/test_rpc_generator.py +++ b/tests/json_utils/test_rpc_generator.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. 
# -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -26,11 +26,11 @@ import pytest -from jsonrpcobjects.objects import ErrorResponse -from jsonrpcobjects.errors import JSONRPCError +from pyleco.json_utils.json_objects import ErrorResponse +from pyleco.json_utils.errors import (JSONRPCError, NODE_UNKNOWN, NOT_SIGNED_IN, DUPLICATE_NAME, + RECEIVER_UNKNOWN) -from pyleco.errors import NODE_UNKNOWN, NOT_SIGNED_IN, DUPLICATE_NAME, RECEIVER_UNKNOWN -from pyleco.core.rpc_generator import RPCGenerator, INVALID_SERVER_RESPONSE +from pyleco.json_utils.rpc_generator import RPCGenerator, INVALID_SERVER_RESPONSE @pytest.fixture @@ -55,12 +55,6 @@ def test_build_request_str_raises_error(generator: RPCGenerator): generator.build_request_str("some_method", "argument", keyword="whatever") -def test_clear_ids(generator: RPCGenerator): - generator._ids = {7: 7} - generator.clear_id_list() - assert generator._ids == {} - - @pytest.mark.parametrize("response, result", ( ('{"id": 5, "result": 7.9, "jsonrpc": "2.0"}', 7.9), (b'{"id": 5, "result": 7.9, "jsonrpc": "2.0"}', 7.9), # bytes version diff --git a/tests/json_utils/test_rpc_server.py b/tests/json_utils/test_rpc_server.py new file mode 100644 index 000000000..06e07975a --- /dev/null +++ b/tests/json_utils/test_rpc_server.py @@ -0,0 +1,305 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import json +import logging + +import pytest + +from pyleco.json_utils.rpc_generator import RPCGenerator +from pyleco.json_utils.rpc_server_definition import RPCServer +from pyleco.json_utils.json_objects import ( + Request, + ParamsRequest, + ResultResponse, + ErrorResponse, + DataError, + ResponseBatch, +) +from pyleco.json_utils.errors import ( + ServerError, + InvalidRequest, + INVALID_REQUEST, + SERVER_ERROR, + INTERNAL_ERROR, +) + +try: + # Load openrpc server for comparison, if available. 
+ from openrpc import RPCServer as RPCServerOpen # type: ignore +except ModuleNotFoundError: + rpc_server_classes: list = [RPCServer] +else: + rpc_server_classes = [RPCServer, RPCServerOpen] + + +@pytest.fixture +def rpc_generator() -> RPCGenerator: + return RPCGenerator() + + +args = None +def side_effect_method(arg=None) -> int: + global args + args = (arg,) + return 5 + + +def fail() -> None: + """Fail always. + + This method fails always. + And has a description. + """ + raise NotImplementedError + + +def simple() -> int: + """A method without parameters.""" + return 7 + + +def obligatory_parameter(arg1: float) -> float: + """Needs an argument.""" + return arg1 * 2 + + +@pytest.fixture(params=rpc_server_classes) +def rpc_server(request) -> RPCServer: + rpc_server = request.param() + rpc_server.method(name="sem")(side_effect_method) + rpc_server.method()(side_effect_method) + rpc_server.method()(fail) + rpc_server.method()(simple) + rpc_server.method()(obligatory_parameter) + return rpc_server + + +@pytest.fixture +def rpc_server_local() -> RPCServer: + """Create an instance of PyLECO's RPC Server""" + rpc_server = RPCServer() + rpc_server.method(name="sem")(side_effect_method) + rpc_server.method()(side_effect_method) + rpc_server.method()(fail) + rpc_server.method()(simple) + rpc_server.method()(obligatory_parameter) + return rpc_server + + +def test_success(rpc_generator: RPCGenerator, rpc_server: RPCServer): + request = rpc_generator.build_request_str(method="sem", arg=3) + response = rpc_server.process_request(request) + result = rpc_generator.get_result_from_response(response) # type: ignore + assert result == 5 + assert args == (3,) # type: ignore + + +def test_multiple_requests_success(rpc_server: RPCServer, rpc_generator: RPCGenerator): + request1 = ParamsRequest(id=1, method="sem", params=[3]) + request2 = Request(id=2, method="side_effect_method") + message = json.dumps([request1.model_dump(), request2.model_dump()]) + result = 
rpc_server.process_request(message) + result_obj = json.loads(result) # type: ignore + assert rpc_generator.get_result_from_response(result_obj[0]) == 5 + assert rpc_generator.get_result_from_response(result_obj[1]) == 5 + + +def test_failing_method(rpc_generator: RPCGenerator, rpc_server: RPCServer): + request = rpc_generator.build_request_str(method="fail") + response = rpc_server.process_request(request) + with pytest.raises(ServerError) as exc_info: + rpc_generator.get_result_from_response(response) # type: ignore + error = exc_info.value.rpc_error + assert error.code == SERVER_ERROR.code + assert error.message == SERVER_ERROR.message + + +@pytest.mark.xfail(True, reason="Self written RPCServer cannot handle additional args") +def test_wrong_method_arguments(rpc_generator: RPCGenerator, rpc_server: RPCServer): + request = rpc_generator.build_request_str(method="simple", arg=9) + response = rpc_server.process_request(request) + result = rpc_generator.get_result_from_response(response) # type: ignore + assert result == 7 + + +def test_obligatory_parameter_missing(rpc_generator: RPCGenerator, rpc_server: RPCServer): + request = rpc_generator.build_request_str(method="obligatory_parameter") + response = rpc_server.process_request(request) + with pytest.raises(ServerError) as exc_info: + rpc_generator.get_result_from_response(response) # type: ignore + error = exc_info.value.rpc_error + assert error.code == SERVER_ERROR.code + assert error.message == SERVER_ERROR.message + + +def test_process_response(rpc_server: RPCServer, rpc_generator: RPCGenerator): + """It should be a request, not a response!""" + request = ResultResponse(id=7, result=9) + request_string = request.model_dump_json() + response = rpc_server.process_request(request_string) + with pytest.raises(InvalidRequest) as exc_info: + rpc_generator.get_result_from_response(response) # type: ignore + error = exc_info.value.rpc_error + assert error.code == INVALID_REQUEST.code + assert error.message == 
INVALID_REQUEST.message + # ignore the following test, which depends on the openrpc version + # assert error.data == request.model_dump() # type: ignore + + +class Test_discover_method: + + @pytest.fixture + def discovered(self, rpc_server: RPCServer, rpc_generator: RPCGenerator) -> dict: + request = rpc_generator.build_request_str("rpc.discover") + response = rpc_server.process_request(request) + return rpc_generator.get_result_from_response(response) # type: ignore + + def test_info(self, discovered: dict): + info: dict = discovered["info"] + assert info.get("title") == "RPC Server" + assert info.get("version") == "0.1.0" + + @pytest.fixture + def methods(self, discovered: dict) -> list: + return discovered.get("methods") # type: ignore + + def test_side_effect_method(self, methods: list): + m1: dict = methods[0] + m2 = methods[1] + assert m1["name"] == "sem" + assert m1.get("description") is None + assert m2["name"] == "side_effect_method" + + def test_fail(self, methods: list): + m = methods[2] + assert m["name"] == "fail" + assert m["summary"] == "Fail always." + assert m.get("description") == "This method fails always. And has a description." + + def test_simple_description(self, methods: list): + m = methods[3] + assert m["name"] == "simple" + assert m["summary"] == "A method without parameters." 
+ assert m.get("description") is None + + def test_rpc_discover_not_listed(self, methods: list): + for m in methods: + if m.get("name") == "rpc.discover": + raise AssertionError("rpc.discover is listed as a method!") + + +# tests regarding the local implementation of the RPC Server +def test_process_single_notification(rpc_server_local: RPCServer): + result = rpc_server_local._process_single_request( + {"jsonrpc": "2.0", "method": "simple"} + ) + assert result is None + + +class Test_process_request: + def test_log_exception(self, rpc_server_local: RPCServer, caplog: pytest.LogCaptureFixture): + rpc_server_local.process_request(b"\xff") + records = caplog.record_tuples + assert records[-1] == ( + "pyleco.json_utils.rpc_server_definition", + logging.ERROR, + "UnicodeDecodeError:", + ) + + def test_exception_response(self, rpc_server_local: RPCServer): + result = rpc_server_local.process_request(b"\xff") + assert result == ErrorResponse(id=None, error=INTERNAL_ERROR).model_dump_json() + + def test_invalid_request(self, rpc_server_local: RPCServer): + result = rpc_server_local.process_request(b"7") + assert ( + result + == ErrorResponse( + id=None, error=DataError.from_error(INVALID_REQUEST, 7) + ).model_dump_json() + ) + + def test_batch_entry_notification(self, rpc_server_local: RPCServer): + """A notification (request without id) shall not return anything.""" + requests = [ + {"jsonrpc": "2.0", "method": "simple"}, + {"jsonrpc": "2.0", "method": "simple", "id": 4}, + ] + result = json.loads(rpc_server_local.process_request(json.dumps(requests))) # type: ignore + assert result == [{"jsonrpc": "2.0", "result": 7, "id": 4}] + + def test_batch_of_notifications(self, rpc_server_local: RPCServer): + """A notification (request without id) shall not return anything.""" + requests = [ + {"jsonrpc": "2.0", "method": "simple"}, + {"jsonrpc": "2.0", "method": "simple"}, + ] + result = rpc_server_local.process_request(json.dumps(requests)) + assert result is None + + def 
test_notification(self, rpc_server_local: RPCServer): + """A notification (request without id) shall not return anything.""" + requests = [ + {"jsonrpc": "2.0", "method": "simple"}, + ] + result = rpc_server_local.process_request(json.dumps(requests)) + assert result is None + + +class Test_process_request_object: + def test_invalid_request(self, rpc_server_local: RPCServer): + result = rpc_server_local.process_request_object(7) + assert ( + result + == ErrorResponse( + id=None, error=DataError.from_error(INVALID_REQUEST, 7) + ) + ) + + def test_batch_entry_notification(self, rpc_server_local: RPCServer): + """A notification (request without id) shall not return anything.""" + requests = [ + {"jsonrpc": "2.0", "method": "simple"}, + {"jsonrpc": "2.0", "method": "simple", "id": 4}, + ] + result = rpc_server_local.process_request_object(requests) + assert result == ResponseBatch([ResultResponse(4, 7)]) + + def test_batch_of_notifications(self, rpc_server_local: RPCServer): + """A notification (request without id) shall not return anything.""" + requests = [ + {"jsonrpc": "2.0", "method": "simple"}, + {"jsonrpc": "2.0", "method": "simple"}, + ] + result = rpc_server_local.process_request_object(requests) + assert result is None + + def test_notification(self, rpc_server_local: RPCServer): + """A notification (request without id) shall not return anything.""" + requests = [ + {"jsonrpc": "2.0", "method": "simple"}, + ] + result = rpc_server_local.process_request_object(requests) + assert result is None diff --git a/tests/management/test_data_logger.py b/tests/management/test_data_logger.py new file mode 100644 index 000000000..fb8173c75 --- /dev/null +++ b/tests/management/test_data_logger.py @@ -0,0 +1,409 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import json +from math import isnan +from pathlib import Path +import re +from unittest.mock import MagicMock + +import pytest + +from pyleco.core.data_message import DataMessage +from pyleco.test import FakeContext +from pyleco.management.data_logger import DataLogger, nan, ValuingModes, TriggerTypes + + +@pytest.fixture +def data_logger() -> DataLogger: + dl = DataLogger(context=FakeContext()) + dl.subscriber.subscribe = MagicMock() # type: ignore[method-assign] + dl.subscriber.unsubscribe = MagicMock() # type: ignore[method-assign] + dl.start_collecting( + variables=["time", "test", "2", "N1.sender.var"], + trigger_type=TriggerTypes.VARIABLE, + trigger_variable="test", + trigger_timeout=10, + valuing_mode=ValuingModes.AVERAGE, + value_repeating=False, + ) + dl.tmp["2"] = [1, 2] + return dl + + +class Test_start_collecting: + @pytest.fixture(params=[False, True]) + def data_logger_sc(self, data_logger: DataLogger, request): + if request.param: + # do it once without restarting and once with restarting + data_logger.start_collecting() + return data_logger + + def test_trigger_type(self, data_logger_sc: DataLogger): + assert data_logger_sc.trigger_type == TriggerTypes.VARIABLE + + def test_trigger_variable(self, data_logger_sc: DataLogger): + assert data_logger_sc.trigger_variable == "test" + + def test_trigger_timeout(self, data_logger_sc: DataLogger): + assert data_logger_sc.trigger_timeout == 10 + + def test_value_repeating(self, data_logger_sc: DataLogger): + assert data_logger_sc.value_repeating is False + + def test_variables(self, data_logger_sc: DataLogger): + for key in ["time", "test", "2", "N1.sender.var"]: + assert key in data_logger_sc.lists.keys() + + +def test_start_collecting_starts_timer(data_logger: DataLogger): + # arrange + data_logger.trigger_timeout = 1000 + # act + data_logger.start_collecting(trigger_type=TriggerTypes.TIMER, trigger_timeout=500) + # assert + assert data_logger.timer.interval == 500 + # cleanup + 
data_logger.timer.cancel() + + +def test_start_collecting_starts_timer_even_second_time(data_logger: DataLogger): + """Even a second time, without explicit trigger type, the timer should be started.""" + # arrange + data_logger.trigger_timeout = 500 + # first time, to set type + data_logger.start_collecting(trigger_type=TriggerTypes.TIMER, trigger_timeout=1000) + data_logger.stop_collecting() + assert not hasattr(data_logger, "timer") # no timer left + # act + data_logger.start_collecting() + # assert + assert data_logger.timer.interval == 1000 + # cleanup + data_logger.timer.cancel() + + +def test_listen_close_stops_collecting(data_logger: DataLogger): + data_logger.stop_collecting = MagicMock() # type: ignore[method-assign] + # act + data_logger._listen_close() + # assert + data_logger.stop_collecting.assert_called_once() + + +def test_setup_listen_does_not_start_collecting_without_start_data(data_logger: DataLogger): + data_logger.start_collecting = MagicMock() # type: ignore[method-assign] + data_logger._listen_setup() + data_logger.start_collecting.assert_not_called() + + +def test_setup_listen_starts_collecting(data_logger: DataLogger): + data_logger.start_collecting = MagicMock() # type: ignore[method-assign] + data_logger._listen_setup(start_data={"var": 7}) + data_logger.start_collecting.assert_called_once_with(var=7) + + +class Test_setup_variables: + @pytest.fixture + def data_logger_stv(self, data_logger: DataLogger): + data_logger.namespace = "N1" + data_logger.unsubscribe_all() + data_logger.subscriber.subscribe = MagicMock() # type: ignore[method-assign] + data_logger.setup_variables([ + "var1", + "N1.sender.var2", "N1.sender.var3", "sender.var4", + "sender2.var5", + ]) + return data_logger + + def test_just_once_subscribed_to_component(self, data_logger_stv: DataLogger): + """Even though there are several variables from the same component + (with or without namespace defined), only one subscription should be there""" + 
data_logger_stv._subscriptions.remove(b"N1.sender") # asserts that it is present + assert b"N1.sender" not in data_logger_stv._subscriptions + assert b"sender" not in data_logger_stv._subscriptions # namespace is added + + def test_subscribe_to_complemented_namespace(self, data_logger_stv: DataLogger): + """A variable name without namespace should be complemented with the namespace.""" + assert b"N1.sender2" in data_logger_stv._subscriptions + + def test_subscribe_to_simple_variable(self, data_logger_stv: DataLogger): + assert b"var1" in data_logger_stv._subscriptions + + @pytest.mark.parametrize("var", ("N1.sender.var2", "N1.sender.var3")) + def test_lists_for_component_variables(self, data_logger_stv: DataLogger, var): + assert var in data_logger_stv.lists + + @pytest.mark.parametrize("var", ("N1.sender.var4", "N1.sender2.var5")) + def test_lists_for_complemented_namespace(self, data_logger_stv: DataLogger, var): + """If the namespace has been complemented, it should be in the lists.""" + assert var in data_logger_stv.lists + + +def test_subscribe_without_having_logged_in(data_logger: DataLogger, + caplog: pytest.LogCaptureFixture): + """Test that proper logging happens if the data_logger did not sign in (yet) but should + subscribe to some remote object.""" + data_logger.namespace = None + data_logger.setup_variables(["Component.Variable"]) + assert caplog.messages == ["Cannot subscribe to 'Component.Variable' as the namespace is not known."] # noqa + + +def test_set_valuing_mode_last(data_logger: DataLogger): + data_logger.set_valuing_mode(ValuingModes.LAST) + assert data_logger.last == data_logger.valuing + + +def test_handle_subscription_message_calls_handle_data(data_logger: DataLogger): + data_logger.handle_subscription_data = MagicMock() # type: ignore[method-assign] + message = DataMessage(topic="N1.sender", data={'var': 5, 'test': 7.3}) + data_logger.handle_subscription_message(message) + 
data_logger.handle_subscription_data.assert_called_once_with({"N1.sender.var": 5, + "N1.sender.test": 7.3}) + + +def test_handle_subscription_message_adds_data_to_lists(data_logger: DataLogger): + message = DataMessage(topic="N1.sender", data={"var": 5.6}) + data_logger.handle_subscription_message(message) + assert data_logger.tmp["N1.sender.var"] == [5.6] + + +def test_handle_subscription_message_handles_broken_message(data_logger: DataLogger, + caplog: pytest.LogCaptureFixture): + message = DataMessage(topic="N1.sender", data="not a dict") + data_logger.handle_subscription_message(message) + assert len(caplog.messages) == 1 + assert caplog.messages[0].startswith("Could not decode message") + + +def test_handle_subscription_data_without_trigger(data_logger: DataLogger): + data_logger.trigger_variable = "not present" + data_logger.handle_subscription_data({"test": 5}) + data_logger.handle_subscription_data({"test": 7}) + assert data_logger.tmp["test"] == [5, 7] + + +def test_handle_subscription_data_triggers(data_logger: DataLogger): + data_logger.make_datapoint = MagicMock() # type: ignore[method-assign] + data_logger.handle_subscription_data({"test": 5}) + data_logger.make_datapoint.assert_called_once() + + +def test_handle_subscription_data_without_list(data_logger: DataLogger, + caplog: pytest.LogCaptureFixture): + caplog.set_level(0) + data_logger.handle_subscription_data({'not_present': 42}) + assert caplog.messages == ["Got value for 'not_present', but no list present."] + + +def test_set_publisher_name(data_logger: DataLogger): + data_logger.set_full_name("N1.cA") + assert data_logger.publisher.full_name == "N1.cA" + assert data_logger.full_name == "N1.cA" + + +class Test_start_timer_trigger: + @pytest.fixture + def data_logger_stt(self, data_logger: DataLogger): + data_logger.start_timer_trigger(1000) + yield data_logger + data_logger.timer.cancel() + + def test_timer_interval(self, data_logger_stt: DataLogger): + assert data_logger_stt.timer.interval == 
1000 + + def test_timer_started(self, data_logger_stt: DataLogger): + with pytest.raises(RuntimeError): # can start a timer at most once + data_logger_stt.timer.start() + + +class Test_make_data_point: + @pytest.fixture + def data_logger_mdp(self, data_logger: DataLogger): + del data_logger.lists['time'] # for better comparison + data_logger.make_datapoint() + return data_logger + + def test_last_data_point(self, data_logger_mdp: DataLogger): + assert data_logger_mdp.last_datapoint == {"test": nan, "2": 1.5, "N1.sender.var": nan} + + def test_tmp_is_cleared(self, data_logger_mdp: DataLogger): + assert data_logger_mdp.tmp["2"] == [] + + def test_publish_data(self, data_logger: DataLogger): + # arrange it with a Mock + data_logger.publisher.send_data = MagicMock() # type: ignore[method-assign] + del data_logger.lists["time"] + + data_logger.namespace = "N1" + # act + data_logger.make_datapoint() + # assert + data_logger.publisher.send_data.assert_called_once_with(data={"test": nan, "2": 1.5, + "N1.sender.var": nan}) + + +def test_calculate_data_adds_time(data_logger: DataLogger): + datapoint = data_logger.calculate_data() + assert datapoint["time"] > 0 + + +class Test_calculate_single_data: + @pytest.mark.parametrize("list, result", ( + ([2, 3], 2.5), + ([5], 5), + )) + def test_average(self, data_logger: DataLogger, list, result): + assert data_logger.calculate_single_data("2", tmp=list) == result + + def test_average_results_in_nan(self, data_logger: DataLogger): + assert isnan(data_logger.calculate_single_data("2", tmp=[])) + + @pytest.mark.parametrize("list, result", ( + ([2, 3], 3), + ([5], 5), + )) + def test_last(self, data_logger: DataLogger, list, result): + data_logger.valuing = data_logger.last + assert data_logger.calculate_single_data("2", tmp=list) == result + + def test_last_results_in_nan(self, data_logger: DataLogger): + data_logger.valuing = data_logger.last + assert isnan(data_logger.calculate_single_data("2", tmp=[])) + + 
@pytest.mark.parametrize("list, result", ( + ([2, 3], 2.5), + ([], 55), + ([5], 5), + )) + def test_repeating_with_last_value(self, data_logger: DataLogger, list, result): + data_logger.value_repeating = True + data_logger.lists["2"] = [55] + assert data_logger.calculate_single_data("2", tmp=list) == result + + @pytest.mark.parametrize("list, result", ( + ([2, 3], 2.5), + ([5], 5), + )) + def test_repeating_without_last_value(self, data_logger: DataLogger, list, result): + data_logger.value_repeating = True + data_logger.lists["2"] = [] + assert data_logger.calculate_single_data("2", tmp=list) == result + + def test_repeating_without_last_value_results_in_nan(self, data_logger: DataLogger): + data_logger.value_repeating = True + data_logger.lists["2"] = [] + assert isnan(data_logger.calculate_single_data("2", tmp=[])) + + +class Test_last: + def test_last_returns_last_value(self, data_logger: DataLogger): + assert data_logger.last([1, 2, 3, 4, 5]) == 5 + + def test_return_single_value(self, data_logger: DataLogger): + assert data_logger.last(5) == 5 # type: ignore + + def test_empty_list_returns_nan(self, data_logger: DataLogger): + assert isnan(data_logger.last([])) + + +class Test_save_data: + @pytest.fixture + def data_logger_sd(self, data_logger: DataLogger, tmp_path_factory: pytest.TempPathFactory): + path = tmp_path_factory.mktemp("save") + data_logger.directory = str(path) + self.file_name = data_logger.save_data() + self.today = data_logger.today + return data_logger + + def test_filename(self, data_logger_sd: DataLogger): + result = re.match(r"20\d\d_\d\d_\d\dT\d\d_\d\d_\d\d", data_logger_sd.last_save_name) + assert result is not None + + @pytest.fixture + def saved_file(self, data_logger_sd: DataLogger): + path = Path(data_logger_sd.directory) / data_logger_sd.last_save_name + return path.with_suffix(".json").read_text() + + def test_output(self, saved_file: str): + today_string = self.today.isoformat() + assert saved_file == "".join( + ( + """["", 
{"time": [], "test": [], "2": [], "N1.sender.var": []}, """, + '''{"units": {}, "today": "''', + today_string, + '''", "file_name": "''', + self.file_name, + """", "logger_name": "DataLoggerN", """, + """"configuration": {"trigger_type": "variable", "trigger_timeout": 10, """, + """"trigger_variable": "test", "valuing_mode": "average", """, + """"value_repeating": false, """, + """"variables": ["time", "test", "2", "N1.sender.var"], """, + """"units": {}}}]""", + ) + ) + + def test_json_content(self, saved_file: str): + today_string = self.today.isoformat() + assert json.loads(saved_file) == [ + "", + {"time": [], "test": [], "2": [], "N1.sender.var": []}, + {"units": {}, "today": today_string, "file_name": self.file_name, + "logger_name": "DataLoggerN", + "configuration": {"trigger_type": "variable", "trigger_timeout": 10, + "trigger_variable": "test", "valuing_mode": "average", + "value_repeating": False, + "variables": ["time", "test", "2", "N1.sender.var"], + "units": {}, + }, + }, + ] + + +def test_get_configuration(data_logger: DataLogger): + config = data_logger.get_configuration() + assert config == { + "trigger_type": TriggerTypes.VARIABLE, + "trigger_variable": "test", + "trigger_timeout": 10, + "valuing_mode": "average", + "value_repeating": False, + "variables": ["time", "test", "2", "N1.sender.var"], + "units": {}, + } + + +def test_get_last_datapoint(data_logger: DataLogger): + data_logger.last_datapoint = {"key": "value"} + assert data_logger.get_last_datapoint() == data_logger.last_datapoint + + +def test_get_last_save_name(data_logger: DataLogger): + data_logger.last_save_name = "abcef" + assert data_logger.get_last_save_name() == data_logger.last_save_name + + +def test_get_list_length(data_logger: DataLogger): + data_logger.lists = {"abc": [0, 1, 2, 3, 4]} + assert data_logger.get_list_length() == 5 diff --git a/tests/management/test_starter.py b/tests/management/test_starter.py new file mode 100644 index 000000000..3157ed3c2 --- /dev/null +++ 
b/tests/management/test_starter.py @@ -0,0 +1,250 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +from unittest.mock import call, MagicMock + +import pytest + +from pyleco.test import FakeContext +from pyleco.management.starter import sanitize_tasks, Starter, Status +from pyleco.utils.events import SimpleEvent + + +@pytest.fixture +def starter() -> Starter: + starter = Starter(context=FakeContext()) + return starter + + +class FakeThread: + def __init__(self, target=None, alive=False, *args, **kwargs) -> None: + self._target = target + self._args = args + self._kwargs = kwargs + self._alive = alive + + def start(self) -> None: + self._alive = True + + def is_alive(self) -> bool: + return self._alive + + def join(self, timeout=None) -> None: + return + + +@pytest.mark.parametrize("tasks", (None, [], (), "string", ["abc", "def"])) +def test_sanitize_tasks(tasks): + sanitized = sanitize_tasks(tasks) + assert isinstance(sanitized, (tuple, list)) + for t in sanitized: + assert isinstance(t, str) + + +@pytest.mark.parametrize("tasks, invalid_task_name", ( + (5, 5), + (("valid", 6), 6), + ([["list"], "abc"], "['list']")), +) +def test_invalid_tasks(tasks, invalid_task_name, caplog): + assert sanitize_tasks(tasks) == () + assert caplog.messages == [f"Invalid task name '{invalid_task_name}' received."] + + + +def test_init(starter: Starter): + assert starter.started_tasks == {} + assert starter.threads == {} + + +@pytest.mark.parametrize("pre, post", ( + (Status.STARTED | Status.INSTALLED, Status.STARTED | Status.INSTALLED), + (None, Status.INSTALLED), # not yet in the dict + (Status.STARTED, Status.STARTED | Status.INSTALLED), + (Status.STARTED | Status.RUNNING, Status.STARTED | Status.RUNNING | Status.INSTALLED), + (Status.STOPPED, Status.INSTALLED), +)) +def test_install_task(starter: Starter, pre: Status, post: Status): + if pre is not None: + starter.started_tasks["test"] = pre + starter.install_task("test") + assert starter.started_tasks["test"] == post + + +def test_install_tasks(starter: Starter): + starter.install_task 
= MagicMock() # type: ignore[method-assign] + starter.install_tasks(["a", "b"]) + assert starter.install_task.call_args_list == [call("a"), call("b")] + + +@pytest.mark.parametrize("pre, post", ( + (Status.RUNNING | Status.INSTALLED, Status.RUNNING), + (None, Status.STOPPED), # not yet in the dict + (Status.RUNNING, Status.RUNNING), + (Status.STOPPED, Status.STOPPED), +)) +def test_uninstall_task(starter: Starter, pre: Status, post: Status): + if pre is not None: + starter.started_tasks["test"] = pre + starter.uninstall_task("test") + assert starter.started_tasks.get("test") == post + + +def test_uninstall_tasks(starter: Starter): + starter.uninstall_task = MagicMock() # type: ignore[method-assign] + starter.uninstall_tasks(["a", "b"]) + assert starter.uninstall_task.call_args_list == [call("a"), call("b")] + + +class Test_status_tasks: + @pytest.fixture + def status(self, starter: Starter) -> dict[str, Status]: + starter.threads["SR"] = FakeThread(alive=True) # type: ignore + starter.threads["S"] = FakeThread() # type: ignore + starter.threads["NS"] = FakeThread(alive=True) # type: ignore + starter.started_tasks = { + "SR": Status.STARTED | Status.RUNNING, + "S": Status.STARTED | Status.RUNNING, + "NS": Status.STARTED, + } + self.starter = starter + return starter.status_tasks(names=["unknown"]) + + def test_started_running(self, status): + """Test that a running task remains running.""" + assert status["SR"] == Status.STARTED | Status.RUNNING + + def test_started_not_running(self, status): + """Test that a stopped thread is not running anymore.""" + assert status["S"] == Status.STARTED + + def test_newly_started_is_also_running(self, status): + """Test that a newly started (last time not running) is now running.""" + assert status["NS"] == Status.STARTED | Status.RUNNING + + def test_unknown_is_marked_stopped(self, status): + assert status["unknown"] == Status.STOPPED + + def test_stopped_running_thread_is_removed(self, status): + assert "S" not in 
self.starter.threads.keys() + + def test_stopped_causes_log_entry(self, status, caplog: pytest.LogCaptureFixture): + assert "Thread 'S' stopped unexpectedly." in [ + record.message for record in caplog.get_records(when="setup") + ] + + +class Test_check_installed_tasks: + @pytest.fixture + def starter_cit(self, starter: Starter) -> Starter: + starter.start_task = MagicMock() # type: ignore[method-assign] + starter.started_tasks = { + "IR": Status.INSTALLED | Status.RUNNING, + "INR": Status.INSTALLED, + "SR": Status.STARTED | Status.RUNNING, + "SNR": Status.STARTED, # not running, should not be started as it is not installed. + } + starter.check_installed_tasks() + return starter + + def test_start_installed_but_not_running_task(self, starter_cit: Starter): + """Test, that only the installed (and not running) task is started.""" + starter_cit.start_task.assert_called_once_with("INR") # type: ignore[attr-defined] + + +class Test_start_task: + def test_already_started_task(self, starter: Starter): + # arrange + starter.started_tasks["t1"] = Status.STARTED + starter.threads["t1"] = FakeThread(alive=True) # type: ignore + starter.events["t1"] = SimpleEvent() # type: ignore + # act + starter.start_task("t1") + assert Status.RUNNING in Status(starter.started_tasks["t1"]) + + +def test_start_tasks(starter: Starter): + starter.start_task = MagicMock() # type: ignore[method-assign] + starter.start_tasks(["a", "b"]) + assert starter.start_task.call_args_list == [call("a"), call("b")] + + +class Test_stop_task: + def test_stop_not_existing_task(self, starter: Starter): + starter.stop_task("whatever") + + def test_stop_existing_running_task(self, starter: Starter): + # arrange + starter.started_tasks["t1"] = Status.STARTED + starter.threads["t1"] = FakeThread(alive=True) # type: ignore + event = starter.events["t1"] = SimpleEvent() # type: ignore + # act + starter.stop_task("t1") + assert "t1" not in starter.threads + assert "t1" not in starter.started_tasks + assert 
event.is_set() is True + + def test_stop_removed_task(self, starter: Starter): + # arrange + try: + del starter.threads["t1"] + except KeyError: + pass + starter.started_tasks["t1"] = Status.STARTED + # act + starter.stop_task("t1") + assert "t1" not in starter.threads + assert "t1" not in starter.started_tasks + + +def test_stop_tasks(starter: Starter): + starter.stop_task = MagicMock() # type: ignore[method-assign] + starter.stop_tasks(["a", "b"]) + assert starter.stop_task.call_args_list == [call("a"), call("b")] + + +def test_restart_tasks(starter: Starter): + starter.start_task = MagicMock() # type: ignore[method-assign] + starter.stop_task = MagicMock() # type: ignore[method-assign] + starter.restart_tasks(["a", "b"]) + assert starter.stop_task.call_args_list == [call("a"), call("b")] + assert starter.start_task.call_args_list == [call("a"), call("b")] + + +def test_stop_all_tasks(starter: Starter): + # arrange + starter.started_tasks["t1"] = Status.STARTED + starter.threads["t1"] = FakeThread(alive=True) # type: ignore + event = starter.events["t1"] = SimpleEvent() # type: ignore + # act + starter.stop_all_tasks() + assert "t1" not in starter.threads + assert "t1" not in starter.started_tasks + assert event.is_set() is True + + +def test_list_tasks_failing(starter: Starter): + starter.directory = "/abcdefghijklmno" + assert starter.list_tasks() == [] diff --git a/tests/test_test.py b/tests/test_test.py index 2b8b5e165..037983922 100644 --- a/tests/test_test.py +++ b/tests/test_test.py @@ -1,7 +1,7 @@ # # This file is part of the PyLECO package. 
# -# Copyright (c) 2023-2023 PyLECO Developers +# Copyright (c) 2023-2024 PyLECO Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -24,14 +24,59 @@ import pytest -from pyleco.test import FakePoller, FakeSocket +from pyleco.test import FakeCommunicator, FakePoller, FakeSocket @pytest.fixture -def poller(): +def poller() -> FakePoller: return FakePoller() +@pytest.fixture +def socket() -> FakeSocket: + return FakeSocket(1) + + +@pytest.fixture +def sub_socket() -> FakeSocket: + return FakeSocket(2) + + +def test_socket_unbind(socket: FakeSocket): + socket.bind("abc") + socket.unbind() + assert socket.addr is None + + +def test_socket_disconnect(socket: FakeSocket): + socket.connect("abc") + socket.disconnect() + assert socket.addr is None + + +@pytest.mark.parametrize("topic", ("string", b"bytes")) +def test_socket_subscribe(sub_socket: FakeSocket, topic): + sub_socket.subscribe(topic) + assert isinstance(sub_socket._subscriptions[-1], bytes) + + +def test_subscribe_fails_for_not_SUB(socket: FakeSocket): + with pytest.raises(ValueError): + socket.subscribe("abc") + + +@pytest.mark.parametrize("topic", ("topic", b"topic")) +def test_socket_unsubscribe(sub_socket: FakeSocket, topic): + sub_socket._subscriptions.append(b"topic") + sub_socket.unsubscribe(topic) + assert b"topic" not in sub_socket._subscriptions + + +def test_unsubscribe_fails_for_not_SUB(socket: FakeSocket): + with pytest.raises(ValueError): + socket.unsubscribe("abc") + + class Test_FakePoller_unregister: def test_no_error_at_missing(self, poller: FakePoller): poller.unregister(FakeSocket(1)) @@ -42,3 +87,9 @@ def test_unregister_removes_socket(self, poller: FakePoller): poller._sockets = [1, 2, socket, 4, 5] # type: ignore poller.unregister(socket) assert socket not in poller._sockets + + +def test_FakeCommunicator_sign_in(): + fc = FakeCommunicator("") + fc.sign_in() + assert 
fc._signed_in is True diff --git a/tests/utils/test_base_communicator.py b/tests/utils/test_base_communicator.py new file mode 100644 index 000000000..c67c81069 --- /dev/null +++ b/tests/utils/test_base_communicator.py @@ -0,0 +1,402 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from __future__ import annotations +import logging +import time +from typing import Optional +from unittest.mock import MagicMock + +import pytest + +from pyleco.test import FakeSocket +from pyleco.core.message import Message, MessageTypes +from pyleco.json_utils.rpc_generator import RPCGenerator +from pyleco.json_utils.errors import DUPLICATE_NAME +from pyleco.json_utils.json_objects import ErrorResponse + +from pyleco.utils.base_communicator import BaseCommunicator, MessageBuffer + + +cid = b"conversation_id;" +header = b"".join((cid, b"\x00" * 4)) + + +def fake_generate_cid(): + return cid + + +@pytest.fixture() +def fake_cid_generation(monkeypatch): + monkeypatch.setattr("pyleco.core.serialization.generate_conversation_id", fake_generate_cid) + + +class FakeBaseCommunicator(BaseCommunicator): + + def __init__(self, name="communicator") -> None: + self.name = name + self.setup_message_buffer() + self.socket = FakeSocket(0) # type: ignore + self.log = logging.getLogger() + self.rpc_generator = RPCGenerator() + + # For tests + self._s: list[Message] = [] + self._r: list[Message] = [] + + def _send_socket_message(self, message: Message) -> None: + self._s.append(message) + + def _read_socket_message(self, timeout: Optional[float] = None) -> Message: + if self._r: + return self._r.pop(0) + raise TimeoutError + + +@pytest.fixture +def buffer() -> MessageBuffer: + return MessageBuffer() + + +@pytest.fixture +def communicator() -> FakeBaseCommunicator: + return FakeBaseCommunicator() + + +m1 = Message(receiver="N1.communicator", sender="xy") # some message +mr = Message(receiver="N1.communicator", sender="xy", conversation_id=cid) # requested message +m2 = Message(receiver="N1.communicator", sender="xy") # another message + + +def test_cid_in_buffer(buffer: MessageBuffer): + assert buffer.is_conversation_id_requested(cid) is False + buffer.add_conversation_id(cid) + assert buffer.is_conversation_id_requested(cid) is True + buffer.remove_conversation_id(cid) + 
assert buffer.is_conversation_id_requested(cid) is False + + +def test_remove_cid_without_cid_raises_no_exception(buffer: MessageBuffer): + buffer.remove_conversation_id(cid) + # assert that no error is raised + + +def test_retrieve_requested_message(buffer: MessageBuffer): + buffer.add_conversation_id(cid) + buffer.add_message(m1) + buffer.add_message(mr) + buffer.add_message(m2) + ret = buffer.retrieve_message(cid) + assert ret == mr + assert buffer.is_conversation_id_requested(cid) is False + + +def test_retrieve_free_message(buffer: MessageBuffer): + buffer.add_conversation_id(cid) + buffer.add_message(mr) + buffer.add_message(m2) + ret = buffer.retrieve_message() + assert ret == m2 + + +def test_free_message_not_found(buffer: MessageBuffer): + buffer.add_message(mr) + buffer.add_conversation_id(cid) + assert buffer.retrieve_message(None) is None + + +def test_requested_message_not_found(buffer: MessageBuffer): + buffer.add_message(m1) + assert buffer.retrieve_message(cid) is None + + +def test_buffer_len(buffer: MessageBuffer): + assert len(buffer) == 0 + buffer.add_message(m1) + assert len(buffer) == 1 + + +def test_close(communicator: FakeBaseCommunicator): + communicator.close() + assert communicator.socket.closed is True # type: ignore + + +def test_context_manager(): + stored_communicator = None + with FakeBaseCommunicator() as communicator: # type: ignore + assert isinstance(communicator, FakeBaseCommunicator) # assert enter + stored_communicator = communicator + assert stored_communicator.socket.closed is True # exit + + +def test_send_socket_message(communicator: FakeBaseCommunicator): + msg = Message(receiver="rec", sender="abc") + BaseCommunicator._send_socket_message(communicator, msg) + assert communicator.socket._s == [msg.to_frames()] # type: ignore + + +def test_send_message(communicator: FakeBaseCommunicator): + msg = Message(receiver="rec", sender="") + communicator.send_message(msg) + sent = communicator._s.pop() + assert sent.sender == 
b"communicator" + msg.sender = b"communicator" + assert sent == msg + + +class Test_sign_in: + def test_sign_in_successful(self, communicator: FakeBaseCommunicator, fake_cid_generation): + message = Message(receiver=b"N3.communicator", sender=b"N3.COORDINATOR", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={ + "id": 0, "result": None, "jsonrpc": "2.0", + }) + communicator._r = [message] # type: ignore + communicator.namespace = None + communicator.sign_in() + assert communicator.namespace == "N3" + + def test_not_valid_message(self, communicator: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture, + fake_cid_generation): + message = Message("communicator", "COORDINATOR", data=b"[]", conversation_id=cid) + communicator._r = [message] # type: ignore + communicator.sign_in() + caplog.records[-1].msg.startswith("Not json message received:") + + def test_duplicate_name(self, communicator: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture, + fake_cid_generation): + communicator.namespace = None + message = Message("communicator", "N3.COORDINATOR", message_type=MessageTypes.JSON, + data=ErrorResponse(id=5, error=DUPLICATE_NAME), + conversation_id=cid) + communicator._r = [message] # type: ignore + communicator.sign_in() + assert communicator.namespace is None + assert caplog.records[-1].msg == "Sign in failed, the name is already used." 
+ + def test_handle_unknown_error(self, communicator: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture, + fake_cid_generation): + communicator.namespace = None + message = Message("communicator", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "error": {'code': 123545, "message": "error_msg"}, "id": 5 + }, conversation_id=cid) + communicator._r = [message] # type: ignore + communicator.sign_in() + assert communicator.namespace is None + assert caplog.records[-1].msg.startswith("Sign in failed, unknown error") + + def test_handle_request_message(self, communicator: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture, + fake_cid_generation + ): + """Handle a message without result or error.""" + communicator.namespace = None + message = Message("communicator", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "id": 5, "method": "some_method", + }, conversation_id=cid) + communicator._r = [message] # type: ignore + communicator.sign_in() + assert communicator.namespace is None + assert caplog.records[-1].msg.startswith("Sign in failed, unknown error") + + def test_log_timeout_error(self, communicator: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture): + communicator.sign_in() + assert caplog.records[-1].msg.startswith("Signing in timed out.") + + +class Test_finish_sign_in: + @pytest.fixture + def communicator_fsi(self, communicator: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture): + caplog.set_level(logging.INFO) + communicator.finish_sign_in(response_message=Message( + b"communicator", b"N5.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 10, "result": None, "jsonrpc": "2.0"})) + return communicator + + def test_namespace(self, communicator_fsi: FakeBaseCommunicator): + assert communicator_fsi.namespace == "N5" + + def test_full_name(self, communicator_fsi: FakeBaseCommunicator): + assert communicator_fsi.full_name == "N5.communicator" + + def test_log_message(self, 
communicator_fsi: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture): + assert caplog.get_records("setup")[-1].message == ("Signed in to Node 'N5'.") + + +def test_heartbeat(communicator: FakeBaseCommunicator): + communicator.heartbeat() + msg = communicator._s.pop() + assert msg.receiver == b"COORDINATOR" + assert msg.payload == [] + + +def test_sign_out_fail(communicator: FakeBaseCommunicator, caplog: pytest.LogCaptureFixture, + fake_cid_generation): + communicator.namespace = "N3" + message = Message("communicator", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "error": {"code": 12345}, "id": 1, + }, conversation_id=cid) + communicator._r = [message] # type: ignore + communicator.sign_out() + assert communicator.namespace is not None + assert caplog.messages[-1].startswith("Signing out failed") + + +def test_sign_out_success(communicator: FakeBaseCommunicator, fake_cid_generation): + communicator.namespace = "N3" + message = Message("communicator", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "result": None, "id": 1, + }, conversation_id=cid) + communicator._r = [message] # type: ignore + communicator.sign_out() + assert communicator.namespace is None + + +def test_finish_sign_out(communicator: FakeBaseCommunicator): + communicator.finish_sign_out() + assert communicator.namespace is None + assert communicator.full_name == "communicator" + + +class Test_read_message: + conf: list[tuple[list[Message], list[Message], Optional[bytes], list[Message], list[Message], + str]] = [ + # socket_in, buffer_in, cid, socket_out, buffer_out, test_id + # find first not requested message + ([m1], [], None, [], [], "return first message from socket"), + ([m2], [m1], None, [m2], [], "return first message from buffer, not socket"), + ([m1], [mr], None, [], [mr], "ignore requested message in buffer"), + ([mr, m1], [], None, [], [mr], "ignore requested message in socket"), + # find requested message + ([mr], [], 
cid, [], [], "return specific message from socket"), + ([m2], [mr], cid, [m2], [], "return specific message from buffer"), + ([mr], [m2], cid, [], [m2], "return specific message from socket although filled buffer"), + ([m2, mr, m1], [], cid, [m1], [m2], "find specific message in socket"), + ([], [m2, mr, m1], cid, [], [m2, m1], "find specific message in buffer"), + ] + ids = [test[-1] for test in conf] + + def test_return_message_from_socket(self, communicator: FakeBaseCommunicator): + communicator._r = [m1] # type: ignore + assert communicator.read_message() == m1 + + def test_return_message_from_buffer(self, communicator: FakeBaseCommunicator): + communicator.message_buffer.add_message(m1) + assert communicator.read_message() == m1 + # assert that no error is raised + + def test_cid_not_longer_in_requested_ids(self, communicator: FakeBaseCommunicator): + communicator.message_buffer.add_conversation_id(cid) + communicator.message_buffer.add_message(mr) + communicator.read_message(conversation_id=cid) + assert communicator.message_buffer.is_conversation_id_requested(cid) is False + + @pytest.mark.parametrize("test", conf, ids=ids) + def test_return_correct_message(self, + test: tuple[list[Message], list[Message], Optional[bytes]], + communicator: FakeBaseCommunicator): + socket, buffer, cid0, *_ = test + communicator._r = socket.copy() # type: ignore + for m in buffer: + communicator.message_buffer.add_message(m) + communicator.message_buffer.add_conversation_id(cid) + # act + result = communicator.read_message(conversation_id=cid0) + assert result == m1 if cid is None else mr + + @pytest.mark.parametrize("test", conf, ids=ids) + def test_correct_buffer_socket(self, + test: tuple[list[Message], list[Message], Optional[bytes], + list[Message], list[Message]], + communicator: FakeBaseCommunicator): + socket_in, buffer_in, cid0, socket_out, buffer_out, *_ = test + communicator._r = socket_in.copy() # type: ignore + for m in buffer_in: + 
communicator.message_buffer.add_message(m) + communicator.message_buffer.add_conversation_id(cid) + # act + communicator.read_message(conversation_id=cid0) + assert communicator._r == socket_out # type: ignore + assert communicator.message_buffer._messages == buffer_out + + def test_timeout_zero_works(self, communicator: FakeBaseCommunicator): + communicator._r = [m1] # type: ignore + communicator.read_message(timeout=0) + # assert that no error is raised + + def test_timeout_error(self, communicator: FakeBaseCommunicator): + def waiting(*args, **kwargs): + time.sleep(.1) + return m1 + communicator._read_socket_message = waiting # type: ignore[assignment] + with pytest.raises(TimeoutError): + communicator.read_message(conversation_id=cid, timeout=0) + + +class Test_ask_message: + expected_sent = Message("receiver", sender="communicator", conversation_id=cid) + expected_response = Message("communicator", sender="receiver", conversation_id=cid) + + @pytest.fixture + def communicator_asked(self, communicator: FakeBaseCommunicator): + communicator._r = [self.expected_response] # type: ignore + self.response = communicator.ask_message(message=self.expected_sent) + return communicator + + def test_sent_expected(self, communicator_asked: FakeBaseCommunicator): + assert communicator_asked._s == [self.expected_sent] + + def test_expected_response(self, communicator_asked): + assert self.expected_response == self.response + + def test_no_cid_in_requested_cids_list(self, communicator_asked: FakeBaseCommunicator): + assert communicator_asked.message_buffer.is_conversation_id_requested(cid) is False + + +class Test_handle_not_signed_in: + @pytest.fixture + def communicator_hnsi(self, communicator: FakeBaseCommunicator) -> FakeBaseCommunicator: + communicator.namespace = "xyz" + communicator.sign_in = MagicMock() # type: ignore + communicator.handle_not_signed_in() + communicator.sign_in.assert_called_once + return communicator + + def test_namespace_reset(self, 
communicator_hnsi: FakeBaseCommunicator): + assert communicator_hnsi.namespace is None + + def test_sign_in_called(self, communicator_hnsi: FakeBaseCommunicator): + communicator_hnsi.sign_in.assert_called_once() # type: ignore + + def test_log_warning(self, communicator_hnsi: FakeBaseCommunicator, + caplog: pytest.LogCaptureFixture) -> None: + assert caplog.get_records(when="setup")[-1].message == "I was not signed in, signing in." diff --git a/tests/utils/test_communicator.py b/tests/utils/test_communicator.py new file mode 100644 index 000000000..e301aca12 --- /dev/null +++ b/tests/utils/test_communicator.py @@ -0,0 +1,275 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +from unittest.mock import MagicMock + +import pytest + +from pyleco.core import VERSION_B +from pyleco.core.message import Message, MessageTypes +from pyleco.core.serialization import serialize_data +from pyleco.json_utils.errors import JSONRPCError, NOT_SIGNED_IN, NODE_UNKNOWN + +from pyleco.utils.communicator import Communicator +from pyleco.test import FakeSocket, FakeContext + + +cid = b"conversation_id;" +header = b"".join((cid, b"\x00" * 3, b"\x01")) + + +message_tests = ( + ({'receiver': "broker", 'data': [["GET", [1, 2]], ["GET", 3]], 'sender': 's', + 'message_type': MessageTypes.JSON}, + [VERSION_B, b"broker", b"s", header, serialize_data([["GET", [1, 2]], ["GET", 3]])]), + ({'receiver': "someone", 'conversation_id': cid, 'sender': "ego", 'message_id': b"mid"}, + [VERSION_B, b'someone', b'ego', b'conversation_id;mid\x00']), + ({'receiver': "router", 'sender': "origin"}, + [VERSION_B, b"router", b"origin", header[:-1] + b"\x00"]), +) + + +def fake_time(): + return 0 + + +def fake_randbytes(n): + return b"\01" * n + + +def fake_generate_cid(): + return cid + + +@pytest.fixture() +def fake_cid_generation(monkeypatch): + monkeypatch.setattr("pyleco.core.serialization.generate_conversation_id", fake_generate_cid) + + +# intercom +class FakeCommunicator(Communicator): + def open(self, context=None): + super().open(context=FakeContext()) # type: ignore + + +@pytest.fixture() +def communicator() -> Communicator: + communicator = FakeCommunicator(name="Test") + communicator._last_beat = float("inf") + return communicator + + +def test_name(): + c = FakeCommunicator(name="Test") + assert c.name == "Test" + + +def test_auto_open(): + c = FakeCommunicator(name="Test", auto_open=True) + assert isinstance(c.socket, FakeSocket) + + +def test_context_manager_opens_connection(): + class FK2(FakeCommunicator): + def __init__(self, **kwargs): + super().__init__(auto_open=False, **kwargs) + + def sign_in(self): + pass + with FK2(name="Test") as c: + assert 
isinstance(c.socket, FakeSocket) + + +class Test_close: + @pytest.fixture + def closed_communicator(self, communicator: Communicator, fake_cid_generation): + message = Message("Test", "COORDINATOR", message_type=MessageTypes.JSON, + conversation_id=cid, data={ + "jsonrcp": "2.0", "result": None, "id": 1, + }) + communicator.socket._r = [message.to_frames()] # type: ignore + communicator.close() + return communicator + + def test_socket_closed(self, closed_communicator: Communicator): + assert closed_communicator.socket.closed is True + + def test_signed_out(self, closed_communicator: Communicator): + sign_out_message = Message.from_frames(*closed_communicator.socket._s.pop()) # type: ignore # noqa + assert sign_out_message == Message( + "COORDINATOR", + "Test", + conversation_id=sign_out_message.conversation_id, + message_type=MessageTypes.JSON, + data={'jsonrpc': "2.0", 'method': "sign_out", "id": 1} + ) + + def test_no_error_without_socket(self): + communicator = FakeCommunicator("Test", auto_open=False) + communicator.close() + # no error raised + + +def test_reset(communicator: Communicator): + communicator.close = MagicMock() # type: ignore + communicator.open = MagicMock() # type: ignore + # act + communicator.reset() + # assert + communicator.close.assert_called_once() + communicator.open.assert_called_once() + + +@pytest.mark.parametrize("kwargs, message", message_tests) +def test_communicator_send(communicator: Communicator, kwargs, message, monkeypatch, + fake_cid_generation): + monkeypatch.setattr("pyleco.utils.communicator.perf_counter", fake_time) + communicator.send(**kwargs) + assert communicator.socket._s.pop() == message # type: ignore + + +def test_poll(communicator: Communicator): + assert communicator.poll() == 0 + + +class Test_ask_message: + request = Message(receiver=b"N1.receiver", data="whatever", conversation_id=cid) + response = Message(receiver=b"N1.Test", sender=b"N1.receiver", data=["xyz"], + message_type=MessageTypes.JSON, + 
conversation_id=cid) + + def test_ignore_ping(self, communicator: Communicator): + ping_message = Message(receiver=b"N1.Test", sender=b"N1.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 0, "method": "pong", "jsonrpc": "2.0"}) + communicator.socket._r = [ # type: ignore + ping_message.to_frames(), + self.response.to_frames()] + communicator.ask_message(self.request) + assert communicator.socket._s == [self.request.to_frames()] + + def test_sign_in(self, communicator: Communicator, fake_cid_generation): + communicator.sign_in = MagicMock() # type: ignore + not_signed_in = Message(receiver="N1.Test", sender="N1.COORDINATOR", + message_type=MessageTypes.JSON, + conversation_id=cid, + data={"id": None, + "error": NOT_SIGNED_IN.model_dump(), + "jsonrpc": "2.0"}, + ) + communicator.socket._r = [ # type: ignore + not_signed_in.to_frames(), + self.response.to_frames()] + response = communicator.ask_message(self.request) + # assert that the message is sent once + assert communicator.socket._s.pop(0) == self.request.to_frames() # type: ignore + # assert that it tries to sign in + communicator.sign_in.assert_called() + # assert that the message is called a second time + assert communicator.socket._s == [self.request.to_frames()] + # assert that the correct response is returned + assert response == self.response + + def test_ignore_wrong_response(self, communicator: Communicator, + caplog: pytest.LogCaptureFixture): + """A wrong response should not be returned.""" + caplog.set_level(10) + m = Message(receiver="whatever", sender="s", message_type=MessageTypes.JSON, + data={'jsonrpc': "2.0"}).to_frames() + communicator.socket._r = [m, self.response.to_frames()] # type: ignore + assert communicator.ask_message(self.request) == self.response + + def test_sign_in_fails_several_times(self, communicator: Communicator, fake_cid_generation): + not_signed_in = Message(receiver="communicator", sender="N1.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": None, + 
"error": NOT_SIGNED_IN.model_dump(), + "jsonrpc": "2.0"}, + ).to_frames() + communicator.sign_in = MagicMock() # type: ignore + communicator.socket._r = [not_signed_in, not_signed_in] # type: ignore + with pytest.raises(ConnectionRefusedError): + communicator.ask_message(self.request) + + @pytest.mark.xfail(True, reason="Unsure whether it should work that way.") + def test_ask_message_with_error(self, communicator: Communicator): + response = Message(receiver="communicator", sender="N1.COORDINATOR", + message_type=MessageTypes.JSON, conversation_id=cid, + data={"id": None, + "error": NODE_UNKNOWN.model_dump(), + "jsonrpc": "2.0"}, + ) + communicator.socket._r = [response.to_frames()] # type: ignore + with pytest.raises(JSONRPCError, match=NODE_UNKNOWN.message): + communicator.ask_message(Message("receiver", conversation_id=cid)) + + +def test_ask_rpc(communicator: Communicator, fake_cid_generation): + received = Message(receiver=b"N1.Test", sender=b"N1.receiver", + conversation_id=cid) + received.payload = [b"""{"jsonrpc": "2.0", "result": 123.45, "id": "1"}"""] + communicator.socket._r = [received.to_frames()] # type: ignore + response = communicator.ask_rpc(receiver="N1.receiver", method="test_method", some_arg=4) + assert communicator.socket._s == [ + Message(b'N1.receiver', b'Test', + conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 1, "method": "test_method", + "params": {"some_arg": 4}, "jsonrpc": "2.0"}).to_frames()] + assert response == 123.45 + + +def test_communicator_sign_in(fake_cid_generation, communicator: Communicator): + communicator.socket._r = [ # type: ignore + Message(b"N2.n", b"N2.COORDINATOR", + conversation_id=cid, message_type=MessageTypes.JSON, + data={"id": 1, "result": None, "jsonrpc": "2.0"}).to_frames()] + communicator.sign_in() + assert communicator.namespace == "N2" + + +def test_get_capabilities(communicator: Communicator, fake_cid_generation): + communicator.socket._r = [ # type: ignore + Message("communicator", 
"sender", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 1, "result": 6, "jsonrpc": "2.0"} + ).to_frames() + ] + result = communicator.get_capabilities(receiver="rec") + sent = Message.from_frames(*communicator.socket._s.pop()) # type: ignore + assert sent.data == {"id": 1, "method": "rpc.discover", "jsonrpc": "2.0"} + assert result == 6 + + +def test_ask_json(communicator: Communicator, fake_cid_generation): + response = Message("communicator", sender="rec", conversation_id=cid, + data="super response") + communicator.socket._r = [response.to_frames()] # type: ignore + json_string = "[5, 6.7]" + # act + result = communicator.ask_json(receiver="rec", json_string=json_string) + # assert + sent = Message.from_frames(*communicator.socket._s.pop()) # type: ignore + assert sent.data == [5, 6.7] + assert result == b"super response" diff --git a/tests/utils/test_coordinator_utils.py b/tests/utils/test_coordinator_utils.py new file mode 100644 index 000000000..91953ca8f --- /dev/null +++ b/tests/utils/test_coordinator_utils.py @@ -0,0 +1,543 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import pytest + +from pyleco.test import FakeContext +from pyleco.core.message import Message, MessageTypes +from pyleco.json_utils.errors import NOT_SIGNED_IN, DUPLICATE_NAME +from pyleco.json_utils.json_objects import Request, ResultResponse, ErrorResponse +from pyleco.utils.coordinator_utils import CommunicationError, ZmqNode, ZmqMultiSocket, Node,\ + Directory, FakeNode + + +class TestZmqMultiSocket: + @pytest.fixture + def socket(self): + socket = ZmqMultiSocket(context=FakeContext()) # type: ignore + socket._sock._r = [ # type: ignore + [b"id", b"version", b"receiver", b"sender", b"header", b"data"]] + return socket + + def test_poll_True(self, socket): + assert socket.message_received() is True + + def test_read_message(self, socket): + assert socket.read_message() == (b"id", Message.from_frames( + b"version", b"receiver", b"sender", b"header", b"data")) + + def test_poll_False(self, socket): + socket._sock._r = [] + assert socket.message_received() is False + + +class TestZmqNode: + @pytest.fixture + def node(self): + node = ZmqNode(context=FakeContext()) # type: ignore + return node + + def test_connection(self, node): + node.connect("abc") + assert node._dealer.addr == "tcp://abc" + + def test_disconnect(self, node): + node.connect("abc") + socket = node._dealer + node.disconnect() + assert socket.addr is None + assert node.is_connected() is False + + def test_is_connected_with_dealer(self, node): + node.connect("abc") + node._dealer.close() + assert node.is_connected() is False + + def test_is_connected(self, node): + assert node.is_connected() is False + node.connect("abc") + assert node.is_connected() is True + + +cid = b"conversation_id;" + + +def fake_generate_cid(): + return 
cid + + +@pytest.fixture() +def fake_cid_generation(monkeypatch): + monkeypatch.setattr("pyleco.core.serialization.generate_conversation_id", fake_generate_cid) + + +@pytest.fixture +def empty_directory() -> Directory: + return Directory(b"N1", b"N1.COORDINATOR", "N1host:12300") + + +@pytest.fixture +def directory(empty_directory: Directory) -> Directory: + d = empty_directory + d._nodes[b"N2"] = n2 = FakeNode() + d._node_ids[b"n2"] = n2 + n2.connect("N2host") + n2.namespace = b"N2" + d.add_component(b"send", b"send_id") + d.add_component(b"rec", b"rec_id") + return d + + +def fake_perf_counter(): + return 0. + + +@pytest.fixture() +def fake_counting(monkeypatch): + monkeypatch.setattr("pyleco.utils.coordinator_utils.perf_counter", fake_perf_counter) + + +class Test_add_component: + def test_add_component(self, empty_directory: Directory): + empty_directory.add_component(b"name", b"identity") + assert b"name" in empty_directory.get_components() + assert empty_directory.get_components()[b"name"].identity == b"identity" + + def test_adding_component_of_already_signed_in_component_succeeds(self, fake_counting, + directory: Directory): + # TODO not defined in LECO + sender = directory._components[b"send"] + sender.heartbeat = -100 + directory.add_component(name=b"send", identity=b"send_id") + # test that no error is raised. 
+ assert sender.heartbeat == 0 + + def test_reject_adding(self, directory: Directory): + with pytest.raises(ValueError): + directory.add_component(b"send", b"new_identity") + assert directory.get_components()[b"send"].identity != b"new_identity" + + +class Test_remove_component: + @pytest.mark.parametrize("identity", (b"", b"send_id")) + def test_remove(self, directory: Directory, identity): + directory.remove_component(b"send", identity) + assert b"send" not in directory.get_components() + + def test_remove_with_wrong_identity(self, directory: Directory): + with pytest.raises(ValueError): + directory.remove_component(b"send", b"wrong_identity") + + def test_remove_not_present(self, directory: Directory): + """Test whether an already removed component does not raise an error.""" + directory.remove_component(b"not_present", b"") + + +class Test_get_component_id: + def test_component_present(self, directory: Directory): + assert directory.get_component_id(b"send") == b"send_id" + + def test_component_missing(self, directory: Directory): + with pytest.raises(ValueError): + directory.get_component_id(b"not_present") + + +class Test_add_node_sender: + """These are the first two parts of the sign in process: connect and send the COSIGNIN.""" + @pytest.mark.parametrize("namespace", (b"N1", b"N2")) + def test_invalid_namespaces(self, directory: Directory, namespace): + with pytest.raises(ValueError): + directory.add_node_sender(Node(), "N3host", namespace) + + @pytest.mark.parametrize("address", ("N1host", "N3host", "N1host:12300", "N3host:12300")) + def test_invalid_address(self, directory: Directory, address): + """No new connection to the coordinator itself or if there is another attempt to connect + to that same remote node.""" + # simulate a connection to N3 under way + directory._waiting_nodes["N3host:12300"] = None # type: ignore + with pytest.raises(ValueError): + directory.add_node_sender(Node(), address, b"N3") + + def test_node_added(self, fake_counting, 
directory: Directory): + length = len(directory._waiting_nodes) + directory.add_node_sender(FakeNode(), "N3host", b"N3") + assert directory._nodes.get(b"N3host") is None + assert len(directory._waiting_nodes) > length + + @pytest.fixture + def node(self, fake_cid_generation, directory: Directory) -> Node: + address = "N3host:12345" + directory.add_node_sender(node=FakeNode(), address=address, namespace=b"N3") + return directory._waiting_nodes[address] + + def test_node_address(self, node: Node): + assert node.address == "N3host:12345" + + def test_node_heartbeat(self, fake_counting, node: Node): + assert node.heartbeat == 0 + + def test_node_connected(self, node: Node): + assert node.is_connected() + + def test_message_sent(self, node: Node): + assert node._messages_sent == [Message( # type: ignore + b"COORDINATOR", b"N1.COORDINATOR", + data=Request(id=1, method="coordinator_sign_in"), + message_type=MessageTypes.JSON, + conversation_id=cid)] + + def test_node_port_added_to_address(self, directory: Directory): + directory.add_node_sender(FakeNode(), "N3host", b"N3") + assert directory._waiting_nodes["N3host:12300"].address == "N3host:12300" + + +class Test_add_node_receiver_unknown: + """Handles a remote Coordinator, which is signing in.""" + @pytest.fixture + def unknown_node(self, fake_counting, directory: Directory) -> Node: + identity = b"receiver_id" + directory.add_node_receiver(identity=identity, namespace=b"N3") + return directory._node_ids[identity] + + def test_namespace_with_identity(self, unknown_node: Node): + assert unknown_node.namespace == b"N3" + + def test_heartbeat(self, unknown_node: Node): + assert unknown_node.heartbeat == 0 + + +class Test_add_node_receiver_partially_known: + identity = b"n3" + + @pytest.fixture + def partially_known_node(self, fake_counting, directory: Directory) -> Directory: + """Only sending to that node is possible.""" + node = FakeNode() + node.namespace = b"N3" + directory._nodes[node.namespace] = node + 
directory.add_node_receiver(identity=self.identity, namespace=b"N3") + return directory + + def test_heartbeat(self, partially_known_node: Directory): + assert partially_known_node._node_ids[self.identity].heartbeat == 0 + + def test_id_set(self, partially_known_node: Directory): + assert self.identity in partially_known_node._node_ids.keys() + + +def test_add_node_receiver_already_known(directory: Directory): + with pytest.raises(ValueError): + directory.add_node_receiver(b"n5", b"N2") + + +class Test_check_unfinished_node_connections: + @pytest.fixture + def directory_cunc(self, directory: Directory) -> Directory: + directory.add_node_sender(FakeNode(), "N3host", b"N3") + node = directory._waiting_nodes["N3host:12300"] + node._messages_read = [Message(b"N1.COORDINATOR", b"N3.COORDINATOR", # type: ignore + message_type=MessageTypes.JSON, + data=ResultResponse(id=1, result=None))] + directory.check_unfinished_node_connections() + return directory + + def test_new_node(self, directory_cunc: Directory): + assert directory_cunc.get_node(b"N3") is not None + + +def test_check_unfinished_node_connection_logs_error(directory: Directory, caplog): + directory.add_node_sender(FakeNode(), "N3host", b"N3") + node = directory._waiting_nodes["N3host:12300"] + + def read_message(timeout: int = 0) -> Message: + return Message.from_frames(*[b"frame 1", b"frame 2"]) # not enough frames + node.read_message = read_message # type: ignore + node._messages_read = ["just something to indicate a message in the buffer"] # type: ignore + directory.check_unfinished_node_connections() + assert caplog.records[-1].msg == "Message decoding failed." 
+ + +class Test_handle_node_message: + """Already included in check_unfinished_node_connections""" + @pytest.mark.parametrize("message", ( + Message(b"N1.COORDINATOR", b"N5.COORDINATOR", + message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=DUPLICATE_NAME)), + Message(b"N1.COORDINATOR", b"N5.COORDINATOR", + message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=NOT_SIGNED_IN)), + Message("N1.COORDINATOR", "N5.COORDINATOR", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "error": {"code": -32600, "message": "Invalid Request"}, + "id": None}) + )) + def test_rejected_sign_in(self, directory: Directory, message): + directory._waiting_nodes["N5host"] = n = FakeNode() + directory._handle_node_message(key="N5host", message=message) + assert "N5host" not in directory._waiting_nodes.keys() + assert n not in directory._nodes.values() + + @pytest.mark.parametrize("message", ( + Message(b"N1.COORDINATOR", b"N5.COORDINATOR", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "result": None, "id": 1}), + Message(b"N1.COORDINATOR", b"N5.COORDINATOR", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "result": None, "id": 5}), + )) + def test_successful_sign_in(self, directory: Directory, message): + directory._waiting_nodes["N5host"] = n = FakeNode() + directory._handle_node_message(key="N5host", message=message) + assert directory.get_node(b"N5") == n + assert "N5host" not in directory._waiting_nodes.keys() + + +class Test_finish_sign_in_to_remote: + @pytest.fixture + def directory_sirn(self, fake_cid_generation, directory: Directory) -> Directory: + directory.add_node_sender(FakeNode(), "N3host", b"N3") + temp_namespace = list(directory._waiting_nodes.keys())[0] + node = directory._waiting_nodes[temp_namespace] + assert node.namespace != b"N3" + directory._finish_sign_in_to_remote(temp_namespace, Message( + receiver=b"N1.COORDINATOR", + sender=b"N3.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 1, 
"result": None, "jsonrpc": "2.0"}, + )) + return directory + + def test_waiting_nodes_cleared(self, directory_sirn: Directory): + assert len(directory_sirn._waiting_nodes) == 0 + + def test_node_renamed(self, directory_sirn: Directory): + assert list(directory_sirn._nodes.keys()) == [b"N2", b"N3"] + + def test_node_namespace_set(self, directory_sirn: Directory): + assert directory_sirn._nodes[b"N3"].namespace == b"N3" + + def test_directory_sent(self, directory_sirn: Directory): + assert directory_sirn._nodes[b"N3"]._messages_sent == [ # type: ignore + Message(b'COORDINATOR', b'N1.COORDINATOR', + {"id": 1, "method": "coordinator_sign_in", "jsonrpc": "2.0"}, + message_type=MessageTypes.JSON, + conversation_id=cid), + Message( + b'N3.COORDINATOR', b'N1.COORDINATOR', + data=[{"id": 2, "method": "add_nodes", "params": + {"nodes": {"N1": "N1host:12300", "N2": "N2host", "N3": "N3host:12300"}}, + "jsonrpc": "2.0"}, + {"id": 3, "method": "record_components", "params": + {"components": ["send", "rec"]}, "jsonrpc": "2.0"}], + message_type=MessageTypes.JSON, + conversation_id=cid, + )] + + +class Test_combine_sender_and_receiver_nodes: + @pytest.fixture + def directory_crasn(self, directory: Directory) -> Directory: + directory.add_node_receiver(b"n3", b"N3") + return directory + + def test_match(self, directory_crasn: Directory): + node = Node() + node.namespace = b"N3" + directory_crasn._combine_sender_and_receiver_nodes(node) + assert node.heartbeat != -1 + assert directory_crasn._node_ids[b"n3"] == node + + def test_mismatch(self, directory_crasn: Directory): + node = Node() + node.namespace = b"N4" + directory_crasn._combine_sender_and_receiver_nodes(node) + assert node.heartbeat == -1 + assert directory_crasn._node_ids[b"n3"] != node + + +class Test_remove_node_without_checks: + def test_remove_only_receiver(self, directory: Directory): + directory.add_node_receiver(b"n3", b"N3") + assert b"n3" in directory._node_ids + directory._remove_node_without_checks(b"N3") + 
assert b"n3" not in directory._node_ids + assert b"N3" not in directory._nodes + + def test_remove_complete_node(self, directory: Directory): + assert b"n2" in directory._node_ids + assert b"N2" in directory._nodes + directory._remove_node_without_checks(b"N2") + assert b"n2" not in directory._node_ids + assert b"N2" not in directory._nodes + + def test_remove_by_namespace(self, directory: Directory): + node = Node() + directory._nodes[b"N3"] = node + directory._node_ids[b"some_id"] = node + directory._remove_node_without_checks(b"N3") + assert b"some_id" not in directory._node_ids + assert b"N3" not in directory._nodes + + +class Test_update_heartbeat: + def test_local_component(self, fake_counting, directory: Directory): + directory.update_heartbeat(b"send_id", Message.from_frames(b"", b"", b"N1.send", b"")) + assert directory.get_components()[b"send"].heartbeat == 0 + + def test_local_component_without_namespace(self, fake_counting, directory: Directory): + directory.update_heartbeat(b"send_id", Message.from_frames(b"", b"", b"send", b"")) + assert directory.get_components()[b"send"].heartbeat == 0 + + def test_local_component_signs_in(self, directory: Directory): + directory.update_heartbeat(b"new_id", Message.from_frames( + b"", b"COORDINATOR", b"send2", b"", + b'{"id": 2, "method": "sign_in", "jsonrpc": "2.0"}')) + # test that no error is raised + + def test_not_signed_in_component_signs_out(self, directory: Directory): + # TODO not determined by LECO + directory.update_heartbeat(b"new_id", Message.from_frames( + b"", b"COORDINATOR", b"send2", b"", + b'{"id": 2, "method": "sign_out", "jsonrpc": "2.0"}')) + # test that no error is raised + + def test_local_component_with_wrong_id(self, directory: Directory): + with pytest.raises(CommunicationError, match=DUPLICATE_NAME.message): + directory.update_heartbeat(b"new_id", Message.from_frames( + b"", b"COORDINATOR", b"send", b"")) + + def test_local_component_with_wrong_id_signs_in(self, directory: Directory): + 
with pytest.raises(CommunicationError, match=DUPLICATE_NAME.message): + directory.update_heartbeat(b"new_id", Message( + receiver=b"COORDINATOR", sender=b"send", + message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "id": 2, "method": "sign_in"})) + + def test_known_node(self, fake_counting, directory: Directory): + directory.update_heartbeat(b"n2", Message.from_frames(b"", b"COORDINATOR", b"N2.send", b"")) + assert directory.get_node_ids()[b"n2"].heartbeat == 0 + + @pytest.mark.parametrize("data", ( + {"jsonrpc": "2.0", "method": "coordinator_sign_in", "id": 7}, + {"jsonrpc": "2.0", "method": "coordinator_sign_out", "id": 7}, + )) + def test_signing_in_out_node(self, directory: Directory, data): + directory.update_heartbeat(b"n3", Message( + b"COORDINATOR", b"N3.COORDINATOR", data=data, message_type=MessageTypes.JSON)) + # test that no error is raised + + def test_from_unknown_node(self, directory: Directory): + with pytest.raises(CommunicationError, match="not signed in"): + directory.update_heartbeat(b"n3", Message(b"COORDINATOR", b"N3.send")) + + +class Test_find_expired_components: + def test_expired_component(self, directory: Directory, fake_counting): + directory.get_components()[b"send"].heartbeat = -3.5 + directory.find_expired_components(1) + assert b"send" not in directory.get_components().keys() + + def test_warn_component(self, directory: Directory, fake_counting): + directory.get_components()[b"send"].heartbeat = -1.5 + assert directory.find_expired_components(1) == [(b"send_id", b"send")] + + def test_active_Component(self, directory: Directory, fake_counting): + directory.get_components()[b"send"].heartbeat = -0.5 + assert directory.find_expired_components(1) == [] + assert b"send" in directory.get_components() + + +class Test_find_expired_nodes: + + def test_expired_node(self, directory: Directory, fake_counting): + directory.get_node_ids()[b"n2"].heartbeat = -3.5 + directory.find_expired_nodes(1) + assert b"n2" not in 
directory.get_node_ids() + + def test_warn_node(self, directory: Directory, fake_counting, fake_cid_generation): + directory.get_node_ids()[b"n2"].heartbeat = -1.5 + directory.find_expired_nodes(1) + assert directory.get_node_ids()[b"n2"]._messages_sent == [ # type: ignore + Message(b"N2.COORDINATOR", b"N1.COORDINATOR", + Request(id=0, method="pong"), + message_type=MessageTypes.JSON, + conversation_id=cid)] + + def test_active_node(self, directory: Directory, fake_counting): + directory.get_node_ids()[b"n2"].heartbeat = -0.5 + directory.find_expired_nodes(1) + assert b"n2" in directory.get_node_ids() + + def test_expired_waiting_node(self, directory: Directory, fake_counting): + waiting_node = FakeNode() + waiting_node.heartbeat = - 3.5 + directory._waiting_nodes["address"] = waiting_node + # act + directory.find_expired_nodes(1) + assert "address" not in directory._waiting_nodes + + +def test_get_node_id(directory: Directory): + assert directory.get_node_id(b"N2") == b"n2" + + +def test_get_node_id_not_first_place_in_list(directory: Directory): + n3 = FakeNode() + n3.namespace = b"N3" + directory._node_ids[b"n3"] = n3 + assert directory.get_node_id(b"N3") == b"n3" + + +def test_get_node_id_fails(directory: Directory): + with pytest.raises(ValueError, match="No receiving connection to namespace"): + directory.get_node_id(b"N5") + + +class Test_sign_out_from_node: + @pytest.fixture + def directory_wo_n2(self, fake_cid_generation, directory: Directory) -> Directory: + directory._test = directory.get_node(b"N2") # type: ignore + directory.sign_out_from_node(b"N2") + return directory + + def test_message_sent(self, directory_wo_n2: Directory): + assert directory_wo_n2._test._messages_sent == [ # type: ignore + Message(b"N2.COORDINATOR", b"N1.COORDINATOR", + data={"id": 1, "method": "coordinator_sign_out", "jsonrpc": "2.0"}, + message_type=MessageTypes.JSON, + conversation_id=cid)] + + def test_connection_closed(self, directory_wo_n2: Directory): + assert 
directory_wo_n2._test.is_connected() is False # type: ignore + + def test_n2_removed_from_nodes(self, directory_wo_n2: Directory): + assert b"N2" not in directory_wo_n2.get_nodes() + + def test_n2_removed_from_node_ids(self, directory_wo_n2: Directory): + assert b"n2" not in directory_wo_n2.get_node_ids() + + +def test_sign_out_from_unknown_node_fails(directory: Directory): + with pytest.raises(ValueError, match="is not known"): + directory.sign_out_from_node(b"unknown node") diff --git a/tests/utils/test_data_publisher.py b/tests/utils/test_data_publisher.py new file mode 100644 index 000000000..c6f3575d1 --- /dev/null +++ b/tests/utils/test_data_publisher.py @@ -0,0 +1,87 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pickle +import pytest + +from pyleco.utils.data_publisher import DataPublisher, DataMessage +from pyleco.test import FakeContext + + +@pytest.fixture +def publisher(): + publisher = DataPublisher(full_name="sender", context=FakeContext()) # type: ignore + return publisher + + +def test_socket_type(publisher: DataPublisher): + assert publisher.socket.socket_type == 1 + + +def test_connection(): + publisher = DataPublisher(full_name="", host="localhost", port=12345, + context=FakeContext()) # type: ignore + assert publisher.socket.addr == "tcp://localhost:12345" + + +def test_context_manager_closes_connection(): + with DataPublisher("", context=FakeContext()) as p: # type: ignore + pass + assert p.socket.closed is True + + +def test_call_publisher_sends(publisher: DataPublisher): + publisher(b"data") + # assert + message = DataMessage.from_frames(*publisher.socket._s.pop()) # type: ignore + assert message.topic == publisher.full_name.encode() + assert message.payload[0] == b"data" + + +def test_send_data(publisher: DataPublisher): + publisher.send_data( + data=b"data", topic=b"topic", conversation_id=b"cid", additional_payload=[b"1"] + ) + assert publisher.socket._s == [[b"topic", b"cid\x00", b"data", b"1"]] + + +def test_send_message(publisher: DataPublisher): + message = DataMessage.from_frames(b"topic", b"header", b"data") + publisher.send_message(message=message) + assert publisher.socket._s == [message.to_frames()] + + +def test_send_legacy(publisher: DataPublisher): + value = 5.67 + publisher.send_legacy({'key': value}) + message = DataMessage.from_frames(*publisher.socket._s[0]) # type: ignore + assert message.topic == b"key" + assert message.payload[0] == pickle.dumps(value) + assert message.message_type == 234 + + +def test_set_full_name(publisher: DataPublisher): + new_full_name = "new full name" + publisher.set_full_name(new_full_name) + assert publisher.full_name == new_full_name diff --git a/tests/utils/test_extended_message_handler.py 
b/tests/utils/test_extended_message_handler.py new file mode 100644 index 000000000..e4c562f07 --- /dev/null +++ b/tests/utils/test_extended_message_handler.py @@ -0,0 +1,149 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import json +import pickle +from unittest.mock import MagicMock + +import pytest + +from pyleco.core.data_message import DataMessage +from pyleco.test import FakeContext, FakeSocket +from pyleco.utils.events import SimpleEvent +from pyleco.utils.extended_message_handler import ExtendedMessageHandler + + +@pytest.fixture +def handler(): + handler = ExtendedMessageHandler(name="handler", + context=FakeContext()) # type: ignore + handler.namespace = "N1" + handler.stop_event = SimpleEvent() + handler.subscriber = FakeSocket(2) # type: ignore + handler.handle_subscription_message = MagicMock() # it is not defined + return handler + + +def test_read_subscription_message_calls_handle(handler: ExtendedMessageHandler): + message = DataMessage("", data="[]") + handler.subscriber._r = [message.to_frames()] # type: ignore + handler.read_subscription_message() + # assert + handler.handle_subscription_message.assert_called_once_with(message) # type: ignore + + +def test_handle_subscription_message_raises_not_implemented(): + handler = ExtendedMessageHandler(name="handler", context=FakeContext()) # type: ignore + with pytest.raises(NotImplementedError): + handler.handle_subscription_message(DataMessage(b"topic")) + + +def test_read_subscription_message_calls_handle_legacy(handler: ExtendedMessageHandler): + message = DataMessage("", data="[]", message_type=234) + handler.handle_full_legacy_subscription_message = MagicMock() # type: ignore[method-assign] + handler.subscriber._r = [message.to_frames()] # type: ignore + handler.read_subscription_message() + # assert + handler.handle_full_legacy_subscription_message.assert_called_once_with(message) # type: ignore + + +def test_subscribe_single(handler: ExtendedMessageHandler): + handler.subscribe_single(b"topic") + assert handler.subscriber._subscriptions == [b"topic"] # type: ignore + assert handler._subscriptions == [b"topic"] + + +def test_subscribe_single_again(handler: ExtendedMessageHandler, caplog: 
pytest.LogCaptureFixture): + # arrange + handler.subscribe_single(b"topic") + caplog.set_level(10) + # act + handler.subscribe_single(b"topic") + assert caplog.messages[-1] == f"Already subscribed to {b'topic'!r}." + + +@pytest.mark.parametrize("topics, result", ( + ("topic", [b"topic"]), # single string + (["topic1", "topic2"], [b"topic1", b"topic2"]), # list of strings + (("topic1", "topic2"), [b"topic1", b"topic2"]), # tuple of strings +)) +def test_subscribe(handler: ExtendedMessageHandler, topics, result): + handler.subscribe(topics) + assert handler._subscriptions == result + + +def test_unsubscribe_single(handler: ExtendedMessageHandler): + handler._subscriptions = [b"topic"] + handler.subscriber._subscriptions = [b"topic"] # type: ignore + handler.unsubscribe_single(b"topic") + assert handler._subscriptions == [] + assert handler.subscriber._subscriptions == [] # type: ignore + + +@pytest.mark.parametrize("topics, result", ( + ("topic", [b"topic"]), # single string + (["topic1", "topic2"], [b"topic1", b"topic2"]), # list of strings + (("topic1", "topic2"), [b"topic1", b"topic2"]), # tuple of strings +)) +def test_unsubscribe(handler: ExtendedMessageHandler, topics, result): + handler._subscriptions = result + handler.unsubscribe(topics) + assert handler._subscriptions == [] + + +def test_unsubscribe_all(handler: ExtendedMessageHandler): + handler._subscriptions = [b"topic1", b"topic2"] + handler.unsubscribe_all() + assert handler._subscriptions == [] + + +class Test_handle_full_legacy_subscription_message: + @pytest.fixture + def handler_hfl(self, handler: ExtendedMessageHandler) -> ExtendedMessageHandler: + handler.handle_subscription_data = MagicMock() # type: ignore[method-assign] + return handler + + def test_handle_pickled_message(self, handler_hfl: ExtendedMessageHandler): + data = ["some", "data", 5] + handler_hfl.handle_full_legacy_subscription_message( + DataMessage("topic", data=pickle.dumps(data), message_type=234) + ) + 
handler_hfl.handle_subscription_data.assert_called_once_with({"topic": data}) # type: ignore + + def test_handle_json_message(self, handler_hfl: ExtendedMessageHandler): + data = ["some", "data", 5] + handler_hfl.handle_full_legacy_subscription_message( + DataMessage("topic", data=json.dumps(data), message_type=235) + ) + handler_hfl.handle_subscription_data.assert_called_once_with({"topic": data}) # type: ignore + + def test_handle_unknown_message_type(self, handler_hfl: ExtendedMessageHandler): + with pytest.raises(ValueError): + handler_hfl.handle_full_legacy_subscription_message( + DataMessage("topic", data="", message_type=210) + ) + + def test_handle_subscription_data(self, handler: ExtendedMessageHandler): + with pytest.raises(NotImplementedError): + handler.handle_subscription_data({}) diff --git a/tests/utils/test_listener.py b/tests/utils/test_listener.py new file mode 100644 index 000000000..68eaae6ac --- /dev/null +++ b/tests/utils/test_listener.py @@ -0,0 +1,68 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import pytest + +from pyleco.test import FakeCommunicator +from pyleco.core.message import Message + +from pyleco.utils.listener import Listener, CommunicatorPipe + + +@pytest.fixture +def listener() -> Listener: + listener = Listener(name="test") # type: ignore + listener.communicator = FakeCommunicator(name="N.Pipe") # type: ignore + return listener + + +def test_communicator_name_is_returned(listener: Listener): + assert listener.name == "N.Pipe" + + +class Test_communicator_closed_at_stopped_listener(): + @pytest.fixture(scope="class") + def communicator(self) -> CommunicatorPipe: + # scope is class as starting the listener takes some time + listener = Listener(name="test") + listener.start_listen() + communicator = listener.communicator + listener.stop_listen() + return communicator + + def test_socket_closed(self, communicator: CommunicatorPipe): + assert communicator.socket.closed is True + + def test_internal_method(self, communicator: CommunicatorPipe): + """A method which is handled in the handler and not sent from the handler via LECO.""" + with pytest.raises(ConnectionRefusedError): + communicator.ask_handler("pong") + + def test_sending_messages(self, communicator: CommunicatorPipe): + with pytest.raises(ConnectionRefusedError): + communicator.send_message(Message("rec", "send")) + + def test_changing_name(self, communicator: CommunicatorPipe): + with pytest.raises(ConnectionRefusedError): + communicator.name = "abc" diff --git a/tests/utils/test_log_levels.py b/tests/utils/test_log_levels.py new file mode 100644 index 000000000..71049b154 --- /dev/null +++ b/tests/utils/test_log_levels.py @@ -0,0 +1,49 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import logging + +import pytest + +from pyleco.utils.log_levels import get_leco_log_level, LogLevels, PythonLogLevels + + +@pytest.mark.parametrize("level, value", ( + (logging.DEBUG, LogLevels.DEBUG), + (logging.INFO, LogLevels.INFO), + (logging.WARNING, LogLevels.WARNING), + (logging.ERROR, LogLevels.ERROR), + (logging.CRITICAL, LogLevels.CRITICAL), +)) +def test_get_leco_log_level(level, value): + assert get_leco_log_level(level) == value + + +def test_failing_get_leco_log_level(): + with pytest.raises(ValueError): + get_leco_log_level(5) + + +def test_PythonLogLevels(): + assert PythonLogLevels["DEBUG"] == logging.DEBUG diff --git a/tests/utils/test_message_handler.py b/tests/utils/test_message_handler.py new file mode 100644 index 000000000..7ceddc315 --- /dev/null +++ b/tests/utils/test_message_handler.py @@ -0,0 +1,751 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from __future__ import annotations +import logging +from unittest.mock import MagicMock +import time +from typing import Optional + +import pytest + +from pyleco.core import VERSION_B +from pyleco.core.message import Message, MessageTypes +from pyleco.core.leco_protocols import ExtendedComponentProtocol, LogLevels +from pyleco.core.internal_protocols import CommunicatorProtocol +from pyleco.core.serialization import serialize_data +from pyleco.test import FakeContext, FakePoller +from pyleco.json_utils.json_objects import Request, ResultResponse, ErrorResponse +from pyleco.json_utils.errors import JSONRPCError, INVALID_REQUEST, NOT_SIGNED_IN, DUPLICATE_NAME,\ + NODE_UNKNOWN, RECEIVER_UNKNOWN + +from pyleco.utils.message_handler import MessageHandler, SimpleEvent + + +handler_name = "N1.handler" +remote_name = "remote" +cid = b"conversation_id;" +header = b"".join((cid, b"\x00" * 4)) + + +def fake_generate_cid(): + return cid + + +@pytest.fixture() +def fake_cid_generation(monkeypatch): + monkeypatch.setattr("pyleco.core.serialization.generate_conversation_id", fake_generate_cid) + + +@pytest.fixture() +def handler() -> MessageHandler: + handler = MessageHandler(name=handler_name.split(".")[1], context=FakeContext()) # type: ignore + handler.namespace = handler_name.split(".")[0] + handler.stop_event = SimpleEvent() + handler.timeout = 0.1 + return handler + + +class TestProtocolImplemented: + protocol_methods = [m for m in dir(ExtendedComponentProtocol) if not m.startswith("_")] + + def static_test_methods_are_present(self): + def testing(component: ExtendedComponentProtocol): + pass + testing(MessageHandler(name="test")) + + def test_internal_communicator_protocol(communicator: 
CommunicatorProtocol): + pass + test_internal_communicator_protocol(MessageHandler(name="test")) + + @pytest.fixture + def component_methods(self, handler: MessageHandler): + response = handler.rpc.process_request( + '{"id": 1, "method": "rpc.discover", "jsonrpc": "2.0"}') + result = handler.rpc_generator.get_result_from_response(response) # type: ignore + return result.get('methods') + + @pytest.mark.parametrize("method", protocol_methods) + def test_method_is_available(self, component_methods, method): + for m in component_methods: + if m.get('name') == method: + return + raise AssertionError(f"Method {method} is not available.") + + +class Test_setup_logging: + def test(self, handler: MessageHandler): + logger = logging.getLogger("test") + logger.addHandler(logging.NullHandler()) + handler.setup_logging(logger) + assert len(logger.handlers) == 2 + assert handler.root_logger == logger + assert handler.log == logging.getLogger("test.MessageHandler") + + +class Test_namespace_setter: + def test_full_name_without_namespace(self, handler: MessageHandler): + handler.namespace = None + assert handler.full_name == "handler" + + @pytest.fixture + def handler_ns(self, handler: MessageHandler) -> MessageHandler: + handler.namespace = "xyz" + return handler + + def test_namespace(self, handler_ns: MessageHandler): + assert handler_ns.namespace == "xyz" + + def test_full_name(self, handler_ns: MessageHandler): + assert handler_ns.full_name == "xyz.handler" + + def test_rpc_title(self, handler_ns: MessageHandler): + assert handler_ns.rpc.title == "xyz.handler" + + def test_log_handler(self, handler_ns: MessageHandler): + assert handler_ns.log_handler.full_name == "xyz.handler" + + +class Test_sign_in: + def test_sign_in_successful(self, handler: MessageHandler, fake_cid_generation): + message = Message(receiver=b"N3.handler", sender=b"N3.COORDINATOR", + conversation_id=cid, + message_type=MessageTypes.JSON, + data={ + "id": 0, "result": None, "jsonrpc": "2.0", + }) + 
handler.socket._r = [message.to_frames()] # type: ignore + handler.namespace = None + handler.sign_in() + assert handler.namespace == "N3" + + def test_not_valid_message(self, handler: MessageHandler, caplog: pytest.LogCaptureFixture, + fake_cid_generation): + message = Message("handler", "COORDINATOR", data=b"[]", conversation_id=cid) + handler.socket._r = [message.to_frames()] # type: ignore + handler.sign_in() + caplog.records[-1].msg.startswith("Not json message received:") + + def test_duplicate_name(self, handler: MessageHandler, caplog: pytest.LogCaptureFixture, + fake_cid_generation): + handler.namespace = None + message = Message("handler", "N3.COORDINATOR", message_type=MessageTypes.JSON, + data=ErrorResponse(id=5, error=DUPLICATE_NAME), + conversation_id=cid) + handler.socket._r = [message.to_frames()] # type: ignore + handler.sign_in() + assert handler.namespace is None + assert caplog.records[-1].msg == "Sign in failed, the name is already used." + + def test_handle_unknown_error(self, handler: MessageHandler, caplog: pytest.LogCaptureFixture, + fake_cid_generation): + handler.namespace = None + message = Message("handler", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "error": {'code': 123545, "message": "error_msg"}, "id": 5 + }, conversation_id=cid) + handler.socket._r = [message.to_frames()] # type: ignore + handler.sign_in() + assert handler.namespace is None + assert caplog.records[-1].msg.startswith("Sign in failed, unknown error") + + def test_handle_request_message(self, handler: MessageHandler, caplog: pytest.LogCaptureFixture, + fake_cid_generation + ): + """Handle a message without result or error.""" + handler.namespace = None + message = Message("handler", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "id": 5, "method": "some_method", + }, conversation_id=cid) + handler.socket._r = [message.to_frames()] # type: ignore + handler.sign_in() + assert handler.namespace is None + 
assert caplog.records[-1].msg.startswith("Sign in failed, unknown error") + + def test_log_timeout_error(self, handler: MessageHandler, caplog: pytest.LogCaptureFixture): + handler.sign_in() + assert caplog.records[-1].msg.startswith("Signing in timed out.") + + +class Test_finish_sign_in: + @pytest.fixture + def handler_fsi(self, handler: MessageHandler, caplog: pytest.LogCaptureFixture): + caplog.set_level(logging.INFO) + handler.finish_sign_in(response_message=Message( + b"handler", b"N5.COORDINATOR", + message_type=MessageTypes.JSON, + data={"id": 10, "result": None, "jsonrpc": "2.0"})) + return handler + + def test_namespace(self, handler_fsi: MessageHandler): + assert handler_fsi.namespace == "N5" + + def test_full_name(self, handler_fsi: MessageHandler): + assert handler_fsi.full_name == "N5.handler" + + def test_log_message(self, handler_fsi: MessageHandler, caplog: pytest.LogCaptureFixture): + assert caplog.get_records("setup")[-1].message == ("Signed in to Node 'N5'.") + + +def test_sign_out_fail(handler: MessageHandler, caplog: pytest.LogCaptureFixture, + fake_cid_generation): + handler.namespace = "N3" + message = Message("handler", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "error": {"code": 12345}, "id": 1, + }, conversation_id=cid) + handler.socket._r = [message.to_frames()] # type: ignore + handler.sign_out() + assert handler.namespace is not None + assert caplog.messages[-1].startswith("Signing out failed") + + +def test_sign_out_success(handler: MessageHandler, fake_cid_generation): + handler.namespace = "N3" + message = Message("handler", "N3.COORDINATOR", message_type=MessageTypes.JSON, data={ + "jsonrpc": "2.0", "result": None, "id": 1, + }, conversation_id=cid) + handler.socket._r = [message.to_frames()] # type: ignore + handler.sign_out() + assert handler.namespace is None + + +def test_finish_sign_out(handler: MessageHandler): + handler.finish_sign_out() + assert handler.namespace is None + assert 
handler.full_name == "handler" + + +# test communication +def test_send(handler: MessageHandler): + handler.send("N2.CB", conversation_id=cid, message_id=b"sen", data=[["TEST"]], + message_type=MessageTypes.JSON) + assert handler.socket._s == [[VERSION_B, b"N2.CB", b"N1.handler", b"conversation_id;sen\x01", + b'[["TEST"]]']] + + +def test_send_with_sender(handler: MessageHandler): + handler.send("N2.CB", sender="sender", conversation_id=cid, message_id=b"sen", + data=[["TEST"]], + message_type=MessageTypes.JSON) + assert handler.socket._s == [[VERSION_B, b"N2.CB", b"sender", b"conversation_id;sen\x01", + b'[["TEST"]]']] + + +def test_send_message_raises_error(handler: MessageHandler, caplog: pytest.LogCaptureFixture): + handler.send(receiver=remote_name, header=b"header", conversation_id=b"12345") + assert caplog.messages[-1].startswith("Composing message with") + + +def test_heartbeat(handler: MessageHandler, fake_cid_generation): + handler.heartbeat() + assert handler.socket._s == [[VERSION_B, b"COORDINATOR", b"N1.handler", header]] + + +class Test_read_message: + m1 = Message(receiver=handler_name, sender="xy") # some message + mr = Message(receiver=handler_name, sender="xy", conversation_id=cid) # requested message + m2 = Message(receiver=handler_name, sender="xy") # another message + + conf: list[tuple[list[Message], list[Message], Optional[bytes], list[Message], list[Message], + str]] = [ + # socket_in, buffer_in, cid, socket_out, buffer_out, test_id + # find first not requested message + ([m1], [], None, [], [], "return first message from socket"), + ([m2], [m1], None, [m2], [], "return first message from buffer, not socket"), + ([m1], [mr], None, [], [mr], "ignore requested message in buffer"), + ([mr, m1], [], None, [], [mr], "ignore requested message in socket"), + # find requested message + ([mr], [], cid, [], [], "return specific message from socket"), + ([m2], [mr], cid, [m2], [], "return specific message from buffer"), + ([mr], [m2], cid, [], [m2], 
"return specific message from socket although filled buffer"), + ([m2, mr, m1], [], cid, [m1], [m2], "find specific message in socket"), + ([], [m2, mr, m1], cid, [], [m2, m1], "find specific message in buffer"), + ] + ids = [test[-1] for test in conf] + + def test_return_message_from_socket(self, handler: MessageHandler): + handler.socket._r = [self.m1.to_frames()] # type: ignore + assert handler.read_message() == self.m1 + + def test_return_message_from_buffer(self, handler: MessageHandler): + handler.message_buffer.add_message(self.m1) + assert handler.read_message() == self.m1 + # assert that no error is raised + + def test_cid_not_longer_in_requested_ids(self, handler: MessageHandler): + handler.message_buffer.add_conversation_id(cid) + handler.message_buffer.add_message(self.mr) + handler.read_message(conversation_id=cid) + assert handler.message_buffer.is_conversation_id_requested(cid) is False + + @pytest.mark.parametrize("test", conf, ids=ids) + def test_return_correct_message(self, + test: tuple[list[Message], list, Optional[bytes]], + handler: MessageHandler): + socket, buffer, cid0, *_ = test + handler.socket._r = [m.to_frames() for m in socket] # type: ignore + for m in buffer: + handler.message_buffer.add_message(m) + handler.message_buffer.add_conversation_id(cid) + # act and assert + assert handler.read_message(conversation_id=cid0) == self.m1 if cid is None else self.mr + + @pytest.mark.parametrize("test", conf, ids=ids) + def test_correct_buffer_socket(self, test, handler: MessageHandler): + socket_in, buffer_in, cid0, socket_out, buffer_out, *_ = test + handler.socket._r = [m.to_frames() for m in socket_in] # type: ignore + for m in buffer_in: + handler.message_buffer.add_message(m) + handler.message_buffer.add_conversation_id(cid) + # act + handler.read_message(conversation_id=cid0) + assert handler.socket._r == [m.to_frames() for m in socket_out] # type: ignore + assert handler.message_buffer._messages == buffer_out + + def 
test_timeout_zero_works(self, handler: MessageHandler): + handler.socket._r = [self.m1.to_frames()] # type: ignore + handler.read_message(timeout=0) + # assert that no error is raised + + def test_timeout_error(self, handler: MessageHandler): + def waiting(*args, **kwargs): + time.sleep(.1) + return self.m1 + handler._read_socket_message = waiting # type: ignore[assignment] + with pytest.raises(TimeoutError): + handler.read_message(conversation_id=cid, timeout=0) + + +class Test_ask_message: + expected_sent = Message(remote_name, sender=handler_name, conversation_id=cid) + expected_response = Message(handler_name, sender=remote_name, conversation_id=cid) + + @pytest.fixture + def handler_asked(self, handler: MessageHandler): + handler.socket._r = [self.expected_response.to_frames()] # type: ignore + self.response = handler.ask_message(message=self.expected_sent) + return handler + + def test_sent_expected(self, handler_asked: MessageHandler): + assert handler_asked.socket._s == [self.expected_sent.to_frames()] + + def test_expected_response(self, handler_asked): + assert self.expected_response == self.response + + def test_no_cid_in_requested_cids_list(self, handler_asked: MessageHandler): + assert handler_asked.message_buffer.is_conversation_id_requested(cid) is False + + +class Test_read_and_handle_message: + def test_handle_message_handles_no_new_socket_message(self, handler: MessageHandler): + """Test, that the message handler does not raise an error without a new socket message.""" + handler.message_buffer.add_conversation_id(cid) + handler.socket._r = [ # type: ignore + Message(receiver=handler_name, sender=remote_name, conversation_id=cid).to_frames()] + # act + handler.read_and_handle_message() + # assert that no error is raised. 
+ + def test_handle_message_ignores_heartbeats(self, handler: MessageHandler): + handler.handle_message = MagicMock() # type: ignore + # empty message of heartbeat + handler.socket._r = [[VERSION_B, b"N1.handler", b"whatever", b";"]] # type: ignore + handler.read_and_handle_message() + handler.handle_message.assert_not_called() + + @pytest.mark.parametrize("i, out", ( + ( # shutdown + [VERSION_B, b"N1.handler", b"N1.CB", b"conversation_id;mid" + bytes((MessageTypes.JSON,)), + serialize_data({"id": 5, "method": "shut_down", "jsonrpc": "2.0"})], + [VERSION_B, b"N1.CB", b"N1.handler", b"conversation_id;\x00\x00\x00\x00", + serialize_data({"id": 5, "result": None, "jsonrpc": "2.0"})]), + ( # pong + Message("N1.handler", "N1.COORDINATOR", conversation_id=cid, + message_type=MessageTypes.JSON, data=Request(id=2, method="pong") + ).to_frames(), + Message("N1.COORDINATOR", "N1.handler", conversation_id=cid, + message_type=MessageTypes.JSON, + data=ResultResponse(id=2, result=None)).to_frames()), + )) + def test_read_and_handle_message(self, handler: MessageHandler, + i: list[bytes], out: list[bytes]): + handler.socket._r = [i] # type: ignore + handler.read_and_handle_message() + for j in range(len(out)): + if j == 3: + continue # reply adds timestamp + assert handler.socket._s[0][j] == out[j] # type: ignore + + def test_handle_not_signed_in_message(self, handler: MessageHandler): + handler.sign_in = MagicMock() # type: ignore + handler.socket._r = [Message(receiver="handler", sender="N1.COORDINATOR", # type: ignore + message_type=MessageTypes.JSON, + data=ErrorResponse(id=5, error=NOT_SIGNED_IN), + ).to_frames()] + handler.read_and_handle_message() + assert handler.namespace is None + handler.sign_in.assert_called_once() + assert handler.full_name == "handler" + + def test_handle_node_unknown_message(self, handler: MessageHandler): + error = Message("N1.handler", "N1.COORDINATOR", message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=NODE_UNKNOWN)) + 
handler.message_buffer.add_message(error) + handler.read_and_handle_message() + # assert that no error is raised and that no message is sent + assert handler.socket._s == [] + + def test_handle_receiver_unknown_message(self, handler: MessageHandler): + error = Message("N1.handler", "N1.COORDINATOR", message_type=MessageTypes.JSON, + data=ErrorResponse(id=None, error=RECEIVER_UNKNOWN)) + handler.message_buffer.add_message(error) + handler.read_and_handle_message() + # assert that no error is raised and that no message is sent + assert handler.socket._s == [] + + def test_handle_ACK_does_not_change_Namespace(self, handler: MessageHandler): + """Test that an ACK does not change the Namespace, if it is already set.""" + handler.socket._r = [Message(b"N3.handler", b"N3.COORDINATOR", # type: ignore + message_type=MessageTypes.JSON, + data={"id": 3, "result": None, "jsonrpc": "2.0"}).to_frames()] + handler.namespace = "N1" + handler.read_and_handle_message() + assert handler.namespace == "N1" + + def test_handle_invalid_json_message(self, handler: MessageHandler, + caplog: pytest.LogCaptureFixture): + """An invalid message should not cause the message handler to crash.""" + handler.socket._r = [Message(b"N3.handler", b"N3.COORDINATOR", # type: ignore + message_type=MessageTypes.JSON, + data={"without": "method..."}).to_frames()] + handler.read_and_handle_message() + assert caplog.records[-1].msg.startswith("Invalid JSON message") + + def test_handle_corrupted_message(self, handler: MessageHandler, + caplog: pytest.LogCaptureFixture): + """An invalid message should not cause the message handler to crash.""" + handler.socket._r = [Message(b"N3.handler", b"N3.COORDINATOR", # type: ignore + message_type=MessageTypes.JSON, + data=[]).to_frames()] + handler.read_and_handle_message() + assert caplog.records[-1].msg.startswith("Invalid JSON message") + + def test_handle_undecodable_message(self, handler: MessageHandler, + caplog: pytest.LogCaptureFixture): + """An invalid message 
should not cause the message handler to crash.""" + message = Message( + b"N3.handler", + b"N3.COORDINATOR", + message_type=MessageTypes.JSON, + additional_payload=[b"()"], + ) + handler.socket._r = [message.to_frames()] # type: ignore + handler.read_and_handle_message() + assert caplog.records[-1].msg.startswith("Could not decode") + + +def test_handle_unknown_message_type(handler: MessageHandler, caplog: pytest.LogCaptureFixture): + message = Message(handler_name, sender="sender", message_type=255) + handler.handle_message(message=message) + assert caplog.records[-1].message.startswith("Message from b'sender'") + + +class Test_process_json_message: + def test_handle_rpc_request(self, handler: MessageHandler): + message = Message(receiver=handler_name, sender=remote_name, + data=Request(id=5, method="pong"), + conversation_id=cid, message_type=MessageTypes.JSON) + response = Message(receiver=remote_name, + data=ResultResponse(id=5, result=None), + conversation_id=cid, message_type=MessageTypes.JSON) + result = handler.process_json_message(message=message) + assert result == response + + def test_handle_json_not_request(self, handler: MessageHandler): + """Test, that a json message, which is not a request, is handled appropriately.""" + data = ResultResponse(id=5, result=None) # some json, which is not a request. 
+ message = Message(receiver=handler_name, sender=remote_name, + data=data, + conversation_id=cid, message_type=MessageTypes.JSON) + result = handler.process_json_message(message=message) + assert result.receiver == remote_name.encode() + assert result.conversation_id == cid + assert result.header_elements.message_type == MessageTypes.JSON + with pytest.raises(JSONRPCError) as exc_info: + handler.rpc_generator.get_result_from_response(result.data) # type: ignore + error = exc_info.value.rpc_error + assert error.code == INVALID_REQUEST.code + assert error.message == INVALID_REQUEST.message + + +class Test_process_json_message_with_created_binary: + payload_in: list[bytes] + payload_out: list[bytes] + + @pytest.fixture( + params=( + # normally created binary method + {"method": "do_binary", "params": [5]}, # with a list + {"method": "do_binary", "params": {"data": 5}}, # a dictionary + # manually created binary method + {"method": "do_binary_manually", "params": [5]}, + {"method": "do_binary_manually", "params": {"data": 5}}, + ), + ids=( + "created, list", + "created, dict", + "manual, list", + "manual, dict", + ), + ) + def data(self, request): + """Create a request with a list and a dict of other parameters.""" + d = {"jsonrpc": "2.0", "id": 8} + d.update(request.param) + return d + + @pytest.fixture + def handler_b(self, handler: MessageHandler): + test_class = self + class SpecialHandler(MessageHandler): + def do_binary_manually(self, data: int) -> int: + test_class.payload_in = self.current_message.payload[1:] + self.additional_response_payload = test_class.payload_out + return data + + def do_binary( + self, data: int, additional_payload: Optional[list[bytes]] = None + ) -> tuple[int, list[bytes]]: + test_class.payload_in = additional_payload # type: ignore + return data, test_class.payload_out + + handler = SpecialHandler(name=handler_name.split(".")[1], context=FakeContext()) # type: ignore + handler.namespace = handler_name.split(".")[0] + 
handler.stop_event = SimpleEvent() + handler.timeout = 0.1 + + handler.register_rpc_method(handler.do_binary_manually) + handler.register_binary_rpc_method( + handler.do_binary, accept_binary_input=True, return_binary_output=True + ) + return handler + + def test_message_stored(self, handler_b: MessageHandler, data): + m_in = Message("abc", data=data, message_type=MessageTypes.JSON) + handler_b.process_json_message(m_in) + assert handler_b.current_message == m_in + + def test_empty_additional_payload(self, handler_b: MessageHandler, data): + m_in = Message("abc", data=data, message_type=MessageTypes.JSON) + handler_b.process_json_message(m_in) + assert handler_b.additional_response_payload is None + + def test_binary_payload_available(self, handler_b: MessageHandler, data): + m_in = Message( + "abc", data=data, message_type=MessageTypes.JSON, additional_payload=[b"def"] + ) + self.payload_out = [] + handler_b.process_json_message(m_in) + assert self.payload_in == [b"def"] + + def test_binary_payload_sent(self, handler_b: MessageHandler, data): + m_in = Message("abc", data=data, message_type=MessageTypes.JSON) + self.payload_out = [b"ghi"] + response = handler_b.process_json_message(m_in) + assert response.payload[1:] == [b"ghi"] + assert response.data == {"jsonrpc": "2.0", "id": 8, "result": 5} + + +def test_handle_binary_return_value(handler: MessageHandler): + payload = [b"abc", b"def"] + result = handler._handle_binary_return_value((None, payload)) + assert result is None + assert handler.additional_response_payload == payload + + +class Test_generate_binary_method: + @pytest.fixture + def binary_method(self): + def binary_method(index: int, additional_payload: list[bytes]) -> tuple[None, list[bytes]]: + """Docstring of binary method.""" + return None, [additional_payload[index]] + return binary_method + + @pytest.fixture(params=(True, False)) + def modified_binary_method(self, handler: MessageHandler, binary_method, request): + handler.current_message = 
Message( + "rec", "send", data=b"", additional_payload=[b"0", b"1", b"2", b"3"] + ) + self._accept_binary_input = request.param + mod = handler._generate_binary_capable_method( + binary_method, accept_binary_input=self._accept_binary_input, return_binary_output=True + ) + self.handler = handler + return mod + + def test_name(self, binary_method, modified_binary_method): + assert modified_binary_method.__name__ == binary_method.__name__ + + def test_docstring(self, modified_binary_method, binary_method): + doc_addition = ( + "(binary input output method)" + if self._accept_binary_input + else "(binary output method)" + ) + assert modified_binary_method.__doc__ == binary_method.__doc__ + "\n" + doc_addition + + @pytest.mark.parametrize( + "input, output, string", + ( + (False, False, "(binary method)"), + (True, False, "(binary input method)"), + (False, True, "(binary output method)"), + (True, True, "(binary input output method)"), + ), + ) + def test_docstring_without_original_docstring( + self, handler: MessageHandler, input, output, string + ): + def binary_method(additional_payload): + return 7 + mod = handler._generate_binary_capable_method( + binary_method, accept_binary_input=input, return_binary_output=output + ) + assert mod.__doc__ == string + + def test_annotation(self, modified_binary_method, binary_method): + assert modified_binary_method.__annotations__ == binary_method.__annotations__ + + def test_functionality_kwargs(self, modified_binary_method): + if self._accept_binary_input: + assert modified_binary_method(index=1) is None + else: + assert ( + modified_binary_method(index=1, additional_payload=[b"0", b"1", b"2", b"3"]) is None + ) + assert self.handler.additional_response_payload == [b"1"] + + def test_functionality_args(self, modified_binary_method): + if self._accept_binary_input: + assert modified_binary_method(1) is None + else: + assert modified_binary_method(1, [b"0", b"1", b"2", b"3"]) is None + assert 
self.handler.additional_response_payload == [b"1"] + + def test_binary_input_from_message(self, handler: MessageHandler): + handler.current_message = Message("rec", "send", data=b"", additional_payload=[b"0"]) + + def binary_method(additional_payload = None): + return 7 + mod = handler._generate_binary_capable_method( + binary_method, accept_binary_input=True, return_binary_output=False + ) + assert mod() == 7 + + +class Test_listen: + @pytest.fixture + def handler_l(self, handler: MessageHandler, fake_cid_generation): + event = SimpleEvent() + event.set() + handler.socket._r = [ # type: ignore + Message("handler", "N1.COORDINATOR", message_type=MessageTypes.JSON, + conversation_id=cid, + data={"id": 2, "result": None, "jsonrpc": "2.0"}).to_frames()] + handler.listen(stop_event=event) + return handler + + def test_messages_are_sent(self, handler_l: MessageHandler): + assert handler_l.socket._s == [ + Message("COORDINATOR", "N1.handler", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 1, "method": "sign_in", "jsonrpc": "2.0"}).to_frames(), + Message("COORDINATOR", "N1.handler", conversation_id=cid, + message_type=MessageTypes.JSON, + data={"id": 2, "method": "sign_out", "jsonrpc": "2.0"}).to_frames(), + ] + + def test_next_beat(self, handler_l: MessageHandler): + assert handler_l.next_beat > 0 + + def test_loop_element_changes_heartbeat(self, handler_l: MessageHandler): + handler_l.next_beat = 0 + # Act + handler_l._listen_loop_element(poller=FakePoller(), waiting_time=0) # type: ignore + assert handler_l.next_beat > 0 + + def test_loop_element_does_not_change_heartbeat_if_short(self, handler_l: MessageHandler): + handler_l.next_beat = float("inf") + # Act + handler_l._listen_loop_element(poller=FakePoller(), waiting_time=0) # type: ignore + assert handler_l.next_beat == float("inf") + + def test_KeyboardInterrupt_in_loop(self, handler: MessageHandler): + def raise_error(poller, waiting_time): + raise KeyboardInterrupt + handler.sign_in = 
MagicMock() # type: ignore[method-assign] + handler._listen_loop_element = raise_error # type: ignore + handler.listen() + # assert that no error is raised and that the test does not hang + + +def test_listen_loop_element(handler: MessageHandler): + poller = FakePoller() + poller.register(handler.socket) + handler.socket._r = [ # type: ignore + Message("Test", "COORDINATOR").to_frames() + ] + socks = handler._listen_loop_element(poller, 0) # type: ignore + assert socks == {} + + +class Test_listen_close: + @pytest.fixture + def handler_lc(self, handler: MessageHandler): + handler._listen_close(0) + return handler + + def test_sign_out_sent(self, handler_lc: MessageHandler): + sent = Message.from_frames(*handler_lc.socket._s[-1]) # type: ignore + assert handler_lc.socket._s == [Message("COORDINATOR", "N1.handler", + conversation_id=sent.conversation_id, + message_type=MessageTypes.JSON, + data={ + "id": 1, "method": "sign_out", "jsonrpc": "2.0", + }, + ).to_frames()] + + def test_warning_log_written(self, handler_lc: MessageHandler, + caplog: pytest.LogCaptureFixture): + assert caplog.get_records("setup")[-1].message == "Waiting for sign out response timed out." + + +def test_set_log_level(handler: MessageHandler): + handler.set_log_level(LogLevels.ERROR) + assert handler.root_logger.level == 40 # logging.ERROR + + +def test_shutdown(handler: MessageHandler): + handler.stop_event = SimpleEvent() + handler.shut_down() + assert handler.stop_event.is_set() is True diff --git a/tests/utils/test_parser.py b/tests/utils/test_parser.py new file mode 100644 index 000000000..9952e92f6 --- /dev/null +++ b/tests/utils/test_parser.py @@ -0,0 +1,49 @@ +# +# This file is part of the PyLECO package. 
+# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import logging + +import pytest + +from pyleco.utils.parser import parser, parse_command_line_parameters + + +class Test_parse_command_line_parameters: + @pytest.fixture + def parsed_kwargs(self): + self.parser = parser + return parse_command_line_parameters(parser=self.parser, + arguments=["-v", "--name", "name_value", "-v"], + parser_description="Some description", + logging_default=logging.WARNING + ) + + def test_parser_description(self, parsed_kwargs): + assert self.parser.description == "Some description" + + def test_kwargs(self, parsed_kwargs): + assert parsed_kwargs == {'name': "name_value"} + + def test_logging_level(self, parsed_kwargs): + assert logging.getLogger("__main__").level == logging.DEBUG diff --git a/tests/utils/test_pipe_handler.py b/tests/utils/test_pipe_handler.py new file mode 100644 index 000000000..685487a02 --- /dev/null +++ b/tests/utils/test_pipe_handler.py @@ -0,0 +1,355 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +from unittest.mock import MagicMock + +import pytest +import zmq + +from pyleco.core.message import Message +from pyleco.test import FakeContext + +from pyleco.utils.pipe_handler import LockedMessageBuffer, PipeHandler, CommunicatorPipe,\ + PipeCommands + +cid = b"conversation_id;" # conversation_id +header = b"".join((cid, b"mid", b"\x00")) +# the result +msg = Message(b"r", b"s", conversation_id=cid, message_id=b"mid") +msg_list = ("r", "s", cid, b"", None) +# some different message +other = Message(b"r", b"s", conversation_id=b"conversation_id9", message_id=b"mid") + + +@pytest.fixture +def message_buffer() -> LockedMessageBuffer: + message_buffer = LockedMessageBuffer() + message_buffer._messages = [msg] + return message_buffer + + +# Test MessageBuffer +def test_add_conversation_id(message_buffer: LockedMessageBuffer): + message_buffer.add_conversation_id(conversation_id=cid) + assert cid in message_buffer._requested_ids + + +class Test_add_response_message_successful: + @pytest.fixture + def message_buffer_added(self) -> LockedMessageBuffer: + # Arrange + mb = LockedMessageBuffer() + assert len(mb) == 0 + mb.add_conversation_id(cid) + # Act + mb.add_message(msg) + return mb + + def test_return_value(self, message_buffer_added: LockedMessageBuffer): + assert len(message_buffer_added) == 1 + + def test_msg_in_buffer(self, message_buffer_added: LockedMessageBuffer): + assert message_buffer_added._messages == [msg] + + +class Test_check_message_in_buffer: + @pytest.fixture + def message_buffer_cmib(self, message_buffer: LockedMessageBuffer): + predicate = message_buffer._predicate_generator(cid) + message_buffer._predicate = predicate # type: ignore + return message_buffer + + def 
test_message_is_in_first_place(self, message_buffer_cmib: LockedMessageBuffer): + assert message_buffer_cmib._predicate() == msg # type: ignore + assert message_buffer_cmib._messages == [] + + def test_no_suitable_message_in_buffer(self, message_buffer: LockedMessageBuffer): + predicate = message_buffer._predicate_generator(conversation_id=b"other_cid") + assert predicate() is None + assert message_buffer._messages != [] + + def test_msg_somewhere_in_buffer(self, message_buffer_cmib: LockedMessageBuffer): + o2 = Message(b"r", b"s", conversation_id=b"conversation_id9", message_id=b"mi7") + message_buffer_cmib._messages = [other, msg, o2] + assert message_buffer_cmib._predicate() == msg # type:ignore + assert message_buffer_cmib._messages == [other, o2] + + +@pytest.mark.parametrize("buffer", ( + [msg], # msg is only message + [msg, other], # msg is in the first place of the buffer + [other, msg], # msg is in the second and last place of the buffer + [other, msg, other] # msg is in the middle of the buffer + )) +def test_retrieve_message_success(message_buffer: LockedMessageBuffer, buffer): + message_buffer._messages = buffer + original_length = len(buffer) + assert message_buffer.wait_for_message(cid) == msg + assert len(message_buffer._messages) == original_length - 1 + + +@pytest.mark.parametrize("buffer", ( + [], # no message in buffer + [other], # other message in buffer + )) +def test_retrieve_message_fail(message_buffer: LockedMessageBuffer, buffer): + message_buffer._messages = buffer + with pytest.raises(TimeoutError): + message_buffer.wait_for_message(conversation_id=cid, timeout=0.01) + + +@pytest.mark.parametrize("length", (1, 3, 7)) +def test_length_of_buffer(message_buffer: LockedMessageBuffer, length: int): + message_buffer._messages = length * [msg] + assert len(message_buffer) == length + + +# Test CommunicatorPipe +class Test_CommunicatorPipe_send_pipe: + def test_send_pipe_message(self, communicator: CommunicatorPipe): + 
communicator.socket.send_multipart = MagicMock() # type: ignore[method-assign] + communicator._send_pipe_message(PipeCommands.LOCAL_COMMAND, b"abc") + # assert + communicator.socket.send_multipart.assert_called_once_with( + (PipeCommands.LOCAL_COMMAND, b"abc") + ) + + def test_raise_ConnectionError_on_zmq_error(self, communicator: CommunicatorPipe): + communicator.socket.send_multipart = MagicMock( # type: ignore[method-assign] + side_effect=zmq.ZMQError(128, "not a socket") + ) + # act + with pytest.raises(ConnectionRefusedError): + communicator._send_pipe_message(PipeCommands.LOCAL_COMMAND, b"c") + + +# Test PipeHandler +@pytest.fixture +def pipe_handler() -> PipeHandler: + """With fake contexts, that is with a broken pipe.""" + pipe_handler = PipeHandler(name="handler", context=FakeContext()) # type: ignore + return pipe_handler + + +@pytest.fixture +def pipe_handler_pipe(): + """With a working pipe!""" + pipe_handler = PipeHandler(name="handler", context=FakeContext()) # type: ignore + pipe_handler.internal_pipe = zmq.Context.instance().socket(zmq.PULL) + pipe_handler.pipe_port = pipe_handler.internal_pipe.bind_to_random_port( + "inproc://listenerPipe", min_port=12345) + yield pipe_handler + pipe_handler.close() + + +@pytest.fixture +def communicator(pipe_handler_pipe: PipeHandler) -> CommunicatorPipe: + """Communicator of `pipe_handler_pipe`.""" + return pipe_handler_pipe.get_communicator() + + +def test_close_closes_all_communicators( + pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe +): + pipe_handler_pipe.close() + assert communicator.socket.closed is True + + +class Test_PipeHandler_read_message: + def test_handle_response(self, pipe_handler: PipeHandler): + message = Message("rec", "send") + pipe_handler.socket._r = [message.to_frames()] # type: ignore + pipe_handler.message_buffer.add_conversation_id(message.conversation_id) + # act + with pytest.raises(TimeoutError): + pipe_handler.read_message() + assert 
pipe_handler.message_buffer.wait_for_message(message.conversation_id) == message + + def test_handle_request(self, pipe_handler: PipeHandler, caplog: pytest.LogCaptureFixture): + """Message is not a response and should be handled by the MessageHandler.""" + message = Message("rec", "send") + pipe_handler.socket._r = [message.to_frames()] # type: ignore + # act and assert + assert pipe_handler.read_message() == message + + +def test_invalid_pipe_message(pipe_handler: PipeHandler, caplog: pytest.LogCaptureFixture): + caplog.set_level(10) + pipe_handler.handle_pipe_message([b"abc"]) + assert caplog.messages[0].startswith("Received unknown") + + +class Test_get_communicator: + @pytest.fixture + def pipe_handler_setup(self): + pipe_handler = PipeHandler(name="handler", context=FakeContext()) # type: ignore + communicator = pipe_handler.get_communicator(context=FakeContext()) # type: ignore + pipe_handler.external_pipe = communicator # type: ignore + return pipe_handler + + def test_external_pipe_type(self, pipe_handler_setup: PipeHandler): + assert isinstance(pipe_handler_setup.external_pipe, CommunicatorPipe) # type: ignore + + def test_pipe_ports_match(self, pipe_handler_setup: PipeHandler): + port_number = pipe_handler_setup.pipe_port + assert port_number == 5 # due to FakeSocket + assert pipe_handler_setup.internal_pipe.addr == "inproc://listenerPipe" + assert pipe_handler_setup.external_pipe.socket.addr == "inproc://listenerPipe:5" # type: ignore # noqa + + def test_second_call_returns_same_communicator(self, pipe_handler_setup: PipeHandler): + com2 = pipe_handler_setup.get_communicator() + assert com2 == pipe_handler_setup.external_pipe # type: ignore + + +def test_close_all_communicators(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + pipe_handler_pipe.close_all_communicators() + assert communicator.socket.closed is True + + +def test_communicator_send_message(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + message = 
Message("rec", "send") + pipe_handler_pipe._send_frames = MagicMock() # type: ignore[method-assign] + communicator.send_message(message) + pipe_handler_pipe.read_and_handle_pipe_message() + # assert that the message is actually sent + pipe_handler_pipe._send_frames.assert_called_once_with(frames=message.to_frames()) + + +def test_communicator_send_message_without_sender(pipe_handler_pipe: PipeHandler, + communicator: CommunicatorPipe): + message = Message("rec", sender="") + pipe_handler_pipe._send_frames = MagicMock() # type: ignore[method-assign] + communicator.send_message(message) + pipe_handler_pipe.read_and_handle_pipe_message() + # assert that the message is actually sent + message.sender = b"handler" # should have been added by the handler + pipe_handler_pipe._send_frames.assert_called_once_with(frames=message.to_frames()) + + +def test_communicator_read_message(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + response = Message("handler", "rec", conversation_id=cid) + pipe_handler_pipe.message_buffer.add_conversation_id(cid) + pipe_handler_pipe.message_buffer.add_message(response) + # act + read = communicator.read_message(cid) + assert read == response + + +def test_communicator_ask_message(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + message = Message("rec", "handler", conversation_id=cid) + response = Message("handler", "rec", conversation_id=cid) + pipe_handler_pipe._send_frames = MagicMock() # type: ignore[method-assign] + pipe_handler_pipe.message_buffer.add_conversation_id(cid) + pipe_handler_pipe.message_buffer.add_message(response) + # act + read = communicator.ask_message(message) + pipe_handler_pipe.read_and_handle_pipe_message() + # assert + assert read == response + pipe_handler_pipe._send_frames.assert_called_once_with(frames=message.to_frames()) + + +def test_communicator_sign_in(communicator: CommunicatorPipe): + with pytest.raises(NotImplementedError): + communicator.sign_in() + + +def 
test_communicator_sign_out(communicator: CommunicatorPipe): + with pytest.raises(NotImplementedError): + communicator.sign_out() + + +def test_communicator_subscribe(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + pipe_handler_pipe.subscribe_single = MagicMock() # type: ignore[method-assign] + # act + communicator.subscribe_single(b"topic") + pipe_handler_pipe.read_and_handle_pipe_message() + # assert + pipe_handler_pipe.subscribe_single.assert_called_once_with(topic=b"topic") + + +def test_communicator_unsubscribe(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + pipe_handler_pipe.unsubscribe_single = MagicMock() # type: ignore[method-assign] + # act + communicator.unsubscribe_single(b"topic") + pipe_handler_pipe.read_and_handle_pipe_message() + # assert + pipe_handler_pipe.unsubscribe_single.assert_called_once_with(topic=b"topic") + + +def test_communicator_unsubscribe_all(pipe_handler_pipe: PipeHandler, + communicator: CommunicatorPipe): + pipe_handler_pipe.unsubscribe_all = MagicMock() # type: ignore[method-assign] + # act + communicator.unsubscribe_all() + pipe_handler_pipe.read_and_handle_pipe_message() + # assert + pipe_handler_pipe.unsubscribe_all.assert_called_once() + + +def test_communicator_rename(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + pipe_handler_pipe.sign_in = MagicMock() # type: ignore[method-assign] + pipe_handler_pipe.sign_out = MagicMock() # type: ignore[method-assign] + # act + communicator.name = "new name" + pipe_handler_pipe.read_and_handle_pipe_message() + # assert + pipe_handler_pipe.sign_out.assert_called_once() + assert pipe_handler_pipe.name == "new name" + assert communicator.name == "new name" + pipe_handler_pipe.sign_in.assert_called_once() + + +def test_register_name_change_method(pipe_handler: PipeHandler): + method = MagicMock() + pipe_handler.register_on_name_change_method(method) + pipe_handler.set_full_name("new full name") + # assert + 
method.assert_called_once_with("new full name") + + +def test_unregister_name_change_method(pipe_handler: PipeHandler): + method = MagicMock() + pipe_handler.register_on_name_change_method(method) + assert method in pipe_handler._on_name_change_methods + # act + pipe_handler.unregister_on_name_change_method(method) + assert method not in pipe_handler._on_name_change_methods + + +def test_handle_local_method(pipe_handler_pipe: PipeHandler, communicator: CommunicatorPipe): + cid = communicator._send_handler(method="pong") + pipe_handler_pipe.read_and_handle_pipe_message() + assert communicator._read_handler(cid) is None + + +def test_ask_handler(communicator: CommunicatorPipe): + communicator._send_handler = MagicMock() # type: ignore[method-assign] + communicator._read_handler = MagicMock() # type: ignore[method-assign] + communicator._read_handler.return_value = 5 + # act + result = communicator.ask_handler(method="method", timeout=1) + assert result == 5 + communicator._send_handler.assert_called_once_with(method="method", timeout=1) + communicator._read_handler.assert_called_once() diff --git a/tests/utils/test_qt_listener.py b/tests/utils/test_qt_listener.py new file mode 100644 index 000000000..3c509d438 --- /dev/null +++ b/tests/utils/test_qt_listener.py @@ -0,0 +1,87 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# + +import pytest + +from pyleco.test import FakeCommunicator, FakeContext +from pyleco.core.message import Message, MessageTypes + +try: + from pyleco.utils.qt_listener import QtListener, QtPipeHandler, ListenerSignals +except ModuleNotFoundError: + pytest.skip(reason="qtpy not installed.", allow_module_level=True) + +cid = b"conversation_id;" + + +@pytest.fixture +def signal(): + class FakeSignal: + def emit(self, message: Message): + self._content = message + return FakeSignal() + + +@pytest.fixture +def qt_listener(signal) -> QtListener: + qt_listener = QtListener(name="test") # type: ignore + qt_listener.communicator = FakeCommunicator(name="N.Pipe") # type: ignore + qt_listener.signals.message = signal + return qt_listener + + +@pytest.fixture +def qt_handler(signal) -> QtPipeHandler: + handler = QtPipeHandler(name="handler", + context=FakeContext(), # type: ignore + signals=ListenerSignals()) + handler.signals.message = signal + return handler + + +class Test_handle_message: + def test_handle_valid_jsonrpc(self, qt_handler: QtPipeHandler): + msg = Message("N.Pipe", "sender", + data={"jsonrpc": "2.0", "method": "abc", "id": 6}, + message_type=MessageTypes.JSON, + conversation_id=cid, + ) + qt_handler.handle_message(msg) + assert qt_handler.signals.message._content == msg # type: ignore + + def test_empty_message(self, qt_handler: QtPipeHandler): + msg = Message("N.Pipe", "sender") + qt_handler.handle_message(msg) + assert qt_handler.signals.message._content == msg # type: ignore + 
+ def test_local_method(self, qt_handler: QtPipeHandler): + msg = Message("handler", "sender", + conversation_id=cid, message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "method": "pong", "id": 3}) + qt_handler.handle_message(msg) + assert Message.from_frames(*qt_handler.socket._s[0]) == Message( # type: ignore + "sender", "handler", conversation_id=cid, message_type=MessageTypes.JSON, + data={"jsonrpc": "2.0", "result": None, "id": 3} + ) + diff --git a/tests/utils/test_zmq_log_handler.py b/tests/utils/test_zmq_log_handler.py new file mode 100644 index 000000000..73c8f19b6 --- /dev/null +++ b/tests/utils/test_zmq_log_handler.py @@ -0,0 +1,48 @@ +# +# This file is part of the PyLECO package. +# +# Copyright (c) 2023-2024 PyLECO Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + +import pytest + +from pyleco.test import FakeContext +from pyleco.utils.zmq_log_handler import ZmqLogHandler, DataMessage + + +@pytest.fixture +def handler() -> ZmqLogHandler: + return ZmqLogHandler(context=FakeContext(), full_name="fullname", port=12345) # type: ignore + + +def test_init_(handler: ZmqLogHandler): + assert handler.full_name == "fullname" + + +def test_init_address(handler: ZmqLogHandler): + assert handler.queue.socket.addr == "tcp://localhost:12345" # type: ignore + + +def test_enqueue(handler: ZmqLogHandler): + handler.enqueue("whatever") + message = DataMessage.from_frames(*handler.queue.socket._s.pop()) # type: ignore + assert message.topic == b"fullname" + assert message.payload == [b'whatever']