diff --git a/.github/workflows/pythonbuild.yml b/.github/workflows/pythonbuild.yml
index b58b61ac95..378b841d21 100644
--- a/.github/workflows/pythonbuild.yml
+++ b/.github/workflows/pythonbuild.yml
@@ -149,6 +149,41 @@ jobs:
fail_ci_if_error: false
files: coverage.xml
+ test-hypothesis:
+ needs:
+ - detect-python-versions
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest]
+ python-version: ${{fromJson(needs.detect-python-versions.outputs.python-versions)}}
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Cache pip
+ uses: actions/cache@v3
+ with:
+ # This path is specific to Ubuntu
+ path: ~/.cache/pip
+ # Look to see if there is a cache hit for the corresponding requirements files
+ key: ${{ format('{0}-pip-{1}', runner.os, hashFiles('dev-requirements.in', 'requirements.in')) }}
+ - name: Install dependencies
+ run: make setup && pip freeze
+ - name: Test with coverage
+ env:
+ FLYTEKIT_HYPOTHESIS_PROFILE: ci
+ run: |
+ make unit_test_hypothesis
+ - name: Codecov
+ uses: codecov/codecov-action@v3.1.4
+ with:
+ fail_ci_if_error: false
+ files: coverage.xml
+
test-serialization:
needs:
- detect-python-versions
diff --git a/Makefile b/Makefile
index 859b0aaf44..a8317faa17 100644
--- a/Makefile
+++ b/Makefile
@@ -62,10 +62,13 @@ unit_test_extras_codecov:
unit_test:
# Skip all extra tests and run them with the necessary env var set so that a working (albeit slower)
# library is used to serialize/deserialize protobufs is used.
- $(PYTEST_AND_OPTS) -m "not (serial or sandbox_test)" tests/flytekit/unit/ --ignore=tests/flytekit/unit/extras/ --ignore=tests/flytekit/unit/models --ignore=tests/flytekit/unit/extend ${CODECOV_OPTS}
+ $(PYTEST_AND_OPTS) -m "not (serial or sandbox_test or hypothesis)" tests/flytekit/unit/ --ignore=tests/flytekit/unit/extras/ --ignore=tests/flytekit/unit/models --ignore=tests/flytekit/unit/extend ${CODECOV_OPTS}
# Run serial tests without any parallelism
$(PYTEST) -m "serial" tests/flytekit/unit/ --ignore=tests/flytekit/unit/extras/ --ignore=tests/flytekit/unit/models --ignore=tests/flytekit/unit/extend ${CODECOV_OPTS}
+.PHONY: unit_test_hypothesis
+unit_test_hypothesis:
+ $(PYTEST_AND_OPTS) -m "hypothesis" tests/flytekit/unit/experimental ${CODECOV_OPTS}
.PHONY: unit_test_extras
unit_test_extras:
diff --git a/flytekit/core/python_function_task.py b/flytekit/core/python_function_task.py
index 147f15bbb3..f97d96296e 100644
--- a/flytekit/core/python_function_task.py
+++ b/flytekit/core/python_function_task.py
@@ -349,15 +349,21 @@ def dynamic_execute(self, task_function: Callable, **kwargs) -> Any:
raise ValueError(f"Invalid execution provided, execution state: {ctx.execution_state}")
def _write_decks(self, native_inputs, native_outputs_as_map, ctx, new_user_params):
- # These errors are raised if the source code can not be retrieved
- with suppress(OSError, TypeError):
- source_code = inspect.getsource(self._task_function)
-
+ if self._disable_deck is False:
from flytekit.deck import Deck
- from flytekit.deck.renderer import SourceCodeRenderer
+ from flytekit.deck.renderer import PythonDependencyRenderer
+
+ # These errors are raised if the source code can not be retrieved
+ with suppress(OSError, TypeError):
+ source_code = inspect.getsource(self._task_function)
+ from flytekit.deck.renderer import SourceCodeRenderer
+
+ source_code_deck = Deck("Source Code")
+ renderer = SourceCodeRenderer()
+ source_code_deck.append(renderer.to_html(source_code))
- source_code_deck = Deck("Source Code")
- renderer = SourceCodeRenderer()
- source_code_deck.append(renderer.to_html(source_code))
+ python_dependencies_deck = Deck("Dependencies")
+ renderer = PythonDependencyRenderer()
+ python_dependencies_deck.append(renderer.to_html())
return super()._write_decks(native_inputs, native_outputs_as_map, ctx, new_user_params)
diff --git a/flytekit/deck/renderer.py b/flytekit/deck/renderer.py
index 7f4913f6d9..51157dc876 100644
--- a/flytekit/deck/renderer.py
+++ b/flytekit/deck/renderer.py
@@ -86,3 +86,77 @@ def to_html(self, source_code: str) -> str:
css = formatter.get_style_defs(".highlight").replace("#fff0f0", "#ffffff")
html = highlight(source_code, PythonLexer(), formatter)
return f"{html}"
+
+
+class PythonDependencyRenderer:
+    """
+    PythonDependencyRenderer produces an HTML deck listing the Python packages
+    installed in the current interpreter, as reported by ``pip list`` and
+    ``pip freeze``.
+    """
+
+    def __init__(self, title: str = "Dependencies"):
+        self._title = title
+
+    def to_html(self) -> str:
+        import json
+        import subprocess
+        import sys
+
+        from flytekit.loggers import logger
+
+        try:
+            # `pip list --format json` gives structured name/version pairs for the table.
+            installed_packages = json.loads(
+                subprocess.check_output([sys.executable, "-m", "pip", "list", "--format", "json"])
+            )
+            # `pip freeze` output is embedded verbatim so users can copy a requirements.txt.
+            requirements_txt = (
+                subprocess.check_output([sys.executable, "-m", "pip", "freeze"])
+                .decode("utf-8")
+                .replace("\\n", "\n")
+                .rstrip()
+            )
+        except subprocess.CalledProcessError as e:
+            logger.error(f"Error occurred while fetching installed packages: {e}")
+            return "Error occurred while fetching installed packages."
+
+        table = "<table>\n<tr>\n<th>Name</th>\n<th>Version</th>\n</tr>\n"
+
+        for entry in installed_packages:
+            table += f"<tr>\n<td>{entry['name']}</td>\n<td>{entry['version']}</td>\n</tr>\n"
+
+        table += "</table>"
+
+        html = f"""
+        <html>
+        <head>
+        <title>Flyte Dependencies</title>
+        </head>
+        <body>
+
+        <h3>Python Dependencies</h3>
+
+        {table}
+
+        <button onclick="copyTable()">Copy table as requirements.txt</button>
+
+        <script>
+        function copyTable() {{
+            var requirements_txt = `{requirements_txt}`;
+            navigator.clipboard.writeText(requirements_txt);
+        }}
+        </script>
+        </body>
+        </html>
+        """
+        return html
diff --git a/pyproject.toml b/pyproject.toml
index 5678d342b7..45ce9e83a7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,6 +99,7 @@ log_cli_level = 20
markers = [
"sandbox_test: fake integration tests", # unit tests that are really integration tests that run on a sandbox environment
"serial: tests to avoid using with pytest-xdist",
+ "hypothesis: tests that use the hypothesis library",
]
[tool.coverage.report]
diff --git a/tests/flytekit/unit/conftest.py b/tests/flytekit/unit/conftest.py
index 7414b5e064..ed9b6ada98 100644
--- a/tests/flytekit/unit/conftest.py
+++ b/tests/flytekit/unit/conftest.py
@@ -1,4 +1,7 @@
+import os
+
import pytest
+from hypothesis import settings
from flytekit.image_spec.image_spec import ImageSpecBuilder
@@ -11,3 +14,9 @@ def build_image(self, img):
@pytest.fixture()
def mock_image_spec_builder():
return MockImageSpecBuilder()
+
+
+settings.register_profile("ci", max_examples=5, deadline=100_000)
+settings.register_profile("dev", max_examples=10, deadline=10_000)
+
+settings.load_profile(os.getenv("FLYTEKIT_HYPOTHESIS_PROFILE", "dev"))
diff --git a/tests/flytekit/unit/deck/test_deck.py b/tests/flytekit/unit/deck/test_deck.py
index 45056ae283..9cf497bccd 100644
--- a/tests/flytekit/unit/deck/test_deck.py
+++ b/tests/flytekit/unit/deck/test_deck.py
@@ -3,12 +3,13 @@
import pytest
from markdown_it import MarkdownIt
-from mock import mock
+from mock import mock, patch
import flytekit
from flytekit import Deck, FlyteContextManager, task
from flytekit.deck import MarkdownRenderer, SourceCodeRenderer, TopFrameRenderer
from flytekit.deck.deck import _output_deck
+from flytekit.deck.renderer import PythonDependencyRenderer
@pytest.mark.skipif("pandas" not in sys.modules, reason="Pandas is not installed.")
@@ -50,9 +51,9 @@ def test_timeline_deck():
@pytest.mark.parametrize(
"disable_deck,expected_decks",
[
- (None, 2), # time line deck + source code deck
- (False, 4), # time line deck + source code deck + input and output decks
- (True, 2), # time line deck + source code deck
+ (None, 1), # time line deck
+ (False, 5), # time line deck + source code deck + python dependency deck + input and output decks
+ (True, 1), # time line deck
],
)
def test_deck_for_task(disable_deck, expected_decks):
@@ -75,11 +76,21 @@ def t1(a: int) -> str:
@pytest.mark.parametrize(
"enable_deck,disable_deck, expected_decks, expect_error",
[
- (None, None, 3, False), # default deck and time line deck + source code deck
- (None, False, 5, False), # default deck and time line deck + source code deck + input and output decks
- (None, True, 3, False), # default deck and time line deck + source code deck
- (True, None, 5, False), # default deck and time line deck + source code deck + input and output decks
- (False, None, 3, False), # default deck and time line deck + source code deck
+ (None, None, 2, False), # default deck and time line deck
+ (
+ None,
+ False,
+ 6,
+ False,
+ ), # default deck and time line deck + source code deck + python dependency deck + input and output decks
+ (None, True, 2, False), # default deck and time line deck
+ (
+ True,
+ None,
+ 6,
+ False,
+ ), # default deck and time line deck + source code deck + python dependency deck + input and output decks
+ (False, None, 2, False), # default deck and time line deck
(True, True, -1, True), # Set both disable_deck and enable_deck to True and confirm that it fails
(False, False, -1, True), # Set both disable_deck and enable_deck to False and confirm that it fails
],
@@ -176,3 +187,19 @@ def test_source_code_renderer():
# Assert that the color #ffffff is used instead of #fff0f0
assert "#ffffff" in result
assert "#fff0f0" not in result
+
+
+def test_python_dependency_renderer():
+ with patch("subprocess.check_output") as mock_check_output:
+ mock_check_output.return_value = '[{"name": "numpy", "version": "1.21.0"}]'.encode()
+ renderer = PythonDependencyRenderer()
+ result = renderer.to_html()
+ assert "numpy" in result
+ assert "1.21.0" in result
+
+ # Assert that the result includes parts of the python dependency
+ assert "Name" in result
+ assert "Version" in result
+
+ # Assert that the button of copy
+ assert 'button onclick="copyTable()"' in result
diff --git a/tests/flytekit/unit/experimental/test_eager_workflows.py b/tests/flytekit/unit/experimental/test_eager_workflows.py
index 9760f8c008..c25e2ae762 100644
--- a/tests/flytekit/unit/experimental/test_eager_workflows.py
+++ b/tests/flytekit/unit/experimental/test_eager_workflows.py
@@ -6,7 +6,7 @@
import hypothesis.strategies as st
import pytest
-from hypothesis import given, settings
+from hypothesis import given
from flytekit import dynamic, task, workflow
from flytekit.exceptions.user import FlyteValidationException
@@ -15,7 +15,6 @@
from flytekit.types.file import FlyteFile
from flytekit.types.structured import StructuredDataset
-DEADLINE = 2000
INTEGER_ST = st.integers(min_value=-10_000_000, max_value=10_000_000)
@@ -48,7 +47,7 @@ def dynamic_wf(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_simple_eager_workflow(x_input: int):
"""Testing simple eager workflow with just tasks."""
@@ -62,7 +61,7 @@ async def eager_wf(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_conditional_eager_workflow(x_input: int):
"""Test eager workflow with conditional logic."""
@@ -80,7 +79,7 @@ async def eager_wf(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_try_except_eager_workflow(x_input: int):
"""Test eager workflow with try/except logic."""
@@ -99,7 +98,7 @@ async def eager_wf(x: int) -> int:
@given(x_input=INTEGER_ST, n_input=st.integers(min_value=1, max_value=20))
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_gather_eager_workflow(x_input: int, n_input: int):
"""Test eager workflow with asyncio gather."""
@@ -113,7 +112,7 @@ async def eager_wf(x: int, n: int) -> typing.List[int]:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_eager_workflow_with_dynamic_exception(x_input: int):
"""Test eager workflow with dynamic workflow is not supported."""
@@ -131,7 +130,7 @@ async def nested_eager_wf(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_nested_eager_workflow(x_input: int):
"""Testing running nested eager workflows."""
@@ -145,7 +144,7 @@ async def eager_wf(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_eager_workflow_within_workflow(x_input: int):
"""Testing running eager workflow within a static workflow."""
@@ -168,7 +167,7 @@ def subworkflow(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_workflow_within_eager_workflow(x_input: int):
"""Testing running a static workflow within an eager workflow."""
@@ -182,7 +181,7 @@ async def eager_wf(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
+@pytest.mark.hypothesis
def test_local_task_eager_workflow_exception(x_input: int):
"""Testing simple eager workflow with a local function task doesn't work."""
@@ -199,8 +198,8 @@ async def eager_wf_with_local(x: int) -> int:
@given(x_input=INTEGER_ST)
-@settings(deadline=DEADLINE, max_examples=5)
@pytest.mark.filterwarnings("ignore:coroutine 'AsyncEntity.__call__' was never awaited")
+@pytest.mark.hypothesis
def test_local_workflow_within_eager_workflow_exception(x_input: int):
"""Cannot call a locally-defined workflow within an eager workflow"""
@@ -243,6 +242,7 @@ def create_directory() -> FlyteDirectory:
@pytest.mark.skipif("pandas" not in sys.modules, reason="Pandas is not installed.")
+@pytest.mark.hypothesis
def test_eager_workflow_with_offloaded_types():
"""Test eager workflow that eager workflows work with offloaded types."""
import pandas as pd