[DO NOT MERGE] Python 3.8 deprecation workflow validation #3112
GitHub Actions / Test Results
failed
Aug 27, 2024 in 0s
1 fail, 19 skipped, 2 pass in 48m 47s
22 tests    2 ✅   48m 47s ⏱️
 1 suites  19 💤
 1 files    1 ❌
Results for commit 310bec9.
Annotations
Check warning on line 0 in apache_beam.examples.wordcount_it_test.WordCountIT
github-actions / Test Results
test_wordcount_it_with_prebuilt_sdk_container_cloud_build (apache_beam.examples.wordcount_it_test.WordCountIT) failed
sdks/python/pytest-beam_python3.12_sdk.xml [took 33m 6s]
Raw output
Failed: Timeout >1800.0s
self = <apache_beam.examples.wordcount_it_test.WordCountIT testMethod=test_wordcount_it_with_prebuilt_sdk_container_cloud_build>

    @pytest.mark.it_validatescontainer
    def test_wordcount_it_with_prebuilt_sdk_container_cloud_build(self):
>       self._run_wordcount_it(
            wordcount.run,
            experiment='beam_fn_api',
            prebuild_sdk_container_engine='cloud_build')

apache_beam/examples/wordcount_it_test.py:102:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/examples/wordcount_it_test.py:150: in _run_wordcount_it
    run_wordcount(
apache_beam/examples/wordcount.py:87: in run
    with beam.Pipeline(options=pipeline_options) as p:
apache_beam/pipeline.py:620: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:570: in run
    self._options).run(False)
apache_beam/pipeline.py:594: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:66: in run_pipeline
    self.result.wait_until_finish(duration=wait_duration)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <DataflowPipelineResult <Job
 clientRequestId: '20240827161639922405-4244'
 createTime: '2024-08-27T16:16:40.958418Z'
 ...024-08-27T16:16:40.958418Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)> at 0x7ed33463a150>
duration = None

    def wait_until_finish(self, duration=None):
      if not self.is_in_terminal_state():
        if not self.has_job:
          raise IOError('Failed to get the Dataflow job id.')
        consoleUrl = (
            "Console URL: https://console.cloud.google.com/"
            f"dataflow/jobs/<RegionId>/{self.job_id()}"
            "?project=<ProjectId>")
        thread = threading.Thread(
            target=DataflowRunner.poll_for_job_completion,
            args=(self._runner, self, duration))

        # Mark the thread as a daemon thread so a keyboard interrupt on the main
        # thread will terminate everything. This is also the reason we will not
        # use thread.join() to wait for the polling thread.
        thread.daemon = True
        thread.start()
        while thread.is_alive():
>         time.sleep(5.0)
E       Failed: Timeout >1800.0s

apache_beam/runners/dataflow/dataflow_runner.py:794: Failed
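Note: the failure above is pytest-timeout's 1800 s hard limit firing while wait_until_finish polls the still-running Dataflow job every 5 s. As a minimal sketch of that same polling contract (not taken from this run: the pipeline options, the toy word-count transforms, and the 30-minute window below are placeholder assumptions), a caller can bound the wait itself and cancel the job once the window expires instead of relying on the test-level timeout:

    # Hedged sketch, not part of the CI log. Placeholder options; a real run
    # also needs --project, --region, --temp_location, etc.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions
    from apache_beam.runners.runner import PipelineState

    pipeline_options = PipelineOptions(runner='DataflowRunner')

    p = beam.Pipeline(options=pipeline_options)
    (p
     | beam.Create(['to be or not to be'])
     | beam.FlatMap(str.split)
     | beam.combiners.Count.PerElement())

    result = p.run()
    # duration is in milliseconds; the call returns the job state once the job
    # reaches a terminal state or the wait window expires.
    state = result.wait_until_finish(duration=30 * 60 * 1000)
    if state not in (PipelineState.DONE,
                     PipelineState.FAILED,
                     PipelineState.CANCELLED):
      # Still running after the window: cancel rather than leave the Dataflow
      # job orphaned when the test harness gives up.
      result.cancel()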