
[Python][Pipeline Options] Recursively Get All Subclasses #31141

Closed

[Pipeline Options] Recursively Get All Subclasses

GitHub Actions / Test Results failed May 1, 2024 in 0s

2 errors, 1,785 failed, 897 skipped, 4,861 passed in 28m 49s

10 files, 10 suites, 28m 49s ⏱️
7,545 tests: 4,861 ✅ passed, 897 💤 skipped, 1,785 ❌ failed, 2 🔥 errors
7,743 runs:  4,890 ✅ passed, 1,066 💤 skipped, 1,785 ❌ failed, 2 🔥 errors

Results for commit 15ad893.

Annotations

test_dynamic_timer_clear_then_set_timer (apache_beam.transforms.userstate_test.StatefulDoFnOnDirectRunnerTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage_no_xdist.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.transforms.userstate_test.StatefulDoFnOnDirectRunnerTest testMethod=test_dynamic_timer_clear_then_set_timer>

    @pytest.mark.no_xdist
    @pytest.mark.timeout(10)
    def test_dynamic_timer_clear_then_set_timer(self):
      class EmitTwoEvents(DoFn):
        EMIT_CLEAR_SET_TIMER = TimerSpec('emitclear', TimeDomain.WATERMARK)
    
        def process(self, element, emit=DoFn.TimerParam(EMIT_CLEAR_SET_TIMER)):
          yield ('1', 'set')
          emit.set(1)
    
        @on_timer(EMIT_CLEAR_SET_TIMER)
        def emit_clear(self):
          yield ('1', 'clear')
    
      class DynamicTimerDoFn(DoFn):
        EMIT_TIMER_FAMILY = TimerSpec('emit', TimeDomain.WATERMARK)
    
        def process(self, element, emit=DoFn.TimerParam(EMIT_TIMER_FAMILY)):
          if element[1] == 'set':
            emit.set(10, dynamic_timer_tag='emit1')
            emit.set(20, dynamic_timer_tag='emit2')
          if element[1] == 'clear':
            emit.set(30, dynamic_timer_tag='emit3')
            emit.clear(dynamic_timer_tag='emit3')
            emit.set(40, dynamic_timer_tag='emit3')
          return []
    
        @on_timer(EMIT_TIMER_FAMILY)
        def emit_callback(
            self, ts=DoFn.TimestampParam, tag=DoFn.DynamicTimerTagParam):
          yield (tag, ts)
    
      with TestPipeline() as p:
        res = (
            p
            | beam.Create([('1', 'impulse')])
            | beam.ParDo(EmitTwoEvents())
            | beam.ParDo(DynamicTimerDoFn()))
>       assert_that(res, equal_to([('emit1', 10), ('emit2', 20), ('emit3', 40)]))

apache_beam/transforms/userstate_test.py:1033: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7bbfa3b87730>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError
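
Every failure in this report reduces to the same pattern: while get_all_options() builds the argparse parser, _add_argparse_args is invoked more than once for the same PipelineOptions subclass, so the parser sees a second registration of an option string it already holds. A minimal sketch of that failure mode in plain argparse (not Beam code), assuming only the default 'error' conflict handler:

    import argparse

    # Registering the same option string twice on one parser raises
    # argparse.ArgumentError, exactly as in the traceback above.
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataflow_endpoint')
    try:
        # Second registration, e.g. from visiting the same options class
        # twice while collecting subclasses recursively.
        parser.add_argument('--dataflow_endpoint')
    except argparse.ArgumentError as err:
        print(err)  # argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint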

1 out of 2 runs failed: test_handler_with_batch_sizes (apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest testMethod=test_handler_with_batch_sizes>

    def test_handler_with_batch_sizes(self):
      self.embedding_conig.max_batch_size = 100
      self.embedding_conig.min_batch_size = 10
      data = [
          {
              'x': "Hello world"
          },
          {
              'x': "Apache Beam"
          },
      ] * 100
      expected_data = [{key: value[::-1]
                        for key, value in d.items()} for d in data]
      with beam.Pipeline() as p:
        result = (
            p
            | beam.Create(data)
            | base.MLTransform(
                write_artifact_location=self.artifact_location).with_transform(
                    self.embedding_conig))
>       assert_that(
            result,
            equal_to(expected_data),
        )

apache_beam/ml/transforms/base_test.py:413: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7adf69d3a280>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError
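
In the --slices failures, the colliding option comes from a test-only options class: the frame at apache_beam/pipeline_test.py:817 shows its _add_argparse_args registering '--slices'. That class follows the usual pattern for extending PipelineOptions, sketched below with an illustrative class name (the real class in pipeline_test.py is not shown in this log):

    from apache_beam.options.pipeline_options import PipelineOptions

    class SlicesOptions(PipelineOptions):  # illustrative name only
      @classmethod
      def _add_argparse_args(cls, parser):
        # Runs once per visit of this class; if a recursive subclass walk
        # reaches the class twice, the second call triggers the
        # "conflicting option string: --slices" error seen above.
        parser.add_argument('--slices', type=int)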

1 out of 2 runs failed: test_handler_on_columns_not_exist_in_input_data (apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest testMethod=test_handler_on_columns_not_exist_in_input_data>

    def test_handler_on_columns_not_exist_in_input_data(self):
      data = [
          {
              'x': "Hello world", 'y': "Apache Beam"
          },
          {
              'x': "Apache Beam", 'y': "Hello world"
          },
      ]
      self.embedding_conig.columns = ['x', 'y', 'a']
    
      with self.assertRaises(RuntimeError):
        with beam.Pipeline() as p:
>         _ = (
              p
              | beam.Create(data)
              | base.MLTransform(
                  write_artifact_location=self.artifact_location).with_transform(
                      self.embedding_conig))

apache_beam/ml/transforms/base_test.py:458: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError
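
The PR title points at the likely trigger: once subclasses of PipelineOptions are collected recursively, a class reachable through more than one base (or a walk that revisits nodes) gets its _add_argparse_args called repeatedly on the same parser. A hedged sketch of a recursive walk that deduplicates visits; the helper name is hypothetical and this is not Beam's implementation:

    def iter_all_subclasses(cls, _seen=None):
      # Walk the subclass tree depth-first while tracking visited classes,
      # so a class reachable via several bases is yielded only once and its
      # options are therefore registered only once.
      if _seen is None:
        _seen = set()
      for sub in cls.__subclasses__():
        if sub not in _seen:
          _seen.add(sub)
          yield sub
          yield from iter_all_subclasses(sub, _seen)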

1 out of 2 runs failed: test_handler_with_inconsistent_keys (apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest testMethod=test_handler_with_inconsistent_keys>

    def test_handler_with_inconsistent_keys(self):
      data = [
          {
              'x': 'foo', 'y': 'bar', 'z': 'baz'
          },
          {
              'x': 'foo2', 'y': 'bar2'
          },
          {
              'x': 'foo3', 'y': 'bar3', 'z': 'baz3'
          },
      ]
      self.embedding_conig.min_batch_size = 2
      with self.assertRaises(RuntimeError):
        with beam.Pipeline() as p:
>         _ = (
              p
              | beam.Create(data)
              | base.MLTransform(
                  write_artifact_location=self.artifact_location).with_transform(
                      self.embedding_conig))

apache_beam/ml/transforms/base_test.py:495: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 2 runs failed: test_dlq_with_embeddings (apache_beam.ml.transforms.base_test.MLTransformDLQTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.MLTransformDLQTest testMethod=test_dlq_with_embeddings>

    def test_dlq_with_embeddings(self):
      with beam.Pipeline() as p:
        good, bad = (
            p
            | beam.Create([{
                'x': 1
            },
            {
              'x': 3,
            },
            {
              'x': 'Hello'
            }
            ],
            )
            | base.MLTransform(
                write_artifact_location=self.artifact_location).with_transform(
                    FakeEmbeddingsManager(
                      columns=['x'])).with_exception_handling())
    
        good_expected_elements = [{'x': 'olleH'}]
    
        assert_that(
            good,
            equal_to(good_expected_elements),
            label='good',
        )
    
        # batching happens in RunInference hence elements
        # are in lists in the bad pcoll.
        bad_expected_elements = [[{'x': 1}], [{'x': 3}]]
    
>       assert_that(
            bad | beam.Map(lambda x: x.element),
            equal_to(bad_expected_elements),
            label='bad',
        )

apache_beam/ml/transforms/base_test.py:677: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7adf6c00d460>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 2 runs failed: test_mltransform_with_dlq_and_extract_tranform_name (apache_beam.ml.transforms.base_test.MLTransformDLQTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.MLTransformDLQTest testMethod=test_mltransform_with_dlq_and_extract_tranform_name>

    def test_mltransform_with_dlq_and_extract_tranform_name(self):
      with beam.Pipeline() as p:
        good, bad = (
          p
          | beam.Create([{
              'x': 1
          },
          {
            'x': 3,
          },
          {
            'x': 'Hello'
          }
          ],
          )
          | base.MLTransform(
              write_artifact_location=self.artifact_location).with_transform(
                  FakeEmbeddingsManager(
                    columns=['x'])).with_exception_handling())
    
        good_expected_elements = [{'x': 'olleH'}]
        assert_that(
            good,
            equal_to(good_expected_elements),
            label='good',
        )
    
        bad_expected_transform_name = [
            'FakeEmbeddingsManager', 'FakeEmbeddingsManager'
        ]
    
>       assert_that(
            bad | beam.Map(lambda x: x.transform_name),
            equal_to(bad_expected_transform_name),
        )

apache_beam/ml/transforms/base_test.py:714: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7adf6c19fc10>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 2 runs failed: test_handler_with_list_data (apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest testMethod=test_handler_with_list_data>

    def test_handler_with_list_data(self):
      data = [{
          'x': ['Hello world', 'Apache Beam'],
      }, {
          'x': ['Apache Beam', 'Hello world'],
      }]
      with self.assertRaises(TypeError):
        with beam.Pipeline() as p:
>         _ = (
              p
              | beam.Create(data)
              | base.MLTransform(
                  write_artifact_location=self.artifact_location).with_transform(
                      self.embedding_conig))

apache_beam/ml/transforms/base_test.py:473: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 2 runs failed: test_handler_on_multiple_columns (apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest testMethod=test_handler_on_multiple_columns>

    def test_handler_on_multiple_columns(self):
      data = [
          {
              'x': "Hello world", 'y': "Apache Beam", 'z': 'unchanged'
          },
          {
              'x': "Apache Beam", 'y': "Hello world", 'z': 'unchanged'
          },
      ]
      self.embedding_conig.columns = ['x', 'y']
      expected_data = [{
          key: (value[::-1] if key in self.embedding_conig.columns else value)
          for key,
          value in d.items()
      } for d in data]
      with beam.Pipeline() as p:
        result = (
            p
            | beam.Create(data)
            | base.MLTransform(
                write_artifact_location=self.artifact_location).with_transform(
                    self.embedding_conig))
>       assert_that(
            result,
            equal_to(expected_data),
        )

apache_beam/ml/transforms/base_test.py:440: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7adf6c3dc850>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 4 runs failed: test_embeddings_with_read_artifact_location (apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest testMethod=test_embeddings_with_read_artifact_location>

    def test_embeddings_with_read_artifact_location(self):
      with beam.Pipeline() as p:
        embedding_config = VertexAITextEmbeddings(
            model_name=model_name, columns=[test_query_column])
    
        with beam.Pipeline() as p:
          data = (
              p
              | "CreateData" >> beam.Create([{
                  test_query_column: test_query
              }]))
>         _ = self.pipeline_with_configurable_artifact_location(
              pipeline=data,
              embedding_config=embedding_config,
              write_artifact_location=self.artifact_location)

apache_beam/ml/transforms/embeddings/vertex_ai_test.py:125: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7948e8aa0af0>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 4 runs failed: test_vertex_ai_text_embeddings (apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest testMethod=test_vertex_ai_text_embeddings>

    def test_vertex_ai_text_embeddings(self):
      embedding_config = VertexAITextEmbeddings(
          model_name=model_name, columns=[test_query_column])
      with beam.Pipeline() as pipeline:
        transformed_pcoll = (
            pipeline
            | "CreateData" >> beam.Create([{
                test_query_column: test_query
            }])
            | "MLTransform" >> MLTransform(
                write_artifact_location=self.artifact_location).with_transform(
                    embedding_config))
    
        def assert_element(element):
          assert len(element[test_query_column]) == 768
    
>       _ = (transformed_pcoll | beam.Map(assert_element))

apache_beam/ml/transforms/embeddings/vertex_ai_test.py:72: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7948e96a8ee0>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 4 runs failed: test_with_gcs_artifact_location (apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 1s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest testMethod=test_with_gcs_artifact_location>

    def test_with_gcs_artifact_location(self):
      with beam.Pipeline() as p:
        embedding_config = VertexAITextEmbeddings(
            model_name=model_name, columns=[test_query_column])
    
        with beam.Pipeline() as p:
          data = (
              p
              | "CreateData" >> beam.Create([{
                  test_query_column: test_query
              }]))
>         _ = self.pipeline_with_configurable_artifact_location(
              pipeline=data,
              embedding_config=embedding_config,
              write_artifact_location=self.gcs_artifact_location)

apache_beam/ml/transforms/embeddings/vertex_ai_test.py:175: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7948e8bac880>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 4 runs failed: test_with_int_data_types (apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.transforms.embeddings.vertex_ai_test.VertexAIEmbeddingsTest testMethod=test_with_int_data_types>

    def test_with_int_data_types(self):
      embedding_config = VertexAITextEmbeddings(
          model_name=model_name, columns=[test_query_column])
      with self.assertRaises(TypeError):
        with beam.Pipeline() as pipeline:
>         _ = (
              pipeline
              | "CreateData" >> beam.Create([{
                  test_query_column: 1
              }])
              | "MLTransform" >> MLTransform(
                  write_artifact_location=self.artifact_location).with_transform(
                      embedding_config))

apache_beam/ml/transforms/embeddings/vertex_ai_test.py:155: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

1 out of 2 runs failed: test_handler_with_dict_inputs (apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest)

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-cloudcoverage.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.ml.transforms.base_test.TextEmbeddingHandlerTest testMethod=test_handler_with_dict_inputs>

    def test_handler_with_dict_inputs(self):
      data = [
          {
              'x': "Hello world"
          },
          {
              'x': "Apache Beam"
          },
      ]
      expected_data = [{key: value[::-1]
                        for key, value in d.items()} for d in data]
      with beam.Pipeline() as p:
        result = (
            p
            | beam.Create(data)
            | base.MLTransform(
                write_artifact_location=self.artifact_location).with_transform(
                    self.embedding_conig))
>       assert_that(
            result,
            equal_to(expected_data),
        )

apache_beam/ml/transforms/base_test.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7adf6c02f4c0>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

test_check_torch_keyed_model_handler (apache_beam.examples.snippets.transforms.elementwise.runinference_test.RunInferenceStdoutTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.examples.snippets.transforms.elementwise.runinference_test.RunInferenceStdoutTest testMethod=test_check_torch_keyed_model_handler>
mock_stdout = <_io.StringIO object at 0x7b5e0dc09c10>

    @pytest.mark.uses_pytorch
    def test_check_torch_keyed_model_handler(self, mock_stdout):
>     runinference.torch_keyed_model_handler()

apache_beam/examples/snippets/transforms/elementwise/runinference_test.py:110: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/snippets/transforms/elementwise/runinference.py:97: in torch_keyed_model_handler
    test(predictions)
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7b5e0da001f0>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

test_env_vars_set_correctly_tensor_handler (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_env_vars_set_correctly_tensor_handler>

    def test_env_vars_set_correctly_tensor_handler(self):
      torch_model = PytorchLinearRegression(2, 1)
      torch_model.load_state_dict(
          OrderedDict([('linear.weight', torch.Tensor([[2.0, 3]])),
                       ('linear.bias', torch.Tensor([0.5]))]))
    
      torch_script_model = torch.jit.script(torch_model)
    
      torch_script_path = os.path.join(self.tmpdir, 'torch_script_model.pt')
    
      torch.jit.save(torch_script_model, torch_script_path)
    
      handler_with_vars = PytorchModelHandlerTensor(
          torch_script_model_path=torch_script_path, env_vars={'FOO': 'bar'})
      os.environ.pop('FOO', None)
      self.assertFalse('FOO' in os.environ)
      with TestPipeline() as pipeline:
        _ = (
            pipeline
            | 'start' >> beam.Create(TWO_FEATURES_EXAMPLES)
            | RunInference(handler_with_vars))
>       pipeline.run()

apache_beam/ml/inference/pytorch_inference_test.py:944: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7967b2102d00>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError
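
Every failure in this group comes down to the same mechanism: per the frames above, get_all_options() calls _add_argparse_args on each collected PipelineOptions subclass, and --dataflow_endpoint ends up registered on the same parser twice, which argparse rejects as a conflict. That is consistent with the same options subclass being reached more than once when subclasses are gathered recursively. The snippet below is a minimal, self-contained sketch (illustrative only, not Beam code) that reproduces the argparse behaviour in the traceback by adding one option string to a parser twice.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--dataflow_endpoint')

try:
    # Second registration of the same option string, e.g. because the same
    # options subclass is visited twice during recursive subclass collection.
    parser.add_argument('--dataflow_endpoint')
except argparse.ArgumentError as e:
    # Prints: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
    print(e)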

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_inference_torch_script_model (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_inference_torch_script_model>

    def test_inference_torch_script_model(self):
      torch_model = PytorchLinearRegression(2, 1)
      torch_model.load_state_dict(
          OrderedDict([('linear.weight', torch.Tensor([[2.0, 3]])),
                       ('linear.bias', torch.Tensor([0.5]))]))
    
      torch_script_model = torch.jit.script(torch_model)
    
      torch_script_path = os.path.join(self.tmpdir, 'torch_script_model.pt')
    
      torch.jit.save(torch_script_model, torch_script_path)
    
      model_handler = PytorchModelHandlerTensor(
          torch_script_model_path=torch_script_path)
    
      with TestPipeline() as pipeline:
        pcoll = pipeline | 'start' >> beam.Create(TWO_FEATURES_EXAMPLES)
        predictions = pcoll | RunInference(model_handler)
>       assert_that(
            predictions,
            equal_to(
                TWO_FEATURES_PREDICTIONS, equals_fn=_compare_prediction_result))

apache_beam/ml/inference/pytorch_inference_test.py:827: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7afe07aabd90>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_env_vars_set_correctly_keyed_tensor_handler (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_env_vars_set_correctly_keyed_tensor_handler>

    def test_env_vars_set_correctly_keyed_tensor_handler(self):
      os.environ.pop('FOO', None)
      self.assertFalse('FOO' in os.environ)
      with TestPipeline() as pipeline:
        inference_args = {
            'prediction_param_array': torch.from_numpy(
                np.array([1, 2], dtype="float32")),
            'prediction_param_bool': True
        }
    
        state_dict = OrderedDict([('linear.weight', torch.Tensor([[2.0]])),
                                  ('linear.bias', torch.Tensor([0.5]))])
        path = os.path.join(self.tmpdir, 'my_state_dict_path')
        torch.save(state_dict, path)
    
        handler_with_vars = PytorchModelHandlerKeyedTensor(
            env_vars={'FOO': 'bar'},
            state_dict_path=path,
            model_class=PytorchLinearRegressionKeyedBatchAndExtraInferenceArgs,
            model_params={
                'input_dim': 1, 'output_dim': 1
            })
        inference_args_side_input = (
            pipeline | 'create side' >> beam.Create(inference_args))
    
        _ = (
            pipeline
            | 'start' >> beam.Create(KEYED_TORCH_EXAMPLES)
            | RunInference(
                model_handler=handler_with_vars,
                inference_args=beam.pvalue.AsDict(inference_args_side_input)))
>       pipeline.run()

apache_beam/ml/inference/pytorch_inference_test.py:979: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7a53c6910850>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_pipeline_gcs_model_control_batching (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_pipeline_gcs_model_control_batching>

    @unittest.skipIf(GCSFileSystem is None, 'GCP dependencies are not installed')
    def test_pipeline_gcs_model_control_batching(self):
      with TestPipeline() as pipeline:
        examples = torch.from_numpy(
            np.array([1, 5, 3, 10], dtype="float32").reshape(-1, 1))
        expected_predictions = [
            PredictionResult(ex, pred) for ex,
            pred in zip(
                examples,
                torch.Tensor([example * 2.0 + 0.5
                              for example in examples]).reshape(-1, 1))
        ]
    
        def batch_validator_tensor_inference_fn(
            batch,
            model,
            device,
            inference_args,
            model_id,
        ):
          if len(batch) != 2:
            raise Exception(
                f'Expected batch of size 2, received batch of size {len(batch)}')
          return default_tensor_inference_fn(
              batch, model, device, inference_args, model_id)
    
    
        gs_pth = 'gs://apache-beam-ml/models/' \
            'pytorch_lin_reg_model_2x+0.5_state_dict.pth'
        model_handler = PytorchModelHandlerTensor(
            state_dict_path=gs_pth,
            model_class=PytorchLinearRegression,
            model_params={
                'input_dim': 1, 'output_dim': 1
            },
            inference_fn=batch_validator_tensor_inference_fn,
            min_batch_size=2,
            max_batch_size=2)
    
        pcoll = pipeline | 'start' >> beam.Create(examples)
        predictions = pcoll | RunInference(model_handler)
>       assert_that(
            predictions,
            equal_to(expected_predictions, equals_fn=_compare_prediction_result))

apache_beam/ml/inference/pytorch_inference_test.py:729: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7c1445a01280>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_pipeline_local_model_extra_inference_args_batching_args (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_pipeline_local_model_extra_inference_args_batching_args>

    def test_pipeline_local_model_extra_inference_args_batching_args(self):
      with TestPipeline() as pipeline:
        inference_args = {
            'prediction_param_array': torch.from_numpy(
                np.array([1, 2], dtype="float32")),
            'prediction_param_bool': True
        }
    
        state_dict = OrderedDict([('linear.weight', torch.Tensor([[2.0]])),
                                  ('linear.bias', torch.Tensor([0.5]))])
        path = os.path.join(self.tmpdir, 'my_state_dict_path')
        torch.save(state_dict, path)
    
        def batch_validator_keyed_tensor_inference_fn(
            batch,
            model,
            device,
            inference_args,
            model_id,
        ):
          if len(batch) != 2:
            raise Exception(
                f'Expected batch of size 2, received batch of size {len(batch)}')
          return default_keyed_tensor_inference_fn(
              batch, model, device, inference_args, model_id)
    
        model_handler = PytorchModelHandlerKeyedTensor(
            state_dict_path=path,
            model_class=PytorchLinearRegressionKeyedBatchAndExtraInferenceArgs,
            model_params={
                'input_dim': 1, 'output_dim': 1
            },
            inference_fn=batch_validator_keyed_tensor_inference_fn,
            min_batch_size=2,
            max_batch_size=2)
    
        pcoll = pipeline | 'start' >> beam.Create(KEYED_TORCH_EXAMPLES)
        inference_args_side_input = (
            pipeline | 'create side' >> beam.Create(inference_args))
        predictions = pcoll | RunInference(
            model_handler=model_handler,
            inference_args=beam.pvalue.AsDict(inference_args_side_input))
>       assert_that(
            predictions,
            equal_to(
                KEYED_TORCH_PREDICTIONS, equals_fn=_compare_prediction_result))

apache_beam/ml/inference/pytorch_inference_test.py:655: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7f60778771c0>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_gpu_auto_convert_to_cpu (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_gpu_auto_convert_to_cpu>

    def test_gpu_auto_convert_to_cpu(self):
      """
      This tests the scenario in which the user defines `device='GPU'` for the
      PytorchModelHandlerX, but runs the pipeline on a machine without GPU, we
      automatically detect this discrepancy and do automatic conversion to CPU.
      A warning is also logged to inform the user.
      """
      with self.assertLogs() as log:
        with TestPipeline() as pipeline:
          examples = torch.from_numpy(
              np.array([1, 5, 3, 10], dtype="float32").reshape(-1, 1))
    
          state_dict = OrderedDict([('linear.weight', torch.Tensor([[2.0]])),
                                    ('linear.bias', torch.Tensor([0.5]))])
          path = os.path.join(self.tmpdir, 'my_state_dict_path')
          torch.save(state_dict, path)
    
          model_handler = PytorchModelHandlerTensor(
              state_dict_path=path,
              model_class=PytorchLinearRegression,
              model_params={
                  'input_dim': 1, 'output_dim': 1
              },
              device='GPU')
          # Upon initialization, device is cuda
          self.assertEqual(model_handler._device, torch.device('cuda'))
    
          pcoll = pipeline | 'start' >> beam.Create(examples)
          # pylint: disable=expression-not-assigned
          pcoll | RunInference(model_handler)
    
          # During model loading, device converted to cuda
>         self.assertEqual(model_handler._device, torch.device('cuda'))

apache_beam/ml/inference/pytorch_inference_test.py:786: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7967b18c37f0>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_pipeline_gcs_model (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_pipeline_gcs_model>

    @unittest.skipIf(GCSFileSystem is None, 'GCP dependencies are not installed')
    def test_pipeline_gcs_model(self):
      with TestPipeline() as pipeline:
        examples = torch.from_numpy(
            np.array([1, 5, 3, 10], dtype="float32").reshape(-1, 1))
        expected_predictions = [
            PredictionResult(ex, pred) for ex,
            pred in zip(
                examples,
                torch.Tensor([example * 2.0 + 0.5
                              for example in examples]).reshape(-1, 1))
        ]
    
        gs_pth = 'gs://apache-beam-ml/models/' \
            'pytorch_lin_reg_model_2x+0.5_state_dict.pth'
        model_handler = PytorchModelHandlerTensor(
            state_dict_path=gs_pth,
            model_class=PytorchLinearRegression,
            model_params={
                'input_dim': 1, 'output_dim': 1
            })
    
        pcoll = pipeline | 'start' >> beam.Create(examples)
        predictions = pcoll | RunInference(model_handler)
>       assert_that(
            predictions,
            equal_to(expected_predictions, equals_fn=_compare_prediction_result))

apache_beam/ml/inference/pytorch_inference_test.py:684: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7a53c63f1af0>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.examples.snippets.transforms.elementwise.runinference_test.RunInferenceStdoutTest

GitHub Actions / Test Results

test_check_torch_unkeyed_model_handler (apache_beam.examples.snippets.transforms.elementwise.runinference_test.RunInferenceStdoutTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --slices: conflicting option string: --slices
self = <apache_beam.examples.snippets.transforms.elementwise.runinference_test.RunInferenceStdoutTest testMethod=test_check_torch_unkeyed_model_handler>
mock_stdout = <_io.StringIO object at 0x7b5e0da0e3a0>

    @pytest.mark.uses_pytorch
    def test_check_torch_unkeyed_model_handler(self, mock_stdout):
>     runinference.torch_unkeyed_model_handler()

apache_beam/examples/snippets/transforms/elementwise/runinference_test.py:117: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/snippets/transforms/elementwise/runinference.py:63: in torch_unkeyed_model_handler
    test(predictions)
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/pipeline_test.py:817: in _add_argparse_args
    parser.add_argument('--slices', type=int)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7b5e0da899d0>
action = _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None)
conflicting_actions = [('--slices', _StoreAction(option_strings=['--slices'], dest='slices', nargs=None, const=None, default=None, type=<class 'int'>, choices=None, help=None, metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --slices: conflicting option string: --slices

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError
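
This variant is the same duplicate-registration failure, but the conflicting option comes from a test-only options subclass in apache_beam/pipeline_test.py that registers --slices, so the problem is not limited to the built-in Dataflow options. One plausible mitigation, sketched below under the assumption that subclasses are collected recursively, is to deduplicate the traversal so each options subclass contributes its argparse arguments exactly once; the helper name and structure here are illustrative, not the actual Beam implementation.

def iter_unique_subclasses(cls, seen=None):
    # Illustrative sketch (not the Beam code): walk the subclass graph
    # recursively while tracking visited classes, so a class reachable
    # through more than one path is yielded only once.
    if seen is None:
        seen = set()
    for sub in cls.__subclasses__():
        if sub in seen:
            continue
        seen.add(sub)
        yield sub
        yield from iter_unique_subclasses(sub, seen)

With a traversal like this, each subclass's _add_argparse_args runs once per parser, which would avoid the "conflicting option string" errors shown in these tracebacks.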

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_pipeline_local_model_extra_inference_args_large (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_pipeline_local_model_extra_inference_args_large>

    def test_pipeline_local_model_extra_inference_args_large(self):
      with TestPipeline() as pipeline:
        inference_args = {
            'prediction_param_array': torch.from_numpy(
                np.array([1, 2], dtype="float32")),
            'prediction_param_bool': True
        }
    
        state_dict = OrderedDict([('linear.weight', torch.Tensor([[2.0]])),
                                  ('linear.bias', torch.Tensor([0.5]))])
        path = os.path.join(self.tmpdir, 'my_state_dict_path')
        torch.save(state_dict, path)
    
        def batch_validator_keyed_tensor_inference_fn(
            batch,
            model,
            device,
            inference_args,
            model_id,
        ):
          multi_process_shared_loaded = "multi_process_shared" in str(type(model))
          if not multi_process_shared_loaded:
            raise Exception(
                f'Loaded model of type {type(model)}, was ' +
                'expecting multi_process_shared_model')
          return default_keyed_tensor_inference_fn(
              batch, model, device, inference_args, model_id)
    
        model_handler = PytorchModelHandlerKeyedTensor(
            state_dict_path=path,
            model_class=PytorchLinearRegressionKeyedBatchAndExtraInferenceArgs,
            model_params={
                'input_dim': 1, 'output_dim': 1
            },
            inference_fn=batch_validator_keyed_tensor_inference_fn,
            large_model=True)
    
        pcoll = pipeline | 'start' >> beam.Create(KEYED_TORCH_EXAMPLES)
        inference_args_side_input = (
            pipeline | 'create side' >> beam.Create(inference_args))
        predictions = pcoll | RunInference(
            model_handler=model_handler,
            inference_args=beam.pvalue.AsDict(inference_args_side_input))
>       assert_that(
            predictions,
            equal_to(
                KEYED_TORCH_PREDICTIONS, equals_fn=_compare_prediction_result))

apache_beam/ml/inference/pytorch_inference_test.py:608: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7f6077538f70>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_pipeline_local_model_large (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_pipeline_local_model_large>

    def test_pipeline_local_model_large(self):
      with TestPipeline() as pipeline:
    
        def batch_validator_tensor_inference_fn(
            batch,
            model,
            device,
            inference_args,
            model_id,
        ):
          multi_process_shared_loaded = "multi_process_shared" in str(type(model))
          if not multi_process_shared_loaded:
            raise Exception(
                f'Loaded model of type {type(model)}, was ' +
                'expecting multi_process_shared_model')
          return default_tensor_inference_fn(
              batch, model, device, inference_args, model_id)
    
        state_dict = OrderedDict([('linear.weight', torch.Tensor([[2.0, 3]])),
                                  ('linear.bias', torch.Tensor([0.5]))])
        path = os.path.join(self.tmpdir, 'my_state_dict_path')
        torch.save(state_dict, path)
    
        model_handler = PytorchModelHandlerTensor(
            state_dict_path=path,
            model_class=PytorchLinearRegression,
            model_params={
                'input_dim': 2, 'output_dim': 1
            },
            inference_fn=batch_validator_tensor_inference_fn,
            large_model=True)
    
        pcoll = pipeline | 'start' >> beam.Create(TWO_FEATURES_EXAMPLES)
        predictions = pcoll | RunInference(model_handler)
>       assert_that(
            predictions,
            equal_to(
                TWO_FEATURES_PREDICTIONS, equals_fn=_compare_prediction_result))

apache_beam/ml/inference/pytorch_inference_test.py:529: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <argparse._ArgumentGroup object at 0x7967b1c61fd0>
action = _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, const=None, default='https:...one, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None)
conflicting_actions = [('--dataflow_endpoint', _StoreAction(option_strings=['--dataflow_endpoint'], dest='dataflow_endpoint', nargs=None, co...e, choices=None, help='The URL for the Dataflow API. If not set, the default public URL will be used.', metavar=None))]

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError

Check warning on line 0 in apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest

GitHub Actions / Test Results

test_invalid_input_type (apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest) failed

sdks/python/test-suites/tox/py38/build/srcs/sdks/python/pytest_py38-pytorch-200.xml [took 0s]
Raw output
argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint
self = <apache_beam.ml.inference.pytorch_inference_test.PytorchRunInferencePipelineTest testMethod=test_invalid_input_type>

    def test_invalid_input_type(self):
      with self.assertRaisesRegex(TypeError, "expected Tensor as element"):
        with TestPipeline() as pipeline:
          examples = np.array([1, 5, 3, 10], dtype="float32").reshape(-1, 1)
    
          state_dict = OrderedDict([('linear.weight', torch.Tensor([[2.0]])),
                                    ('linear.bias', torch.Tensor([0.5]))])
          path = os.path.join(self.tmpdir, 'my_state_dict_path')
          torch.save(state_dict, path)
    
          model_handler = PytorchModelHandlerTensor(
              state_dict_path=path,
              model_class=PytorchLinearRegression,
              model_params={
                  'input_dim': 1, 'output_dim': 1
              })
    
          pcoll = pipeline | 'start' >> beam.Create(examples)
          # pylint: disable=expression-not-assigned
>         pcoll | RunInference(model_handler)

apache_beam/ml/inference/pytorch_inference_test.py:752: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/direct_runner.py:119: in run_pipeline
    all_options = options.get_all_options()
apache_beam/options/pipeline_options.py:337: in get_all_options
    cls._add_argparse_args(parser)  # pylint: disable=protected-access
apache_beam/options/pipeline_options.py:769: in _add_argparse_args
    parser.add_argument(
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1386: in add_argument
    return self._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1749: in _add_action
    self._optionals._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1590: in _add_action
    action = super(_ArgumentGroup, self)._add_action(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1400: in _add_action
    self._check_conflict(action)
/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1539: in _check_conflict
    conflict_handler(action, confl_optionals)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def _handle_conflict_error(self, action, conflicting_actions):
        message = ngettext('conflicting option string: %s',
                           'conflicting option strings: %s',
                           len(conflicting_actions))
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
>       raise ArgumentError(action, message % conflict_string)
E       argparse.ArgumentError: argument --dataflow_endpoint: conflicting option string: --dataflow_endpoint

/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/argparse.py:1548: ArgumentError