
[DO NOT MERGE] Run all PostCommit and PreCommit Tests against Release Branch #4106

GitHub Actions / Test Results failed Dec 19, 2024 in 0s

1 fail, 54 skipped, 109 pass in 1h 55m 18s

  3 files    3 suites   1h 55m 18s ⏱️
164 tests 109 ✅ 54 💤 1 ❌
216 runs  156 ✅ 59 💤 1 ❌

Results for commit c1ba1bd.

Annotations

Check warning on line 0 in apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT

github-actions / Test Results

1 out of 2 runs failed: test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT)

sdks/python/pytest_postCommitIT-df-py39.xml [took 5s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 19 Dec 2024 14:31:11 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(ec557722f5d84bb0): The workflow could not be created. Causes: (8de3e5598caf3795): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT testMethod=test_big_query_new_types_avro>

    @pytest.mark.it_postcommit
    def test_big_query_new_types_avro(self):
      expected_checksum = test_utils.compute_hash(NEW_TYPES_OUTPUT_EXPECTED)
      verify_query = NEW_TYPES_OUTPUT_VERIFY_QUERY % self.output_table
      pipeline_verifiers = [
          PipelineStateMatcher(),
          BigqueryMatcher(
              project=self.project,
              query=verify_query,
              checksum=expected_checksum)
      ]
      self._setup_new_types_env()
      extra_opts = {
          'query': NEW_TYPES_QUERY % (self.dataset_id, NEW_TYPES_INPUT_TABLE),
          'output': self.output_table,
          'output_schema': NEW_TYPES_OUTPUT_SCHEMA,
          'use_standard_sql': False,
          'wait_until_finish_duration': WAIT_UNTIL_FINISH_DURATION_MS,
          'on_success_matcher': all_of(*pipeline_verifiers),
      }
      options = self.test_pipeline.get_full_options_as_args(**extra_opts)
>     big_query_query_to_table_pipeline.run_bq_pipeline(options)

apache_beam/io/gcp/big_query_query_to_table_it_test.py:251: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/io/gcp/big_query_query_to_table_pipeline.py:103: in run_bq_pipeline
    result = p.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:567: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:594: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:725: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:831: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7dfc9d760f40>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 1...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20241219143110361757-4304'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 19 Dec 2024 14:31:11 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(ec557722f5d84bb0): The workflow could not be created. Causes: (8de3e5598caf3795): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
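This is an infrastructure flake rather than a test regression: the apache-beam-testing project hit the Dataflow jobs-per-project quota (300 concurrently running jobs) at submission time. Below is a minimal sketch of how the active-job count could be checked against that limit before re-running the suite. The project, region, and quota figure are taken from the error message above; the use of google-api-python-client against the public Dataflow v1b3 REST API is an assumption for illustration, not what the test harness itself does.

```python
# Sketch only: count ACTIVE Dataflow jobs in the project that hit the quota.
# Assumes google-api-python-client is installed and ambient credentials
# (e.g. GOOGLE_APPLICATION_CREDENTIALS) are allowed to call the Dataflow API.
from googleapiclient.discovery import build

PROJECT = "apache-beam-testing"   # from the error message
REGION = "us-central1"            # from the request URL in the traceback
QUOTA = 300                       # jobs-per-project limit cited in the error


def count_active_jobs():
    dataflow = build("dataflow", "v1b3")
    jobs = []
    request = dataflow.projects().locations().jobs().list(
        projectId=PROJECT, location=REGION, filter="ACTIVE")
    # Follow nextPageToken pagination until all active jobs are collected.
    while request is not None:
        response = request.execute()
        jobs.extend(response.get("jobs", []))
        request = dataflow.projects().locations().jobs().list_next(
            request, response)
    return len(jobs)


if __name__ == "__main__":
    active = count_active_jobs()
    print(f"{active}/{QUOTA} active Dataflow jobs in {PROJECT} ({REGION})")
```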
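The stack above also shows that job creation goes through Beam's retry wrapper (apache_beam/utils/retry.py), yet the 400 FAILED_PRECONDITION still propagated to the test, so quota exhaustion is surfaced to the caller rather than retried. A hypothetical mitigation sketch for a caller that wants to ride out temporary quota exhaustion is shown below; `submit` stands in for whatever actually launches the pipeline (for example `run_bq_pipeline(options)` in this test) and the backoff parameters are illustrative, not part of Beam.

```python
# Hypothetical mitigation sketch: retry pipeline submission with exponential
# backoff when Dataflow rejects it for the jobs-per-project quota.
import time

from apitools.base.py.exceptions import HttpBadRequestError


def submit_with_quota_backoff(submit, max_attempts=5, initial_delay_s=60.0):
    """Call `submit()` and retry only on jobs-per-project quota rejections."""
    delay = initial_delay_s
    for attempt in range(1, max_attempts + 1):
        try:
            return submit()
        except HttpBadRequestError as e:
            # As seen in the traceback, this 400 is not retried by Beam's own
            # wrapper, so it reaches the caller; re-raise anything that is not
            # the quota error, or once the attempt budget is spent.
            if "jobs-per-project quota" not in str(e) or attempt == max_attempts:
                raise
            time.sleep(delay)   # give running workflows time to finish
            delay *= 2
```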