Skip to content

tests.system.aiplatform.test_project_id_inference.TestProjectIDInference: test_project_id_inference failed #1095

Closed
@flaky-bot

Description

@flaky-bot

This test failed!

To configure my behavior, see the Flaky Bot documentation.

If I'm commenting on this issue too often, add the flakybot: quiet label and
I will stop commenting.


commit: 876cb33
buildURL: Build Status, Sponge
status: failed

Test output
args = (parent: "projects/precise-truck-742/locations/us-central1"
custom_job {
  display_name: "temp-vertex-sdk-project-id-i...ce-0d0e1f40-deed-4038-b75a-b5/aiplatform-custom-job-2022-03-21-14:35:12.924"
    }
    enable_web_access: true
  }
}
,)
kwargs = {'metadata': [('x-goog-request-params', 'parent=projects/precise-truck-742/locations/us-central1'), ('x-goog-api-client', 'model-builder/1.11.0 gl-python/3.8.12 grpc/1.34.1 gax/1.31.5 gapic/1.11.0')]}
@six.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
    try:
      return callable_(*args, **kwargs)

.nox/system-3-8/lib/python3.8/site-packages/google/api_core/grpc_helpers.py:67:


self = <grpc._channel._UnaryUnaryMultiCallable object at 0x7f22c41ba460>
request = parent: "projects/precise-truck-742/locations/us-central1"
custom_job {
display_name: "temp-vertex-sdk-project-id-in...ence-0d0e1f40-deed-4038-b75a-b5/aiplatform-custom-job-2022-03-21-14:35:12.924"
}
enable_web_access: true
}
}

timeout = None
metadata = [('x-goog-request-params', 'parent=projects/precise-truck-742/locations/us-central1'), ('x-goog-api-client', 'model-builder/1.11.0 gl-python/3.8.12 grpc/1.34.1 gax/1.31.5 gapic/1.11.0')]
credentials = None, wait_for_ready = None, compression = None

def __call__(self,
             request,
             timeout=None,
             metadata=None,
             credentials=None,
             wait_for_ready=None,
             compression=None):
    state, call, = self._blocking(request, timeout, metadata, credentials,
                                  wait_for_ready, compression)
  return _end_unary_response_blocking(state, call, False, None)

.nox/system-3-8/lib/python3.8/site-packages/grpc/_channel.py:923:


state = <grpc._channel._RPCState object at 0x7f22954ca7f0>
call = <grpc._cython.cygrpc.SegregatedCall object at 0x7f2295601040>
with_call = False, deadline = None

def _end_unary_response_blocking(state, call, with_call, deadline):
    if state.code is grpc.StatusCode.OK:
        if with_call:
            rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
            return state.response, rendezvous
        else:
            return state.response
    else:
      raise _InactiveRpcError(state)

E grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
E status = StatusCode.INVALID_ARGUMENT
E details = "Machine type "n1-standard-2" is not supported."
E debug_error_string = "{"created":"@1647873313.253501462","description":"Error received from peer ipv4:74.125.195.95:443","file":"src/core/lib/surface/call.cc","file_line":1062,"grpc_message":"Machine type "n1-standard-2" is not supported.","grpc_status":3}"
E >

.nox/system-3-8/lib/python3.8/site-packages/grpc/_channel.py:826: _InactiveRpcError

The above exception was the direct cause of the following exception:

self = <tests.system.aiplatform.test_project_id_inference.TestProjectIDInference object at 0x7f22953531c0>
shared_state = {'bucket': <Bucket: temp-vertex-sdk-project-id-inference-0d0e1f40-deed-4038-b75a-b5>, 'resources': [<google.cloud.aipl...inference-0d0e1f40-deed-4038-b75a-b5', 'storage_client': <google.cloud.storage.client.Client object at 0x7f22954dda00>}

def test_project_id_inference(self, shared_state):
    # Collection of resources generated by this test, to be deleted during teardown
    shared_state["resources"] = []

    aiplatform.init(
        location=e2e_base._LOCATION,
        staging_bucket=shared_state["staging_bucket_name"],
    )

    worker_pool_specs = [
        {
            "machine_spec": {"machine_type": "n1-standard-2"},
            "replica_count": 1,
            "container_spec": {
                "image_uri": "python:3.9",
                "command": [
                    "sh",
                    "-exc",
                    """python3 -m pip install git+https://github.com/Ark-kun/python-aiplatform@fix--Fixed-getitng-project-ID-when-running-on-Vertex-AI#egg=google-cloud-aiplatform&subdirectory=.
                        "$0" "$@"
                        """,
                    "python3",
                    "-c",
                    """
                        from google.cloud import aiplatform
                        # Not initializing the Vertex SDK explicitly
                        # Checking the project ID
                        print(aiplatform.initializer.global_config.project)
                        assert not aiplatform.initializer.global_config.project.endswith("-tp")
                        # Testing ability to list resources
                        endpoints = aiplatform.Endpoint.list()
                        print(endpoints)
                        """,
                ],
                "args": [],
            },
        }
    ]

    custom_job = aiplatform.CustomJob(
        display_name=self._make_display_name("custom"),
        worker_pool_specs=worker_pool_specs,
    )
    custom_job.run(
        enable_web_access=True, sync=False,
    )

    shared_state["resources"].append(custom_job)

    in_progress_done_check = custom_job.done()
  custom_job.wait_for_resource_creation()

tests/system/aiplatform/test_project_id_inference.py:80:


google/cloud/aiplatform/jobs.py:987: in wait_for_resource_creation
self._wait_for_resource_creation()
google/cloud/aiplatform/base.py:1197: in _wait_for_resource_creation
self._raise_future_exception()
google/cloud/aiplatform/base.py:209: in _raise_future_exception
raise self._exception
google/cloud/aiplatform/base.py:221: in _complete_future
future.result() # raises
/usr/local/lib/python3.8/concurrent/futures/_base.py:437: in result
return self.__get_result()
/usr/local/lib/python3.8/concurrent/futures/_base.py:389: in __get_result
raise self._exception
/usr/local/lib/python3.8/concurrent/futures/thread.py:57: in run
result = self.fn(*self.args, **self.kwargs)
google/cloud/aiplatform/base.py:311: in wait_for_dependencies_and_invoke
result = method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:1440: in run
self._gca_resource = self.api_client.create_custom_job(
google/cloud/aiplatform_v1/services/job_service/client.py:716: in create_custom_job
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
.nox/system-3-8/lib/python3.8/site-packages/google/api_core/gapic_v1/method.py:145: in call
return wrapped_func(*args, **kwargs)
.nox/system-3-8/lib/python3.8/site-packages/google/api_core/grpc_helpers.py:69: in error_remapped_callable
six.raise_from(exceptions.from_grpc_error(exc), exc)


value = None
from_value = <_InactiveRpcError of RPC that terminated with:
status = StatusCode.INVALID_ARGUMENT
details = "Machine type "n1-sta...b/surface/call.cc","file_line":1062,"grpc_message":"Machine type "n1-standard-2" is not supported.","grpc_status":3}"

???
E google.api_core.exceptions.InvalidArgument: 400 Machine type "n1-standard-2" is not supported.

:3: InvalidArgument

Metadata

Metadata

Assignees

No one assigned

    Labels

    🚨 — This issue needs some love.
    api: aiplatform — Issues related to the AI Platform API.
    aiplatform — Issues related to the AI Platform (Unified) API.
    flakybot: flaky — Tells the Flaky Bot not to close or comment on this issue.
    flakybot: issue — An issue filed by the Flaky Bot. Should not be added manually.
    priority: p1 — Important issue which blocks shipping the next release. Will be fixed prior to next release.
    type: bug — Error or flaw in code with unintended results or allowing sub-optimal usage patterns.

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions