Comments (40)
commit: 57806fb
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:422: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:380: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:814: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f68d0c15520>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/7276704910538702848def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 06c9d18
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:422: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:380: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:814: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f05efd39fd0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6611051774990090240def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 3a97c52
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:422: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:380: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:814: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f6f4c4bfbe0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/7581788600920965120def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: f3b25ab
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:429: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:387: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:814: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f2ef4bb1970>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/1662915198463246336def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 9eed3b7
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:429: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:387: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fbcf1090940>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/2317801916152676352def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 65e263c
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:429: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:387: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f100b4beb50>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6439780848731422720def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 593b54f
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:429: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:387: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f3002887b80>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/770690099825344512def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:128]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: f8f2b9c
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:434: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:392: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f930599b850>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/4670490717779394560def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 18e8bb2
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:434: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:392: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fa2442e9d30>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6576832774110445568def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: faead25
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:479: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:437: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f4fd3254c40>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/7712267645789143040def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 4e76a6e
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:488: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:446: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f21177162e0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/555111053990559744def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:128]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 8df5185
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:170:
vertexai/language_models/_language_models.py:491: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:449: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7feaa6efddc0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/3646198879482806272def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: ba636f6
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:222:
vertexai/language_models/_language_models.py:563: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:521: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f98501dac10>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/5001588853274836992def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 3a8348b
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:223:
vertexai/language_models/_language_models.py:570: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:528: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fb3c8354130>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/5528580378421362688def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: cbf9b6e
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:229:
vertexai/language_models/_language_models.py:563: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:521: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f6c70b3cf70>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/8341315840225837056def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: e6d1e95
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:245:
vertexai/language_models/_language_models.py:736: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:694: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fa310d2e820>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/9217688180214464512def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 62ff30d
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fa13013c940>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/5908422063399370752def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: ff5cfa1
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f4a090a8dc0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/7917572854973988864def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 7eaa1d4
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fcd2c8d00d0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6996270071827922944def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: d11b8e6
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f234c5e2370>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/1697401380668440576def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: c60b9ca
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f62783669d0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/8573219335198212096def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 3d468ed
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:794: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:875: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f8a24399220>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/7626944443962097664def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 339593e
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f6f6d3d3550>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/7404544027988590592def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 61733c8
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:890: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:848: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fa55cb32010>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6356045341694885888def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 2a08535
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:885: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:843: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f4e78183ad0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6633535688266481664def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: b3ac4a3
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:905: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:863: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:809: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fb2cdd1bb10>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6254775922730205184def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 91176aa
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:905: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:863: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f304e65bb50>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/8517535668321124352def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 0fb50c7
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:907: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:865: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f5039b73510>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/3753325396888649728def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"

google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 2bd494b
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:907: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:865: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fb26088f8d0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/8703298157833879552def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 50c1591
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:907: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:865: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f5d04d19350>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/3908618220172476416def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: a134461
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:911: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:869: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7faa1e4db290>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/4995872492322029568def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: d62bb1b
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f8a4d9c1990>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/1022131916423299072def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 5dba09b
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f318e94a510>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/1053199716977737728def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: e7fbecc
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f0d846d4c50>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6373947590018334720def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 078d9e3
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f4ee956f590>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/255279730654183424def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:128]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 4256361
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7fab2419f8d0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/393044139568005120def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:128]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: d516931
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f259b0c02d0>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6178376082396282880def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: f7feeca
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:916: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:874: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f0065617250>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6793891487718309888def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 commit: 0fa47ae
buildURL: Build Status, Sponge
status: failed
Test output
self =def test_batch_prediction_for_text_generation(self): source_uri = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/batch_prediction_prompts1.jsonl" destination_uri_prefix = "gs://ucaip-samples-us-central1/model/llm/batch_prediction/predictions/text-bison@001_" aiplatform.init(project=e2e_base._PROJECT, location=e2e_base._LOCATION) model = TextGenerationModel.from_pretrained("text-bison@001")
job = model.batch_predict(
dataset=source_uri, destination_uri_prefix=destination_uri_prefix, model_parameters={"temperature": 0, "top_p": 1, "top_k": 5}, )
tests/system/aiplatform/test_language_models.py:259:
vertexai/language_models/_language_models.py:884: in batch_predict
return super().batch_predict(
vertexai/language_models/_language_models.py:842: in batch_predict
job = aiplatform.BatchPredictionJob.create(
google/cloud/aiplatform/jobs.py:795: in create
return cls._create(
google/cloud/aiplatform/base.py:819: in wrapper
return method(*args, **kwargs)
google/cloud/aiplatform/jobs.py:876: in _create
batch_prediction_job._block_until_complete()
self = <google.cloud.aiplatform.jobs.BatchPredictionJob object at 0x7f9ce06cf810>
resource name: projects/580378083368/locations/us-central1/batchPredictionJobs/6359100609630568448def _block_until_complete(self): """Helper method to block and check on job until complete. Raises: RuntimeError: If job failed or cancelled. """ log_wait = _LOG_WAIT_TIME previous_time = time.time() while self.state not in _JOB_COMPLETE_STATES: current_time = time.time() if current_time - previous_time >= log_wait: self._log_job_state() log_wait = min(log_wait * _WAIT_TIME_MULTIPLIER, _MAX_WAIT_TIME) previous_time = current_time time.sleep(_JOB_WAIT_TIME) self._log_job_state() # Error is only populated when the job state is # JOB_STATE_FAILED or JOB_STATE_CANCELLED. if self._gca_resource.state in _JOB_ERROR_STATES:
raise RuntimeError("Job failed with:\n%s" % self._gca_resource.error)
E RuntimeError: Job failed with:
E code: 3
E message: "Failed to run inference job. Syntax error: Unexpected keyword AS at [78:129]"
google/cloud/aiplatform/jobs.py:241: RuntimeError
from python-aiplatform.
flaky-bot commented on July 2, 2024 Looks like this issue is flaky. 😟
I'm going to leave this open and stop commenting.
A human should fix and close this.
When run at the same commit (0fa47ae), this test passed in one build (Build Status, Sponge) and failed in another build (Build Status, Sponge).
from python-aiplatform.
Related Issues (20)
- tests.system.aiplatform.test_featurestore.TestFeaturestore: test_online_reads failed HOT 1
- [BUG] Can't use seed parameter with TextGenerationModel.predict_async HOT 2
- tests.system.aiplatform.test_e2e_tabular.TestEndToEndTabular: test_end_to_end_tabular failed HOT 1
- tests.system.aiplatform.test_prediction_cpr.TestPredictionCpr: test_build_cpr_model_upload_and_deploy failed HOT 1
- GenerationConfig.response_schema ValueError when passing raw JSON schema HOT 4
- Can not version or assign to parent AutoML forecasting (TFT) model when training it from custom KubeFlow container
- tests.system.vertexai.test_offline_store.TestOfflineStore: test_entity_df_no_timestamp_column_raises_error failed HOT 1
- tests.system.vertexai.test_offline_store.TestOfflineStore: test_entity_df_too_many_timestamp_columns_raises_error failed HOT 1
- tests.system.vertexai.test_offline_store.TestOfflineStore: test_with_features_old_data failed HOT 1
- tests.system.vertexai.test_offline_store.TestOfflineStore: test_with_features_new_data failed HOT 1
- tests.system.vertexai.test_offline_store.TestOfflineStore: test_with_features_mixed1_data failed HOT 1
- tests.system.vertexai.test_offline_store.TestOfflineStore: test_with_features_mixed2_data failed HOT 1
- RAG file metadata
- Unable to use Gemini stable model for caching in vertexai preview HOT 3
- tests.unit.vertexai.test_tokenization.TestModelLoad: test_load_model_proto_from_cache failed HOT 6
- tests.unit.vertexai.test_tokenization.TestModelLoad: test_load_model_proto_from_corrupted_cache failed HOT 5
- Unclear Token Usage Metrics in Gemini on Vertex AI - Request for additional `total_billable_token` metric instead HOT 1
- Warning: a recent release failed HOT 1
- Warning: a recent release failed
- Reasoning Engine Error
Recommend Projects
React
A declarative, efficient, and flexible JavaScript library for building user interfaces.
Vue.js
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
Typescript
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
TensorFlow
An Open Source Machine Learning Framework for Everyone
Django
The Web framework for perfectionists with deadlines.
Laravel
A PHP framework for web artisans
D3
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
Recommend Topics
javascript
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
web
Some thing interesting about web. New door for the world.
server
A server is a program made to process requests and deliver data to clients.
Machine learning
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Visualization
Some thing interesting about visualization, use data art
Game
Some thing interesting about game, make everyone happy.
Recommend Org
We are working to build community through open source technology. NB: members must have two-factor auth.
Microsoft
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba
Alibaba Open Source for everyone
D3
Data-Driven Documents codes.
Tencent
China tencent open source team.
from python-aiplatform.