diff --git a/google/genai/batches.py b/google/genai/batches.py
index 7d122f086..1fedc3087 100644
--- a/google/genai/batches.py
+++ b/google/genai/batches.py
@@ -406,6 +406,9 @@ def _BatchJob_from_vertex(
       to_object, ['completion_stats'], getv(from_object, ['completionStats'])
   )
 
+  if getv(from_object, ['outputInfo']) is not None:
+    setv(to_object, ['output_info'], getv(from_object, ['outputInfo']))
+
   return to_object
 
 
diff --git a/google/genai/tests/batches/test_get.py b/google/genai/tests/batches/test_get.py
index 5f010ab18..68f7cf3cd 100644
--- a/google/genai/tests/batches/test_get.py
+++ b/google/genai/tests/batches/test_get.py
@@ -16,6 +16,7 @@
 
 """Tests for batches.get()."""
 
+import re
 import pytest
 
 from ... import types
@@ -76,3 +77,15 @@ async def test_async_get(client):
   batch_job = await client.aio.batches.get(name=name)
 
   assert batch_job
+
+
+@pytest.mark.asyncio
+async def test_async_get_with_multimodal_dataset_output(client):
+  if client.vertexai:
+    name = _BATCH_JOB_NAME
+    batch_job = await client.aio.batches.get(name=name)
+
+    assert re.match(
+        r'^projects/[^/]+/locations/[^/]+/datasets/[^/]+$',
+        batch_job.output_info.vertex_multimodal_dataset_name,
+    )
diff --git a/google/genai/types.py b/google/genai/types.py
index a2fb4cb75..84b70ed85 100644
--- a/google/genai/types.py
+++ b/google/genai/types.py
@@ -16562,6 +16562,39 @@ class _CreateBatchJobParametersDict(TypedDict, total=False):
   ]
 
+class BatchJobOutputInfo(_common.BaseModel):
+  """Represents the `output_info` field in batch jobs."""
+
+  vertex_multimodal_dataset_name: Optional[str] = Field(
+      default=None,
+      description="""This field is experimental and may change in future versions.
+      The Vertex AI dataset name containing the output data.""",
+  )
+  gcs_output_directory: Optional[str] = Field(
+      default=None,
+      description="""The full path of the Cloud Storage directory created, into which the prediction output is written.""",
+  )
+  bigquery_output_table: Optional[str] = Field(
+      default=None,
+      description="""The name of the BigQuery table created, in `predictions_` format, into which the prediction output is written.""",
+  )
+
+
+class BatchJobOutputInfoDict(TypedDict, total=False):
+  """Represents the `output_info` field in batch jobs."""
+
+  vertex_multimodal_dataset_name: Optional[str]
+  """This field is experimental and may change in future versions. The Vertex AI dataset name containing the output data."""
+
+  gcs_output_directory: Optional[str]
+  """The full path of the Cloud Storage directory created, into which the prediction output is written."""
+
+  bigquery_output_table: Optional[str]
+  """The name of the BigQuery table created, in `predictions_` format, into which the prediction output is written."""
+
+
+BatchJobOutputInfoOrDict = Union[BatchJobOutputInfo, BatchJobOutputInfoDict]
+
 
 class CompletionStats(_common.BaseModel):
   """Success and error statistics of processing multiple entities (for
   example, DataItems or structured data rows) in batch.
@@ -16669,6 +16702,11 @@ class BatchJob(_common.BaseModel):
       description="""Statistics on completed and failed prediction instances. This field is for Vertex AI only.
       """,
   )
+  output_info: Optional[BatchJobOutputInfo] = Field(
+      default=None,
+      description="""Information further describing the output of this job. Output only.
+      """,
+  )
 
   @property
   def done(self) -> bool:
@@ -16746,6 +16784,10 @@ class BatchJobDict(TypedDict, total=False):
   """Statistics on completed and failed prediction instances. This field is for Vertex AI only.
   """
 
+  output_info: Optional[BatchJobOutputInfoDict]
+  """Information further describing the output of this job. Output only.
+  """
+
 
 BatchJobOrDict = Union[BatchJob, BatchJobDict]
 