
Commit b001cc2

speedstorm1 authored and copybara-github committed
No public description
PiperOrigin-RevId: 788929755
1 parent 2e43d91 commit b001cc2

File tree

google/genai/tests/tunings/test_tune.py
google/genai/tunings.py
google/genai/types.py

3 files changed: 0 additions, 191 deletions

google/genai/tests/tunings/test_tune.py

Lines changed: 0 additions & 101 deletions
@@ -32,107 +32,6 @@
         ),
         exception_if_mldev="gcs_uri parameter is not supported in Gemini API.",
     ),
-    pytest_helper.TestTableItem(
-        name="test_dataset_gcs_uri_all_parameters",
-        parameters=genai_types._CreateTuningJobParameters(
-            base_model="gemini-1.5-pro-002",
-            training_dataset=genai_types.TuningDataset(
-                gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl",
-            ),
-            config=genai_types.CreateTuningJobConfig(
-                tuned_model_display_name="Model display name",
-                epoch_count=1,
-                learning_rate_multiplier=1.0,
-                adapter_size="ADAPTER_SIZE_ONE",
-                validation_dataset=genai_types.TuningDataset(
-                    gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_validation_data.jsonl",
-                ),
-                # Not supported in Vertex AI
-                # batch_size=4,
-                # learning_rate=0.01,
-            ),
-        ),
-        exception_if_mldev="gcs_uri parameter is not supported in Gemini API.",
-    ),
-    pytest_helper.TestTableItem(
-        name="test_dataset_gcs_uri_parameters_unsupported_by_vertex",
-        parameters=genai_types._CreateTuningJobParameters(
-            base_model="gemini-1.5-pro-002",
-            training_dataset=genai_types.TuningDataset(
-                gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl",
-            ),
-            config=genai_types.CreateTuningJobConfig(
-                # Not supported in Vertex AI
-                batch_size=4,
-                learning_rate=0.01,
-            ),
-        ),
-        exception_if_vertex="batch_size parameter is not supported in Vertex AI.",
-        exception_if_mldev="gcs_uri parameter is not supported in Gemini API.",
-    ),
-    pytest_helper.TestTableItem(
-        name="test_dataset_examples_parameters_unsupported_by_mldev",
-        parameters=genai_types._CreateTuningJobParameters(
-            # Error: "models/gemini-1.5-pro-002 is not found for
-            # CREATE TUNED MODEL at API version v1beta."
-            # base_model="gemini-1.5-pro-002",
-            base_model="models/gemini-1.0-pro-001",
-            training_dataset=genai_types.TuningDataset(
-                examples=[
-                    genai_types.TuningExample(
-                        text_input=f"Input text {i}",
-                        output=f"Output text {i}",
-                    )
-                    for i in range(5)
-                ],
-            ),
-            # Required for MLDev:
-            # "Either tuned_model_id or display_name must be set."
-            config=genai_types.CreateTuningJobConfig(
-                tuned_model_display_name="Model display name",
-                # Not supported in MLDev
-                adapter_size="ADAPTER_SIZE_ONE",
-                # Generator issue: "validationDatasetUri": {}. See b/375079287
-                # validation_dataset=genai_types.TuningDataset(
-                #     gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_validation_data.jsonl",
-                # ),
-            ),
-        ),
-        exception_if_mldev="adapter_size parameter is not supported in Gemini API.",
-        exception_if_vertex="examples parameter is not supported in Vertex AI.",
-    ),
-    pytest_helper.TestTableItem(
-        name="test_dataset_vertex_dataset_resource",
-        parameters=genai_types._CreateTuningJobParameters(
-            base_model="gemini-1.5-pro-002",
-            training_dataset=genai_types.TuningDataset(
-                vertex_dataset_resource="projects/613165508263/locations/us-central1/datasets/8254568702121345024",
-            ),
-        ),
-        exception_if_mldev="vertex_dataset_resource parameter is not supported in Gemini API.",
-    ),
-    pytest_helper.TestTableItem(
-        name="test_dataset_dataset_resource_all_parameters",
-        parameters=genai_types._CreateTuningJobParameters(
-            base_model="gemini-1.5-pro-002",
-            training_dataset=genai_types.TuningDataset(
-                vertex_dataset_resource="projects/613165508263/locations/us-central1/datasets/8254568702121345024",
-            ),
-            config=genai_types.CreateTuningJobConfig(
-                tuned_model_display_name="Model display name",
-                epoch_count=1,
-                learning_rate_multiplier=1.0,
-                adapter_size="ADAPTER_SIZE_ONE",
-                validation_dataset=genai_types.TuningDataset(
-                    vertex_dataset_resource="projects/613165508263/locations/us-central1/datasets/5556912525326417920",
-                ),
-                # Not supported in Vertex AI
-                # batch_size=4,
-                # learning_rate=0.01,
-            ),
-        ),
-        exception_if_mldev="vertex_dataset_resource parameter is not supported in Gemini API.",
-    ),
 ]
 
 pytestmark = pytest_helper.setup(

google/genai/tunings.py

Lines changed: 0 additions & 47 deletions
@@ -86,20 +86,6 @@ def _ListTuningJobsParameters_to_mldev(
   return to_object
 
 
-def _TuningExample_to_mldev(
-    from_object: Union[dict[str, Any], object],
-    parent_object: Optional[dict[str, Any]] = None,
-) -> dict[str, Any]:
-  to_object: dict[str, Any] = {}
-  if getv(from_object, ['text_input']) is not None:
-    setv(to_object, ['textInput'], getv(from_object, ['text_input']))
-
-  if getv(from_object, ['output']) is not None:
-    setv(to_object, ['output'], getv(from_object, ['output']))
-
-  return to_object
-
-
 def _TuningDataset_to_mldev(
     from_object: Union[dict[str, Any], object],
     parent_object: Optional[dict[str, Any]] = None,
@@ -113,16 +99,6 @@ def _TuningDataset_to_mldev(
         'vertex_dataset_resource parameter is not supported in Gemini API.'
     )
 
-  if getv(from_object, ['examples']) is not None:
-    setv(
-        to_object,
-        ['examples', 'examples'],
-        [
-            _TuningExample_to_mldev(item, to_object)
-            for item in getv(from_object, ['examples'])
-        ],
-    )
-
   return to_object
 
 
@@ -169,20 +145,6 @@ def _CreateTuningJobConfig_to_mldev(
   if getv(from_object, ['adapter_size']) is not None:
     raise ValueError('adapter_size parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['batch_size']) is not None:
-    setv(
-        parent_object,
-        ['tuningTask', 'hyperparameters', 'batchSize'],
-        getv(from_object, ['batch_size']),
-    )
-
-  if getv(from_object, ['learning_rate']) is not None:
-    setv(
-        parent_object,
-        ['tuningTask', 'hyperparameters', 'learningRate'],
-        getv(from_object, ['learning_rate']),
-    )
-
   return to_object
 
 
@@ -289,9 +251,6 @@ def _TuningDataset_to_vertex(
         getv(from_object, ['vertex_dataset_resource']),
     )
 
-  if getv(from_object, ['examples']) is not None:
-    raise ValueError('examples parameter is not supported in Vertex AI.')
-
   return to_object
 
 
@@ -366,12 +325,6 @@ def _CreateTuningJobConfig_to_vertex(
        getv(from_object, ['adapter_size']),
     )
 
-  if getv(from_object, ['batch_size']) is not None:
-    raise ValueError('batch_size parameter is not supported in Vertex AI.')
-
-  if getv(from_object, ['learning_rate']) is not None:
-    raise ValueError('learning_rate parameter is not supported in Vertex AI.')
-
   return to_object
 
 

google/genai/types.py

Lines changed: 0 additions & 43 deletions
@@ -9082,28 +9082,6 @@ class ListTuningJobsResponseDict(TypedDict, total=False):
   ]
 
 
-class TuningExample(_common.BaseModel):
-
-  text_input: Optional[str] = Field(
-      default=None, description="""Text model input."""
-  )
-  output: Optional[str] = Field(
-      default=None, description="""The expected model output."""
-  )
-
-
-class TuningExampleDict(TypedDict, total=False):
-
-  text_input: Optional[str]
-  """Text model input."""
-
-  output: Optional[str]
-  """The expected model output."""
-
-
-TuningExampleOrDict = Union[TuningExample, TuningExampleDict]
-
-
 class TuningDataset(_common.BaseModel):
   """Supervised fine-tuning training dataset."""
 
@@ -9115,10 +9093,6 @@ class TuningDataset(_common.BaseModel):
       default=None,
      description="""The resource name of the Vertex Multimodal Dataset that is used as training dataset. Example: 'projects/my-project-id-or-number/locations/my-location/datasets/my-dataset-id'.""",
   )
-  examples: Optional[list[TuningExample]] = Field(
-      default=None,
-      description="""Inline examples with simple input/output text.""",
-  )
 
 
 class TuningDatasetDict(TypedDict, total=False):
@@ -9130,9 +9104,6 @@ class TuningDatasetDict(TypedDict, total=False):
   vertex_dataset_resource: Optional[str]
   """The resource name of the Vertex Multimodal Dataset that is used as training dataset. Example: 'projects/my-project-id-or-number/locations/my-location/datasets/my-dataset-id'."""
 
-  examples: Optional[list[TuningExampleDict]]
-  """Inline examples with simple input/output text."""
-
 
 TuningDatasetOrDict = Union[TuningDataset, TuningDatasetDict]
 
@@ -9195,14 +9166,6 @@ class CreateTuningJobConfig(_common.BaseModel):
   adapter_size: Optional[AdapterSize] = Field(
       default=None, description="""Adapter size for tuning."""
   )
-  batch_size: Optional[int] = Field(
-      default=None,
-      description="""The batch size hyperparameter for tuning. If not set, a default of 4 or 16 will be used based on the number of training examples.""",
-  )
-  learning_rate: Optional[float] = Field(
-      default=None,
-      description="""The learning rate hyperparameter for tuning. If not set, a default of 0.001 or 0.0002 will be calculated based on the number of training examples.""",
-  )
 
 
 class CreateTuningJobConfigDict(TypedDict, total=False):
@@ -9232,12 +9195,6 @@ class CreateTuningJobConfigDict(TypedDict, total=False):
   adapter_size: Optional[AdapterSize]
   """Adapter size for tuning."""
 
-  batch_size: Optional[int]
-  """The batch size hyperparameter for tuning. If not set, a default of 4 or 16 will be used based on the number of training examples."""
-
-  learning_rate: Optional[float]
-  """The learning rate hyperparameter for tuning. If not set, a default of 0.001 or 0.0002 will be calculated based on the number of training examples."""
-
 
 CreateTuningJobConfigOrDict = Union[
     CreateTuningJobConfig, CreateTuningJobConfigDict