Skip to content

Commit 2d91be0

Browse files
sararob authored and copybara-github committed
chore: Upgrade black version to 25.1.0 for Vertex SDK
PiperOrigin-RevId: 789752562
1 parent 553c423 commit 2d91be0

File tree

823 files changed

+16183
-13692
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

823 files changed

+16183
-13692
lines changed

google/cloud/aiplatform/_streaming_prediction.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,9 @@ async def predict_stream_of_tensor_lists_from_single_tensor_list_async(
130130
inputs=tensor_list,
131131
parameters=parameters_tensor,
132132
)
133-
async for response in await prediction_service_async_client.server_streaming_predict(
133+
async for (
134+
response
135+
) in await prediction_service_async_client.server_streaming_predict(
134136
request=request
135137
):
136138
yield response.outputs
@@ -183,7 +185,9 @@ async def predict_stream_of_dict_lists_from_single_dict_list_async(
183185
"""
184186
tensor_list = [value_to_tensor(d) for d in dict_list]
185187
parameters_tensor = value_to_tensor(parameters) if parameters else None
186-
async for tensor_list in predict_stream_of_tensor_lists_from_single_tensor_list_async(
188+
async for (
189+
tensor_list
190+
) in predict_stream_of_tensor_lists_from_single_tensor_list_async(
187191
prediction_service_async_client=prediction_service_async_client,
188192
endpoint_name=endpoint_name,
189193
tensor_list=tensor_list,

google/cloud/aiplatform/base.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -639,7 +639,6 @@ def _get_and_validate_project_location(
639639
project: Optional[str] = None,
640640
location: Optional[str] = None,
641641
) -> Tuple[str, str]:
642-
643642
"""Validate the project and location for the resource.
644643
645644
Args:

google/cloud/aiplatform/datasets/__init__.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,13 @@
1616
#
1717

1818
from google.cloud.aiplatform.datasets.dataset import _Dataset
19-
from google.cloud.aiplatform.datasets.column_names_dataset import _ColumnNamesDataset
19+
from google.cloud.aiplatform.datasets.column_names_dataset import (
20+
_ColumnNamesDataset,
21+
)
2022
from google.cloud.aiplatform.datasets.tabular_dataset import TabularDataset
21-
from google.cloud.aiplatform.datasets.time_series_dataset import TimeSeriesDataset
23+
from google.cloud.aiplatform.datasets.time_series_dataset import (
24+
TimeSeriesDataset,
25+
)
2226
from google.cloud.aiplatform.datasets.image_dataset import ImageDataset
2327
from google.cloud.aiplatform.datasets.text_dataset import TextDataset
2428
from google.cloud.aiplatform.datasets.video_dataset import VideoDataset

google/cloud/aiplatform/docker_utils/build.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ def _prepare_exposed_ports(exposed_ports: Optional[List[int]] = None) -> str:
223223

224224

225225
def _prepare_environment_variables(
226-
environment_variables: Optional[Dict[str, str]] = None
226+
environment_variables: Optional[Dict[str, str]] = None,
227227
) -> str:
228228
"""Returns the Dockerfile entries required to set environment variables in containers.
229229

google/cloud/aiplatform/docker_utils/run.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,9 @@
3030
)
3131

3232
from google.cloud.aiplatform.constants import prediction
33-
from google.cloud.aiplatform.docker_utils.utils import DEFAULT_MOUNTED_MODEL_DIRECTORY
33+
from google.cloud.aiplatform.docker_utils.utils import (
34+
DEFAULT_MOUNTED_MODEL_DIRECTORY,
35+
)
3436
from google.cloud.aiplatform.utils import prediction_utils
3537

3638
_logger = logging.getLogger(__name__)

google/cloud/aiplatform/explain/metadata/tf/v1/saved_model_metadata_builder.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ def get_metadata_protobuf(self) -> explanation_metadata.ExplanationMetadata:
126126

127127

128128
def _create_input_metadata_from_signature(
129-
signature_inputs: Dict[str, "tf.Tensor"] # noqa: F821
129+
signature_inputs: Dict[str, "tf.Tensor"], # noqa: F821
130130
) -> Dict[str, explanation_metadata.ExplanationMetadata.InputMetadata]:
131131
"""Creates InputMetadata from signature inputs.
132132

google/cloud/aiplatform/explain/metadata/tf/v2/saved_model_metadata_builder.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -75,9 +75,7 @@ def __init__(
7575
signature_name
7676
)
7777

78-
def _infer_metadata_entries_from_model(
79-
self, signature_name: str
80-
) -> Tuple[
78+
def _infer_metadata_entries_from_model(self, signature_name: str) -> Tuple[
8179
Dict[str, explanation_metadata.ExplanationMetadata.InputMetadata],
8280
Dict[str, explanation_metadata.ExplanationMetadata.OutputMetadata],
8381
]:
@@ -107,10 +105,10 @@ def _infer_metadata_entries_from_model(
107105
output_mds = {}
108106
for name in output_sig:
109107
if not self._explain_output or self._explain_output[0] == name:
110-
output_mds[
111-
name
112-
] = explanation_metadata.ExplanationMetadata.OutputMetadata(
113-
output_tensor_name=name,
108+
output_mds[name] = (
109+
explanation_metadata.ExplanationMetadata.OutputMetadata(
110+
output_tensor_name=name,
111+
)
114112
)
115113
break
116114
else:

google/cloud/aiplatform/featurestore/_entity_type.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -116,11 +116,11 @@ def __init__(
116116
)
117117
self._gca_resource = self._get_gca_resource(
118118
resource_name=entity_type_name,
119-
parent_resource_name_fields={
120-
featurestore.Featurestore._resource_noun: featurestore_id
121-
}
122-
if featurestore_id
123-
else featurestore_id,
119+
parent_resource_name_fields=(
120+
{featurestore.Featurestore._resource_noun: featurestore_id}
121+
if featurestore_id
122+
else featurestore_id
123+
),
124124
)
125125

126126
self._featurestore_online_client = self._instantiate_featurestore_online_client(
@@ -1842,7 +1842,7 @@ def _apply_feature_timestamp(
18421842

18431843
@staticmethod
18441844
def _is_timestamp(
1845-
timestamp: Union[datetime.datetime, timestamp_pb2.Timestamp]
1845+
timestamp: Union[datetime.datetime, timestamp_pb2.Timestamp],
18461846
) -> bool:
18471847
return isinstance(timestamp, datetime.datetime) or isinstance(
18481848
timestamp, timestamp_pb2.Timestamp

google/cloud/aiplatform/featurestore/feature.py

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -114,12 +114,14 @@ def __init__(
114114
)
115115
self._gca_resource = self._get_gca_resource(
116116
resource_name=feature_name,
117-
parent_resource_name_fields={
118-
featurestore.Featurestore._resource_noun: featurestore_id,
119-
featurestore.EntityType._resource_noun: entity_type_id,
120-
}
121-
if featurestore_id
122-
else featurestore_id,
117+
parent_resource_name_fields=(
118+
{
119+
featurestore.Featurestore._resource_noun: featurestore_id,
120+
featurestore.EntityType._resource_noun: entity_type_id,
121+
}
122+
if featurestore_id
123+
else featurestore_id
124+
),
123125
)
124126

125127
def _get_featurestore_name(self) -> str:
@@ -338,11 +340,11 @@ def list(
338340
resource_noun=featurestore.EntityType._resource_noun,
339341
parse_resource_name_method=featurestore.EntityType._parse_resource_name,
340342
format_resource_name_method=featurestore.EntityType._format_resource_name,
341-
parent_resource_name_fields={
342-
featurestore.Featurestore._resource_noun: featurestore_id
343-
}
344-
if featurestore_id
345-
else featurestore_id,
343+
parent_resource_name_fields=(
344+
{featurestore.Featurestore._resource_noun: featurestore_id}
345+
if featurestore_id
346+
else featurestore_id
347+
),
346348
project=project,
347349
location=location,
348350
resource_id_validator=featurestore.EntityType._resource_id_validator,
@@ -585,11 +587,11 @@ def create(
585587
resource_noun=featurestore.EntityType._resource_noun,
586588
parse_resource_name_method=featurestore.EntityType._parse_resource_name,
587589
format_resource_name_method=featurestore.EntityType._format_resource_name,
588-
parent_resource_name_fields={
589-
featurestore.Featurestore._resource_noun: featurestore_id
590-
}
591-
if featurestore_id
592-
else featurestore_id,
590+
parent_resource_name_fields=(
591+
{featurestore.Featurestore._resource_noun: featurestore_id}
592+
if featurestore_id
593+
else featurestore_id
594+
),
593595
project=project,
594596
location=location,
595597
resource_id_validator=featurestore.EntityType._resource_id_validator,

google/cloud/aiplatform/gapic/schema/__init__.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,20 @@
1818
from google.cloud.aiplatform.v1.schema import predict
1919
from google.cloud.aiplatform.v1.schema import trainingjob
2020
from google.cloud.aiplatform.v1beta1.schema import predict as predict_v1beta1
21-
from google.cloud.aiplatform.v1beta1.schema import predict as trainingjob_v1beta1
21+
from google.cloud.aiplatform.v1beta1.schema import (
22+
predict as trainingjob_v1beta1,
23+
)
2224

2325
# import the v1 submodules for enhancement
24-
from google.cloud.aiplatform.v1.schema.predict.instance_v1 import types as instance
25-
from google.cloud.aiplatform.v1.schema.predict.params_v1 import types as params
26-
from google.cloud.aiplatform.v1.schema.predict.prediction_v1 import types as prediction
26+
from google.cloud.aiplatform.v1.schema.predict.instance_v1 import (
27+
types as instance,
28+
)
29+
from google.cloud.aiplatform.v1.schema.predict.params_v1 import (
30+
types as params,
31+
)
32+
from google.cloud.aiplatform.v1.schema.predict.prediction_v1 import (
33+
types as prediction,
34+
)
2735
from google.cloud.aiplatform.v1.schema.trainingjob.definition_v1 import (
2836
types as definition,
2937
)

0 commit comments

Comments (0)