Commit 03a9ef1

Merge branch 'main' of https://github.com/googleapis/python-storage into transfer_manager_usage_metrics

2 parents 9c67a41 + ab74673

File tree

12 files changed: +193 -47 lines changed

.github/.OwlBot.lock.yaml

Lines changed: 2 additions & 1 deletion

@@ -13,4 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6
+  digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc
+# created: 2023-06-03T21:25:37.968717478Z

.kokoro/requirements.txt

Lines changed: 23 additions & 25 deletions

@@ -113,28 +113,26 @@ commonmark==0.9.1 \
     --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
     --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9
     # via rich
-cryptography==39.0.1 \
-    --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \
-    --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \
-    --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \
-    --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \
-    --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \
-    --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \
-    --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \
-    --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \
-    --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \
-    --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \
-    --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \
-    --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \
-    --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \
-    --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \
-    --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \
-    --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \
-    --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \
-    --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \
-    --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \
-    --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \
-    --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8
+cryptography==41.0.0 \
+    --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \
+    --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \
+    --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \
+    --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \
+    --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \
+    --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \
+    --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \
+    --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \
+    --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \
+    --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \
+    --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \
+    --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \
+    --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \
+    --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \
+    --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \
+    --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \
+    --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \
+    --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \
+    --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be
     # via
     #   gcp-releasetool
     #   secretstorage
@@ -419,9 +417,9 @@ readme-renderer==37.3 \
     --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \
     --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343
     # via twine
-requests==2.28.1 \
-    --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
-    --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
+requests==2.31.0 \
+    --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
+    --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
     # via
     #   gcp-releasetool
     #   google-api-core

google/cloud/storage/batch.py

Lines changed: 33 additions & 6 deletions

@@ -133,18 +133,27 @@ class Batch(Connection):
 
     :type client: :class:`google.cloud.storage.client.Client`
     :param client: The client to use for making connections.
+
+    :type raise_exception: bool
+    :param raise_exception:
+        (Optional) Defaults to True. If True, instead of adding exceptions
+        to the list of return responses, the final exception will be raised.
+        Note that exceptions are unwrapped after all operations are complete
+        in success or failure, and only the last exception is raised.
     """
 
     _MAX_BATCH_SIZE = 1000
 
-    def __init__(self, client):
+    def __init__(self, client, raise_exception=True):
         api_endpoint = client._connection.API_BASE_URL
         client_info = client._connection._client_info
         super(Batch, self).__init__(
             client, client_info=client_info, api_endpoint=api_endpoint
         )
         self._requests = []
         self._target_objects = []
+        self._responses = []
+        self._raise_exception = raise_exception
 
     def _do_request(
         self, method, url, headers, data, target_object, timeout=_DEFAULT_TIMEOUT
@@ -219,24 +228,34 @@ def _prepare_batch_request(self):
         _, body = payload.split("\n\n", 1)
         return dict(multi._headers), body, timeout
 
-    def _finish_futures(self, responses):
+    def _finish_futures(self, responses, raise_exception=True):
         """Apply all the batch responses to the futures created.
 
         :type responses: list of (headers, payload) tuples.
         :param responses: List of headers and payloads from each response in
                           the batch.
 
+        :type raise_exception: bool
+        :param raise_exception:
+            (Optional) Defaults to True. If True, instead of adding exceptions
+            to the list of return responses, the final exception will be raised.
+            Note that exceptions are unwrapped after all operations are complete
+            in success or failure, and only the last exception is raised.
+
         :raises: :class:`ValueError` if no requests have been deferred.
         """
         # If a bad status occurs, we track it, but don't raise an exception
         # until all futures have been populated.
+        # If raise_exception=False, we add exceptions to the list of responses.
        exception_args = None
 
         if len(self._target_objects) != len(responses):  # pragma: NO COVER
             raise ValueError("Expected a response for every request.")
 
         for target_object, subresponse in zip(self._target_objects, responses):
-            if not 200 <= subresponse.status_code < 300:
+            # For backwards compatibility, only the final exception will be raised.
+            # Set raise_exception=False to include all exceptions to the list of return responses.
+            if not 200 <= subresponse.status_code < 300 and raise_exception:
                 exception_args = exception_args or subresponse
             elif target_object is not None:
                 try:
@@ -247,9 +266,16 @@ def _finish_futures(self, responses):
         if exception_args is not None:
             raise exceptions.from_http_response(exception_args)
 
-    def finish(self):
+    def finish(self, raise_exception=True):
         """Submit a single `multipart/mixed` request with deferred requests.
 
+        :type raise_exception: bool
+        :param raise_exception:
+            (Optional) Defaults to True. If True, instead of adding exceptions
+            to the list of return responses, the final exception will be raised.
+            Note that exceptions are unwrapped after all operations are complete
+            in success or failure, and only the last exception is raised.
+
         :rtype: list of tuples
         :returns: one ``(headers, payload)`` tuple per deferred request.
         """
@@ -269,7 +295,8 @@ def finish(self):
             raise exceptions.from_http_response(response)
 
         responses = list(_unpack_batch_response(response))
-        self._finish_futures(responses)
+        self._finish_futures(responses, raise_exception=raise_exception)
+        self._responses = responses
         return responses
 
     def current(self):
@@ -283,7 +310,7 @@ def __enter__(self):
     def __exit__(self, exc_type, exc_val, exc_tb):
         try:
             if exc_type is None:
-                self.finish()
+                self.finish(raise_exception=self._raise_exception)
         finally:
             self._client._pop_batch()
 

google/cloud/storage/client.py

Lines changed: 9 additions & 2 deletions

@@ -302,17 +302,24 @@ def bucket(self, bucket_name, user_project=None):
         """
         return Bucket(client=self, name=bucket_name, user_project=user_project)
 
-    def batch(self):
+    def batch(self, raise_exception=True):
         """Factory constructor for batch object.
 
         .. note::
           This will not make an HTTP request; it simply instantiates
          a batch object owned by this client.
 
+        :type raise_exception: bool
+        :param raise_exception:
+            (Optional) Defaults to True. If True, instead of adding exceptions
+            to the list of return responses, the final exception will be raised.
+            Note that exceptions are unwrapped after all operations are complete
+            in success or failure, and only the last exception is raised.
+
         :rtype: :class:`google.cloud.storage.batch.Batch`
         :returns: The batch object created.
         """
-        return Batch(client=self)
+        return Batch(client=self, raise_exception=raise_exception)
 
     def _get_resource(
         self,

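Taken together, the batch.py and client.py changes above let a caller opt out of raising on partial failure: client.batch(raise_exception=False) defers sub-requests exactly as before, but error responses stay in the result list instead of being raised after the batch completes. A minimal usage sketch under that reading; the bucket and blob names are purely illustrative:

from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-example-bucket")  # hypothetical bucket name

# Requests issued inside the context manager are deferred and sent as one
# multipart/mixed request when the block exits.
with client.batch(raise_exception=False) as batch:
    bucket.delete_blob("file-a.txt")
    bucket.delete_blob("does-not-exist.txt")  # a 404 here no longer raises

# finish() stores the unpacked sub-responses on the batch (the private
# `_responses` attribute added in this commit); each carries its own status.
for sub_response in batch._responses:
    print(sub_response.status_code)

With the default raise_exception=True the behavior is unchanged: the last failing sub-response is raised as an exception after all futures have been populated.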
samples/snippets/requirements.txt

Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
-google-cloud-pubsub==2.16.1
+google-cloud-pubsub==2.17.1
 google-cloud-storage==2.9.0
 pandas===1.3.5; python_version == '3.7'
-pandas==2.0.1; python_version >= '3.8'
+pandas==2.0.2; python_version >= '3.8'

tests/perf/README.md

Lines changed: 1 addition & 0 deletions

@@ -32,6 +32,7 @@ $ python3 benchmarking.py --num_samples 10000 --object_size 5120..16384 --output
 | --test_type | test type to run benchmarking | `w1r3`, `range` | `w1r3` |
 | --output_file | file to output results to | any file path | `output_bench<TIMESTAMP>.csv` |
 | --tmp_dir | temp directory path on file system | any file path | `tm-perf-metrics` |
+| --delete_bucket | whether or not to delete GCS bucket used for benchmarking | bool | `False` |
 
 
 ## Workload definition and CSV headers

tests/perf/_perf_utils.py

Lines changed: 7 additions & 6 deletions

@@ -193,19 +193,20 @@ def get_bucket_instance(bucket_name):
     return bucket
 
 
-def cleanup_bucket(bucket):
+def cleanup_bucket(bucket, delete_bucket=False):
     # Delete blobs first as the bucket may contain more than 256 blobs.
     try:
         blobs = bucket.list_blobs()
         for blob in blobs:
             blob.delete()
     except Exception as e:
         logging.exception(f"Caught an exception while deleting blobs\n {e}")
-    # Delete bucket.
-    try:
-        bucket.delete(force=True)
-    except Exception as e:
-        logging.exception(f"Caught an exception while deleting bucket\n {e}")
+    # Delete bucket if delete_bucket is set to True
+    if delete_bucket:
+        try:
+            bucket.delete(force=True)
+        except Exception as e:
+            logging.exception(f"Caught an exception while deleting bucket\n {e}")
 
 
 def get_min_max_size(object_size):

tests/perf/benchmarking.py

Lines changed: 7 additions & 1 deletion

@@ -80,7 +80,7 @@ def main(args):
     )
 
     # Cleanup and delete blobs.
-    _pu.cleanup_bucket(bucket)
+    _pu.cleanup_bucket(bucket, delete_bucket=args.delete_bucket)
 
     # BBMC will not surface errors unless the process is terminated with a non zero code.
     if counter.count.errors != 0:
@@ -173,6 +173,12 @@ def main(args):
         default=_pu.DEFAULT_BASE_DIR,
         help="Temp directory path on file system",
     )
+    parser.add_argument(
+        "--delete_bucket",
+        type=bool,
+        default=False,
+        help="Whether or not to delete GCS bucket used for benchmarking",
+    )
     args = parser.parse_args()
 
     main(args)

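With the new flag, the benchmarking bucket is kept by default and only removed when --delete_bucket is passed, e.g. python3 benchmarking.py --num_samples 10000 --delete_bucket True. One caveat, shown in the sketch below (only the flag name and its argparse declaration come from the diff; the rest is illustrative): argparse's type=bool calls bool() on the raw string, so any non-empty value, including the literal string "False", parses as True, and only omitting the flag keeps the default of False.

import argparse

# Minimal reproduction of the parser behavior for the new flag.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--delete_bucket",
    type=bool,
    default=False,
    help="Whether or not to delete GCS bucket used for benchmarking",
)

print(parser.parse_args([]).delete_bucket)                            # False (default)
print(parser.parse_args(["--delete_bucket", "True"]).delete_bucket)   # True
print(parser.parse_args(["--delete_bucket", "False"]).delete_bucket)  # True: bool("False") is truthy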
tests/system/_helpers.py

Lines changed: 5 additions & 1 deletion

@@ -111,8 +111,12 @@ def delete_bucket(bucket):
     retry(bucket.delete)(force=True)
 
 
-def await_config_changes_propagate(sec=3):
+def await_config_changes_propagate(sec=12):
     # Changes to the bucket will be readable immediately after writing,
     # but configuration changes may take time to propagate.
     # See https://cloud.google.com/storage/docs/json_api/v1/buckets/patch
+    #
+    # The default was changed from 3 to 12 in May 2023 due to changes in bucket
+    # metadata handling. Note that the documentation recommends waiting "30
+    # seconds".
     time.sleep(sec)

tests/system/conftest.py

Lines changed: 13 additions & 3 deletions

@@ -44,6 +44,8 @@
     "parent/child/other/file32.txt",
 ]
 
+ebh_bucket_iteration = 0
+
 
 @pytest.fixture(scope="session")
 def storage_client():
@@ -165,12 +167,20 @@ def signing_bucket(storage_client, signing_bucket_name):
     _helpers.delete_bucket(bucket)
 
 
-@pytest.fixture(scope="session")
+@pytest.fixture(scope="function")
 def default_ebh_bucket_name():
-    return _helpers.unique_name("gcp-systest-default-ebh")
+    # Keep track of how many ebh buckets have been created so we can get a
+    # clean one each rerun. "unique_name" is unique per test iteration, not
+    # per test rerun.
+    global ebh_bucket_iteration
+    ebh_bucket_iteration += 1
+    return _helpers.unique_name("gcp-systest-default-ebh") + "-{}".format(
+        ebh_bucket_iteration
+    )
 
 
-@pytest.fixture(scope="session")
+# ebh_bucket/name are not scope=session because the bucket is modified in test.
+@pytest.fixture(scope="function")
 def default_ebh_bucket(storage_client, default_ebh_bucket_name):
     bucket = storage_client.bucket(default_ebh_bucket_name)
     bucket.default_event_based_hold = True

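The conftest change switches the event-based-hold fixtures to function scope and appends a module-level counter to the bucket name, so each fixture invocation, including reruns of the same test, gets a clean bucket. A self-contained sketch of that counter-plus-fixture pattern (hypothetical fixture and test names; runnable with pytest alone):

import pytest

_iteration = 0  # module-level counter, mirroring ebh_bucket_iteration above


@pytest.fixture(scope="function")
def fresh_bucket_name():
    # A function-scoped fixture runs once per test invocation (and per rerun),
    # so bumping the counter here yields a distinct suffix each time.
    global _iteration
    _iteration += 1
    return "gcp-systest-default-ebh-{}".format(_iteration)


def test_fresh_name_has_counter_suffix(fresh_bucket_name):
    assert fresh_bucket_name.startswith("gcp-systest-default-ebh-")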