
Commit 54663c0

1 parent c2eff05 commit 54663c0

3 files changed: +64 -30 lines changed

google/cloud/storage/transfer_manager.py

Lines changed: 1 addition & 1 deletion
@@ -810,7 +810,7 @@ def download_chunks_concurrently(
     with pool_class(max_workers=max_workers) as executor:
         cursor = forced_start
         # forced_end is zero-indexed here, so add 1
-        end = min(forced_end+1, blob.size) if forced_end else blob.size
+        end = min(forced_end + 1, blob.size) if forced_end else blob.size
         while cursor < end:
             start = cursor
             cursor = min(cursor + chunk_size, end)
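The change itself is whitespace-only (Black-style spacing around the `+`), but the line it touches carries the subtle bit of this function: the `end` offset passed via `download_kwargs` is inclusive and zero-indexed, so the exclusive loop bound is `forced_end + 1`, capped at `blob.size`. A standalone sketch of that arithmetic; `plan_chunks` is a hypothetical name used here for illustration, not part of the library:

def plan_chunks(blob_size, chunk_size, forced_start=0, forced_end=None):
    """Yield (start, end) byte ranges; forced_end is inclusive and zero-indexed."""
    cursor = forced_start
    # forced_end is zero-indexed here, so add 1 for the exclusive bound.
    end = min(forced_end + 1, blob_size) if forced_end else blob_size
    while cursor < end:
        start = cursor
        cursor = min(cursor + chunk_size, end)
        yield (start, cursor)

# A 10-byte blob in 4-byte chunks, ending at inclusive offset 8,
# yields a final 1-byte chunk: the edge case the system test covers.
assert list(plan_chunks(10, 4, forced_end=8)) == [(0, 4), (4, 8), (8, 9)]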

tests/system/test_transfer_manager.py

Lines changed: 57 additions & 27 deletions
@@ -28,7 +28,9 @@ def test_upload_many(shared_bucket, file_data, blobs_to_delete):
         (file_data["simple"]["path"], shared_bucket.blob("simple2")),
     ]

-    results = transfer_manager.upload_many(FILE_BLOB_PAIRS, worker_type=transfer_manager.PROCESS)
+    results = transfer_manager.upload_many(
+        FILE_BLOB_PAIRS, worker_type=transfer_manager.PROCESS
+    )
     assert results == [None, None]

     blobs = shared_bucket.list_blobs()
@@ -38,13 +40,17 @@ def test_upload_many(shared_bucket, file_data, blobs_to_delete):
     assert len(blobs_to_delete) == 2


-def test_upload_many_with_threads_and_file_objs(shared_bucket, file_data, blobs_to_delete):
+def test_upload_many_with_threads_and_file_objs(
+    shared_bucket, file_data, blobs_to_delete
+):
     FILE_BLOB_PAIRS = [
         (open(file_data["simple"]["path"], "rb"), shared_bucket.blob("simple1")),
         (open(file_data["simple"]["path"], "rb"), shared_bucket.blob("simple2")),
     ]

-    results = transfer_manager.upload_many(FILE_BLOB_PAIRS, worker_type=transfer_manager.THREAD)
+    results = transfer_manager.upload_many(
+        FILE_BLOB_PAIRS, worker_type=transfer_manager.THREAD
+    )
     assert results == [None, None]

     blobs = shared_bucket.list_blobs()
@@ -78,10 +84,15 @@ def test_upload_many_skip_if_exists(
 def test_download_many(listable_bucket):
     blobs = list(listable_bucket.list_blobs())
     with tempfile.TemporaryDirectory() as tempdir:
-        filenames = [os.path.join(tempdir, "file_a.txt"), os.path.join(tempdir, "file_b.txt")]
+        filenames = [
+            os.path.join(tempdir, "file_a.txt"),
+            os.path.join(tempdir, "file_b.txt"),
+        ]
         BLOB_FILE_PAIRS = zip(blobs[:2], filenames)

-        results = transfer_manager.download_many(BLOB_FILE_PAIRS, worker_type=transfer_manager.PROCESS)
+        results = transfer_manager.download_many(
+            BLOB_FILE_PAIRS, worker_type=transfer_manager.PROCESS
+        )
         assert results == [None, None]
         for count, filename in enumerate(filenames):
             with open(filename, "rb") as fp:
@@ -94,47 +105,66 @@ def test_download_many_with_threads_and_file_objs(listable_bucket):
     tempfiles = [file_a, file_b]
     BLOB_FILE_PAIRS = zip(blobs[:2], tempfiles)

-    results = transfer_manager.download_many(BLOB_FILE_PAIRS, worker_type=transfer_manager.THREAD)
+    results = transfer_manager.download_many(
+        BLOB_FILE_PAIRS, worker_type=transfer_manager.THREAD
+    )
     assert results == [None, None]
     for fp in tempfiles:
         assert fp.tell() != 0


 def test_download_chunks_concurrently(shared_bucket, file_data):
     # Upload a big file
-    source_file = file_data['big']
-    upload_blob = shared_bucket.blob('chunky_file')
-    upload_blob.upload_from_filename(source_file['path'])
+    source_file = file_data["big"]
+    upload_blob = shared_bucket.blob("chunky_file")
+    upload_blob.upload_from_filename(source_file["path"])
     upload_blob.reload()
     size = upload_blob.size
     chunk_size = size // 32
     midpoint = size // 2

     # Get a fresh blob obj w/o metadata for testing purposes
-    download_blob = shared_bucket.blob('chunky_file')
+    download_blob = shared_bucket.blob("chunky_file")

     with tempfile.TemporaryDirectory() as tempdir:
-        full_filename = os.path.join(tempdir, 'chunky_file')
-        transfer_manager.download_chunks_concurrently(download_blob, full_filename, chunk_size=chunk_size)
-        with open(full_filename, 'rb') as file_obj:
-            assert _base64_md5hash(file_obj) == source_file['hash']
+        full_filename = os.path.join(tempdir, "chunky_file")
+        transfer_manager.download_chunks_concurrently(
+            download_blob, full_filename, chunk_size=chunk_size
+        )
+        with open(full_filename, "rb") as file_obj:
+            assert _base64_md5hash(file_obj) == source_file["hash"]

         # Now test for case where last chunk is exactly 1 byte.
-        trailing_chunk_filename = os.path.join(tempdir, 'chunky_file')
-        transfer_manager.download_chunks_concurrently(download_blob, trailing_chunk_filename, chunk_size=size-1)
-        with open(trailing_chunk_filename, 'rb') as file_obj:
-            assert _base64_md5hash(file_obj) == source_file['hash']
+        trailing_chunk_filename = os.path.join(tempdir, "chunky_file")
+        transfer_manager.download_chunks_concurrently(
+            download_blob, trailing_chunk_filename, chunk_size=size - 1
+        )
+        with open(trailing_chunk_filename, "rb") as file_obj:
+            assert _base64_md5hash(file_obj) == source_file["hash"]

         # Also test the start and end handling, and threaded mode.
-        first_half_filename = os.path.join(tempdir, 'chunky_file_half_a')
-        transfer_manager.download_chunks_concurrently(download_blob, first_half_filename, chunk_size=chunk_size, download_kwargs={'end': midpoint-1})
-        second_half_filename = os.path.join(tempdir, 'chunky_file_half_b')
-        transfer_manager.download_chunks_concurrently(download_blob, second_half_filename, chunk_size=chunk_size, download_kwargs={'start': midpoint}, worker_type=transfer_manager.THREAD)
-
-        joined_filename = os.path.join(tempdir, 'chunky_file_joined')
-        with open(joined_filename, 'wb') as joined, open(first_half_filename, 'rb') as half_a, open(second_half_filename, 'rb') as half_b:
+        first_half_filename = os.path.join(tempdir, "chunky_file_half_a")
+        transfer_manager.download_chunks_concurrently(
+            download_blob,
+            first_half_filename,
+            chunk_size=chunk_size,
+            download_kwargs={"end": midpoint - 1},
+        )
+        second_half_filename = os.path.join(tempdir, "chunky_file_half_b")
+        transfer_manager.download_chunks_concurrently(
+            download_blob,
+            second_half_filename,
+            chunk_size=chunk_size,
+            download_kwargs={"start": midpoint},
+            worker_type=transfer_manager.THREAD,
+        )
+
+        joined_filename = os.path.join(tempdir, "chunky_file_joined")
+        with open(joined_filename, "wb") as joined, open(
+            first_half_filename, "rb"
+        ) as half_a, open(second_half_filename, "rb") as half_b:
             joined.write(half_a.read())
             joined.write(half_b.read())

-        with open(joined_filename, 'rb') as file_obj:
-            assert _base64_md5hash(file_obj) == source_file['hash']
+        with open(joined_filename, "rb") as file_obj:
+            assert _base64_md5hash(file_obj) == source_file["hash"]
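Beyond the reformatting, the rewrapped calls above double as a usage recipe for ranged downloads: inclusive start and end byte offsets pass through download_kwargs, so two calls can fetch disjoint halves of one object, and concatenating the halves reproduces the original. A minimal sketch using only calls shown in this diff; the bucket, object, and output names are hypothetical:

from google.cloud import storage
from google.cloud.storage import transfer_manager

client = storage.Client()
blob = client.bucket("my-bucket").blob("big-object")  # hypothetical names
blob.reload()  # populate blob.size
midpoint = blob.size // 2

# First half: "end" is an inclusive, zero-indexed offset, hence midpoint - 1.
transfer_manager.download_chunks_concurrently(
    blob, "half_a", chunk_size=blob.size // 32, download_kwargs={"end": midpoint - 1}
)
# Second half, using a thread pool instead of the default process pool.
transfer_manager.download_chunks_concurrently(
    blob,
    "half_b",
    chunk_size=blob.size // 32,
    download_kwargs={"start": midpoint},
    worker_type=transfer_manager.THREAD,
)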

tests/unit/test_transfer_manager.py

Lines changed: 6 additions & 2 deletions
@@ -695,9 +695,13 @@ def test__reduce_client():
 def test__call_method_on_maybe_pickled_blob():
     blob = mock.Mock(spec=Blob)
     blob.download_to_file.return_value = "SUCCESS"
-    result = transfer_manager._call_method_on_maybe_pickled_blob(blob, "download_to_file")
+    result = transfer_manager._call_method_on_maybe_pickled_blob(
+        blob, "download_to_file"
+    )
     assert result == "SUCCESS"

     pickled_blob = pickle.dumps(_PickleableMockBlob())
-    result = transfer_manager._call_method_on_maybe_pickled_blob(pickled_blob, "download_to_file")
+    result = transfer_manager._call_method_on_maybe_pickled_blob(
+        pickled_blob, "download_to_file"
+    )
     assert result == "SUCCESS"
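For context on what this test exercises: with a process pool the blob argument crosses a pickle boundary, while with a thread pool it arrives as a live object, so the helper must accept both forms. A plausible reading of that dispatch, written as an illustration rather than the library's verbatim source:

import pickle

def _call_method_on_maybe_pickled_blob(maybe_pickled_blob, method_name, *args, **kwargs):
    # Process-pool workers receive pickled bytes; thread-pool workers get the object.
    if isinstance(maybe_pickled_blob, bytes):
        blob = pickle.loads(maybe_pickled_blob)
    else:
        blob = maybe_pickled_blob
    return getattr(blob, method_name)(*args, **kwargs)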
