Skip to content

Commit ee0af9b

Browse files
authored
[Storage][Blob][Fix]fix get_block_list bug (Azure#18751)
Azure#16314
1 parent 88a742c commit ee0af9b

File tree

3 files changed

+22
-5
lines changed

3 files changed

+22
-5
lines changed

sdk/storage/azure-storage-blob/azure/storage/blob/_models.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from azure.core.exceptions import HttpResponseError
1313
from ._generated.models import ArrowField
1414

15-
from ._shared import decode_base64_to_text
15+
from ._shared import decode_base64_to_bytes
1616
from ._shared.response_handlers import return_context_and_deserialized, process_storage_error
1717
from ._shared.models import DictMixin, get_enum_value
1818
from ._generated.models import Logging as GeneratedLogging
@@ -739,7 +739,15 @@ def __init__(self, block_id, state=BlockState.Latest):
739739

740740
@classmethod
741741
def _from_generated(cls, generated):
742-
block = cls(decode_base64_to_text(generated.name))
742+
try:
743+
decoded_bytes = decode_base64_to_bytes(generated.name)
744+
block_id = decoded_bytes.decode('utf-8')
745+
# This fixes a bug: when large blocks are uploaded through upload_blob, the block id isn't base64 encoded,
746+
# while the service expects block ids to be base64 encoded. So if we cannot base64 decode the retrieved
747+
# block_id, it means we didn't base64 encode it when staging the block, and we use the returned block_id directly.
748+
except UnicodeDecodeError:
749+
block_id = generated.name
750+
block = cls(block_id)
743751
block.size = generated.size
744752
return block
745753

sdk/storage/azure-storage-blob/tests/test_large_block_blob.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,10 @@ def _setup(self, storage_account, key):
5151
self.container_name = self.get_resource_name('utcontainer')
5252

5353
if self.is_live:
54-
self.bsc.create_container(self.container_name)
54+
try:
55+
self.bsc.create_container(self.container_name)
56+
except:
57+
pass
5558

5659
def _teardown(self, file_name):
5760
if path.isfile(file_name):
@@ -170,9 +173,12 @@ def test_create_large_blob_from_path(self, resource_group, location, storage_acc
170173

171174
# Act
172175
with open(FILE_PATH, 'rb') as stream:
173-
blob.upload_blob(stream, max_concurrency=2)
176+
blob.upload_blob(stream, max_concurrency=2, overwrite=True)
177+
178+
block_list = blob.get_block_list()
174179

175180
# Assert
181+
self.assertIsNot(len(block_list), 0)
176182
self.assertBlobEqual(self.container_name, blob_name, data)
177183
self._teardown(FILE_PATH)
178184

sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -199,9 +199,12 @@ async def test_create_large_blob_from_path_async(self, resource_group, location,
199199
# Act
200200
try:
201201
with open(FILE_PATH, 'rb') as stream:
202-
await blob.upload_blob(stream, max_concurrency=2)
202+
await blob.upload_blob(stream, max_concurrency=2, overwrite=True)
203+
204+
block_list = await blob.get_block_list()
203205

204206
# Assert
207+
self.assertIsNot(len(block_list), 0)
205208
await self.assertBlobEqual(self.container_name, blob_name, data)
206209
finally:
207210
self._teardown(FILE_PATH)

0 commit comments

Comments
 (0)