I am uploading a file to Azure Blob Storage using Python. I want to do the same with put_block: split the file into small parts, upload them, and then combine them, so that the upload does not fail for large files. Can someone help me with this? I tried multiple solutions from Stack Overflow, but nothing is working.
from azure.storage.blob import BlockBlobService

def uploadFile():
    accountKey = "account-key"
    accountName = "account-name"
    containerName = "container-name"
    blobService = BlockBlobService(account_name=accountName, account_key=accountKey)
    blobService.create_container(containerName)
    # Uploads the whole file in a single call
    blobService.create_blob_from_path(containerName, "image1.jpg", "./images/python.jpg")

uploadFile()
I also tried this, but it is not working either:
def upload():
    blob_service.create_container(container_name, None, None, False)
    #blob_service.put_block_blob(container_name, blob_name, '', 'BlockBlob')
    chunk_size = 65536
    block_ids = []
    index = 0
    with open(file_path, 'rb') as f:
        while True:
            data = f.read(chunk_size)
            if data:
                length = len(data)
                block_id = base64.b64encode(bytes(index))
                blob_service.put_block(container_name, blob_name, data, block_id)
                block_ids.append(block_id)
                index += 1
            else:
                break
    resp = blob_service.put_block_list(container_name, blob_name, block_ids)
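For reference, one likely culprit in the second attempt is the block ID: in Python 3, bytes(index) produces a zero-filled byte string whose length grows with the index, while the service requires every block ID of a blob to be a Base64-encoded string of the same length; put_block_list on BlockBlobService also expects BlobBlock objects rather than raw ID strings. Below is a minimal sketch along those lines, assuming the legacy azure-storage-blob package that ships BlockBlobService (not the newer 12.x SDK); the account, container, and file names are placeholders.

import base64
from azure.storage.blob import BlockBlobService
from azure.storage.blob.models import BlobBlock

def upload_in_blocks(account_name, account_key, container_name, blob_name, file_path):
    blob_service = BlockBlobService(account_name=account_name, account_key=account_key)
    blob_service.create_container(container_name)

    chunk_size = 4 * 1024 * 1024   # 4 MiB per block
    block_list = []
    index = 0
    with open(file_path, 'rb') as f:
        while True:
            data = f.read(chunk_size)
            if not data:
                break
            # All block IDs must be Base64-encoded and of equal length,
            # so encode a fixed-width counter
            block_id = base64.b64encode('{0:08d}'.format(index).encode('utf-8')).decode('utf-8')
            blob_service.put_block(container_name, blob_name, data, block_id)
            block_list.append(BlobBlock(id=block_id))
            index += 1

    # Commit the uploaded blocks in order to assemble the final blob
    blob_service.put_block_list(container_name, blob_name, block_list)

upload_in_blocks("account-name", "account-key", "container-name",
                 "image1.jpg", "./images/python.jpg")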