upload_to_blob_legacy.py
"""
Upload data to blob storage w/ legacy package (tested with version 2.1.0)
This script is for reference only and will not be maintained.
Make sure to set the environment variables before running:
STORAGE_ACCOUNT_NAME
STORAGE_CONTAINER_NAME_TRAINDATA
STORAGE_ACCOUNT_KEY
"""
import argparse
import glob
import os

from azure.storage.blob import BlockBlobService, PublicAccess


def arg_parse():
    """
    Parse command-line arguments.
    """
    parser = argparse.ArgumentParser(description='This script uploads a directory to Azure Blob Storage.')
    parser.add_argument("--dir", dest='directory', help="The directory to upload")
    return parser.parse_args()

args = arg_parse()

ACCOUNT = os.getenv("STORAGE_ACCOUNT_NAME", "")
CONTAINER = os.getenv("STORAGE_CONTAINER_NAME_RAWDATA", "")
KEY = os.getenv("STORAGE_ACCOUNT_KEY", "")
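
# Fail fast if a required environment variable is missing (an added sketch,
# not part of the original script; adjust or remove as needed).
for name, value in (("STORAGE_ACCOUNT_NAME", ACCOUNT),
                    ("STORAGE_CONTAINER_NAME_RAWDATA", CONTAINER),
                    ("STORAGE_ACCOUNT_KEY", KEY)):
    if not value:
        raise SystemExit("Environment variable %s is not set" % name)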
# Create the BlockBlobService that is used to call the Blob service for the storage account
block_blob_service = BlockBlobService(account_name=ACCOUNT, account_key=KEY)

# Create a container
container_name = CONTAINER
block_blob_service.create_container(container_name)

# Set the permission so the blobs are public
block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)
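# Note: PublicAccess.Container allows anonymous read access to the container
# and every blob in it, so avoid it for data that must remain private.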

for filename in glob.iglob(os.path.join(args.directory, '**', '*'), recursive=True):
    if os.path.isfile(filename):
        print('Uploading ', filename)
        # Upload the file, using its local path as the blob name (so blobs keep
        # the directory prefix from --dir)
        block_blob_service.create_blob_from_path(container_name, filename, filename)

# Check that the files uploaded correctly to Blob Storage
generator = block_blob_service.list_blobs(container_name)
for blob in generator:
    print("Blob name in Azure: " + blob.name)