
Commit 57215af

Juliehzl and Ubuntu authored
{0.5.0b1} Add new api support (#39)
* add new SDK
* add new SDK
* add release note

Co-authored-by: Ubuntu <zunli@zuhvm.etyrgwjlsqfeplvzbzef2qjagg.cbnx.internal.cloudapp.net>
1 parent abfd2ce commit 57215af

202 files changed

Lines changed: 87594 additions & 25 deletions

Some content is hidden

Large commits have some content hidden by default.

README.rst

Lines changed: 10 additions & 0 deletions
@@ -17,6 +17,16 @@ Handles multi-API versions of Azure Storage Data Plane originally from https://g
 
 Change Log
 ----------
+0.5.0
++++++
+* Add new api support for azure-multiapi-storagev2:
+  - blob
+    - v2020-02-10
+  - filedatalake
+    - v2020-02-10
+  - fileshare
+    - v2020-02-10
+
 0.4.1
 +++++
 * Add tags support for blob
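
For context on how the new versions are consumed: each changelog entry above maps to a versioned Python subpackage. A minimal sketch, assuming the namespace follows the azure.multiapi.storagev2.<service>.<version> layout implied by the changelog (the exact module paths are hidden in this commit view) and using a placeholder connection string:

    # Sketch only: the versioned namespace below is inferred from the
    # changelog entry, not confirmed by this commit view.
    from azure.multiapi.storagev2.blob.v2020_02_10 import BlobServiceClient

    # Placeholder connection string; substitute real account credentials.
    conn_str = (
        "DefaultEndpointsProtocol=https;AccountName=<account>;"
        "AccountKey=<key>;EndpointSuffix=core.windows.net"
    )
    service = BlobServiceClient.from_connection_string(conn_str)
    print(service.api_version)  # expected to report the pinned service version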
Lines changed: 229 additions & 0 deletions
@@ -0,0 +1,229 @@
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os

from typing import Union, Iterable, AnyStr, IO, Any, Dict  # pylint: disable=unused-import
from ._version import VERSION
from ._blob_client import BlobClient
from ._container_client import ContainerClient
from ._blob_service_client import BlobServiceClient
from ._lease import BlobLeaseClient
from ._download import StorageStreamDownloader
from ._quick_query_helper import BlobQueryReader
from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
from ._shared.policies import ExponentialRetry, LinearRetry
from ._shared.response_handlers import PartialBatchErrorException
from ._shared.models import (
    LocationMode,
    ResourceTypes,
    AccountSasPermissions,
    StorageErrorCode,
    UserDelegationKey
)
from ._generated.models import (
    RehydratePriority
)
from ._models import (
    BlobType,
    BlockState,
    StandardBlobTier,
    PremiumPageBlobTier,
    SequenceNumberAction,
    PublicAccess,
    BlobAnalyticsLogging,
    Metrics,
    RetentionPolicy,
    StaticWebsite,
    CorsRule,
    ContainerProperties,
    BlobProperties,
    FilteredBlob,
    LeaseProperties,
    ContentSettings,
    CopyProperties,
    BlobBlock,
    PageRange,
    AccessPolicy,
    ContainerSasPermissions,
    BlobSasPermissions,
    CustomerProvidedEncryptionKey,
    ContainerEncryptionScope,
    BlobQueryError,
    DelimitedJsonDialect,
    DelimitedTextDialect,
    ArrowDialect,
    ArrowType,
    ObjectReplicationPolicy,
    ObjectReplicationRule
)
from ._list_blobs_helper import BlobPrefix

__version__ = VERSION


def upload_blob_to_url(
        blob_url,  # type: str
        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> Dict[str, Any]
    """Upload data to a given URL

    The data will be uploaded as a block blob.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param data:
        The data to upload. This can be bytes, text, an iterable or a file-like object.
    :type data: bytes or str or Iterable
    :param credential:
        The credentials with which to authenticate. This is optional if the
        blob URL already has a SAS token. The value can be a SAS token string, an account
        shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the URL already has a SAS token, specifying an explicit credential will take priority.
    :keyword bool overwrite:
        Whether the blob to be uploaded should overwrite the current data.
        If True, upload_blob_to_url will overwrite any existing data. If set to False, the
        operation will fail with a ResourceExistsError.
    :keyword int max_concurrency:
        The number of parallel connections with which to upload.
    :keyword int length:
        Number of bytes to read from the stream. This is optional, but
        should be supplied for optimal performance.
    :keyword dict(str,str) metadata:
        Name-value pairs associated with the blob as metadata.
    :keyword bool validate_content:
        If true, calculates an MD5 hash for each chunk of the blob. The storage
        service checks the hash of the content that has arrived with the hash
        that was sent. This is primarily valuable for detecting bitflips on
        the wire if using http instead of https as https (the default) will
        already validate. Note that this MD5 hash is not stored with the
        blob. Also note that if enabled, the memory-efficient upload algorithm
        will not be used, because computing the MD5 hash requires buffering
        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
    :keyword str encoding:
        Encoding to use if text is supplied as input. Defaults to UTF-8.
    :returns: Blob-updated property dict (Etag and last modified)
    :rtype: dict(str, Any)
    """
    with BlobClient.from_blob_url(blob_url, credential=credential) as client:
        return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)


def _download_to_stream(client, handle, **kwargs):
    """Download data to specified open file-handle."""
    stream = client.download_blob(**kwargs)
    stream.readinto(handle)


def download_blob_from_url(
        blob_url,  # type: str
        output,  # type: str
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> None
    """Download the contents of a blob to a local file or stream.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param output:
        Where the data should be downloaded to. This could be either a file path to write to,
        or an open IO handle to write to.
    :type output: str or writable stream.
    :param credential:
        The credentials with which to authenticate. This is optional if the
        blob URL already has a SAS token or the blob is public. The value can be a SAS token string,
        an account shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the URL already has a SAS token, specifying an explicit credential will take priority.
    :keyword bool overwrite:
        Whether the local file should be overwritten if it already exists. The default value is
        `False` - in which case a ValueError will be raised if the file already exists. If set to
        `True`, an attempt will be made to write to the existing file. If a stream handle is passed
        in, this value is ignored.
    :keyword int max_concurrency:
        The number of parallel connections with which to download.
    :keyword int offset:
        Start of byte range to use for downloading a section of the blob.
        Must be set if length is provided.
    :keyword int length:
        Number of bytes to read from the stream. This is optional, but
        should be supplied for optimal performance.
    :keyword bool validate_content:
        If true, calculates an MD5 hash for each chunk of the blob. The storage
        service checks the hash of the content that has arrived with the hash
        that was sent. This is primarily valuable for detecting bitflips on
        the wire if using http instead of https as https (the default) will
        already validate. Note that this MD5 hash is not stored with the
        blob. Also note that if enabled, the memory-efficient upload algorithm
        will not be used, because computing the MD5 hash requires buffering
        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
    :rtype: None
    """
    overwrite = kwargs.pop('overwrite', False)
    with BlobClient.from_blob_url(blob_url, credential=credential) as client:
        if hasattr(output, 'write'):
            _download_to_stream(client, output, **kwargs)
        else:
            if not overwrite and os.path.isfile(output):
                raise ValueError("The file '{}' already exists.".format(output))
            with open(output, 'wb') as file_handle:
                _download_to_stream(client, file_handle, **kwargs)


__all__ = [
    'upload_blob_to_url',
    'download_blob_from_url',
    'BlobServiceClient',
    'ContainerClient',
    'BlobClient',
    'BlobType',
    'BlobLeaseClient',
    'StorageErrorCode',
    'UserDelegationKey',
    'ExponentialRetry',
    'LinearRetry',
    'LocationMode',
    'BlockState',
    'StandardBlobTier',
    'PremiumPageBlobTier',
    'SequenceNumberAction',
    'PublicAccess',
    'BlobAnalyticsLogging',
    'Metrics',
    'RetentionPolicy',
    'StaticWebsite',
    'CorsRule',
    'ContainerProperties',
    'BlobProperties',
    'BlobPrefix',
    'FilteredBlob',
    'LeaseProperties',
    'ContentSettings',
    'CopyProperties',
    'BlobBlock',
    'PageRange',
    'AccessPolicy',
    'ContainerSasPermissions',
    'BlobSasPermissions',
    'ResourceTypes',
    'AccountSasPermissions',
    'StorageStreamDownloader',
    'CustomerProvidedEncryptionKey',
    'RehydratePriority',
    'generate_account_sas',
    'generate_container_sas',
    'generate_blob_sas',
    'PartialBatchErrorException',
    'ContainerEncryptionScope',
    'BlobQueryError',
    'DelimitedJsonDialect',
    'DelimitedTextDialect',
    'ArrowDialect',
    'ArrowType',
    'BlobQueryReader',
    'ObjectReplicationPolicy',
    'ObjectReplicationRule'
]
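
Taken together, upload_blob_to_url and download_blob_from_url give one-shot transfers against a blob URL without manually constructing a client. A minimal usage sketch, with a placeholder SAS URL and file name (neither is from the commit):

    # Assuming the module above is importable as the versioned blob package
    # __init__ (its path is hidden in this commit view), e.g.:
    # from azure.multiapi.storagev2.blob.v2020_02_10 import (
    #     upload_blob_to_url, download_blob_from_url)

    # Placeholder URL; the SAS token supplies the credential, so credential=None.
    sas_url = "https://<account>.blob.core.windows.net/<container>/hello.txt?<sas-token>"

    # Upload text as a block blob, replacing any existing blob at that URL.
    upload_blob_to_url(sas_url, "hello world", overwrite=True)

    # Download to a local file; with the default overwrite=False this raises
    # ValueError if hello_local.txt already exists.
    download_blob_from_url(sas_url, "hello_local.txt")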
