Skip to content

Commit

Permalink
update 3.23.9
Browse files Browse the repository at this point in the history
  • Loading branch information
zhangtingwei998 committed Sep 20, 2023
1 parent 650fc1b commit a185839
Show file tree
Hide file tree
Showing 11 changed files with 106 additions and 54 deletions.
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,14 @@
Version 3.23.9

New Features:

1. The setBucketLifecycle interface supports setting the expiration time of fragments in the bucket.

Resolved Issues:

1. Fixed a bug where a "read of closed file" error could be reported when retrying an upload

-------------------------------------------------------------------------------------------------
Version 3.23.5

New Features:
Expand Down
11 changes: 10 additions & 1 deletion README_CN.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,13 @@
Version 3.23.5
Version 3.23.9
新特性:
1. setBucketLifecycle接口支持设置桶碎片的过期时间。

修复问题：

1. 修复上传重试时可能报read of closed file问题。

-------------------------------------------------------------------------------------------------
Version 3.23.5
新特性:
1. getBucketStorageInfo接口增加查询标准、归档、低频三种类型的容量统计
2. 用户可以在发送请求时添加任意自定义头域
Expand Down
3 changes: 2 additions & 1 deletion src/obs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from obs.model import NoncurrentVersionExpiration, GetObjectHeader, HeadPermission, Lifecycle, Notification
from obs.model import TopicConfiguration, FunctionGraphConfiguration, FilterRule, Replication, ReplicationRule
from obs.model import Options, PutObjectHeader, AppendObjectHeader, AppendObjectContent, RedirectAllRequestTo
from obs.model import Redirect, RoutingRule, Tag, TagInfo, Transition, NoncurrentVersionTransition, Rule, Versions
from obs.model import Redirect, RoutingRule, Tag, TagInfo, Transition, NoncurrentVersionTransition, Rule, Versions, AbortIncompleteMultipartUpload
from obs.model import Object, WebsiteConfiguration, Logging, CompleteMultipartUploadRequest, DeleteObjectsRequest
from obs.model import ListMultipartUploadsRequest, GetObjectRequest, UploadFileHeader, Payer
from obs.model import ExtensionHeader, FetchStatus, BucketAliasModel, ListBucketAliasModel
Expand Down Expand Up @@ -56,6 +56,7 @@
'IndexDocument',
'Expiration',
'NoncurrentVersionExpiration',
'AbortIncompleteMultipartUpload',
'GetObjectHeader',
'HeadPermission',
'Lifecycle',
Expand Down
52 changes: 21 additions & 31 deletions src/obs/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -621,6 +621,7 @@ def _rename_request_headers(self, headers, extension_headers, method):
k = util.encode_item(k, ' ;/?:@&=+$,')

new_headers = self._rename_request_headers_handle(k, v, new_headers)
if isinstance(extension_headers, dict):
for k, v in extension_headers.items():
new_headers = self._rename_request_headers_handle(k, v, new_headers)
return new_headers
Expand Down Expand Up @@ -1278,7 +1279,7 @@ def _getApiVersion(self, bucketName=''):
return const.V2_SIGNATURE, res

@funcCache
def listBuckets(self, isQueryLocation=True, extensionHeaders=None, bucketType=None, maxKeys=100, marker=None):
def listBuckets(self, isQueryLocation=True, extensionHeaders=None, bucketType=None, maxKeys=None, marker=None):
"""
Obtain a bucket list.
:param isQueryLocation: Whether to query the bucket location.
Expand All @@ -1289,7 +1290,9 @@ def listBuckets(self, isQueryLocation=True, extensionHeaders=None, bucketType=No
If this parameter is left blank, both buckets and parallel file systems will be listed.
:return: A bucket list
"""
pathArgs = {'marker':marker, 'max-keys':maxKeys}
pathArgs = None
if maxKeys or marker:
pathArgs = {'marker': marker, 'max-keys': maxKeys}
if self.is_cname:
raise Exception('listBuckets is not allowed in custom domain mode')
return self._make_get_request(methodName='listBuckets', pathArgs=pathArgs, extensionHeaders=extensionHeaders,
Expand Down Expand Up @@ -1613,18 +1616,15 @@ def _prepare_file_notifier_and_entity(self, offset, file_size, headers, progress
notifier = progress.ProgressNotifier(progressCallback, totalCount)
else:
notifier = progress.NONE_NOTIFIER
readable_object = self.gen_readable_object_from_file(file_path)
readable_object.seek(offset)
entity = util.get_entity_for_send_with_total_count(readable_object, totalCount, self.chunk_size, notifier)
entity = util.get_entity_for_send_with_total_count(file_path, totalCount, offset, self.chunk_size, notifier)
else:
totalCount = headers['contentLength']
if totalCount > 0 and progressCallback is not None:
readable = True
notifier = progress.ProgressNotifier(progressCallback, totalCount)
else:
notifier = progress.NONE_NOTIFIER
readable_object = self.gen_readable_object_from_file(file_path)
entity = util.get_entity_for_send_with_total_count(readable_object, totalCount, self.chunk_size, notifier)
entity = util.get_entity_for_send_with_total_count(file_path, totalCount, None, self.chunk_size, notifier)

return headers, readable, notifier, entity

Expand All @@ -1644,8 +1644,8 @@ def _prepare_content_notifier_and_entity(self, entity, headers, progressCallback
notifier = progress.ProgressNotifier(progressCallback,
totalCount) if totalCount > 0 and progressCallback is not None \
else progress.NONE_NOTIFIER
entity = util.get_entity_for_send_with_total_count(entity, totalCount, self.chunk_size, notifier,
autoClose)
entity = util.get_entity_for_send_with_total_count(read_able=entity, totalCount=totalCount, chunk_size=self.chunk_size, notifier=notifier,
auto_close=autoClose)

return entity, readable, chunkedMode, notifier

Expand Down Expand Up @@ -1728,8 +1728,9 @@ def putContent(self, bucketName, objectKey, content=None, metadata=None, headers
notifier = progress.ProgressNotifier(progressCallback,
totalCount) if totalCount > 0 and progressCallback \
is not None else progress.NONE_NOTIFIER
entity = util.get_entity_for_send_with_total_count(entity, totalCount, self.chunk_size, notifier,
autoClose)
entity = util.get_entity_for_send_with_total_count(read_able=entity, totalCount=totalCount,
chunk_size=self.chunk_size, notifier=notifier,
auto_close=autoClose)

notifier.start()
ret = self._make_put_request(bucketName, objectKey, headers=_headers, entity=entity,
Expand Down Expand Up @@ -1789,8 +1790,6 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,

headers = self._putFileHandleHeader(headers, size, objectKey, file_path)

readable_object = self.gen_readable_object_from_file(file_path)
metadata = self.add_metadata_from_content(metadata, headers, readable_object)
_headers = self.convertor.trans_put_object(metadata=metadata, headers=headers)
if const.CONTENT_LENGTH_HEADER not in _headers:
_headers[const.CONTENT_LENGTH_HEADER] = util.to_string(size)
Expand All @@ -1805,7 +1804,7 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
notifier = progress.NONE_NOTIFIER
readable = False

entity = util.get_entity_for_send_with_total_count(readable_object, totalCount, self.chunk_size, notifier)
entity = util.get_entity_for_send_with_total_count(file_path, totalCount, None, self.chunk_size, notifier)
try:
notifier.start()
ret = self._make_put_request(bucketName, objectKey, headers=_headers, entity=entity,
Expand All @@ -1815,13 +1814,6 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
self._generate_object_url(ret, bucketName, objectKey)
return ret

@staticmethod
def add_metadata_from_content(metadata, headers, content):
return metadata

def gen_readable_object_from_file(self, file_path):
return open(file_path, "rb")

@staticmethod
def _putFileHandleHeader(headers, size, objectKey, file_path):
headers['contentLength'] = util.to_long(headers.get('contentLength'))
Expand Down Expand Up @@ -1920,9 +1912,7 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i

readable, notifier = self._prepare_upload_part_notifier(checked_file_part_info["partSize"],
progressCallback, readable)
readable_object = open(checked_file_part_info["file_path"], "rb")
readable_object.seek(checked_file_part_info["offset"])
entity = util.get_entity_for_send_with_total_count(readable_object, checked_file_part_info["partSize"],
entity = util.get_entity_for_send_with_total_count(checked_file_part_info["file_path"], checked_file_part_info["partSize"], checked_file_part_info["offset"],
self.chunk_size, notifier)
else:
headers = {}
Expand All @@ -1939,8 +1929,9 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i
headers[const.CONTENT_LENGTH_HEADER] = util.to_string(partSize)
totalCount = util.to_long(partSize)
notifier = self._get_notifier_with_size(progressCallback, totalCount)
entity = util.get_entity_for_send_with_total_count(content, totalCount, self.chunk_size, notifier,
autoClose)
entity = util.get_entity_for_send_with_total_count(read_able=content, totalCount=totalCount,
chunk_size=self.chunk_size, notifier=notifier,
auto_close=autoClose)
else:
entity = content
if entity is None:
Expand Down Expand Up @@ -1989,9 +1980,8 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c

if notifier is not None and not isinstance(notifier, progress.NoneNotifier):
readable = True
readable_object = open(checked_file_part_info["file_path"], "rb")
readable_object.seek(checked_file_part_info["offset"])
entity = util.get_entity_for_send_with_total_count(readable_object, partSize, self.chunk_size, notifier)
entity = util.get_entity_for_send_with_total_count(checked_file_part_info["file_path"], partSize, checked_file_part_info["offset"],
self.chunk_size, notifier)
else:
if content is not None and hasattr(content, 'read') and callable(content.read):
readable = True
Expand All @@ -2002,8 +1992,8 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c
entity = util.get_readable_entity(content, self.chunk_size, notifier)
else:
headers[const.CONTENT_LENGTH_HEADER] = util.to_string(partSize)
entity = util.get_entity_for_send_with_total_count(content, util.to_long(partSize), self.chunk_size,
notifier)
entity = util.get_entity_for_send_with_total_count(read_able=content, totalCount=util.to_long(partSize),
chunk_size=self.chunk_size, notifier=notifier)
else:
entity = content
if entity is None:
Expand Down
2 changes: 1 addition & 1 deletion src/obs/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@
DEFAULT_TASK_NUM = 8
DEFAULT_TASK_QUEUE_SIZE = 20000

OBS_SDK_VERSION = '3.22.2'
OBS_SDK_VERSION = '3.23.9'

V2_META_HEADER_PREFIX = 'x-amz-meta-'
V2_HEADER_PREFIX = 'x-amz-'
Expand Down
18 changes: 15 additions & 3 deletions src/obs/convertor.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
from obs.model import DateTime, ListObjectsResponse, Content, CorsRule, ObjectVersionHead, ObjectVersion, \
ObjectDeleteMarker, DeleteObjectResult, NoncurrentVersionExpiration, NoncurrentVersionTransition, Rule, Condition, \
Redirect, FilterRule, FunctionGraphConfiguration, Upload, CompleteMultipartUploadResponse, ListPartsResponse, \
Grant, ReplicationRule, Transition, Grantee, BucketAliasModel, ListBucketAliasModel
Grant, ReplicationRule, Transition, Grantee, BucketAliasModel, ListBucketAliasModel, AbortIncompleteMultipartUpload

if const.IS_PYTHON2:
from urllib import unquote_plus, quote_plus
Expand Down Expand Up @@ -558,6 +558,11 @@ def trans_lifecycle(self, lifecycle):
ET.SubElement(noncurrentVersionExpirationEle, 'NoncurrentDays').text = util.to_string(
item['noncurrentVersionExpiration']['noncurrentDays'])

if item.get('abortIncompleteMultipartUpload') is not None and item['abortIncompleteMultipartUpload'].get(
'daysAfterInitiation') is not None:
abortIncompleteMultipartUploadEle = ET.SubElement(ruleEle, 'AbortIncompleteMultipartUpload')
ET.SubElement(abortIncompleteMultipartUploadEle, 'DaysAfterInitiation').text = util.to_string(
item['abortIncompleteMultipartUpload']['daysAfterInitiation'])
return ET.tostring(root, 'UTF-8')

def _trans_lifecycle_transition_expiration(self, item, ruleEle):
Expand Down Expand Up @@ -1047,7 +1052,9 @@ def _set_sse_header(self, sseHeader, headers=None, onlySseCHeader=False):
headers = {}
if isinstance(sseHeader, SseCHeader):
self._put_key_value(headers, self.ha.sse_c_header(), sseHeader.get('encryption'))
key = util.to_string(sseHeader.get('key'))
key = sseHeader.get('key')
if not isinstance(key, bytes):
key = util.to_string(sseHeader.get('key'))
self._put_key_value(headers, self.ha.sse_c_key_header(), util.base64_encode(key))
self._put_key_value(headers, self.ha.sse_c_key_md5_header(), util.base64_encode(util.md5_encode(key)))
elif isinstance(sseHeader, SseKmsHeader) and not onlySseCHeader:
Expand Down Expand Up @@ -1520,8 +1527,13 @@ def parseGetBucketLifecycle(self, xml, headers=None):
noncurrentDays=noncurrentDays)
noncurrentVersionTransitions.append(noncurrentVersionTransition)

abort_parts = rule.find('AbortIncompleteMultipartUpload')
abortIncompleteMultipartUpload = AbortIncompleteMultipartUpload(daysAfterInitiation=util.to_int(
abort_parts.find('DaysAfterInitiation').text)) if abort_parts is not None else None

rule = Rule(id=_id, prefix=prefix, status=status, expiration=expiration,
noncurrentVersionExpiration=noncurrentVersionExpiration)
noncurrentVersionExpiration=noncurrentVersionExpiration,
abortIncompleteMultipartUpload=abortIncompleteMultipartUpload)
rule.transition = transitions
rule.noncurrentVersionTransition = noncurrentVersionTransitions
entries.append(rule)
Expand Down
15 changes: 13 additions & 2 deletions src/obs/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
'IndexDocument',
'Expiration',
'NoncurrentVersionExpiration',
'AbortIncompleteMultipartUpload',
'GetObjectHeader',
'HeadPermission',
'Lifecycle',
Expand Down Expand Up @@ -831,20 +832,30 @@ def __init__(self, storageClass=None, noncurrentDays=None):
self.storageClass = storageClass


class AbortIncompleteMultipartUpload(BaseModel):
    """Lifecycle action that expires incomplete multipart uploads (fragments).

    daysAfterInitiation: number of days after a multipart upload is
    initiated before its uploaded fragments expire — presumably enforced
    server-side; TODO confirm against the OBS lifecycle API.
    """

    # Attribute whitelist consumed by BaseModel validation (attribute name -> accepted type).
    allowedAttr = dict(daysAfterInitiation=int)

    def __init__(self, daysAfterInitiation=None):
        super(AbortIncompleteMultipartUpload, self).__init__()
        self.daysAfterInitiation = daysAfterInitiation


class Rule(BaseModel):
    """A single rule in a bucket lifecycle configuration.

    Mirrors the server-side lifecycle rule elements: an optional rule id,
    a key-prefix filter, the rule status, the expiration/transition actions,
    and abortIncompleteMultipartUpload for expiring incomplete multipart
    uploads (fragments).
    """

    # Attribute whitelist consumed by BaseModel validation (attribute name ->
    # accepted type); 'transition' and 'noncurrentVersionTransition' accept
    # either a single instance or a list of instances.
    allowedAttr = {'id': BASESTRING, 'prefix': BASESTRING, 'status': BASESTRING, 'expiration': Expiration,
                   'noncurrentVersionExpiration': NoncurrentVersionExpiration,
                   'transition': [Transition, list], 'noncurrentVersionTransition': [NoncurrentVersionTransition, list],
                   'abortIncompleteMultipartUpload': AbortIncompleteMultipartUpload}

    def __init__(self, id=None, prefix=None, status=None, expiration=None, noncurrentVersionExpiration=None,
                 transition=None, noncurrentVersionTransition=None, abortIncompleteMultipartUpload=None):
        self.id = id
        self.prefix = prefix
        self.status = status
        self.expiration = expiration
        self.noncurrentVersionExpiration = noncurrentVersionExpiration
        self.transition = transition
        self.noncurrentVersionTransition = noncurrentVersionTransition
        self.abortIncompleteMultipartUpload = abortIncompleteMultipartUpload


class Upload(BaseModel):
Expand Down
20 changes: 11 additions & 9 deletions src/obs/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,21 +199,25 @@ def entity(conn):

def get_readable_entity_by_total_count(readable, totalCount, chunk_size=const.READ_ONCE_LENGTH, notifier=None,
                                       auto_close=True):
    """Build a send entity from an already-open readable object.

    Thin wrapper over get_entity_for_send_with_total_count that passes the
    object via the read_able keyword; with no file_path given, the generic
    helper uses the supplied object instead of opening a file itself.
    """
    return get_entity_for_send_with_total_count(totalCount=totalCount, chunk_size=chunk_size, notifier=notifier,
                                                auto_close=auto_close, read_able=readable)


def get_file_entity_by_total_count(file_path, totalCount, chunk_size=const.READ_ONCE_LENGTH, notifier=None):
    """Build a send entity that reads totalCount bytes from file_path.

    The path (not an open file object) is passed through, so the file is
    opened inside get_entity_for_send_with_total_count at send time — this
    avoids re-reading a closed file object when an upload is retried.
    """
    return get_entity_for_send_with_total_count(file_path, totalCount, None, chunk_size, notifier)


def get_entity_for_send_with_total_count(readable, totalCount=None, chunk_size=const.READ_ONCE_LENGTH, notifier=None,
auto_close=True):
def get_entity_for_send_with_total_count(file_path=None, totalCount=None, offset=None, chunk_size=const.READ_ONCE_LENGTH,
notifier=None, auto_close=True, read_able=None):
if notifier is None:
notifier = progress.NONE_NOTIFIER

def entity(conn):
readCount = 0
if file_path:
readable = open(file_path, "rb")
if offset:
readable.seek(offset)
else:
readable = read_able
try:
while True:
if totalCount is None or totalCount - readCount >= chunk_size:
Expand All @@ -237,9 +241,7 @@ def entity(conn):


def get_file_entity_by_offset_partsize(file_path, offset, totalCount, chunk_size=const.READ_ONCE_LENGTH, notifier=None):
    """Build a send entity that reads totalCount bytes from file_path starting at offset.

    Passes file_path and offset through so the file is opened (and seeked)
    inside get_entity_for_send_with_total_count at send time; the previous
    implementation opened and seeked the file eagerly here, which could
    leave a closed file object to be re-read on upload retry.
    """
    return get_entity_for_send_with_total_count(file_path, totalCount, offset, chunk_size, notifier)


def is_ipaddress(item):
Expand Down
4 changes: 2 additions & 2 deletions src/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
from setuptools import setup, find_packages

setup(
name='sdk-obs-python',
version='3.23.5',
name='esdk-obs-python',
version='3.23.9',
packages=find_packages(exclude=['tests']),
zip_safe=False,
description='OBS Python SDK',
Expand Down
Loading

0 comments on commit a185839

Please sign in to comment.