@@ -621,6 +621,7 @@ def _rename_request_headers(self, headers, extension_headers, method):
                     k = util.encode_item(k, ' ;/?:@&=+$,')
 
                     new_headers = self._rename_request_headers_handle(k, v, new_headers)
+        if isinstance(extension_headers, dict):
             for k, v in extension_headers.items():
                 new_headers = self._rename_request_headers_handle(k, v, new_headers)
         return new_headers
@@ -1278,7 +1279,7 @@ def _getApiVersion(self, bucketName=''):
         return const.V2_SIGNATURE, res
 
     @funcCache
-    def listBuckets(self, isQueryLocation=True, extensionHeaders=None, bucketType=None, maxKeys=100, marker=None):
+    def listBuckets(self, isQueryLocation=True, extensionHeaders=None, bucketType=None, maxKeys=None, marker=None):
         """
         Obtain a bucket list.
         :param isQueryLocation: Whether to query the bucket location.
@@ -1289,7 +1290,9 @@ def listBuckets(self, isQueryLocation=True, extensionHeaders=None, bucketType=No
         If this parameter is left blank, both buckets and parallel file systems will be listed.
         :return: A bucket list
         """
-        pathArgs = {'marker': marker, 'max-keys': maxKeys}
+        pathArgs = None
+        if maxKeys or marker:
+            pathArgs = {'marker': marker, 'max-keys': maxKeys}
         if self.is_cname:
             raise Exception('listBuckets is not allowed in custom domain mode')
         return self._make_get_request(methodName='listBuckets', pathArgs=pathArgs, extensionHeaders=extensionHeaders,
@@ -1613,18 +1616,15 @@ def _prepare_file_notifier_and_entity(self, offset, file_size, headers, progress
                 notifier = progress.ProgressNotifier(progressCallback, totalCount)
             else:
                 notifier = progress.NONE_NOTIFIER
-            readable_object = self.gen_readable_object_from_file(file_path)
-            readable_object.seek(offset)
-            entity = util.get_entity_for_send_with_total_count(readable_object, totalCount, self.chunk_size, notifier)
+            entity = util.get_entity_for_send_with_total_count(file_path, totalCount, offset, self.chunk_size, notifier)
         else:
             totalCount = headers['contentLength']
             if totalCount > 0 and progressCallback is not None:
                 readable = True
                 notifier = progress.ProgressNotifier(progressCallback, totalCount)
             else:
                 notifier = progress.NONE_NOTIFIER
-            readable_object = self.gen_readable_object_from_file(file_path)
-            entity = util.get_entity_for_send_with_total_count(readable_object, totalCount, self.chunk_size, notifier)
+            entity = util.get_entity_for_send_with_total_count(file_path, totalCount, None, self.chunk_size, notifier)
 
         return headers, readable, notifier, entity
 
@@ -1644,8 +1644,8 @@ def _prepare_content_notifier_and_entity(self, entity, headers, progressCallback
             notifier = progress.ProgressNotifier(progressCallback,
                                                  totalCount) if totalCount > 0 and progressCallback is not None \
                 else progress.NONE_NOTIFIER
-            entity = util.get_entity_for_send_with_total_count(entity, totalCount, self.chunk_size, notifier,
-                                                               autoClose)
+            entity = util.get_entity_for_send_with_total_count(read_able=entity, totalCount=totalCount, chunk_size=self.chunk_size, notifier=notifier,
+                                                               auto_close=autoClose)
 
         return entity, readable, chunkedMode, notifier
 
@@ -1728,8 +1728,9 @@ def putContent(self, bucketName, objectKey, content=None, metadata=None, headers
             notifier = progress.ProgressNotifier(progressCallback,
                                                  totalCount) if totalCount > 0 and progressCallback \
                 is not None else progress.NONE_NOTIFIER
-            entity = util.get_entity_for_send_with_total_count(entity, totalCount, self.chunk_size, notifier,
-                                                               autoClose)
+            entity = util.get_entity_for_send_with_total_count(read_able=entity, totalCount=totalCount,
+                                                               chunk_size=self.chunk_size, notifier=notifier,
+                                                               auto_close=autoClose)
 
         notifier.start()
         ret = self._make_put_request(bucketName, objectKey, headers=_headers, entity=entity,
@@ -1789,8 +1790,6 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
 
         headers = self._putFileHandleHeader(headers, size, objectKey, file_path)
 
-        readable_object = self.gen_readable_object_from_file(file_path)
-        metadata = self.add_metadata_from_content(metadata, headers, readable_object)
         _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers)
         if const.CONTENT_LENGTH_HEADER not in _headers:
             _headers[const.CONTENT_LENGTH_HEADER] = util.to_string(size)
@@ -1805,7 +1804,7 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
             notifier = progress.NONE_NOTIFIER
             readable = False
 
-        entity = util.get_entity_for_send_with_total_count(readable_object, totalCount, self.chunk_size, notifier)
+        entity = util.get_entity_for_send_with_total_count(file_path, totalCount, None, self.chunk_size, notifier)
         try:
             notifier.start()
             ret = self._make_put_request(bucketName, objectKey, headers=_headers, entity=entity,
@@ -1815,13 +1814,6 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
         self._generate_object_url(ret, bucketName, objectKey)
         return ret
 
-    @staticmethod
-    def add_metadata_from_content(metadata, headers, content):
-        return metadata
-
-    def gen_readable_object_from_file(self, file_path):
-        return open(file_path, "rb")
-
     @staticmethod
     def _putFileHandleHeader(headers, size, objectKey, file_path):
         headers['contentLength'] = util.to_long(headers.get('contentLength'))
@@ -1920,9 +1912,7 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i
 
             readable, notifier = self._prepare_upload_part_notifier(checked_file_part_info["partSize"],
                                                                     progressCallback, readable)
-            readable_object = open(checked_file_part_info["file_path"], "rb")
-            readable_object.seek(checked_file_part_info["offset"])
-            entity = util.get_entity_for_send_with_total_count(readable_object, checked_file_part_info["partSize"],
+            entity = util.get_entity_for_send_with_total_count(checked_file_part_info["file_path"], checked_file_part_info["partSize"], checked_file_part_info["offset"],
                                                                self.chunk_size, notifier)
         else:
             headers = {}
@@ -1939,8 +1929,9 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i
                 headers[const.CONTENT_LENGTH_HEADER] = util.to_string(partSize)
                 totalCount = util.to_long(partSize)
                 notifier = self._get_notifier_with_size(progressCallback, totalCount)
-                entity = util.get_entity_for_send_with_total_count(content, totalCount, self.chunk_size, notifier,
-                                                                   autoClose)
+                entity = util.get_entity_for_send_with_total_count(read_able=content, totalCount=totalCount,
+                                                                   chunk_size=self.chunk_size, notifier=notifier,
+                                                                   auto_close=autoClose)
             else:
                 entity = content
         if entity is None:
@@ -1989,9 +1980,8 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c
 
             if notifier is not None and not isinstance(notifier, progress.NoneNotifier):
                 readable = True
-            readable_object = open(checked_file_part_info["file_path"], "rb")
-            readable_object.seek(checked_file_part_info["offset"])
-            entity = util.get_entity_for_send_with_total_count(readable_object, partSize, self.chunk_size, notifier)
+            entity = util.get_entity_for_send_with_total_count(checked_file_part_info["file_path"], partSize, checked_file_part_info["offset"],
+                                                               self.chunk_size, notifier)
         else:
             if content is not None and hasattr(content, 'read') and callable(content.read):
                 readable = True
@@ -2002,8 +1992,8 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c
                     entity = util.get_readable_entity(content, self.chunk_size, notifier)
                 else:
                     headers[const.CONTENT_LENGTH_HEADER] = util.to_string(partSize)
-                    entity = util.get_entity_for_send_with_total_count(content, util.to_long(partSize), self.chunk_size,
-                                                                       notifier)
+                    entity = util.get_entity_for_send_with_total_count(read_able=content, totalCount=util.to_long(partSize),
+                                                                       chunk_size=self.chunk_size, notifier=notifier)
             else:
                 entity = content
         if entity is None: