@@ -1692,7 +1692,7 @@ def appendObject(self, bucketName, objectKey, content=None, metadata=None, heade
             headers, readable, notifier, entity = self._prepare_file_notifier_and_entity(offset, file_size, headers,
                                                                                          progressCallback, file_path,
                                                                                          readable)
-            headers = self.convertor.trans_put_object(metadata=metadata, headers=headers)
+            headers = self.convertor.trans_put_object(metadata=metadata, headers=headers, file_path=file_path)
             self.log_client.log(DEBUG, 'send Path:%s' % file_path)
         else:
             entity = content.get('content')
@@ -1701,7 +1701,7 @@ def appendObject(self, bucketName, objectKey, content=None, metadata=None, heade
                                                                                             autoClose, readable,
                                                                                             chunkedMode, notifier)
 
-            headers = self.convertor.trans_put_object(metadata=metadata, headers=headers)
+            headers = self.convertor.trans_put_object(metadata=metadata, headers=headers, content=content.get('content'))
 
         try:
             if notifier is not None:
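A minimal usage sketch for the `appendObject` path above (not part of the diff): endpoint, credentials, bucket, and key are placeholders, and `AppendObjectContent` is the request model the SDK documents for this call. With this change the convertor also receives the payload source (`file_path` in the file branch, the raw content otherwise), presumably so payload-derived headers such as the new CRC64 checksum can be attached.

```python
from obs import ObsClient, AppendObjectContent

# Placeholder credentials/endpoint -- replace with real values.
client = ObsClient(access_key_id='ak', secret_access_key='sk',
                   server='https://obs.example.com')

content = AppendObjectContent()
content.content = 'Hello OBS'  # string payload: the content.get('content') branch
content.position = 0           # append position for the first write
resp = client.appendObject('demo-bucket', 'demo-key', content=content)
print(resp.status)
```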
@@ -1727,7 +1727,7 @@ def putContent(self, bucketName, objectKey, content=None, metadata=None, headers
             headers = PutObjectHeader()
         if headers.get('contentType') is None:
             headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:].lower())
-        _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers)
+        _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers, content=content)
 
         readable = False
         chunkedMode = False
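`putContent` gets the same treatment: the body is now handed to the convertor alongside the headers. A usage sketch with placeholder values:

```python
from obs import ObsClient

client = ObsClient(access_key_id='ak', secret_access_key='sk',  # placeholders
                   server='https://obs.example.com')
resp = client.putContent('demo-bucket', 'demo-key', content='Hello OBS')
if resp.status < 300:
    print('etag:', resp.body.etag)
```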
@@ -1789,12 +1789,12 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
                 __file_path = os.path.join(file_path, f)
                 if not const.IS_PYTHON2:
                     if not objectKey:
-                        key = util.safe_trans_to_gb2312('{0}/ '.format(os.path.split(file_path)[1]) + f)
+                        key = util.safe_trans_to_gb2312('{0}'.format(os.path.split(file_path)[1]) + f)
                     else:
                         key = '{0}/'.format(objectKey) + util.safe_trans_to_gb2312(f)
                 else:
                     if not objectKey:
-                        key = util.safe_trans_to_gb2312('{0}/ '.format(os.path.split(file_path)[1]) + f).decode(
+                        key = util.safe_trans_to_gb2312('{0}'.format(os.path.split(file_path)[1]) + f).decode(
                             'GB2312').encode('UTF-8')
                     else:
                         key = '{0}/'.format(objectKey) + util.safe_trans_to_gb2312(f).decode('GB2312').encode('UTF-8')
@@ -1810,7 +1810,7 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None,
 
         headers = self._putFileHandleHeader(headers, size, objectKey, file_path)
 
-        _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers)
+        _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers, file_path=file_path)
         if const.CONTENT_LENGTH_HEADER not in _headers:
             _headers[const.CONTENT_LENGTH_HEADER] = util.to_string(size)
         self.log_client.log(DEBUG, 'send Path:%s' % file_path)
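For `putFile`, the convertor now receives `file_path` (line 1813), and the key prefix built from a directory upload changes from `'{0}/ '` to `'{0}'` (lines 1792/1797). A single-file usage sketch, placeholder values again:

```python
from obs import ObsClient

client = ObsClient(access_key_id='ak', secret_access_key='sk',  # placeholders
                   server='https://obs.example.com')

# Single file: file_path is forwarded to the convertor after this change.
resp = client.putFile('demo-bucket', 'docs/readme.txt', file_path='/tmp/readme.txt')
print(resp.status)
```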
@@ -1857,13 +1857,16 @@ def _get_part_size(partSize, file_size, offset):
         partSize = partSize if partSize is not None and 0 < partSize <= (file_size - offset) else file_size - offset
         return partSize
 
-    def _prepare_headers(self, md5, isAttachMd5, file_path, partSize, offset, sseHeader, headers):
+    def _prepare_headers(self, md5, isAttachMd5, crc64, isAttachCrc64, file_path, partSize, offset, sseHeader, headers):
         if md5:
             headers[const.CONTENT_MD5_HEADER] = md5
         elif isAttachMd5:
             headers[const.CONTENT_MD5_HEADER] = util.base64_encode(
                 util.md5_file_encode_by_size_offset(file_path, partSize, offset, self.chunk_size))
-
+        if crc64:
+            self.convertor._put_key_value(headers, self.ha.crc64_header(), crc64)
+        elif isAttachCrc64:
+            self.convertor._put_key_value(headers, self.ha.crc64_header(), util.calculate_file_crc64(file_path, offset=offset, totalCount=partSize))
         if sseHeader is not None:
             self.convertor._set_sse_header(sseHeader, headers, True)
@@ -1879,12 +1882,15 @@ def _prepare_upload_part_notifier(partSize, progressCallback, readable):
 
         return readable, notifier
 
-    def _get_headers(self, md5, sseHeader, headers):
+    def _get_headers(self, md5, crc64, isAttachCrc64, content, sseHeader, headers):
         if md5:
             headers[const.CONTENT_MD5_HEADER] = md5
         if sseHeader is not None:
             self.convertor._set_sse_header(sseHeader, headers, True)
-
+        if crc64:
+            headers[self.ha.crc64_header()] = crc64
+        elif isAttachCrc64:
+            headers[self.ha.crc64_header()] = util.calculate_content_crc64(util.covert_string_to_bytes(content))
         return headers
 
     @staticmethod
@@ -1911,7 +1917,7 @@ def _check_file_part_info(self, file_path, offset, partSize):
     @funcCache
     def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, isFile=False, partSize=None,
                    offset=0, sseHeader=None, isAttachMd5=False, md5=None, content=None, progressCallback=None,
-                   autoClose=True, extensionHeaders=None):
+                   autoClose=True, isAttachCrc64=False, crc64=None, extensionHeaders=None):
         self._assert_not_null(partNumber, 'partNumber is empty')
         self._assert_not_null(uploadId, 'uploadId is empty')
 
@@ -1926,7 +1932,7 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i
             checked_file_part_info = self._check_file_part_info(content, offset, partSize)
 
             headers = {const.CONTENT_LENGTH_HEADER: util.to_string(checked_file_part_info["partSize"])}
-            headers = self._prepare_headers(md5, isAttachMd5, checked_file_part_info["file_path"],
+            headers = self._prepare_headers(md5, isAttachMd5, crc64, isAttachCrc64, checked_file_part_info["file_path"],
                                             checked_file_part_info["partSize"], checked_file_part_info["offset"],
                                             sseHeader, headers)
 
@@ -1938,7 +1944,7 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i
             headers = {}
             if content is not None and hasattr(content, 'read') and callable(content.read):
                 readable = True
-                headers = self._get_headers(md5, sseHeader, headers)
+                headers = self._get_headers(md5, crc64, isAttachCrc64, content, sseHeader, headers)
 
                 if partSize is None:
                     self.log_client.log(DEBUG, 'missing partSize when uploading a readable stream')
@@ -1956,7 +1962,7 @@ def uploadPart(self, bucketName, objectKey, partNumber, uploadId, object=None, i
                 entity = content
                 if entity is None:
                     entity = ''
-                headers = self._get_headers(md5, sseHeader, headers)
+                headers = self._get_headers(md5, crc64, isAttachCrc64, content, sseHeader, headers)
 
         try:
             if notifier is not None:
@@ -1982,7 +1988,7 @@ def check_file_path(file_path):
     @funcCache
     def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, content=None, isFile=False,
                                 partSize=None, offset=0, sseHeader=None, isAttachMd5=False, md5=None, notifier=None,
-                                extensionHeaders=None, headers=None):
+                                extensionHeaders=None, headers=None, isAttachCrc64=False, crc64=None):
         self._assert_not_null(partNumber, 'partNumber is empty')
         self._assert_not_null(uploadId, 'uploadId is empty')
 
@@ -1994,7 +2000,7 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c
             checked_file_part_info = self._check_file_part_info(content, offset, partSize)
 
             headers[const.CONTENT_LENGTH_HEADER] = util.to_string(checked_file_part_info["partSize"])
-            headers = self._prepare_headers(md5, isAttachMd5, checked_file_part_info["file_path"],
+            headers = self._prepare_headers(md5, isAttachMd5, crc64, isAttachCrc64, checked_file_part_info["file_path"],
                                             checked_file_part_info["partSize"], checked_file_part_info["offset"],
                                             sseHeader, headers)
 
@@ -2005,7 +2011,7 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c
         else:
             if content is not None and hasattr(content, 'read') and callable(content.read):
                 readable = True
-                headers = self._get_headers(md5, sseHeader, headers)
+                headers = self._get_headers(md5, crc64, isAttachCrc64, content, sseHeader, headers)
 
                 if partSize is None:
                     chunkedMode = True
@@ -2018,7 +2024,7 @@ def _uploadPartWithNotifier(self, bucketName, objectKey, partNumber, uploadId, c
                 entity = content
                 if entity is None:
                     entity = ''
-                headers = self._get_headers(md5, sseHeader, headers)
+                headers = self._get_headers(md5, crc64, isAttachCrc64, content, sseHeader, headers)
 
         ret = self._make_put_request(bucketName, objectKey, pathArgs={'partNumber': partNumber, 'uploadId': uploadId},
                                      headers=headers, entity=entity, chunkedMode=chunkedMode, methodName='uploadPart',
@@ -2132,16 +2138,16 @@ def copyPart(self, bucketName, objectKey, partNumber, uploadId, copySource, copy
 
     @funcCache
     def completeMultipartUpload(self, bucketName, objectKey, uploadId, completeMultipartUploadRequest,
-                                extensionHeaders=None, encoding_type=None):
+                                isAttachCrc64=False, extensionHeaders=None, encoding_type=None):
         self._assert_not_null(uploadId, 'uploadId is empty')
         self._assert_not_null(completeMultipartUploadRequest, 'completeMultipartUploadRequest is empty')
         pathArgs = {'uploadId': uploadId}
         if encoding_type is not None:
             pathArgs["encoding-type"] = encoding_type
+        entity, headers = self.convertor.trans_complete_multipart_upload_request(completeMultipartUploadRequest, isAttachCrc64)
         ret = self._make_post_request(bucketName, objectKey,
-                                      pathArgs=pathArgs,
-                                      entity=self.convertor.trans_complete_multipart_upload_request(
-                                          completeMultipartUploadRequest), methodName='completeMultipartUpload',
+                                      pathArgs=pathArgs, headers=headers,
+                                      entity=entity, methodName='completeMultipartUpload',
                                       extensionHeaders=extensionHeaders)
         self._generate_object_url(ret, bucketName, objectKey)
         return ret
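An end-to-end sketch of the multipart flow with the new flag, using placeholder values and a single small part; `CompleteMultipartUploadRequest` and `CompletePart` are the SDK's public request models:

```python
from obs import ObsClient, CompleteMultipartUploadRequest, CompletePart

client = ObsClient(access_key_id='ak', secret_access_key='sk',  # placeholders
                   server='https://obs.example.com')

init = client.initiateMultipartUpload('demo-bucket', 'demo-key')
upload_id = init.body.uploadId

part = client.uploadPart('demo-bucket', 'demo-key', partNumber=1,
                         uploadId=upload_id, content='only one small part',
                         isAttachCrc64=True)

request = CompleteMultipartUploadRequest(parts=[CompletePart(partNum=1, etag=part.body.etag)])
resp = client.completeMultipartUpload('demo-bucket', 'demo-key', upload_id,
                                      request, isAttachCrc64=True)
print(resp.status)
```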