@@ -210,21 +210,22 @@ def upload_object(get_client, args):
210210 print('No bucket named ' + parsed.bucket)
211211 sys.exit(2)
212212
213+ policy = 'public-read' if parsed.acl_public else None
214+
213215 for filename, file_path in to_upload:
214216 k = Key(bucket)
215217 k.key = filename
216218
217219 print(filename)
218- policy = 'public-read' if parsed.acl_public else None
219220 k.set_contents_from_filename(file_path, cb=_progress, num_cb=100, policy=policy)
220221
221222 for filename, file_path, file_size in to_multipart_upload:
222- _do_multipart_upload(bucket, filename, file_path, file_size)
223+ _do_multipart_upload(bucket, filename, file_path, file_size, policy)
223224
224225 print('Done.')
225226
226227
227- def _do_multipart_upload(bucket, filename, file_path, file_size):
228+ def _do_multipart_upload(bucket, filename, file_path, file_size, policy):
228229 """
229230 Handles the internals of a multipart upload for a large file.
230231
@@ -236,8 +237,12 @@ def _do_multipart_upload(bucket, filename, file_path, file_size):
236237 :type file_path: str
237238 :param file_size: The size of this file in bytes (used for chunking)
238239 :type file_size: int
240+ :param policy: The canned ACLs to include with the new key once the upload
241+ completes. None for no ACLs, or "public-read" to make the
242+ key accessible publicly.
243+ :type policy: str
239244 """
240- upload = bucket.initiate_multipart_upload(filename)
245+ upload = bucket.initiate_multipart_upload(filename, policy=policy)
241246
242247 num_chunks = int(math.ceil(file_size / MULTIPART_UPLOAD_CHUNK_SIZE))
243248 upload_exception = None
0 commit comments