Skip to content

Commit

Permalink
MultiPartUpload.get_all_parts returns 1,000 multipart upload parts max
Browse files Browse the repository at this point in the history
  • Loading branch information
islue committed May 30, 2017
1 parent 3a4ce7e commit 899ddde
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 1 deletion.
1 change: 1 addition & 0 deletions conf/mongodb-consistent-backup.example.conf
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ production:
# bucket_name: [AWS S3 Bucket Name]
# bucket_prefix: [prefix] (default: /)
# threads: [1+] (default: 1 per CPU)
# chunk_size_mb: [1+] (default: 50)
# secure: [true|false] (default: true)
# retries: [1+] (default: 5)
# acl: [acl_str] (default: none)
6 changes: 5 additions & 1 deletion mongodb_consistent_backup/Upload/S3/S3.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import os
import logging

import boto
from copy_reg import pickle
from math import ceil
from multiprocessing import Pool
Expand Down Expand Up @@ -102,7 +103,10 @@ def run(self):
self._pool.close()
self._pool.join()

if len(self._multipart.get_all_parts()) == chunk_count:
part_count = 0
for part in boto.s3.multipart.part_lister(self._multipart):
part_count += 1
if part_count == chunk_count:
self._multipart.complete_upload()
key = self.bucket.get_key(key_name)
if self.s3_acl:
Expand Down

0 comments on commit 899ddde

Please sign in to comment.