From 5d5a7c6389cbff8205ec4452fd752eac72d8f35d Mon Sep 17 00:00:00 2001 From: Hu Hailin Date: Tue, 30 May 2017 10:04:47 +0000 Subject: [PATCH] MultiPartUpload.get_all_parts returns at most 1,000 parts --- conf/mongodb-consistent-backup.example.conf | 1 + mongodb_consistent_backup/Upload/S3/S3.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/conf/mongodb-consistent-backup.example.conf b/conf/mongodb-consistent-backup.example.conf index 6c13860a..5c52e8a4 100644 --- a/conf/mongodb-consistent-backup.example.conf +++ b/conf/mongodb-consistent-backup.example.conf @@ -55,6 +55,7 @@ production: # bucket_name: [AWS S3 Bucket Name] # bucket_prefix: [prefix] (default: /) # threads: [1+] (default: 1 per CPU) + # chunk_size_mb: [1+] (default: 50) # secure: [true|false] (default: true) # retries: [1+] (default: 5) # acl: [acl_str] (default: none) diff --git a/mongodb_consistent_backup/Upload/S3/S3.py b/mongodb_consistent_backup/Upload/S3/S3.py index fe3d79dc..dea67d00 100644 --- a/mongodb_consistent_backup/Upload/S3/S3.py +++ b/mongodb_consistent_backup/Upload/S3/S3.py @@ -1,6 +1,7 @@ import os import logging +import boto.s3.multipart from copy_reg import pickle from math import ceil from multiprocessing import Pool @@ -102,7 +103,10 @@ def run(self): self._pool.close() self._pool.join() - if len(self._multipart.get_all_parts()) == chunk_count: + part_count = 0 + for part in boto.s3.multipart.part_lister(self._multipart): + part_count += 1 + if part_count == chunk_count: self._multipart.complete_upload() key = self.bucket.get_key(key_name) if self.s3_acl: