@@ -737,8 +737,8 @@ def upload(self):
                 f'Upload failed for store {self.name}') from e
 
     def delete(self) -> None:
-        logger.info(f'Deleting S3 Bucket {self.name}')
-        return self._delete_s3_bucket(self.name)
+        self._delete_s3_bucket(self.name)
+        logger.info(f'Deleted S3 bucket {self.name}.')
 
     def get_handle(self) -> StorageHandle:
         return aws.resource('s3').Bucket(self.name)
@@ -941,15 +941,22 @@ def _delete_s3_bucket(self, bucket_name: str) -> None:
         Args:
           bucket_name: str; Name of bucket
         """
+        # Deleting objects programmatically is very slow
+        # (i.e. bucket.objects.all().delete() is slow).
+        # In addition, standard delete operations (e.g. via `aws s3 rm`)
+        # are slow, since AWS adds deletion markers.
+        # https://stackoverflow.com/questions/49239351/why-is-it-so-much-slower-to-delete-objects-in-aws-s3-than-it-is-to-create-them
+        # The fastest way to delete is to run `aws s3 rb --force`,
+        # which removes the bucket and all of its objects.
+        remove_command = f'aws s3 rb s3://{bucket_name} --force'
         try:
-            s3 = aws.resource('s3')
-            bucket = s3.Bucket(bucket_name)
-            bucket.objects.all().delete()
-            bucket.delete()
-        except aws.client_exception() as e:
-            logger.error(f'Unable to delete S3 bucket {self.name}')
-            logger.error(e)
-            raise e
+            with backend_utils.safe_console_status(
+                    f'[bold cyan]Deleting [green]bucket {bucket_name}'):
+                subprocess.check_output(remove_command.split(' '))
+        except subprocess.CalledProcessError as e:
+            logger.error(e.output)
+            raise exceptions.StorageBucketDeleteError(
+                f'Failed to delete S3 bucket {bucket_name}.')
 
 
 class GcsStore(AbstractStore):
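
For readers outside this codebase, a minimal standalone sketch of the same `aws s3 rb --force` approach is below. It assumes the AWS CLI is installed and credentials are configured; `BucketDeleteError` and the plain `logging` logger are stand-ins for the `exceptions.StorageBucketDeleteError` and console-status helper used in the diff, which are not shown here.

import logging
import subprocess

logger = logging.getLogger(__name__)


class BucketDeleteError(Exception):
    """Stand-in for the StorageBucketDeleteError raised in the diff."""


def delete_s3_bucket(bucket_name: str) -> None:
    """Force-remove an S3 bucket and all of its objects via the AWS CLI.

    `aws s3 rb --force` deletes the objects and the bucket in one call,
    which is much faster than iterating over objects with boto3.
    """
    remove_command = f'aws s3 rb s3://{bucket_name} --force'
    try:
        subprocess.check_output(remove_command.split(' '),
                                stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        # e.output holds the CLI's combined stdout/stderr (bytes).
        logger.error(e.output)
        raise BucketDeleteError(
            f'Failed to delete S3 bucket {bucket_name}.') from e

For example, calling delete_s3_bucket('some-temporary-bucket') (a hypothetical bucket name) removes the bucket and its contents in a single CLI invocation instead of deleting objects one by one through boto3.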