Let AWS CLI do checksum validation itself
Apparently the AWS CLI already validates MD5 checksums when downloading and uploading
files. Adding a flag to its config makes it do this explicitly, using a SHA256
checksum.
Source: aws/aws-cli#2585 (comment)

This makes our own implementation of hashing superfluous.
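
A rough way to spot-check this behaviour (not part of the commit, and it assumes the CLI's --debug output includes the signed request headers) is to upload something and inspect the x-amz-content-sha256 header, which should carry a SHA256 digest rather than UNSIGNED-PAYLOAD once payload signing is on:

    # Illustrative spot check only; "local-backup.tar.gz" is a placeholder file name.
    # With payload signing enabled, the x-amz-content-sha256 request header should
    # hold a SHA256 digest of the payload instead of the string UNSIGNED-PAYLOAD.
    aws s3 cp local-backup.tar.gz "s3://sticky-automatic-backups/admins/" --debug 2>&1 \
      | grep -i 'x-amz-content-sha256'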
tomwassenberg committed Aug 11, 2017
1 parent 18e07df commit f814526
Showing 3 changed files with 3 additions and 21 deletions.
11 changes: 1 addition & 10 deletions ansible/playbooks/oneoff-restore-backup.yml
@@ -30,7 +30,7 @@
     - name: "find latest backups"
       shell:
         "aws s3 ls s3://sticky-automatic-backups/{{ item }} | grep -v -E \
-        '^ |\\.sha256$' | sort | tail -n 1 | awk '{print $4}'"
+        '^ ' | sort | tail -n 1 | awk '{print $4}'"
       register: "backup_filenames"
       with_items:
         - "admins/"
@@ -43,15 +43,6 @@
         {{ item.stdout }} {{ tmp_dir }}/{{ item.stdout }}"
       with_items: "{{ backup_filenames.results }}"
 
-    - name: "download checksums of backup archives"
-      command:
-        "aws s3 cp s3://sticky-automatic-backups/{{ item.item }}\
-        {{ item.stdout }}.sha256 {{ tmp_dir }}/{{ item.stdout }}.sha256"
-
-    - name: "compare checksums of backup archives"
-      command: "sha256sum --check {{ tmp_dir }}/{{ item.stdout }}.sha256"
-      with_items: "{{ backup_filenames.results }}"
-
     - name: "unpack admins and website backups"
       unarchive:
         src: "{{ tmp_dir }}/{{ item.stdout }}"
1 change: 1 addition & 0 deletions ansible/templates/home/ansible/.aws/config.j2
@@ -6,3 +6,4 @@ aws_secret_access_key = {{ secret_backup_aws.secret_key }}
 
 output = text
 region = eu-west-1
+payload_signing_enabled = true
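
Rendered, the template presumably produces something along these lines. This is only a sketch with placeholder values; the [default] section header and the access-key line are assumptions, since the diff shows just the tail of the file:

    # Sketch of the rendered config; values are placeholders.
    # The [default] header and the access key line are assumptions (not in the diff).
    [default]
    aws_access_key_id = AKIA...
    aws_secret_access_key = ...
    output = text
    region = eu-west-1
    # Added by this commit: have the CLI sign S3 payloads with a SHA256 checksum,
    # so it validates uploads and downloads itself.
    payload_signing_enabled = true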
12 changes: 1 addition & 11 deletions ansible/templates/usr/local/bin/backup-to-s3.sh.j2
@@ -29,7 +29,6 @@ if [[ -z ${1+x} ]]; then
 fi
 
 SOURCE="${1}"
-HASH="sha256" # Choose from md5/sha1/sha224/sha256/sha384/sha512
 FILE_TITLE="${SOURCE}-$(date +'%Y%m%d-%H%M%S')"
 
 S3BUCKET="{% if staging == 'true' %}staging-{% endif %}sticky-automatic-backups"
@@ -40,10 +39,7 @@ cleanup() {
   # Check because complete file name is only set when a valid backup source is
   # passed
   if [[ -n ${FILE_NAME+x} ]]; then
-    {
-      rm -rf "{{ tmp_dir }}/${FILE_NAME}"
-      rm -rf "{{ tmp_dir }}/${FILE_NAME}.${HASH}"
-    } 1> /dev/null
+    rm -rf "{{ tmp_dir }}/${FILE_NAME}" 1> /dev/null
   fi
 }
 
@@ -92,20 +88,14 @@ numfmt --to=iec --suffix=B --format="%.2f")
 SUCCESS_MESSAGE="*{% if staging == 'true' %}_FROM STAGING:_ {% endif %}Backup of \
 ${SOURCE} completed* _(${BACKUP_DATE})_\n_(Backup size: ${BACKUP_SIZE})_"
 
-${HASH}sum "{{ tmp_dir }}/${FILE_NAME}" > "{{ tmp_dir }}/${FILE_NAME}.${HASH}"
-
 {
   aws s3 cp "{{ tmp_dir }}/${FILE_NAME}" "s3://${S3BUCKET}/${S3PATH}/"
 
-  aws s3 cp "{{ tmp_dir }}/${FILE_NAME}.${HASH}" "s3://${S3BUCKET}/${S3PATH}/"
-
   rm "{{ tmp_dir }}/${FILE_NAME}"
 
   aws s3 cp "s3://${S3BUCKET}/${S3PATH}/${FILE_NAME}" \
     "{{ tmp_dir }}/${FILE_NAME}"
 
-  ${HASH}sum --check "{{ tmp_dir }}/${FILE_NAME}.${HASH}"
-
   echo -e "${SUCCESS_MESSAGE}" | /usr/local/bin/slacktee --plain-text\
     --username 'Backup service' --icon ':floppy_disk:' --attachment 'good'
 } 1> /dev/null
