image-sucker.sh
#!/bin/bash
#
# A simple Bash script to grab all the images from a website and save them into a directory.
#
# Written by @willc. https://www.willchatham.com
#
# Use at your own risk.
#
#
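# Usage (a minimal sketch; "example.com" below is a placeholder, not part of
# the script):
#
#   chmod +x image-sucker.sh
#   ./image-sucker.sh
#   # At the prompt, enter e.g. https://example.com
#   # Downloaded images are saved under ./example.com
#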
cat << "EOF"
____ __ __ __ ___ ____ ___ __ __ ___ _ _ ____ ____
(_ _)( \/ ) /__\ / __)( ___) / __)( )( ) / __)( )/ )( ___)( _ \
_)(_ ) ( /(__)\ ( (_-. )__) \__ \ )(__)( ( (__ ) ( )__) ) /
(____)(_/\/\_)(__)(__) \___/(____) (___/(______) \___)(_)\_)(____)(_)\_)
-------------------------------------------------------------------------
EOF
echo "Enter a URL or IP address to get started. Include http:// or https://:"
read -r url
URL=$url
# Strip the scheme so the output directory gets a clean name (e.g. example.com).
DIR=${url#*://}
echo "Output will be saved in ./$DIR"
sleep 1
echo "Here we go..."
# Create the dir to save everything into; -p keeps mkdir from failing if the dir already exists.
mkdir -p "$DIR"
# Recursively (-r) download images from the domain, keeping only the listed
# extensions (-A) and flattening the remote directory structure (-nd) into $DIR.
wget --no-check-certificate -nd -r -P "$DIR" -A jpeg,jpg,bmp,gif,png "$URL"