#cloud-config
# Provisions a single-node Spark 2.2.0 master with an HDFS namenode
# (Hadoop 2.7.4) on first boot. runcmd executes as root; the 'spark'
# user and /home/spark are assumed to already exist (image default or a
# 'users:' section elsewhere) — NOTE(review): confirm against the caller.
package_upgrade: true
packages:
  - openjdk-8-jdk
runcmd:
  # --- Download and unpack Spark ---
  - 'wget https://reportresources.blob.core.windows.net/public/spark-2.2.0-bin-hadoop2.7.tgz -O /home/spark/spark-2.2.0-bin-hadoop2.7.tgz'
  - tar -xzf /home/spark/spark-2.2.0-bin-hadoop2.7.tgz -C /home/spark/
  - rm /home/spark/spark-2.2.0-bin-hadoop2.7.tgz
  # --- Download and unpack Hadoop (provides HDFS plus the cloud-storage connector jars) ---
  - 'wget https://reportresources.blob.core.windows.net/public/hadoop-2.7.4.tar.gz -O /home/spark/hadoop-2.7.4.tar.gz'
  - tar -xzf /home/spark/hadoop-2.7.4.tar.gz -C /home/spark/
  - rm /home/spark/hadoop-2.7.4.tar.gz
  # Copy the Azure/AWS storage connector jars from Hadoop's tools dir onto
  # Spark's classpath so wasb:// and s3a:// URLs work from Spark jobs.
  - cp /home/spark/hadoop-2.7.4/share/hadoop/tools/lib/azure-storage-2.0.0.jar /home/spark/spark-2.2.0-bin-hadoop2.7/jars/azure-storage-2.2.0.jar
  - cp /home/spark/hadoop-2.7.4/share/hadoop/tools/lib/hadoop-azure-2.7.4.jar /home/spark/spark-2.2.0-bin-hadoop2.7/jars/hadoop-azure-2.7.4.jar
  - cp /home/spark/hadoop-2.7.4/share/hadoop/tools/lib/aws-java-sdk-1.7.4.jar /home/spark/spark-2.2.0-bin-hadoop2.7/jars/aws-java-sdk-1.7.4.jar
  - cp /home/spark/hadoop-2.7.4/share/hadoop/tools/lib/hadoop-aws-2.7.4.jar /home/spark/spark-2.2.0-bin-hadoop2.7/jars/hadoop-aws-2.7.4.jar
  # Point Spark's shuffle/scratch space at the local (ephemeral) disk.
  - echo "SPARK_LOCAL_DIRS=/mnt/spark" > /home/spark/spark-2.2.0-bin-hadoop2.7/conf/spark-env.sh
  - chmod 755 /home/spark/spark-2.2.0-bin-hadoop2.7/conf/spark-env.sh
  - chown -R spark:spark /home/spark/spark-2.2.0-bin-hadoop2.7
  # Write core-site.xml atomically with a heredoc (was 7 fragile echo-append
  # commands). fs.defaultFS pins the namenode to this node's static IP.
  - |
    cat > /home/spark/hadoop-2.7.4/etc/hadoop/core-site.xml <<'EOF'
    <?xml version="1.0" encoding="UTF-8"?>
    <configuration>
      <property>
        <name>fs.defaultFS</name>
        <value>hdfs://172.16.1.4</value>
      </property>
    </configuration>
    EOF
  # Write hdfs-site.xml the same way. The ip-hostname-check relaxation lets
  # datanodes register by IP without reverse DNS; name/data dirs live on the
  # ephemeral disk alongside SPARK_LOCAL_DIRS.
  - |
    cat > /home/spark/hadoop-2.7.4/etc/hadoop/hdfs-site.xml <<'EOF'
    <?xml version="1.0" encoding="UTF-8"?>
    <configuration>
      <property>
        <name>dfs.namenode.datanode.registration.ip-hostname-check</name>
        <value>false</value>
      </property>
      <property>
        <name>dfs.namenode.name.dir</name>
        <value>/mnt/spark/namenode</value>
      </property>
      <property>
        <name>dfs.datanode.data.dir</name>
        <value>/mnt/spark/datanode</value>
      </property>
    </configuration>
    EOF
  - chown -R spark:spark /home/spark/hadoop-2.7.4
  # -p makes this idempotent: /mnt is usually a pre-mounted ephemeral disk,
  # and a bare mkdir would abort if /mnt/spark already existed.
  - mkdir -p /mnt/spark
  - chown spark:spark /mnt/spark
  # Start the Spark master, then format and start the HDFS namenode ("spark"
  # is the cluster name passed to -format), all as the unprivileged user.
  - su -c "/home/spark/spark-2.2.0-bin-hadoop2.7/sbin/start-master.sh" spark
  - su -c "JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64/ /home/spark/hadoop-2.7.4/bin/hdfs namenode -format spark" spark
  - su -c "JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64/ /home/spark/hadoop-2.7.4/sbin/hadoop-daemon.sh --config /home/spark/hadoop-2.7.4/etc/hadoop/ --script hdfs start namenode" spark