1.下载Hadoop和JDK
下载Hadoop地址:http://archive.cloudera.com/cdh5/cdh/5/hadoop-2.6.0-cdh5.7.0.tar.gz
下载jdk:http://www.oracle.com/technetwork/java/javase/downloads/java-archive-downloads-javase7-521261.html(选择 jdk-7u80-linux-x64.tar.gz)
2.安装JDK
2.1解压jdk压缩包 tar -zxvf /home/hadoop/software/jdk-7u80-linux-x64.tar.gz -C /usr/java
2.2配置jdk环境变量
vi /etc/profile
export JAVA_HOME=/usr/java/jdk1.7.0_80
export PATH=$JAVA_HOME/bin:$PATH
2.3配置ssh
ssh-keygen
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
3.安装Hadoop
3.1解压Hadoop
tar -zxvf /home/hadoop/software/hadoop-2.6.0-cdh5.7.0.tar.gz -C /home/hadoop/app/
3.2配置环境
vi .bash_profile
export HADOOP_HOME=/home/hadoop/app/hadoop-2.6.0-cdh5.7.0
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
source .bash_profile
4.修改配置文件
hadoop-env.sh
export JAVA_HOME=/usr/java/jdk1.7.0_80
core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://hadoop-01:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/home/hadoop/app/tmp</value>
</property>
</configuration>
hdfs-site.xml
<configuration>
<property>
<name>dfs.namenode.name.dir</name>
<value>/home/hadoop/app/tmp/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/home/hadoop/app/tmp/dfs/data</value>
</property>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>hadoop-01:50090</value>
</property>
<property>
<name>dfs.namenode.secondary.https-address</name>
<value>hadoop-01:50091</value>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
slaves
echo "hadoop-01" > ./etc/hadoop/slaves
mapred-site.xml(正常情况下没有这个文件,可由 mapred-site.xml.template
复制而来) cp mapred-site.xml.template mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
yarn-site.xml:
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
格式化Hadoop
hdfs namenode -format(注意:只在首次安装时执行一次,重复格式化会导致 DataNode 与 NameNode 的 clusterID 不一致而无法启动)
启动Hadoop