Check the JDK installation path on Linux
which java
ls -lrt /usr/bin/java
ls -lrt /etc/alternatives/java
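If the paths above are symlink chains, readlink can resolve them in one step (an optional convenience):
readlink -f $(which java) # prints the real java binary, e.g. /usr/lib/jvm/java-7-openjdk-amd64/jre/bin/java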
user: hadoop
pwd: admin
JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64 # JAVA_HOME points to the JDK root, not .../jre/bin
1.1 Create the hadoop user (password: 123456)
sudo useradd -m hadoop -s /bin/bash # create the hadoop user with /bin/bash as its login shell
sudo passwd hadoop # set the hadoop user's password (you will be asked to type it twice)
sudo adduser hadoop sudo # grant the hadoop user administrator (sudo) privileges
su - hadoop # switch to the hadoop user
sudo apt-get update # refresh the apt package index for the installs that follow
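An optional quick check that the user and its sudo membership exist as expected:
id hadoop # shows uid, gid, and groups; the sudo group should be listed
groups hadoop # the same group list on its own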
1.2 Install SSH and set up passwordless SSH login
sudo apt-get install openssh-server # install the SSH server
ssh localhost # log in over SSH; type yes at the first-connection prompt
exit # log out of the ssh localhost session
cd ~/.ssh/ # if this directory does not exist, run ssh localhost once first
ssh-keygen -t rsa # press Enter at each prompt to accept the defaults
cat ./id_rsa.pub >> ./authorized_keys # authorize the key
ssh localhost # logging in should now work without a password; if not, search for "SSH passwordless login"
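If passwordless login still fails, the most common cause is file permissions: sshd ignores authorized_keys when ~/.ssh or the file itself is group- or world-writable. A quick fix under the default sshd configuration:
chmod 700 ~/.ssh # only the owner may access the directory
chmod 600 ~/.ssh/authorized_keys # only the owner may read/write the key list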
1.3 Configure the Java environment
vim ~/.bashrc # append the following lines:
export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
source ~/.bashrc # make the new environment variables take effect
java -version # verify the installation by printing the Java version
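An optional sanity check that the variables point where they should:
echo $JAVA_HOME # should print /usr/lib/jvm/java-7-openjdk-amd64
$JAVA_HOME/bin/java -version # should report the same version as plain java -version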
1.4 Install hadoop-2.6.0
sudo tar -zxvf hadoop-2.6.0.tar.gz -C /usr/local # extract into /usr/local
cd /usr/local
sudo mv hadoop-2.6.0 hadoop # rename the directory to hadoop
sudo chown -R hadoop ./hadoop # give the hadoop user ownership of the files
vim ~/.bashrc # append the following lines:
export HADOOP_HOME=/usr/local/hadoop
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath):$CLASSPATH
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
source ~/.bashrc # make the new environment variables take effect
hadoop version # verify the installation by printing the Hadoop version
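If hadoop version fails, the PATH entries are the usual suspects; these optional checks narrow it down:
which hadoop # should print /usr/local/hadoop/bin/hadoop
echo $HADOOP_HOME # should print /usr/local/hadoop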
1.5 Pseudo-distributed mode
Hadoop's configuration files live in /usr/local/hadoop/etc/hadoop/.
Two of them need to be modified: core-site.xml and hdfs-site.xml.
1. First, add the JDK path to hadoop-env.sh (export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64).
2. Next, edit core-site.xml:
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/usr/local/hadoop/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
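hadoop.tmp.dir is created on demand, but creating it up front surfaces permission problems early (an optional step, assuming the chown from section 1.4 already ran):
mkdir -p /usr/local/hadoop/tmp # run as the hadoop user; must succeed without sudo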
3. Edit hdfs-site.xml:
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/data</value>
    </property>
</configuration>
The hive.hwi.* properties below configure the Hive Web Interface and belong in $HIVE_HOME/conf/hive-site.xml, not in hdfs-site.xml:
<property>
    <name>hive.hwi.listen.host</name>
    <value>0.0.0.0</value>
</property>
<property>
    <name>hive.hwi.listen.port</name>
    <value>9999</value>
</property>
<property>
    <name>hive.hwi.war.file</name>
    <value>lib/hive-hwi-1.2.2.war</value>
</property>
4. With the configuration done, format the NameNode (run from /usr/local/hadoop):
./bin/hdfs namenode -format
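The format output is verbose; the line that matters appears near the end and looks roughly like this (exact wording varies by version):
INFO common.Storage: Storage directory /usr/local/hadoop/tmp/dfs/name has been successfully formatted.
Note that re-formatting later generates a new cluster ID; delete tmp/dfs first, or the DataNode will refuse to start because its ID no longer matches the NameNode's.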
5. Start the NameNode and DataNode daemons, then check the result:
./sbin/start-dfs.sh
jps
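jps should list something like the following (PIDs will differ); if NameNode or DataNode is missing, check the logs under /usr/local/hadoop/logs:
2312 NameNode
2443 DataNode
2609 SecondaryNameNode
2721 Jps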
6. Open http://localhost:50070 in a browser to view the NameNode web UI.
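An optional smoke test against the running HDFS (the paths are only examples):
./bin/hdfs dfs -mkdir -p /user/hadoop # create a home directory in HDFS
./bin/hdfs dfs -put etc/hadoop/core-site.xml /user/hadoop # upload a sample file
./bin/hdfs dfs -ls /user/hadoop # the uploaded file should appear here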
These Hive-related variables also go in ~/.bashrc:
export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
export HIVE_HOME=/usr/local/hive
export HADOOP_HOME=/usr/local/hadoop
export HIVE_CONF_DIR=/usr/local/hive/conf
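To invoke hive without typing its full path, the usual companion lines (an assumption, not part of the original setup):
export PATH=$PATH:$HIVE_HOME/bin # make the hive command available
source ~/.bashrc # reload after editing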