
If jps cannot be found:
- yum install java-1.8.0-openjdk-devel.x86_64
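As a quick sanity check, both commands below should now run (jps ships with the -devel package):
- java -version
- jps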
Install and configure passwordless SSH login
- yum install openssh-server openssh-clients
- ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
- cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
- chmod 0600 ~/.ssh/authorized_keys
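To confirm passwordless login works, ssh to localhost; it may ask to confirm the host key the first time, but it should not prompt for a password:
- ssh localhost
- exit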
Install Hadoop
[root@localhost hadoop-2.6.5]# cd ~
[root@localhost ~]# vim .bash_profile
Add the following lines:
export HADOOP_HOME=/home/kkxmoye/Downloads/hadoop-2.6.5
PATH=$JAVA_HOME/bin:$PATH:$HOME/bin:$HADOOP_HOME/bin
Run source .bash_profile to make the changes take effect.
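A quick way to confirm the variables are active in the current shell (hadoop version should print the release if PATH was set correctly):
- echo $HADOOP_HOME
- hadoop version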
- cd /home/kkxmoye/Downloads/hadoop-2.6.5
- vim etc/hadoop/core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>
- vim etc/hadoop/hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
- vim etc/hadoop/mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
  <property>
    <name>mapreduce.admin.user.env</name>
    <value>HADOOP_MAPRED_HOME=$HADOOP_COMMON_HOME</value>
  </property>
  <property>
    <name>yarn.app.mapreduce.am.env</name>
    <value>HADOOP_MAPRED_HOME=$HADOOP_COMMON_HOME</value>
  </property>
</configuration>
- vim etc/hadoop/yarn-site.xml
<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>
- vim sbin/start-dfs.sh
- Add at the top of the file:
HDFS_DATANODE_USER=root
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
- vim sbin/stop-dfs.sh
- Add at the top of the file:
HDFS_DATANODE_USER=root
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
- vim sbin/start-yarn.sh
- Add at the top of the file:
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
- vim sbin/stop-yarn.sh
- Add at the top of the file:
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
- vim etc/hadoop/hadoop-env.sh
- Set JAVA_HOME
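A typical line on CentOS with the OpenJDK 8 package is shown below; the exact path is an assumption and should be adjusted to where the JDK actually lives on your machine:
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk   # path is an assumption; adjust to your JDK location
On a fresh installation, HDFS also needs the NameNode formatted once before the first start:
- bin/hdfs namenode -format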
Start Hadoop
- sbin/start-dfs.sh
- sbin/start-yarn.sh
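After both scripts finish, jps should normally list NameNode, DataNode, SecondaryNameNode, ResourceManager and NodeManager (plus Jps itself) for this single-node setup:
- jps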
Stop Hadoop
- sbin/stop-dfs.sh
- sbin/stop-yarn.sh
Access the Hadoop web UI
- http://192.168.48.133:8088/
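Port 8088 is the YARN ResourceManager web UI; replace the IP with your own host. A quick command-line check against the ResourceManager REST API, which should return cluster information as JSON if YARN is up:
- curl http://192.168.48.133:8088/ws/v1/cluster/info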
WordCount test
- Create local sample files
[root@localhost hadoop-2.6.5]# mkdir /home/kkxmoye/Downloads/file
[root@localhost hadoop-2.6.5]# cd ../file/
[root@localhost file]# echo "hello world" > file1.txt
[root@localhost file]# echo "hello hadoop" > file2.txt
[root@localhost file]# echo "hello mapreduce" >> file2.txt
[root@localhost file]# ls
file1.txt  file2.txt
- Create the input directory on HDFS
[root@localhost file]# cd ../hadoop-2.6.5/
[root@localhost hadoop-2.6.5]# bin/hadoop fs -mkdir /hdfsinput
[root@localhost hadoop-2.6.5]# bin/hadoop fs -put /home/kkxmoye/Downloads/file/file* /hdfsinput
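To confirm the upload, list the input directory; both files should appear:
[root@localhost hadoop-2.6.5]# bin/hadoop fs -ls /hdfsinput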
Run the Hadoop built-in WordCount example
- bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.6.5.jar wordcount /hdfsinput /hdfsoutput
- When the job completes successfully, check the output:
- bin/hadoop fs -ls /hdfsoutput
- bin/hadoop fs -cat /hdfsoutput/part-r-00000
- Viewing part-r-00000 shows that hello appears 3 times, while hadoop, mapreduce, and world each appear once
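For reference, with the three echo lines above as input, the default WordCount output (tab-separated, sorted by key) should look roughly like this:
hadoop	1
hello	3
mapreduce	1
world	1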
Copyright notice: this is an original article by CSDN blogger 「weixin_33737134」, released under the CC 4.0 BY-SA license. Please include the original source link and this notice when reposting.
Original link: https://blog.csdn.net/weixin_33737134/article/details/92415684