天天看點

hive安裝 (hive1.2.1+hadoop2.7+mysql)

1. 下載下傳解壓

cd /mk/soft
tar -xvzf apache-hive-1.2.1-bin.tar.gz -C /appl/
cd /appl
mv apache-hive-1.2.1-bin hive-1.2.1
           

2. 配置環境變量

vi /etc/profile
export HIVE_HOME=/appl/hive-1.2.1
export PATH=$PATH:$HIVE_HOME/bin
export CLASSPATH=.:$HIVE_HOME/lib
:wq
source /etc/profile
           

3. 配置Hive

cd /appl/hive-1.2.1/conf
cp hive-default.xml.template hive-default.xml (不改變)
cp hive-default.xml.template hive-site.xml (覆寫hive-default.xml配置項)
cp hive-env.sh.template hive-env.sh
           
cp hive-log4j.properties.template hive-log4j.properties
vi hive-env.sh
export HADOOP_HOME=${HADOOP_HOME}
export HIVE_CONF_DIR=${HIVE_HOME}/conf
           
vi hive-log4j.properties
#log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
#log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
           

vi hive-site.xml

--------

<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <!-- Hive Execution Parameters -->
  <property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://centos1:3306/metastore_hive?createDatabaseIfNotExist=true</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>


<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
<description>Driver class name for a JDBC metastore</description>
</property>


<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
<description>username to use against metastore database</description>
</property>


<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>hive</value>
<description>password to use against metastore database</description>
</property>


<property>
<name>hive.metastore.warehouse.dir</name>
<value>/hive/warehouse</value>
<description>location of default database for the warehouse (hdfs path)</description>
</property>


<property> 
<name>hive.metastore.uris</name> 
<value>thrift://centos1:9083</value>
</property>


<property>
<name>hive.server2.authentication</name>
<value>NONE</value>
</property>
</configuration>
           

--------

hadoop fs -mkdir /hive
hadoop fs -mkdir /hive/warehouse
hadoop fs -chmod g+w /hive/warehouse
hadoop fs -mkdir /hive/tmp
hadoop fs -chmod g+w /hive/tmp
           

mysql

mysql -uroot -proot

create database metastore_hive;
grant all on metastore_hive.* to hive@'%' identified by 'hive';
grant all on metastore_hive.* to hive@localhost identified by 'hive';
grant all on metastore_hive.* to hive@centos1 identified by 'hive';
ALTER DATABASE metastore_hive CHARACTER SET latin1;
show databases;
exit
mysql -hcentos1 -P3306 -uhive -phive
use metastore_hive
exit
           

傳mysql-connector-java-5.1.30-bin.jar到/appl/hive-1.2.1/lib/

hadoop 配置(這一步很多參考文章缺少了)

/appl/hadoop-2.7.0/etc/hadoop/hdfs-site.xml

<property>
  <name>dfs.permissions</name>
  <value>false</value>
</property>
           

/appl/hadoop-2.7.0/etc/hadoop/hadoop-env.sh

export HADOOP_CLASSPATH=$CLASSPATH:$HADOOP_CLASSPATH
           

# 開通端口(centos)

/sbin/iptables -I INPUT -p tcp --dport 9083 -j ACCEPT
/sbin/iptables -I INPUT -p tcp --dport 10000 -j ACCEPT
/etc/init.d/iptables save
service iptables restart
           

5. 啟動服務

1、啟動metastore服務

hive --service metastore & (預設端口:9083,可-p 9083控制;測試:hive --service metastore)

2、啟動hiveserver服務

hive --service hiveserver2 &  (測試:hive --service hiveserver2)

3、通路hiveserver的兩種方式

(1) JDBC

Class.forName("org.apache.hive.jdbc.HiveDriver");

DriverManager.getConnection("jdbc:hive2://IP:10000/default","","");

(2) beeline

beeline -u jdbc:hive2://localhost:10000/ -n hive -p hive

beeline

!connect jdbc:hive2://localhost:10000
show tables;
create table test1 (id int, name string) row format delimited fields terminated by ',' stored as textfile;
load data local inpath '/mk/test/test1.txt' into table test1;
select * from test1;
!quit
           

檢視hive檔案

hadoop fs -ls /hive/warehouse/test1

停止:因為不是通過服務啟動,隻能ps -ef|grep hive,再kill <pid>

參考

http://blog.csdn.net/an342647823/article/details/46048403

http://blog.fens.me/hadoop-hive-intro/

http://blog.csdn.net/jiangkai_nju/article/details/7292313

http://blog.csdn.net/reesun/article/details/8556078

http://www.micmiu.com/bigdata/hive/hive-metastore-config/

http://blog.csdn.net/stark_summer/article/details/45844403

繼續閱讀