## 1. Basic environment
Same as the previous Hadoop article: //www.greatytc.com/p/57ffe2e3a092
## 2. Preparation
Download the installation package: http://apache.fayea.com/hive/hive-1.2.1/apache-hive-1.2.1-bin.tar.gz
MySQL dependency
Since the operating system is CentOS 6, MySQL 5.5 can be installed.
Download: MySQL-client-5.5.46-1.linux2.6.x86_64.rpm and MySQL-server-5.5.46-1.linux2.6.x86_64.rpm
Install
rpm -ivh MySQL-client-5.5.46-1.linux2.6.x86_64.rpm
rpm -ivh MySQL-server-5.5.46-1.linux2.6.x86_64.rpm
Start
service mysql start
Set the root password
/usr/bin/mysqladmin -uroot password '123456'
Verify it works
mysql -uroot -p123456
The login should succeed.
Create the Hive metastore database
create database hivemeta character set latin1;
Create the hive user, set its password, and grant access
grant all privileges on *.* to hive@"%" identified by "123456" with grant option;
flush privileges;
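As an optional sanity check, the new hive account can be tested right away; the master host name is the one used throughout this article, and -e simply runs one statement and exits:
mysql -h master -uhive -p123456 -e "show databases;"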
## 3. Installation
- Set environment variables
Edit the .bashrc file:
export HIVE_HOME=/home/hadoop/bigdata/hive
export PATH=$HIVE_HOME/bin:$PATH
HIVE_HOME is the Hive installation directory.
Apply the environment variables
source .bashrc
Verify
[hadoop@master ~]$ echo $HIVE_HOME
/home/hadoop/bigdata/hive
Copy to the slave machines
scp .bashrc hadoop@192.168.0.167:/home/hadoop/
scp .bashrc hadoop@192.168.0.168:/home/hadoop/
Verify on the slaves
Same method as on the master (or use the ssh check below).
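A minimal non-interactive check of the slaves, assuming the slave01/slave02 host names map to the two IPs above:
ssh slave01 'source ~/.bashrc && echo $HIVE_HOME'
ssh slave02 'source ~/.bashrc && echo $HIVE_HOME'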
- Configure Hive
Unpack into the installation directory
cd /home/hadoop/bigdata/
tar -zxf apache-hive-1.2.1-bin.tar.gz
mv apache-hive-1.2.1-bin hive
hive-site.xml configuration
cd /home/hadoop/bigdata/hive/conf
cp hive-default.xml.template hive-site.xml
Properties to modify:
<property>
<name>hive.metastore.warehouse.dir</name>
<value>hdfs://master:9000/hive/warehouse</value>
</property>
<property>
<name>hive.exec.scratchdir</name>
<value>hdfs://master:9000/hive/scratchdir</value>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/home/hadoop/bigdata/hive/warehouse</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>/home/hadoop/bigdata/hive/logs</value>
</property>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://master:3306/hivemeta?createDatabaseIfNotExist=true</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>123456</value>
</property>
<property>
<name>hive.aux.jars.path</name>
<value>file:///home/hadoop/bigdata/hive/lib/hive-hbase-handler-1.2.1.jar,
file:///home/hadoop/bigdata/hive/lib/protobuf-java-2.5.0.jar,
file:///home/hadoop/bigdata/hive/lib/hbase-client-1.0.1.1.jar,
file:///home/hadoop/bigdata/hive/lib/hbase-common-1.0.1.1.jar,
file:///home/hadoop/bigdata/hive/lib/zookeeper-3.4.6.jar,
file:///home/hadoop/bigdata/hive/lib/guava-14.0.1.jar</value>
</property>
<property>
<name>hive.metastore.local</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://master:9083,thrift://slave01:9083,thrift://slave02:9083</value>
</property>
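The metastore connection above uses com.mysql.jdbc.Driver, so the MySQL JDBC connector jar must also be on Hive's classpath. A sketch of that step, where the exact connector file name is an assumption (use whichever version was downloaded):
cp mysql-connector-java-5.1.38-bin.jar /home/hadoop/bigdata/hive/lib/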
hive-env.sh configuration
cd /home/hadoop/bigdata/hive/conf
cp hive-env.sh.template hive-env.sh
Edit hive-env.sh and add:
export HADOOP_HOME=/home/hadoop/bigdata/hadoop
export HIVE_CONF_DIR=/home/hadoop/bigdata/hive/conf
hive-log4j.properties configuration
cd /home/hadoop/bigdata/hive/conf
cp hive-log4j.properties.template hive-log4j.properties
Edit hive-log4j.properties and set:
hive.log.threshold=ALL
hive.root.logger=INFO,DRFA
hive.log.dir=/home/hadoop/bigdata/hive/log
hive.log.file=hive.log
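hive.log.dir above points to .../hive/log, while only .../hive/logs is created in the later steps, so it may be worth creating that directory as well (path copied from the setting above):
mkdir -p /home/hadoop/bigdata/hive/log
ssh slave01 mkdir -p /home/hadoop/bigdata/hive/log
ssh slave02 mkdir -p /home/hadoop/bigdata/hive/log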
hive-config.sh configuration
cd /home/hadoop/bigdata/hive/bin
Edit hive-config.sh and add:
export JAVA_HOME=/usr/java/jdk1.8.0_60
export HADOOP_HOME=/home/hadoop/bigdata/hadoop
export HIVE_HOME=/home/hadoop/bigdata/hive
Create directories and grant permissions
hdfs dfs -mkdir -p /hive/scratchdir
hdfs dfs -mkdir -p /hive/warehouse
hdfs dfs -chmod g+w /hive/scratchdir
hdfs dfs -chmod g+w /hive/warehouse
mkdir -p /home/hadoop/bigdata/hive/warehouse
mkdir -p /home/hadoop/bigdata/hive/logs
ssh slave01 mkdir -p /home/hadoop/bigdata/hive/warehouse
ssh slave01 mkdir -p /home/hadoop/bigdata/hive/logs
ssh slave02 mkdir -p /home/hadoop/bigdata/hive/warehouse
ssh slave02 mkdir -p /home/hadoop/bigdata/hive/logs
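To confirm the HDFS directories exist with the expected permissions, a quick check:
hdfs dfs -ls /hive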
Start & verify
nohup hive --service metastore -v &
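Before opening the Hive CLI, it may be worth confirming the metastore thrift service is listening (port 9083 comes from hive.metastore.uris above); one way, assuming net-tools is installed:
netstat -nltp | grep 9083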
hive
Verify
create table hive_test(id int, name string);
show tables;
hive_test
Time taken: 1.87 seconds, Fetched: 1 row(s)
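For a closer look, describing the table from the Hive CLI also confirms the metastore connection and shows the table's storage location:
describe formatted hive_test;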
## 4. Problems encountered
Problem 1:
Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient
Solution
1) Check that the MySQL JDBC driver jar is present in hive/lib.
2) Check the mysql.user table for empty (anonymous) user records; delete them, then run flush privileges (see the example below).
3) Check that hive-site.xml is configured correctly.
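A sketch of step 2, run as the MySQL root user with the password set earlier in this article:
mysql -uroot -p123456
select user, host from mysql.user;
delete from mysql.user where user='';
flush privileges;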
Problem 2:
Exception in thread "main" java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: hdfs://master:9000/hive/scratchdir on HDFS should be writable. Current permissions are: rwxr-xr-
Solution
hadoop fs -chmod -R 777 /hive/scratchdir
Problem 3:
Exception in thread "main" java.lang.RuntimeException: java.lang.IllegalArgumentException: java.net.URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D
Solution
hive.querylog.location
hive.exec.local.scratchdir
hive.downloaded.resources.dir
Set these three values to absolute local paths (see the example below).
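For example, in hive-site.xml (the /home/hadoop/bigdata/hive/tmp path is only an illustration; any existing absolute local directory works, and hive.querylog.location was already given an absolute path above):
<property>
<name>hive.exec.local.scratchdir</name>
<value>/home/hadoop/bigdata/hive/tmp</value>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/home/hadoop/bigdata/hive/tmp/resources</value>
</property>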
Problem 4:
NestedThrowablesStackTrace: com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Specified key was too long; max key length is 767 bytes
Solution
alter database hivemeta character set latin1;
create table hive_test(id int, name string);