hive安装

1.先把 apache-hive-2.3.2-bin.tar.gz 上传并解压到 /usr/local/ 下,重命名为 hive

1 [root@hadoop1 conf]# tail -4 /etc/profile
2 ############# hive env #################
3 export  HIVE_HOME=/usr/local/hive
4 export  PATH=$HIVE_HOME/bin:$PATH
5 将mysql-connector-java-5.1.7-bin.jar放置在/usr/local/hive/lib里

2.配置hive各配置文件

 1 mysql>GRANT ALL PRIVILEGES ON hive.* TO 'hive'@'localhost' IDENTIFIED BY 'hive';                        # 数据库内建立hive用户并让其有权限
 2 mysql>GRANT ALL PRIVILEGES ON hive.* TO 'hive'@'%' IDENTIFIED BY 'hive';
 3 [root@hadoop1 conf]# cd /usr/local/hive/conf
 4 [root@hadoop1 conf]# vim hive-site.xml                    # 新建一个配置文件
 5 <?xml version="1.0" encoding="UTF-8" standalone="no"?>
 6 <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
 7 <configuration>
 8     <property>
 9         <name>javax.jdo.option.ConnectionURL</name>
10         <value>jdbc:mysql://10.81.8.53:3306/hive?createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8</value>                    # 连接的数据库
11     </property>
12     <property>
13         <name>javax.jdo.option.ConnectionDriverName</name>
14         <value>com.mysql.jdbc.Driver</value>                                                            # 使用的驱动
15     </property>
16     <property>
17         <name>javax.jdo.option.ConnectionUserName</name>
18         <value>hive</value>                                                                                # 连接数据库的用户
19     </property>
20     <property>
21         <name>javax.jdo.option.ConnectionPassword</name>
22         <value>hive</value>                                                                                # 用户密码
23     </property>
24     <property>
25         <name>hive.metastore.schema.verification</name>
26         <value>false</value>
27         <description>
28             Enforce metastore schema version consistency.
29             True: Verify that version information stored in metastore matches with one from Hive jars.  Also disable automatic
30                 schema migration attempt. Users are required to manually migrate schema after Hive upgrade which ensures
31                 proper metastore schema migration. (Default)
32             False: Warn if the version information stored in metastore doesn't match with one in Hive jars.
33         </description>
34     </property>
35 </configuration>
36 [root@hadoop1 conf]# schematool -initSchema -dbType mysql                                                # 初始化数据库
37 解决中文乱码问题(以下操作在 MySQL 数据库内执行)
38 mysql> use hive
39 mysql> alter table COLUMNS_V2 modify column COMMENT varchar(256) character set utf8;
40 mysql> alter table TABLE_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
41 mysql> alter table PARTITION_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
42 mysql> alter table PARTITION_KEYS modify column PKEY_COMMENT varchar(4000) character set utf8;
43 mysql> alter table  INDEX_PARAMS  modify column PARAM_VALUE  varchar(4000) character set utf8;

3.验证

 1 mysql> show databases;
 2 +--------------------+
 3 | Database           |
 4 +--------------------+
 5 | information_schema |
 6 | hive               |                                            # 出现hive库
 7 | mysql              |
 8 +--------------------+
 9 3 rows in set (0.00 sec)
10 [root@hadoop1 conf]# hive                                        # 直接输入命令可以进入相关环境
11 SLF4J: Class path contains multiple SLF4J bindings.
12 SLF4J: Found binding in [jar:file:/usr/local/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
13 SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
14 SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
15 SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
16 
17 Logging initialized using configuration in jar:file:/usr/local/hive/lib/hive-common-2.3.2.jar!/hive-log4j2.properties Async: true
18 Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
19 hive> 
原文地址:https://www.cnblogs.com/bfmq/p/7844714.html