This article is excerpted from the Netkiller Database 手札.

Maven dependencies (pom.xml):
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-jdbc</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.data</groupId>
        <artifactId>spring-data-hadoop</artifactId>
        <version>2.4.0.RELEASE</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>2.1.1</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>druid</artifactId>
        <version>1.1.0</version>
    </dependency>
</dependencies>
Hive data source configuration items (application.properties):
hive.url=jdbc:hive2://172.16.0.10:10000/default
hive.type=com.alibaba.druid.pool.DruidDataSource
hive.driver-class-name=org.apache.hive.jdbc.HiveDriver
hive.username=hive
hive.password=hive
If you use the YAML format instead, the application.yml configuration is as follows:
hive:
  url: jdbc:hive2://172.16.0.10:10000/default
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  type: com.alibaba.druid.pool.DruidDataSource
  username: hive
  password: hive
package cn.netkiller.api.config;

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.jdbc.core.JdbcTemplate;

import com.alibaba.druid.pool.DruidDataSource;

@Configuration
public class HiveDataSource {

    @Autowired
    private Environment env;

    // Build a Druid DataSource from the hive.* configuration properties
    @Bean(name = "hiveJdbcDataSource")
    @Qualifier("hiveJdbcDataSource")
    public DataSource dataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl(env.getProperty("hive.url"));
        dataSource.setDriverClassName(env.getProperty("hive.driver-class-name"));
        dataSource.setUsername(env.getProperty("hive.username"));
        dataSource.setPassword(env.getProperty("hive.password"));
        return dataSource;
    }

    // Expose a JdbcTemplate bound to the Hive DataSource
    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveJdbcDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
beeline is a newer command-line tool shipped with Hive. It is a JDBC client based on the SQLLine CLI and is used together with HiveServer2. It supports both embedded and remote modes: it can access the local Hive service just like the hive client, or it can connect to a remote Hive service by specifying an IP address and port.
Hive officially recommends beeline because it also provides a friendlier interactive experience (similar to the mysql client).
Connect to a remote host:
[hadoop@localhost ~]$ /srv/apache-hive/bin/beeline -u jdbc:hive2://hadoop@localhost:10000
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/srv/apache-hive-2.1.1/lib/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/srv/apache-hadoop-2.8.0/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
Connecting to jdbc:hive2://hadoop@localhost:10000
Connected to: Apache Hive (version 2.1.1)
Driver: Hive JDBC (version 2.1.1)
17/06/29 23:05:35 [main]: WARN jdbc.HiveConnection: Request to set autoCommit to false; Hive does not support autoCommit=false.
Transaction isolation: TRANSACTION_REPEATABLE_READ
Beeline version 2.1.1 by Apache Hive
0: jdbc:hive2://hadoop@localhost:10000> show databases;
+----------------+--+
| database_name |
+----------------+--+
| default |
+----------------+--+
1 row selected (1.332 seconds)
0: jdbc:hive2://hadoop@localhost:10000> use default;
No rows affected (0.038 seconds)
0: jdbc:hive2://hadoop@localhost:10000> show tables;
+-----------+--+
| tab_name |
+-----------+--+
| invites |
| member |
| passwd |
| t_hive |
| v_test |
| vipuser |
+-----------+--+
6 rows selected (0.049 seconds)
0: jdbc:hive2://hadoop@localhost:10000> select * from member;
+--------------+-------------+-------------+---------------+--+
| member.name | member.age | member.sex | member.phone |
+--------------+-------------+-------------+---------------+--+
| Neo | 30 | 1 | 13113668890 |
+--------------+-------------+-------------+---------------+--+
1 row selected (1.137 seconds)
1: jdbc:hive2://hadoop@localhost:10000>
If beeline can log in successfully, Hive is now reachable, and using hiveJdbcTemplate works the same way as traditional JDBC.
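For reference, below is a minimal sketch of what such a caller might look like. The class name MemberHiveRepository is a hypothetical example (not part of the original article), and the query assumes the member table shown in the beeline session above; it simply injects the hiveJdbcTemplate bean defined in HiveDataSource and issues a query.

package cn.netkiller.api.repository;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;

// Hypothetical example class, added for illustration only
@Repository
public class MemberHiveRepository {

    // Inject the Hive-backed JdbcTemplate bean by its qualifier
    @Autowired
    @Qualifier("hiveJdbcTemplate")
    private JdbcTemplate hiveJdbcTemplate;

    // Query the member table exactly as with a traditional JDBC DataSource
    public List<Map<String, Object>> findAllMembers() {
        return hiveJdbcTemplate.queryForList("SELECT * FROM member");
    }
}

Field injection is used here only to keep the sketch short; constructor injection with @Qualifier on the parameter works just as well.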