`

Hive的JDBC接口

    博客分类:
  • Hive
阅读更多

>>>在eclipse中使用JDBC连接Hive前，需要先启动Hive服务以监听用户连接

hive/bin/ext$ hive --service hiveserver

>>>配置eclipse环境

在pom.xml中添加hive包依赖

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <hadoop.version>2.5.0</hadoop.version>
    <hive.version>0.13.1</hive.version>
 </properties>

 <dependencies>
  	<dependency>
  	 	<groupId>org.apache.hadoop</groupId>
  	 	<artifactId>hadoop-client</artifactId>
  	 	<version>${hadoop.version}</version>
 </dependency>

<dependency>
  		<groupId>org.apache.hive</groupId>
  		<artifactId>hive-jdbc</artifactId>
  		<version>${hive.version}</version>
 </dependency>
  	
  <dependency>
  		<groupId>org.apache.hive</groupId>
  		<artifactId>hive-exec</artifactId>
  		<version>${hive.version}</version>
  </dependency>
 </dependencies>

 >>>示例代码:

package com.fb.hadoop.hive;

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
 
public class HiveJdbcClient {

  /**
   * HiveServer1 driver class; it matches the {@code jdbc:hive://} URL scheme
   * used below. (HiveServer2 would need {@code org.apache.hive.jdbc.HiveDriver}
   * and a {@code jdbc:hive2://} URL instead.)
   */
  private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

  /**
   * Demonstrates basic Hive operations over JDBC: (re)create a table, load a
   * local file into it, then run SHOW TABLES / DESCRIBE / SELECT / COUNT.
   *
   * @param args unused
   * @throws SQLException if any JDBC call fails
   */
  public static void main(String[] args) throws SQLException {
    // Register the JDBC driver; bail out early if it is not on the classpath.
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      e.printStackTrace();
      System.exit(1);
    }

    // try-with-resources closes the statement and connection even when a
    // query throws (the original version leaked both).
    try (Connection con = DriverManager.getConnection(
             "jdbc:hive://master:10000/default", "root", "gsdjsj");
         Statement stmt = con.createStatement()) {

      String tableName = "user2";

      // DDL statements produce no result set, so use execute() rather than
      // executeQuery(), which drivers reject for non-queries.
      // "if exists" keeps the first run from failing when the table is absent.
      stmt.execute("drop table if exists " + tableName);
      // NOTE: the original concatenation lacked spaces between the ")" /
      // "ROW FORMAT" and "'\t'" / "STORED" fragments, yielding invalid HiveQL.
      stmt.execute("create table " + tableName
          + " (name string,age int,sex string,phone string) "
          + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' "
          + "STORED AS TEXTFILE");

      // show tables
      String sql = "show tables '" + tableName + "'";
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        if (res.next()) {
          System.out.println(res.getString(1));
        }
      }

      // describe table
      sql = "describe " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
      }

      // load data into table
      // NOTE: filepath has to be local to the hive server
      // NOTE: /opt/data/user.txt must be tab-separated to match the table DDL
      String filepath = "/opt/data/user.txt";
      sql = "load data local inpath '" + filepath + "' into table " + tableName;
      System.out.println("Running: " + sql);
      stmt.execute(sql); // LOAD DATA is DML with no result set

      // select * query
      sql = "select * from " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1) + "\t" + res.getInt(2));
        }
      }

      // regular hive query (the aggregation runs as a MapReduce job)
      sql = "select count(1) from " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1));
        }
      }
    }
  }
}

 

 

分享到:
评论

相关推荐

Global site tag (gtag.js) - Google Analytics