今天看啥
热点:

hive通过jdbc创建表,分区,桶,hivejdbc


首先我们需要打开hiveserver服务:hive --service hiveserver

然后我们和操作普通数据库一样,先加载驱动,然后建立连接,接着创建一个statement,然后执行查询,返回结果集。代码如下(一定要写对sql语句,要仔细,下面需要注意的地方我已经标示出来了:)

package playHive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Demo client that drives Hive over JDBC (legacy HiveServer1): creates a plain
 * table, a partitioned table and a bucketed table, loads data and queries them.
 *
 * <p>Prerequisite: the hiveserver service must be running
 * ({@code hive --service hiveserver}) at the configured host/port.
 *
 * <p>Note: DDL/DML statements (DROP/CREATE/LOAD/INSERT) are issued through
 * {@link Statement#execute(String)}; {@code executeQuery()} is reserved for
 * statements that produce a {@link ResultSet} and the Hive driver rejects it
 * for anything else.
 */
public class HiveJdbcClient {

	/** HiveServer1 driver class (HiveServer2 would use org.apache.hive.jdbc.HiveDriver). */
	private final static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
	/** Local file consumed by LOAD DATA LOCAL INPATH. */
	private final static String localFilePath = "/home/hadoop/test/hive/test.txt";
	/** HDFS directory consumed by LOAD DATA INPATH. */
	private final static String hdfsFilePath = "hdfs://192.168.0.1:9000/user/hadoop/";
	private final static String tableName = "testHiveDriverTable";
	private final static String partitionName = "testHiveDriverPartition";
	private final static String bucketName = "testHiveDriverBucket";
	private static String sql = "";
	private static Connection connection;
	private static Statement statement;
	private static ResultSet resultSet;

	static {
		try {
			Class.forName(driverName);
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
			System.exit(1);
		}
		try {
			// HiveServer must already be running: hive --service hiveserver
			connection = DriverManager.getConnection(
					"jdbc:hive://192.168.0.1:50000/default", "hive", "hadoop");
			statement = connection.createStatement();
		} catch (SQLException e) {
			// Without a connection every method below would NPE on `statement`;
			// fail fast instead of limping on.
			e.printStackTrace();
			System.exit(1);
		}
	}

	/**
	 * Drops (if present) and re-creates the demo table, then prints its
	 * listing and schema.
	 *
	 * @throws SQLException if any statement fails on the server
	 */
	public static void createTable() throws SQLException {
		// `if exists` keeps the very first run from failing on the DROP.
		sql = "drop table if exists " + tableName;
		System.out.println("delete table****");
		statement.execute(sql); // DDL: execute(), not executeQuery()
		sql = "create table " + tableName + " (key int,value String)"
				+ " row format delimited fields terminated by '\t'";
		System.out.println("create table:" + tableName);
		statement.execute(sql);
		showTable();
		describeTable();
	}

	/**
	 * Lists the demo table via SHOW TABLES.
	 *
	 * @throws SQLException if the query fails
	 */
	public static void showTable() throws SQLException {
		// Hive's SHOW TABLES takes a quoted pattern, not a bare identifier.
		sql = "show tables '" + tableName + "'";
		System.out.println("show table:" + tableName);
		resultSet = statement.executeQuery(sql);
		while (resultSet.next()) {
			System.out.println(resultSet.getString(1));
		}
	}

	/**
	 * Prints the column name / type pairs of the demo table.
	 *
	 * @throws SQLException if the query fails
	 */
	public static void describeTable() throws SQLException {
		sql = "describe " + tableName;
		System.out.println("describe table:" + tableName);
		resultSet = statement.executeQuery(sql);
		while (resultSet.next()) {
			System.out.println(resultSet.getString(1) + "\t" + resultSet.getString(2));
		}
	}

	/**
	 * Loads data into the demo table, overwriting existing contents.
	 *
	 * @param isLocal true to load from the local filesystem path,
	 *                false to load from the HDFS path
	 * @throws SQLException if the LOAD statement fails
	 */
	public static void loadDataToTable(boolean isLocal) throws SQLException {
		sql = isLocal
				? "load data local inpath '" + localFilePath + "' overwrite into table " + tableName
				: "load data inpath '" + hdfsFilePath + "' overwrite into table " + tableName;
		System.out.println("load data into table:" + tableName);
		statement.execute(sql); // LOAD returns no ResultSet: execute(), not executeQuery()
	}

	/**
	 * Prints every (key, value) row of the demo table.
	 *
	 * @throws SQLException if the query fails
	 */
	public static void queryTable() throws SQLException {
		sql = "select * from " + tableName;
		System.out.println("execute query:select *query");
		resultSet = statement.executeQuery(sql);
		while (resultSet.next()) {
			System.out.println(resultSet.getString(1) + '\t' + resultSet.getString(2));
		}
	}

	/**
	 * Runs an aggregate query (max of per-key value counts) to show that
	 * MapReduce-backed queries work through JDBC too.
	 *
	 * @throws SQLException if the query fails
	 */
	public static void regularTableQuery() throws SQLException {
		sql = "select key,max(n) from (select key,count(value) as n from "
				+ tableName + " group by key)sbuq group by key";
		System.out.println("execute query:");
		resultSet = statement.executeQuery(sql);
		while (resultSet.next()) {
			System.out.println(resultSet.getString(1));
		}
	}

	/**
	 * Drops (if present) and re-creates the partitioned demo table,
	 * partitioned by the string column `value`.
	 *
	 * @throws SQLException if any statement fails
	 */
	public static void createPartition() throws SQLException {
		sql = "drop table if exists " + partitionName;
		System.out.println("delete partition");
		statement.execute(sql);
		sql = "create table " + partitionName + "(key int) partitioned by (value string) "
				+ "row format delimited fields terminated by '\t'";
		System.out.println("create partition:" + partitionName);
		statement.execute(sql);
	}

	/**
	 * Populates the partition value='qinqin' from the demo table.
	 * The SELECT list must be `key` (the partitioned table's only data
	 * column); selecting `value` instead would insert nulls.
	 *
	 * @throws SQLException if the INSERT fails
	 */
	public static void insertDataToPartition() throws SQLException {
		sql = "insert overwrite table " + partitionName
				+ " partition (value='qinqin') select key from " + tableName;
		statement.execute(sql);
		System.out.println("insert data to " + partitionName + " success");
	}

	/**
	 * Prints every row of the partition value='qinqin'.
	 *
	 * @throws SQLException if the query fails
	 */
	public static void selectFromPartition() throws SQLException {
		sql = "select * from " + partitionName + " where value='qinqin'";
		System.out.println("query in partition:select * in " + partitionName);
		resultSet = statement.executeQuery(sql);
		while (resultSet.next()) {
			System.out.println(resultSet.getString(1));
		}
	}

	/**
	 * Drops (if present) and re-creates the bucketed demo table,
	 * clustered by `key` into 3 buckets.
	 *
	 * @throws SQLException if any statement fails
	 */
	public static void createBucket() throws SQLException {
		sql = "drop table if exists " + bucketName;
		System.out.println("delete bucket");
		statement.execute(sql); // was executeQuery(): DROP returns no ResultSet
		sql = "create table " + bucketName + "(key int,value string) clustered by(key) into 3 buckets "
				+ "row format delimited fields terminated by '\t'";
		System.out.println("create bucket:" + bucketName);
		statement.execute(sql);
	}

	/**
	 * Copies the demo table into the bucketed table. Bucketing enforcement
	 * must be switched on, otherwise Hive writes a single unbucketed file.
	 *
	 * @throws SQLException if either statement fails
	 */
	public static void insertDataToBucket() throws SQLException {
		// Without this, old Hive versions ignore the CLUSTERED BY clause on insert.
		statement.execute("set hive.enforce.bucketing = true");
		sql = "insert overwrite table " + bucketName + " select key,value from " + tableName;
		System.out.println("insert data into bucket:" + bucketName);
		statement.execute(sql); // INSERT returns no ResultSet: execute(), not executeQuery()
	}

	/**
	 * Samples bucket 1 of 3 from the bucketed table and prints its rows.
	 *
	 * @throws SQLException if the query fails
	 */
	public static void selectFromBucket() throws SQLException {
		sql = "select * from " + bucketName + " tablesample(bucket 1 out of 3 on key)";
		System.out.println("select from bucket:" + bucketName);
		resultSet = statement.executeQuery(sql);
		while (resultSet.next()) {
			System.out.println(resultSet.getString(1) + "\t" + resultSet.getString(2));
		}
	}

	/**
	 * Releases JDBC resources in reverse order of acquisition. The original
	 * only closed the connection, leaking the statement and result set.
	 */
	public static void closeConnection() {
		try {
			if (resultSet != null) {
				resultSet.close();
			}
			if (statement != null) {
				statement.close();
			}
			if (connection != null) {
				connection.close();
			}
		} catch (SQLException e) {
			e.printStackTrace();
			System.exit(1);
		}
	}

	/** Runs the plain-table demo: create, load, select, aggregate. */
	public static void tableOperation() throws SQLException {
		HiveJdbcClient.createTable();
		HiveJdbcClient.loadDataToTable(true);
		HiveJdbcClient.queryTable();
		HiveJdbcClient.regularTableQuery();
	}

	/** Runs the partition demo: create, insert from the plain table, select. */
	public static void partitionOperation() throws SQLException {
		HiveJdbcClient.createPartition();
		HiveJdbcClient.insertDataToPartition();
		HiveJdbcClient.selectFromPartition();
	}

	/** Runs the bucket demo: create, insert from the plain table, sample. */
	public static void bucketOperation() throws SQLException {
		HiveJdbcClient.createBucket();
		HiveJdbcClient.insertDataToBucket();
		HiveJdbcClient.selectFromBucket();
	}

	public static void main(String[] args) {
		try {
			System.out.println("table Operation***********************************************");
			// NOTE: partitionOperation()/bucketOperation() select from tableName,
			// so tableOperation() must have populated it at least once.
			//HiveJdbcClient.tableOperation();
			System.out.println("partition Operation***********************************************");
			HiveJdbcClient.partitionOperation();
			System.out.println("bucket Operation***********************************************");
			//HiveJdbcClient.bucketOperation();
			HiveJdbcClient.closeConnection();
		} catch (SQLException e) {
			e.printStackTrace();
		}
	}
}




www.bkjia.comtruehttp://www.bkjia.com/yjs/997440.htmlTechArticlehive通过jdbc创建表,分区,桶,hivejdbc 首先我们需要打开hiveserver服务:hive --service hiveserver 然后我们和操作普通数据库一样,先加载驱动,...

相关文章

相关搜索: hadoop hive jdbc 数据库 sql

帮客评论

视觉看点