Classic Hive Examples


This post walks through two classic Hive JDBC examples: a utility that loads local data files into dt-partitioned Hive tables, and a small end-to-end client that drops/creates a table, loads data, and runs queries over JDBC.

package com.bailiangroup.bigdata.utils;

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveJdbcCliUtil {

	/**
	 * Hive Java API utility.
	 *
	 * Start Hive's remote service first. For the legacy HiveServer, run:
	 *   hive --service hiveserver >/dev/null 2>/dev/null &
	 * The jdbc:hive2:// URL used in main() requires HiveServer2 instead:
	 *   hive --service hiveserver2 >/dev/null 2>/dev/null &
	 */
	String driverName;
	String url;
	String username;
	String password;
	String sql = "";
	ResultSet res = null;
	Connection conn = null;
	Statement stmt = null;
	static final Logger log = Logger.getLogger(HiveJdbcCliUtil.class);

	public HiveJdbcCliUtil(String driverName, String url, String username, String password) {
		this.driverName = driverName;
		this.url = url;
		this.username = username;
		this.password = password;
	}

	/** Loads the JDBC driver and lazily opens the connection and statement. */
	public void init() {
		try {
			Class.forName(driverName);
			if (conn == null) {
				conn = DriverManager.getConnection(url, username, password);
			}
			if (stmt == null) {
				stmt = conn.createStatement();
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Recursively walks a directory and loads each data file into the matching
	 * partitioned Hive table. The partition value dt is taken from the file name:
	 * the last 14 characters are expected to end with a date, e.g. for a file named
	 * KF_CASE_NORMAL_2017-06-01.txt the last 14 characters are "2017-06-01.txt"
	 * and the partition becomes dt='2017-06-01'.
	 */
	public void showAllFiles(File dir) throws Exception {
		File[] fs = dir.listFiles();
		for (int i = 0; i < fs.length; i++) {
			String name = fs[i].getName();
			String substring = null;
			String date = null;
			if (name.length() > 14) {
				substring = name.substring(name.length() - 14, name.length());
				date = substring.substring(0, 10);
			}

			String s17_kf_case_normal = "sourcedata.s17_kf_case_normal";
			String s17_kf_case_postil = "sourcedata.s17_kf_case_postil";
			String s17_kf_case_transfer_his = "sourcedata.s17_kf_case_transfer_his";
			String s17_kf_customer_info = "sourcedata.s17_kf_customer_info";
			String s17_kf_main_cases = "sourcedata.s17_kf_main_cases";

			if (name.contains("KF_CASE_NORMAL")) {
				System.out.println("------>" + fs[i].getAbsolutePath());
				sql = "load data local inpath '" + fs[i].getAbsolutePath() + "' overwrite into table "
						+ s17_kf_case_normal + " partition(dt='" + date + "')";
				log.info("Running:" + sql);
				System.out.println("sql===" + sql);
				stmt.execute(sql);
			}

			if (name.contains("KF_CASE_POSTIL")) {
				System.out.println("------>" + fs[i].getAbsolutePath());
				sql = "load data local inpath '" + fs[i].getAbsolutePath() + "' overwrite into table "
						+ s17_kf_case_postil + " partition(dt='" + date + "')";
				System.out.println("sql===" + sql);
				log.info("Running:" + sql);
				stmt.execute(sql);
			}

			if (name.contains("KF_CASE_TRANSFER_HIS")) {
				System.out.println("------>" + fs[i].getAbsolutePath());
				sql = "load data local inpath '" + fs[i].getAbsolutePath() + "' overwrite into table "
						+ s17_kf_case_transfer_his + " partition(dt='" + date + "')";
				System.out.println("sql===" + sql);
				log.info("Running:" + sql);
				stmt.execute(sql);
			}

			if (name.contains("KF_CUSTOMER_INFO")) {
				System.out.println("------>" + fs[i].getAbsolutePath());
				sql = "load data local inpath '" + fs[i].getAbsolutePath() + "' overwrite into table "
						+ s17_kf_customer_info + " partition(dt='" + date + "')";
				System.out.println("sql===" + sql);
				log.info("Running:" + sql);
				stmt.execute(sql);
			}

			if (name.contains("KF_MAIN_CASES")) {
				System.out.println("------>" + fs[i].getAbsolutePath());
				sql = "load data local inpath '" + fs[i].getAbsolutePath() + "' overwrite into table "
						+ s17_kf_main_cases + " partition(dt='" + date + "')";
				System.out.println("sql===" + sql);
				log.info("Running:" + sql);
				stmt.execute(sql);
			}

			if (fs[i].isDirectory()) {
				try {
					showAllFiles(fs[i]);
				} catch (Exception e) {
					log.error("Failed to process directory " + fs[i].getAbsolutePath(), e);
				}
			}
		}
	}

	public static Connection getConn(String driverName, String url, String username, String password) {
		Connection conn = null;
		try {
			Class.forName(driverName);
			conn = DriverManager.getConnection(url, username, password);
		} catch (Exception e) {
			e.printStackTrace();
		}
		return conn;
	}

	public void selectData(Statement stmt, String tableName) throws SQLException {
		sql = "select * from " + tableName;
		System.out.println("Running:" + sql);
		res = stmt.executeQuery(sql);
		System.out.println("Result of select * query:");
		while (res.next()) {
			System.out.println(res.getString(1) + "\t" + res.getString(2));
			log.info(res.getString(1) + "\t" + res.getString(2));
		}
	}

	public static void main(String[] args) throws Exception {
		String driverName = "org.apache.hive.jdbc.HiveDriver";
		String url = "jdbc:hive2://10.201.129.78:10000/default";
		String username = "";
		String password = "";
		HiveJdbcCliUtil hiveJdbcCliUtil = new HiveJdbcCliUtil(driverName, url, username, password);
		hiveJdbcCliUtil.init();
		//hiveJdbcCliUtil.selectData(hiveJdbcCliUtil.stmt, "test_hive");
		String filepath = "E:\\ftp\\kefu2\\";
		File dir = new File(filepath);
		try {
			hiveJdbcCliUtil.showAllFiles(dir);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	public String getDriverName() {
		return driverName;
	}

	public void setDriverName(String driverName) {
		this.driverName = driverName;
	}

	public String getUrl() {
		return url;
	}

	public void setUrl(String url) {
		this.url = url;
	}

	public String getUsername() {
		return username;
	}

	public void setUsername(String username) {
		this.username = username;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String password) {
		this.password = password;
	}
}

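The load data ... overwrite into table ... partition(dt='...') statements above require each sourcedata.s17_* target table to already exist and to be partitioned by a string column dt. The post does not show those table definitions, so the following is only a minimal sketch of such a DDL, written as a helper method that could be added to HiveJdbcCliUtil and called after init(); the column list and the field delimiter are hypothetical placeholders, not the real schema.

	/**
	 * Hedged sketch only: creates one dt-partitioned target table for showAllFiles().
	 * The real schema is not shown in this post; the columns and the delimiter
	 * below are hypothetical placeholders.
	 */
	public void createTargetTable() throws SQLException {
		String ddl = "create table if not exists sourcedata.s17_kf_case_normal ("
				+ " case_id string, customer_id string, content string"
				+ ") partitioned by (dt string)"
				+ " row format delimited fields terminated by '\t'";
		log.info("Running:" + ddl);
		stmt.execute(ddl);
	}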

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveJdbcCli {

    // Legacy HiveServer (HiveServer1) driver and URL. For HiveServer2 use
    // org.apache.hive.jdbc.HiveDriver with a jdbc:hive2:// URL, as in the
    // example above.
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive://hadoop3:10000/default";
    private static String user = "hive";
    private static String password = "mysql";
    private static String sql = "";
    private static ResultSet res;
    private static final Logger log = Logger.getLogger(HiveJdbcCli.class);

    public static void main(String[] args) {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = getConn();
            stmt = conn.createStatement();

            // Step 1: drop the table if it already exists
            String tableName = dropTable(stmt);

            // Step 2: create the table
            createTable(stmt, tableName);

            // Step 3: show the created table
            showTables(stmt, tableName);

            // describe table
            describeTables(stmt, tableName);

            // load data into table
            loadData(stmt, tableName);

            // select * query
            selectData(stmt, tableName);

            // regular hive query: count the rows
            countData(stmt, tableName);

        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            log.error("Connection error!", e);
            System.exit(1);
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
                }
                if (conn != null) {
                    conn.close();
                    conn = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
  
    private static void countData(Statement stmt, String tableName)
            throws SQLException {
        sql = "select count(1) from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of the regular hive query:");
        while (res.next()) {
            System.out.println("count ------>" + res.getString(1));
        }
    }

    private static void selectData(Statement stmt, String tableName)
            throws SQLException {
        sql = "select * from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of select * query:");
        while (res.next()) {
            System.out.println(res.getInt(1) + "\t" + res.getString(2));
        }
    }

    private static void loadData(Statement stmt, String tableName)
            throws SQLException {
        String filepath = "/home/hadoop01/data";
        sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running:" + sql);
        // LOAD DATA returns no result set, so use execute() instead of executeQuery()
        stmt.execute(sql);
    }

    private static void describeTables(Statement stmt, String tableName)
            throws SQLException {
        sql = "describe " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of describe table:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    private static void showTables(Statement stmt, String tableName)
            throws SQLException {
        sql = "show tables '" + tableName + "'";
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of show tables:");
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    private static void createTable(Statement stmt, String tableName)
            throws SQLException {
        sql = "create table " + tableName
                + " (key int, value string) row format delimited fields terminated by '\t'";
        // DDL returns no result set, so use execute()
        stmt.execute(sql);
    }

    private static String dropTable(Statement stmt) throws SQLException {
        // name of the table created (and re-created) by this example
        String tableName = "testHive";
        sql = "drop table if exists " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    private static Connection getConn() throws ClassNotFoundException,
            SQLException {
        Class.forName(driverName);
        Connection conn = DriverManager.getConnection(url, user, password);
        return conn;
    }
}

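In HiveJdbcCli, createTable defines testHive as (key int, value string) with fields terminated by '\t', and loadData reads from /home/hadoop01/data. If you need something to load while trying the example, the small sketch below writes a few matching tab-delimited rows; the path simply mirrors the one in loadData() and the row contents are made up for illustration.

import java.io.FileWriter;
import java.io.IOException;

public class SampleDataWriter {
    public static void main(String[] args) throws IOException {
        // Write a few "key<TAB>value" rows matching the (key int, value string)
        // schema created by HiveJdbcCli. The path mirrors the one used in
        // loadData(); adjust it to your environment.
        try (FileWriter out = new FileWriter("/home/hadoop01/data")) {
            for (int key = 1; key <= 5; key++) {
                out.write(key + "\t" + "value_" + key + "\n");
            }
        }
    }
}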