大数据Hive系列之Hive API

x33g5p2x  于2021-12-25 转载在 其他  
字(7.2k)|赞(0)|评价(0)|浏览(636)

Maven依赖配置

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<groupId>com.volitation.hive</groupId>
	<artifactId>bigdata-data-management</artifactId>
	<version>1.0.0-SNAPSHOT</version>
	<packaging>jar</packaging>

	<properties>
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
		<java.version>1.8</java.version>
		<!-- wire the Java version to the compiler; java.version alone is not read by maven-compiler-plugin -->
		<maven.compiler.source>${java.version}</maven.compiler.source>
		<maven.compiler.target>${java.version}</maven.compiler.target>
		<!-- junit -->
		<junit.version>4.11</junit.version>
		<!-- hadoop; NOTE(review): hadoop-core 1.2.1 is the legacy MR1 artifact —
		     confirm it is really wanted alongside Hive 2.1.1 -->
		<hadoop.core.version>1.2.1</hadoop.core.version>
		<!-- hive -->
		<hive.version>2.1.1</hive.version>
	</properties>

	<repositories>
		<repository>
			<id>spring-milestones</id>
			<!-- https is required: Maven 3.8.1+ blocks plain-http repositories -->
			<url>https://repo.spring.io/libs-milestone/</url>
		</repository>
	</repositories>

	<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>${junit.version}</version>
			<!-- test-only dependency; keep it out of the compile/runtime classpath -->
			<scope>test</scope>
		</dependency>

		<!-- hadoop -->
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-core</artifactId>
			<version>${hadoop.core.version}</version>
		</dependency>

		<!-- hive -->
		<dependency>
			<groupId>org.apache.hive</groupId>
			<artifactId>hive-jdbc</artifactId>
			<version>${hive.version}</version>
		</dependency>

	</dependencies>

</project>

Hive JDBC四大参数定义

/**
 * Connection constants for the Hive JDBC client.
 *
 * <p>NOTE(security): credentials are hard-coded here; in real deployments they
 * should be externalized (environment variables / properties file) and never
 * committed to source control.
 *
 * @author volitation
 */
public class PlatformDictionary {

    /*
     * Hive
     */
    /** HiveServer2 JDBC driver class name. */
    public static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
    /** HiveServer2 endpoint (default Thrift port 10000). */
    public static final String URL = "jdbc:hive2://192.168.9.87:10000";
    public static final String USER_NAME = "hadoop";
    public static final String PASSWORD = "hadoop!QWE";

    /** Constants holder — not instantiable. */
    private PlatformDictionary() {
    }

}

异常处理

import java.util.HashMap;
import java.util.Map;

/**
 * Helper for turning an exception into a short diagnostic string.
 *
 * @author volitation
 */
public class AbnormalUtils {

	/**
	 * Builds a map-shaped description of the exception's root information.
	 *
	 * <p>Fix: the original dereferenced {@code e.getCause()} unconditionally,
	 * which threw NPE inside the error handler whenever the exception had no
	 * cause (common for plain SQLException), and {@code getMessage().toString()}
	 * threw NPE for message-less exceptions. We fall back to the exception
	 * itself and stringify the message null-safely.
	 *
	 * @param e the exception to describe; must not be null
	 * @return a {@code Map#toString()} rendering of the type and message
	 */
	public static Object getAbnormal(Exception e) {
		// Prefer the cause when present, otherwise describe the exception itself.
		Throwable root = (e.getCause() != null) ? e.getCause() : e;
		Object abnormalType = root.getClass().toString();
		// String.valueOf tolerates a null message (renders as "null").
		Object abnormalName = String.valueOf(root.getMessage());
		Map<String, Object> map = new HashMap<>();
		map.put("异常类型", abnormalType);
		map.put("异常点信息", abnormalName);
		return map.toString();
	}

}

Hive通过JDBC进行增删查操作

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.volitation.hive.platform.PlatformDictionary;
import com.volitation.hive.platform.utils.AbnormalUtils;

/**
 * Hive JDBC operations: database/table DDL, data loading and simple queries.
 *
 * <p>NOTE(review): the shared static {@code Connection}/{@code Statement}/
 * {@code ResultSet} fields make this class non-thread-safe — concurrent
 * callers will clobber each other's connection state.
 *
 * <p>NOTE(security): SQL text is assembled by string concatenation. Hive JDBC
 * does not support parameterized DDL, so callers must pass only trusted
 * database/table identifiers and pre-validated SQL.
 *
 * @author volitation
 */
public class HiveJDBC {

	private static Connection conn = null;
	private static Statement stmt = null;
	private static ResultSet rs = null;

	/**
	 * Loads the Hive driver and opens a connection plus a statement.
	 * On failure the error is logged and {@code conn}/{@code stmt} stay null.
	 */
	private static void init() {
		try {
			Class.forName(PlatformDictionary.DRIVER_NAME);
			conn = DriverManager.getConnection(PlatformDictionary.URL, PlatformDictionary.USER_NAME,
					PlatformDictionary.PASSWORD);
			stmt = conn.createStatement();
		} catch (ClassNotFoundException | SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		}
	}

	/**
	 * Creates a database.
	 *
	 * @param databaseName name of the database to create (trusted identifier)
	 */
	public static void createDatabase(String databaseName) {
		try {
			init();

			String sql = "create database " + databaseName;
			System.out.println("Running: " + sql);

			stmt.execute(sql);
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
	}

	/**
	 * Lists all databases.
	 *
	 * @return database names; empty list when the query fails
	 */
	public static List<String> showDatabases() {
		List<String> list = new ArrayList<>();
		try {
			init();

			String sql = "show databases";
			System.out.println("Running: " + sql);

			rs = stmt.executeQuery(sql);
			while (rs.next()) {
				list.add(rs.getString(1));
			}

		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
		return list;
	}

	/**
	 * Drops a database if it exists.
	 *
	 * @param databaseName name of the database to drop (trusted identifier)
	 */
	public static void dropDatabase(String databaseName) {
		try {
			init();

			String sql = "drop database if exists " + databaseName;
			System.out.println("Running: " + sql);

			stmt.execute(sql);
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
	}

	/**
	 * Executes a caller-supplied CREATE TABLE statement.
	 *
	 * @param createTableSql full, pre-validated DDL statement
	 */
	public static void createTable(String createTableSql) {
		try {
			init();

			System.out.println("Running: " + createTableSql);

			stmt.execute(createTableSql);
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
	}

	/**
	 * Lists the tables of a database.
	 *
	 * @param databaseName database to inspect (trusted identifier)
	 * @return table names; empty list when the query fails
	 */
	public static List<String> showTables(String databaseName) {
		List<String> list = new ArrayList<>();
		try {
			init();

			String useSql = "use " + databaseName;
			System.out.println("Running: " + useSql);
			stmt.execute(useSql);

			String sql = "show tables";
			System.out.println("Running: " + sql);

			rs = stmt.executeQuery(sql);
			while (rs.next()) {
				list.add(rs.getString(1));
			}

		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
		return list;
	}

	/**
	 * Describes the columns of a table.
	 *
	 * @param databaseName database containing the table
	 * @param tableName    table to describe
	 * @return one map per column with keys "colName" and "dataType"
	 */
	public static List<Map<String, Object>> descTable(String databaseName, String tableName) {
		List<Map<String, Object>> list = new ArrayList<>();
		Map<String, Object> map = null;

		try {
			init();

			String sql = "desc " + databaseName + "." + tableName;
			System.out.println("Running: " + sql);

			rs = stmt.executeQuery(sql);
			while (rs.next()) {
				map = new HashMap<>();
				map.put("colName", rs.getString(1));
				map.put("dataType", rs.getString(2));
				list.add(map);
			}

		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
		return list;
	}

	/**
	 * Loads an HDFS file into a table.
	 *
	 * <p>Fix: the original statement read
	 * {@code load data inpath '...' insert into table ...}, which is not valid
	 * HiveQL — the keyword is simply {@code into table}.
	 *
	 * @param hdfsPath  HDFS path of the data file (moved by Hive on load)
	 * @param tableName target table (trusted identifier)
	 */
	public static void loadData(String hdfsPath, String tableName) {
		try {
			init();

			String sql = "load data inpath '" + hdfsPath + "' into table " + tableName;
			System.out.println("Running: " + sql);

			stmt.execute(sql);
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
	}

	/**
	 * Runs a caller-supplied SELECT and returns the first column of each row.
	 *
	 * @param selectSql full, pre-validated query
	 * @return first-column values; empty list when the query fails
	 */
	public static List<String> selectData(String selectSql) {
		List<String> list = new ArrayList<>();
		try {
			init();

			System.out.println("Running: " + selectSql);

			rs = stmt.executeQuery(selectSql);
			while (rs.next()) {
				list.add(rs.getString(1));
			}
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
		return list;
	}

	/**
	 * Drops a table if it exists.
	 *
	 * @param databaseName database containing the table
	 * @param tableName    table to drop
	 */
	public static void dropTable(String databaseName, String tableName) {
		try {
			init();

			String sql = "drop table if exists " + databaseName + "." + tableName;
			System.out.println("Running: " + sql);

			stmt.execute(sql);
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		} finally {
			destroy();
		}
	}

	/**
	 * Backward-compatible alias kept for existing callers.
	 *
	 * @deprecated misspelled name; use {@link #dropTable(String, String)}.
	 */
	@Deprecated
	public static void deopTable(String databaseName, String tableName) {
		dropTable(databaseName, tableName);
	}

	/**
	 * Closes result set, statement and connection, swallowing nothing:
	 * close failures are logged to stderr.
	 */
	private static void destroy() {
		try {
			if (rs != null) {
				rs.close();
			}
			if (stmt != null) {
				stmt.close();
			}
			if (conn != null) {
				conn.close();
			}
		} catch (SQLException e) {
			System.err.println(AbnormalUtils.getAbnormal(e));
		}
	}

}
上一篇:Hive常用SQL
下一篇:Hive MapReduce

相关文章