Java 呼叫Hive 自定義UDF
Hive 具有一個可選的元件叫做HiveServer2,它允許客戶端通過指定的埠遠端訪問Hive。
將自定義的UDF的jar拷貝到安裝的hive伺服器中(如 /home/hadoop下)
具體的不囉嗦了,看程式碼吧!
package com.hive.client;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Minimal JDBC client that connects to HiveServer2, registers a custom UDF
 * ({@code com.hive.utf.CityUDF}) for the session, and runs a query that uses it.
 *
 * <p>Fix over the original: the original only closed the {@code Statement} and
 * {@code Connection} on the success path, leaking them if any {@code execute}
 * call threw. All JDBC resources are now managed with try-with-resources.
 */
public class HiveClientUDF {

    /** HiveServer2 JDBC driver class. */
    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    /** Default HiveServer2 port 10000, default database. */
    private static final String URL = "jdbc:hive2://192.168.150.162:10000/default";

    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        Class.forName(DRIVER_NAME);
        String tableName = "vehicle_pass";
        // NOTE(review): credentials are hard-coded (server login user/password);
        // move them to configuration or environment variables in real code.
        try (Connection conn = DriverManager.getConnection(URL, "hadoop", "dh!123456");
             Statement stmt = conn.createStatement()) {
            // Add the UDF jar (copied to /home/hadoop on the Hive server) to the
            // session classpath.
            stmt.execute("add jar /home/hadoop/cityudf.jar");
            // Register the UDF under the temporary name "cityudf" for this session.
            stmt.execute("create temporary function cityudf as 'com.hive.utf.CityUDF'");
            // Query using the custom UDF.
            String sql = "select key,kkNO,cityudf(key) as city from " + tableName;
            System.out.println("Running:" + sql);
            try (ResultSet res = stmt.executeQuery(sql)) {
                System.out.println("執行“select * query”執行結果:");
                while (res.next()) {
                    System.out.println(res.getString(1) + "\t" + ":" + res.getString(3));
                }
            }
        }
    }
}

執行結果:
1522301_20150417103242404_黑A01BCI :哈爾濱
1522301_20150417103242494_黑A013AA :哈爾濱
1522301_20150417103242614_黑A01GHI :哈爾濱
1522301_20150417103242616_黑A01GLJ :哈爾濱
1522301_20150417103242617_黑A01E5G :哈爾濱
1522301_20150417103242623_黑A01HDK :哈爾濱
1522301_20150417103242625_黑A018MM :哈爾濱
1522301_20150417103242758_黑A015KD :哈爾濱
http://blog.csdn.net/allen_oscar/article/details/45146913