Hive--10---函数----自定义函数 (UDF-UDAF-UDTF)
Posted 高高for 循环
tags:
篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了Hive--10---函数----自定义函数 (UDF-UDAF-UDTF)相关的知识,希望对你有一定的参考价值。
提示:文章写完后,目录可以自动生成,如何生成可参考右边的帮助文档
文章目录
自定义函数
官网地址:https://cwiki.apache.org/confluence/display/Hive/HivePlugins
1.函数分类
- UDF—普通函数
- UDAF—聚合函数
- UDTF—炸裂函数
2.编程步骤
3.自定义 UDF函数-----案例
<!-- Maven dependencies for writing Hive UDF/UDTF classes.
     Note: the original snippet wrongly nested a second <dependencies> element
     inside this one, which is not valid POM XML — all <dependency> entries
     must be direct children of a single <dependencies> block. -->
<dependencies>
    <!-- Hive execution engine: provides GenericUDF / GenericUDTF and the
         ObjectInspector APIs used below. -->
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
        <version>3.1.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.1.3</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>3.1.3</version>
    </dependency>
</dependencies>
MyUDF
package com.atguigu.udf;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
public class MyUDF extends GenericUDF
//校验数据参数个数
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException
if (arguments.length != 1)
throw new UDFArgumentException("参数个数不为1");
return PrimitiveObjectInspectorFactory.javaIntObjectInspector;
//处理数据
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException
//1.取出输入数据
String input = arguments[0].get().toString();
//2.判断输入数据是否为null
if (input == null)
return 0;
//3.返回输入数据的长度
return input.length();
@Override
public String getDisplayString(String[] children)
return "";
4.自定义 UDTF函数-----案例
MyUDTF
package com.atguigu.udf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Custom Hive UDTF that explodes a comma-separated string into one row per token.
 *
 * Input : hello,atguigu,hive
 * Output:
 *   hello
 *   atguigu
 *   hive
 */
public class MyUDTF extends GenericUDTF {

    // Reusable single-column output buffer; cleared before each forward().
    private ArrayList<String> outPutList = new ArrayList<>();

    /**
     * Declares the output schema: one string column named "word".
     * The default column name can be overridden with an alias in the query.
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {
        // Default output column name (alias-overridable).
        List<String> fieldNames = new ArrayList<>();
        fieldNames.add("word");
        // Output column type: string.
        List<ObjectInspector> fieldOIs = new ArrayList<>();
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        // Combine names and types into the struct inspector Hive expects.
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    /**
     * Processes one input row, e.g. "hello,atguigu,hive", emitting one
     * output row per comma-separated token.
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // 1. Take the first (and only expected) argument as a string.
        String input = args[0].toString();
        // 2. Split on commas.
        String[] words = input.split(",");
        // 3. Emit each token as its own row.
        for (String word : words) {
            // Reuse the buffer: clear, fill with the current token, forward.
            outPutList.clear();
            outPutList.add(word);
            forward(outPutList);
        }
    }

    /** Cleanup hook; nothing to release for this UDTF. */
    @Override
    public void close() throws HiveException {
    }
}
MyUDTF2
package com.atguigu.udf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Custom Hive UDTF that explodes a two-level delimited string into rows of
 * two string columns: ":" separates records, "," separates the two fields.
 *
 * Input : hello,atguigu:hello,hive
 * Output:
 *   hello atguigu
 *   hello hive
 */
public class MyUDTF2 extends GenericUDTF {

    // Reusable two-column output buffer; cleared before each forward().
    private ArrayList<String> outPutList = new ArrayList<>();

    /**
     * Declares the output schema: two string columns "word1" and "word2".
     * The default column names can be overridden with aliases in the query.
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {
        // Default output column names (alias-overridable).
        List<String> fieldNames = new ArrayList<>();
        fieldNames.add("word1");
        fieldNames.add("word2");
        // Both output columns are strings.
        List<ObjectInspector> fieldOIs = new ArrayList<>();
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        // Combine names and types into the struct inspector Hive expects.
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    /**
     * Processes one input row, e.g. "hello,atguigu:hello,hive", emitting
     * one two-column row per ":"-separated record.
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // 1. Take the first (and only expected) argument as a string.
        String input = args[0].toString();
        // 2. Split into records on ":" (the original comment wrongly said ",").
        String[] fields = input.split(":");
        // 3. Emit each record as a two-column row.
        for (String field : fields) {
            outPutList.clear();
            // Split the record into its two fields on ",".
            // NOTE(review): assumes every record contains at least one ",";
            // a record without it would throw ArrayIndexOutOfBoundsException.
            String[] words = field.split(",");
            outPutList.add(words[0]);
            outPutList.add(words[1]);
            forward(outPutList);
        }
    }

    /** Cleanup hook; nothing to release for this UDTF. */
    @Override
    public void close() throws HiveException {
    }
}
以上是关于Hive--10---函数----自定义函数 (UDF-UDAF-UDTF)的主要内容,如果未能解决你的问题,请参考以下文章
与数据库联系时出错:使用无效数据调用函数 Query.where()。不支持的字段值:自定义 U 对象