This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo. If you have been wondering what CharTypeInfo is for and how to use it, the curated examples here should help.
The CharTypeInfo class belongs to the org.apache.hadoop.hive.serde2.typeinfo package. Six code examples are shown below, ordered by popularity.
Example 1: convertClobType
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; // import the required package/class
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  ClobRef cr = (ClobRef) val;
  // An external CLOB is stored as a reference string; an inline CLOB carries its data.
  String s = cr.isExternal() ? cr.toString() : cr.getData();
  if (hfsType == HCatFieldSchema.Type.STRING) {
    return s;
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    // The declared char(n) length comes from the field's CharTypeInfo.
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hc = new HiveChar(s, cti.getLength());
    return hc;
  }
  return null;
}
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 19, Source: SqoopHCatImportHelper.java
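For context, here is a minimal, self-contained sketch of what the CHAR branch above produces. The class name, the char(10) width, and the input string are made up for illustration; the Hive APIs used (TypeInfoFactory.getCharTypeInfo, HiveChar) are standard:

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CharTypeInfoSketch {
  public static void main(String[] args) {
    // Hypothetical char(10) column; TypeInfoFactory caches qualified type infos.
    CharTypeInfo cti = TypeInfoFactory.getCharTypeInfo(10);
    HiveChar hc = new HiveChar("abc", cti.getLength());
    // getPaddedValue() pads with trailing spaces to the declared length;
    // getStrippedValue() returns the value without the padding.
    System.out.println("[" + hc.getPaddedValue() + "]");   // [abc       ]
    System.out.println("[" + hc.getStrippedValue() + "]"); // [abc]
  }
}

This padding behavior is why the conversion must pass the CharTypeInfo length through rather than returning a plain String.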
Example 2: getPrimitiveType
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; // import the required package/class
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
    final PrimitiveCategory primitiveCategory =
        ((PrimitiveObjectInspector) fieldInspector).getPrimitiveCategory();
    // Unparameterized types map directly through the static lookup table.
    if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(primitiveCategory.name())) {
        return HiveTypeMapping.getHIVE_TO_CANONICAL().get(primitiveCategory.name());
    }
    // Parameterized types need their qualifiers (precision/scale, length) carried over.
    switch (primitiveCategory) {
        case DECIMAL:
            final DecimalTypeInfo decimalTypeInfo =
                (DecimalTypeInfo) ((PrimitiveObjectInspector) fieldInspector).getTypeInfo();
            return DecimalType.createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.getScale());
        case CHAR:
            final int cLength =
                ((CharTypeInfo) ((PrimitiveObjectInspector) fieldInspector).getTypeInfo()).getLength();
            return CharType.createCharType(cLength);
        case VARCHAR:
            final int vLength =
                ((VarcharTypeInfo) ((PrimitiveObjectInspector) fieldInspector).getTypeInfo()).getLength();
            return VarcharType.createVarcharType(vLength);
        default:
            return null;
    }
}
Developer: Netflix, Project: metacat, Lines: 24, Source: HiveTypeConverter.java
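The CHAR case above recovers the declared length by casting the inspector's TypeInfo to CharTypeInfo. A standalone sketch of just that round trip, assuming an inspector built from Hive's factory (the class name and the length 5 are arbitrary choices for illustration):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CharLengthRoundTrip {
  public static void main(String[] args) {
    CharTypeInfo charType = TypeInfoFactory.getCharTypeInfo(5);
    // The factory returns an inspector parameterized with the char(5) type info.
    PrimitiveObjectInspector oi =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(charType);
    // The qualifier survives the round trip, so the length can be read back.
    int length = ((CharTypeInfo) oi.getTypeInfo()).getLength();
    System.out.println(oi.getPrimitiveCategory() + " length=" + length); // CHAR length=5
  }
}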
Example 3: getQualifiedTypeName
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; // import the required package/class
/**
* Gets the qualified type name.
*
* @param typeDesc the type desc
* @return the qualified type name
*/
public static String getQualifiedTypeName(TypeDescriptor typeDesc) {
  if (typeDesc.getType().isQualifiedType()) {
    switch (typeDesc.getType()) {
    case VARCHAR_TYPE:
      return VarcharTypeInfo.getQualifiedName(typeDesc.getTypeName(),
        typeDesc.getTypeQualifiers().getCharacterMaximumLength()).toLowerCase();
    case CHAR_TYPE:
      // Produces e.g. "char(10)" from the base name and the maximum length.
      return CharTypeInfo.getQualifiedName(typeDesc.getTypeName(),
        typeDesc.getTypeQualifiers().getCharacterMaximumLength()).toLowerCase();
    case DECIMAL_TYPE:
      return DecimalTypeInfo.getQualifiedName(typeDesc.getTypeQualifiers().getPrecision(),
        typeDesc.getTypeQualifiers().getScale()).toLowerCase();
    }
  } else if (typeDesc.getType().isComplexType()) {
    // Complex types are rendered as plain strings in the result set metadata.
    switch (typeDesc.getType()) {
    case ARRAY_TYPE:
    case MAP_TYPE:
    case STRUCT_TYPE:
      return "string";
    }
  }
  return typeDesc.getTypeName().toLowerCase();
}
Developer: apache, Project: lens, Lines: 30, Source: LensResultSetMetadata.java
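CharTypeInfo.getQualifiedName is a static helper inherited from BaseCharTypeInfo that joins the base type name with its length qualifier. A short sketch of the strings it produces (the class name and the length 10 are arbitrary):

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class QualifiedNameSketch {
  public static void main(String[] args) {
    // Static helper: base name plus the length qualifier in parentheses.
    System.out.println(CharTypeInfo.getQualifiedName("char", 10)); // char(10)
    // The same qualified form is also the type's own name.
    CharTypeInfo cti = TypeInfoFactory.getCharTypeInfo(10);
    System.out.println(cti.getTypeName()); // char(10)
  }
}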
Example 4: convertStringTypes
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; // import the required package/class
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      // Replace embedded Hive delimiters so the value cannot break row/field boundaries.
      str = FieldFormatter.hiveStringReplaceDelims(str,
          hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      // Use the delimiter-cleaned string, consistent with the VARCHAR branch
      // (the original snippet passed val.toString(), discarding the replacement).
      HiveChar hc = new HiveChar(str, cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 29, Source: SqoopHCatImportHelper.java
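The DECIMAL branch parses the value with MathContext.DECIMAL128 before handing it to HiveDecimal.create. A minimal standalone version of that step (the class name and the input literal are chosen for illustration):

import java.math.BigDecimal;
import java.math.MathContext;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecimalSketch {
  public static void main(String[] args) {
    // DECIMAL128 bounds the parse at 34 significant digits, matching the snippet above.
    BigDecimal bd = new BigDecimal("12345.6789", MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    System.out.println(hd); // 12345.6789
  }
}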
Example 5: convertBooleanTypes
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; // import the required package/class
private Object convertBooleanTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  Boolean b = (Boolean) val;
  // Widen the boolean to the numeric or character type declared in the HCatalog schema.
  if (hfsType == HCatFieldSchema.Type.BOOLEAN) {
    return b;
  } else if (hfsType == HCatFieldSchema.Type.TINYINT) {
    return (byte) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.SMALLINT) {
    return (short) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.INT) {
    return (int) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
    return (long) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.FLOAT) {
    return (float) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.DOUBLE) {
    return (double) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.STRING) {
    return val.toString();
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(val.toString(), vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hChar = new HiveChar(val.toString(), cti.getLength());
    return hChar;
  }
  return null;
}
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 31, Source: SqoopHCatImportHelper.java
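The CHAR branch again ends in the HiveChar constructor, so a boolean arrives in Hive as a space-padded fixed-width string. A small sketch of that outcome (the class name and the char(8) width are arbitrary):

import org.apache.hadoop.hive.common.type.HiveChar;

public class BooleanToCharSketch {
  public static void main(String[] args) {
    Boolean b = Boolean.TRUE;
    // "true" fitted into a char(8): padded with four trailing spaces.
    HiveChar hc = new HiveChar(b.toString(), 8);
    System.out.println("[" + hc.getPaddedValue() + "]"); // [true    ]
  }
}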
Example 6: getFieldObjectInspector
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; // import the required package/class
/**
* Given a Hive column type, returns the ObjectInspector that will be used to
* get data from the field. Currently using the standard Writable object
* inspectors.
* TODO: Support all types
*/
private ObjectInspector getFieldObjectInspector(final TypeInfo typeInfo) {
  if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  } else if (typeInfo instanceof DecimalTypeInfo) {
    // Parameterized types (decimal, varchar, char) need inspectors built from their
    // TypeInfo so that precision/scale or length qualifiers are preserved.
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (DecimalTypeInfo) typeInfo);
  } else if (typeInfo instanceof VarcharTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (VarcharTypeInfo) typeInfo);
  } else if (typeInfo instanceof CharTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (CharTypeInfo) typeInfo);
  } else {
    throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
  }
}
Developer: cloudera, Project: RecordServiceClient, Lines: 33, Source: RecordServiceObjectInspector.java
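To exercise the CharTypeInfo branch above, the sketch below builds the writable char inspector from the factory and reads a HiveCharWritable back through it. The class name and the char(6) width are assumptions for illustration:

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CharInspectorRoundTrip {
  public static void main(String[] args) {
    // Inspector parameterized with char(6); same factory call as in the snippet above.
    WritableHiveCharObjectInspector oi =
        (WritableHiveCharObjectInspector) PrimitiveObjectInspectorFactory
            .getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(6));
    HiveCharWritable w = new HiveCharWritable(new HiveChar("hive", 6));
    // The inspector turns the writable back into the Java-side HiveChar.
    HiveChar hc = oi.getPrimitiveJavaObject(w);
    System.out.println("[" + hc.getPaddedValue() + "]"); // [hive  ]
  }
}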
Note: the org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo examples in this article were collected from GitHub, MSDocs, and similar source/documentation platforms; the code fragments come from open-source projects contributed by their authors. Copyright remains with the original authors, and distribution or use should follow each project's License. Do not repost without permission.