• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Java CharTypeInfo类代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Java中org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo的典型用法代码示例。如果您正苦于以下问题:Java CharTypeInfo类的具体用法?Java CharTypeInfo怎么用?Java CharTypeInfo使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。



CharTypeInfo类属于org.apache.hadoop.hive.serde2.typeinfo包,在下文中一共展示了CharTypeInfo类的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Java代码示例。

示例1: convertClobType

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; //导入依赖的package包/类
/**
 * Converts a CLOB column value into the object expected by the target
 * HCatalog field (string, varchar, or char).
 *
 * @param val the incoming value, expected to be a {@link ClobRef}
 * @param hfs the target HCatalog field schema
 * @return the converted value, or null when the target type is not a
 *         string-like HCatalog type
 */
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  ClobRef clob = (ClobRef) val;
  // External CLOBs are represented by their reference string; inline
  // CLOBs expose their content directly.
  String text = clob.isExternal() ? clob.toString() : clob.getData();

  switch (hfs.getType()) {
  case STRING:
    return text;
  case VARCHAR:
    VarcharTypeInfo varcharInfo = (VarcharTypeInfo) hfs.getTypeInfo();
    return new HiveVarchar(text, varcharInfo.getLength());
  case CHAR:
    CharTypeInfo charInfo = (CharTypeInfo) hfs.getTypeInfo();
    return new HiveChar(text, charInfo.getLength());
  default:
    return null;
  }
}
 
开发者ID:aliyun,项目名称:aliyun-maxcompute-data-collectors,代码行数:19,代码来源:SqoopHCatImportHelper.java


示例2: getPrimitiveType

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; //导入依赖的package包/类
/**
 * Resolves the canonical {@code Type} for a primitive Hive field.
 *
 * @param fieldInspector a primitive object inspector for the field
 * @return the mapped canonical type, or null when the primitive
 *         category has no known mapping
 */
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
    final PrimitiveObjectInspector inspector = (PrimitiveObjectInspector) fieldInspector;
    final PrimitiveCategory category = inspector.getPrimitiveCategory();
    // Unparameterized primitives resolve through the static name mapping.
    if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(category.name())) {
        return HiveTypeMapping.getHIVE_TO_CANONICAL().get(category.name());
    }
    // Parameterized primitives carry precision/length qualifiers that
    // must be copied onto the canonical type.
    switch (category) {
        case DECIMAL: {
            final DecimalTypeInfo decimalInfo = (DecimalTypeInfo) inspector.getTypeInfo();
            return DecimalType.createDecimalType(decimalInfo.precision(), decimalInfo.getScale());
        }
        case CHAR: {
            final CharTypeInfo charInfo = (CharTypeInfo) inspector.getTypeInfo();
            return CharType.createCharType(charInfo.getLength());
        }
        case VARCHAR: {
            final VarcharTypeInfo varcharInfo = (VarcharTypeInfo) inspector.getTypeInfo();
            return VarcharType.createVarcharType(varcharInfo.getLength());
        }
        default:
            return null;
    }
}
 
开发者ID:Netflix,项目名称:metacat,代码行数:24,代码来源:HiveTypeConverter.java


示例3: getQualifiedTypeName

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; //导入依赖的package包/类
/**
 * Returns the lower-cased, fully qualified type name for a result-set
 * column descriptor.
 *
 * @param typeDesc the column's type descriptor
 * @return the qualified type name, in lower case
 */
public static String getQualifiedTypeName(TypeDescriptor typeDesc) {
  if (typeDesc.getType().isQualifiedType()) {
    // Qualified primitives embed their size parameters in the name,
    // e.g. "varchar(32)" or "decimal(10,2)".
    switch (typeDesc.getType()) {
    case CHAR_TYPE:
      return CharTypeInfo.getQualifiedName(typeDesc.getTypeName(),
        typeDesc.getTypeQualifiers().getCharacterMaximumLength()).toLowerCase();
    case VARCHAR_TYPE:
      return VarcharTypeInfo.getQualifiedName(typeDesc.getTypeName(),
        typeDesc.getTypeQualifiers().getCharacterMaximumLength()).toLowerCase();
    case DECIMAL_TYPE:
      return DecimalTypeInfo.getQualifiedName(typeDesc.getTypeQualifiers().getPrecision(),
        typeDesc.getTypeQualifiers().getScale()).toLowerCase();
    }
  } else if (typeDesc.getType().isComplexType()) {
    // Complex columns are reported to clients as plain strings.
    switch (typeDesc.getType()) {
    case ARRAY_TYPE:
    case MAP_TYPE:
    case STRUCT_TYPE:
      return "string";
    }
  }
  return typeDesc.getTypeName().toLowerCase();
}
 
开发者ID:apache,项目名称:lens,代码行数:30,代码来源:LensResultSetMetadata.java


示例4: convertStringTypes

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; //导入依赖的package包/类
/**
 * Converts a string-valued column into the object expected by the
 * target HCatalog field (string, varchar, char, or decimal).
 *
 * @param val the incoming column value; its {@code toString()} form is used
 * @param hfs the target HCatalog field schema
 * @return the converted value, or null for unsupported target types
 */
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      // Strip/replace Hive delimiter characters so they cannot corrupt
      // the stored row layout.
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      // Bug fix: previously built from val.toString(), which silently
      // discarded the delimiter replacement applied to str above.
      // Use str so CHAR behaves consistently with STRING and VARCHAR.
      HiveChar hc = new HiveChar(str, cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    // DECIMAL128 gives 34 significant digits, comfortably covering
    // Hive decimal's maximum precision.
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
 
开发者ID:aliyun,项目名称:aliyun-maxcompute-data-collectors,代码行数:29,代码来源:SqoopHCatImportHelper.java


示例5: convertBooleanTypes

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; //导入依赖的package包/类
/**
 * Converts a boolean column value into the object expected by the
 * target HCatalog field. Numeric targets receive 1/0 in the requested
 * width; string-like targets receive "true"/"false".
 *
 * @param val the incoming value, expected to be a {@link Boolean}
 * @param hfs the target HCatalog field schema
 * @return the converted value, or null for unsupported target types
 */
private Object convertBooleanTypes(Object val, HCatFieldSchema hfs) {
  Boolean flag = (Boolean) val;
  switch (hfs.getType()) {
  case BOOLEAN:
    return flag;
  case TINYINT:
    return (byte) (flag ? 1 : 0);
  case SMALLINT:
    return (short) (flag ? 1 : 0);
  case INT:
    return (int) (flag ? 1 : 0);
  case BIGINT:
    return (long) (flag ? 1 : 0);
  case FLOAT:
    return (float) (flag ? 1 : 0);
  case DOUBLE:
    return (double) (flag ? 1 : 0);
  case STRING:
    return val.toString();
  case VARCHAR:
    VarcharTypeInfo varcharInfo = (VarcharTypeInfo) hfs.getTypeInfo();
    return new HiveVarchar(val.toString(), varcharInfo.getLength());
  case CHAR:
    CharTypeInfo charInfo = (CharTypeInfo) hfs.getTypeInfo();
    return new HiveChar(val.toString(), charInfo.getLength());
  default:
    return null;
  }
}
 
开发者ID:aliyun,项目名称:aliyun-maxcompute-data-collectors,代码行数:31,代码来源:SqoopHCatImportHelper.java


示例6: getFieldObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; //导入依赖的package包/类
/**
 * Given a Hive column type, returns the ObjectInspector that will be
 * used to get data from the field. Currently uses the standard
 * Writable object inspectors.
 * TODO: Support all types
 */
private ObjectInspector getFieldObjectInspector(final TypeInfo typeInfo) {
  // Fixed-width primitives: match against the TypeInfoFactory singletons.
  if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }
  // Parameterized primitives: dispatch on the TypeInfo subclass so the
  // inspector carries the precision/length qualifiers.
  if (typeInfo instanceof DecimalTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (DecimalTypeInfo) typeInfo);
  }
  if (typeInfo instanceof VarcharTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (VarcharTypeInfo) typeInfo);
  }
  if (typeInfo instanceof CharTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (CharTypeInfo) typeInfo);
  }
  throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
}
 
开发者ID:cloudera,项目名称:RecordServiceClient,代码行数:33,代码来源:RecordServiceObjectInspector.java



注:本文中的org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Java Bundle类代码示例发布时间:2022-05-22
下一篇:
Java ObjectLocator类代码示例发布时间:2022-05-22
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap