本文整理汇总了Java中org.apache.flink.table.api.TableSchema类的典型用法代码示例。如果您正苦于以下问题:Java TableSchema类的具体用法?Java TableSchema怎么用?Java TableSchema使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
TableSchema类属于org.apache.flink.table.api包,在下文中一共展示了TableSchema类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Java代码示例。
示例1: testTableSchema
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Verifies that the table source produced by the configured builder exposes the
 * expected schema: five columns with fixed names and types.
 */
@Test
public void testTableSchema() {
    KafkaTableSource.Builder builder = getBuilder();
    configureBuilder(builder);
    KafkaTableSource tableSource = builder.build();

    TableSchema schema = tableSource.getTableSchema();
    assertNotNull(schema);

    String[] expectedNames = {"field1", "field2", "time1", "time2", "field3"};
    Object[] expectedTypes = {
        Types.LONG(), Types.STRING(), Types.SQL_TIMESTAMP(), Types.SQL_TIMESTAMP(), Types.DOUBLE()};

    assertEquals(expectedNames.length, schema.getColumnNames().length);
    for (int i = 0; i < expectedNames.length; i++) {
        assertEquals(expectedNames[i], schema.getColumnNames()[i]);
        assertEquals(expectedTypes[i], schema.getTypes()[i]);
    }
}
开发者ID:axbaretto,项目名称:flink,代码行数:24,代码来源:KafkaTableSourceTestBase.java
示例2: getTableSchema
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Returns the schema of the taxi-ride table: eight columns covering the ride
 * id, start flag, start/end coordinates, passenger count, and event time.
 */
@Override
public TableSchema getTableSchema() {
    String[] fieldNames = {
        "rideId", "isStart", "startLon", "startLat",
        "endLon", "endLat", "passengerCnt", "eventTime"};
    TypeInformation<?>[] fieldTypes = {
        Types.LONG, Types.BOOLEAN, Types.FLOAT, Types.FLOAT,
        Types.FLOAT, Types.FLOAT, Types.SHORT, Types.SQL_TIMESTAMP};
    return new TableSchema(fieldNames, fieldTypes);
}
开发者ID:dataArtisans,项目名称:flink-training-exercises,代码行数:27,代码来源:TaxiRideTableSource.java
示例3: mockExternalCatalogTable
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Builds a Mockito mock of an {@link ExternalCatalogTable} that reports a
 * single INT column named "foo", the given connector properties, and the
 * "kafka+json" table type.
 */
private static ExternalCatalogTable mockExternalCatalogTable(Map<String, String> props) {
    TableSchema schema = new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
    ExternalCatalogTable mockTable = mock(ExternalCatalogTable.class);
    doReturn("kafka+json").when(mockTable).tableType();
    doReturn(props).when(mockTable).properties();
    doReturn(schema).when(mockTable).schema();
    return mockTable;
}
开发者ID:uber,项目名称:AthenaX,代码行数:9,代码来源:KafkaJsonConnectorITest.java
示例4: Kafka09TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the physical result type; its
 *                 field names and types are used to parse the Kafka records.
 */
public Kafka09TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
TableSchema schema,
TypeInformation<Row> typeInfo) {
super(topic, properties, schema, typeInfo);
this.deserializationSchema = deserializationSchema;
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka09TableSource.java
示例5: Kafka09JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 JSON {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema Schema of the produced table.
 * @param jsonSchema Schema of the JSON messages decoded from Kafka.
 */
public Kafka09JsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(
        topic,
        properties,
        tableSchema,
        jsonSchema);
}
开发者ID:axbaretto,项目名称:flink,代码行数:17,代码来源:Kafka09JsonTableSource.java
示例6: Kafka09AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 Avro {@link StreamTableSource} that decodes records of
 * the given {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Class of the Avro specific record to decode.
 */
public Kafka09AvroTableSource(
        String topic,
        Properties properties,
        TableSchema schema,
        Class<? extends SpecificRecordBase> record) {
    super(topic, properties, schema, record);
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka09AvroTableSource.java
示例7: Kafka08TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the physical result type; its
 *                 field names and types are used to parse the Kafka records.
 */
public Kafka08TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
TableSchema schema,
TypeInformation<Row> typeInfo) {
super(topic, properties, schema, typeInfo);
this.deserializationSchema = deserializationSchema;
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka08TableSource.java
示例8: Kafka08AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 Avro {@link StreamTableSource} that decodes records of
 * the given {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Class of the Avro specific record to decode.
 */
public Kafka08AvroTableSource(
        String topic,
        Properties properties,
        TableSchema schema,
        Class<? extends SpecificRecordBase> record) {
    super(topic, properties, schema, record);
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka08AvroTableSource.java
示例9: Kafka08JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 JSON {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema Schema of the produced table.
 * @param jsonSchema Schema of the JSON messages decoded from Kafka.
 */
public Kafka08JsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(
        topic,
        properties,
        tableSchema,
        jsonSchema);
}
开发者ID:axbaretto,项目名称:flink,代码行数:17,代码来源:Kafka08JsonTableSource.java
示例10: Kafka011JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.11 JSON {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema Schema of the produced table.
 * @param jsonSchema Schema of the JSON messages decoded from Kafka.
 */
public Kafka011JsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(
        topic,
        properties,
        tableSchema,
        jsonSchema);
}
开发者ID:axbaretto,项目名称:flink,代码行数:17,代码来源:Kafka011JsonTableSource.java
示例11: Kafka011TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.11 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the physical result type; its
 *                 field names and types are used to parse the Kafka records.
 */
public Kafka011TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
TableSchema schema,
TypeInformation<Row> typeInfo) {
super(topic, properties, schema, typeInfo);
this.deserializationSchema = deserializationSchema;
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka011TableSource.java
示例12: Kafka011AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.11 Avro {@link StreamTableSource} that decodes records of
 * the given {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Class of the Avro specific record to decode.
 */
public Kafka011AvroTableSource(
        String topic,
        Properties properties,
        TableSchema schema,
        Class<? extends SpecificRecordBase> record) {
    super(topic, properties, schema, record);
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka011AvroTableSource.java
示例13: Kafka010AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 Avro {@link StreamTableSource} that decodes records of
 * the given {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Class of the Avro specific record to decode.
 */
public Kafka010AvroTableSource(
        String topic,
        Properties properties,
        TableSchema schema,
        Class<? extends SpecificRecordBase> record) {
    super(topic, properties, schema, record);
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka010AvroTableSource.java
示例14: Kafka010JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 JSON {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema Schema of the produced table.
 * @param jsonSchema Schema of the JSON messages decoded from Kafka.
 */
public Kafka010JsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(
        topic,
        properties,
        tableSchema,
        jsonSchema);
}
开发者ID:axbaretto,项目名称:flink,代码行数:17,代码来源:Kafka010JsonTableSource.java
示例15: Kafka010TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the physical result type; its
 *                 field names and types are used to parse the Kafka records.
 */
public Kafka010TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
TableSchema schema,
TypeInformation<Row> typeInfo) {
super(topic, properties, schema, typeInfo);
this.deserializationSchema = deserializationSchema;
}
开发者ID:axbaretto,项目名称:flink,代码行数:21,代码来源:Kafka010TableSource.java
示例16: getTableSchema
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Returns the schema of the HBase table. Falls back to a schema derived from
 * the configured field names and types when none was set explicitly.
 */
@Override
public TableSchema getTableSchema() {
    return this.tableSchema != null
        ? this.tableSchema
        : new TableSchema(getFieldNames(), getFieldTypes());
}
开发者ID:axbaretto,项目名称:flink,代码行数:9,代码来源:HBaseTableSource.java
示例17: KafkaTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param returnType Type information of the produced physical DataStream.
 * @throws NullPointerException if any argument is {@code null}.
 */
protected KafkaTableSource(
String topic,
Properties properties,
TableSchema schema,
TypeInformation<Row> returnType) {
this.topic = Preconditions.checkNotNull(topic, "Topic must not be null.");
this.properties = Preconditions.checkNotNull(properties, "Properties must not be null.");
this.schema = Preconditions.checkNotNull(schema, "Schema must not be null.");
this.returnType = Preconditions.checkNotNull(returnType, "Type information must not be null.");
}
开发者ID:axbaretto,项目名称:flink,代码行数:20,代码来源:KafkaTableSource.java
示例18: withSchema
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Sets the schema of the produced table. May only be called once.
 *
 * @param schema The schema of the produced table.
 * @return The builder.
 * @throws NullPointerException if {@code schema} is {@code null}.
 * @throws IllegalArgumentException if a schema has already been set.
 */
public B withSchema(TableSchema schema) {
Preconditions.checkNotNull(schema, "Schema must not be null.");
Preconditions.checkArgument(this.schema == null, "Schema has already been set.");
this.schema = schema;
return builder();
}
开发者ID:axbaretto,项目名称:flink,代码行数:13,代码来源:KafkaTableSource.java
示例19: KafkaAvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka Avro {@link StreamTableSource} that decodes records
 * of the given {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param avroRecordClass Class of the Avro record read from the Kafka topic;
 *                        the physical return type is derived from it.
 */
protected KafkaAvroTableSource(
        String topic,
        Properties properties,
        TableSchema schema,
        Class<? extends SpecificRecordBase> avroRecordClass) {
    super(topic, properties, schema, convertToRowTypeInformation(avroRecordClass));
    this.avroRecordClass = avroRecordClass;
}
开发者ID:axbaretto,项目名称:flink,代码行数:23,代码来源:KafkaAvroTableSource.java
示例20: KafkaJsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka JSON {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema Schema of the produced table.
 * @param jsonSchema Schema of the JSON messages decoded from Kafka; the
 *                   physical return type is derived from it.
 */
protected KafkaJsonTableSource(
        String topic,
        Properties properties,
        TableSchema tableSchema,
        TableSchema jsonSchema) {
    super(topic, properties, tableSchema, jsonSchemaToReturnType(jsonSchema));
    this.jsonSchema = jsonSchema;
}
开发者ID:axbaretto,项目名称:flink,代码行数:23,代码来源:KafkaJsonTableSource.java
注:本文中的org.apache.flink.table.api.TableSchema类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论