Java Row Class Code Examples


This article collects typical usage examples of the Java class org.apache.flink.types.Row. If you are wondering what the Row class does, how to use it, or want to see it in real code, the curated examples below should help.



The Row class belongs to the org.apache.flink.types package. The following 20 code examples demonstrate its use, ordered by popularity.
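
Before diving into the examples, here is a minimal sketch of the core Row API: constructing a row, setting and reading fields by position. The class name RowBasics and the field values are illustrative placeholders, not taken from any project below.

import org.apache.flink.types.Row;

public class RowBasics {
	public static void main(String[] args) {
		// Create a row with a fixed arity and set each field by position.
		Row row = new Row(3);
		row.setField(0, 42);
		row.setField(1, "hello");
		row.setField(2, 3.14);

		// Equivalently, build it in one call with the static factory.
		Row sameRow = Row.of(42, "hello", 3.14);

		// Fields come back as Object; the caller casts to the expected type.
		int id = (int) row.getField(0);
		System.out.println(row.getArity() + " fields: " + row + ", id=" + id);
	}
}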

Example 1: testAggregationWithTwoCount

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testAggregationWithTwoCount() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	DataSource<Tuple2<Float, String>> input =
		env.fromElements(
			new Tuple2<>(1f, "Hello"),
			new Tuple2<>(2f, "Ciao"));

	Table table =
		tableEnv.fromDataSet(input);

	Table result =
		table.select("f0.count, f1.count");

	DataSet<Row> ds = tableEnv.toDataSet(result, Row.class);
	List<Row> results = ds.collect();
	String expected = "2,2";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 22, Source: AggregationsITCase.java


Example 2: getOutputTable

import org.apache.flink.types.Row; // import the required package/class
private AppendStreamTableSink<Row> getOutputTable(
    ExternalCatalogTable output) throws IOException {
  String tableType = output.tableType();
  DataSinkProvider c = DataSinkRegistry.getProvider(tableType);
  Preconditions.checkNotNull(c, "Cannot find output connectors for " + tableType);
  return c.getAppendStreamTableSink(output);
}
 
Developer: uber, Project: AthenaX, Lines: 8, Source: JobCompiler.java


Example 3: testCompile

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testCompile() throws IOException {
  RowTypeInfo schema = new RowTypeInfo(new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO}, new String[] {"id"});
  MockExternalCatalogTable inputTable = new MockExternalCatalogTable(schema, Collections.singletonList(Row.of(1)));
  MockExternalCatalogTable outputTable = new MockExternalCatalogTable(schema, new ArrayList<>());
  SingleLevelMemoryCatalog input = new SingleLevelMemoryCatalog("input",
      Collections.singletonMap("foo", inputTable));
  SingleLevelMemoryCatalog output = new SingleLevelMemoryCatalog("output",
      Collections.singletonMap("bar", outputTable));
  JobDescriptor job = new JobDescriptor(
      Collections.singletonMap("input", input),
      Collections.emptyMap(),
      output,
      1,
      "SELECT * FROM input.foo");
  CompilationResult res = new ContainedExecutor().run(job);
  assertNull(res.remoteThrowable());
  assertNotNull(res.jobGraph());
}
 
Developer: uber, Project: AthenaX, Lines: 20, Source: ProcessExecutorTest.java


Example 4: testInvalidSql

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testInvalidSql() throws IOException {
  RowTypeInfo schema = new RowTypeInfo(new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO}, new String[] {"id"});
  MockExternalCatalogTable inputTable = new MockExternalCatalogTable(schema, Collections.singletonList(Row.of(1)));
  MockExternalCatalogTable outputTable = new MockExternalCatalogTable(schema, new ArrayList<>());
  SingleLevelMemoryCatalog input = new SingleLevelMemoryCatalog("input",
      Collections.singletonMap("foo", inputTable));
  SingleLevelMemoryCatalog output = new SingleLevelMemoryCatalog("output",
      Collections.singletonMap("bar", outputTable));
  JobDescriptor job = new JobDescriptor(
      Collections.singletonMap("input", input),
      Collections.emptyMap(),
      output,
      1,
      "SELECT2 * FROM input.foo");
  CompilationResult res = new ContainedExecutor().run(job);
  assertNull(res.jobGraph());
  assertTrue(res.remoteThrowable() instanceof SqlParserException);
}
 
Developer: uber, Project: AthenaX, Lines: 20, Source: ProcessExecutorTest.java


Example 5: testBatchTableSourceSQL

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testBatchTableSourceSQL() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
	BatchTableSource csvTable = CommonTestData.getCsvTableSource();

	tableEnv.registerTableSource("persons", csvTable);

	Table result = tableEnv
		.sqlQuery("SELECT `last`, FLOOR(id), score * 2 FROM persons WHERE score < 20");

	DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
	List<Row> results = resultSet.collect();

	String expected = "Smith,1,24.6\n" +
		"Miller,3,15.78\n" +
		"Smith,4,0.24\n" +
		"Miller,6,13.56\n" +
		"Williams,8,4.68\n";

	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 23, Source: JavaTableSourceITCase.java


Example 6: testNumericAutocastInArithmetic

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testNumericAutocastInArithmetic() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	DataSource<Tuple8<Byte, Short, Integer, Long, Float, Double, Long, Double>> input =
			env.fromElements(new Tuple8<>((byte) 1, (short) 1, 1, 1L, 1.0f, 1.0d, 1L, 1001.1));

	Table table =
			tableEnv.fromDataSet(input);

	Table result = table.select("f0 + 1, f1 +" +
			" 1, f2 + 1L, f3 + 1.0f, f4 + 1.0d, f5 + 1, f6 + 1.0d, f7 + f0");

	DataSet<Row> ds = tableEnv.toDataSet(result, Row.class);
	List<Row> results = ds.collect();
	String expected = "2,2,2,2.0,2.0,2.0,2.0,1002.1";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 20, Source: CastingITCase.java


Example 7: testSimpleRegister

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testSimpleRegister() throws Exception {
	final String tableName = "MyTable";
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
	tableEnv.registerDataSet(tableName, ds);
	Table t = tableEnv.scan(tableName);

	Table result = t.select("f0, f1");

	DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
	List<Row> results = resultSet.collect();
	String expected = "1,1\n" + "2,2\n" + "3,2\n" + "4,3\n" + "5,3\n" + "6,3\n" + "7,4\n" +
			"8,4\n" + "9,4\n" + "10,4\n" + "11,5\n" + "12,5\n" + "13,5\n" + "14,5\n" + "15,5\n" +
			"16,6\n" + "17,6\n" + "18,6\n" + "19,6\n" + "20,6\n" + "21,6\n";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 20, Source: JavaTableEnvironmentITCase.java


Example 8: verifySplit

import org.apache.flink.types.Row; // import the required package/class
private void verifySplit(InputSplit split, int expectedIDSum) throws IOException {
	int sum = 0;

	Row row = new Row(5);
	jdbcInputFormat.open(split);
	while (!jdbcInputFormat.reachedEnd()) {
		row = jdbcInputFormat.nextRecord(row);

		int id = ((int) row.getField(0));
		int testDataIndex = id - 1001;

		assertEquals(TEST_DATA[testDataIndex], row);
		sum += id;
	}

	Assert.assertEquals(expectedIDSum, sum);
}
 
Developer: axbaretto, Project: flink, Lines: 18, Source: JDBCInputFormatTest.java


Example 9: testMap

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testMap() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	List<Tuple2<Integer, Map<String, String>>> rows = new ArrayList<>();
	rows.add(new Tuple2<>(1, Collections.singletonMap("foo", "bar")));
	rows.add(new Tuple2<>(2, Collections.singletonMap("foo", "spam")));

	TypeInformation<Tuple2<Integer, Map<String, String>>> ty = new TupleTypeInfo<>(
		BasicTypeInfo.INT_TYPE_INFO,
		new MapTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO));

	DataSet<Tuple2<Integer, Map<String, String>>> ds1 = env.fromCollection(rows, ty);
	tableEnv.registerDataSet("t1", ds1, "a, b");

	String sqlQuery = "SELECT b['foo'] FROM t1";
	Table result = tableEnv.sqlQuery(sqlQuery);

	DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
	List<Row> results = resultSet.collect();
	String expected = "bar\n" + "spam\n";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 25, Source: JavaSqlITCase.java


Example 10: testAsFromTupleByName

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testAsFromTupleByName() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	Table table = tableEnv.fromDataSet(CollectionDataSets.get3TupleDataSet(env), "f2");

	DataSet<Row> ds = tableEnv.toDataSet(table, Row.class);
	List<Row> results = ds.collect();
	String expected = "Hi\n" + "Hello\n" + "Hello world\n" +
		"Hello world, how are you?\n" + "I am fine.\n" + "Luke Skywalker\n" +
		"Comment#1\n" + "Comment#2\n" + "Comment#3\n" + "Comment#4\n" +
		"Comment#5\n" + "Comment#6\n" + "Comment#7\n" +
		"Comment#8\n" + "Comment#9\n" + "Comment#10\n" +
		"Comment#11\n" + "Comment#12\n" + "Comment#13\n" +
		"Comment#14\n" + "Comment#15\n";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 19, Source: JavaTableEnvironmentITCase.java


Example 11: testAsFromPrivateFieldsPojo

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testAsFromPrivateFieldsPojo() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	List<PrivateSmallPojo> data = new ArrayList<>();
	data.add(new PrivateSmallPojo("Peter", 28, 4000.00, "Sales"));
	data.add(new PrivateSmallPojo("Anna", 56, 10000.00, "Engineering"));
	data.add(new PrivateSmallPojo("Lucy", 42, 6000.00, "HR"));

	Table table = tableEnv
		.fromDataSet(env.fromCollection(data),
			"department AS a, " +
			"age AS b, " +
			"salary AS c, " +
			"name AS d")
		.select("a, b, c, d");

	DataSet<Row> ds = tableEnv.toDataSet(table, Row.class);
	List<Row> results = ds.collect();
	String expected =
		"Sales,28,4000.0,Peter\n" +
		"Engineering,56,10000.0,Anna\n" +
		"HR,42,6000.0,Lucy\n";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: TableEnvironmentITCase.java


Example 12: testIncompatibleTypes

import org.apache.flink.types.Row; // import the required package/class
@Test(expected = RuntimeException.class)
public void testIncompatibleTypes() throws IOException {
	jdbcOutputFormat = JDBCOutputFormat.buildJDBCOutputFormat()
			.setDrivername(DRIVER_CLASS)
			.setDBUrl(DB_URL)
			.setQuery(String.format(INSERT_TEMPLATE, INPUT_TABLE))
			.finish();
	jdbcOutputFormat.open(0, 1);

	Row row = new Row(5);
	row.setField(0, 4);
	row.setField(1, "hello");
	row.setField(2, "world");
	row.setField(3, 0.99);
	row.setField(4, "imthewrongtype");

	jdbcOutputFormat.writeRecord(row);
	jdbcOutputFormat.close();
}
 
Developer: axbaretto, Project: flink, Lines: 20, Source: JDBCOutputFormatTest.java


Example 13: testIntegerBiggerThan128

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testIntegerBiggerThan128() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	DataSet<Tuple3<Integer, Long, String>> input = env.fromElements(new Tuple3<>(300, 1L, "Hello"));
	Table table = tableEnv.fromDataSet(input, "a, b, c");

	Table result = table
		.filter("a = 300 ");

	DataSet<Row> ds = tableEnv.toDataSet(result, Row.class);
	List<Row> results = ds.collect();
	String expected = "300,1,Hello\n";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: CalcITCase.java


Example 14: testWorkingAggregationDataTypes

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testWorkingAggregationDataTypes() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	DataSource<Tuple7<Byte, Short, Integer, Long, Float, Double, String>> input =
			env.fromElements(
					new Tuple7<>((byte) 1, (short) 1, 1, 1L, 1.0f, 1.0d, "Hello"),
					new Tuple7<>((byte) 2, (short) 2, 2, 2L, 2.0f, 2.0d, "Ciao"));

	Table table = tableEnv.fromDataSet(input);

	Table result =
			table.select("f0.avg, f1.avg, f2.avg, f3.avg, f4.avg, f5.avg, f6.count");

	DataSet<Row> ds = tableEnv.toDataSet(result, Row.class);
	List<Row> results = ds.collect();
	String expected = "1,1,1,1,1.5,1.5,2";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 21, Source: AggregationsITCase.java


Example 15: addSink

import org.apache.flink.types.Row; // import the required package/class
/**
 * Writes a DataStream into a Cassandra database.
 *
 * @param input input DataStream
 * @param <IN>  input type
 * @return CassandraSinkBuilder, to further configure the sink
 */
public static <IN> CassandraSinkBuilder<IN> addSink(DataStream<IN> input) {
	TypeInformation<IN> typeInfo = input.getType();
	if (typeInfo instanceof TupleTypeInfo) {
		DataStream<Tuple> tupleInput = (DataStream<Tuple>) input;
		return (CassandraSinkBuilder<IN>) new CassandraTupleSinkBuilder<>(tupleInput, tupleInput.getType(), tupleInput.getType().createSerializer(tupleInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof RowTypeInfo) {
		DataStream<Row> rowInput = (DataStream<Row>) input;
		return (CassandraSinkBuilder<IN>) new CassandraRowSinkBuilder(rowInput, rowInput.getType(), rowInput.getType().createSerializer(rowInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof PojoTypeInfo) {
		return new CassandraPojoSinkBuilder<>(input, input.getType(), input.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof CaseClassTypeInfo) {
		DataStream<Product> productInput = (DataStream<Product>) input;
		return (CassandraSinkBuilder<IN>) new CassandraScalaProductSinkBuilder<>(productInput, productInput.getType(), productInput.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	throw new IllegalArgumentException("No support for the type of the given DataStream: " + input.getType());
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: CassandraSink.java
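
As a usage note for the builder returned above, a hypothetical call site might look like the following sketch. The stream contents, keyspace/table name, and host are placeholders, and the fluent methods shown (setQuery, setHost, build) are assumed from the connector's builder API rather than taken from this file.

// Hypothetical usage of CassandraSink.addSink; query, host, and data are placeholders.
DataStream<Tuple2<String, Integer>> stream = env.fromElements(
	Tuple2.of("a", 1),
	Tuple2.of("b", 2));

CassandraSink.addSink(stream)
	.setQuery("INSERT INTO example_keyspace.counts (id, counter) VALUES (?, ?);")
	.setHost("127.0.0.1")
	.build();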


Example 16: testGroupedAggregate

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testGroupedAggregate() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

	DataSet<Tuple3<Integer, Long, String>> input = CollectionDataSets.get3TupleDataSet(env);
	Table table = tableEnv.fromDataSet(input, "a, b, c");

	Table result = table
			.groupBy("b").select("b, a.sum");

	DataSet<Row> ds = tableEnv.toDataSet(result, Row.class);
	List<Row> results = ds.collect();
	String expected = "1,1\n" + "2,5\n" + "3,15\n" + "4,34\n" + "5,65\n" + "6,111\n";
	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: AggregationsITCase.java


Example 17: testJDBCInputFormatWithoutParallelism

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testJDBCInputFormatWithoutParallelism() throws IOException {
	jdbcInputFormat = JDBCInputFormat.buildJDBCInputFormat()
			.setDrivername(DRIVER_CLASS)
			.setDBUrl(DB_URL)
			.setQuery(SELECT_ALL_BOOKS)
			.setRowTypeInfo(ROW_TYPE_INFO)
			.setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
			.finish();
	//this query does not exploit parallelism
	Assert.assertEquals(1, jdbcInputFormat.createInputSplits(1).length);
	jdbcInputFormat.openInputFormat();
	jdbcInputFormat.open(null);
	Row row = new Row(5);
	int recordCount = 0;
	while (!jdbcInputFormat.reachedEnd()) {
		Row next = jdbcInputFormat.nextRecord(row);

		assertEquals(TEST_DATA[recordCount], next);

		recordCount++;
	}
	jdbcInputFormat.close();
	jdbcInputFormat.closeInputFormat();
	Assert.assertEquals(TEST_DATA.length, recordCount);
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: JDBCInputFormatTest.java


Example 18: verifyResultsWhenReScaling

import org.apache.flink.types.Row; // import the required package/class
@Override
protected void verifyResultsWhenReScaling(
	CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink, int startElementCounter, int endElementCounter) {

	// IMPORTANT NOTE:
	//
	// for cassandra we always have to start from 1 because
	// all operators will share the same final db

	ArrayList<Integer> expected = new ArrayList<>();
	for (int i = 1; i <= endElementCounter; i++) {
		expected.add(i);
	}

	ArrayList<Integer> actual = new ArrayList<>();
	ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));

	for (com.datastax.driver.core.Row s : result) {
		actual.add(s.getInt("counter"));
	}

	Collections.sort(actual);
	Assert.assertArrayEquals(expected.toArray(), actual.toArray());
}
 
Developer: axbaretto, Project: flink, Lines: 25, Source: CassandraConnectorITCase.java


Example 19: testCassandraScalaTupleAtLeastSink

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testCassandraScalaTupleAtLeastSink() throws Exception {
	CassandraScalaProductSink<scala.Tuple3<String, Integer, Integer>> sink = new CassandraScalaProductSink<>(injectTableName(INSERT_DATA_QUERY), builder);

	List<scala.Tuple3<String, Integer, Integer>> scalaTupleCollection = new ArrayList<>(20);
	for (int i = 0; i < 20; i++) {
		scalaTupleCollection.add(new scala.Tuple3<>(UUID.randomUUID().toString(), i, 0));
	}

	sink.open(new Configuration());
	for (scala.Tuple3<String, Integer, Integer> value : scalaTupleCollection) {
		sink.invoke(value, SinkContextUtil.forTimestamp(0));
	}
	sink.close();

	ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
	List<com.datastax.driver.core.Row> rows = rs.all();
	Assert.assertEquals(scalaTupleCollection.size(), rows.size());

	for (com.datastax.driver.core.Row row : rows) {
		scalaTupleCollection.remove(new scala.Tuple3<>(row.getString("id"), row.getInt("counter"), row.getInt("batch_id")));
	}
	Assert.assertEquals(0, scalaTupleCollection.size());
}
 
Developer: axbaretto, Project: flink, Lines: 25, Source: CassandraConnectorITCase.java


Example 20: testBatchTableSourceTableAPI

import org.apache.flink.types.Row; // import the required package/class
@Test
public void testBatchTableSourceTableAPI() throws Exception {

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
	BatchTableSource csvTable = CommonTestData.getCsvTableSource();

	tableEnv.registerTableSource("persons", csvTable);

	Table result = tableEnv.scan("persons")
		.select("id, first, last, score");

	DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
	List<Row> results = resultSet.collect();

	String expected = "1,Mike,Smith,12.3\n" +
		"2,Bob,Taylor,45.6\n" +
		"3,Sam,Miller,7.89\n" +
		"4,Peter,Smith,0.12\n" +
		"5,Liz,Williams,34.5\n" +
		"6,Sally,Miller,6.78\n" +
		"7,Alice,Smith,90.1\n" +
		"8,Kelly,Williams,2.34\n";

	compareResultAsText(results, expected);
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: JavaTableSourceITCase.java



Note: the org.apache.flink.types.Row examples above were collected from open-source projects hosted on GitHub, MSDocs, and similar platforms. The snippets come from projects contributed by their respective authors; copyright remains with the original authors, and use or redistribution is governed by each project's license. Please do not republish without permission.

