This article collects and summarizes typical usage examples of the Java class org.apache.hadoop.hbase.mapreduce.JobUtil. If you have been wondering what the JobUtil class is for, how to use it, or where to find working examples, the curated code examples below may help.
The JobUtil class belongs to the org.apache.hadoop.hbase.mapreduce package. Six code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
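All six examples share the same pattern: call JobUtil.getStagingDir to locate the cluster's MapReduce staging directory, create a per-job input path underneath it, and remove the staging directory in a finally block once the job finishes. Before the examples, here is a minimal, self-contained sketch of that pattern; the class name, the "my-job-" prefix, and the placeholder comment are illustrative only and are not taken from any of the projects below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.mapreduce.JobUtil;

public class StagingDirSketch {
  // Obtain the MapReduce staging directory, work under a per-job subpath,
  // and clean up afterwards -- the pattern shared by the examples below.
  static int runWithStagingDir(Configuration conf, FileSystem fs) throws Exception {
    Path stagingDir = JobUtil.getStagingDir(conf);  // resolves the cluster's staging root
    try {
      Path inputPath = new Path(stagingDir, "my-job-" + System.currentTimeMillis());
      // ... write the job's input file under inputPath, then submit and wait for the job ...
      return 0;
    } finally {
      fs.delete(stagingDir, true);  // the examples below delete the staging dir when done
    }
  }
}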
Example 1: doMapReduce
import org.apache.hadoop.hbase.mapreduce.JobUtil; // import the required package/class
/**
 * Execute compaction, using a Map-Reduce job.
 */
private int doMapReduce(final FileSystem fs, final Set<Path> toCompactDirs,
    final boolean compactOnce, final boolean major) throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CONF_COMPACT_ONCE, compactOnce);
  conf.setBoolean(CONF_COMPACT_MAJOR, major);

  Job job = new Job(conf);
  job.setJobName("CompactionTool");
  job.setJarByClass(CompactionTool.class);
  job.setMapperClass(CompactionMapper.class);
  job.setInputFormatClass(CompactionInputFormat.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setMapSpeculativeExecution(false);
  job.setNumReduceTasks(0);

  // add dependencies (including HBase ones)
  TableMapReduceUtil.addDependencyJars(job);

  Path stagingDir = JobUtil.getStagingDir(conf);
  try {
    // Create input file with the store dirs
    Path inputPath = new Path(stagingDir, "compact-" + EnvironmentEdgeManager.currentTime());
    CompactionInputFormat.createInputFile(fs, inputPath, toCompactDirs);
    CompactionInputFormat.addInputPath(job, inputPath);

    // Initialize credential for secure cluster
    TableMapReduceUtil.initCredentials(job);

    // Start the MR Job and wait
    return job.waitForCompletion(true) ? 0 : 1;
  } finally {
    fs.delete(stagingDir, true);
  }
}
Developer ID: fengchen8086, Project: ditb, Lines of code: 38, Source file: CompactionTool.java
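The two conf.setBoolean calls above imply a matching read on the mapper side. The mapper itself is not shown in this excerpt, so the following is only a hedged sketch: the key strings are placeholders, and the real CONF_COMPACT_ONCE / CONF_COMPACT_MAJOR constants are defined in CompactionTool.

import org.apache.hadoop.conf.Configuration;

public class CompactionFlagsSketch {
  // Placeholder keys; the actual constant values live in CompactionTool
  // and are not visible in the excerpt above.
  static final String CONF_COMPACT_ONCE = "compactiontool.compact.once";
  static final String CONF_COMPACT_MAJOR = "compactiontool.compact.major";

  // Read back what doMapReduce() stored in the job configuration.
  static boolean[] readCompactionFlags(Configuration conf) {
    boolean compactOnce = conf.getBoolean(CONF_COMPACT_ONCE, false);
    boolean major = conf.getBoolean(CONF_COMPACT_MAJOR, false);
    return new boolean[] { compactOnce, major };
  }
}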
Example 2: doMapReduce
import org.apache.hadoop.hbase.mapreduce.JobUtil; // import the required package/class
/**
 * Execute compaction, using a Map-Reduce job.
 */
private int doMapReduce(final FileSystem fs, final Set<Path> toCompactDirs,
    final boolean compactOnce, final boolean major) throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CONF_COMPACT_ONCE, compactOnce);
  conf.setBoolean(CONF_COMPACT_MAJOR, major);

  Job job = new Job(conf);
  job.setJobName("CompactionTool");
  job.setJarByClass(CompactionTool.class);
  job.setMapperClass(CompactionMapper.class);
  job.setInputFormatClass(CompactionInputFormat.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setMapSpeculativeExecution(false);
  job.setNumReduceTasks(0);

  // add dependencies (including HBase ones)
  TableMapReduceUtil.addDependencyJars(job);
  // This job instantiates HRegions, which requires the Counter class from the high_scale library
  TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
      org.cliffc.high_scale_lib.Counter.class);

  Path stagingDir = JobUtil.getStagingDir(conf);
  try {
    // Create input file with the store dirs
    Path inputPath = new Path(stagingDir, "compact-" + EnvironmentEdgeManager.currentTimeMillis());
    CompactionInputFormat.createInputFile(fs, inputPath, toCompactDirs);
    CompactionInputFormat.addInputPath(job, inputPath);

    // Initialize credential for secure cluster
    TableMapReduceUtil.initCredentials(job);

    // Start the MR Job and wait
    return job.waitForCompletion(true) ? 0 : 1;
  } finally {
    fs.delete(stagingDir, true);
  }
}
Developer ID: tenggyut, Project: HIndex, Lines of code: 41, Source file: CompactionTool.java
Example 3: doMapReduce
import org.apache.hadoop.hbase.mapreduce.JobUtil; // import the required package/class
/**
 * Execute compaction, using a Map-Reduce job.
 */
private int doMapReduce(final FileSystem fs, final Set<Path> toCompactDirs,
    final boolean compactOnce, final boolean major) throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CONF_COMPACT_ONCE, compactOnce);
  conf.setBoolean(CONF_COMPACT_MAJOR, major);

  Job job = new Job(conf);
  job.setJobName("CompactionTool");
  job.setJarByClass(CompactionTool.class);
  job.setMapperClass(CompactionMapper.class);
  job.setInputFormatClass(CompactionInputFormat.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setMapSpeculativeExecution(false);
  job.setNumReduceTasks(0);

  // add dependencies (including HBase ones)
  TableMapReduceUtil.addDependencyJars(job);

  Path stagingDir = JobUtil.getStagingDir(conf);
  try {
    // Create input file with the store dirs
    Path inputPath = new Path(stagingDir, "compact-" + EnvironmentEdgeManager.currentTimeMillis());
    CompactionInputFormat.createInputFile(fs, inputPath, toCompactDirs);
    CompactionInputFormat.addInputPath(job, inputPath);

    // Initialize credential for secure cluster
    TableMapReduceUtil.initCredentials(job);

    // Start the MR Job and wait
    return job.waitForCompletion(true) ? 0 : 1;
  } finally {
    fs.delete(stagingDir, true);
  }
}
Developer ID: shenli-uiuc, Project: PyroDB, Lines of code: 38, Source file: CompactionTool.java
Example 4: doMapReduce
import org.apache.hadoop.hbase.mapreduce.JobUtil; // import the required package/class
/**
 * Execute compaction, using a Map-Reduce job.
 */
private int doMapReduce(final FileSystem fs, final Set<Path> toCompactDirs,
    final boolean compactOnce, final boolean major) throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CONF_COMPACT_ONCE, compactOnce);
  conf.setBoolean(CONF_COMPACT_MAJOR, major);

  Job job = new Job(conf);
  job.setJobName("CompactionTool");
  job.setJarByClass(CompactionTool.class);
  job.setMapperClass(CompactionMapper.class);
  job.setInputFormatClass(CompactionInputFormat.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setMapSpeculativeExecution(false);
  job.setNumReduceTasks(0);

  Path stagingDir = JobUtil.getStagingDir(conf);
  try {
    // Create input file with the store dirs
    Path inputPath = new Path(stagingDir, "compact-" + EnvironmentEdgeManager.currentTimeMillis());
    CompactionInputFormat.createInputFile(fs, inputPath, toCompactDirs);
    CompactionInputFormat.addInputPath(job, inputPath);

    // Initialize credential for secure cluster
    TableMapReduceUtil.initCredentials(job);

    // Start the MR Job and wait
    return job.waitForCompletion(true) ? 0 : 1;
  } finally {
    fs.delete(stagingDir, true);
  }
}
Developer ID: cloud-software-foundation, Project: c5, Lines of code: 35, Source file: CompactionTool.java
Example 5: doMapReduce
import org.apache.hadoop.hbase.mapreduce.JobUtil; // import the required package/class
/**
 * Execute compaction, using a Map-Reduce job.
 */
private int doMapReduce(final FileSystem fs, final Set<Path> toCompactDirs,
    final boolean compactOnce) throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CONF_COMPACT_ONCE, compactOnce);

  Job job = new Job(conf);
  job.setJobName("CompactionTool");
  job.setJarByClass(CompactionTool.class);
  job.setMapperClass(CompactionMapper.class);
  job.setInputFormatClass(CompactionInputFormat.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  job.setMapSpeculativeExecution(false);
  job.setNumReduceTasks(0);

  Path stagingDir = JobUtil.getStagingDir(conf);
  try {
    // Create input file with the store dirs
    Path inputPath = new Path(stagingDir, "compact-" + EnvironmentEdgeManager.currentTimeMillis());
    CompactionInputFormat.createInputFile(fs, inputPath, toCompactDirs);
    CompactionInputFormat.addInputPath(job, inputPath);

    // Initialize credential for secure cluster
    TableMapReduceUtil.initCredentials(job);

    // Start the MR Job and wait
    return job.waitForCompletion(true) ? 0 : 1;
  } finally {
    fs.delete(stagingDir, true);
  }
}
Developer ID: daidong, Project: DominoHBase, Lines of code: 34, Source file: CompactionTool.java
Example 6: getInputFolderPath
import org.apache.hadoop.hbase.mapreduce.JobUtil; // import the required package/class
private static Path getInputFolderPath(Configuration conf)
    throws IOException, InterruptedException {
  Path stagingDir = JobUtil.getStagingDir(conf);
  return new Path(stagingDir, INPUT_FOLDER_PREFIX +
      String.valueOf(EnvironmentEdgeManager.currentTimeMillis()));
}
Developer ID: tenggyut, Project: HIndex, Lines of code: 7, Source file: ExportSnapshot.java
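To make the helper above runnable on its own, here is a self-contained restatement with a small caller. The "export-input-" prefix stands in for ExportSnapshot's INPUT_FOLDER_PREFIX constant, whose actual value is not shown in this excerpt, and the surrounding class is purely illustrative.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.mapreduce.JobUtil;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

public class InputFolderSketch {
  // Placeholder value; the real INPUT_FOLDER_PREFIX belongs to ExportSnapshot.
  private static final String INPUT_FOLDER_PREFIX = "export-input-";

  private static Path getInputFolderPath(Configuration conf)
      throws IOException, InterruptedException {
    Path stagingDir = JobUtil.getStagingDir(conf);
    return new Path(stagingDir, INPUT_FOLDER_PREFIX + EnvironmentEdgeManager.currentTimeMillis());
  }

  public static void main(String[] args) throws Exception {
    // Requires a Hadoop configuration on the classpath so the staging dir can be resolved.
    Configuration conf = new Configuration();
    Path inputFolder = getInputFolderPath(conf);
    FileSystem fs = inputFolder.getFileSystem(conf);
    fs.mkdirs(inputFolder);  // a caller would then write per-mapper input files here
    System.out.println("Job input folder: " + inputFolder);
  }
}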
Note: The org.apache.hadoop.hbase.mapreduce.JobUtil examples in this article are collected from source-code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their authors; copyright remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not republish without permission.