本文整理了Java中org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils.getStagingJobHistoryFile()
方法的一些代码示例,展示了JobHistoryUtils.getStagingJobHistoryFile()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。JobHistoryUtils.getStagingJobHistoryFile()
方法的具体详情如下:
包路径:org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils
类名称:JobHistoryUtils
方法名:getStagingJobHistoryFile
[英]Get the job history file path for non Done history files.
[中]获取未完成历史记录文件的作业历史记录文件路径。
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-common
/**
 * Resolves the staging (not-yet-Done) history file path for the given job
 * attempt by converting the YARN {@code JobId} to its string form and
 * delegating to the string-based overload.
 */
public static Path getStagingJobHistoryFile(Path dir, JobId jobId, int attempt) {
  String jobIdString = TypeConverter.fromYarn(jobId).toString();
  return getStagingJobHistoryFile(dir, jobIdString, attempt);
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-common
/**
 * Returns the path of the in-progress (non Done) history file for this job
 * attempt under the given staging directory. Delegates to the overload that
 * takes the stringified job id.
 */
public static Path getStagingJobHistoryFile(Path dir, JobId jobId, int attempt) {
  final String idAsString = TypeConverter.fromYarn(jobId).toString();
  return getStagingJobHistoryFile(dir, idAsString, attempt);
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-common
/**
 * Computes the staging-area history file path (for history files that are not
 * yet moved to the Done directory). The YARN job id is first converted to its
 * classic MapReduce string representation.
 */
public static Path getStagingJobHistoryFile(Path dir, JobId jobId, int attempt) {
  String mrJobId = TypeConverter.fromYarn(jobId).toString();
  return getStagingJobHistoryFile(dir, mrJobId, attempt);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-common
/**
 * Builds the job history file path for non Done (staging) history files,
 * forwarding to the {@code String}-id overload after converting the id.
 */
public static Path getStagingJobHistoryFile(Path dir, JobId jobId, int attempt) {
  String convertedId = TypeConverter.fromYarn(jobId).toString();
  return getStagingJobHistoryFile(dir, convertedId, attempt);
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-common
/**
 * Computes the fully-qualified path of the staging history file written by
 * the previous application attempt ({@code getAttemptId() - 1}).
 *
 * @param conf configuration used to resolve the staging dir and FileContext
 * @param applicationAttemptId the current attempt id
 * @return qualified path of the prior attempt's staging history file
 * @throws IOException if a FileContext cannot be obtained
 */
public static Path getPreviousJobHistoryPath(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  final String jobId =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  final String stagingDir =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobId);
  // Qualify the staging dir first, then bind a FileContext to its scheme.
  final Path qualifiedStagingDir =
      FileContext.getFileContext(conf).makeQualified(new Path(stagingDir));
  final FileContext fc =
      FileContext.getFileContext(qualifiedStagingDir.toUri(), conf);
  final int previousAttempt = applicationAttemptId.getAttemptId() - 1;
  return fc.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(
      qualifiedStagingDir, jobId, previousAttempt));
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-common
/**
 * Returns the qualified staging-history-file path for the attempt preceding
 * {@code applicationAttemptId}.
 *
 * @param conf configuration for resolving the staging directory
 * @param applicationAttemptId current attempt; the file of attempt - 1 is returned
 * @return qualified path to the previous attempt's history file
 * @throws IOException on FileContext resolution failure
 */
public static Path getPreviousJobHistoryPath(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  String jobIdStr =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  String historyDirName =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobIdStr);
  Path historyDir =
      FileContext.getFileContext(conf).makeQualified(new Path(historyDirName));
  // Use a FileContext bound to the directory's own URI for qualification.
  FileContext dirContext = FileContext.getFileContext(historyDir.toUri(), conf);
  int priorAttempt = applicationAttemptId.getAttemptId() - 1;
  Path stagingFile =
      JobHistoryUtils.getStagingJobHistoryFile(historyDir, jobIdStr, priorAttempt);
  return dirContext.makeQualified(stagingFile);
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-common
/**
 * Locates the staging history file produced by the previous application
 * attempt and returns it as a fully qualified {@link Path}.
 *
 * @param conf job configuration
 * @param applicationAttemptId current application attempt
 * @return qualified path of the previous attempt's history file
 * @throws IOException if the FileContext cannot be created
 */
public static Path getPreviousJobHistoryPath(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  final String id =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  final String prefix =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, id);
  final Path stagingPath =
      FileContext.getFileContext(conf).makeQualified(new Path(prefix));
  final FileContext ctx = FileContext.getFileContext(stagingPath.toUri(), conf);
  // Previous attempt number is simply the current one minus one.
  final Path previousFile = JobHistoryUtils.getStagingJobHistoryFile(
      stagingPath, id, applicationAttemptId.getAttemptId() - 1);
  return ctx.makeQualified(previousFile);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-common
/**
 * Derives the qualified path of the history file written by the attempt
 * immediately before the supplied one.
 *
 * @param conf configuration used to look up the staging directory prefix
 * @param applicationAttemptId attempt whose predecessor's file is wanted
 * @return fully qualified previous-attempt history file path
 * @throws IOException if obtaining a FileContext fails
 */
public static Path getPreviousJobHistoryPath(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  String job =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  String dirPrefix =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, job);
  Path qualifiedDir =
      FileContext.getFileContext(conf).makeQualified(new Path(dirPrefix));
  FileContext fileCtx = FileContext.getFileContext(qualifiedDir.toUri(), conf);
  return fileCtx.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(
      qualifiedDir, job, applicationAttemptId.getAttemptId() - 1));
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-app
/**
 * Opens the history file written by the previous application attempt
 * ({@code getAttemptId() - 1}) for reading.
 *
 * <p>The caller is responsible for closing the returned stream.
 *
 * @param conf configuration used to resolve the staging dir and FileContext
 * @param applicationAttemptId the current application attempt
 * @return an open stream over the previous attempt's staging history file
 * @throws IOException if the FileContext cannot be obtained or the file
 *         cannot be opened
 */
public static FSDataInputStream getPreviousJobHistoryFileStream(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  String jobId =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  String jobhistoryDir =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobId);
  Path histDirPath =
      FileContext.getFileContext(conf).makeQualified(new Path(jobhistoryDir));
  FileContext fc = FileContext.getFileContext(histDirPath.toUri(), conf);
  // Locate the previous attempt's history file; the null pre-initializations
  // of the locals in the original were dead stores and have been removed.
  Path historyFile =
      fc.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(histDirPath,
          jobId, (applicationAttemptId.getAttemptId() - 1)));
  LOG.info("History file is at " + historyFile);
  return fc.open(historyFile);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-app
/**
 * Opens a read stream over the staging history file produced by the previous
 * application attempt.
 *
 * <p>The returned stream is open; closing it is the caller's responsibility.
 *
 * @param conf job configuration
 * @param applicationAttemptId current attempt; the file of attempt - 1 is read
 * @return open input stream positioned at the start of the history file
 * @throws IOException if resolution or open fails
 */
public static FSDataInputStream getPreviousJobHistoryFileStream(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  String jobId =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  String jobhistoryDir =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobId);
  Path histDirPath =
      FileContext.getFileContext(conf).makeQualified(new Path(jobhistoryDir));
  FileContext fc = FileContext.getFileContext(histDirPath.toUri(), conf);
  // read the previous history file (dead-store null initializers removed)
  Path historyFile =
      fc.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(histDirPath,
          jobId, (applicationAttemptId.getAttemptId() - 1)));
  LOG.info("History file is at " + historyFile);
  return fc.open(historyFile);
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-app
/**
 * Returns an open {@link FSDataInputStream} on the history file written by
 * the preceding application attempt.
 *
 * <p>Callers must close the returned stream.
 *
 * @param conf configuration used to find the staging directory
 * @param applicationAttemptId the current attempt id
 * @return open stream over the previous attempt's history file
 * @throws IOException if the path cannot be resolved or opened
 */
public static FSDataInputStream getPreviousJobHistoryFileStream(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  String jobId =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId())
          .toString();
  String jobhistoryDir =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobId);
  Path histDirPath =
      FileContext.getFileContext(conf).makeQualified(new Path(jobhistoryDir));
  FileContext fc = FileContext.getFileContext(histDirPath.toUri(), conf);
  // read the previous history file; redundant `= null` initializers dropped
  Path historyFile =
      fc.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(histDirPath,
          jobId, (applicationAttemptId.getAttemptId() - 1)));
  LOG.info("History file is at " + historyFile);
  return fc.open(historyFile);
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-app
Path historyFile = JobHistoryUtils.getStagingJobHistoryFile(
stagingDirPath, jobId, startCount);
String user = UserGroupInformation.getCurrentUser().getShortUserName();
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-app
Path historyFile = JobHistoryUtils.getStagingJobHistoryFile(
stagingDirPath, jobId, startCount);
String user = UserGroupInformation.getCurrentUser().getShortUserName();
代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-app
Path historyFile = JobHistoryUtils.getStagingJobHistoryFile(
stagingDirPath, jobId, startCount);
String user = UserGroupInformation.getCurrentUser().getShortUserName();
代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-app
jobId.toString());
dir.mkdirs();
File historyFile = new File(JobHistoryUtils.getStagingJobHistoryFile(
new Path(dir.toURI().toString()), jobId,
(applicationAttemptId.getAttemptId() - 1)).toUri().getRawPath());