本文整理了Java中org.apache.hadoop.fs.LocalFileSystem.delete()
方法的一些代码示例,展示了LocalFileSystem.delete()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。LocalFileSystem.delete()
方法的具体详情如下:
包路径:org.apache.hadoop.fs.LocalFileSystem
类名称:LocalFileSystem
方法名:delete
LocalFileSystem.delete介绍
暂无
代码示例
代码示例来源:origin: apache/kylin
@Test
public void testWriteCuboidStatistics() throws IOException {
    // Was garbled to "cOnf=" in the scraped source; "conf" is the name used below.
    final Configuration conf = HadoopUtil.getCurrentConfiguration();
    // Temp file only reserves a unique parent directory; the actual output path
    // is a random UUID beside it so repeated runs never collide.
    File tmp = File.createTempFile("cuboidstatistics", "");
    final Path outputPath = new Path(tmp.getParent().toString() + File.separator + RandomUtil.randomUUID().toString());
    if (!FileSystem.getLocal(conf).exists(outputPath)) {
        // FileSystem.getLocal(conf).create(outputPath);
    }
    System.out.println(outputPath);
    // NOTE(review): raw Map kept as in the original; element types are declared
    // by CubeStatsWriter elsewhere in the project — confirm before parameterizing.
    Map cuboidHLLMap = Maps.newHashMap();
    CubeStatsWriter.writeCuboidStatistics(conf, outputPath, cuboidHLLMap, 100);
    // Recursive delete cleans up the generated statistics directory.
    FileSystem.getLocal(conf).delete(outputPath, true);
}
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/**
* Use MRAsyncDiskService.moveAndDeleteAllVolumes instead.
*/
@Deprecated
public void deleteLocalFiles() throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i]), true);
}
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
public void deleteLocalFiles(String subdir) throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i], subdir), true);
}
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/**
* Use MRAsyncDiskService.moveAndDeleteAllVolumes instead.
*/
@Deprecated
public void deleteLocalFiles() throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i]), true);
}
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
/**
* Use MRAsyncDiskService.moveAndDeleteAllVolumes instead.
* @see org.apache.hadoop.util.MRAsyncDiskService#cleanupAllVolumes()
*/
@Deprecated
public void deleteLocalFiles() throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i]));
}
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
/**
* Use MRAsyncDiskService.moveAndDeleteAllVolumes instead.
*/
@Deprecated
public void deleteLocalFiles() throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i]), true);
}
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
public void deleteLocalFiles(String subdir) throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i], subdir), true);
}
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
public void deleteLocalFiles(String subdir) throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i], subdir));
}
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
public void deleteLocalFiles(String subdir) throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i], subdir), true);
}
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
* Use MRAsyncDiskService.moveAndDeleteAllVolumes instead.
*/
@Deprecated
public void deleteLocalFiles() throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i FileSystem.getLocal(this).delete(new Path(localDirs[i]), true);
}
}
代码示例来源:origin: org.apache.hama/hama-core
/**
 * Best-effort removal of {@code subdir} beneath every configured local
 * directory. A NullPointerException (e.g. from missing configuration) is
 * logged and swallowed so cleanup never aborts the caller.
 *
 * @param subdir relative subdirectory to remove from each local dir
 * @throws IOException if a filesystem delete fails
 */
void deleteLocalFiles(String subdir) throws IOException {
    try {
        String[] dirs = getLocalDirs();
        for (int i = 0; i < dirs.length; i++) {
            FileSystem.getLocal(conf).delete(new Path(dirs[i], subdir), true);
        }
    } catch (NullPointerException e) {
        LOG.info(e); // deliberate: cleanup stays best-effort
    }
}
代码示例来源:origin: org.apache.hama/hama-core
/**
 * Best-effort removal of {@code subdir} beneath every configured local
 * directory; a NullPointerException is logged rather than propagated.
 *
 * @param subdir relative subdirectory to remove from each local dir
 * @throws IOException if a filesystem delete fails
 */
public void deleteLocalFiles(String subdir) throws IOException {
    try {
        String[] dirs = getLocalDirs();
        for (int idx = 0; idx < dirs.length; idx++) {
            FileSystem.getLocal(this.conf).delete(new Path(dirs[idx], subdir), true);
        }
    } catch (NullPointerException e) {
        LOG.info(e); // deliberate: cleanup stays best-effort
    }
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Test fixture: builds a minimal Configuration bound to LocalFileSystem and
 * wipes the test root so each test starts from a clean slate.
 */
@Before
public void setup() throws IOException {
    // Was garbled to "cOnf=" in the scraped source; "conf" is the name used below.
    conf = new Configuration(false);
    conf.set("fs.file.impl", LocalFileSystem.class.getName());
    fileSys = FileSystem.getLocal(conf);
    fileSys.delete(new Path(TEST_ROOT_DIR), true);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Reads the whole file at {@code out} into a String (platform charset, as in
 * the original), then removes the file (non-recursive delete).
 *
 * @param out path of the file to read and delete
 * @return the file contents as a String
 * @throws IOException on read or delete failure
 */
private String readFile(String out) throws IOException {
    final Path target = new Path(out);
    // Size the buffer from file metadata before opening the stream.
    final byte[] data = new byte[(int) lfs.getFileStatus(target).getLen()];
    FSDataInputStream stream = lfs.open(target);
    stream.readFully(data);
    stream.close();
    lfs.delete(target, false);
    return new String(data);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
/**
 * Clears the test root on the local filesystem, then delegates to the
 * superclass fixture setup.
 */
@Before
@Override
public void setUp() throws IOException {
    final Path testRoot = new Path(TEST_ROOT_DIR);
    fileSys.delete(testRoot, true); // recursive wipe of any prior run's data
    super.setUp();
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Verifies copying a local source file into a target directory via checkPut.
 */
@Test
public void testCopyFileFromLocal() throws Exception {
    final Path root = new Path(testRootDir, "testPutFile");
    lfs.delete(root, true);   // recursive wipe of any previous run
    lfs.mkdirs(root);
    final Path target = new Path(root, "target");
    final Path source = new Path(root, new Path("srcFile"));
    lfs.create(source).close(); // empty source file is sufficient here
    checkPut(source, target, false);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
/**
 * Verifies copying a local source file into a target directory via checkPut.
 */
@Test
public void testCopyFileFromLocal() throws Exception {
    final Path base = new Path(testRootDir, "testPutFile");
    // Start clean, then lay out target dir and an empty source file.
    lfs.delete(base, true);
    lfs.mkdirs(base);
    final Path destDir = new Path(base, "target");
    final Path srcFile = new Path(base, new Path("srcFile"));
    lfs.create(srcFile).close();
    checkPut(srcFile, destDir, false);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Resets TEST_ROOT on the local filesystem, failing the test outright if a
 * stale root exists and cannot be removed.
 */
@Override
public void setUp() throws Exception {
    final LocalFileSystem localFs = FileSystem.getLocal(conf);
    final boolean staleRootStuck = localFs.exists(TEST_ROOT) && !localFs.delete(TEST_ROOT, true);
    if (staleRootStuck) {
        Assert.fail("Can't clean up test root dir");
    }
    localFs.mkdirs(TEST_ROOT);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
/**
 * Resets TEST_ROOT on the local filesystem; a stale root that resists
 * deletion fails the fixture immediately.
 */
@Override
public void setUp() throws Exception {
    final LocalFileSystem local = FileSystem.getLocal(conf);
    if (local.exists(TEST_ROOT) && !local.delete(TEST_ROOT, true)) {
        Assert.fail("Can't clean up test root dir");
    }
    local.mkdirs(TEST_ROOT);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Fixture: enables checksum verify/write, clears src and dst paths, writes a
 * tiny source file, and asserts its checksum sidecar file exists.
 */
@Before
public void prepFiles() throws Exception {
    // Checksums must be on for both the read and write paths.
    lfs.setVerifyChecksum(true);
    lfs.setWriteChecksum(true);
    // Remove leftovers from prior runs before recreating the source.
    lfs.delete(srcPath, true);
    lfs.delete(dstPath, true);
    FSDataOutputStream stream = lfs.create(srcPath);
    stream.writeChars("hi");
    stream.close();
    // Writing with checksums on must have produced the .crc sidecar.
    assertTrue(lfs.exists(lfs.getChecksumFile(srcPath)));
}