This article collects code examples for the Java class org.apache.hadoop.io.compress.Lz4Codec and shows how the Lz4Codec class is used in practice. The examples come mainly from GitHub, Stack Overflow, Maven and similar platforms, extracted from a selection of projects, and should serve as useful references. Details of the Lz4Codec class:
Package path: org.apache.hadoop.io.compress.Lz4Codec
Class name: Lz4Codec
This class creates lz4 compressors/decompressors.
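Before the collected examples, here is a minimal usage sketch (not taken from the projects below) that round-trips a byte array through the codec's stream API. Class and variable names are illustrative; depending on the Hadoop version, the codec may require the native libhadoop LZ4 support to be loaded, otherwise it throws "native lz4 library not available".
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Lz4Codec;

public class Lz4RoundTrip {
  public static void main(String[] args) throws IOException {
    Lz4Codec codec = new Lz4Codec();
    codec.setConf(new Configuration());

    byte[] original = "hello lz4".getBytes(StandardCharsets.UTF_8);

    // Compress into an in-memory buffer; close() flushes the final block.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (CompressionOutputStream out = codec.createOutputStream(compressed)) {
      out.write(original);
    }

    // Decompress the buffer back into plain bytes.
    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    try (CompressionInputStream in =
        codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()))) {
      IOUtils.copyBytes(in, restored, 4096, false);
    }

    System.out.println(new String(restored.toByteArray(), StandardCharsets.UTF_8));
  }
}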
Code example origin: org.apache.hadoop/hadoop-common
/**
* Get the type of {@link Compressor} needed by this {@link CompressionCodec}.
*
* @return the type of compressor needed by this codec.
*/
@Override
public Class<? extends Compressor> getCompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
return Lz4Compressor.class;
}
Code example origin: org.apache.hadoop/hadoop-common
lz4LibraryName = Lz4Codec.getLibraryName();
Code example origin: airlift/aircompressor
public TestLz4CodecByteAtATime()
{
org.apache.hadoop.io.compress.Lz4Codec codec = new org.apache.hadoop.io.compress.Lz4Codec();
codec.setConf(new Configuration());
this.verifyCodec = codec;
}
Code example origin: org.apache.geode/gemfire-core
private static Option withCompression(Logger logger) {
String prop = System.getProperty(HoplogConfig.COMPRESSION);
if (prop != null) {
CompressionCodec codec;
if (prop.equalsIgnoreCase("SNAPPY")) {
codec = new SnappyCodec();
} else if (prop.equalsIgnoreCase("LZ4")) {
codec = new Lz4Codec();
} else if (prop.equals("GZ")) {
codec = new GzipCodec();
} else {
throw new IllegalStateException("Unsupported codec: " + prop);
}
if (logger.isDebugEnabled())
logger.debug("{}Using compression codec " + codec, logPrefix);
return SequenceFile.Writer.compression(CompressionType.BLOCK, codec);
}
return SequenceFile.Writer.compression(CompressionType.NONE, null);
}
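The Option built by withCompression() is only one of the options handed to SequenceFile.createWriter. The following is a hypothetical sketch, not code from gemfire-core, showing how such an option is typically combined with the file and key/value-class options; the path and key/value types are placeholders.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.Lz4Codec;

public class Lz4SequenceFileWrite {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    Lz4Codec codec = new Lz4Codec();
    codec.setConf(conf);  // make sure the codec sees the Configuration

    // The compression option plays the same role as the one built above.
    try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(new Path("/tmp/lz4-example.seq")),
        SequenceFile.Writer.keyClass(BytesWritable.class),
        SequenceFile.Writer.valueClass(BytesWritable.class),
        SequenceFile.Writer.compression(CompressionType.BLOCK, codec))) {
      writer.append(new BytesWritable("key".getBytes()),
          new BytesWritable("value".getBytes()));
    }
  }
}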
Code example origin: airlift/aircompressor
public TestLz4Codec()
{
org.apache.hadoop.io.compress.Lz4Codec codec = new org.apache.hadoop.io.compress.Lz4Codec();
codec.setConf(new Configuration());
this.verifyCodec = codec;
}
Code example origin: io.snappydata/gemfire-core
private static Option withCompression(LogWriterI18n logger) {
String prop = System.getProperty(HoplogConfig.COMPRESSION);
if (prop != null) {
CompressionCodec codec;
if (prop.equalsIgnoreCase("SNAPPY")) {
codec = new SnappyCodec();
} else if (prop.equalsIgnoreCase("LZ4")) {
codec = new Lz4Codec();
} else if (prop.equals("GZ")) {
codec = new GzipCodec();
} else {
throw new IllegalStateException("Unsupported codec: " + prop);
}
logger.fine("Using compression codec " + codec);
return SequenceFile.Writer.compression(CompressionType.BLOCK, codec);
}
return SequenceFile.Writer.compression(CompressionType.NONE, null);
}
Code example origin: org.apache.hadoop/hadoop-common
/**
* Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
*
* @return the type of decompressor needed by this codec.
*/
@Override
public Class<? extends Decompressor> getDecompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
return Lz4Decompressor.class;
}
Code example origin: ch.cern.hadoop/hadoop-common
lz4LibraryName = Lz4Codec.getLibraryName();
Code example origin: org.apache.hadoop/hadoop-common
/**
* Create a new {@link Decompressor} for use by this {@link CompressionCodec}.
*
* @return a new decompressor for use by this codec
*/
@Override
public Decompressor createDecompressor() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
int bufferSize = conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_DEFAULT);
return new Lz4Decompressor(bufferSize);
}
Code example origin: com.github.jiayuhan-it/hadoop-common
lz4LibraryName = Lz4Codec.getLibraryName();
Code example origin: org.apache.hadoop/hadoop-common
/**
* Create a new {@link Compressor} for use by this {@link CompressionCodec}.
*
* @return a new compressor for use by this codec
*/
@Override
public Compressor createCompressor() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
int bufferSize = conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_DEFAULT);
boolean useLz4HC = conf.getBoolean(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_DEFAULT);
return new Lz4Compressor(bufferSize, useLz4HC);
}
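As the example above shows, the compressor's buffer size and the optional high-compression (LZ4HC) mode come from the Configuration. Below is a small sketch, assuming the same CommonConfigurationKeys constants, of tuning those settings before requesting a compressor; the 256 KB buffer size is only an illustrative value.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Lz4Codec;

public class Lz4CompressorConfig {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // These are the same keys read by Lz4Codec.createCompressor().
    conf.setInt(CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_KEY,
        256 * 1024);
    conf.setBoolean(CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
        true);

    Lz4Codec codec = new Lz4Codec();
    codec.setConf(conf);
    // Returns an Lz4Compressor using the 256 KB buffer and HC mode set above.
    Compressor compressor = codec.createCompressor();
    System.out.println(compressor.getClass().getName());
  }
}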
Code example origin: io.prestosql.hadoop/hadoop-apache
lz4LibraryName = Lz4Codec.getLibraryName();
Code example origin: org.apache.hadoop/hadoop-common
/**
* Create a {@link CompressionInputStream} that will read from the given
* {@link InputStream} with the given {@link Decompressor}.
*
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
* @throws IOException
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor)
throws IOException {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
return new BlockDecompressorStream(in, decompressor, conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_DEFAULT));
}
Code example origin: io.hops/hadoop-common
lz4LibraryName = Lz4Codec.getLibraryName();
Code example origin: org.apache.hadoop/hadoop-common
/**
* Create a {@link CompressionOutputStream} that will write to the given
* {@link OutputStream} with the given {@link Compressor}.
*
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
* @throws IOException
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor)
throws IOException {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
int bufferSize = conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_BUFFERSIZE_DEFAULT);
int compressionOverhead = bufferSize/255 + 16;
return new BlockCompressorStream(out, compressor, bufferSize,
compressionOverhead);
}
Code example origin: com.github.jiayuhan-it/hadoop-common
@Test
public void testNativeCodeLoaded() {
if (requireTestJni() == false) {
LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
return;
}
if (!NativeCodeLoader.isNativeCodeLoaded()) {
fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
"libhadoop.so was not loaded.");
}
assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
// library names depend on the platform and build environment,
// so just check that names are available
assertFalse(ZlibFactory.getLibraryName().isEmpty());
if (NativeCodeLoader.buildSupportsSnappy()) {
assertFalse(SnappyCodec.getLibraryName().isEmpty());
}
if (NativeCodeLoader.buildSupportsOpenssl()) {
assertFalse(OpensslCipher.getLibraryName().isEmpty());
}
assertFalse(Lz4Codec.getLibraryName().isEmpty());
LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
}
Code example origin: com.github.jiayuhan-it/hadoop-common
/**
* Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
*
* @return the type of decompressor needed by this codec.
*/
@Override
public Class<? extends Decompressor> getDecompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
return Lz4Decompressor.class;
}
Code example origin: ch.cern.hadoop/hadoop-common
@Test
public void testNativeCodeLoaded() {
if (requireTestJni() == false) {
LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
return;
}
if (!NativeCodeLoader.isNativeCodeLoaded()) {
fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
"libhadoop.so was not loaded.");
}
assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
// library names depend on the platform and build environment,
// so just check that names are available
assertFalse(ZlibFactory.getLibraryName().isEmpty());
if (NativeCodeLoader.buildSupportsSnappy()) {
assertFalse(SnappyCodec.getLibraryName().isEmpty());
}
if (NativeCodeLoader.buildSupportsOpenssl()) {
assertFalse(OpensslCipher.getLibraryName().isEmpty());
}
assertFalse(Lz4Codec.getLibraryName().isEmpty());
LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
}
Code example origin: ch.cern.hadoop/hadoop-common
/**
* Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
*
* @return the type of decompressor needed by this codec.
*/
@Override
public Class<? extends Decompressor> getDecompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
return Lz4Decompressor.class;
}
Code example origin: ch.cern.hadoop/hadoop-common
/**
* Get the type of {@link Compressor} needed by this {@link CompressionCodec}.
*
* @return the type of compressor needed by this codec.
*/
@Override
public Class<? extends Compressor> getCompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native lz4 library not available");
}
return Lz4Compressor.class;
}