This article collects code examples for the Java class org.apache.avro.file.FileReader and demonstrates how the class is used. The examples are extracted from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they make solid reference material. Details of the FileReader class are as follows:
Package: org.apache.avro.file
Class: FileReader
About FileReader
Interface for reading data from a file.
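To orient the examples below, here is a minimal usage sketch that is not taken from any of the quoted projects: it opens an Avro data file with DataFileReader (the standard FileReader implementation), iterates over the records, and closes the reader. The file name "users.avro" and the use of GenericRecord are assumptions for illustration.

import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.FileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;

import java.io.File;
import java.io.IOException;

public class FileReaderDemo {
  public static void main(String[] args) throws IOException {
    File file = new File("users.avro"); // hypothetical input file
    // DataFileReader is the standard FileReader implementation for Avro container files.
    FileReader<GenericRecord> reader =
        new DataFileReader<>(file, new GenericDatumReader<GenericRecord>());
    try {
      while (reader.hasNext()) {      // FileReader iterates over the file's records
        GenericRecord record = reader.next();
        System.out.println(record);
      }
    } finally {
      reader.close();                 // FileReader extends Closeable
    }
  }
}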
Code Examples
Example source: apache/avro

public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
  throws IOException {
  if (!reader.hasNext() || reader.pastSync(end))
    return false;
  wrapper.datum(reader.next(wrapper.datum()));
  return true;
}
Example source: apache/hive

@Override
public void close() throws IOException {
  if (isEmptyInput == false)
    reader.close();
}
Example source: apache/avro

protected AvroAsTextRecordReader(FileReader<T> reader, FileSplit split)
  throws IOException {
  this.reader = reader;
  reader.sync(split.getStart()); // sync to start
  this.start = reader.tell();
  this.end = split.getStart() + split.getLength();
}
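The two apache/avro snippets above show the standard split-reading pattern: the constructor calls sync() to jump to the first synchronization point at or after the split start, and next() stops as soon as pastSync(end) reports that the reader has crossed the split boundary. A condensed, hedged sketch of the same loop follows; the file name and the splitStart/splitEnd bounds are assumptions, and imports are as in the sketch above.

FileReader<GenericRecord> reader =
    new DataFileReader<>(new File("part-00000.avro"),   // hypothetical split file
        new GenericDatumReader<GenericRecord>());
long splitStart = 0L;        // assumed byte offset where this split begins
long splitEnd = 64 * 1024;   // assumed byte offset where this split ends
reader.sync(splitStart);     // advance to the first sync point at/after splitStart
while (reader.hasNext() && !reader.pastSync(splitEnd)) {
  GenericRecord record = reader.next();
  // process record ...
}
reader.close();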
Example source: apache/hive

@Override
public boolean next(NullWritable nullWritable, AvroGenericRecordWritable record) throws IOException {
  if (isEmptyInput || !reader.hasNext() || reader.pastSync(stop)) {
    return false;
  }
  GenericData.Record r = (GenericData.Record) reader.next();
  record.setRecord(r);
  record.setRecordReaderID(recordReaderID);
  record.setFileSchema(reader.getSchema());
  return true;
}
Example source: org.apache.hadoop/hadoop-common

/**
 * Read a single byte from the stream.
 */
@Override
public int read() throws IOException {
  if (pos < buffer.length) {
    return buffer[pos++];
  }
  if (!fileReader.hasNext()) {
    return -1;
  }
  writer.write(fileReader.next(), encoder);
  encoder.flush();
  if (!fileReader.hasNext()) {
    // Write a new line after the last Avro record.
    output.write(System.getProperty("line.separator")
        .getBytes(StandardCharsets.UTF_8));
    output.flush();
  }
  pos = 0;
  buffer = output.toByteArray();
  output.reset();
  return read();
}
Example source: kite-sdk/kite

List<GenericData.Record> expecteds = new ArrayList<GenericData.Record>();
FileReader<GenericData.Record> reader =
    new DataFileReader<GenericData.Record>(file, new GenericDatumReader<GenericData.Record>());
Schema schema = reader.getSchema();
while (reader.hasNext()) {
  GenericData.Record expected = reader.next();
  expecteds.add(expected);
}
Example source: apache/avro

@Test
public void testSerialization() throws Exception {
  Schema testSchema = getTestSchema();
  GenericRecord message = new Record(testSchema);
  message.put("name", "testValue");
  byte[] data = getSerializedMessage(message, testSchema);
  GenericDatumReader<IndexedRecord> reader = new GenericDatumReader<>(testSchema);
  SeekableInput in = new SeekableByteArrayInput(data);
  FileReader<IndexedRecord> dfr = null;
  IndexedRecord result = null;
  try {
    dfr = DataFileReader.openReader(in, reader);
    result = dfr.next();
  } finally {
    if (dfr != null) {
      dfr.close();
    }
  }
  Assert.assertNotNull(result);
  Assert.assertTrue(result instanceof GenericRecord);
  Assert.assertEquals(new Utf8("testValue"), ((GenericRecord) result).get("name"));
}
Example source: org.apache.pig/pig

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  if (reader.pastSync(end)) {
    return false;
  }
  try {
    currentRecord = reader.next(new GenericData.Record(schema));
  } catch (NoSuchElementException e) {
    return false;
  } catch (IOException ioe) {
    reader.sync(reader.tell() + 1);
    throw ioe;
  }
  return true;
}
Example source: apache/sqoop

private boolean checkAvroFileForLine(FileSystem fs, Path p, List record)
    throws IOException {
  SeekableInput in = new FsInput(p, new Configuration());
  DatumReader datumReader = new GenericDatumReader();
  FileReader reader = DataFileReader.openReader(in, datumReader);
  reader.sync(0);
  while (reader.hasNext()) {
    if (valueMatches(reader.next(), record)) {
      return true;
    }
  }
  return false;
}
Example source: apache/flink

dataFileReader.next(rec);
Example source: qubole/streamx

@Override
public Schema getSchema(Configuration conf, Path path) throws IOException {
  SeekableInput input = new FsInput(path, conf);
  DatumReader reader = new GenericDatumReader<>();
  FileReader fileReader = DataFileReader.openReader(input, reader);
  org.apache.avro.Schema schema = fileReader.getSchema();
  fileReader.close();
  return avroData.toConnectSchema(schema);
}
Example source: org.apache.pig/pig

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  if (reader.pastSync(end)) {
    return false;
  }
  try {
    currentArray = reader.next();
  } catch (NoSuchElementException e) {
    return false;
  }
  return true;
}
Example source: stratosphere/stratosphere

@Override
public boolean reachedEnd() throws IOException {
  return !dataFileReader.hasNext();
}
Example source: Netflix/iceberg

@Override
public void sync(long position) throws IOException {
  reader.sync(position);
}
Example source: apache/hive

@Override
public long getPos() throws IOException {
  return isEmptyInput ? 0 : reader.tell();
}
Example source: Netflix/iceberg

@Override
public boolean hasNext() {
  try {
    return (reader.hasNext() && !reader.pastSync(end));
  } catch (IOException e) {
    throw new RuntimeIOException(e, "Failed to check range end: %d", end);
  }
}
Example source: org.apache.hadoop/hadoop-common

public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
      DataFileReader.openReader(new AvroFSInput(fc, status.getPath()), reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  encoder = EncoderFactory.get().jsonEncoder(schema, output);
}
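Together with the read() method shown earlier, this constructor backs the text display of Avro files in Hadoop's shell: each record is re-encoded as JSON through a GenericDatumWriter and a JsonEncoder bound to the file's schema. Below is a condensed, hedged sketch of the same Avro-to-JSON idea as a standalone loop; the input path is an assumption, and imports from org.apache.avro, org.apache.avro.file, org.apache.avro.generic, org.apache.avro.io, and java.io are omitted for brevity.

FileReader<Object> fileReader = DataFileReader.openReader(
    new File("input.avro"), new GenericDatumReader<Object>()); // hypothetical input
Schema schema = fileReader.getSchema();
GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
ByteArrayOutputStream output = new ByteArrayOutputStream();
Encoder encoder = EncoderFactory.get().jsonEncoder(schema, output);
while (fileReader.hasNext()) {
  writer.write(fileReader.next(), encoder); // re-encode each datum as JSON
}
encoder.flush();
fileReader.close();
System.out.println(output.toString("UTF-8"));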
Example source: com.google.cloud.bigdataoss/bigquery-connector

dataFileReader.sync(fileSplit.getStart());
schema = dataFileReader.getSchema();
currentRecord = new GenericData.Record(schema);
Example source: Netflix/iceberg

@Override
public boolean pastSync(long position) throws IOException {
  return reader.pastSync(position);
}
Example source: com.facebook.presto.hive/hive-apache

@Override
public boolean next(NullWritable nullWritable, AvroGenericRecordWritable record) throws IOException {
  if (!reader.hasNext() || reader.pastSync(stop)) {
    return false;
  }
  GenericData.Record r = (GenericData.Record) reader.next();
  record.setRecord(r);
  record.setRecordReaderID(recordReaderID);
  record.setFileSchema(reader.getSchema());
  return true;
}