diff options
author | Parth Chandra <pchandra@maprtech.com> | 2014-08-23 21:48:04 -0700 |
---|---|---|
committer | Jacques Nadeau <jacques@apache.org> | 2014-08-24 08:28:34 -0700 |
commit | 4216e0e2c60cf17caa678cee685cbfc2ca4e819a (patch) | |
tree | b30c1712bd4512403131751ac1e82fafca45bc70 /exec/java-exec/src | |
parent | ed72c1370fe98de28bebc587e063e1728a8027c3 (diff) |
Pass correct uncompressed data size to BytesInput ctor in ComplexParquetReader
Diffstat (limited to 'exec/java-exec/src')
-rw-r--r-- | exec/java-exec/src/main/java/parquet/hadoop/ColumnChunkIncReadStore.java | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/exec/java-exec/src/main/java/parquet/hadoop/ColumnChunkIncReadStore.java b/exec/java-exec/src/main/java/parquet/hadoop/ColumnChunkIncReadStore.java
index 379d3e6a5..516be0ee0 100644
--- a/exec/java-exec/src/main/java/parquet/hadoop/ColumnChunkIncReadStore.java
+++ b/exec/java-exec/src/main/java/parquet/hadoop/ColumnChunkIncReadStore.java
@@ -147,7 +147,7 @@ public class ColumnChunkIncReadStore implements PageReadStore {
       ByteBuffer buffer = buf.nioBuffer(0, pageHeader.compressed_page_size);
       CompatibilityUtil.getBuf(in, buffer, pageHeader.compressed_page_size);
       return new Page(
-              decompressor.decompress(BytesInput.from(buffer, 0, pageHeader.compressed_page_size), pageHeader.compressed_page_size),
+              decompressor.decompress(BytesInput.from(buffer, 0, pageHeader.compressed_page_size), pageHeader.getUncompressed_page_size()),
               pageHeader.data_page_header.num_values,
               pageHeader.uncompressed_page_size,
               parquetMetadataConverter.fromParquetStatistics(pageHeader.data_page_header.statistics, columnDescriptor.getType()),