addressed review comments
kiszk committed Aug 7, 2017
commit ff324fd9f588ace1f4f16d62b71d2de6842de102
CachedBatchColumnVector.java
@@ -27,7 +27,7 @@
 import org.apache.spark.unsafe.types.UTF8String;

 /**
- * A column backed by data compressed thru ColumnAccessor
+ * A column vector backed by data compressed thru ColumnAccessor
  * this is a wrapper to read compressed data for table cache
  */
 public final class CachedBatchColumnVector extends ReadOnlyColumnVector {
@@ -51,7 +51,6 @@ public final class CachedBatchColumnVector extends ReadOnlyColumnVector {
   public CachedBatchColumnVector(byte[] buffer, int numRows, DataType type) {
     super(numRows, type, MemoryMode.ON_HEAP);
     initialize(buffer, type);
-    reset();
   }

   @Override
@@ -233,16 +232,13 @@ public final UTF8String getUTF8String(int rowId) {
   }

   private void initialize(byte[] buffer, DataType type) {
-    if (columnAccessor == null) {
-      ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
-      columnAccessor = ColumnAccessor$.MODULE$.apply(type, byteBuffer);
-    }
-    if (mutableRow == null) {
-      unsafeRow = new UnsafeRow(1);
-      bufferHolder = new BufferHolder(unsafeRow);
-      rowWriter = new UnsafeRowWriter(bufferHolder, 1);
-      mutableRow = new MutableUnsafeRow(rowWriter);
-    }
+    ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
+    columnAccessor = ColumnAccessor$.MODULE$.apply(type, byteBuffer);
+
+    unsafeRow = new UnsafeRow(1);
+    bufferHolder = new BufferHolder(unsafeRow);
+    rowWriter = new UnsafeRowWriter(bufferHolder, 1);
+    mutableRow = new MutableUnsafeRow(rowWriter);

     if (type instanceof ArrayType) {
       throw new UnsupportedOperationException();
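A note on the simplification: initialize is private and, with reset() gone from the constructor, appears to be reached exactly once per instance, so the null guards around columnAccessor and mutableRow no longer protect anything. A minimal round-trip sketch in Scala of the API this class exposes, assuming ColumnBuilderHelper (the test helper from the suite below) returns a ColumnBuilder whose build() yields the serialized ByteBuffer, and using the usual ColumnVector accessors isNullAt/getInt; an illustration, not the committed code:

    // Sketch only: round-trip a compressed column through CachedBatchColumnVector.
    // ColumnBuilderHelper is the test helper from ColumnarBatchSuite below;
    // build().array() is assumed to expose the heap-backed bytes.
    import org.apache.spark.sql.catalyst.expressions.SpecificInternalRow
    import org.apache.spark.sql.types.IntegerType

    val dataType = IntegerType
    val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
    val row = new SpecificInternalRow(Array(dataType))

    row.setNullAt(0)                 // row 0: null
    columnBuilder.appendFrom(row, 0)
    for (i <- 1 until 16) {
      row.setInt(0, i)               // rows 1..15: the value i
      columnBuilder.appendFrom(row, 0)
    }

    val buffer = columnBuilder.build().array()
    val vector = new CachedBatchColumnVector(buffer, 16, dataType)
    assert(vector.isNullAt(0))
    for (i <- 1 until 16) assert(vector.getInt(i) == i)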
ColumnarBatchSuite.scala
@@ -1257,9 +1257,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setBoolean(0, i % 2 == 0)
       columnBuilder.appendFrom(row, 0)
@@ -1281,9 +1280,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setByte(0, i.toByte)
       columnBuilder.appendFrom(row, 0)
@@ -1305,9 +1303,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setShort(0, i.toShort)
       columnBuilder.appendFrom(row, 0)
@@ -1329,9 +1326,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setInt(0, i)
       columnBuilder.appendFrom(row, 0)
@@ -1353,9 +1349,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setLong(0, i.toLong)
       columnBuilder.appendFrom(row, 0)
@@ -1377,9 +1372,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setFloat(0, i.toFloat)
       columnBuilder.appendFrom(row, 0)
@@ -1401,9 +1395,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
     val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow(false)
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
       row.setDouble(0, i.toDouble)
       columnBuilder.appendFrom(row, 0)
@@ -1423,13 +1416,12 @@
   test("CachedBatch String type Apis") {
     val dataType = StringType
     val columnBuilder = ColumnBuilderHelper(dataType, 1024, "col", true)
+    val row = new SpecificInternalRow(Array(dataType))

-    val nullRow = InternalRow("")
-    nullRow.setNullAt(0)
-    columnBuilder.appendFrom(nullRow, 0)
+    row.setNullAt(0)
+    columnBuilder.appendFrom(row, 0)
     for (i <- 1 until 16) {
-      val converter = UnsafeProjection.create(Array[DataType](dataType))
-      val row = converter.apply(InternalRow(UTF8String.fromString((i % 4).toString)))
+      row.update(0, UTF8String.fromString((i % 4).toString))
       columnBuilder.appendFrom(row, 0)
     }
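The String case also drops the per-iteration UnsafeProjection: a SpecificInternalRow can carry a UTF8String directly via update, so one mutable row serves both the null row and every value row. A hedged read-back sketch for this case; getUTF8String is the accessor named in the CachedBatchColumnVector hunk above, and columnBuilder.build().array() is assumed to expose the serialized bytes, as in the earlier sketch:

    // Sketch only: verify the string column written by the test pattern above.
    import org.apache.spark.unsafe.types.UTF8String

    val buffer = columnBuilder.build().array()
    val vector = new CachedBatchColumnVector(buffer, 16, StringType)
    assert(vector.isNullAt(0))         // row 0 was appended as null
    for (i <- 1 until 16) {
      // values cycle through "1", "2", "3", "0" because of (i % 4)
      assert(vector.getUTF8String(i) == UTF8String.fromString((i % 4).toString))
    }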