Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
fix hive again...
  • Loading branch information
cloud-fan committed Jul 30, 2015
commit 6dbfa6fc44ac7833b78838db4d7e08ecd3a0a3d7
Original file line number Diff line number Diff line change
Expand Up @@ -591,8 +591,8 @@ private[hive] object HiveContext {
struct.toSeq.zip(fields).map {
case (v, t) => s""""${t.name}":${toHiveStructString(v, t.dataType)}"""
}.mkString("{", ",", "}")
case (array: ArrayData, ArrayType(typ, _)) =>
array.toArray().map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
case (seq: Seq[_], ArrayType(typ, _)) =>
seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
case (map: Map[_, _], MapType(kType, vType, _)) =>
map.map {
case (key, value) =>
Expand All @@ -614,8 +614,8 @@ private[hive] object HiveContext {
struct.toSeq.zip(fields).map {
case (v, t) => s""""${t.name}":${toHiveStructString(v, t.dataType)}"""
}.mkString("{", ",", "}")
case (array: ArrayData, ArrayType(typ, _)) =>
array.toArray().map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
case (seq: Seq[_], ArrayType(typ, _)) =>
seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
case (map: Map[_, _], MapType(kType, vType, _)) =>
map.map {
case (key, value) =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,8 @@ import scala.collection.JavaConversions._
* java.sql.Timestamp
* Complex Types =>
* Map: scala.collection.immutable.Map
* List: scala.collection.immutable.Seq
* Struct:
* [[org.apache.spark.sql.catalyst.InternalRow]]
* List: [[org.apache.spark.sql.types.ArrayData]]
* Struct: [[org.apache.spark.sql.catalyst.InternalRow]]
* Union: NOT SUPPORTED YET
 * These complex types play the role of containers, which can hold arbitrary data types.
*
Expand Down Expand Up @@ -297,7 +296,10 @@ private[hive] trait HiveInspectors {
}.toMap
case li: StandardConstantListObjectInspector =>
// take the value from the list inspector object, rather than the input data
li.getWritableConstantValue.map(unwrap(_, li.getListElementObjectInspector)).toSeq
val values = li.getWritableConstantValue
.map(unwrap(_, li.getListElementObjectInspector))
.toArray
new GenericArrayData(values)
// if the value is null, we don't care about the object inspector type
case _ if data == null => null
case poi: VoidObjectInspector => null // always be null for void object inspector
Expand Down Expand Up @@ -339,7 +341,10 @@ private[hive] trait HiveInspectors {
}
case li: ListObjectInspector =>
Option(li.getList(data))
.map(_.map(unwrap(_, li.getListElementObjectInspector)).toSeq)
.map { l =>
val values = l.map(unwrap(_, li.getListElementObjectInspector)).toArray
new GenericArrayData(values)
}
.orNull
case mi: MapObjectInspector =>
Option(mi.getMap(data)).map(
Expand Down Expand Up @@ -391,7 +396,13 @@ private[hive] trait HiveInspectors {

case loi: ListObjectInspector =>
val wrapper = wrapperFor(loi.getListElementObjectInspector)
(o: Any) => if (o != null) seqAsJavaList(o.asInstanceOf[Seq[_]].map(wrapper)) else null
(o: Any) => {
if (o != null) {
seqAsJavaList(o.asInstanceOf[ArrayData].toArray().map(wrapper))
} else {
null
}
}

case moi: MapObjectInspector =>
// The Predef.Map is scala.collection.immutable.Map.
Expand Down