Skip to content
Closed
Prev Previous commit
Next Next commit
Remove asFormat methods.
  • Loading branch information
rdblue committed Aug 30, 2019
commit e424c2c36ef2c38a689f90a12fe13ca4ff9a6098
Original file line number Diff line number Diff line change
Expand Up @@ -358,56 +358,6 @@ trait CreateTableWriter[T] extends WriteConfigMethods[CreateTableWriter[T]] {
*/
def using(provider: String): CreateTableWriter[T]

/**
 * Shortcut that selects the built-in "csv" data source provider.
 *
 * Identical in effect to calling:
 * {{{
 *   using("csv")
 * }}}
 */
def asCsv: CreateTableWriter[T] = using("csv")

/**
 * Shortcut that selects the built-in "text" data source provider.
 *
 * Identical in effect to calling:
 * {{{
 *   using("text")
 * }}}
 */
def asText: CreateTableWriter[T] = using("text")

/**
 * Shortcut that selects the built-in "json" data source provider.
 *
 * Identical in effect to calling:
 * {{{
 *   using("json")
 * }}}
 */
def asJson: CreateTableWriter[T] = using("json")

/**
 * Shortcut that selects the built-in "parquet" data source provider.
 *
 * Identical in effect to calling:
 * {{{
 *   using("parquet")
 * }}}
 */
def asParquet: CreateTableWriter[T] = using("parquet")

/**
 * Shortcut that selects the built-in "orc" data source provider.
 *
 * Identical in effect to calling:
 * {{{
 *   using("orc")
 * }}}
 */
def asOrc: CreateTableWriter[T] = using("orc")

/**
* Add a table property.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -275,56 +275,6 @@ class DataFrameWriterV2Suite extends QueryTest with SharedSparkSession with Befo
assert(table.properties === Map("prop" -> "value").asJava)
}

test("Create: asText") {
  // Creating via the asText shortcut should record "text" as the table's provider.
  spark.table("source").writeTo("testcat.table_name").asText.create()

  val testCatalog = catalog("testcat").asInstanceOf[TestInMemoryTableCatalog]
  val created = testCatalog.loadTable(Identifier.of(Array(), "table_name"))

  assert(created.name === "testcat.table_name")
  assert(created.properties === Map("provider" -> "text").asJava)
}

test("Create: asCsv") {
  // Creating via the asCsv shortcut should record "csv" as the table's provider.
  spark.table("source").writeTo("testcat.table_name").asCsv.create()

  val testCatalog = catalog("testcat").asInstanceOf[TestInMemoryTableCatalog]
  val created = testCatalog.loadTable(Identifier.of(Array(), "table_name"))

  assert(created.name === "testcat.table_name")
  assert(created.properties === Map("provider" -> "csv").asJava)
}

test("Create: asJson") {
  // Creating via the asJson shortcut should record "json" as the table's provider.
  spark.table("source").writeTo("testcat.table_name").asJson.create()

  val testCatalog = catalog("testcat").asInstanceOf[TestInMemoryTableCatalog]
  val created = testCatalog.loadTable(Identifier.of(Array(), "table_name"))

  assert(created.name === "testcat.table_name")
  assert(created.properties === Map("provider" -> "json").asJava)
}

test("Create: asParquet") {
  // Creating via the asParquet shortcut should record "parquet" as the table's provider.
  spark.table("source").writeTo("testcat.table_name").asParquet.create()

  val testCatalog = catalog("testcat").asInstanceOf[TestInMemoryTableCatalog]
  val created = testCatalog.loadTable(Identifier.of(Array(), "table_name"))

  assert(created.name === "testcat.table_name")
  assert(created.properties === Map("provider" -> "parquet").asJava)
}

test("Create: asOrc") {
  // Creating via the asOrc shortcut should record "orc" as the table's provider.
  spark.table("source").writeTo("testcat.table_name").asOrc.create()

  val testCatalog = catalog("testcat").asInstanceOf[TestInMemoryTableCatalog]
  val created = testCatalog.loadTable(Identifier.of(Array(), "table_name"))

  assert(created.name === "testcat.table_name")
  assert(created.properties === Map("provider" -> "orc").asJava)
}

test("Create: identity partitioned table") {
spark.table("source").writeTo("testcat.table_name").partitionedBy($"id").create()

Expand Down