diff --git a/src/main/scala/com/databricks/spark/redshift/RedshiftRelation.scala b/src/main/scala/com/databricks/spark/redshift/RedshiftRelation.scala
index 31dc11b2..1c476e16 100644
--- a/src/main/scala/com/databricks/spark/redshift/RedshiftRelation.scala
+++ b/src/main/scala/com/databricks/spark/redshift/RedshiftRelation.scala
@@ -131,7 +131,6 @@ private[redshift] case class RedshiftRelation(
       // Unload data from Redshift into a temporary directory in S3:
       val tempDir = params.createPerQueryTempDir()
       val unloadSql = buildUnloadStmt(requiredColumns, filters, tempDir, creds)
-      log.info(unloadSql)
       val conn = jdbcWrapper.getConnector(params.jdbcDriver, params.jdbcUrl, params.credentials)
       try {
         jdbcWrapper.executeInterruptibly(conn.prepareStatement(unloadSql))
@@ -189,6 +188,7 @@ private[redshift] case class RedshiftRelation(
       val escapedTableNameOrSubqury = tableNameOrSubquery.replace("\\", "\\\\").replace("'", "\\'")
       s"SELECT $columnList FROM $escapedTableNameOrSubqury $whereClause"
     }
+    log.info(query)
     // We need to remove S3 credentials from the unload path URI because they will conflict with
     // the credentials passed via `credsString`.
     val fixedUrl = Utils.fixS3Url(Utils.removeCredentialsFromURI(new URI(tempDir)).toString)
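
For context, a minimal sketch of what this change accomplishes. This is not the library's actual buildUnloadStmt; the query, credentials string, and S3 path below are made-up placeholders. The point it illustrates: the full UNLOAD statement embeds an S3 credentials string, so logging it (as the removed log.info(unloadSql) did) would write secrets into the logs, whereas the inner SELECT built in buildUnloadStmt is safe to log.

    // Standalone sketch; nothing here is the library's real code.
    object UnloadLoggingSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical inner query, as built just before the new log.info(query) call:
        val query = """SELECT "id", "name" FROM my_table WHERE "id" > 42"""
        // Hypothetical credentials string of the kind passed via `credsString`:
        val credsString = "aws_access_key_id=AKIA...;aws_secret_access_key=..."
        val tempDir = "s3://bucket/temp/"
        // Simplified shape of the statement sent to Redshift: the inner query is
        // single-quoted, so quotes in it must be escaped, and the credentials
        // string is embedded directly in the statement text.
        val unloadSql =
          s"UNLOAD ('${query.replace("'", "\\'")}') TO '$tempDir' CREDENTIALS '$credsString'"
        println(query)       // safe to log: contains no credentials
        // println(unloadSql) // would expose credsString in the logs
      }
    }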