This repository was archived by the owner on May 13, 2025. It is now read-only.

Commit 967dddb

Merge pull request #3 from Yelp/fdc_DATALAKE-486_avoid-log-creds
datalake-486 avoid log creds
2 parents 040b4a9 + 3230aaa commit 967dddb

File tree

1 file changed: +1 −1 lines changed


src/main/scala/com/databricks/spark/redshift/RedshiftRelation.scala

Lines changed: 1 addition & 1 deletion
@@ -131,7 +131,6 @@ private[redshift] case class RedshiftRelation(
     // Unload data from Redshift into a temporary directory in S3:
     val tempDir = params.createPerQueryTempDir()
     val unloadSql = buildUnloadStmt(requiredColumns, filters, tempDir, creds)
-    log.info(unloadSql)
     val conn = jdbcWrapper.getConnector(params.jdbcDriver, params.jdbcUrl, params.credentials)
     try {
       jdbcWrapper.executeInterruptibly(conn.prepareStatement(unloadSql))
@@ -189,6 +188,7 @@ private[redshift] case class RedshiftRelation(
       val escapedTableNameOrSubqury = tableNameOrSubquery.replace("\\", "\\\\").replace("'", "\\'")
       s"SELECT $columnList FROM $escapedTableNameOrSubqury $whereClause"
     }
+    log.info(query)
     // We need to remove S3 credentials from the unload path URI because they will conflict with
     // the credentials passed via `credsString`.
     val fixedUrl = Utils.fixS3Url(Utils.removeCredentialsFromURI(new URI(tempDir)).toString)
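For context, here is a minimal sketch of why moving the logging call matters. It is an illustrative simplification, not the project's actual code: the name buildUnloadStmt is kept, but the parameter list, the UNLOAD template, and the use of an slf4j logger are assumptions. The full UNLOAD statement interpolates the S3 credentials string into the SQL text, so logging unloadSql writes AWS keys into the logs; logging the inner SELECT (query), built before the credentials are attached, does not.

```scala
// Illustrative sketch only: signatures and the UNLOAD template are assumed,
// simplified from what RedshiftRelation actually does.
import org.slf4j.LoggerFactory

object UnloadLoggingSketch {
  private val log = LoggerFactory.getLogger(getClass)

  // Builds the UNLOAD statement. The credentials string is interpolated into
  // the SQL text, so the returned string must never be logged verbatim.
  def buildUnloadStmt(query: String, tempDir: String, credsString: String): String = {
    // Safe to log: the inner SELECT contains no secrets.
    log.info(query)
    val escapedQuery = query.replace("\\", "\\\\").replace("'", "\\'")
    s"UNLOAD ('$escapedQuery') TO '$tempDir' WITH CREDENTIALS '$credsString' ESCAPE"
  }

  def main(args: Array[String]): Unit = {
    val unloadSql = buildUnloadStmt(
      query = "SELECT id, name FROM users",
      tempDir = "s3n://some-bucket/tmp/run-1/",
      credsString = "aws_access_key_id=<key>;aws_secret_access_key=<secret>"
    )
    // Unsafe: this is the kind of call the commit removes, because unloadSql
    // embeds the credentials string built above.
    // log.info(unloadSql)
    println(unloadSql.length) // use the statement without printing its contents
  }
}
```

Under these assumptions, the added log.info(query) keeps the generated SELECT visible for debugging while the credential-bearing UNLOAD text is never written to the logs.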
