Skip to content

Commit

Permalink
Instead of creating a new SparkConf, use existing SparkConf in StoreHiveCatalog.CatalogQuery#initCatalog (#1390)

Browse files Browse the repository at this point in the history
  • Loading branch information
dshirish authored Jul 30, 2019
1 parent 73126e9 commit 187809f
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,9 @@ import com.pivotal.gemfirexd.internal.engine.locks.GfxdLockSet
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore
import com.pivotal.gemfirexd.internal.impl.sql.catalog.GfxdDataDictionary
import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState
import com.pivotal.gemfirexd.Attribute.{USERNAME_ATTR, PASSWORD_ATTR}
import io.snappydata.Constant
import io.snappydata.Constant.{SPARK_STORE_PREFIX, STORE_PROPERTY_PREFIX}
import io.snappydata.sql.catalog.SnappyExternalCatalog.checkSchemaPermission
import io.snappydata.sql.catalog.{CatalogObjectType, ConnectorExternalCatalog, SnappyExternalCatalog}
import io.snappydata.thrift._
Expand All @@ -51,7 +53,7 @@ import org.apache.spark.sql.policy.PolicyProperties
import org.apache.spark.sql.sources.JdbcExtendedUtils.{toLowerCase, toUpperCase}
import org.apache.spark.sql.sources.{DataSourceRegister, JdbcExtendedUtils}
import org.apache.spark.sql.{AnalysisException, SnappyContext}
import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.{Logging, SparkConf, SparkEnv}

class StoreHiveCatalog extends ExternalCatalog with Logging {

Expand Down Expand Up @@ -419,7 +421,16 @@ class StoreHiveCatalog extends ExternalCatalog with Logging {
var done = false
while (!done) {
try {
val conf = new SparkConf
val conf = SparkEnv.get match {
case null => new SparkConf
case env =>
val sparkConf = env.conf.clone()
sparkConf.remove(SPARK_STORE_PREFIX + USERNAME_ATTR)
sparkConf.remove(STORE_PROPERTY_PREFIX + USERNAME_ATTR)
sparkConf.remove(SPARK_STORE_PREFIX + PASSWORD_ATTR)
sparkConf.remove(STORE_PROPERTY_PREFIX + PASSWORD_ATTR)
sparkConf
}
for ((k, v) <- Misc.getMemStoreBooting.getBootProperties.asScala) {
val key = k.toString
if ((v ne null) && (key.startsWith(Constant.SPARK_PREFIX) ||
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,8 @@ object HiveClientUtil extends Logging {
val propertyNames = props.stringPropertyNames.iterator()
while (propertyNames.hasNext) {
val name = propertyNames.next()
System.setProperty(name, props.getProperty(name))
val value = props.getProperty(name)
if (value ne null) System.setProperty(name, value)
}

// set integer properties after the system properties have been used by
Expand Down

0 comments on commit 187809f

Please sign in to comment.