diff --git a/cluster/src/main/scala/io/snappydata/ToolsCallbackImpl.scala b/cluster/src/main/scala/io/snappydata/ToolsCallbackImpl.scala index 3e9d0ffe59..9461ab40e1 100644 --- a/cluster/src/main/scala/io/snappydata/ToolsCallbackImpl.scala +++ b/cluster/src/main/scala/io/snappydata/ToolsCallbackImpl.scala @@ -17,6 +17,7 @@ package io.snappydata import java.io.File +import java.lang.reflect.InvocationTargetException import java.net.URLClassLoader import java.util.UUID @@ -68,7 +69,11 @@ object ToolsCallbackImpl extends ToolsCallback { }) // Close and reopen interpreter if (alias != null) { - lead.closeAndReopenInterpreterServer(); + try { + lead.closeAndReopenInterpreterServer(); + } catch { + case ite: InvocationTargetException => assert(ite.getCause.isInstanceOf[SecurityException]) + } } } diff --git a/cluster/src/test/scala/org/apache/spark/sql/store/BugTest.scala b/cluster/src/test/scala/org/apache/spark/sql/store/BugTest.scala index 061e30db7c..fe57672389 100644 --- a/cluster/src/test/scala/org/apache/spark/sql/store/BugTest.scala +++ b/cluster/src/test/scala/org/apache/spark/sql/store/BugTest.scala @@ -20,11 +20,9 @@ import java.util.Properties import com.pivotal.gemfirexd.Attribute import com.pivotal.gemfirexd.security.{LdapTestServer, SecurityTestUtils} +import io.snappydata.util.TestUtils import io.snappydata.{Constant, PlanTest, SnappyFunSuite} import org.scalatest.BeforeAndAfterAll - - - import org.junit.Assert.{assertEquals, assertFalse, assertTrue} import org.apache.spark.SparkConf @@ -32,6 +30,10 @@ import org.apache.spark.SparkConf class BugTest extends SnappyFunSuite with BeforeAndAfterAll { private val sysUser = "gemfire10" + override def beforeAll(): Unit = { + this.stopAll() + } + protected override def newSparkConf(addOn: (SparkConf) => SparkConf): SparkConf = { val ldapProperties = SecurityTestUtils.startLdapServerAndGetBootProperties(0, 0, sysUser, getClass.getResource("/auth.ldif").getPath) @@ -56,6 +58,7 @@ class BugTest extends 
SnappyFunSuite with BeforeAndAfterAll { } override def afterAll(): Unit = { + this.stopAll() val ldapServer = LdapTestServer.getInstance() if (ldapServer.isServerStarted) { ldapServer.stopService() @@ -63,13 +66,15 @@ class BugTest extends SnappyFunSuite with BeforeAndAfterAll { import com.pivotal.gemfirexd.Property.{AUTH_LDAP_SERVER, AUTH_LDAP_SEARCH_BASE} for (k <- List(Attribute.AUTH_PROVIDER, AUTH_LDAP_SERVER, AUTH_LDAP_SEARCH_BASE)) { System.clearProperty(k) + System.clearProperty("gemfirexd." + k) + System.clearProperty(Constant.STORE_PROPERTY_PREFIX + k) } System.clearProperty(Constant.STORE_PROPERTY_PREFIX + Attribute.USERNAME_ATTR) System.clearProperty(Constant.STORE_PROPERTY_PREFIX + Attribute.PASSWORD_ATTR) - super.afterAll() + System.setProperty("gemfirexd.authentication.required", "false") } - test("Bug SNAP-2255 connection pool exhaustion ") { + test("Bug SNAP-2255 connection pool exhaustion") { val user1 = "gemfire1" val user2 = "gemfire2" @@ -77,7 +82,6 @@ class BugTest extends SnappyFunSuite with BeforeAndAfterAll { snc1.snappySession.conf.set(Attribute.USERNAME_ATTR, user1) snc1.snappySession.conf.set(Attribute.PASSWORD_ATTR, user1) - snc1.sql(s"create table test (id integer," + s" name STRING) using column") snc1.sql("insert into test values (1, 'name1')") @@ -95,6 +99,5 @@ class BugTest extends SnappyFunSuite with BeforeAndAfterAll { val rs = snc2.sql(s"select * from $user1.test").collect() assertEquals(1, rs.length) } - } } diff --git a/core/src/main/scala/io/snappydata/util/ServiceUtils.scala b/core/src/main/scala/io/snappydata/util/ServiceUtils.scala index 62934c44d8..5425dc6bc2 100644 --- a/core/src/main/scala/io/snappydata/util/ServiceUtils.scala +++ b/core/src/main/scala/io/snappydata/util/ServiceUtils.scala @@ -109,8 +109,8 @@ object ServiceUtils { } } - def invokeStopFabricServer(sc: SparkContext): Unit = { - ServerManager.getServerInstance.stop(null) + def invokeStopFabricServer(sc: SparkContext, shutDownCreds: Properties = null): Unit 
= { + ServerManager.getServerInstance.stop(shutDownCreds) } def getAllLocators(sc: SparkContext): scala.collection.Map[DistributedMember, String] = { diff --git a/core/src/main/scala/org/apache/spark/sql/SnappyBaseParser.scala b/core/src/main/scala/org/apache/spark/sql/SnappyBaseParser.scala index 2645e1af67..a179073910 100644 --- a/core/src/main/scala/org/apache/spark/sql/SnappyBaseParser.scala +++ b/core/src/main/scala/org/apache/spark/sql/SnappyBaseParser.scala @@ -299,7 +299,7 @@ object SnappyParserConsts { final val exponent: CharPredicate = CharPredicate('e', 'E') final val numeric: CharPredicate = CharPredicate.Digit ++ CharPredicate('.') - final val numericSuffix: CharPredicate = CharPredicate('D', 'd', 'F', 'f', 'L', 'l') + final val numericSuffix: CharPredicate = CharPredicate('D', 'd', 'F', 'f', 'L', 'l', 'B', 'b') final val plural: CharPredicate = CharPredicate('s', 'S') final val reservedKeywords: OpenHashSet[String] = new OpenHashSet[String] diff --git a/core/src/main/scala/org/apache/spark/sql/SnappyContext.scala b/core/src/main/scala/org/apache/spark/sql/SnappyContext.scala index bc3041d31c..86c6bb7c17 100644 --- a/core/src/main/scala/org/apache/spark/sql/SnappyContext.scala +++ b/core/src/main/scala/org/apache/spark/sql/SnappyContext.scala @@ -26,6 +26,7 @@ import scala.language.implicitConversions import scala.reflect.runtime.universe.TypeTag import com.gemstone.gemfire.distributed.internal.MembershipListener import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember +import com.pivotal.gemfirexd.Attribute import com.pivotal.gemfirexd.internal.engine.Misc import com.pivotal.gemfirexd.internal.shared.common.SharedUtils import io.snappydata.util.ServiceUtils @@ -1148,7 +1149,16 @@ object SnappyContext extends Logging { // clear current hive catalog connection Hive.closeCurrent() if (ExternalStoreUtils.isLocalMode(sc)) { - ServiceUtils.invokeStopFabricServer(sc) + val props = sc.conf.getOption(Constant.STORE_PROPERTY_PREFIX 
+ + Attribute.USERNAME_ATTR) match { + case Some(user) => val prps = new java.util.Properties(); + val pass = sc.conf.get(Constant.STORE_PROPERTY_PREFIX + Attribute.PASSWORD_ATTR, "") + prps.put(com.pivotal.gemfirexd.Attribute.USERNAME_ATTR, user) + prps.put(com.pivotal.gemfirexd.Attribute.PASSWORD_ATTR, pass) + prps + case None => null + } + ServiceUtils.invokeStopFabricServer(sc, props) } // clear static objects on the driver diff --git a/core/src/main/scala/org/apache/spark/sql/SnappyDDLParser.scala b/core/src/main/scala/org/apache/spark/sql/SnappyDDLParser.scala index 4e84c5cb07..f983a25ccb 100644 --- a/core/src/main/scala/org/apache/spark/sql/SnappyDDLParser.scala +++ b/core/src/main/scala/org/apache/spark/sql/SnappyDDLParser.scala @@ -764,7 +764,6 @@ case class DeployCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val jarsstr = SparkSubmitUtils.resolveMavenCoordinates(coordinates, SparkSubmitUtils.buildIvySettings(repos, jarCache)) - logInfo(s"KN: jarstr ${jarsstr}") if (jarsstr.nonEmpty) { val jars = jarsstr.split(",") val sc = sparkSession.sparkContext diff --git a/core/src/main/scala/org/apache/spark/sql/SnappyParser.scala b/core/src/main/scala/org/apache/spark/sql/SnappyParser.scala index d567176c88..925eabe7b6 100644 --- a/core/src/main/scala/org/apache/spark/sql/SnappyParser.scala +++ b/core/src/main/scala/org/apache/spark/sql/SnappyParser.scala @@ -104,16 +104,33 @@ class SnappyParser(session: SnappySession) var index = 0 val len = s.length // use double if ending with D/d, float for F/f and long for L/l + s.charAt(len - 1) match { case 'D' | 'd' => - return newTokenizedLiteral( - java.lang.Double.parseDouble(s.substring(0, len - 1)), DoubleType) - case 'F' | 'f' => + if (s.length > 2) { + s.charAt(len - 2) match { + case 'B' | 'b' => return toDecimalLiteral(s.substring(0, len - 2), + checkExactNumeric = false) + case c if (Character.isDigit(c)) => return newTokenizedLiteral( + java.lang.Double.parseDouble(s.substring(0, len - 1)), 
DoubleType) + case _ => throw new ParseException(s"Found non numeric token $s") + } + } else { + return newTokenizedLiteral( + java.lang.Double.parseDouble(s.substring(0, len - 1)), DoubleType) + } + case 'F' | 'f' => if (Character.isDigit(s.charAt(len - 2))) { return newTokenizedLiteral( java.lang.Float.parseFloat(s.substring(0, len - 1)), FloatType) - case 'L' | 'l' => + } else { + throw new ParseException(s"Found non numeric token $s") + } + case 'L' | 'l' => if (Character.isDigit(s.charAt(len - 2))) { return newTokenizedLiteral( java.lang.Long.parseLong(s.substring(0, len - 1)), LongType) + } else { + throw new ParseException(s"Found non numeric token $s") + } case _ => } while (index < len) { @@ -144,6 +161,7 @@ class SnappyParser(session: SnappySession) } else { toDecimalLiteral(s, checkExactNumeric = false) } + } private final def updatePerTableQueryHint(tableIdent: TableIdentifier, @@ -176,8 +194,8 @@ class SnappyParser(session: SnappySession) protected final def numericLiteral: Rule1[Expression] = rule { capture(plusOrMinus.? ~ Consts.numeric. + ~ (Consts.exponent ~ - plusOrMinus.? ~ CharPredicate.Digit. +).? ~ Consts.numericSuffix.?) ~ - delimiter ~> ((s: String) => toNumericLiteral(s)) + plusOrMinus.? ~ CharPredicate.Digit. +).? ~ Consts.numericSuffix.? ~ + Consts.numericSuffix.?) 
~ delimiter ~> ((s: String) => toNumericLiteral(s)) } protected final def literal: Rule1[Expression] = rule { diff --git a/core/src/test/scala/org/apache/spark/sql/store/ColumnTableTest.scala b/core/src/test/scala/org/apache/spark/sql/store/ColumnTableTest.scala index ca860bff8a..635661a7f2 100644 --- a/core/src/test/scala/org/apache/spark/sql/store/ColumnTableTest.scala +++ b/core/src/test/scala/org/apache/spark/sql/store/ColumnTableTest.scala @@ -138,6 +138,51 @@ class ColumnTableTest count } + test("Test Bug SNAP-2308 view creation fails if query contains decimal numbers not suffixed") { + val tableName = "TEST_COLUMN" + val viewName = "TEST_VIEW" + snc.sql(s"CREATE TABLE $tableName (Col1 String, Col2 String) " + + s" USING column " + options) + + val data = Seq(("1.1", "2.2"), ("1", "2"), ("3.57", "3"), ("4.3", "4"), ("5.341", "5")) + val rdd = sc.parallelize(data) + val dataDF = snc.createDataFrame(rdd) + dataDF.write.insertInto(tableName) + + var query = s"SELECT sum(Col1) as summ FROM $tableName where col1 > .0001 having summ > .001" + snc.sql(query).collect + + snc.sql(s"create or replace view $viewName as ($query)") + + query = s"SELECT sum(Col1) as summ FROM $tableName where col1 > .0001BD having summ > .001bD" + snc.sql(query).collect + + snc.sql(s"create or replace view $viewName as ($query)") + + query = s"SELECT sum(Col1) as summ FROM $tableName having summ > .001f" + snc.sql(query).collect + + snc.sql(s"create or replace view $viewName as ($query)") + + query = s"SELECT sum(Col1) as summ FROM $tableName having summ > .001d" + snc.sql(query).collect + + snc.sql(s"create or replace view $viewName as ($query)") + + query = s"SELECT sum(Col1) as summ FROM $tableName having summ > .004ld" + var expectedException = intercept[Exception]{ snc.sql(query).collect } + assert(expectedException.isInstanceOf[ParseException]) + + query = s"SELECT sum(Col1) as summ FROM $tableName having summ > 4bl" + expectedException = intercept[Exception]{ snc.sql(query).collect }
+ assert(expectedException.isInstanceOf[ParseException]) + + snc.sql(s"drop view $viewName") + snc.sql(s"drop table $tableName") + + logInfo("Successful") + } + test("More columns -- SNAP-1345") { snc.sql(s"Create Table coltab (a INT) " + "using column options()") @@ -146,7 +191,6 @@ class ColumnTableTest } catch { case ex: SQLException => assert("42802".equals(ex.getSQLState)) } - snc.sql("drop table coltab") } diff --git a/core/src/test/scala/org/apache/spark/sql/store/ViewTest.scala b/core/src/test/scala/org/apache/spark/sql/store/ViewTest.scala index 38656c8100..6987eaf784 100644 --- a/core/src/test/scala/org/apache/spark/sql/store/ViewTest.scala +++ b/core/src/test/scala/org/apache/spark/sql/store/ViewTest.scala @@ -300,4 +300,230 @@ class ViewTest extends SnappyFunSuite { session2.sql("drop view viewOnJoin") assert(session2.sessionCatalog.tableExists("viewOnJoin") === false) } + + + test("SNAP-2342 nested query involving joins & union throws Exception") { + val session = this.snc.snappySession + + session.sql(s"create table ujli ( " + + "aagmaterial string," + + "accountassignmentgroup string," + + "accounttype string," + + "allocationcycle string," + + "allocationsegment string," + + "asset string," + + "billingdocument string," + + "billingdocumentitem string," + + "bravoequitycode string," + + "bravominorcode string," + + "bsegdocumentlinenumber string," + + "businessplace string," + + "businesstransaction string," + + "controllingarea string," + + "copadocumentnumber string," + + "copaobjectnumber string," + + "costcenter string," + + "costelement string," + + "countryofshiptocustomer string," + + "createdby string," + + "creationtime string," + + "customer string," + + "customergroup string," + + "debitcreditindicator string," + + "distributionchannel string," + + "division string," + + "documentdate string," + + "documentheadertext string," + + "documentlinenumberinsourcesystem string," + + "documentnumberinsourcesystem string," + + "documenttype 
string," + + "edgcreateditemindoc string," + + "entrydate string," + + "errorstatus string," + + "fidocumentquantity string," + + "fiscalperiod string," + + "fiscalyear string," + + "fsid string," + + "functionalareacode string," + + "glaccountcode string," + + "hleamount string," + + "indexfromcopa string," + + "itemcategory string," + + "itemtext string," + + "kitmaterial string," + + "kittype string," + + "leamount string," + + "lebillingtype string," + + "lecode string," + + "lecurrencycode string," + + "lesalesqty string," + + "lesalesqtyuom string," + + "ledgercode string," + + "localcompanycode string," + + "localdocumenttype string," + + "localfiscalperiod string," + + "localfiscalyear string," + + "localfunctionalareacode string," + + "localglaccountcode string," + + "locallecurrencycode string," + + "localledgercode string," + + "localmrccode string," + + "localprofitcenter string," + + "localsku string," + + "localversioncode string," + + "mrccode string," + + "parentdocumentnumberinsourcesystem string," + + "partnercostcenter string," + + "partnerfunctionalarea string," + + "partnerprofitcenter string," + + "partnersegment string," + + "payer string," + + "pcadocnumber string," + + "pcaitemnumber string," + + "plant string," + + "postingdate string," + + "postingkey string," + + "producthierarchy string," + + "psegment string," + + "rclnt string," + + "reference string," + + "referencedocument string," + + "referencetransaction string," + + "regionofshiptocustomer string," + + "salesdoctype string," + + "salesgroup string," + + "salesoffice string," + + "salesorder string," + + "salesorderitem string," + + "salesorganization string," + + "sectorproductgroup string," + + "shipto string," + + "sleamount string," + + "sourcesystemid string," + + "tradingpartner string," + + "transactioncode string," + + "transactioncurrencyamount string," + + "transactioncurrencycode string," + + "transactiontype string," + + "ujlkey string," + + "valuefieldfromcopa 
string," + + "vendor string," + + "versioncode string )") + + session.sql ("create table ujs (" + + "uuid string," + + "bravoequitycode string," + + "controllingarea string," + + "costcenter string," + + "creationtime string," + + "debitcreditindicator string," + + "errstatus string," + + "fiscalyear string," + + "fsid string," + + "functionalareacode string," + + "glaccountcode string," + + "hleamount string," + + "leamount string," + + "lecode string," + + "lecostelement string," + + "lecurrencycode string," + + "leplant string," + + "ledgercode string," + + "localcompanycode string," + + "localfiscalyear string," + + "localfunctionalareacode string," + + "localglaccountcode string," + + "locallecurrencycode string," + + "localledgercode string," + + "localmrccode string," + + "localprofitcenter string," + + "localversioncode string," + + "mrccode string," + + "partnerfunctionalarea string," + + "partnerprofitcenter string," + + "partnersegment string," + + "referencetransaction string," + + "sleamount string," + + "sourceadditionalkey string," + + "sourcesystemid string," + + "tradingpartner string," + + "transactioncurrencyamount string," + + "transactioncurrencycode string," + + "transactiontype string," + + "versioncode string)") + + session.sql("create table gfs (" + + "gfs string, " + + " gfsdescription string, " + + " globalfunctionalarea string )") + + session.sql("create table bravo (" + + " bravo string," + + "bravodescription string," + + " gfs string, " + + " gfsdescription string)") + + session.sql("create table gtw (" + + "gfs string," + + "gfsdescription string," + + "gtw string," + + "gtwdescription string)") + + session.sql("create table coa (" + + "accounttype string," + + "errorcode string," + + "errormessage string," + + "errorstatus string," + + "gfs string," + + "gfsdescription string," + + "globalfunctionalarea string," + + "indicevalue string," + + "localfunctionalarea string," + + "localgl string," + + "localgldescription string)") + + 
session.sql(s"create or replace view TrialBalance as " + + s"( select leUniversal,gfs,first(gfsDescription) as gfsDescription, " + + s"first(bravo) as bravo, " + + s"first(bravoDescription) as bravoDescription, first(gtw) as gtw, " + + s"first(gtwDescription) as gtwDescription, " + + s"first(globalFunctionalArea) as globalFunctionalArea," + + s"format_number(sum(credit),2) as credit," + + s" format_number(sum(debit),2) as debit,format_number(sum(total),2) as total from" + + s" ( select a.leCode as leUniversal,a.localCompanyCode as leLocal," + + s" a.mrcCode as mrcUniversal," + + s" a.sourceSystemId as sourceSystem,a.glAccountCode as gfs," + + s"a.localGlAccountCode as localGl," + + s" SUM(hleAmount) as debit,SUM(sleAmount) as credit,SUM(leAmount) as total," + + s" first(b.gfsDescription) as gfsDescription," + + s" first(b.globalFunctionalArea) as globalFunctionalArea," + + s" first((case when a.sourceSystemId='project_one' then e.localGlDescription " + + s" when a.sourceSystemId='btb_latam' then b.gfsDescription else '' end)) " + + s" as localGlDescription ," + + s" first(c.bravoDescription) as bravoDescription," + + s"first(d.gtwDescription) as gtwDescription, " + + s" first(c.bravo) as bravo, first(d.gtw) as gtw from ( select ledgerCode,leCode," + + s" localCompanyCode,mrcCode,fiscalYear,sourceSystemId,localGlAccountCode," + + s" glAccountCode,last(localFunctionalAreaCode),SUM(leAmount) as leAmount," + + s" SUM(hleAmount) as hleAmount,SUM(sleAmount) as sleAmount, glAccountCode ," + + s" 'Local GL' as accountType,localGlAccountCode as localGl from " + + s" ( select ledgerCode,leCode,localCompanyCode,mrcCode,fiscalYear,sourceSystemId," + + s" localGlAccountCode,glAccountCode,localFunctionalAreaCode,leAmount,hleAmount,sleAmount" + + s" from ujli where ledgerCode='0L' and leCode='7600' " + + s" AND fiscalYear='2017' and fiscalPeriod<=3 AND sourceSystemId='btb_latam' union all" + + s" select ledgerCode,leCode,localCompanyCode,mrcCode,fiscalYear,sourceSystemId," + + s" 
localGlAccountCode,glAccountCode,localFunctionalAreaCode,leAmount,hleAmount," + + s" sleAmount from ujs where ledgerCode='0L' and leCode='7600'" + + s" AND fiscalYear='2017' AND sourceSystemId='btb_latam' ) group by ledgerCode," + + s" leCode,localCompanyCode,mrcCode,fiscalYear,sourceSystemId," + + s" localGlAccountCode,glAccountCode ) a" + + s" left join gfs b on (a.glAccountCode=b.gfs) left join " + + s" bravo c " + + s" on (a.glAccountCode=c.gfs) left join gtw d on (a.glAccountCode=d.gfs)" + + s" left join coa e on(a.accountType=e.accountType and " + + s" a.glAccountCode = e.gfs and a.localGl = e.localGl ) group by a.leCode," + + s"a.localCompanyCode," + + s" a.mrcCode,a.sourceSystemId,a.glAccountCode,a.localGlAccountCode," + + s"c.bravo,d.gtw) group by leUniversal,gfs)") + } } diff --git a/spark b/spark index 77423b4602..e0e1048547 160000 --- a/spark +++ b/spark @@ -1 +1 @@ -Subproject commit 77423b460253794bc62b43151f0d2533af9da9be +Subproject commit e0e1048547fe2857ef492301bb6ceee71db3c22f