diff --git a/docs/exclusive.md b/docs/exclusive.md
index eb250e6e..401dc828 100644
--- a/docs/exclusive.md
+++ b/docs/exclusive.md
@@ -103,7 +103,7 @@ val sparkCol = f.expr("array_sort(value, (l, r) -> case " +
 
 val doricCol = colArray[Row]("value").sortBy(CName("name"), CNameOrd("age", Desc))
 // doricCol: ArrayColumn[Row] = TransformationDoricColumn(
-// Kleisli(scala.Function1$$Lambda$3002/0x0000000801343840@2be41d90)
+// Kleisli(scala.Function1$$Lambda$3003/0x0000000801343840@3ea4712b)
 // )
 
 dfArrayStruct.select(sparkCol.as("sorted")).show(false)
@@ -151,7 +151,7 @@ val mapColDoric = colString("value").matches[String]
   .caseW(_.length > 4, "error key".lit)
   .otherwiseNull
 // mapColDoric: DoricColumn[String] = TransformationDoricColumn(
-// Kleisli(scala.Function1$$Lambda$3002/0x0000000801343840@3a7c75d9)
+// Kleisli(scala.Function1$$Lambda$3003/0x0000000801343840@569b57c4)
 // )
 
 dfMatch.withColumn("mapResult", mapColDoric).show()
diff --git a/docs/implicits.md b/docs/implicits.md
index 1d9b62f7..f5b957dd 100644
--- a/docs/implicits.md
+++ b/docs/implicits.md
@@ -85,7 +85,7 @@ val complexCol: DoricColumn[Int] =
   .transform(_ + 1.lit)
   .aggregate(0.lit)(_ + _)
 // complexCol: DoricColumn[Int] = TransformationDoricColumn(
-// Kleisli(scala.Function1$$Lambda$3002/0x0000000801343840@5234f6c5)
+// Kleisli(scala.Function1$$Lambda$3003/0x0000000801343840@366558a2)
 // )
 
 dfArrays.select(complexCol as "complexTransformation").show()
@@ -277,7 +277,7 @@ The default doric syntax is a little stricter and forces us to transform these v
 ```scala
 val colD = colInt("int") + 1.lit
 // colD: DoricColumn[Int] = TransformationDoricColumn(
-// Kleisli(scala.Function1$$Lambda$3002/0x0000000801343840@51787bc1)
+// Kleisli(scala.Function1$$Lambda$3003/0x0000000801343840@7bc0e96a)
 // )
 
 intDF.select(colD).show()
@@ -298,11 +298,11 @@ we have to _explicitly_ add the following import statement:
 import doric.implicitConversions.literalConversion
 val colSugarD = colInt("int") + 1
 // colSugarD: DoricColumn[Int] = TransformationDoricColumn(
-// Kleisli(scala.Function1$$Lambda$3002/0x0000000801343840@734bb821)
+// Kleisli(scala.Function1$$Lambda$3003/0x0000000801343840@3368136a)
 // )
 val columConcatLiterals = concat("this", "is","doric") // concat expects DoricColumn[String] values, the conversion puts them as expected
 // columConcatLiterals: StringColumn = TransformationDoricColumn(
-// Kleisli(scala.Function1$$Lambda$3002/0x0000000801343840@7bc0e96a)
+// Kleisli(scala.Function1$$Lambda$3003/0x0000000801343840@21dcd0b5)
 // )
 
 intDF.select(colSugarD, columConcatLiterals).show()
diff --git a/docs/quickstart.md b/docs/quickstart.md
index 248bd08b..d1d7b2f9 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -28,7 +28,7 @@ _Maven_
 Doric is committed to use the most modern APIs first.
 
 * The latest stable version of doric is 0.0.7.
-* The latest experimental version of doric is 0.0.0+1-c8febe50-SNAPSHOT.
+* The latest experimental version of doric is 0.0.0+1-f7010827-SNAPSHOT.
 * Doric is compatible with the following Spark versions:
 
 | Spark | Scala | Tested | doric |
@@ -85,7 +85,7 @@ It's only when we try to construct the DataFrame that an exception is raised at
 ```scala
 df
 // org.apache.spark.sql.AnalysisException: [DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES] Cannot resolve "(value * true)" due to data type mismatch: the left and right operands of the binary operator have incompatible types ("INT" and "BOOLEAN").;
-// 'Project [unresolvedalias((value#365 * true), Some(org.apache.spark.sql.Column$$Lambda$5128/0x0000000801bca840@73d193ab))]
+// 'Project [unresolvedalias((value#365 * true), Some(org.apache.spark.sql.Column$$Lambda$5129/0x0000000801bca840@2cb6e497))]
 // +- LocalRelation [value#365]
 //
 // at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.dataTypeMismatch(package.scala:73)
@@ -182,7 +182,7 @@ strDf.select(f.col("str").asDoric[String]).show()
 strDf.select((f.col("str") + f.lit(true)).asDoric[String]).show()
 // doric.sem.DoricMultiError: Found 1 error in select
 // [DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES] Cannot resolve "(str + true)" due to data type mismatch: the left and right operands of the binary operator have incompatible types ("DOUBLE" and "BOOLEAN").;
-// 'Project [unresolvedalias((cast(str#378 as double) + true), Some(org.apache.spark.sql.Column$$Lambda$5128/0x0000000801bca840@73d193ab))]
+// 'Project [unresolvedalias((cast(str#378 as double) + true), Some(org.apache.spark.sql.Column$$Lambda$5129/0x0000000801bca840@2cb6e497))]
 // +- Project [value#375 AS str#378]
 // +- LocalRelation [value#375]
 //
@@ -196,7 +196,7 @@ strDf.select((f.col("str") + f.lit(true)).asDoric[String]).show()
 // at repl.MdocSession$MdocApp$$anonfun$2.apply(quickstart.md:76)
 // at repl.MdocSession$MdocApp$$anonfun$2.apply(quickstart.md:76)
 // Caused by: org.apache.spark.sql.AnalysisException: [DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES] Cannot resolve "(str + true)" due to data type mismatch: the left and right operands of the binary operator have incompatible types ("DOUBLE" and "BOOLEAN").;
-// 'Project [unresolvedalias((cast(str#378 as double) + true), Some(org.apache.spark.sql.Column$$Lambda$5128/0x0000000801bca840@73d193ab))]
+// 'Project [unresolvedalias((cast(str#378 as double) + true), Some(org.apache.spark.sql.Column$$Lambda$5129/0x0000000801bca840@2cb6e497))]
 // +- Project [value#375 AS str#378]
 // +- LocalRelation [value#375]
 //
diff --git a/docs/validations.md b/docs/validations.md
index 1f8750dc..40ba2d4c 100644
--- a/docs/validations.md
+++ b/docs/validations.md
@@ -15,7 +15,7 @@ raising a run-time exception:
 // Spark
 List(1,2,3).toDF().select(f.col("id")+1)
 // org.apache.spark.sql.AnalysisException: [UNRESOLVED_COLUMN.WITH_SUGGESTION] A column or function parameter with name `id` cannot be resolved. Did you mean one of the following? [`value`].;
-// 'Project [unresolvedalias(('id + 1), Some(org.apache.spark.sql.Column$$Lambda$5128/0x0000000801bca840@73d193ab))]
+// 'Project [unresolvedalias(('id + 1), Some(org.apache.spark.sql.Column$$Lambda$5129/0x0000000801bca840@2cb6e497))]
 // +- LocalRelation [value#399]
 //
 // at org.apache.spark.sql.errors.QueryCompilationErrors$.unresolvedAttributeError(QueryCompilationErrors.scala:221)
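
The regenerated snippets above all exercise the same doric pattern: build a statically typed column and select it from a DataFrame. Below is a minimal sketch of that pattern, assuming a local SparkSession; the `intDF` and `colD` names mirror the docs/implicits.md example, and the rest (object name, data) is illustrative only.

```scala
import org.apache.spark.sql.SparkSession
import doric._

object DoricSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    import spark.implicits._

    // Single-column DataFrame named "int", mirroring intDF in docs/implicits.md.
    val intDF = List(1, 2, 3).toDF("int")

    // colInt("int") and 1.lit are both typed as Int, so the addition type-checks;
    // mixing types (e.g. an Int column with a Boolean literal) fails at compile time.
    val colD: DoricColumn[Int] = colInt("int") + 1.lit

    intDF.select(colD).show()
    spark.stop()
  }
}
```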