diff --git a/.github/workflows/pr-build.yaml b/.github/workflows/pr-build.yaml index ecb801b7..d6f0adf3 100644 --- a/.github/workflows/pr-build.yaml +++ b/.github/workflows/pr-build.yaml @@ -22,7 +22,11 @@ jobs: distribution: 'adopt' - name: Run mill tests run: ./mill __.test - - name: Compile with sbt + - name: Compile Scala 2.13 with sbt + run: sbt ++2.13 compile bench/compile bench/jmh:compile + - name: Compile Scala 3 with sbt run: sbt compile bench/compile bench/jmh:compile - - name: Run sbt tests + - name: Run sbt tests (Scala 2.13) + run: sbt ++2.13 test + - name: Run sbt tests (Scala 3) run: sbt test diff --git a/bench/src/main/scala/sjsonnet/MaterializerBenchmark.scala b/bench/src/main/scala/sjsonnet/MaterializerBenchmark.scala index 9152f6f0..edb75417 100644 --- a/bench/src/main/scala/sjsonnet/MaterializerBenchmark.scala +++ b/bench/src/main/scala/sjsonnet/MaterializerBenchmark.scala @@ -6,6 +6,7 @@ import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra._ import ujson.JsVisitor +import scala.compiletime.uninitialized @BenchmarkMode(Array(Mode.AverageTime)) @Fork(2) @@ -16,8 +17,8 @@ import ujson.JsVisitor @State(Scope.Benchmark) class MaterializerBenchmark { - private var interp: Interpreter = _ - private var value: Val = _ + private var interp: Interpreter = uninitialized + private var value: Val = uninitialized @Setup def setup(): Unit = { diff --git a/bench/src/main/scala/sjsonnet/OptimizerBenchmark.scala b/bench/src/main/scala/sjsonnet/OptimizerBenchmark.scala index 2e95de54..8b55a952 100644 --- a/bench/src/main/scala/sjsonnet/OptimizerBenchmark.scala +++ b/bench/src/main/scala/sjsonnet/OptimizerBenchmark.scala @@ -10,6 +10,7 @@ import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra._ import scala.collection.mutable +import scala.compiletime.uninitialized @BenchmarkMode(Array(Mode.AverageTime)) @Fork(2) @@ -20,16 +21,17 @@ import scala.collection.mutable @State(Scope.Benchmark) class 
OptimizerBenchmark { - private var inputs: Iterable[(Expr, FileScope)] = _ - private var allFiles: IndexedSeq[(Path, String)] = _ - private var ev: EvalScope = _ + private var inputs: Iterable[(Expr, FileScope)] = uninitialized + private var allFiles: IndexedSeq[(Path, String)] = uninitialized + private var ev: EvalScope = uninitialized @Setup def setup(): Unit = { val (allFiles, ev) = MainBenchmark.findFiles() this.inputs = allFiles.map { case (p, s) => - fastparse.parse(s, new Parser(p, true, mutable.HashMap.empty, mutable.HashMap.empty).document(_)) match { + fastparse.parse(s, new Parser(p, true, mutable.HashMap.empty, mutable.HashMap.empty).document(using _)) match { case Success(v, _) => v + case _ => throw new RuntimeException("Parse Failed") } } this.ev = ev diff --git a/bench/src/main/scala/sjsonnet/ParserBenchmark.scala b/bench/src/main/scala/sjsonnet/ParserBenchmark.scala index 6100534d..b0a2f45e 100644 --- a/bench/src/main/scala/sjsonnet/ParserBenchmark.scala +++ b/bench/src/main/scala/sjsonnet/ParserBenchmark.scala @@ -7,6 +7,7 @@ import scala.collection.mutable.HashMap import fastparse.Parsed.Success import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra._ +import scala.compiletime.uninitialized @BenchmarkMode(Array(Mode.AverageTime)) @Fork(2) @@ -17,8 +18,8 @@ import org.openjdk.jmh.infra._ @State(Scope.Benchmark) class ParserBenchmark { - private var allFiles: IndexedSeq[(Path, String)] = _ - private var interp: Interpreter = _ + private var allFiles: IndexedSeq[(Path, String)] = uninitialized + private var interp: Interpreter = uninitialized @Setup def setup(): Unit = @@ -27,8 +28,8 @@ class ParserBenchmark { @Benchmark def main(bh: Blackhole): Unit = { bh.consume(allFiles.foreach { case (p, s) => - val res = fastparse.parse(s, new Parser(p, true, HashMap.empty, HashMap.empty).document(_)) - bh.consume(res.asInstanceOf[Success[_]]) + val res = fastparse.parse(s, new Parser(p, true, HashMap.empty, HashMap.empty).document(using _)) + 
bh.consume(res.asInstanceOf[Success[?]]) }) } } diff --git a/build.sbt b/build.sbt index b887b9c2..1f45883a 100644 --- a/build.sbt +++ b/build.sbt @@ -1,29 +1,55 @@ val sjsonnetVersion = "0.4.4" -scalaVersion in Global := "2.13.4" +val scala213 = "2.13.15" +val scala3 = "3.5.1" + +val commonOptions: Seq[String] = Seq( + "-opt:l:inline", + "-opt-inline-from:sjsonnet.*,sjsonnet.**", +) cancelable in Global := true +publish / skip := true lazy val main = (project in file("sjsonnet")) .settings( - Compile / scalacOptions ++= Seq("-opt:l:inline", "-opt-inline-from:sjsonnet.*,sjsonnet.**"), + name := "sjsonnet", + + // Enable cross-compilation + scalaVersion := scala3, + crossScalaVersions := Seq(scala213, scala3), + scalacOptions ++= { + (CrossVersion.partialVersion(scalaVersion.value) match { + case Some((3, _)) => + commonOptions ++ Seq( + // options dedicated for cross build / migration to Scala 3 + "-source:3.5-migration" + ) + case _ => + commonOptions ++ Seq( + "-Xsource:3" + ) + }) + }, + + Test / fork := true, Test / baseDirectory := (ThisBuild / baseDirectory).value, libraryDependencies ++= Seq( - "com.lihaoyi" %% "fastparse" % "2.3.1", - "com.lihaoyi" %% "pprint" % "0.6.1", - "com.lihaoyi" %% "ujson" % "1.3.7", - "com.lihaoyi" %% "scalatags" % "0.9.3", - "com.lihaoyi" %% "os-lib" % "0.7.2", - "com.lihaoyi" %% "mainargs" % "0.2.0", + "com.lihaoyi" %% "fastparse" % "3.1.1", + "com.lihaoyi" %% "pprint" % "0.9.0", + "com.lihaoyi" %% "ujson" % "4.0.0", + "com.lihaoyi" %% "scalatags" % "0.12.0", + "com.lihaoyi" %% "os-lib" % "0.10.3", + "com.lihaoyi" %% "mainargs" % "0.7.5", "org.lz4" % "lz4-java" % "1.8.0", - "org.json" % "json" % "20211205", - "org.scala-lang.modules" %% "scala-collection-compat" % "2.4.0", - "org.tukaani" % "xz" % "1.8", - "org.yaml" % "snakeyaml" % "1.30", + "org.json" % "json" % "20240303", + "org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0", + "org.tukaani" % "xz" % "1.9", + "org.yaml" % "snakeyaml" % "2.0", ), 
libraryDependencies ++= Seq( - "com.lihaoyi" %% "utest" % "0.7.7", + "com.lihaoyi" %% "utest" % "0.8.3", ).map(_ % "test"), testFrameworks += new TestFramework("utest.runner.Framework"), (Compile / unmanagedSourceDirectories) := Seq( @@ -56,4 +82,12 @@ lazy val bench = (project in file("bench")) .enablePlugins(JmhPlugin) .settings( run / fork := true, + // Do not cross-compile the benchmark + scalaVersion := scala3, ) + +lazy val root = (project in file(".")) + .aggregate(main) + .settings( + publishArtifact := false + ) \ No newline at end of file diff --git a/project/plugins.sbt b/project/plugins.sbt index b0a3453b..81e64e63 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,2 +1 @@ -//addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3") \ No newline at end of file diff --git a/sjsonnet/src-jvm-native/sjsonnet/CachedResolvedFile.scala b/sjsonnet/src-jvm-native/sjsonnet/CachedResolvedFile.scala index a5c93946..549ee339 100644 --- a/sjsonnet/src-jvm-native/sjsonnet/CachedResolvedFile.scala +++ b/sjsonnet/src-jvm-native/sjsonnet/CachedResolvedFile.scala @@ -26,7 +26,7 @@ class CachedResolvedFile(val resolvedImportPath: OsPath, memoryLimitBytes: Long, // Assert that the file is less than limit assert(jFile.length() <= memoryLimitBytes, s"Resolved import path ${resolvedImportPath} is too large: ${jFile.length()} bytes > ${memoryLimitBytes} bytes") - private[this] val resolvedImportContent: StaticResolvedFile = { + private val resolvedImportContent: StaticResolvedFile = { if (jFile.length() > cacheThresholdBytes) { // If the file is too large, then we will just read it from disk null @@ -35,7 +35,7 @@ class CachedResolvedFile(val resolvedImportPath: OsPath, memoryLimitBytes: Long, } } - private[this] def readString(jFile: File): String = { + private def readString(jFile: File): String = { new String(Files.readAllBytes(jFile.toPath), 
StandardCharsets.UTF_8); } diff --git a/sjsonnet/src-jvm-native/sjsonnet/OsPath.scala b/sjsonnet/src-jvm-native/sjsonnet/OsPath.scala index 91827ef0..996e00c5 100644 --- a/sjsonnet/src-jvm-native/sjsonnet/OsPath.scala +++ b/sjsonnet/src-jvm-native/sjsonnet/OsPath.scala @@ -29,6 +29,6 @@ case class OsPath(p: os.Path) extends Path{ ":" + Util.prettyIndex(lineStarts, offset) } - p.relativeTo(os.pwd) + offsetStr + p.relativeTo(os.pwd).toString() + offsetStr } } \ No newline at end of file diff --git a/sjsonnet/src-jvm-native/sjsonnet/SjsonnetMain.scala b/sjsonnet/src-jvm-native/sjsonnet/SjsonnetMain.scala index 1ea9989c..f9d30b98 100644 --- a/sjsonnet/src-jvm-native/sjsonnet/SjsonnetMain.scala +++ b/sjsonnet/src-jvm-native/sjsonnet/SjsonnetMain.scala @@ -64,7 +64,7 @@ object SjsonnetMain { val doc = "usage: sjsonnet [sjsonnet-options] script-file" val result = for{ config <- parser.constructEither( - args, + args.toIndexedSeq, customName = name, customDoc = doc, autoPrintHelpAndExit = None ) @@ -113,7 +113,7 @@ object SjsonnetMain { def writeFile(config: Config, f: os.Path, contents: String): Either[String, Unit] = handleWriteFile(os.write.over(f, contents, createFolders = config.createDirs.value)) - def writeToFile(config: Config, wd: os.Path)(materialize: Writer => Either[String, _]): Either[String, String] = { + def writeToFile(config: Config, wd: os.Path)(materialize: Writer => Either[String, ?]): Either[String, String] = { config.outputFile match{ case None => val sw = new StringWriter @@ -205,12 +205,12 @@ object SjsonnetMain { importer = importer match{ case Some(i) => new Importer { def resolve(docBase: Path, importName: String): Option[Path] = - i(docBase, importName).map(OsPath) + i(docBase, importName).map(OsPath.apply) def read(path: Path): Option[ResolvedFile] = { readPath(path) } } - case None => resolveImport(config.jpaths.map(os.Path(_, wd)).map(OsPath(_)), allowedInputs) + case None => resolveImport(config.jpaths.map(os.Path(_, wd)).map(OsPath.apply), 
allowedInputs) }, parseCache, settings = new Settings( @@ -246,8 +246,8 @@ object SjsonnetMain { Right(writer.toString) } } - relPath = os.FilePath(multiPath) / os.RelPath(f) - _ <- writeFile(config, relPath.resolveFrom(wd), rendered) + relPath = os.Path(multiPath, wd) / f + _ <- writeFile(config, relPath, rendered) } yield relPath } @@ -299,7 +299,7 @@ object SjsonnetMain { * of caching on top of the underlying file system. Small files are read into memory, while large * files are read from disk. */ - private[this] def readPath(path: Path): Option[ResolvedFile] = { + private def readPath(path: Path): Option[ResolvedFile] = { val osPath = path.asInstanceOf[OsPath].p if (os.exists(osPath) && os.isFile(osPath)) { Some(new CachedResolvedFile(path.asInstanceOf[OsPath], memoryLimitBytes = Int.MaxValue.toLong)) diff --git a/sjsonnet/src-jvm/sjsonnet/Platform.scala b/sjsonnet/src-jvm/sjsonnet/Platform.scala index c94d867c..160e211f 100644 --- a/sjsonnet/src-jvm/sjsonnet/Platform.scala +++ b/sjsonnet/src-jvm/sjsonnet/Platform.scala @@ -2,13 +2,13 @@ package sjsonnet import org.json.JSONObject -import java.io.{ByteArrayOutputStream, BufferedInputStream, File, FileInputStream} +import java.io.{BufferedInputStream, ByteArrayOutputStream, File, FileInputStream} import java.util.Base64 import java.util.zip.GZIPOutputStream -import net.jpountz.xxhash.{StreamingXXHash64, XXHashFactory, XXHash64} +import net.jpountz.xxhash.{StreamingXXHash64, XXHash64, XXHashFactory} import org.tukaani.xz.LZMA2Options import org.tukaani.xz.XZOutputStream -import org.yaml.snakeyaml.Yaml +import org.yaml.snakeyaml.{LoaderOptions, Yaml} import org.yaml.snakeyaml.constructor.Constructor object Platform { @@ -23,12 +23,13 @@ object Platform { outputStream.close() } } + def gzipString(s: String): String = { gzipBytes(s.getBytes()) } /** - * Valid compression levels are 0 (no compression) to 9 (maximum compression). + * Valid compression levels are 0 (no compression) to 9 (maximum compression). 
*/ def xzBytes(b: Array[Byte], compressionLevel: Option[Int]): String = { val outputStream: ByteArrayOutputStream = new ByteArrayOutputStream(b.length) @@ -49,14 +50,15 @@ object Platform { } def yamlToJson(yamlString: String): String = { - val yaml: java.util.LinkedHashMap[String, Object] = new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]])).load(yamlString) + val options = new LoaderOptions() + val yaml: java.util.LinkedHashMap[String, Object] = new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]], options)).load(yamlString) new JSONObject(yaml).toString() } private def computeHash(algorithm: String, s: String) = { java.security.MessageDigest.getInstance(algorithm) .digest(s.getBytes("UTF-8")) - .map{ b => String.format("%02x", (b & 0xff).asInstanceOf[Integer])} + .map { b => String.format("%02x", Integer.valueOf(b & 0xff)) } .mkString } @@ -71,7 +73,7 @@ object Platform { // Same as go-jsonnet https://github.com/google/go-jsonnet/blob/2b4d7535f540f128e38830492e509a550eb86d57/builtins.go#L959 def sha3(s: String): String = computeHash("SHA3-512", s) - private[this] val xxHashFactory = XXHashFactory.fastestInstance() + private val xxHashFactory = XXHashFactory.fastestInstance() def hashFile(file: File): String = { val buffer = new Array[Byte](8192) diff --git a/sjsonnet/src/sjsonnet/BaseCharRenderer.scala b/sjsonnet/src/sjsonnet/BaseCharRenderer.scala index 406ceb64..9c5f419a 100644 --- a/sjsonnet/src/sjsonnet/BaseCharRenderer.scala +++ b/sjsonnet/src/sjsonnet/BaseCharRenderer.scala @@ -10,16 +10,16 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output] (out: T, indent: Int = -1, escapeUnicode: Boolean = false) extends JsVisitor[T, T]{ - protected[this] val elemBuilder = new upickle.core.CharBuilder - protected[this] val unicodeCharBuilder = new upickle.core.CharBuilder() + protected val elemBuilder = new upickle.core.CharBuilder + protected val unicodeCharBuilder = new upickle.core.CharBuilder() def 
flushCharBuilder() = { elemBuilder.writeOutToIfLongerThan(out, if (depth == 0) 0 else 1000) } - protected[this] var depth: Int = 0 + protected var depth: Int = 0 - protected[this] var commaBuffered = false + protected var commaBuffered = false def flushBuffer() = { if (commaBuffered) { @@ -28,17 +28,21 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output] renderIndent() } } - def visitArray(length: Int, index: Int) = new ArrVisitor[T, T] { + + def visitArray(length: Int, index: Int): ArrVisitor[T, T] = new ArrVisitor[T, T] { flushBuffer() elemBuilder.append('[') depth += 1 renderIndent() - def subVisitor = BaseCharRenderer.this + + def subVisitor: BaseCharRenderer[T] = BaseCharRenderer.this + def visitValue(v: T, index: Int): Unit = { flushBuffer() commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false depth -= 1 @@ -49,20 +53,25 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output] } } - def visitObject(length: Int, index: Int) = new ObjVisitor[T, T] { + def visitJsonableObject(length: Int, index: Int): ObjVisitor[T, T] = new ObjVisitor[T, T] { flushBuffer() elemBuilder.append('{') depth += 1 renderIndent() - def subVisitor = BaseCharRenderer.this - def visitKey(index: Int) = BaseCharRenderer.this + + def subVisitor: BaseCharRenderer[T] = BaseCharRenderer.this + + def visitKey(index: Int): BaseCharRenderer[T] = BaseCharRenderer.this + def visitKeyValue(s: Any): Unit = { elemBuilder.append(':') if (indent != -1) elemBuilder.append(' ') } + def visitValue(v: T, index: Int): Unit = { commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false depth -= 1 @@ -144,7 +153,7 @@ class BaseCharRenderer[T <: upickle.core.CharOps.Output] def visitNonNullString(s: CharSequence, index: Int) = { flushBuffer() - upickle.core.RenderUtils.escapeChar(unicodeCharBuilder, elemBuilder, s, escapeUnicode) + upickle.core.RenderUtils.escapeChar(unicodeCharBuilder, elemBuilder, s, escapeUnicode, wrapQuotes = true) flushCharBuilder() out } 
diff --git a/sjsonnet/src/sjsonnet/BaseRenderer.scala b/sjsonnet/src/sjsonnet/BaseRenderer.scala index 3bd2a632..9b52be5f 100644 --- a/sjsonnet/src/sjsonnet/BaseRenderer.scala +++ b/sjsonnet/src/sjsonnet/BaseRenderer.scala @@ -27,17 +27,21 @@ class BaseRenderer[T <: java.io.Writer] renderIndent() } } - def visitArray(length: Int, index: Int) = new ArrVisitor[T, T] { + + def visitArray(length: Int, index: Int): ArrVisitor[T, T] = new ArrVisitor[T, T] { flushBuffer() out.append('[') depth += 1 renderIndent() - def subVisitor = BaseRenderer.this + + def subVisitor: BaseRenderer[T] = BaseRenderer.this + def visitValue(v: T, index: Int): Unit = { flushBuffer() commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false depth -= 1 @@ -47,17 +51,22 @@ class BaseRenderer[T <: java.io.Writer] } } - def visitObject(length: Int, index: Int) = new ObjVisitor[T, T] { + def visitJsonableObject(length: Int, index: Int): ObjVisitor[T, T] = new ObjVisitor[T, T] { flushBuffer() out.append('{') depth += 1 renderIndent() - def subVisitor = BaseRenderer.this - def visitKey(index: Int) = BaseRenderer.this + + def subVisitor: BaseRenderer[T] = BaseRenderer.this + + def visitKey(index: Int): BaseRenderer[T] = BaseRenderer.this + def visitKeyValue(s: Any): Unit = out.append(colonSnippet) + def visitValue(v: T, index: Int): Unit = { commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false depth -= 1 diff --git a/sjsonnet/src/sjsonnet/DecimalFormat.scala b/sjsonnet/src/sjsonnet/DecimalFormat.scala index 0e0566d3..9e0653df 100644 --- a/sjsonnet/src/sjsonnet/DecimalFormat.scala +++ b/sjsonnet/src/sjsonnet/DecimalFormat.scala @@ -22,7 +22,7 @@ object DecimalFormat { def leftPad(n: Long, targetWidth: Int): String = { val sign = if (n < 0) "-" else "" val absN = math.abs(n) - val nWidth = if (absN == 0) 1 else Math.log10(absN).toInt + 1 + val nWidth = if (absN == 0) 1 else Math.log10(absN.toDouble).toInt + 1 sign + "0" * (targetWidth - nWidth) + absN } def 
rightPad(n0: Long, minWidth: Int, maxWidth: Int): String = { @@ -31,7 +31,7 @@ object DecimalFormat { val n = (n0 / Math.pow(10, trailingZeroes(n0))).toInt assert(n == math.abs(n)) val nWidth = if (n == 0) 1 else Math.log10(n).toInt + 1 - (n + "0" * (minWidth - nWidth)).take(maxWidth) + (n.toString + "0" * (minWidth - nWidth)).take(maxWidth) } } def format(fracLengthOpt: Option[(Int, Int)], expLengthOpt: Option[Int], number: Double): String = { @@ -39,13 +39,13 @@ object DecimalFormat { case Some(expLength) => val roundLog10 = Math.ceil(Math.log10(Math.abs(number))).toLong val expNum = roundLog10 - 1 - val scaled = number / math.pow(10, expNum) + val scaled = number / math.pow(10, expNum.toDouble) val prefix = scaled.toLong.toString val expFrag = leftPad(expNum, expLength) val fracFrag = fracLengthOpt.map{case (zeroes, hashes) => if (zeroes == 0 && hashes == 0) "" else { - val divided = number / Math.pow(10, expNum - zeroes - hashes) + val divided = number / Math.pow(10, (expNum - zeroes - hashes).toDouble) val scaledFrac = divided % Math.pow(10, zeroes + hashes) rightPad(Math.abs(Math.round(scaledFrac)), zeroes, zeroes + hashes) } diff --git a/sjsonnet/src/sjsonnet/Error.scala b/sjsonnet/src/sjsonnet/Error.scala index edbe18d2..cb6ef569 100644 --- a/sjsonnet/src/sjsonnet/Error.scala +++ b/sjsonnet/src/sjsonnet/Error.scala @@ -32,11 +32,11 @@ class Error(msg: String, stack: List[Error.Frame] = Nil, underlying: Option[Thro def asSeenFrom(ev: EvalErrorScope): Error = copy(stack = stack.map(_.asSeenFrom(ev))) - protected[this] def copy(msg: String = msg, stack: List[Error.Frame] = stack, + protected def copy(msg: String = msg, stack: List[Error.Frame] = stack, underlying: Option[Throwable] = underlying) = new Error(msg, stack, underlying) - private[this] def alwaysAddPos(expr: Expr): Boolean = expr match { + private def alwaysAddPos(expr: Expr): Boolean = expr match { case _: Expr.LocalExpr | _: Expr.Arr | _: Expr.ObjExtend | _: Expr.ObjBody | _: Expr.IfElse => false 
case _ => true } @@ -77,16 +77,16 @@ object Error { class ParseError(msg: String, stack: List[Error.Frame] = Nil, underlying: Option[Throwable] = None) extends Error(msg, stack, underlying) { - override protected[this] def copy(msg: String = msg, stack: List[Error.Frame] = stack, - underlying: Option[Throwable] = underlying) = + override protected def copy(msg: String = msg, stack: List[Error.Frame] = stack, + underlying: Option[Throwable] = underlying): ParseError = new ParseError(msg, stack, underlying) } class StaticError(msg: String, stack: List[Error.Frame] = Nil, underlying: Option[Throwable] = None) extends Error(msg, stack, underlying) { - override protected[this] def copy(msg: String = msg, stack: List[Error.Frame] = stack, - underlying: Option[Throwable] = underlying) = + override protected def copy(msg: String = msg, stack: List[Error.Frame] = stack, + underlying: Option[Throwable] = underlying): StaticError = new StaticError(msg, stack, underlying) } diff --git a/sjsonnet/src/sjsonnet/Evaluator.scala b/sjsonnet/src/sjsonnet/Evaluator.scala index d9f1d1ea..859eb3f1 100644 --- a/sjsonnet/src/sjsonnet/Evaluator.scala +++ b/sjsonnet/src/sjsonnet/Evaluator.scala @@ -170,7 +170,7 @@ class Evaluator(resolver: CachedResolver, case _ => fail() } case Expr.UnaryOp.OP_~ => v match { - case Val.Num(_, v) => Val.Num(pos, ~v.toLong) + case Val.Num(_, v) => Val.Num(pos, (~v.toLong).toDouble) case _ => fail() } case Expr.UnaryOp.OP_+ => v match { @@ -430,12 +430,12 @@ class Evaluator(resolver: CachedResolver, } case Expr.BinaryOp.OP_<< => (l, r) match { - case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, l.toLong << r.toLong) + case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, (l.toLong << r.toLong).toDouble) case _ => fail() } case Expr.BinaryOp.OP_>> => (l, r) match { - case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, l.toLong >> r.toLong) + case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, (l.toLong >> r.toLong).toDouble) case _ => fail() } @@ -445,17 
+445,17 @@ class Evaluator(resolver: CachedResolver, } case Expr.BinaryOp.OP_& => (l, r) match { - case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, l.toLong & r.toLong) + case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, (l.toLong & r.toLong).toDouble) case _ => fail() } case Expr.BinaryOp.OP_^ => (l, r) match { - case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, l.toLong ^ r.toLong) + case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, (l.toLong ^ r.toLong).toDouble) case _ => fail() } case Expr.BinaryOp.OP_| => (l, r) match { - case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, l.toLong | r.toLong) + case (Val.Num(_, l), Val.Num(_, r)) => Val.Num(pos, (l.toLong | r.toLong).toDouble) case _ => fail() } @@ -502,14 +502,20 @@ class Evaluator(resolver: CachedResolver, def visitMemberList(objPos: Position, e: ObjBody.MemberList, sup: Val.Obj)(implicit scope: ValScope): Val.Obj = { val asserts = e.asserts val fields = e.fields - var cachedSimpleScope: ValScope = null.asInstanceOf[ValScope] + var cachedSimpleScope: Option[ValScope] = None var cachedObj: Val.Obj = null var asserting: Boolean = false def makeNewScope(self: Val.Obj, sup: Val.Obj): ValScope = { if((sup eq null) && (self eq cachedObj)) { - if(cachedSimpleScope == null.asInstanceOf[ValScope]) cachedSimpleScope = createNewScope(self, sup) - cachedSimpleScope + cachedSimpleScope match { + case None => + val cachedScope = createNewScope(self, sup) + cachedSimpleScope = Some(cachedScope) + cachedScope + case Some(cachedScope) => + cachedScope + } } else createNewScope(self, sup) } @@ -581,7 +587,8 @@ class Evaluator(resolver: CachedResolver, val v = new Val.Obj.Member(plus, sep) { def invoke(self: Val.Obj, sup: Val.Obj, fs: FileScope, ev: EvalScope): Val = { if(asserts != null) assertions(self) - visitExpr(rhs)(makeNewScope(self, sup)) + val newScope = makeNewScope(self, sup) + visitExpr(rhs)(using newScope) } } builder.put(k, v) diff --git a/sjsonnet/src/sjsonnet/Expr.scala 
b/sjsonnet/src/sjsonnet/Expr.scala index cc6cdf19..1944992e 100644 --- a/sjsonnet/src/sjsonnet/Expr.scala +++ b/sjsonnet/src/sjsonnet/Expr.scala @@ -20,7 +20,7 @@ trait Expr{ } } object Expr{ - private final def arrStr(a: Array[_]): String = { + private final def arrStr(a: Array[?]): String = { if(a == null) "null" else a.mkString("[", ", ", "]") } diff --git a/sjsonnet/src/sjsonnet/ExprTransform.scala b/sjsonnet/src/sjsonnet/ExprTransform.scala index 964833ee..970c5618 100644 --- a/sjsonnet/src/sjsonnet/ExprTransform.scala +++ b/sjsonnet/src/sjsonnet/ExprTransform.scala @@ -188,10 +188,10 @@ abstract class ExprTransform { } } - protected[this] def transformArr[T <: Expr](a: Array[T]): Array[T] = - transformGenericArr(a)((transform _).asInstanceOf[T => T]) + protected def transformArr[T <: Expr](a: Array[T]): Array[T] = + transformGenericArr(a)((transform).asInstanceOf[T => T]) - protected[this] def transformParams(p: Params): Params = { + protected def transformParams(p: Params): Params = { if(p == null) return null val defs = p.defaultExprs if(defs == null) p @@ -202,16 +202,16 @@ abstract class ExprTransform { } } - protected[this] def transformBinds(a: Array[Bind]): Array[Bind] = + protected def transformBinds(a: Array[Bind]): Array[Bind] = transformGenericArr(a)(transformBind) - protected[this] def transformFields(a: Array[Member.Field]): Array[Member.Field] = + protected def transformFields(a: Array[Member.Field]): Array[Member.Field] = transformGenericArr(a)(transformField) - protected[this] def transformAsserts(a: Array[Member.AssertStmt]): Array[Member.AssertStmt] = + protected def transformAsserts(a: Array[Member.AssertStmt]): Array[Member.AssertStmt] = transformGenericArr(a)(transformAssert) - protected[this] def transformBind(b: Bind): Bind = { + protected def transformBind(b: Bind): Bind = { val args = b.args val rhs = b.rhs val args2 = transformParams(args) @@ -220,7 +220,7 @@ abstract class ExprTransform { else b.copy(args = args2, rhs = rhs2) } - 
protected[this] def transformField(f: Member.Field): Member.Field = { + protected def transformField(f: Member.Field): Member.Field = { val x = f.fieldName val y = f.args val z = f.rhs @@ -231,14 +231,14 @@ abstract class ExprTransform { else f.copy(fieldName = x2, args = y2, rhs = z2) } - protected[this] def transformFieldName(f: FieldName): FieldName = f match { + protected def transformFieldName(f: FieldName): FieldName = f match { case FieldName.Dyn(x) => val x2 = transform(x) if(x2 eq x) f else FieldName.Dyn(x2) case _ => f } - protected[this] def transformAssert(a: Member.AssertStmt): Member.AssertStmt = { + protected def transformAssert(a: Member.AssertStmt): Member.AssertStmt = { val x = a.value val y = a.msg val x2 = transform(x) @@ -247,14 +247,14 @@ abstract class ExprTransform { else a.copy(value = x2, msg = y2) } - protected[this] def transformOption(o: Option[Expr]): Option[Expr] = o match { + protected def transformOption(o: Option[Expr]): Option[Expr] = o match { case Some(e) => val e2 = transform(e) if(e2 eq e) o else Some(e2) case None => o } - protected[this] def transformList(l: List[Expr]): List[Expr] = { + protected def transformList(l: List[Expr]): List[Expr] = { val lb = List.newBuilder[Expr] var diff = false l.foreach { e => @@ -265,7 +265,7 @@ abstract class ExprTransform { if(diff) lb.result() else l } - protected[this] def transformGenericArr[T <: AnyRef](a: Array[T])(f: T => T): Array[T] = { + protected def transformGenericArr[T <: AnyRef](a: Array[T])(f: T => T): Array[T] = { if(a == null) return null var i = 0 while(i < a.length) { diff --git a/sjsonnet/src/sjsonnet/Format.scala b/sjsonnet/src/sjsonnet/Format.scala index 1b21085f..eec5ef9e 100644 --- a/sjsonnet/src/sjsonnet/Format.scala +++ b/sjsonnet/src/sjsonnet/Format.scala @@ -21,14 +21,15 @@ object Format{ width: Option[Int], precision: Option[Int], conversion: Char) - import fastparse._, NoWhitespace._ - def integer[_: P] = P( CharIn("1-9") ~ CharsWhileIn("0-9", 0) | "0" ) - def 
label[_: P] = P( ("(" ~ CharsWhile(_ != ')').! ~ ")").? ) - def flags[_: P] = P( CharsWhileIn("#0\\- +", 0).! ) - def width[_: P] = P( (integer | "*").!.? ) - def precision[_: P] = P( ("." ~/ integer.!).? ) - def conversion[_: P] = P( CharIn("diouxXeEfFgGcrsa%").! ) - def formatSpec[_: P] = P( label ~ flags ~ width ~ precision ~ CharIn("hlL").? ~ conversion ).map{ + import fastparse.*, NoWhitespace.* + + def integer[A: P] = P( CharIn("1-9") ~ CharsWhileIn("0-9", 0) | "0" ) + def label[A: P] = P( ("(" ~ CharsWhile(_ != ')').! ~ ")").? ) + def flags[A: P] = P( CharsWhileIn("#0\\- +", 0).! ) + def width[A: P] = P( (integer | "*").!.? ) + def precision[A: P] = P( ("." ~/ integer.!).? ) + def conversion[A: P] = P( CharIn("diouxXeEfFgGcrsa%").! ) + def formatSpec[A: P] = P( label ~ flags ~ width ~ precision ~ CharIn("hlL").? ~ conversion ).map{ case (label, flags, width, precision, conversion) => FormatSpec( label, @@ -44,8 +45,8 @@ object Format{ } - def plain[_: P] = P( CharsWhile(_ != '%', 0).! ) - def format[_: P] = P( plain ~ (("%" ~/ formatSpec) ~ plain).rep ~ End) + def plain[A: P] = P( CharsWhile(_ != '%', 0).! 
) + def format0[A: P] = P( plain ~ (("%" ~/ formatSpec) ~ plain).rep ~ End) @@ -80,7 +81,7 @@ object Format{ values0: Val, pos: Position) (implicit evaluator: EvalScope): String = { - val (leading, chunks) = fastparse.parse(s, format(_)).get.value + val (leading, chunks) = fastparse.parse(s, format0(using _)).get.value format(leading, chunks, values0, pos) } @@ -274,7 +275,7 @@ object Format{ } class PartialApplyFmt(fmt: String) extends Val.Builtin1("values") { - val (leading, chunks) = fastparse.parse(fmt, format(_)).get.value + val (leading, chunks) = fastparse.parse(fmt, format0(using _)).get.value def evalRhs(values0: Val, ev: EvalScope, pos: Position): Val = Val.Str(pos, format(leading, chunks, values0, pos)(ev)) } diff --git a/sjsonnet/src/sjsonnet/Importer.scala b/sjsonnet/src/sjsonnet/Importer.scala index c4e616a2..b42c3f66 100644 --- a/sjsonnet/src/sjsonnet/Importer.scala +++ b/sjsonnet/src/sjsonnet/Importer.scala @@ -33,7 +33,7 @@ object Importer { case class FileParserInput(file: File) extends ParserInput { - private[this] val bufferedFile = new BufferedRandomAccessFile(file.getAbsolutePath, 1024 * 8) + private val bufferedFile = new BufferedRandomAccessFile(file.getAbsolutePath, 1024 * 8) private lazy val fileLength = file.length.toInt @@ -55,7 +55,7 @@ case class FileParserInput(file: File) extends ParserInput { override def checkTraceable(): Unit = {} - private[this] lazy val lineNumberLookup: Array[Int] = { + private lazy val lineNumberLookup: Array[Int] = { val lines = mutable.ArrayBuffer[Int]() val bufferedStream = new BufferedInputStream(new FileInputStream(file)) var byteRead: Int = 0 @@ -149,7 +149,7 @@ trait ResolvedFile { def readString(): String // Get a content hash of the file suitable for detecting changes in a given file. 
- def contentHash(): String + def contentHash: String } case class StaticResolvedFile(content: String) extends ResolvedFile { @@ -158,7 +158,7 @@ case class StaticResolvedFile(content: String) extends ResolvedFile { def readString(): String = content // We just cheat, the content hash can be the content itself for static imports - lazy val contentHash: String = content + def contentHash: String = content } class CachedImporter(parent: Importer) extends Importer { @@ -184,8 +184,9 @@ class CachedResolver( internedStaticFieldSets: mutable.HashMap[Val.StaticObjectFieldSet, java.util.LinkedHashMap[String, java.lang.Boolean]]) extends CachedImporter(parentImporter) { def parse(path: Path, content: ResolvedFile)(implicit ev: EvalErrorScope): Either[Error, (Expr, FileScope)] = { - parseCache.getOrElseUpdate((path, content.contentHash.toString), { - val parsed = fastparse.parse(content.getParserInput(), new Parser(path, strictImportSyntax, internedStrings, internedStaticFieldSets).document(_)) match { + parseCache.getOrElseUpdate((path, content.contentHash), { + val parser = new Parser(path, strictImportSyntax, internedStrings, internedStaticFieldSets) + val parsed = fastparse.parse(content.getParserInput(), parser.document(using _)) match { case f @ Parsed.Failure(_, _, _) => val traced = f.trace() val pos = new Position(new FileScope(path), traced.index) diff --git a/sjsonnet/src/sjsonnet/Interpreter.scala b/sjsonnet/src/sjsonnet/Interpreter.scala index b82ed5dc..6de9806d 100644 --- a/sjsonnet/src/sjsonnet/Interpreter.scala +++ b/sjsonnet/src/sjsonnet/Interpreter.scala @@ -128,6 +128,6 @@ class Interpreter(extVars: Map[String, String], ) } } - handleException(m.apply0(res, visitor)(evaluator)) + handleException(m.apply0(res, visitor)(using evaluator)) } } diff --git a/sjsonnet/src/sjsonnet/Materializer.scala b/sjsonnet/src/sjsonnet/Materializer.scala index c6bea8f5..747dcd5f 100644 --- a/sjsonnet/src/sjsonnet/Materializer.scala +++ 
b/sjsonnet/src/sjsonnet/Materializer.scala @@ -25,7 +25,7 @@ abstract class Materializer { case obj: Val.Obj => storePos(obj.pos) obj.triggerAllAsserts(obj) - val objVisitor = visitor.visitObject(obj.visibleKeyNames.length , -1) + val objVisitor = visitor.visitObject(obj.visibleKeyNames.length, jsonableKeys = true, -1) val sort = !evaluator.settings.preserveOrder var prevKey: String = null obj.foreachElement(sort, evaluator.emptyMaterializeFileScopePos) { (k, v) => diff --git a/sjsonnet/src/sjsonnet/Parser.scala b/sjsonnet/src/sjsonnet/Parser.scala index 3db4fb85..1d728a5d 100644 --- a/sjsonnet/src/sjsonnet/Parser.scala +++ b/sjsonnet/src/sjsonnet/Parser.scala @@ -1,7 +1,7 @@ package sjsonnet -import fastparse.JsonnetWhitespace._ -import fastparse._ +import fastparse.JsonnetWhitespace.* +import fastparse.* import Expr.Member.Visibility import scala.annotation.switch @@ -48,19 +48,19 @@ class Parser(val currentFile: Path, strictImportSyntax: Boolean, internedStrings: mutable.HashMap[String, String], internedStaticFieldSets: mutable.HashMap[Val.StaticObjectFieldSet, java.util.LinkedHashMap[String, java.lang.Boolean]]) { - import Parser._ + import Parser.* private val fileScope = new FileScope(currentFile) - def Pos[_: P]: P[Position] = Index.map(offset => new Position(fileScope, offset)) + def Pos[A: P]: P[Position] = Index.map(offset => new Position(fileScope, offset)) - def id[_: P] = P( + def id[A: P] = P( CharIn("_a-zA-Z") ~~ CharsWhileIn("_a-zA-Z0-9", 0) ).!.filter(s => !keywords.contains(s)) - def break[_: P] = P(!CharIn("_a-zA-Z0-9")) - def number[_: P]: P[Val.Num] = P( + def break[A: P] = P(!CharIn("_a-zA-Z0-9")) + def number[A: P]: P[Val.Num] = P( Pos ~~ ( CharsWhileIn("0-9") ~~ ("." ~ CharsWhileIn("0-9")).? ~~ @@ -68,8 +68,8 @@ class Parser(val currentFile: Path, ).! 
).map(s => Val.Num(s._1, s._2.toDouble)) - def escape[_: P] = P( escape0 | escape1 ) - def escape0[_: P] = P("\\" ~~ !"u" ~~ AnyChar.!).map{ + def escape[A: P] = P( escape0 | escape1 ) + def escape0[A: P] = P("\\" ~~ !"u" ~~ AnyChar.!).map{ case "\"" => "\"" case "'" => "\'" case "\\" => "\\" @@ -80,29 +80,29 @@ class Parser(val currentFile: Path, case "r" => "\r" case "t" => "\t" } - def escape1[_: P] = P( "\\u" ~~ CharIn("0-9a-fA-F").repX(min=4, max=4).! ).map{ + def escape1[A: P] = P( "\\u" ~~ CharIn("0-9a-fA-F").repX(min=4, max=4).! ).map{ s => Integer.parseInt(s, 16).toChar.toString } - def doubleString[_: P]: P[Seq[String]] = + def doubleString[A: P]: P[Seq[String]] = P( (CharsWhile(x => x != '"' && x != '\\').! | escape).repX ~~ "\"" ) - def singleString[_: P]: P[Seq[String]] = + def singleString[A: P]: P[Seq[String]] = P( (CharsWhile(x => x != '\'' && x != '\\').! | escape).repX ~~ "'" ) - def literalDoubleString[_: P]: P[Seq[String]] = + def literalDoubleString[A: P]: P[Seq[String]] = P( (CharsWhile(_ != '"').! | "\"\"".!.map(_ => "\"")).repX ~~ "\"" ) - def literalSingleString[_: P]: P[Seq[String]] = + def literalSingleString[A: P]: P[Seq[String]] = P( (CharsWhile(_ != '\'').! 
| "''".!.map(_ => "'")).repX ~~ "'" ) - def tripleBarStringLines[_: P]: P[Seq[String]] = P( + def tripleBarStringLines[A: P]: P[Seq[String]] = P( tripleBarStringHead.flatMapX { case (pre, w, head) => tripleBarStringBody(w).map(pre ++ Seq(head, "\n") ++ _) } ) - def tripleBarString[_: P]: P[Seq[String]] = P( + def tripleBarString[A: P]: P[Seq[String]] = P( "||"./ ~~ CharsWhileIn(" \t", 0) ~~ "\n" ~~ tripleBarStringLines ~~ "\n" ~~ CharsWhileIn(" \t", 0) ~~ "|||" ) - def string[_: P]: P[String] = P( + def string[A: P]: P[String] = P( SingleChar.flatMapX{ case '\"' => doubleString case '\'' => singleString @@ -116,24 +116,24 @@ class Parser(val currentFile: Path, } ).map(_.mkString) - def tripleBarStringHead[_: P] = P( + def tripleBarStringHead[A: P] = P( (CharsWhileIn(" \t", 0) ~~ "\n".!).repX ~~ CharsWhileIn(" \t", 1).! ~~ CharsWhile(_ != '\n').! ) - def tripleBarBlankHead[_: P]: P[String] = + def tripleBarBlankHead[A: P]: P[String] = P( CharsWhileIn(" \t", 0) ~~ &("\n").map(_ => "\n") ) - def tripleBarBlank[_: P]: P[String] = P( "\n" ~~ tripleBarBlankHead ) + def tripleBarBlank[A: P]: P[String] = P( "\n" ~~ tripleBarBlankHead ) - def tripleBarStringBody[_: P](w: String): P[Seq[String]] = P( + def tripleBarStringBody[A: P](w: String): P[Seq[String]] = P( (tripleBarBlank | "\n" ~~ w ~~ CharsWhile(_ != '\n').!.map(_ + "\n")).repX ) - def arr[_: P]: P[Expr] = P( (Pos ~~ &("]")).map(new Val.Arr(_, emptyLazyArray)) | arrBody ) - def compSuffix[_: P] = P( forspec ~ compspec ).map(Left(_)) - def arrBody[_: P]: P[Expr] = P( + def arr[A: P]: P[Expr] = P( (Pos ~~ &("]")).map(new Val.Arr(_, emptyLazyArray)) | arrBody ) + def compSuffix[A: P] = P( forspec ~ compspec ).map(Left(_)) + def arrBody[A: P]: P[Expr] = P( Pos ~~ expr ~ (compSuffix | "," ~ (compSuffix | (expr.rep(0, sep = ",") ~ ",".?).map(Right(_)))).? 
).map{ @@ -152,19 +152,19 @@ class Parser(val currentFile: Path, case (offset, first, Some(Right(rest))) => Expr.Arr(offset, Array(first) ++ rest) } - def assertExpr[_: P](pos: Position): P[Expr] = + def assertExpr[A: P](pos: Position): P[Expr] = P( assertStmt ~ ";" ~ expr ).map(t => Expr.AssertExpr(pos, t._1, t._2)) - def function[_: P](pos: Position): P[Expr] = + def function[A: P](pos: Position): P[Expr] = P( "(" ~/ params ~ ")" ~ expr ).map(t => Expr.Function(pos, t._1, t._2)) - def ifElse[_: P](pos: Position): P[Expr] = - P( Pos ~~ expr ~ "then" ~~ break ~ expr ~ ("else" ~~ break ~ expr).?.map(_.getOrElse(null)) ).map(Expr.IfElse.tupled) + def ifElse[A: P](pos: Position): P[Expr] = + P( Pos ~~ expr ~ "then" ~~ break ~ expr ~ ("else" ~~ break ~ expr).?.map(_.orNull) ).map((Expr.IfElse.apply).tupled) - def localExpr[_: P]: P[Expr] = - P( Pos ~~ bind.rep(min=1, sep = ","./).map(s => if(s.isEmpty) null else s.toArray) ~ ";" ~ expr ).map(Expr.LocalExpr.tupled) + def localExpr[A: P]: P[Expr] = + P( Pos ~~ bind.rep(min=1, sep = ","./).map(s => if(s.isEmpty) null else s.toArray) ~ ";" ~ expr ).map((Expr.LocalExpr.apply).tupled) - def expr[_: P]: P[Expr] = + def expr[A: P]: P[Expr] = P("" ~ expr1 ~ (Pos ~~ binaryop ~/ expr1).rep ~ "").map{ case (pre, fs) => var remaining = fs def climb(minPrec: Int, current: Expr): Expr = { @@ -214,11 +214,11 @@ class Parser(val currentFile: Path, climb(0, pre) } - def expr1[_: P]: P[Expr] = P(expr2 ~ exprSuffix2.rep).map{ + def expr1[A: P]: P[Expr] = P(expr2 ~ exprSuffix2.rep).map{ case (pre, fs) => fs.foldLeft(pre){case (p, f) => f(p) } } - def exprSuffix2[_: P]: P[Expr => Expr] = P( + def exprSuffix2[A: P]: P[Expr => Expr] = P( Pos.flatMapX{i => CharIn(".[({")./.!.map(_(0)).flatMapX{ c => (c: @switch) match{ @@ -237,12 +237,12 @@ class Parser(val currentFile: Path, } ) - def local[_: P] = P( localExpr ) - def importStr[_: P](pos: Position) = P( importExpr.map(Expr.ImportStr(pos, _)) ) - def `import`[_: P](pos: Position) = P( 
importExpr.map(Expr.Import(pos, _)) ) - def error[_: P](pos: Position) = P(expr.map(Expr.Error(pos, _)) ) + def local[A: P] = P( localExpr ) + def importStr[A: P](pos: Position) = P( importExpr.map(Expr.ImportStr(pos, _)) ) + def `import`[A: P](pos: Position) = P( importExpr.map(Expr.Import(pos, _)) ) + def error[A: P](pos: Position) = P(expr.map(Expr.Error(pos, _)) ) - def importExpr[_: P]: P[String] = P( + def importExpr[A: P]: P[String] = P( if (!strictImportSyntax) string else expr.flatMap { case Val.Str(_, s) => Pass(s) @@ -250,7 +250,7 @@ class Parser(val currentFile: Path, } ) - def unaryOpExpr[_: P](pos: Position, op: Char) = P( + def unaryOpExpr[A: P](pos: Position, op: Char) = P( expr1.map{ e => def k2 = op match{ case '+' => Expr.UnaryOp.OP_+ @@ -269,7 +269,7 @@ class Parser(val currentFile: Path, } // Any `expr` that isn't naively left-recursive - def expr2[_: P]: P[Expr] = P( + def expr2[A: P]: P[Expr] = P( Pos.flatMapX{ pos => SingleChar.flatMapX{ c => (c: @switch) match { @@ -289,7 +289,7 @@ class Parser(val currentFile: Path, case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => P.current.index = pos.offset; number case x if idStartChar(x) => CharsWhileIn("_a-zA-Z0-9", 0).!.flatMapX { y => - x + y match { + s"$x$y" match { case "null" => Pass(Val.Null(pos)) case "true" => Pass(Val.True(pos)) case "false" => Pass(Val.False(pos)) @@ -311,7 +311,7 @@ class Parser(val currentFile: Path, } ) - def objinside[_: P]: P[Expr.ObjBody] = P( + def objinside[A: P]: P[Expr.ObjBody] = P( Pos ~ member.rep(sep = ",") ~ ",".? ~ (forspec ~ compspec).? ).flatMap { case t @ (pos, exprs, _) => val seen = collection.mutable.Set.empty[String] @@ -345,10 +345,10 @@ class Parser(val currentFile: Path, exprs(preLocals.length) val postLocals = exprs.drop(preLocals.length+1).takeWhile(_.isInstanceOf[Expr.Bind]) .map(_.asInstanceOf[Expr.Bind]) - - /* + + /* * Prevent duplicate fields in list comprehension. 
See: https://github.com/databricks/sjsonnet/issues/99 - * + * * If comps._1 is a forspec with value greater than one lhs cannot be a Expr.Str * Otherwise the field value will be overriden by the multiple iterations of forspec */ @@ -362,35 +362,35 @@ class Parser(val currentFile: Path, Expr.ObjBody.ObjComp(pos, preLocals.toArray, lhs, rhs, plus, postLocals.toArray, comps._1, comps._2.toList) } - def member[_: P]: P[Expr.Member] = P( objlocal | "assert" ~~ break ~ assertStmt | field ) - def field[_: P] = P( + def member[A: P]: P[Expr.Member] = P( objlocal | "assert" ~~ break ~ assertStmt | field ) + def field[A: P] = P( (Pos ~~ fieldname ~/ "+".!.? ~ ("(" ~ params ~ ")").? ~ fieldKeySep ~/ expr).map{ case (pos, name, plus, p, h2, e) => - Expr.Member.Field(pos, name, plus.nonEmpty, p.getOrElse(null), h2, e) + Expr.Member.Field(pos, name, plus.nonEmpty, p.orNull, h2, e) } ) - def fieldKeySep[_: P] = P( StringIn(":::", "::", ":") ).!.map{ + def fieldKeySep[A: P] = P( StringIn(":::", "::", ":") ).!.map{ case ":" => Visibility.Normal case "::" => Visibility.Hidden case ":::" => Visibility.Unhide } - def objlocal[_: P] = P( "local" ~~ break ~/ bind ) - def compspec[_: P]: P[Seq[Expr.CompSpec]] = P( (forspec | ifspec).rep ) - def forspec[_: P] = - P( Pos ~~ "for" ~~ break ~/ id ~ "in" ~~ break ~ expr ).map(Expr.ForSpec.tupled) - def ifspec[_: P] = P( Pos ~~ "if" ~~ break ~/ expr ).map(Expr.IfSpec.tupled) - def fieldname[_: P] = P( - id.map(Expr.FieldName.Fixed) | - string.map(Expr.FieldName.Fixed) | - "[" ~ expr.map(Expr.FieldName.Dyn) ~ "]" + def objlocal[A: P] = P( "local" ~~ break ~/ bind ) + def compspec[A: P]: P[Seq[Expr.CompSpec]] = P( (forspec | ifspec).rep ) + def forspec[A: P] = + P( Pos ~~ "for" ~~ break ~/ id ~ "in" ~~ break ~ expr ).map((Expr.ForSpec.apply).tupled) + def ifspec[A: P] = P( Pos ~~ "if" ~~ break ~/ expr ).map((Expr.IfSpec.apply).tupled) + def fieldname[A: P] = P( + id.map(Expr.FieldName.Fixed.apply) | + string.map(Expr.FieldName.Fixed.apply) | + 
"[" ~ expr.map(Expr.FieldName.Dyn.apply) ~ "]" ) - def assertStmt[_: P] = - P( expr ~ (":" ~ expr).?.map(_.getOrElse(null)) ).map(Expr.Member.AssertStmt.tupled) + def assertStmt[A: P] = + P( expr ~ (":" ~ expr).?.map(_.orNull) ).map((Expr.Member.AssertStmt.apply).tupled) - def bind[_: P] = - P( Pos ~~ id ~ ("(" ~/ params.? ~ ")").?.map(_.flatten).map(_.getOrElse(null)) ~ "=" ~ expr ).map(Expr.Bind.tupled) + def bind[A: P] = + P( Pos ~~ id ~ ("(" ~/ params.? ~ ")").?.map(_.flatten).map(_.orNull) ~ "=" ~ expr ).map((Expr.Bind.apply).tupled) - def args[_: P] = P( ((id ~ "=" ~ !"=").? ~ expr).rep(sep = ",") ~ ",".? ).flatMapX{ x => + def args[A: P] = P( ((id ~ "=" ~ !"=").? ~ expr).rep(sep = ",") ~ ",".? ).flatMapX{ x => if (x.sliding(2).exists{case Seq(l, r) => l._1.isDefined && r._1.isEmpty case _ => false}) { Fail.opaque("no positional params after named params") } else { @@ -400,7 +400,7 @@ class Parser(val currentFile: Path, } } - def params[_: P]: P[Expr.Params] = P( (id ~ ("=" ~ expr).?).rep(sep = ",") ~ ",".? ).flatMapX{ x => + def params[A: P]: P[Expr.Params] = P( (id ~ ("=" ~ expr).?).rep(sep = ",") ~ ",".? ).flatMapX{ x => val seen = collection.mutable.Set.empty[String] var overlap: String = null for((k, v) <- x){ @@ -416,7 +416,7 @@ class Parser(val currentFile: Path, } - def binaryop[_: P] = P( + def binaryop[A: P] = P( StringIn( "<<", ">>", "<=", ">=", "in", "==", "!=", "&&", "||", "*", "/", "%", "+", "-", "<", ">", "&", "^", "|" @@ -424,7 +424,7 @@ class Parser(val currentFile: Path, ).! 
- def document[_: P]: P[(Expr, FileScope)] = P( expr ~ Pass(fileScope) ~ End ) + def document[A: P]: P[(Expr, FileScope)] = P( expr ~ Pass(fileScope) ~ End ) } final class Position(val fileScope: FileScope, val offset: Int) { diff --git a/sjsonnet/src/sjsonnet/PrettyYamlRenderer.scala b/sjsonnet/src/sjsonnet/PrettyYamlRenderer.scala index a2829fa2..076f98fd 100644 --- a/sjsonnet/src/sjsonnet/PrettyYamlRenderer.scala +++ b/sjsonnet/src/sjsonnet/PrettyYamlRenderer.scala @@ -138,18 +138,20 @@ class PrettyYamlRenderer(out: Writer = new java.io.StringWriter(), newlineBuffered = false dashBuffered = false } - override def visitArray(length: Int, index: Int) = new ArrVisitor[Writer, Writer] { + + override def visitArray(length: Int, index: Int): ArrVisitor[Writer, Writer] = new ArrVisitor[Writer, Writer] { var empty = true val dedentInObject = afterKey && !indentArrayInObject - def subVisitor = { - if (empty){ + + def subVisitor: PrettyYamlRenderer = { + if (empty) { afterColon = false flushBuffer() val outerFirstElementInArray = firstElementInArray firstElementInArray = true if (!topLevel) { depth += 1 - if (!firstElementInArray || !outerFirstElementInArray) newlineBuffered = true + if (!firstElementInArray || !outerFirstElementInArray) newlineBuffered = true } topLevel = false @@ -161,6 +163,7 @@ class PrettyYamlRenderer(out: Writer = new java.io.StringWriter(), leftHandPrefixOffset = 0 PrettyYamlRenderer.this } + def visitValue(v: Writer, index: Int): Unit = { firstElementInArray = true empty = false @@ -169,6 +172,7 @@ class PrettyYamlRenderer(out: Writer = new java.io.StringWriter(), dashBuffered = true } + def visitEnd(index: Int) = { firstElementInArray = false if (!dedentInObject) depth -= 1 @@ -182,16 +186,19 @@ class PrettyYamlRenderer(out: Writer = new java.io.StringWriter(), out } } - override def visitObject(length: Int, index: Int) = new ObjVisitor[Writer, Writer] { + + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[Writer, Writer] = 
new ObjVisitor[Writer, Writer] { firstElementInArray = false var empty = true flushBuffer() if (!topLevel) depth += 1 topLevel = false - def subVisitor = PrettyYamlRenderer.this - def visitKey(index: Int) = { - if (empty){ + def subVisitor: PrettyYamlRenderer = PrettyYamlRenderer.this + + def visitKey(index: Int): PrettyYamlRenderer = { + + if (empty) { leftHandPrefixOffset = 0 afterColon = false @@ -200,12 +207,13 @@ class PrettyYamlRenderer(out: Writer = new java.io.StringWriter(), } PrettyYamlRenderer.this } + def visitKeyValue(s: Any): Unit = { empty = false flushBuffer() out.append(":") saveCurrentPos() - if (bufferedComment != null){ + if (bufferedComment != null) { out.append(bufferedComment) bufferedComment = null } @@ -213,10 +221,12 @@ class PrettyYamlRenderer(out: Writer = new java.io.StringWriter(), afterColon = true newlineBuffered = false } + def visitValue(v: Writer, index: Int): Unit = { newlineBuffered = true afterKey = false } + def visitEnd(index: Int) = { if (empty) { addSpaceAfterColon() @@ -402,7 +412,7 @@ object PrettyYamlRenderer{ def stringNeedsToBeQuoted(str: String) = { import fastparse._ import NoWhitespace._ - def yamlPunctuation[_: P] = P( + def yamlPunctuation[A: P] = P( // http://blogs.perl.org/users/tinita/2018/03/strings-in-yaml---to-quote-or-not-to-quote.html StringIn( "!", // ! Tag like !!null @@ -420,7 +430,7 @@ object PrettyYamlRenderer{ " " // leading or trailing empty spaces need quotes to define them ) ) - def yamlKeyword[_: P] = P( + def yamlKeyword[A: P] = P( StringIn( // https://makandracards.com/makandra/24809-yaml-keys-like-yes-or-no-evaluate-to-true-and-false // y|Y|yes|Yes|YES|n|N|no|No|NO @@ -437,17 +447,17 @@ object PrettyYamlRenderer{ ) ) - def digits[_: P] = P( CharsWhileIn("0-9") ) - def yamlFloat[_: P] = P( + def digits[A: P] = P( CharsWhileIn("0-9") ) + def yamlFloat[A: P] = P( (digits.? ~ "." ~ digits | digits ~ ".") ~ (("e" | "E") ~ ("+" | "-").? ~ digits).? 
) - def yamlOctalSuffix[_: P] = P( "x" ~ CharIn("1-9a-fA-F") ~ CharsWhileIn("0-9a-fA-F").? ) - def yamlHexSuffix[_: P] = P( "o" ~ CharIn("1-7") ~ CharsWhileIn("0-7").? ) - def yamlOctalHex[_: P] = P( "0" ~ (yamlOctalSuffix | yamlHexSuffix) ) - def yamlNumber0[_: P] = P( ".inf" | yamlFloat | yamlOctalHex | digits ) + def yamlOctalSuffix[A: P] = P( "x" ~ CharIn("1-9a-fA-F") ~ CharsWhileIn("0-9a-fA-F").? ) + def yamlHexSuffix[A: P] = P( "o" ~ CharIn("1-7") ~ CharsWhileIn("0-7").? ) + def yamlOctalHex[A: P] = P( "0" ~ (yamlOctalSuffix | yamlHexSuffix) ) + def yamlNumber0[A: P] = P( ".inf" | yamlFloat | yamlOctalHex | digits ) // Add a `CharIn` lookahead to bail out quickly if something cannot possibly be a number - def yamlNumber[_: P] = P( "-".? ~ yamlNumber0 ) + def yamlNumber[A: P] = P( "-".? ~ yamlNumber0 ) // Strings and numbers aren't the only scalars that YAML can understand. // ISO-formatted date and datetime literals are also parsed. @@ -455,25 +465,25 @@ object PrettyYamlRenderer{ // datetime: 2001-12-15T02:59:43.1Z // datetime_with_spaces: 2001-12-14 21:59:43.10 -5 - def fourDigits[_: P] = P( CharIn("0-9") ~ CharIn("0-9") ~ CharIn("0-9") ~ CharIn("0-9") ) - def oneTwoDigits[_: P] = P( CharIn("0-9") ~ CharIn("0-9").? ) - def twoDigits[_: P] = P( CharIn("0-9") ~ CharIn("0-9") ) - def dateTimeSuffix[_: P] = P( + def fourDigits[A: P] = P( CharIn("0-9") ~ CharIn("0-9") ~ CharIn("0-9") ~ CharIn("0-9") ) + def oneTwoDigits[A: P] = P( CharIn("0-9") ~ CharIn("0-9").? ) + def twoDigits[A: P] = P( CharIn("0-9") ~ CharIn("0-9") ) + def dateTimeSuffix[A: P] = P( ("T" | " ") ~ twoDigits ~ ":" ~ twoDigits ~ ":" ~ twoDigits ~ ("." ~ digits.?).? ~ ((" " | "Z").? ~ ("-".? ~ oneTwoDigits).?).? ) - def yamlDate[_: P] = P( fourDigits ~ "-" ~ oneTwoDigits ~ "-" ~ oneTwoDigits ~ dateTimeSuffix.? ) + def yamlDate[A: P] = P( fourDigits ~ "-" ~ oneTwoDigits ~ "-" ~ oneTwoDigits ~ dateTimeSuffix.? 
) // Not in the YAML, but included to match PyYAML behavior - def yamlTime[_: P] = P( twoDigits ~ ":" ~ twoDigits ) + def yamlTime[A: P] = P( twoDigits ~ ":" ~ twoDigits ) - def parser[_: P] = P( + def parser[A: P] = P( // Use a `&` lookahead to bail out early in the common case, so we don't // need to try parsing times/dates/numbers one by one yamlPunctuation | (&(CharIn(".0-9\\-")) ~ (yamlTime | yamlDate | yamlNumber) | yamlKeyword) ~ End ) - fastparse.parse(str, parser(_)).isSuccess || + fastparse.parse(str, parser(using _)).isSuccess || str.contains(": ") || // Looks like a key-value pair str.contains(" #") || // Comments str.charAt(str.length - 1) == ':' || // Looks like a key-value pair diff --git a/sjsonnet/src/sjsonnet/ReadWriter.scala b/sjsonnet/src/sjsonnet/ReadWriter.scala index d7667bea..01e109a9 100644 --- a/sjsonnet/src/sjsonnet/ReadWriter.scala +++ b/sjsonnet/src/sjsonnet/ReadWriter.scala @@ -10,34 +10,34 @@ sealed abstract class ReadWriter[T] { object ReadWriter{ implicit object StringRead extends ReadWriter[String]{ def apply(t: Val) = t.asString - def write(pos: Position, t: String) = Val.Str(pos, t) + def write(pos: Position, t: String): Val.Str = Val.Str(pos, t) } implicit object BooleanRead extends ReadWriter[Boolean]{ def apply(t: Val) = t.asBoolean - def write(pos: Position, t: Boolean) = Val.bool(pos, t) + def write(pos: Position, t: Boolean): Val.Bool = Val.bool(pos, t) } implicit object IntRead extends ReadWriter[Int]{ def apply(t: Val) = t.asInt - def write(pos: Position, t: Int) = Val.Num(pos, t) + def write(pos: Position, t: Int): Val.Num = Val.Num(pos, t) } implicit object DoubleRead extends ReadWriter[Double]{ def apply(t: Val) = t.asDouble - def write(pos: Position, t: Double) = Val.Num(pos, t) + def write(pos: Position, t: Double): Val.Num = Val.Num(pos, t) } implicit object ValRead extends ReadWriter[Val]{ def apply(t: Val) = t - def write(pos: Position, t: Val) = t + def write(pos: Position, t: Val): Val = t } implicit object 
ObjRead extends ReadWriter[Val.Obj]{ def apply(t: Val) = t.asObj - def write(pos: Position, t: Val.Obj) = t + def write(pos: Position, t: Val.Obj): Val.Obj = t } implicit object ArrRead extends ReadWriter[Val.Arr]{ def apply(t: Val) = t.asArr - def write(pos: Position, t: Val.Arr) = t + def write(pos: Position, t: Val.Arr): Val.Arr = t } implicit object FuncRead extends ReadWriter[Val.Func]{ def apply(t: Val) = t.asFunc - def write(pos: Position, t: Val.Func) = t + def write(pos: Position, t: Val.Func): Val.Func = t } } diff --git a/sjsonnet/src/sjsonnet/Renderer.scala b/sjsonnet/src/sjsonnet/Renderer.scala index 7c70da8e..dc75792e 100644 --- a/sjsonnet/src/sjsonnet/Renderer.scala +++ b/sjsonnet/src/sjsonnet/Renderer.scala @@ -46,19 +46,23 @@ class Renderer(out: Writer = new java.io.StringWriter(), newlineBuffered = false commaBuffered = false } - override def visitArray(length: Int, index: Int) = new ArrVisitor[Writer, Writer] { + + override def visitArray(length: Int, index: Int): ArrVisitor[Writer, Writer] = new ArrVisitor[Writer, Writer] { var empty = true flushBuffer() elemBuilder.append('[') newlineBuffered = true depth += 1 - def subVisitor = Renderer.this + + def subVisitor: Renderer = Renderer.this + def visitValue(v: Writer, index: Int): Unit = { empty = false flushBuffer() commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false newlineBuffered = false @@ -72,23 +76,28 @@ class Renderer(out: Writer = new java.io.StringWriter(), } } - override def visitObject(length: Int, index: Int) = new ObjVisitor[Writer, Writer] { + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[Writer, Writer] = new ObjVisitor[Writer, Writer] { var empty = true flushBuffer() elemBuilder.append('{') newlineBuffered = true depth += 1 - def subVisitor = Renderer.this - def visitKey(index: Int) = Renderer.this + + def subVisitor: Renderer = Renderer.this + + def visitKey(index: Int): Renderer = Renderer.this + def visitKeyValue(v: Any): Unit = { 
empty = false //flushBuffer() elemBuilder.append(':') elemBuilder.append(' ') } + def visitValue(v: Writer, index: Int): Unit = { commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false newlineBuffered = false @@ -141,21 +150,26 @@ class PythonRenderer(out: Writer = new java.io.StringWriter(), out } - override def visitObject(length: Int, index: Int) = new ObjVisitor[Writer, Writer] { + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[Writer, Writer] = new ObjVisitor[Writer, Writer] { flushBuffer() elemBuilder.append('{') depth += 1 renderIndent() - def subVisitor = PythonRenderer.this - def visitKey(index: Int) = PythonRenderer.this + + def subVisitor: PythonRenderer = PythonRenderer.this + + def visitKey(index: Int): PythonRenderer = PythonRenderer.this + def visitKeyValue(s: Any): Unit = { elemBuilder.ensureLength(2) elemBuilder.append(':') elemBuilder.append(' ') } + def visitValue(v: Writer, index: Int): Unit = { commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false depth -= 1 @@ -181,18 +195,20 @@ class PythonRenderer(out: Writer = new java.io.StringWriter(), case class MaterializeJsonRenderer(indent: Int = 4, escapeUnicode: Boolean = false, out: StringWriter = new StringWriter()) extends BaseCharRenderer(out, indent, escapeUnicode) { - override def visitArray(length: Int, index: Int) = new ArrVisitor[StringWriter, StringWriter] { + override def visitArray(length: Int, index: Int): ArrVisitor[StringWriter, StringWriter] = new ArrVisitor[StringWriter, StringWriter] { flushBuffer() elemBuilder.append('[') depth += 1 // account for rendering differences of whitespaces in ujson and jsonnet manifestJson if (length == 0 && indent != -1) elemBuilder.append('\n') else renderIndent() - def subVisitor = MaterializeJsonRenderer.this + + def subVisitor: MaterializeJsonRenderer = MaterializeJsonRenderer.this def visitValue(v: StringWriter, index: Int): Unit = { flushBuffer() commaBuffered = true } + def 
visitEnd(index: Int) = { commaBuffered = false depth -= 1 @@ -203,21 +219,25 @@ case class MaterializeJsonRenderer(indent: Int = 4, escapeUnicode: Boolean = fal } } - override def visitObject(length: Int, index: Int) = new ObjVisitor[StringWriter, StringWriter] { + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[StringWriter, StringWriter] = new ObjVisitor[StringWriter, StringWriter] { flushBuffer() elemBuilder.append('{') depth += 1 // account for rendering differences of whitespaces in ujson and jsonnet manifestJson if (length == 0 && indent != -1) elemBuilder.append('\n') else renderIndent() - def subVisitor = MaterializeJsonRenderer.this - def visitKey(index: Int) = MaterializeJsonRenderer.this + + def subVisitor: MaterializeJsonRenderer = MaterializeJsonRenderer.this + + def visitKey(index: Int): MaterializeJsonRenderer = MaterializeJsonRenderer.this def visitKeyValue(s: Any): Unit = { elemBuilder.append(':') if (indent != -1) elemBuilder.append(' ') } + def visitValue(v: StringWriter, index: Int): Unit = { commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false depth -= 1 diff --git a/sjsonnet/src/sjsonnet/ScopedExprTransform.scala b/sjsonnet/src/sjsonnet/ScopedExprTransform.scala index 54a6f730..a9d49796 100644 --- a/sjsonnet/src/sjsonnet/ScopedExprTransform.scala +++ b/sjsonnet/src/sjsonnet/ScopedExprTransform.scala @@ -40,12 +40,12 @@ class ScopedExprTransform extends ExprTransform { (transformBinds(preLocals), transformBinds(postLocals), transform(value)) }) }) - if((f2 eq first) && (k2 eq key) && (v2 eq value) && (pre2 eq preLocals) && (post2 eq postLocals) && (r2, rest).zipped.forall(_ eq _)) e + if((f2 eq first) && (k2 eq key) && (v2 eq value) && (pre2 eq preLocals) && (post2 eq postLocals) && (r2 lazyZip rest).forall(_ eq _)) e else ObjComp(pos, pre2, k2, v2, plus, post2, f2.asInstanceOf[ForSpec], r2) case Comp(pos, value, first, rest) => val (f2 :: r2, v2) = compSpecs(first :: rest.toList, () => 
transform(value)) - if((f2 eq first) && (v2 eq value) && (r2, rest).zipped.forall(_ eq _)) e + if((f2 eq first) && (v2 eq value) && (r2 lazyZip rest).forall(_ eq _)) e else Comp(pos, v2, f2.asInstanceOf[ForSpec], r2.toArray) case e => rec(e) @@ -62,13 +62,13 @@ class ScopedExprTransform extends ExprTransform { } } - protected[this] def transformFieldNameOnly(f: Member.Field): Member.Field = { + protected def transformFieldNameOnly(f: Member.Field): Member.Field = { val x = f.fieldName val x2 = transformFieldName(x) if(x2 eq x) f else f.copy(fieldName = x2) } - protected[this] def transformFieldNoName(f: Member.Field): Member.Field = { + protected def transformFieldNoName(f: Member.Field): Member.Field = { def g = { val y = f.args val z = f.rhs @@ -80,9 +80,9 @@ class ScopedExprTransform extends ExprTransform { else nestedNames(f.args.names)(g) } - override protected[this] def transformField(f: Member.Field): Member.Field = ??? + override protected def transformField(f: Member.Field): Member.Field = ??? 
- protected[this] def compSpecs[T](a: List[CompSpec], value: () => T): (List[CompSpec], T) = a match { + protected def compSpecs[T](a: List[CompSpec], value: () => T): (List[CompSpec], T) = a match { case (c @ ForSpec(pos, name, cond)) :: cs => val c2 = rec(c).asInstanceOf[ForSpec] nestedWith(c2.name, dynamicExpr) { @@ -97,19 +97,19 @@ class ScopedExprTransform extends ExprTransform { (Nil, value()) } - protected[this] def nestedNew[T](sc: Scope)(f: => T): T = { + protected def nestedNew[T](sc: Scope)(f: => T): T = { val oldScope = scope scope = sc try f finally { scope = oldScope } } - protected[this] def nestedWith[T](n: String, e: Expr)(f: => T): T = + protected def nestedWith[T](n: String, e: Expr)(f: => T): T = nestedNew(new Scope(scope.mappings.updated(n, new ScopedVal(e, scope, scope.size)), scope.size+1))(f) - protected[this] def nestedFileScope[T](fs: FileScope)(f: => T): T = + protected def nestedFileScope[T](fs: FileScope)(f: => T): T = nestedNew(emptyScope)(f) - protected[this] def nestedConsecutiveBindings[T](a: Array[Bind])(f: => Bind => Bind)(g: => T): (Array[Bind], T) = { + protected def nestedConsecutiveBindings[T](a: Array[Bind])(f: => Bind => Bind)(g: => T): (Array[Bind], T) = { if(a == null || a.length == 0) (a, g) else { val oldScope = scope @@ -131,7 +131,7 @@ class ScopedExprTransform extends ExprTransform { } } - protected[this] def nestedBindings[T](a: Array[Bind])(f: => T): T = { + protected def nestedBindings[T](a: Array[Bind])(f: => T): T = { if(a == null || a.length == 0) f else { val newm = a.zipWithIndex.map { case (b, idx) => @@ -142,7 +142,7 @@ class ScopedExprTransform extends ExprTransform { } } - protected[this] def nestedObject[T](self0: Expr, super0: Expr)(f: => T): T = { + protected def nestedObject[T](self0: Expr, super0: Expr)(f: => T): T = { val self = new ScopedVal(self0, scope, scope.size) val sup = new ScopedVal(super0, scope, scope.size+1) val newm = { @@ -152,10 +152,10 @@ class ScopedExprTransform extends 
ExprTransform { nestedNew(new Scope(newm, scope.size + 2))(f) } - protected[this] def nestedBindings[T](self0: Expr, super0: Expr, a: Array[Bind])(f: => T): T = + protected def nestedBindings[T](self0: Expr, super0: Expr, a: Array[Bind])(f: => T): T = nestedObject(self0, super0)(nestedBindings(a)(f)) - protected[this] def nestedNames[T](a: Array[String])(f: => T): T = { + protected def nestedNames[T](a: Array[String])(f: => T): T = { if(a == null || a.length == 0) f else { val newm = a.zipWithIndex.map { case (n, idx) => (n, new ScopedVal(dynamicExpr, scope, scope.size + idx)) } diff --git a/sjsonnet/src/sjsonnet/StaticOptimizer.scala b/sjsonnet/src/sjsonnet/StaticOptimizer.scala index 6e1e2854..3b2b4bd0 100644 --- a/sjsonnet/src/sjsonnet/StaticOptimizer.scala +++ b/sjsonnet/src/sjsonnet/StaticOptimizer.scala @@ -42,7 +42,7 @@ class StaticOptimizer( case e @ Id(pos, name) => scope.get(name) match { - case ScopedVal(v: Val with Expr, _, _) => v + case ScopedVal(v: (Val & Expr), _, _) => v case ScopedVal(_, _, idx) => ValidId(pos, name, idx) case null if name == "std" => std case _ => failOrWarn("Unknown variable: "+name, e) @@ -98,7 +98,7 @@ class StaticOptimizer( case _ => false } - override protected[this] def transformFieldName(f: FieldName): FieldName = f match { + override protected def transformFieldName(f: FieldName): FieldName = f match { case FieldName.Dyn(x) => transform(x) match { case x2: Val.Str => @@ -201,7 +201,7 @@ class StaticOptimizer( while(i < target.length) { if(target(i) == null) { params.defaultExprs(i) match { - case v: Val with Expr => target(i) = v + case v: (Val & Expr) => target(i) = v case _ => return null // no default or non-constant } } diff --git a/sjsonnet/src/sjsonnet/Std.scala b/sjsonnet/src/sjsonnet/Std.scala index b2526318..e1ae89db 100644 --- a/sjsonnet/src/sjsonnet/Std.scala +++ b/sjsonnet/src/sjsonnet/Std.scala @@ -696,7 +696,7 @@ class Std { } private class Spec1Str(_a: Val.Arr) extends Val.Builtin1("b") { - private[this] 
val a = + private val a = ArrayOps.sortInPlaceBy(ArrayOps.distinctBy(_a.asLazyArray)(_.asInstanceOf[Val.Str].value))(_.asInstanceOf[Val.Str].value) // 2.13+: _a.asLazyArray.distinctBy(_.asInstanceOf[Val.Str].value).sortInPlaceBy(_.asInstanceOf[Val.Str].value) @@ -758,7 +758,7 @@ class Std { case (_, Some(rChild)) => k -> createMember{recSingle(rChild)} } - Val.Obj.mk(mergePosition, kvs:_*) + Val.Obj.mk(mergePosition, kvs*) case (_, _) => recSingle(r) } @@ -770,7 +770,7 @@ class Std { if !value.isInstanceOf[Val.Null] } yield (k, createMember{recSingle(value)}) - Val.Obj.mk(obj.pos, kvs:_*) + Val.Obj.mk(obj.pos, kvs*) case _ => v } @@ -1227,7 +1227,7 @@ class Std { v = rec(o.value(k, pos.fileScope.noOffsetPos)(ev)) if filter(v) }yield (k, new Val.Obj.ConstMember(false, Visibility.Normal, v)) - Val.Obj.mk(pos, bindings: _*) + Val.Obj.mk(pos, bindings*) case a: Val.Arr => new Val.Arr(pos, a.asStrictArray.map(rec).filter(filter).map(identity)) case _ => x @@ -1466,7 +1466,7 @@ class Std { def getAllKeys(ev: EvalScope, v1: Val.Obj): Array[String] = maybeSortKeys(ev, v1.allKeyNames) - @inline private[this] def maybeSortKeys(ev: EvalScope, keys: Array[String]): Array[String] = + @inline private def maybeSortKeys(ev: EvalScope, keys: Array[String]): Array[String] = if(ev.settings.preserveOrder) keys else keys.sorted def getObjValuesFromKeys(pos: Position, ev: EvalScope, v1: Val.Obj, keys: Array[String]): Val.Arr = diff --git a/sjsonnet/src/sjsonnet/Val.scala b/sjsonnet/src/sjsonnet/Val.scala index 6f2050d6..840596da 100644 --- a/sjsonnet/src/sjsonnet/Val.scala +++ b/sjsonnet/src/sjsonnet/Val.scala @@ -17,7 +17,7 @@ import scala.reflect.ClassTag * are all wrapped in [[Lazy]] and only truly evaluated on-demand */ abstract class Lazy { - protected[this] var cached: Val = null + protected var cached: Val = null def compute(): Val final def force: Val = { if(cached == null) cached = compute() @@ -42,7 +42,7 @@ sealed abstract class Val extends Lazy { if 
(implicitly[ClassTag[T]].runtimeClass.isInstance(this)) this.asInstanceOf[T] else Error.fail("Expected " + implicitly[PrettyNamed[T]].s + ", found " + prettyName) - private[this] def failAs(err: String): Nothing = + private def failAs(err: String): Nothing = Error.fail("Wrong parameter type: expected " + err + ", got " + prettyName) def asString: String = failAs("String") @@ -92,7 +92,7 @@ object Val{ override def asDouble: Double = value } - class Arr(val pos: Position, private val value: Array[_ <: Lazy]) extends Literal { + class Arr(val pos: Position, private val value: Array[? <: Lazy]) extends Literal { def prettyName = "array" override def asArr: Arr = this @@ -142,17 +142,17 @@ object Val{ } final class Obj(val pos: Position, - private[this] var value0: util.LinkedHashMap[String, Obj.Member], + private var value0: util.LinkedHashMap[String, Obj.Member], static: Boolean, triggerAsserts: Val.Obj => Unit, `super`: Obj, valueCache: mutable.HashMap[Any, Val] = mutable.HashMap.empty[Any, Val], - private[this] var allKeys: util.LinkedHashMap[String, java.lang.Boolean] = null) extends Literal with Expr.ObjBody { + private var allKeys: util.LinkedHashMap[String, java.lang.Boolean] = null) extends Literal with Expr.ObjBody { var asserting: Boolean = false def getSuper = `super` - private[this] def getValue0: util.LinkedHashMap[String, Obj.Member] = { + private def getValue0: util.LinkedHashMap[String, Obj.Member] = { if(value0 == null) { val value0 = new java.util.LinkedHashMap[String, Val.Obj.Member] allKeys.forEach { (k, _) => @@ -355,7 +355,7 @@ object Val{ override def asFunc: Func = this - def apply(argsL: Array[_ <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = { + def apply(argsL: Array[? 
<: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = { val simple = namedNames == null && params.names.length == argsL.length val funDefFileScope: FileScope = pos match { case null => outerPos.fileScope case p => p.fileScope } //println(s"apply: argsL: ${argsL.length}, namedNames: $namedNames, paramNames: ${params.names.mkString(",")}") @@ -493,7 +493,7 @@ object Val{ def evalRhs(arg1: Val, ev: EvalScope, pos: Position): Val - override def apply(argVals: Array[_ <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = + override def apply(argVals: Array[? <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = if(namedNames == null && argVals.length == 1) evalRhs(argVals(0).force, ev, outerPos) else super.apply(argVals, namedNames, outerPos) @@ -508,7 +508,7 @@ object Val{ def evalRhs(arg1: Val, arg2: Val, ev: EvalScope, pos: Position): Val - override def apply(argVals: Array[_ <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = + override def apply(argVals: Array[? <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = if(namedNames == null && argVals.length == 2) evalRhs(argVals(0).force, argVals(1).force, ev, outerPos) else super.apply(argVals, namedNames, outerPos) @@ -524,7 +524,7 @@ object Val{ def evalRhs(arg1: Val, arg2: Val, arg3: Val, ev: EvalScope, pos: Position): Val - override def apply(argVals: Array[_ <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = + override def apply(argVals: Array[? 
<: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = if(namedNames == null && argVals.length == 3) evalRhs(argVals(0).force, argVals(1).force, argVals(2).force, ev, outerPos) else super.apply(argVals, namedNames, outerPos) @@ -536,7 +536,7 @@ object Val{ def evalRhs(arg1: Val, arg2: Val, arg3: Val, arg4: Val, ev: EvalScope, pos: Position): Val - override def apply(argVals: Array[_ <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = + override def apply(argVals: Array[? <: Lazy], namedNames: Array[String], outerPos: Position)(implicit ev: EvalScope): Val = if(namedNames == null && argVals.length == 4) evalRhs(argVals(0).force, argVals(1).force, argVals(2).force, argVals(3).force, ev, outerPos) else super.apply(argVals, namedNames, outerPos) diff --git a/sjsonnet/src/sjsonnet/ValScope.scala b/sjsonnet/src/sjsonnet/ValScope.scala index d930cbaf..fbb3e602 100644 --- a/sjsonnet/src/sjsonnet/ValScope.scala +++ b/sjsonnet/src/sjsonnet/ValScope.scala @@ -35,12 +35,11 @@ final class ValScope private (val bindings: Array[Lazy]) extends AnyVal { i += 1 j += 1 } - b } new ValScope(b) } - def extendSimple(newBindingsV: Array[_ <: Lazy]) = { + def extendSimple(newBindingsV: Array[? 
<: Lazy]) = { if(newBindingsV == null || newBindingsV.length == 0) this else { val b = Arrays.copyOf(bindings, bindings.length + newBindingsV.length) @@ -76,6 +75,6 @@ final class ValScope private (val bindings: Array[Lazy]) extends AnyVal { } object ValScope{ - private[this] val emptyArr = new Array[Lazy](0) + private val emptyArr = new Array[Lazy](0) def empty = new ValScope(emptyArr) } diff --git a/sjsonnet/src/sjsonnet/ValVisitor.scala b/sjsonnet/src/sjsonnet/ValVisitor.scala index 441acabe..2f2e68ca 100644 --- a/sjsonnet/src/sjsonnet/ValVisitor.scala +++ b/sjsonnet/src/sjsonnet/ValVisitor.scala @@ -11,17 +11,17 @@ import scala.collection.mutable class ValVisitor(pos: Position) extends JsVisitor[Val, Val] { self => def visitArray(length: Int, index: Int): ArrVisitor[Val, Val] = new ArrVisitor[Val, Val] { val a = new mutable.ArrayBuilder.ofRef[Lazy] - def subVisitor: Visitor[_, _] = self + def subVisitor: Visitor[?, ?] = self def visitValue(v: Val, index: Int): Unit = a.+=(v) def visitEnd(index: Int): Val = new Val.Arr(pos, a.result()) } - def visitObject(length: Int, index: Int): ObjVisitor[Val, Val] = new ObjVisitor[Val, Val] { + def visitJsonableObject(length: Int, index: Int): ObjVisitor[Val, Val] = new ObjVisitor[Val, Val] { val cache = mutable.HashMap.empty[Any, Val] val allKeys = new util.LinkedHashMap[String, java.lang.Boolean] var key: String = null - def subVisitor: Visitor[_, _] = self - def visitKey(index: Int) = upickle.core.StringVisitor + def subVisitor: Visitor[?, ?] = self + def visitKey(index: Int): Visitor[?, ?] 
= upickle.core.StringVisitor def visitKeyValue(s: Any): Unit = key = s.toString def visitValue(v: Val, index: Int): Unit = { cache.put(key, v) @@ -39,7 +39,7 @@ class ValVisitor(pos: Position) extends JsVisitor[Val, Val] { self => def visitFloat64StringParts(s: CharSequence, decIndex: Int, expIndex: Int, index: Int): Val = Val.Num(pos, if (decIndex != -1 || expIndex != -1) s.toString.toDouble - else upickle.core.Util.parseIntegralNum(s, decIndex, expIndex, index) + else upickle.core.ParseUtils.parseIntegralNum(s, decIndex, expIndex, index).toDouble ) def visitString(s: CharSequence, index: Int): Val = Val.Str(pos, s.toString) diff --git a/sjsonnet/src/sjsonnet/YamlRenderer.scala b/sjsonnet/src/sjsonnet/YamlRenderer.scala index b542fad2..ae55600a 100644 --- a/sjsonnet/src/sjsonnet/YamlRenderer.scala +++ b/sjsonnet/src/sjsonnet/YamlRenderer.scala @@ -8,7 +8,7 @@ import upickle.core.{ArrVisitor, ObjVisitor} class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayInObject: Boolean = false, - indent: Int = 2) extends BaseCharRenderer(_out, indent){ + indent: Int = 2) extends BaseCharRenderer[StringWriter](_out, indent){ var newlineBuffered = false var dashBuffered = false var afterKey = false @@ -20,7 +20,7 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayI elemBuilder.writeOutToIfLongerThan(_out, if (depth <= 0 || topLevel) 0 else 1000) } - private[this] def appendString(s: String) = { + private def appendString(s: String) = { val len = s.length var i = 0 elemBuilder.ensureLength(len) @@ -48,7 +48,7 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayI } depth -= 1 } else { - upickle.core.RenderUtils.escapeChar(unicodeCharBuilder, elemBuilder, s, true) + upickle.core.RenderUtils.escapeChar(unicodeCharBuilder, elemBuilder, s, escapeUnicode = true, wrapQuotes = true) } flushCharBuilder() _out @@ -81,7 +81,7 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), 
indentArrayI dashBuffered = false } - override def visitArray(length: Int, index: Int) = new ArrVisitor[StringWriter, StringWriter] { + override def visitArray(length: Int, index: Int): ArrVisitor[StringWriter, StringWriter] = new ArrVisitor[StringWriter, StringWriter] { var empty = true flushBuffer() @@ -96,13 +96,15 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayI if (dedentInObject) depth -= 1 dashBuffered = true - def subVisitor = YamlRenderer.this + def subVisitor: YamlRenderer = YamlRenderer.this + def visitValue(v: StringWriter, index: Int): Unit = { empty = false flushBuffer() newlineBuffered = true dashBuffered = true } + def visitEnd(index: Int) = { if (!dedentInObject) depth -= 1 if (empty) { @@ -116,7 +118,8 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayI _out } } - override def visitObject(length: Int, index: Int) = new ObjVisitor[StringWriter, StringWriter] { + + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[StringWriter, StringWriter] = new ObjVisitor[StringWriter, StringWriter] { var empty = true flushBuffer() if (!topLevel) depth += 1 @@ -124,8 +127,10 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayI if (afterKey) newlineBuffered = true - def subVisitor = YamlRenderer.this - def visitKey(index: Int) = YamlRenderer.this + def subVisitor: YamlRenderer = YamlRenderer.this + + def visitKey(index: Int): YamlRenderer = YamlRenderer.this + def visitKeyValue(s: Any): Unit = { empty = false flushBuffer() @@ -136,10 +141,12 @@ class YamlRenderer(_out: StringWriter = new java.io.StringWriter(), indentArrayI afterKey = true newlineBuffered = false } + def visitValue(v: StringWriter, index: Int): Unit = { newlineBuffered = true afterKey = false } + def visitEnd(index: Int) = { if (empty) { elemBuilder.ensureLength(2) diff --git a/sjsonnet/test/src-jvm/sjsonnet/BufferedRandomAccessFileTests.scala 
b/sjsonnet/test/src-jvm/sjsonnet/BufferedRandomAccessFileTests.scala index 0ad1d221..ba4be747 100644 --- a/sjsonnet/test/src-jvm/sjsonnet/BufferedRandomAccessFileTests.scala +++ b/sjsonnet/test/src-jvm/sjsonnet/BufferedRandomAccessFileTests.scala @@ -166,8 +166,8 @@ object BufferedRandomAccessFileTests extends TestSuite { } } - // Test content - val testContent = "Hello, World! This is a test file with various content to thoroughly test the BufferedRandomAccessFile." + // Test content + val testContent = "Hello, World! This is a test file with various content to thoroughly test the BufferedRandomAccessFile." test("bufferReloadsAndEdgeReads") { val bufferedFile = new BufferedRandomAccessFile(tempFile.getAbsolutePath, 15) diff --git a/sjsonnet/test/src/sjsonnet/DummyPath.scala b/sjsonnet/test/src/sjsonnet/DummyPath.scala index 7d65728b..79c80197 100644 --- a/sjsonnet/test/src/sjsonnet/DummyPath.scala +++ b/sjsonnet/test/src/sjsonnet/DummyPath.scala @@ -9,7 +9,7 @@ case class DummyPath(segments: String*) extends Path{ def debugRead(): Option[String] = None - def parent(): Path = DummyPath(segments.dropRight(1):_*) + def parent(): Path = DummyPath(segments.dropRight(1)*) def segmentCount(): Int = segments.length diff --git a/sjsonnet/test/src/sjsonnet/FormatTests.scala b/sjsonnet/test/src/sjsonnet/FormatTests.scala index b44b8484..e021a777 100644 --- a/sjsonnet/test/src/sjsonnet/FormatTests.scala +++ b/sjsonnet/test/src/sjsonnet/FormatTests.scala @@ -10,7 +10,7 @@ object FormatTests extends TestSuite{ val json = ujson.read(jsonStr) val formatted = Format.format(fmt, Materializer.reverse(null, json), dummyPos)( new EvalScope{ - def extVars = _ => None + def extVars: String => Option[Expr] = _ => None def wd: Path = DummyPath() def visitExpr(expr: Expr)(implicit scope: ValScope): Val = ??? def materialize(v: Val): Value = ??? 
diff --git a/sjsonnet/test/src/sjsonnet/OldRenderer.scala b/sjsonnet/test/src/sjsonnet/OldRenderer.scala index 33de0d9d..98156129 100644 --- a/sjsonnet/test/src/sjsonnet/OldRenderer.scala +++ b/sjsonnet/test/src/sjsonnet/OldRenderer.scala @@ -38,19 +38,23 @@ class OldRenderer(out: Writer = new java.io.StringWriter(), newlineBuffered = false commaBuffered = false } - override def visitArray(length: Int, index: Int) = new ArrVisitor[Writer, Writer] { + + override def visitArray(length: Int, index: Int): ArrVisitor[Writer, Writer] = new ArrVisitor[Writer, Writer] { var empty = true flushBuffer() out.append('[') newlineBuffered = true depth += 1 - def subVisitor = OldRenderer.this + + def subVisitor: OldRenderer = OldRenderer.this + def visitValue(v: Writer, index: Int): Unit = { empty = false flushBuffer() commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false newlineBuffered = false @@ -63,22 +67,27 @@ class OldRenderer(out: Writer = new java.io.StringWriter(), } } - override def visitObject(length: Int, index: Int) = new ObjVisitor[Writer, Writer] { + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[Writer, Writer] = new ObjVisitor[Writer, Writer] { var empty = true flushBuffer() out.append('{') newlineBuffered = true depth += 1 - def subVisitor = OldRenderer.this - def visitKey(index: Int) = OldRenderer.this + + def subVisitor: OldRenderer = OldRenderer.this + + def visitKey(index: Int): OldRenderer = OldRenderer.this + def visitKeyValue(v: Any): Unit = { empty = false flushBuffer() out.append(colonSnippet) } + def visitValue(v: Writer, index: Int): Unit = { commaBuffered = true } + def visitEnd(index: Int) = { commaBuffered = false newlineBuffered = false diff --git a/sjsonnet/test/src/sjsonnet/OldYamlRenderer.scala b/sjsonnet/test/src/sjsonnet/OldYamlRenderer.scala index 533f4ee4..659fc045 100644 --- a/sjsonnet/test/src/sjsonnet/OldYamlRenderer.scala +++ b/sjsonnet/test/src/sjsonnet/OldYamlRenderer.scala @@ -54,7 +54,8 @@ 
class OldYamlRenderer(out: StringWriter = new java.io.StringWriter(), indentArra newlineBuffered = false dashBuffered = false } - override def visitArray(length: Int, index: Int) = new ArrVisitor[StringWriter, StringWriter] { + + override def visitArray(length: Int, index: Int): ArrVisitor[StringWriter, StringWriter] = new ArrVisitor[StringWriter, StringWriter] { var empty = true flushBuffer() @@ -69,13 +70,15 @@ class OldYamlRenderer(out: StringWriter = new java.io.StringWriter(), indentArra if (dedentInObject) depth -= 1 dashBuffered = true - def subVisitor = OldYamlRenderer.this + def subVisitor: OldYamlRenderer = OldYamlRenderer.this + def visitValue(v: StringWriter, index: Int): Unit = { empty = false flushBuffer() newlineBuffered = true dashBuffered = true } + def visitEnd(index: Int) = { if (!dedentInObject) depth -= 1 if (empty) out.append("[]") @@ -84,7 +87,8 @@ class OldYamlRenderer(out: StringWriter = new java.io.StringWriter(), indentArra out } } - override def visitObject(length: Int, index: Int) = new ObjVisitor[StringWriter, StringWriter] { + + override def visitJsonableObject(length: Int, index: Int): ObjVisitor[StringWriter, StringWriter] = new ObjVisitor[StringWriter, StringWriter] { var empty = true flushBuffer() if (!topLevel) depth += 1 @@ -92,8 +96,10 @@ class OldYamlRenderer(out: StringWriter = new java.io.StringWriter(), indentArra if (afterKey) newlineBuffered = true - def subVisitor = OldYamlRenderer.this - def visitKey(index: Int) = OldYamlRenderer.this + def subVisitor: OldYamlRenderer = OldYamlRenderer.this + + def visitKey(index: Int): OldYamlRenderer = OldYamlRenderer.this + def visitKeyValue(s: Any): Unit = { empty = false flushBuffer() @@ -101,10 +107,12 @@ class OldYamlRenderer(out: StringWriter = new java.io.StringWriter(), indentArra afterKey = true newlineBuffered = false } + def visitValue(v: StringWriter, index: Int): Unit = { newlineBuffered = true afterKey = false } + def visitEnd(index: Int) = { if (empty) out.append("{}") 
newlineBuffered = false diff --git a/sjsonnet/test/src/sjsonnet/ParserTests.scala b/sjsonnet/test/src/sjsonnet/ParserTests.scala index 37da8c5b..0594d8a9 100644 --- a/sjsonnet/test/src/sjsonnet/ParserTests.scala +++ b/sjsonnet/test/src/sjsonnet/ParserTests.scala @@ -6,8 +6,8 @@ import Expr._ import fastparse.Parsed import Val.{True, Num} object ParserTests extends TestSuite{ - def parse(s: String, strictImportSyntax: Boolean = false) = fastparse.parse(s, new Parser(null, strictImportSyntax, mutable.HashMap.empty, mutable.HashMap.empty).document(_)).get.value._1 - def parseErr(s: String, strictImportSyntax: Boolean = false) = fastparse.parse(s, new Parser(null, strictImportSyntax, mutable.HashMap.empty, mutable.HashMap.empty).document(_), verboseFailures = true).asInstanceOf[Parsed.Failure].msg + def parse(s: String, strictImportSyntax: Boolean = false) = fastparse.parse(s, new Parser(null, strictImportSyntax, mutable.HashMap.empty, mutable.HashMap.empty).document(using _)).get.value._1 + def parseErr(s: String, strictImportSyntax: Boolean = false) = fastparse.parse(s, new Parser(null, strictImportSyntax, mutable.HashMap.empty, mutable.HashMap.empty).document(using _), verboseFailures = true).asInstanceOf[Parsed.Failure].msg val dummyFS = new FileScope(null) def pos(i: Int) = new Position(dummyFS, i) def tests = Tests{