Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,30 @@ filesystem, you have to provide an explicit import callback that you can
use to resolve imports yourself (whether through Node's `fs` module, or
by emulating a filesystem in-memory)

#### Async imports

`SjsonnetMain.interpret` is synchronous, which is awkward in browsers where
files come from `fetch` or `FileReader`. Use `SjsonnetMain.interpretAsync`
instead: the loader returns a `Promise` and the call returns a `Promise` of
the result. Imports are statically discovered from each parsed file's AST,
loaded concurrently, then evaluated synchronously against the populated cache.

```javascript
const result = await SjsonnetMain.interpretAsync(
"local lib = import 'lib.libsonnet'; lib.greet('world')",
{}, // extVars
{}, // tlaVars
"", // initial working directory
(wd, imported) => imported, // resolver, same shape as `interpret`
// loader: returns a Promise of the file contents (string for `import` /
// `importstr`, or bytes for `importbin`)
async (path, binary) => {
const response = await fetch("/files/" + path);
return binary ? new Uint8Array(await response.arrayBuffer()) : await response.text();
}
);
```

### Running deeply recursive Jsonnet programs

The depth of recursion is limited by running environment stack size. You can run Sjsonnet with increased
Expand Down
270 changes: 201 additions & 69 deletions sjsonnet/src-js/sjsonnet/SjsonnetMain.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@ package sjsonnet
import sjsonnet.stdlib.NativeRegex

import scala.collection.mutable
import scala.concurrent.Future
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.Thenable.Implicits._
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
import scala.scalajs.js.typedarray.{ArrayBuffer, Int8Array, Uint8Array}

Expand Down Expand Up @@ -49,6 +53,91 @@ object SjsonnetMain {
case _ => None
}

/** Normalise a JS import-loader result into a [[ResolvedFile]] (text or binary). */
private def toResolvedFile(path: String, value: Any, binaryData: Boolean): ResolvedFile =
  value match {
    case text: String => StaticResolvedFile(text)
    case bytes: Array[Byte] => StaticBinaryResolvedFile(bytes)
    case other =>
      // Accept JS-native binary shapes (Uint8Array, ArrayBuffer, plain number[])
      // via toBytesFromJs; anything else is a loader contract violation.
      // NOTE(review): `binaryData` is not consulted here — the loader's return
      // type alone decides text vs. binary.
      toBytesFromJs(other) match {
        case Some(bytes) => StaticBinaryResolvedFile(bytes)
        case None =>
          val got = if (other == null) "null" else other.getClass.getName
          val msg = s"Import loader for '$path' must return a string or byte array, got: $got"
          js.Dynamic.global.console.error(msg)
          throw js.JavaScriptException(msg)
      }
  }

/** Importer used as the preload parent; only its `resolve` is expected to run. */
private def jsResolveImporter(importResolver: js.Function2[String, String, String]): Importer =
  new Importer {
    def resolve(docBase: Path, importName: String): Option[Path] = {
      // The JS resolver signals "not found" by returning null; Option(...)
      // maps that to None and wraps anything else.
      val resolved = importResolver(docBase.asInstanceOf[JsVirtualPath].path, importName)
      Option(resolved).map(p => JsVirtualPath(p))
    }
    def read(path: Path, binaryData: Boolean): Option[ResolvedFile] =
      // All file I/O happens in the preload phase; reaching this method is a bug.
      throw new RuntimeException(
        s"Importer.read should not be called during async preload (path=$path)"
      )
  }

/**
 * Coerce a JS object with string values into a `Map[String, String]`. Walks the JS
 * dictionary directly — no ujson round-trip — so there is no intermediate ujson tree,
 * no `.obj.toMap` copy, and no trailing `.map` over an immutable map.
 */
private def parseStringMap(label: String, value: js.Any): Map[String, String] =
  try {
    val entries = value.asInstanceOf[js.Dictionary[js.Any]]
    val builder = Map.newBuilder[String, String]
    builder.sizeHint(entries.size)
    entries.foreach { case (key, raw) =>
      // Ascribe to Any so the String match is a plain JVM/JS type test.
      (raw: Any) match {
        case str: String => builder += key -> str
        case _ =>
          throw js.JavaScriptException(s"$label '$key' must be a string value, got non-string")
      }
    }
    builder.result()
  } catch {
    // Our own validation errors pass through untouched.
    case e: js.JavaScriptException => throw e
    case e: Exception =>
      val msg = s"Failed to parse ${label.toLowerCase}: ${e.getMessage}"
      js.Dynamic.global.console.error(msg, e.asInstanceOf[js.Any])
      throw js.JavaScriptException(msg)
  }

/**
 * Shared synchronous evaluation path: build an [[Interpreter]] over the given
 * importer and render `text` to a JS value, throwing a `js.JavaScriptException`
 * on failure (after logging to the browser console).
 */
private def runInterpret(
    text: String,
    parsedExtVars: Map[String, String],
    parsedTlaVars: Map[String, String],
    wd0: String,
    importer: Importer,
    preserveOrder: Boolean): js.Any = {
  // Standard library extended with the native regex functions.
  val stdModule =
    new sjsonnet.stdlib.StdLibModule(nativeFunctions = Map.from(NativeRegex.functions)).module
  val interpreter = new Interpreter(
    parsedExtVars,
    parsedTlaVars,
    JsVirtualPath(wd0),
    importer,
    parseCache = new DefaultParseCache,
    settings = new Settings(preserveOrder = preserveOrder),
    std = stdModule
  )
  interpreter.interpret0(text, JsVirtualPath("(memory)"), ujson.WebJson.Builder) match {
    case Right(value) => value
    case Left(msg) =>
      js.Dynamic.global.console.error("Sjsonnet evaluation error:", msg)
      throw js.JavaScriptException(msg)
  }
}

@JSExport
def interpret(
text: String,
Expand All @@ -59,85 +148,128 @@ object SjsonnetMain {
importLoader: js.Function2[String, Boolean, Any],
preserveOrder: Boolean = false): js.Any = {
try {
val parsedExtVars =
try {
ujson.WebJson.transform(extVars, ujson.Value).obj.toMap.map {
case (k, ujson.Str(v)) => (k, v)
case (k, _) =>
throw js.JavaScriptException(
s"External variable '$k' must be a string value, got non-string"
)
val parsedExtVars = parseStringMap("External variable", extVars)
val parsedTlaVars = parseStringMap("Top-level argument", tlaVars)

val importer = new Importer {
def resolve(docBase: Path, importName: String): Option[Path] =
importResolver(docBase.asInstanceOf[JsVirtualPath].path, importName) match {
case null => None
case s => Some(JsVirtualPath(s))
}
} catch {
case e: js.JavaScriptException => throw e
case e: Exception =>
val msg = s"Failed to parse external variables: ${e.getMessage}"
js.Dynamic.global.console.error(msg, e.asInstanceOf[js.Any])
throw js.JavaScriptException(msg)
def read(path: Path, binaryData: Boolean): Option[ResolvedFile] =
Some(
toResolvedFile(
path.asInstanceOf[JsVirtualPath].path,
importLoader(path.asInstanceOf[JsVirtualPath].path, binaryData),
binaryData
)
)
}

runInterpret(text, parsedExtVars, parsedTlaVars, wd0, importer, preserveOrder)
} catch {
case e: js.JavaScriptException => throw e
case e: Exception =>
val msg = s"Sjsonnet internal error: ${e.getClass.getName}: ${e.getMessage}"
js.Dynamic.global.console.error(msg, e.asInstanceOf[js.Any])
throw js.JavaScriptException(msg)
}
}

/**
* Async variant of [[interpret]]. Accepts an `importLoader` that returns a `Promise` of the file
* contents, and returns a `Promise` resolving to the rendered output.
*
* Imports are eagerly front-loaded: every `import`, `importstr`, and `importbin` reachable from
* the entry source (plus from any extVar/tlaVar code snippets) is statically discovered and
* loaded before evaluation begins. This includes imports inside branches the evaluator will never
* force, e.g. `if false then import 'x' else 1` will still ask the loader for `x`. The tradeoff
* is that all I/O happens up front, which is what lets evaluation run synchronously.
*
* - Loader rejection (missing file, network error, etc.) fails the returned Promise.
* - A parse error on a discovered (non-entry) file is tolerated; it only surfaces if evaluation
* actually forces that branch.
* - The entry source's own parse error is reported through the normal `interpret0` formatting
* path so the error shape and location info match synchronous `interpret`.
*
* Each discovered file is parsed once during preload and again referenced by the evaluator; the
* parsed AST is shared so fastparse runs only once per file.
*/
@JSExport
def interpretAsync(
text: String,
extVars: js.Any,
tlaVars: js.Any,
wd0: String,
importResolver: js.Function2[String, String, String],
importLoader: js.Function2[String, Boolean, js.Promise[Any]],
preserveOrder: Boolean = false): js.Promise[js.Any] = {
try {
val parsedExtVars = parseStringMap("External variable", extVars)
val parsedTlaVars = parseStringMap("Top-level argument", tlaVars)

val parentImporter = jsResolveImporter(importResolver)
val preloader = new Preloader(parentImporter)
val wd = JsVirtualPath(wd0)
val entryPath = JsVirtualPath("(memory)")

// Don't propagate the entry's parse error here — let runInterpret surface it via
// interpret0 so the message goes through the same Error.formatError path as synchronous
// interpret (root frame, "(memory):line:col", etc.). If parsing the entry fails we still
// get an empty pending queue and a fast path to runInterpret, which fails identically.
preloader.add(entryPath, StaticResolvedFile(text), ImportKind.Code)

// ext/tla vars are parsed as Jsonnet code (Interpreter.parseVar) and may contain imports.
// Feed each value through the preloader using the same synthetic path layout so that
// discovered imports resolve against `wd`, matching the synchronous evaluator.
def discoverVarImports(prefix: String, vars: Map[String, String]): Unit =
vars.foreach { case (k, v) =>
val varPath = wd / Util.wrapInLessThanGreaterThan(s"$prefix-var $k")
// Ignore parse errors here: Interpreter.parseVar will surface them at evaluation time
// with a proper stack frame if the variable is actually referenced.
preloader.add(varPath, StaticResolvedFile(v), ImportKind.Code)
}
discoverVarImports("ext", parsedExtVars)
discoverVarImports("tla", parsedTlaVars)

val parsedTlaVars =
try {
ujson.WebJson.transform(tlaVars, ujson.Value).obj.toMap.map {
case (k, ujson.Str(v)) => (k, v)
case (k, _) =>
throw js.JavaScriptException(
s"Top-level argument '$k' must be a string value, got non-string"
)
}
} catch {
case e: js.JavaScriptException => throw e
case e: Exception =>
val msg = s"Failed to parse top-level arguments: ${e.getMessage}"
js.Dynamic.global.console.error(msg, e.asInstanceOf[js.Any])
throw js.JavaScriptException(msg)
def loadOne(p: Preloader.Pending): Future[Unit] = {
val pathStr = p.path.asInstanceOf[JsVirtualPath].path
val promise = importLoader(pathStr, p.binaryData)
// implicit Thenable.Implicits converts Promise[Any] to Future[Any]
(promise: Future[Any]).map { value =>
val resolved = toResolvedFile(pathStr, value, p.binaryData)
// Ignore parse errors on discovered imports: Jsonnet evaluation is lazy, so a parse
// error in `if false then import 'bad' else 1` should not fail the whole evaluation.
// If the branch is forced at runtime, the interpreter surfaces the error there.
preloader.add(p.path, resolved, p.kind)
()
}
}

val interp = new Interpreter(
parsedExtVars,
parsedTlaVars,
JsVirtualPath(wd0),
new Importer {
def resolve(docBase: Path, importName: String): Option[Path] =
importResolver(docBase.asInstanceOf[JsVirtualPath].path, importName) match {
case null => None
case s => Some(JsVirtualPath(s))
}
def read(path: Path, binaryData: Boolean): Option[ResolvedFile] =
importLoader(path.asInstanceOf[JsVirtualPath].path, binaryData) match {
case s: String => Some(StaticResolvedFile(s))
case arr: Array[Byte] => Some(StaticBinaryResolvedFile(arr))
case other =>
// Handle JS-native binary types: Uint8Array, ArrayBuffer, or plain JS number[]
toBytesFromJs(other) match {
case Some(bytes) => Some(StaticBinaryResolvedFile(bytes))
case None =>
val msg =
s"Import loader for '${path}' must return a string or byte array, got: ${
if (other == null) "null" else other.getClass.getName
}"
js.Dynamic.global.console.error(msg)
throw js.JavaScriptException(msg)
}
}
},
parseCache = new DefaultParseCache,
settings = new Settings(preserveOrder = preserveOrder),
std =
new sjsonnet.stdlib.StdLibModule(nativeFunctions = Map.from(NativeRegex.functions)).module
)
interp.interpret0(text, JsVirtualPath("(memory)"), ujson.WebJson.Builder) match {
case Left(msg) =>
js.Dynamic.global.console.error("Sjsonnet evaluation error:", msg)
throw js.JavaScriptException(msg)
case Right(v) => v
def loop(): Future[Unit] = {
val batch = preloader.takePendingImports()
if (batch.isEmpty) Future.successful(())
else Future.sequence(batch.map(loadOne)).flatMap(_ => loop())
}

val result: Future[js.Any] = loop().map { _ =>
Comment thread
stephenamar-db marked this conversation as resolved.
runInterpret(
text,
parsedExtVars,
parsedTlaVars,
wd0,
preloader.importer,
preserveOrder
)
}
result.toJSPromise
} catch {
case e: js.JavaScriptException => throw e
case e: js.JavaScriptException => js.Promise.reject(e.exception)
case e: Exception =>
val msg = s"Sjsonnet internal error: ${e.getClass.getName}: ${e.getMessage}"
js.Dynamic.global.console.error(msg, e.asInstanceOf[js.Any])
throw js.JavaScriptException(msg)
js.Promise.reject(msg)
}
}
}
Expand Down
56 changes: 56 additions & 0 deletions sjsonnet/src/sjsonnet/ImportFinder.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
package sjsonnet

import scala.collection.mutable

/** The kind of import expression that referenced a file. */
sealed trait ImportKind {

  /** Whether the file should be read as raw bytes (`importbin`) vs. text (`import`/`importstr`). */
  def binaryData: Boolean

  /** Whether the loaded file is itself Jsonnet code that may contain further imports. */
  def isCode: Boolean
}

object ImportKind {
  /** An `import` expression: the file is Jsonnet code, read as text, and may import further files. */
  case object Code extends ImportKind {
    def binaryData: Boolean = false
    def isCode: Boolean = true
  }
  /** An `importstr` expression: the file is an opaque text blob with no nested imports. */
  case object Str extends ImportKind {
    def binaryData: Boolean = false
    def isCode: Boolean = false
  }
  /** An `importbin` expression: the file is raw bytes with no nested imports. */
  case object Bin extends ImportKind {
    def binaryData: Boolean = true
    def isCode: Boolean = false
  }
}

/**
 * Statically scans an [[Expr]] tree for every `import`, `importstr`, and `importbin` node.
 * [[Preloader]] uses the result to discover the transitive set of files that must be
 * loaded before evaluation can run synchronously.
 */
object ImportFinder {

  final case class Found(value: String, kind: ImportKind)

  /** Return every import expression reachable in `expr`, in traversal order. */
  def collect(expr: Expr): Seq[Found] = {
    val found = mutable.ArrayBuffer.empty[Found]
    // ExprTransform supplies a generic recursive walk; we record each import
    // node and otherwise leave the tree untouched by delegating to `rec`.
    val scanner = new ExprTransform {
      override def transform(e: Expr): Expr = {
        e match {
          case Expr.Import(_, v) => found += Found(v, ImportKind.Code)
          case Expr.ImportStr(_, v) => found += Found(v, ImportKind.Str)
          case Expr.ImportBin(_, v) => found += Found(v, ImportKind.Bin)
          case _ =>
        }
        rec(e)
      }
    }
    scanner.transform(expr)
    found.toSeq
  }
}
Loading
Loading