27 commits
9dbe413
allow cloning of selector mut
lfarrel6 Feb 23, 2021
7c6ee4a
add cargo config
lfarrel6 Feb 23, 2021
519d03e
delete config
lfarrel6 Feb 23, 2021
2798fa1
wrap the option return type in a result to allow for errors during re…
lfarrel6 Jun 21, 2021
c1b2f01
add test to make sure error is surfaced from replace_with
lfarrel6 Jun 21, 2021
c0f7504
Merge pull request #1 from evervault/liam/ev-752-update-jsonpath-to-r…
lfarrel6 Jun 21, 2021
78039de
Create pull_request_template.md
boilsquid Jun 29, 2021
1557793
add function to use jsonpath selectors to replace fields and expose t…
lfarrel6 Aug 13, 2021
687eea6
Merge pull request #2 from evervault/liam/eng-1320-expose-computed-pa…
lfarrel6 Aug 16, 2021
9237076
Bump version
lfarrel6 Aug 16, 2021
b87e75f
pull in upstream changes, resolve conflicts, apply required tweaks fo…
lfarrel6 May 4, 2022
2a58321
add tests for error in replace with handler for jsonpath selectors
lfarrel6 May 4, 2022
048e5e9
Merge pull request #4 from evervault/liam/merge-upstream-changes
lfarrel6 May 4, 2022
fcfa512
bump cargo version
lfarrel6 May 5, 2022
d1636de
experiment with async
Oct 13, 2023
b93d68f
make changes
Oct 19, 2023
abcd313
fmt and upgrade edition
Oct 19, 2023
ebc5f10
clean up repo
Oct 19, 2023
4781dbb
Merge pull request #5 from evervault/eoinpm/clean-up-repo
e-moran Oct 19, 2023
412bf1d
Merge branch 'master' of https://github.com/evervault/jsonpath into e…
Oct 19, 2023
15ef316
merge
Oct 19, 2023
163a3e6
unintended change
Oct 19, 2023
fd03933
add comment to replace with async
Oct 19, 2023
e0c0df3
fix deps
Oct 19, 2023
b8efb10
improve trait bounds for select
Oct 19, 2023
a4b5f89
rename
Oct 19, 2023
6969c48
Merge pull request #6 from evervault/eoinpm/etr-1303-update-jsonpath-…
e-moran Oct 19, 2023
9 changes: 9 additions & 0 deletions .github/pull_request_template.md
@@ -0,0 +1,9 @@
# Why
Add a short description about this PR.
Add links to issues, tech plans etc.

# Diagram
Does this PR require a change to the application or network diagram?

# How
Describe how you've approached the problem
15 changes: 14 additions & 1 deletion Cargo.toml
@@ -1,6 +1,7 @@
[package]
name = "jsonpath_lib"
version = "0.3.0"
edition = "2021"
authors = ["Changseok Han <freestrings@gmail.com>"]

description = "It is JsonPath engine written in Rust. it provide a similar API interface in Webassembly and Javascript too. - Webassembly Demo: https://freestrings.github.io/jsonpath"
@@ -17,12 +18,16 @@ license = "MIT"
travis-ci = { repository = "freestrings/jsonpath", branch = "master" }

[dependencies]
futures = "0.3"
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["preserve_order"] }

[dev-dependencies]
criterion = { version = "0.5.1", features = ["html_reports", "async_tokio"] }
env_logger = "0.8"
tokio = { version = "1.33.0", features = ["macros"] }
tokio-test = "0.4.3"

[lib]
name = "jsonpath_lib"
@@ -31,4 +36,12 @@ crate-type = ["cdylib", "rlib"]

#[profile.release]
#debug = true
#lto = false
#lto = false

[[bench]]
name = "async"
harness = false

[[bench]]
name = "sync_mut"
harness = false
167 changes: 167 additions & 0 deletions benches/async.rs
@@ -0,0 +1,167 @@
extern crate jsonpath_lib as jsonpath;
#[macro_use]
extern crate serde_json;

use std::{
pin::Pin,
sync::{Arc, Mutex},
task::{Context, Poll},
};

use common::read_json;
use criterion::{criterion_group, criterion_main, BenchmarkId};
use futures::Future;
use jsonpath::{MultiJsonSelectorMutWithMetadata, PathParserWithMetadata};
use serde_json::Value;

mod common;

#[derive(Clone)]
struct ValueFuture<T> {
inner: Arc<Mutex<Option<T>>>,
}

impl<T> ValueFuture<T> {
fn new() -> Self {
ValueFuture {
inner: Arc::new(Mutex::new(None)),
}
}

fn set_value(&self, value: T) {
let mut inner = self.inner.lock().unwrap();
*inner = Some(value);
}
}

impl<T: Clone> Future for ValueFuture<T> {
type Output = T;

fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let inner = self.inner.lock().unwrap();
if let Some(value) = inner.as_ref() {
Poll::Ready(value.clone())
} else {
// This future isn't ready yet, so we'll notify the context when it is.
cx.waker().wake_by_ref();
Poll::Pending
}
}
}

struct MutationRequest {
bags: Mutex<Vec<Field>>,
}

impl MutationRequest {
fn new() -> Self {
Self {
bags: Mutex::new(Vec::new()),
}
}

fn new_field(&self, input: Value) -> Field {
let bag = Field::new(input);
self.bags.lock().unwrap().push(bag.clone());
bag
}

async fn send_request(&self) {
let mut bags = self.bags.lock().unwrap();
for bag in bags.iter_mut() {
bag.value.set_value(bag.input.take().unwrap());
}
}
}

#[derive(Clone)]
struct Field {
input: Option<Value>,
value: ValueFuture<Value>,
}

impl Field {
fn new(input: Value) -> Self {
Self {
input: Some(input),
value: ValueFuture::new(),
}
}

pub fn value(self) -> ValueFuture<Value> {
self.value
}
}

async fn async_run(mut selector_mut: MultiJsonSelectorMutWithMetadata<'_, &str>, json: Value) {
let mut_request = Arc::new(MutationRequest::new());

let result_futures = selector_mut
.replace_with_async(json, |v, _| {
let bag: Field = mut_request.new_field(v);

Box::pin(async move {
let val = bag.value().await;
Some(val)
})
})
.unwrap();

mut_request.send_request().await;

let _result = result_futures.await.unwrap();
}

fn setup_async_benchmark(c: &mut criterion::Criterion) {
let t1_json = read_json("./benchmark/example.json");
let t1_parser = PathParserWithMetadata::compile("$.store..price", "one").unwrap();
let t1_parser_two = PathParserWithMetadata::compile("$.store..author", "two").unwrap();
let t1_selector_mut =
MultiJsonSelectorMutWithMetadata::new_multi_parser(vec![t1_parser, t1_parser_two]);

// let big_array = read_json("./benchmark/big_array.json");
let t2_json = read_json("./benchmark/big_example.json");
let t2_parser = PathParserWithMetadata::compile("$.store.book[*].author", "one").unwrap();
let t2_parser_two = PathParserWithMetadata::compile("$.store.author", "two").unwrap();
let t2_selector_mut =
MultiJsonSelectorMutWithMetadata::new_multi_parser(vec![t2_parser, t2_parser_two]);

let runtime = tokio::runtime::Builder::new_current_thread()
.build()
.unwrap();

c.bench_with_input(
BenchmarkId::new("async_selector_mut", "Json"),
&(t1_selector_mut, t1_json),
|b, (s, v)| {
// Insert a call to `to_async` to convert the bencher to async mode.
// The timing loops are the same as with the normal bencher.
b.to_async(&runtime).iter_batched(
|| (s.clone(), v.clone()),
|(s, v)| async {
async_run(s, v).await;
},
criterion::BatchSize::SmallInput,
);
},
);

c.bench_with_input(
BenchmarkId::new("async_selector_mut", "BigJson"),
&(t2_selector_mut, t2_json),
|b, (s, v)| {
// Insert a call to `to_async` to convert the bencher to async mode.
// The timing loops are the same as with the normal bencher.
b.to_async(&runtime).iter_batched(
|| (s.clone(), v.clone()),
|(s, v)| async {
async_run(s, v).await;
},
criterion::BatchSize::LargeInput,
);
},
);
}

criterion_group!(benches, setup_async_benchmark);
criterion_main!(benches);
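The benchmark above drives the new async replacement path end to end through a mock request. For reference, a minimal standalone sketch of the same API, using only the calls visible in the benchmark; the immediately-ready future stands in for a real external call and the document literal is illustrative:

```rust
extern crate jsonpath_lib as jsonpath;

use jsonpath::{MultiJsonSelectorMutWithMetadata, PathParserWithMetadata};
use serde_json::{json, Value};

fn main() {
    // Each parser carries a metadata tag that is handed back to the replacement handler.
    let prices = PathParserWithMetadata::compile("$.store..price", "price").unwrap();
    let authors = PathParserWithMetadata::compile("$.store..author", "author").unwrap();
    let mut selector =
        MultiJsonSelectorMutWithMetadata::new_multi_parser(vec![prices, authors]);

    let doc = json!({"store": {"book": [{"author": "A", "price": 10.0}]}});

    // replace_with_async passes each matched value (plus its metadata) to the
    // closure, which returns a boxed future resolving to the replacement value.
    let fut = selector
        .replace_with_async(doc, |_value, _metadata| {
            Box::pin(async move { Some(Value::String("redacted".to_string())) })
        })
        .unwrap();

    // The replacement futures here are immediately ready, so a plain executor suffices.
    let redacted = futures::executor::block_on(fut).unwrap();
    println!("{}", redacted);
}
```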
52 changes: 52 additions & 0 deletions benches/common.rs
@@ -0,0 +1,52 @@
extern crate env_logger;
extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

use std::io::Read;

use serde_json::Value;

use self::jsonpath::{JsonSelector, PathParser};

#[allow(dead_code)]
pub fn setup() {
let _ = env_logger::try_init();
}

#[allow(dead_code)]
pub fn read_json(path: &str) -> Value {
let mut f = std::fs::File::open(path).unwrap();
let mut contents = String::new();
f.read_to_string(&mut contents).unwrap();
serde_json::from_str(&contents).unwrap()
}

#[allow(dead_code)]
pub fn read_contents(path: &str) -> String {
let mut f = std::fs::File::open(path).unwrap();
let mut contents = String::new();
f.read_to_string(&mut contents).unwrap();
contents
}

#[allow(dead_code)]
pub fn select_and_then_compare(path: &str, json: Value, target: Value) {
let parser = PathParser::compile(path).unwrap();
let mut selector = JsonSelector::new(parser);
let result = selector.value(&json).select_as::<Value>().unwrap();
assert_eq!(
result,
match target {
Value::Array(vec) => vec,
_ => panic!("Give me the Array!"),
},
"{}",
path
);
}

#[allow(dead_code)]
pub fn compare_result(result: Vec<&Value>, target: Value) {
let result = serde_json::to_value(result).unwrap();
assert_eq!(result, target);
}
113 changes: 113 additions & 0 deletions benches/sync_mut.rs
@@ -0,0 +1,113 @@
extern crate jsonpath_lib as jsonpath;
extern crate serde_json;

use common::read_json;
use criterion::{criterion_group, criterion_main, BenchmarkId};

use jsonpath::{JsonSelectorMut, PathParser};
use serde_json::Value;

mod common;

fn selector_mut(mut selector_mut: JsonSelectorMut, json: Value) -> Value {
let mut nums = Vec::new();
let result = selector_mut
.value(json)
.replace_with(&mut |v| {
if let Value::Number(n) = v {
nums.push(n.as_f64().unwrap());
}
Ok(Some(Value::String("a".to_string())))
})
.unwrap()
.take()
.unwrap();

result
}

fn setup_async_benchmark(c: &mut criterion::Criterion) {
let t1_json = read_json("./benchmark/example.json");
let t1_parser = PathParser::compile("$.store..price").unwrap();
let t1_selector_mut = JsonSelectorMut::new(t1_parser.clone());
let t1_selector_mut_two = JsonSelectorMut::new(t1_parser);

let t2_json = read_json("./benchmark/big_example.json");
let t2_parser = PathParser::compile("$.store.book[*].author").unwrap();
let t2_parser_two = PathParser::compile("$.store.author").unwrap();
let t2_selector_mut = JsonSelectorMut::new(t2_parser);
let t2_selector_mut_two = JsonSelectorMut::new(t2_parser_two);

let runtime = tokio::runtime::Builder::new_current_thread()
.build()
.unwrap();

c.bench_with_input(
BenchmarkId::new("selector_mut", "Json"),
&(t1_selector_mut.clone(), t1_json.clone()),
|b, (s, v)| {
// Insert a call to `to_async` to convert the bencher to async mode.
// The timing loops are the same as with the normal bencher.
b.to_async(&runtime).iter_batched(
|| (s.clone(), v.clone()),
|(s, v)| async {
selector_mut(s, v);
},
criterion::BatchSize::SmallInput,
);
},
);

c.bench_with_input(
BenchmarkId::new("selector_mut", "BigJson"),
&(t2_selector_mut.clone(), t2_json.clone()),
|b, (s, v)| {
// Insert a call to `to_async` to convert the bencher to async mode.
// The timing loops are the same as with the normal bencher.
b.to_async(&runtime).iter_batched(
|| (s.clone(), v.clone()),
|(s, v)| async {
selector_mut(s, v);
},
criterion::BatchSize::LargeInput,
);
},
);

c.bench_with_input(
BenchmarkId::new("double_selector_mut", "Json"),
&(t1_selector_mut, t1_selector_mut_two, t1_json),
|b, (s, s2, v)| {
// Insert a call to `to_async` to convert the bencher to async mode.
// The timing loops are the same as with the normal bencher.
b.to_async(&runtime).iter_batched(
|| (s.clone(), s2.clone(), v.clone()),
|(s, s2, v)| async {
let v = selector_mut(s, v);
let _ = selector_mut(s2, v);
},
criterion::BatchSize::SmallInput,
);
},
);

c.bench_with_input(
BenchmarkId::new("double_selector_mut", "BigJson"),
&(t2_selector_mut, t2_selector_mut_two, t2_json),
|b, (s, s2, v)| {
// Insert a call to `to_async` to convert the bencher to async mode.
// The timing loops are the same as with the normal bencher.
b.to_async(&runtime).iter_batched(
|| (s.clone(), s2.clone(), v.clone()),
|(s, s2, v)| async {
let v = selector_mut(s, v);
let _ = selector_mut(s2, v);
},
criterion::BatchSize::LargeInput,
);
},
);
}

criterion_group!(benches, setup_async_benchmark);
criterion_main!(benches);
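The synchronous benchmark exercises the same replacement flow without futures. A minimal sketch of `replace_with` on its own, again mirroring only the call chain shown in the benchmark; the document literal is illustrative, and the `Result` return is what the earlier commits added so a handler error is surfaced rather than swallowed:

```rust
extern crate jsonpath_lib as jsonpath;

use jsonpath::{JsonSelectorMut, PathParser};
use serde_json::{json, Value};

fn main() {
    let parser = PathParser::compile("$.store..price").unwrap();
    let mut selector = JsonSelectorMut::new(parser);

    let doc = json!({
        "store": {
            "book": [{"title": "t", "price": 8.95}],
            "bicycle": {"price": 19.95}
        }
    });

    // The handler sees each matched value; Ok(Some(v)) substitutes it, and an
    // Err returned here propagates out of replace_with instead of being lost.
    let redacted = selector
        .value(doc)
        .replace_with(&mut |_old| Ok(Some(Value::String("hidden".to_string()))))
        .unwrap()
        .take()
        .unwrap();

    assert_eq!(redacted["store"]["bicycle"]["price"], json!("hidden"));
}
```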
5 changes: 0 additions & 5 deletions lua/.gitignore

This file was deleted.
