Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
77 changes: 77 additions & 0 deletions benchmark/example_deeper.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
{
"store": {
"book": [
{
"category": "reference",
"author": "Nigel Rees",
"title": "Sayings of the Century",
"price": 8.95,
"reviews": [
{
"vote": 3.2,
"user": "Angela"
},
{
"vote": 3.5,
"user": "Eric"
}
]
},
{
"category": "fiction",
"author": "Evelyn Waugh",
"title": "Sword of Honour",
"price": 12.99,
"reviews": [
{
"vote": 5.0,
"user": "Ruth"
},
{
"vote": 2.0,
"user": "Philip"
}
]
},
{
"category": "fiction",
"author": "Herman Melville",
"title": "Moby Dick",
"isbn": "0-553-21311-3",
"price": 8.99,
"reviews": [
{
"vote": 4.0,
"user": "Carol"
}
]
},
{
"category": "fiction",
"author": "J. R. R. Tolkien",
"title": "The Lord of the Rings",
"isbn": "0-395-19395-8",
"price": 22.99,
"reviews": [
{
"vote": 5.0,
"user": "Isaac"
},
{
"vote": 4.8,
"user": "Chris"
},
{
"vote": 4.3,
"user": "Frank"
}
]
}
],
"bicycle": {
"color": "red",
"price": 19.95
}
},
"expensive": 10
}
2 changes: 1 addition & 1 deletion src/paths/parser_node_visitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ pub trait ParserNodeVisitor<'a> {
| ParseToken::Bool(_) => {
token_handler.handle(&parse_node.token, parse_value_reader);
}
ParseToken::In | ParseToken::Leaves => {
ParseToken::In | ParseToken::Leaves | ParseToken::Parent => {
if let Some(n) = &parse_node.left {
self.visit(&*n, token_handler, parse_value_reader);
}
Expand Down
70 changes: 67 additions & 3 deletions src/paths/path_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,10 @@ impl<'a> ParserImpl<'a> {
self.eat_token();
self.paths_dot(prev)
}
Ok(Token::Caret(_)) => {
self.eat_token();
self.paths_caret(prev)
}
Ok(Token::OpenArray(_)) => {
self.eat_token();
self.eat_whitespace();
Expand All @@ -98,12 +102,18 @@ impl<'a> ParserImpl<'a> {

fn paths_dot(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    debug!("#paths_dot");
    // A '.' was just consumed: parse the single step that follows it,
    // then keep consuming any further path segments.
    self.paths(self.path_dot(prev)?)
}

fn paths_caret(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    debug!("#paths_caret");
    // A '^' was just consumed: build the parent step it introduces,
    // then keep consuming any further path segments.
    self.paths(self.path_caret(prev)?)
}

fn path(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
debug!("#path");
fn path_dot(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
debug!("#path_dot");
match self.token_reader.peek_token() {
Ok(Token::Dot(_)) => self.path_leaves(prev),
Ok(Token::Asterisk(_)) => self.path_in_all(prev),
Expand All @@ -116,6 +126,17 @@ impl<'a> ParserImpl<'a> {
}
}

fn path_caret(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    debug!("#path_caret");
    // The caller already consumed the '^'; dispatch on the token that
    // follows it. Each helper consumes whatever tokens it needs.
    match self.token_reader.peek_token() {
        Ok(Token::Key(_)) => self.path_parent_key(prev),
        Ok(Token::OpenArray(_)) => self.path_parent_array(prev),
        Ok(Token::Asterisk(_)) => self.path_parent_all(prev),
        Ok(Token::Caret(_)) => self.path_parent_parent(prev),
        _ => Err(self.token_reader.to_error()),
    }
}

fn path_leaves(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
debug!("#path_leaves");
self.eat_token();
Expand Down Expand Up @@ -172,6 +193,49 @@ impl<'a> ParserImpl<'a> {
})
}

/// Handles `^*`: wraps `prev` in a `Parent` node whose right child
/// selects all values (`ParseToken::All`).
#[allow(clippy::unnecessary_wraps)]
fn path_parent_all(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    // Bug fix: the debug label previously said "#path_parent_key",
    // copy-pasted from the sibling function, which made trace logs misleading.
    debug!("#path_parent_all");
    // Consume the '*' the caller only peeked at.
    self.eat_token();
    Ok(ParserNode {
        token: ParseToken::Parent,
        left: Some(Box::new(prev)),
        right: Some(Box::new(self.create_node(ParseToken::All))),
    })
}

// A bare `^` step: wrap the tree built so far in a `Parent` node with no
// right-hand selector. Any following token is left for the next pass.
#[allow(clippy::unnecessary_wraps)]
fn path_parent_parent(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    debug!("#path_parent_parent");
    let parent_node = ParserNode {
        token: ParseToken::Parent,
        left: Some(Box::new(prev)),
        right: None,
    };
    Ok(parent_node)
}

/// Handles `^key`: wraps `prev` in a `Parent` node whose right child is
/// the parsed key lookup.
///
/// Fix: removed the dead `#[allow(clippy::unnecessary_wraps)]` — this
/// function is genuinely fallible (`self.key()?` can propagate an error),
/// so the lint cannot fire and the suppression only misleads readers.
fn path_parent_key(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    debug!("#path_parent_key");
    Ok(ParserNode {
        token: ParseToken::Parent,
        left: Some(Box::new(prev)),
        right: Some(Box::new(self.key()?)),
    })
}

/// Handles `^[...]`: consumes the '[' and parses the array selector with a
/// `Parent` node (wrapping `prev`) as its left-hand input.
///
/// Fix: removed the dead `#[allow(clippy::unnecessary_wraps)]` — the body
/// tail-calls `self.array(...)`, which already returns a fallible `Result`,
/// so the lint cannot fire here.
fn path_parent_array(&mut self, prev: ParserNode) -> Result<ParserNode, TokenError> {
    debug!("#path_parent_array");
    // Consume the '[' the caller only peeked at.
    self.eat_token();
    let parent = ParserNode {
        token: ParseToken::Parent,
        left: Some(Box::new(prev)),
        right: None,
    };
    self.array(parent)
}

fn key(&mut self) -> Result<ParserNode, TokenError> {
debug!("#key");
match self.token_reader.next_token() {
Expand Down
4 changes: 4 additions & 0 deletions src/paths/tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use super::tokens::Token;

const CH_DOLLA: char = '$';
const CH_DOT: char = '.';
const CH_CARET: char = '^';
const CH_ASTERISK: char = '*';
const CH_LARRAY: char = '[';
const CH_RARRAY: char = ']';
Expand Down Expand Up @@ -51,6 +52,7 @@ impl<'a> Tokenizer<'a> {
fn dolla(&mut self) -> Result<Token, TokenError> {
let fun = |c: &char| match c {
&CH_DOT
| &CH_CARET
| &CH_ASTERISK
| &CH_LARRAY
| &CH_RARRAY
Expand Down Expand Up @@ -177,6 +179,7 @@ impl<'a> Tokenizer<'a> {
let fun = |c: &char| match c {
&CH_DOLLA
| &CH_DOT
| &CH_CARET
| &CH_ASTERISK
| &CH_LARRAY
| &CH_RARRAY
Expand All @@ -203,6 +206,7 @@ impl<'a> Tokenizer<'a> {
match ch {
CH_DOLLA => self.dolla(),
CH_DOT => Ok(Token::Dot(span)),
CH_CARET => Ok(Token::Caret(span)),
CH_ASTERISK => Ok(Token::Asterisk(span)),
CH_LARRAY => Ok(Token::OpenArray(span)),
CH_RARRAY => Ok(Token::CloseArray(span)),
Expand Down
5 changes: 5 additions & 0 deletions src/paths/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ pub enum Token {
And(StrRange),
Or(StrRange),
Whitespace(StrRange),
Caret(StrRange)
}

impl Token {
Expand Down Expand Up @@ -53,6 +54,7 @@ impl Token {
Token::And(_) => matches!(other, Token::And(_)),
Token::Or(_) => matches!(other, Token::Or(_)),
Token::Whitespace(_) => matches!(other, Token::Whitespace(_)),
Token::Caret(_) => matches!(other, Token::Caret(_)),
}
}

Expand Down Expand Up @@ -81,6 +83,7 @@ impl Token {
Token::And(_) => Token::And(new_span),
Token::Or(_) => Token::Or(new_span),
Token::Whitespace(_) => Token::Whitespace(new_span),
Token::Caret(_) => Token::Caret(new_span),
}
}
}
Expand All @@ -97,6 +100,8 @@ pub enum ParseToken {
Leaves,
// '*'
All,
// '^'
Parent,

Key(StrRange),
Keys(Vec<StrRange>),
Expand Down
Loading