Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/spark_expressions_support.md
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,7 @@

### url_funcs

- [ ] parse_url
- [x] parse_url
- [ ] url_decode
- [ ] url_encode

Expand Down
4 changes: 4 additions & 0 deletions native/core/src/execution/jni_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@ use datafusion_spark::function::math::hex::SparkHex;
use datafusion_spark::function::math::width_bucket::SparkWidthBucket;
use datafusion_spark::function::string::char::CharFunc;
use datafusion_spark::function::string::concat::SparkConcat;
use datafusion_spark::function::url::parse_url::ParseUrl;
use datafusion_spark::function::url::try_parse_url::TryParseUrl;
use futures::poll;
use futures::stream::StreamExt;
use jni::objects::JByteBuffer;
Expand Down Expand Up @@ -400,6 +402,8 @@ fn register_datafusion_spark_function(session_ctx: &SessionContext) {
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkWidthBucket::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(MapFromEntries::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkCrc32::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(ParseUrl::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(TryParseUrl::default()));
}

/// Prepares arrow arrays for output.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
classOf[Like] -> CometLike,
classOf[Lower] -> CometLower,
classOf[OctetLength] -> CometScalarFunction("octet_length"),
classOf[ParseUrl] -> CometParseUrl,
classOf[RegExpReplace] -> CometRegExpReplace,
classOf[Reverse] -> CometReverse,
classOf[RLike] -> CometRLike,
Expand Down Expand Up @@ -557,6 +558,9 @@ object QueryPlanSerde extends Logging with CometExprShim {
// `PromotePrecision` is just a wrapper, don't need to serialize it.
exprToProtoInternal(child, inputs, binding)

case expr if expr.prettyName == "parse_url" =>
CometParseUrl.convertExpression(expr, inputs, binding)

case expr =>
QueryPlanSerde.exprSerdeMap.get(expr.getClass) match {
case Some(handler) =>
Expand Down
51 changes: 49 additions & 2 deletions spark/src/main/scala/org/apache/comet/serde/strings.scala
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,15 @@ package org.apache.comet.serde

import java.util.Locale

import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Concat, ConcatWs, Expression, If, InitCap, IsNull, Left, Length, Like, Literal, Lower, RegExpReplace, Right, RLike, StringLPad, StringRepeat, StringRPad, StringSplit, Substring, Upper}
import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Concat, ConcatWs, Expression, If, InitCap, IsNull, Left, Length, Like, Literal, Lower, ParseUrl, RegExpReplace, Right, RLike, StringLPad, StringRepeat, StringRPad, StringSplit, Substring, Upper}
import org.apache.spark.sql.types.{BinaryType, DataTypes, LongType, StringType}
import org.apache.spark.unsafe.types.UTF8String

import org.apache.comet.CometConf
import org.apache.comet.CometSparkSessionExtensions.withInfo
import org.apache.comet.expressions.{CometCast, CometEvalMode, RegExp}
import org.apache.comet.serde.ExprOuterClass.Expr
import org.apache.comet.serde.QueryPlanSerde.{createBinaryExpr, exprToProtoInternal, optExprWithInfo, scalarFunctionExprToProto, scalarFunctionExprToProtoWithReturnType}
import org.apache.comet.serde.QueryPlanSerde._

object CometStringRepeat extends CometExpressionSerde[StringRepeat] {

Expand Down Expand Up @@ -382,6 +382,53 @@ object CometStringSplit extends CometExpressionSerde[StringSplit] {
}
}

object CometParseUrl extends CometExpressionSerde[ParseUrl] {

  /**
   * Extracts the value of a trailing boolean `failOnError` literal from the raw children, if
   * present. Returns `None` when the last child is not a boolean literal.
   *
   * NOTE: a Scala `Boolean` type pattern on an `Any`-typed value already matches boxed
   * `java.lang.Boolean` instances, so a single case suffices; the former extra
   * `java.lang.Boolean` case was unreachable dead code and has been removed.
   */
  private def failOnErrorFromChildren(rawChildren: Seq[Expression]): Option[Boolean] = {
    rawChildren.lastOption.flatMap {
      case Literal(value: Boolean, _) => Some(value)
      case _ => None
    }
  }

  /**
   * Serializes a parse_url-style expression, dispatching to the native `parse_url` (errors on
   * malformed URLs) or `try_parse_url` (returns null) scalar function.
   *
   * @param expr
   *   the original Spark expression, used for fallback reporting via `optExprWithInfo`
   * @param rawChildren
   *   the expression's children, possibly ending with a boolean `failOnError` literal that is
   *   stripped before serialization
   * @param failOnError
   *   explicit error-mode override; when `None`, the mode is inferred from a trailing boolean
   *   literal child, defaulting to `true`
   */
  private def convertInternal(
      expr: Expression,
      rawChildren: Seq[Expression],
      failOnError: Option[Boolean],
      inputs: Seq[Attribute],
      binding: Boolean): Option[Expr] = {
    // Drop the trailing failOnError literal (if any) so that only the URL arguments are
    // passed to the native scalar function.
    val parseUrlArgs: Seq[Expression] = rawChildren.lastOption match {
      case Some(Literal(_: Boolean, _)) => rawChildren.dropRight(1)
      case _ => rawChildren
    }

    val shouldFailOnError: Boolean =
      failOnError.orElse(failOnErrorFromChildren(rawChildren)).getOrElse(true)
    val functionName: String = if (shouldFailOnError) {
      "parse_url"
    } else {
      "try_parse_url"
    }

    val childExprs: Seq[Option[Expr]] = parseUrlArgs.map(exprToProtoInternal(_, inputs, binding))
    val optExpr: Option[Expr] = scalarFunctionExprToProto(functionName, childExprs: _*)
    optExprWithInfo(optExpr, expr, parseUrlArgs: _*)
  }

  /**
   * Entry point for callers that match on expression shape rather than on the `ParseUrl` class
   * (e.g. the Spark 4 `Invoke`-based form), optionally supplying an explicit `failOnError`.
   */
  def convertExpression(
      expr: Expression,
      inputs: Seq[Attribute],
      binding: Boolean,
      failOnError: Option[Boolean] = None): Option[Expr] = {
    convertInternal(expr, expr.children, failOnError, inputs, binding)
  }

  override def convert(expr: ParseUrl, inputs: Seq[Attribute], binding: Boolean): Option[Expr] = {
    convertInternal(expr, expr.children, None, inputs, binding)
  }
}

trait CommonStringExprs {

def stringDecode(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,15 @@
package org.apache.comet.shims

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.objects.StaticInvoke
import org.apache.spark.sql.catalyst.expressions.objects.{Invoke, StaticInvoke}
import org.apache.spark.sql.catalyst.expressions.url.ParseUrlEvaluator
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.types.StringTypeWithCollation
import org.apache.spark.sql.types.{BinaryType, BooleanType, DataTypes, StringType}

import org.apache.comet.CometSparkSessionExtensions.withInfo
import org.apache.comet.expressions.{CometCast, CometEvalMode}
import org.apache.comet.serde.{CommonStringExprs, Compatible, ExprOuterClass, Incompatible}
import org.apache.comet.serde.{CometParseUrl, CommonStringExprs, Compatible, ExprOuterClass, Incompatible}
import org.apache.comet.serde.ExprOuterClass.{BinaryOutputStyle, Expr}
import org.apache.comet.serde.QueryPlanSerde.{exprToProtoInternal, optExprWithInfo, scalarFunctionExprToProto}

Expand Down Expand Up @@ -68,6 +69,18 @@ trait CometExprShim extends CommonStringExprs {
val Seq(bin, charset, _, _) = s.arguments
stringDecode(expr, charset, bin, inputs, binding)

case i: Invoke if i.functionName == "evaluate" =>
i.targetObject match {
case Literal(parseUrlEvaluator: ParseUrlEvaluator, _) =>
CometParseUrl.convertExpression(
i,
inputs,
binding,
Some(parseUrlEvaluator.failOnError))
case _ =>
None
}

case expr @ ToPrettyString(child, timeZoneId) =>
val castSupported = CometCast.isSupported(
child.dataType,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.

-- ConfigMatrix: parquet.enable.dictionary=false,true
-- MinSparkVersion: 3.5

-- Set up a single-column parquet table of URLs, including a NULL row.
statement
CREATE TABLE test_parse_url(url string) USING parquet

statement
INSERT INTO test_parse_url VALUES
('http://spark.apache.org/path?query=1'),
('https://spark.apache.org/path/to/page?query=1&k2=v2'),
(NULL)

-- Extract individual URL components; the NULL input row must yield NULL.
query
SELECT parse_url(url, 'HOST') FROM test_parse_url

query
SELECT parse_url(url, 'QUERY') FROM test_parse_url

query
SELECT parse_url(url, 'PROTOCOL') FROM test_parse_url

-- Three-argument form: extract the value of a specific query-string key.
query
SELECT parse_url(url, 'QUERY', 'query'), parse_url(url, 'QUERY', 'k2') FROM test_parse_url
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ import org.apache.spark.sql.{CometTestBase, DataFrame}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}

import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
import org.apache.comet.testing.{DataGenOptions, FuzzDataGenerator}

class CometStringExpressionSuite extends CometTestBase {
Expand Down Expand Up @@ -248,6 +249,39 @@ class CometStringExpressionSuite extends CometTestBase {
}
}

test("parse_url") {
withParquetTable(
Seq(
("http://spark.apache.org/path?query=1", 0),
("https://spark.apache.org/path/to/page?query=1&k2=v2", 1),
(null, 2)),
"tbl_parse_url") {

checkSparkAnswerAndOperator("SELECT parse_url(_1, 'HOST') FROM tbl_parse_url")
checkSparkAnswerAndOperator("SELECT parse_url(_1, 'QUERY') FROM tbl_parse_url")
checkSparkAnswerAndOperator("SELECT parse_url(_1, 'PROTOCOL') FROM tbl_parse_url")
checkSparkAnswerAndOperator(
"SELECT parse_url(_1, 'QUERY', 'query'), parse_url(_1, 'QUERY', 'k2') FROM tbl_parse_url")
checkSparkAnswerAndOperator("SELECT parse_url(_1, 'PATH') FROM tbl_parse_url")
checkSparkAnswerAndOperator("SELECT parse_url(_1, 'FILE') FROM tbl_parse_url")
}
}

test("parse_url with invalid URL in legacy mode") {
assume(isSpark40Plus)

withParquetTable(
Seq(
("http://spark.apache.org/path?query=1", 0),
("http://spark.apache.org:abc/path", 1),
(null, 2)),
"tbl_parse_url_invalid") {
withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
checkSparkAnswerAndOperator("SELECT parse_url(_1, 'HOST') FROM tbl_parse_url_invalid")
}
}
}

test("Various String scalar functions") {
val table = "names"
withTable(table) {
Expand Down
Loading