Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions .github/workflows/master.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,12 @@ jobs:
spark-archive: '-Pscala-2.13'
exclude-tags: ''
comment: 'normal'
- java: 25
python: '3.11'
spark: '4.2'
spark-archive: '-Pscala-2.13'
exclude-tags: ''
comment: 'normal'
- java: 8
python: '3.9'
spark: '3.5'
Expand All @@ -96,6 +102,12 @@ jobs:
spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-4.1.1 -Dspark.archive.name=spark-4.1.1-bin-hadoop3.tgz'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-4.1-binary'
- java: 17
python: '3.11'
spark: '3.5'
spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-4.2.0-preview3 -Dspark.archive.name=spark-4.2.0-preview3-bin-hadoop3.tgz'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-4.2-binary'
env:
SPARK_LOCAL_IP: localhost
steps:
Expand Down Expand Up @@ -214,6 +226,16 @@ jobs:
spark-compile: "3.5"
spark-runtime: "4.0"
comment: "normal"
- java: 21
scala: "2.13"
spark-compile: "3.5"
spark-runtime: "4.1"
comment: "normal"
- java: 25
scala: "2.13"
spark-compile: "3.5"
spark-runtime: "4.2"
comment: "normal"
env:
SPARK_LOCAL_IP: localhost
TEST_MODULES: "extensions/spark/kyuubi-spark-connector-hive,\
Expand Down
8 changes: 6 additions & 2 deletions bin/load-kyuubi-env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,6 @@ if [[ -z ${JAVA_HOME} ]]; then
fi

KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS -XX:+IgnoreUnrecognizedVMOptions"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS -Dio.netty.tryReflectionSetAccessible=true"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/java.lang=ALL-UNNAMED"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/java.lang.reflect=ALL-UNNAMED"
Expand All @@ -85,10 +84,12 @@ KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/sun.security.action=AL
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/sun.security.tools.keytool=ALL-UNNAMED"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/sun.security.x509=ALL-UNNAMED"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS -Djdk.reflect.useDirectMethodHandle=false"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --enable-native-access=ALL-UNNAMED"
KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS --sun-misc-unsafe-memory-access=allow"
export KYUUBI_JAVA_OPTS="$KYUUBI_JAVA_OPTS"

KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS -XX:+IgnoreUnrecognizedVMOptions"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS -Dio.netty.tryReflectionSetAccessible=true"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/java.lang=ALL-UNNAMED"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/java.lang.reflect=ALL-UNNAMED"
Expand All @@ -105,6 +106,9 @@ KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/sun.security.a
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/sun.security.tools.keytool=ALL-UNNAMED"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/sun.security.x509=ALL-UNNAMED"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS -Djdk.reflect.useDirectMethodHandle=false"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --sun-misc-unsafe-memory-access=allow"
KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS --enable-native-access=ALL-UNNAMED"
export KYUUBI_CTL_JAVA_OPTS="$KYUUBI_CTL_JAVA_OPTS"

export KYUUBI_SCALA_VERSION="${KYUUBI_SCALA_VERSION:-"2.12"}"
Expand Down
4 changes: 2 additions & 2 deletions dev/dependencyList
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ grpc-stub/1.76.2//grpc-stub-1.76.2.jar
grpc-util/1.76.2//grpc-util-1.76.2.jar
gson/2.11.0//gson-2.11.0.jar
guava/33.4.8-jre//guava-33.4.8-jre.jar
hadoop-client-api/3.3.6//hadoop-client-api-3.3.6.jar
hadoop-client-runtime/3.3.6//hadoop-client-runtime-3.3.6.jar
hadoop-client-api/3.4.3//hadoop-client-api-3.4.3.jar
hadoop-client-runtime/3.4.3//hadoop-client-runtime-3.4.3.jar
hk2-api/2.6.1//hk2-api-2.6.1.jar
hk2-locator/2.6.1//hk2-locator-2.6.1.jar
hk2-utils/2.6.1//hk2-utils-2.6.1.jar
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,10 @@

package org.apache.kyuubi.plugin.spark.authz

import java.net.URI

import scala.reflect.io.File
import scala.util.Try

import org.apache.spark.sql.{DataFrame, SparkSession, SQLContext}
import org.apache.spark.sql.catalyst.TableIdentifier
Expand All @@ -35,6 +38,7 @@ import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.reflect.DynConstructors

abstract class PrivilegesBuilderSuite extends AnyFunSuite
with SparkSessionProvider with BeforeAndAfterAll with BeforeAndAfterEach {
Expand Down Expand Up @@ -1451,6 +1455,33 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(accessType1 == AccessType.WRITE)
}

// Builds an empty CatalogStorageFormat reflectively instead of calling the case-class
// constructor directly, because the constructor arity differs across the Spark versions
// this suite runs against (the first attempt takes one extra Option[String] argument).
// First attempt: the 7-argument constructor shape.
private val emptyCatalogStorageFormat = Try {
DynConstructors.builder()
.impl(
classOf[CatalogStorageFormat],
classOf[Option[URI]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Boolean],
classOf[Map[String, String]])
.build[CatalogStorageFormat]()
.newInstance(None, None, None, None, None, Boolean.box(false), Map.empty)
}.recover { case _: Exception =>
// Fallback: the older 6-argument constructor shape (one fewer Option[String]).
DynConstructors.builder()
.impl(
classOf[CatalogStorageFormat],
classOf[Option[URI]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Boolean],
classOf[Map[String, String]])
.build[CatalogStorageFormat]()
.newInstance(None, None, None, None, Boolean.box(false), Map.empty)
}.get // fails fast if neither constructor shape matches the running Spark version

test("InsertIntoDataSourceCommand") {
val tableName = "InsertIntoDataSourceTable"
withTable(tableName) { _ =>
Expand All @@ -1461,13 +1492,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
val newTable = CatalogTable(
identifier = TableIdentifier(tableName, None),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat(
locationUri = None,
inputFormat = None,
outputFormat = None,
serde = None,
compressed = false,
properties = Map.empty),
storage = emptyCatalogStorageFormat,
schema = schema,
provider = Some(classOf[SimpleInsertSource].getName))

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,10 @@

package org.apache.kyuubi.plugin.lineage.helper

import java.net.URI

import scala.reflect.io.File
import scala.util.Try

import org.apache.spark.SparkConf
import org.apache.spark.kyuubi.lineage.{LineageConf, SparkContextHelper}
Expand All @@ -29,6 +32,7 @@ import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.plugin.lineage.Lineage
import org.apache.kyuubi.util.reflect.DynConstructors

abstract class SparkSQLLineageParserHelperSuite extends KyuubiFunSuite
with SparkListenerExtensionTest {
Expand Down Expand Up @@ -323,6 +327,33 @@ abstract class SparkSQLLineageParserHelperSuite extends KyuubiFunSuite
}
}

// Builds an empty CatalogStorageFormat reflectively instead of calling the case-class
// constructor directly, because the constructor arity differs across the Spark versions
// this suite runs against (the first attempt takes one extra Option[String] argument).
// First attempt: the 7-argument constructor shape.
private val emptyCatalogStorageFormat = Try {
DynConstructors.builder()
.impl(
classOf[CatalogStorageFormat],
classOf[Option[URI]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Boolean],
classOf[Map[String, String]])
.build[CatalogStorageFormat]()
.newInstance(None, None, None, None, None, Boolean.box(false), Map.empty)
}.recover { case _: Exception =>
// Fallback: the older 6-argument constructor shape (one fewer Option[String]).
DynConstructors.builder()
.impl(
classOf[CatalogStorageFormat],
classOf[Option[URI]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Option[String]],
classOf[Boolean],
classOf[Map[String, String]])
.build[CatalogStorageFormat]()
.newInstance(None, None, None, None, Boolean.box(false), Map.empty)
}.get // fails fast if neither constructor shape matches the running Spark version

test("columns lineage extract - InsertIntoDataSourceCommand") {
val tableName = "insertintodatasourcecommand"
withTable(tableName) { _ =>
Expand All @@ -332,13 +363,7 @@ abstract class SparkSQLLineageParserHelperSuite extends KyuubiFunSuite
val newTable = CatalogTable(
identifier = TableIdentifier(tableName, None),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat(
locationUri = None,
inputFormat = None,
outputFormat = None,
serde = None,
compressed = false,
properties = Map.empty),
storage = emptyCatalogStorageFormat,
schema = schema,
provider = Some(classOf[SimpleInsertSource].getName))
spark.sessionState.catalog.createTable(newTable, ignoreIfExists = false)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,31 @@ package org.apache.spark.ui
import org.apache.http.client.methods.HttpGet
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.SPARK_VERSION
import org.scalactic.source.Position
import org.scalatest.Tag

import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
import org.apache.kyuubi.operation.HiveJDBCTestHelper
import org.apache.kyuubi.session.SessionHandle
import org.apache.kyuubi.util.SemanticVersion

class EngineTabSuite extends WithSparkSQLEngine with HiveJDBCTestHelper {
// Spark conf for the embedded engine under test: enable the UI on an ephemeral
// port (0) and redact string values matching the given regex (url/access/secret/
// password, case-insensitive) in UI output.
override def withKyuubiConf: Map[String, String] = Map(
"spark.ui.enabled" -> "true",
"spark.ui.port" -> "0",
"spark.sql.redaction.string.regex" -> "(?i)url|access|secret|password")

// Intercepts every test registration in this suite: on Spark >= 4.2 the test is
// registered as ignored (so the exclusion stays visible in the report) instead of
// being run; on older Spark it is registered normally.
override protected def test(testName: String, testTags: Tag*)(testBody: => Any)(implicit
pos: Position): Unit = {
// SPARK-47086 (4.2.0) Upgrade to Jetty 12 and Servlet 6.0
if (SemanticVersion(SPARK_VERSION) >= "4.2") {
ignore(s"$testName (excluded)")(testBody)
} else {
super.test(testName, testTags: _*)(testBody)
}
}

override protected def beforeEach(): Unit = {
super.beforeEach()
startSparkEngine()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ class KyuubiOperationPerGroupSuite extends WithKyuubiServer with SparkQueryTests
}

test("kyuubi defined function - system_user/session_user") {
assume(SPARK_ENGINE_RUNTIME_VERSION < "4.2")
withSessionConf(Map("hive.server2.proxy.user" -> "user1"))(Map.empty)(Map.empty) {
withJdbcStatement() { statement =>
val res = statement.executeQuery("select system_user() as c1, session_user() as c2")
Expand Down
46 changes: 44 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@
<grpc.version>1.76.2</grpc.version>
<guava.version>33.4.8-jre</guava.version>
<guava.failureaccess.version>1.0.3</guava.failureaccess.version>
<hadoop.version>3.3.6</hadoop.version>
<hadoop.version>3.4.3</hadoop.version>
<hikaricp.version>4.0.3</hikaricp.version>
<fliptables.verion>1.1.1</fliptables.verion>
<hive.version>3.1.3</hive.version>
Expand Down Expand Up @@ -301,7 +301,8 @@
--add-opens=java.base/sun.security.x509=ALL-UNNAMED
--add-opens=java.base/sun.util.calendar=ALL-UNNAMED
-Djdk.reflect.useDirectMethodHandle=false
-Dio.netty.tryReflectionSetAccessible=true</extraJavaTestArgs>
--enable-native-access=ALL-UNNAMED
--sun-misc-unsafe-memory-access=allow</extraJavaTestArgs>

<debugArgLine>-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005</debugArgLine>
</properties>
Expand Down Expand Up @@ -1968,6 +1969,21 @@
</properties>
</profile>

<profile>
<id>java-25</id>
<activation>
<jdk>25</jdk>
</activation>
<properties>
<!-- TODO: The current versions of spotless (2.30.0) and google-java-format (1.7)
do not support Java 25, but newer versions produce different outputs.
Re-evaluate once we have dropped support for Java 8. -->
<maven.plugin.spotless.version>2.43.0</maven.plugin.spotless.version>
<spotless.check.skip>true</spotless.check.skip>
<spotless.java.googlejavaformat.version>1.22.0</spotless.java.googlejavaformat.version>
</properties>
</profile>

<profile>
<id>scala-2.12</id>
<properties>
Expand Down Expand Up @@ -2083,6 +2099,32 @@
</properties>
</profile>

<profile>
<id>spark-4.2</id>
<modules>
<!--
<module>extensions/spark/kyuubi-extension-spark-4-2</module>
-->
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
<maven.compiler.release>17</maven.compiler.release>
<enforcer.maxJdkVersion>17</enforcer.maxJdkVersion>
<spark.version>4.2.0-preview3</spark.version>
<spark.binary.version>4.0</spark.binary.version>
<antlr4.version>4.13.1</antlr4.version>
<delta.version>4.0.0</delta.version>
<delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>
<!-- TODO: update once Hudi support Spark 4.2 -->
<hudi.artifact>hudi-spark3.5-bundle_${scala.binary.version}</hudi.artifact>
<!-- TODO: update once Paimon support Spark 4.2.
paimon-spark-3.5 contains Scala 2.12 classes cause conflicts with Scala 2.13 -->
<paimon.artifact>paimon-common</paimon.artifact>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
<spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
</properties>
</profile>

<profile>
<id>spark-master</id>
<properties>
Expand Down
Loading