diff --git a/kafka/Dockerfile b/kafka/Dockerfile
index 33a34cded..82fece93c 100644
--- a/kafka/Dockerfile
+++ b/kafka/Dockerfile
@@ -44,9 +44,9 @@ tar -czf /stackable/kafka-${NEW_VERSION}-src.tar.gz .
# We don't specify "-x test" to skip the tests, as we might bump some Kafka internal dependencies in the future and
# it's a good idea to run the tests in this case.
./gradlew clean releaseTarGz
-./gradlew cyclonedxBom
+#./gradlew cyclonedxBom
tar -xf core/build/distributions/kafka_${SCALA_VERSION}-${NEW_VERSION}.tgz -C /stackable
-cp build/reports/bom.json /stackable/kafka_${SCALA_VERSION}-${NEW_VERSION}.cdx.json
+#cp build/reports/bom.json /stackable/kafka_${SCALA_VERSION}-${NEW_VERSION}.cdx.json
rm -rf /stackable/kafka_${SCALA_VERSION}-${NEW_VERSION}/site-docs/
(cd .. && rm -rf ${PRODUCT_VERSION})
@@ -79,7 +79,7 @@ LABEL \
description="This image is deployed by the Stackable Operator for Apache Kafka."
COPY --chown=${STACKABLE_USER_UID}:0 --from=kafka-builder /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION} /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}
-COPY --chown=${STACKABLE_USER_UID}:0 --from=kafka-builder /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}.cdx.json /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}.cdx.json
+#COPY --chown=${STACKABLE_USER_UID}:0 --from=kafka-builder /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}.cdx.json /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}.cdx.json
COPY --chown=${STACKABLE_USER_UID}:0 --from=kafka-builder /stackable/kafka-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz /stackable
COPY --chown=${STACKABLE_USER_UID}:0 --from=kafka-builder /stackable/jmx/ /stackable/jmx/
COPY --chown=${STACKABLE_USER_UID}:0 --from=kcat /stackable/kcat /stackable/bin/kcat-${KAFKA_KCAT_VERSION}
diff --git a/kafka/boil-config.toml b/kafka/boil-config.toml
index 0d5d0a387..33a64a58a 100644
--- a/kafka/boil-config.toml
+++ b/kafka/boil-config.toml
@@ -25,3 +25,17 @@ java-devel = "24"
[versions."4.1.1".build-arguments]
scala-version = "2.13"
jmx-exporter-version = "1.3.0"
+
+[versions."4.2.0".local-images]
+java-base = "24"
+java-devel = "24"
+"kafka/kcat" = "1.7.0"
+"kafka/kafka-opa-plugin" = "1.5.1"
+# TODO: this dependency is not used in this version, but it is added
+# to avoid major changes to the Kafka image build on short notice.
+# Building this image is quick and should not even be noticeable in CI.
+"shared/reload4j" = "1.2.25"
+
+[versions."4.2.0".build-arguments]
+scala-version = "2.13"
+jmx-exporter-version = "1.3.0"
diff --git a/kafka/stackable/patches/4.2.0/0001-Add-CycloneDX-plugin.patch b/kafka/stackable/patches/4.2.0/0001-Add-CycloneDX-plugin.patch
new file mode 100644
index 000000000..eaafc4cb9
--- /dev/null
+++ b/kafka/stackable/patches/4.2.0/0001-Add-CycloneDX-plugin.patch
@@ -0,0 +1,65 @@
+From 855ddc9cdffe2a838f9dc7d05064cf8b74144d36 Mon Sep 17 00:00:00 2001
+From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com>
+Date: Wed, 13 May 2026 16:30:33 +0300
+Subject: Add CycloneDX plugin
+
+---
+ build.gradle | 44 +++++++++++++++++++++++++++++++++++++++++++-
+ 1 file changed, 43 insertions(+), 1 deletion(-)
+
+diff --git a/build.gradle b/build.gradle
+index 2b27f3af75..6a5e74a2bd 100644
+--- a/build.gradle
++++ b/build.gradle
+@@ -41,8 +41,50 @@ plugins {
+ id 'org.scoverage' version '8.1' apply false
+ id 'com.gradleup.shadow' version '8.3.9' apply false
+ id 'com.diffplug.spotless' version "8.0.0"
++ id 'org.cyclonedx.bom' version '1.10.0'
++}
++
++cyclonedxBom {
++ // Specified the type of project being built. Defaults to 'library'
++ projectType = "application"
++ // Specified the version of the CycloneDX specification to use. Defaults to '1.5'
++ schemaVersion = "1.5"
++ // Boms destination directory. Defaults to 'build/reports'
++ destination = file("build/reports")
++ // The file name for the generated BOMs (before the file format suffix). Defaults to 'bom'
++ outputName = "bom"
++ // The file format generated, can be xml, json or all for generating both. Defaults to 'all'
++ outputFormat = "json"
++ includeConfigs = ["runtimeClasspath"]
++ // Exclude test components. This list needs to be checked and, if it changed, updated for every new Kafka version.
++ // The list can be obtained by running `gradle projects | grep upgrade-system-tests`
++ skipProjects = [
++ 'upgrade-system-tests-0110',
++ 'upgrade-system-tests-10',
++ 'upgrade-system-tests-11',
++ 'upgrade-system-tests-20',
++ 'upgrade-system-tests-21',
++ 'upgrade-system-tests-22',
++ 'upgrade-system-tests-23',
++ 'upgrade-system-tests-24',
++ 'upgrade-system-tests-25',
++ 'upgrade-system-tests-26',
++ 'upgrade-system-tests-27',
++ 'upgrade-system-tests-28',
++ 'upgrade-system-tests-30',
++ 'upgrade-system-tests-31',
++ 'upgrade-system-tests-32',
++ 'upgrade-system-tests-33',
++ 'upgrade-system-tests-34',
++ 'upgrade-system-tests-35',
++ 'upgrade-system-tests-36',
++ 'upgrade-system-tests-37',
++ 'upgrade-system-tests-38',
++ 'upgrade-system-tests-39',
++ 'upgrade-system-tests-40',
++ ]
+ }
+-
++
+ ext {
+ minClientJavaVersion = 11
+ minNonClientJavaVersion = 17
diff --git a/kafka/stackable/patches/4.2.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch b/kafka/stackable/patches/4.2.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch
new file mode 100644
index 000000000..c22f2b19a
--- /dev/null
+++ b/kafka/stackable/patches/4.2.0/0002-Change-Gradle-to-use-the-Nexus-Build-Repo.patch
@@ -0,0 +1,35 @@
+From 456b4ec6e150ba3309d3fde426d2df80a67d85de Mon Sep 17 00:00:00 2001
+From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com>
+Date: Wed, 13 May 2026 16:31:44 +0300
+Subject: Change Gradle to use the Nexus Build Repo
+
+---
+ build.gradle | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/build.gradle b/build.gradle
+index 6a5e74a2bd..2bdfb80d85 100644
+--- a/build.gradle
++++ b/build.gradle
+@@ -18,7 +18,9 @@ import java.nio.charset.StandardCharsets
+
+ buildscript {
+ repositories {
+- mavenCentral()
++ maven {
++ url 'https://build-repo.stackable.tech/repository/maven-public/'
++ }
+ }
+ apply from: "$rootDir/gradle/dependencies.gradle"
+
+@@ -209,7 +211,9 @@ ext {
+ allprojects {
+
+ repositories {
+- mavenCentral()
++ maven {
++ url 'https://build-repo.stackable.tech/repository/maven-public/'
++ }
+ }
+
+ dependencyUpdates {
diff --git a/kafka/stackable/patches/4.2.0/0003-Build-custom-Stackable-version.patch b/kafka/stackable/patches/4.2.0/0003-Build-custom-Stackable-version.patch
new file mode 100644
index 000000000..1cfe06847
--- /dev/null
+++ b/kafka/stackable/patches/4.2.0/0003-Build-custom-Stackable-version.patch
@@ -0,0 +1,619 @@
+From f5c7dd38a155475eb4f579ceeb18f88a19a84a41 Mon Sep 17 00:00:00 2001
+From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com>
+Date: Wed, 13 May 2026 16:36:16 +0300
+Subject: Build custom Stackable version
+
+How it was done:
+
+Replace in files in root dir (do not garble .git):
+
+for d in $(ls -p|grep -v /); do find $d -type f -exec sed -i
+'s/4\.2\.0/4.2.0-stackable0.0.0-dev/g' {} +; done
+
+Replace in all subdirs:
+
+for d in $(ls -d */); do find $d -type f -exec sed -i
+'s/4\.2\.0/4.2.0-stackable0.0.0-dev/g' {} +; done
+---
+ .../server/telemetry/ClientTelemetry.java | 2 +-
+ .../telemetry/ClientTelemetryReceiver.java | 2 +-
+ committer-tools/kafka-merge-pr.py | 2 +-
+ docs/apis/_index.md | 12 ++++++------
+ docs/configuration/system-properties.md | 4 ++--
+ docs/getting-started/docker.md | 8 ++++----
+ docs/getting-started/quickstart.md | 18 +++++++++---------
+ docs/getting-started/upgrade.md | 6 +++---
+ docs/kafka-connect/user-guide.md | 4 ++--
+ docs/operations/tiered-storage.md | 4 ++--
+ docs/streams/developer-guide/datatypes.md | 4 ++--
+ docs/streams/developer-guide/dsl-api.md | 4 ++--
+ docs/streams/developer-guide/testing.md | 2 +-
+ .../developer-guide/write-streams-app.md | 12 ++++++------
+ docs/streams/quickstart.md | 6 +++---
+ docs/streams/tutorial.md | 2 +-
+ docs/streams/upgrade-guide.md | 12 ++++++------
+ gradle.properties | 2 +-
+ streams/quickstart/java/pom.xml | 2 +-
+ .../main/resources/archetype-resources/pom.xml | 2 +-
+ streams/quickstart/pom.xml | 2 +-
+ tests/kafkatest/__init__.py | 2 +-
+ tests/kafkatest/version.py | 10 +++++-----
+ 23 files changed, 62 insertions(+), 62 deletions(-)
+
+diff --git a/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetry.java b/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetry.java
+index 3bb4db2a82..f0bd2dcb9a 100644
+--- a/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetry.java
++++ b/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetry.java
+@@ -23,7 +23,7 @@ import org.apache.kafka.common.metrics.MetricsReporter;
+ * A {@link MetricsReporter} may implement this interface to indicate support for collecting client
+ * telemetry on the server side.
+ *
+- * @deprecated Since 4.2.0, use {@link ClientTelemetryExporterProvider} instead. This interface will be
++ * @deprecated Since 4.2.0-stackable0.0.0-dev, use {@link ClientTelemetryExporterProvider} instead. This interface will be
+ * removed in Kafka 5.0.0. The new interface provides a {@link ClientTelemetryExporter}
+ * which includes additional context such as the push interval.
+ */
+diff --git a/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetryReceiver.java b/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetryReceiver.java
+index addc2a5c07..8ca68d3aa9 100644
+--- a/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetryReceiver.java
++++ b/clients/src/main/java/org/apache/kafka/server/telemetry/ClientTelemetryReceiver.java
+@@ -23,7 +23,7 @@ import org.apache.kafka.server.authorizer.AuthorizableRequestContext;
+ * {@code ClientTelemetryReceiver} defines the behaviour for telemetry receiver on the broker side
+ * which receives client telemetry metrics.
+ *
+- * @deprecated Since 4.2.0, use {@link ClientTelemetryExporter} instead. This interface will be
++ * @deprecated Since 4.2.0-stackable0.0.0-dev, use {@link ClientTelemetryExporter} instead. This interface will be
+ * removed in Kafka 5.0.0. The new interface provides additional context including
+ * the push interval to enable better metric lifecycle management.
+ */
+diff --git a/committer-tools/kafka-merge-pr.py b/committer-tools/kafka-merge-pr.py
+index be86078687..37723cd797 100755
+--- a/committer-tools/kafka-merge-pr.py
++++ b/committer-tools/kafka-merge-pr.py
+@@ -70,7 +70,7 @@ TEMP_BRANCH_PREFIX = "PR_TOOL"
+
+ DEV_BRANCH_NAME = "trunk"
+
+-DEFAULT_FIX_VERSION = os.environ.get("DEFAULT_FIX_VERSION", "4.2.0")
++DEFAULT_FIX_VERSION = os.environ.get("DEFAULT_FIX_VERSION", "4.2.0-stackable0.0.0-dev")
+
+ ORIGINAL_HEAD = ""
+
+diff --git a/docs/apis/_index.md b/docs/apis/_index.md
+index cbddb64149..0dff2ea2c6 100644
+--- a/docs/apis/_index.md
++++ b/docs/apis/_index.md
+@@ -48,7 +48,7 @@ To use the producer, add the following Maven dependency to your project:
+
+ org.apache.kafka
+ kafka-clients
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ # Consumer API
+@@ -63,7 +63,7 @@ To use the consumer, add the following Maven dependency to your project:
+
+ org.apache.kafka
+ kafka-clients
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ # Share Consumer API
+@@ -78,7 +78,7 @@ To use the share consumer, add the following Maven dependency to your project:
+
+ org.apache.kafka
+ kafka-clients
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ # Streams API
+@@ -95,7 +95,7 @@ To use Kafka Streams, add the following Maven dependency to your project:
+
+ org.apache.kafka
+ kafka-streams
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ When using Scala you may optionally include the `kafka-streams-scala` library. Additional documentation on using the Kafka Streams DSL for Scala is available [in the developer guide](/42/documentation/streams/developer-guide/dsl-api.html#scala-dsl).
+@@ -106,7 +106,7 @@ To use Kafka Streams DSL for Scala 2.13, add the following Maven dependency to y
+
+ org.apache.kafka
+ kafka-streams-scala_2.13
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ # Connect API
+@@ -127,7 +127,7 @@ To use the Admin API, add the following Maven dependency to your project:
+
+ org.apache.kafka
+ kafka-clients
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ For more information about the Admin APIs, see the [javadoc](/{version}/javadoc/index.html?org/apache/kafka/clients/admin/Admin.html "Kafka 4.2 Javadoc").
+diff --git a/docs/configuration/system-properties.md b/docs/configuration/system-properties.md
+index c6296267b1..018b213e70 100644
+--- a/docs/configuration/system-properties.md
++++ b/docs/configuration/system-properties.md
+@@ -112,7 +112,7 @@ Deprecated:
+
+
+
+-4.2.0
++4.2.0-stackable0.0.0-dev
+ |
+
+ |
+@@ -137,7 +137,7 @@ Since:
+ |
+
+
+-4.2.0
++4.2.0-stackable0.0.0-dev
+ |
+
+ |
+diff --git a/docs/getting-started/docker.md b/docs/getting-started/docker.md
+index 9fd766223b..d8b48be697 100644
+--- a/docs/getting-started/docker.md
++++ b/docs/getting-started/docker.md
+@@ -33,7 +33,7 @@ type: docs
+ Docker image can be pulled from Docker Hub using the following command:
+
+
+- $ docker pull apache/kafka:4.2.0
++ $ docker pull apache/kafka:4.2.0-stackable0.0.0-dev
+
+ If you want to fetch the latest version of the Docker image use following command:
+
+@@ -43,7 +43,7 @@ If you want to fetch the latest version of the Docker image use following comman
+ To start the Kafka container using this Docker image with default configs and on default port 9092:
+
+
+- $ docker run -p 9092:9092 apache/kafka:4.2.0
++ $ docker run -p 9092:9092 apache/kafka:4.2.0-stackable0.0.0-dev
+
+ ## GraalVM Based Native Apache Kafka Docker Image
+
+@@ -53,7 +53,7 @@ NOTE: This image is experimental and intended for local development and testing
+ Docker image can be pulled from Docker Hub using the following command:
+
+
+- $ docker pull apache/kafka-native:4.2.0
++ $ docker pull apache/kafka-native:4.2.0-stackable0.0.0-dev
+
+ If you want to fetch the latest version of the Docker image use following command:
+
+@@ -63,7 +63,7 @@ If you want to fetch the latest version of the Docker image use following comman
+ To start the Kafka container using this Docker image with default configs and on default port 9092:
+
+
+- $ docker run -p 9092:9092 apache/kafka-native:4.2.0
++ $ docker run -p 9092:9092 apache/kafka-native:4.2.0-stackable0.0.0-dev
+
+ ## Usage guide
+
+diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md
+index 25061e31d1..10af2204ce 100644
+--- a/docs/getting-started/quickstart.md
++++ b/docs/getting-started/quickstart.md
+@@ -28,11 +28,11 @@ type: docs
+
+ ## Step 1: Get Kafka
+
+-[Download](https://www.apache.org/dyn/closer.cgi?path=/kafka/4.2.0/kafka_2.13-4.2.0.tgz) the latest Kafka release and extract it:
++[Download](https://www.apache.org/dyn/closer.cgi?path=/kafka/4.2.0-stackable0.0.0-dev/kafka_2.13-4.2.0-stackable0.0.0-dev.tgz) the latest Kafka release and extract it:
+
+
+- $ tar -xzf kafka_2.13-4.2.0.tgz
+- $ cd kafka_2.13-4.2.0
++ $ tar -xzf kafka_2.13-4.2.0-stackable0.0.0-dev.tgz
++ $ cd kafka_2.13-4.2.0-stackable0.0.0-dev
+
+ ## Step 2: Start the Kafka environment
+
+@@ -64,24 +64,24 @@ Once the Kafka server has successfully launched, you will have a basic Kafka env
+ Get the Docker image:
+
+
+- $ docker pull apache/kafka:4.2.0
++ $ docker pull apache/kafka:4.2.0-stackable0.0.0-dev
+
+ Start the Kafka Docker container:
+
+
+- $ docker run -p 9092:9092 apache/kafka:4.2.0
++ $ docker run -p 9092:9092 apache/kafka:4.2.0-stackable0.0.0-dev
+
+ ### Using GraalVM Based Native Apache Kafka Docker Image
+
+ Get the Docker image:
+
+
+- $ docker pull apache/kafka-native:4.2.0
++ $ docker pull apache/kafka-native:4.2.0-stackable0.0.0-dev
+
+ Start the Kafka Docker container:
+
+
+- $ docker run -p 9092:9092 apache/kafka-native:4.2.0
++ $ docker run -p 9092:9092 apache/kafka-native:4.2.0-stackable0.0.0-dev
+
+ ## Step 3: Create a topic to store your events
+
+@@ -135,12 +135,12 @@ You probably have lots of data in existing systems like relational databases or
+
+ In this quickstart we'll see how to run Kafka Connect with simple connectors that import data from a file to a Kafka topic and export data from a Kafka topic to a file.
+
+-First, make sure to add `connect-file-4.2.0.jar` to the `plugin.path` property in the Connect worker's configuration. For the purpose of this quickstart we'll use a relative path and consider the connectors' package as an uber jar, which works when the quickstart commands are run from the installation directory. However, it's worth noting that for production deployments using absolute paths is always preferable. See [plugin.path](../../configuration/kafka-connect-configs/#connectconfigs_plugin.path) for a detailed description of how to set this config.
++First, make sure to add `connect-file-4.2.0-stackable0.0.0-dev.jar` to the `plugin.path` property in the Connect worker's configuration. For the purpose of this quickstart we'll use a relative path and consider the connectors' package as an uber jar, which works when the quickstart commands are run from the installation directory. However, it's worth noting that for production deployments using absolute paths is always preferable. See [plugin.path](../../configuration/kafka-connect-configs/#connectconfigs_plugin.path) for a detailed description of how to set this config.
+
+ Edit the `config/connect-standalone.properties` file, add or change the `plugin.path` configuration property match the following, and save the file:
+
+
+- $ echo "plugin.path=libs/connect-file-4.2.0.jar" >> config/connect-standalone.properties
++ $ echo "plugin.path=libs/connect-file-4.2.0-stackable0.0.0-dev.jar" >> config/connect-standalone.properties
+
+ Then, start by creating some seed data to test with:
+
+diff --git a/docs/getting-started/upgrade.md b/docs/getting-started/upgrade.md
+index 5538114c41..db25264930 100644
+--- a/docs/getting-started/upgrade.md
++++ b/docs/getting-started/upgrade.md
+@@ -26,11 +26,11 @@ type: docs
+ -->
+
+
+-## Upgrading to 4.2.0
++## Upgrading to 4.2.0-stackable0.0.0-dev
+
+-### Upgrading Servers to 4.2.0 from any version 3.3.x through 4.1.x
++### Upgrading Servers to 4.2.0-stackable0.0.0-dev from any version 3.3.x through 4.1.x
+
+-### Notable changes in 4.2.0
++### Notable changes in 4.2.0-stackable0.0.0-dev
+
+ * The `--max-partition-memory-bytes` option in `kafka-console-producer` is deprecated and will be removed in Kafka 5.0. Please use `--batch-size` instead.
+ * Queues for Kafka ([KIP-932](https://cwiki.apache.org/confluence/x/4hA0Dw)) is production-ready in Apache Kafka 4.2. This feature introduces a new kind of group called share groups, as an alternative to consumer groups. Consumers in a share group cooperatively consume records from topics, without assigning each partition to just one consumer. Share groups also introduce per-record acknowledgement and counting of delivery attempts. Use share groups in cases where records are processed one at a time, rather than as part of an ordered stream.
+diff --git a/docs/kafka-connect/user-guide.md b/docs/kafka-connect/user-guide.md
+index 9a8f0e3afe..2c0556ef04 100644
+--- a/docs/kafka-connect/user-guide.md
++++ b/docs/kafka-connect/user-guide.md
+@@ -42,7 +42,7 @@ The first parameter is the configuration for the worker. This includes settings
+ * `bootstrap.servers` \- List of Kafka servers used to bootstrap connections to Kafka
+ * `key.converter` \- Converter class used to convert between Kafka Connect format and the serialized form that is written to Kafka. This controls the format of the keys in messages written to or read from Kafka, and since this is independent of connectors it allows any connector to work with any serialization format. Examples of common formats include JSON and Avro.
+ * `value.converter` \- Converter class used to convert between Kafka Connect format and the serialized form that is written to Kafka. This controls the format of the values in messages written to or read from Kafka, and since this is independent of connectors it allows any connector to work with any serialization format. Examples of common formats include JSON and Avro.
+- * `plugin.path` (default `null`) - a list of paths that contain Connect plugins (connectors, converters, transformations). Before running quick starts, users must add the absolute path that contains the example FileStreamSourceConnector and FileStreamSinkConnector packaged in `connect-file-4.2.0.jar`, because these connectors are not included by default to the `CLASSPATH` or the `plugin.path` of the Connect worker (see plugin.path property for examples).
++ * `plugin.path` (default `null`) - a list of paths that contain Connect plugins (connectors, converters, transformations). Before running quick starts, users must add the absolute path that contains the example FileStreamSourceConnector and FileStreamSinkConnector packaged in `connect-file-4.2.0-stackable0.0.0-dev.jar`, because these connectors are not included by default to the `CLASSPATH` or the `plugin.path` of the Connect worker (see plugin.path property for examples).
+
+
+
+@@ -712,4 +712,4 @@ You should then verify that your manifests are correct by using the verification
+
+ ## Security
+
+-It's important to understand the security concerns inherent to Connect. First, Connect allows running custom plugins. These plugins can run arbitrary code, so you must trust them before installing them in your Connect clusters. By default, the REST API is unsecured and allows anyone that can access it to start and stop connectors. You should only directly expose the REST API to trusted users, otherwise it's easy to gain arbitrary code execution on Connect workers. By default, connectors can also override the configurations of the Kafka clients that Connect uses internally. Since Kafka 4.2.0, it's recommended to set `connector.client.config.override.policy` to `Allowlist`, this will be the default from Kafka 5.0.0, and explicitly only allow configurations that you need to override. Keep in mind that configurations that can load classes such as `sasl.jaas.config` or `sasl.login.class` should only be allowed if only trusted users can access the REST API as they, by design, enable executing code on the Connect worker.
++It's important to understand the security concerns inherent to Connect. First, Connect allows running custom plugins. These plugins can run arbitrary code, so you must trust them before installing them in your Connect clusters. By default, the REST API is unsecured and allows anyone that can access it to start and stop connectors. You should only directly expose the REST API to trusted users, otherwise it's easy to gain arbitrary code execution on Connect workers. By default, connectors can also override the configurations of the Kafka clients that Connect uses internally. Since Kafka 4.2.0-stackable0.0.0-dev, it's recommended to set `connector.client.config.override.policy` to `Allowlist`, this will be the default from Kafka 5.0.0, and explicitly only allow configurations that you need to override. Keep in mind that configurations that can load classes such as `sasl.jaas.config` or `sasl.login.class` should only be allowed if only trusted users can access the REST API as they, by design, enable executing code on the Connect worker.
+diff --git a/docs/operations/tiered-storage.md b/docs/operations/tiered-storage.md
+index 38dac7361b..f4117ca838 100644
+--- a/docs/operations/tiered-storage.md
++++ b/docs/operations/tiered-storage.md
+@@ -63,7 +63,7 @@ To adopt the `LocalTieredStorage`, the test library needs to be built locally
+
+
+ # please checkout to the specific version tag you're using before building it
+- # ex: `git checkout 4.2.0`
++ # ex: `git checkout 4.2.0-stackable0.0.0-dev`
+ $ ./gradlew clean :storage:testJar
+
+ After build successfully, there should be a `kafka-storage-x.x.x-test.jar` file under `storage/build/libs`. Next, setting configurations in the broker side to enable tiered storage feature.
+@@ -79,7 +79,7 @@ After build successfully, there should be a `kafka-storage-x.x.x-test.jar` file
+ # This is the mandatory configuration for tiered storage.
+ # Here, we use the `LocalTieredStorage` built above.
+ remote.log.storage.manager.class.name=org.apache.kafka.server.log.remote.storage.LocalTieredStorage
+- remote.log.storage.manager.class.path=/PATH/TO/kafka-storage-4.2.0-test.jar
++ remote.log.storage.manager.class.path=/PATH/TO/kafka-storage-4.2.0-stackable0.0.0-dev-test.jar
+
+ # These 2 prefix are default values, but customizable
+ remote.log.storage.manager.impl.prefix=rsm.config.
+diff --git a/docs/streams/developer-guide/datatypes.md b/docs/streams/developer-guide/datatypes.md
+index 6354d6d54c..163571d1bf 100644
+--- a/docs/streams/developer-guide/datatypes.md
++++ b/docs/streams/developer-guide/datatypes.md
+@@ -92,7 +92,7 @@ Apache Kafka includes several built-in serde implementations for Java primitives
+
+ org.apache.kafka
+ kafka-clients
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ This artifact provides the following serde implementations under the package [org.apache.kafka.common.serialization](https://github.com/apache/kafka/blob/4.2/clients/src/main/java/org/apache/kafka/common/serialization), which you can leverage when e.g., defining default serializers in your Streams configuration.
+@@ -220,7 +220,7 @@ Apache Kafka Streams includes serde implementations for windowed types in its `k
+
+ org.apache.kafka
+ kafka-streams
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ This artifact provides the following windowed serde implementations under the package [org.apache.kafka.streams.kstream](https://github.com/apache/kafka/blob/4.2/streams/src/main/java/org/apache/kafka/streams/kstream):
+diff --git a/docs/streams/developer-guide/dsl-api.md b/docs/streams/developer-guide/dsl-api.md
+index 71b5c2f043..d9a35197cc 100644
+--- a/docs/streams/developer-guide/dsl-api.md
++++ b/docs/streams/developer-guide/dsl-api.md
+@@ -5785,7 +5785,7 @@ The library is cross-built with Scala 2.12 and 2.13. To reference the library co
+
+ org.apache.kafka
+ kafka-streams-scala_2.13
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ To use the library compiled against Scala 2.12 replace the `artifactId` with `kafka-streams-scala_2.12`.
+@@ -5793,7 +5793,7 @@ To use the library compiled against Scala 2.12 replace the `artifactId` with `ka
+ When using SBT then you can reference the correct library using the following:
+
+
+- libraryDependencies += "org.apache.kafka" %% "kafka-streams-scala" % "4.2.0"
++ libraryDependencies += "org.apache.kafka" %% "kafka-streams-scala" % "4.2.0-stackable0.0.0-dev"
+
+ ## Sample Usage
+
+diff --git a/docs/streams/developer-guide/testing.md b/docs/streams/developer-guide/testing.md
+index e95292aaee..76c2a49f00 100644
+--- a/docs/streams/developer-guide/testing.md
++++ b/docs/streams/developer-guide/testing.md
+@@ -39,7 +39,7 @@ To test a Kafka Streams application, Kafka provides a test-utils artifact that c
+
+ org.apache.kafka
+ kafka-streams-test-utils
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+ test
+
+
+diff --git a/docs/streams/developer-guide/write-streams-app.md b/docs/streams/developer-guide/write-streams-app.md
+index 14d7f647b4..5cca9db806 100644
+--- a/docs/streams/developer-guide/write-streams-app.md
++++ b/docs/streams/developer-guide/write-streams-app.md
+@@ -73,7 +73,7 @@ Description
+
+ |
+
+-`4.2.0`
++`4.2.0-stackable0.0.0-dev`
+ |
+
+
+@@ -90,7 +90,7 @@ Description
+ |
+
+
+-`4.2.0`
++`4.2.0-stackable0.0.0-dev`
+ |
+
+
+@@ -107,7 +107,7 @@ Description
+ |
+
+
+-`4.2.0`
++`4.2.0-stackable0.0.0-dev`
+ |
+
+
+@@ -124,17 +124,17 @@ Example `pom.xml` snippet when using Maven:
+
+ org.apache.kafka
+ kafka-streams
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ org.apache.kafka
+ kafka-clients
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ org.apache.kafka
+ kafka-streams-scala_2.13
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+
+ # Using Kafka Streams within your application code
+diff --git a/docs/streams/quickstart.md b/docs/streams/quickstart.md
+index c571bf553d..88558d0d0b 100644
+--- a/docs/streams/quickstart.md
++++ b/docs/streams/quickstart.md
+@@ -66,11 +66,11 @@ As the first step, we will start Kafka (unless you already have it started) and
+
+ ### Step 1: Download the code
+
+-[Download](https://www.apache.org/dyn/closer.cgi?path=/kafka/4.2.0/kafka_2.13-4.2.0.tgz "Kafka downloads") the 4.2.0 release and un-tar it. Note that there are multiple downloadable Scala versions and we choose to use the recommended version (2.13) here:
++[Download](https://www.apache.org/dyn/closer.cgi?path=/kafka/4.2.0-stackable0.0.0-dev/kafka_2.13-4.2.0-stackable0.0.0-dev.tgz "Kafka downloads") the 4.2.0-stackable0.0.0-dev release and un-tar it. Note that there are multiple downloadable Scala versions and we choose to use the recommended version (2.13) here:
+
+
+- $ tar -xzf kafka_2.13-4.2.0.tgz
+- $ cd kafka_2.13-4.2.0
++ $ tar -xzf kafka_2.13-4.2.0-stackable0.0.0-dev.tgz
++ $ cd kafka_2.13-4.2.0-stackable0.0.0-dev
+
+ ### Step 2: Start the Kafka server
+
+diff --git a/docs/streams/tutorial.md b/docs/streams/tutorial.md
+index 290144fd42..11fef66372 100644
+--- a/docs/streams/tutorial.md
++++ b/docs/streams/tutorial.md
+@@ -38,7 +38,7 @@ We are going to use a Kafka Streams Maven Archetype for creating a Streams proje
+ $ mvn archetype:generate \
+ -DarchetypeGroupId=org.apache.kafka \
+ -DarchetypeArtifactId=streams-quickstart-java \
+- -DarchetypeVersion=4.2.0 \
++ -DarchetypeVersion=4.2.0-stackable0.0.0-dev \
+ -DgroupId=streams.examples \
+ -DartifactId=streams-quickstart \
+ -Dversion=0.1 \
+diff --git a/docs/streams/upgrade-guide.md b/docs/streams/upgrade-guide.md
+index 2f948c6324..d4b75bfebd 100644
+--- a/docs/streams/upgrade-guide.md
++++ b/docs/streams/upgrade-guide.md
+@@ -28,20 +28,20 @@ type: docs
+
+ # Upgrade Guide and API Changes
+
+-Upgrading from any older version to 4.2.0 is possible: if upgrading from 3.4 or below, you will need to do two rolling bounces, where during the first rolling bounce phase you set the config `upgrade.from="older version"` (possible values are `"2.4" - "3.4"`) and during the second you remove it. This is required to safely handle 2 changes. The first is a change in foreign-key join serialization format. The second is a change in the serialization format for an internal repartition topic. For more details, please refer to [KIP-904](https://cwiki.apache.org/confluence/x/P5VbDg):
++Upgrading from any older version to 4.2.0-stackable0.0.0-dev is possible: if upgrading from 3.4 or below, you will need to do two rolling bounces, where during the first rolling bounce phase you set the config `upgrade.from="older version"` (possible values are `"2.4" - "3.4"`) and during the second you remove it. This is required to safely handle 2 changes. The first is a change in foreign-key join serialization format. The second is a change in the serialization format for an internal repartition topic. For more details, please refer to [KIP-904](https://cwiki.apache.org/confluence/x/P5VbDg):
+
+ * prepare your application instances for a rolling bounce and make sure that config `upgrade.from` is set to the version from which it is being upgrade.
+ * bounce each instance of your application once
+- * prepare your newly deployed 4.2.0 application instances for a second round of rolling bounces; make sure to remove the value for config `upgrade.from`
++ * prepare your newly deployed 4.2.0-stackable0.0.0-dev application instances for a second round of rolling bounces; make sure to remove the value for config `upgrade.from`
+ * bounce each instance of your application once more to complete the upgrade
+
+
+
+-As an alternative, an offline upgrade is also possible. Upgrading from any versions as old as 0.11.0.x to 4.2.0 in offline mode require the following steps:
++As an alternative, an offline upgrade is also possible. Upgrading from any versions as old as 0.11.0.x to 4.2.0-stackable0.0.0-dev in offline mode require the following steps:
+
+ * stop all old (e.g., 0.11.0.x) application instances
+ * update your code and swap old code and jar file with new code and new jar file
+- * restart all new (4.2.0) application instances
++ * restart all new (4.2.0-stackable0.0.0-dev) application instances
+
+
+
+@@ -63,7 +63,7 @@ Starting in Kafka Streams 2.6.x, a new processing mode is available, named EOS v
+
+ Since 2.6.0 release, Kafka Streams depends on a RocksDB version that requires MacOS 10.14 or higher.
+
+-## Streams API changes in 4.2.0
++## Streams API changes in 4.2.0-stackable0.0.0-dev
+
+ ### General Availability for a core feature set of the Streams Rebalance Protocol (KIP-1071)
+
+@@ -295,7 +295,7 @@ Kafka Streams does not send a "leave group" request when an instance is closed.
+ * `KStream KStream.process(ProcessorSupplier, ...)`
+ * `KStream KStream.processValues(FixedKeyProcessorSupplier, ...)`
+
+-Both new methods have multiple overloads and return a `KStream` instead of `void` as the deprecated `process()` methods did. In addition, `FixedKeyProcessor`, `FixedKeyRecord`, `FixedKeyProcessorContext`, and `ContextualFixedKeyProcessor` are introduced to guard against disallowed key modification inside `processValues()`. Furthermore, `ProcessingContext` is added for a better interface hierarchy. **CAUTION:** The newly added `KStream.processValues()` method introduced a regression bug ([KAFKA-19668](https://issues.apache.org/jira/browse/KAFKA-19668)). If you have "merge repartition topics" optimization enabled, it is not safe to migrate from `transformValues()` to `processValues()` in 3.3.0 release. The bug is only fixed with Kafka Streams 4.0.1, 4.1.1, and 4.2.0. For more details, please refer to the [migration guide](/42/streams/developer-guide/dsl-api/#transformers-removal-and-migration-to-processors).
++Both new methods have multiple overloads and return a `KStream` instead of `void` as the deprecated `process()` methods did. In addition, `FixedKeyProcessor`, `FixedKeyRecord`, `FixedKeyProcessorContext`, and `ContextualFixedKeyProcessor` are introduced to guard against disallowed key modification inside `processValues()`. Furthermore, `ProcessingContext` is added for a better interface hierarchy. **CAUTION:** The newly added `KStream.processValues()` method introduced a regression bug ([KAFKA-19668](https://issues.apache.org/jira/browse/KAFKA-19668)). If you have "merge repartition topics" optimization enabled, it is not safe to migrate from `transformValues()` to `processValues()` in 3.3.0 release. The bug is only fixed with Kafka Streams 4.0.1, 4.1.1, and 4.2.0-stackable0.0.0-dev. For more details, please refer to the [migration guide](/42/streams/developer-guide/dsl-api/#transformers-removal-and-migration-to-processors).
+
+ Emitting a windowed aggregation result only after a window is closed is currently supported via the `suppress()` operator. However, `suppress()` uses an in-memory implementation and does not support RocksDB. To close this gap, [KIP-825](https://cwiki.apache.org/confluence/x/n7fkCw) introduces "emit strategies", which are built into the aggregation operator directly to use the already existing RocksDB store. `TimeWindowedKStream.emitStrategy(EmitStrategy)` and `SessionWindowedKStream.emitStrategy(EmitStrategy)` allow picking between "emit on window update" (default) and "emit on window close" strategies. Additionally, a few new emit metrics are added, as well as a necessary new method, `SessionStore.findSessions(long, long)`.
+
+diff --git a/gradle.properties b/gradle.properties
+index 7d294bfc15..b2d6fa66f9 100644
+--- a/gradle.properties
++++ b/gradle.properties
+@@ -22,7 +22,7 @@ group=org.apache.kafka
+ # - streams/quickstart/pom.xml
+ # - streams/quickstart/java/src/main/resources/archetype-resources/pom.xml
+ # - streams/quickstart/java/pom.xml
+-version=4.2.0
++version=4.2.0-stackable0.0.0-dev
+ scalaVersion=2.13.17
+ # Adding swaggerVersion in gradle.properties to have a single version in place for swagger
+ swaggerVersion=2.2.39
+diff --git a/streams/quickstart/java/pom.xml b/streams/quickstart/java/pom.xml
+index 1162e1a180..6448eb9fe9 100644
+--- a/streams/quickstart/java/pom.xml
++++ b/streams/quickstart/java/pom.xml
+@@ -26,7 +26,7 @@
+
+ org.apache.kafka
+ streams-quickstart
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+ ..
+
+
+diff --git a/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml b/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml
+index 027f4830bf..d4e3a0a4bd 100644
+--- a/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml
++++ b/streams/quickstart/java/src/main/resources/archetype-resources/pom.xml
+@@ -29,7 +29,7 @@
+
+
+ UTF-8
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+ 2.0.16
+
+
+diff --git a/streams/quickstart/pom.xml b/streams/quickstart/pom.xml
+index b7e51c15c5..5ce42abd98 100644
+--- a/streams/quickstart/pom.xml
++++ b/streams/quickstart/pom.xml
+@@ -22,7 +22,7 @@
+ org.apache.kafka
+ streams-quickstart
+ pom
+- 4.2.0
++ 4.2.0-stackable0.0.0-dev
+
+ Kafka Streams :: Quickstart
+
+diff --git a/tests/kafkatest/__init__.py b/tests/kafkatest/__init__.py
+index b192f8f528..0c0d0414c6 100644
+--- a/tests/kafkatest/__init__.py
++++ b/tests/kafkatest/__init__.py
+@@ -22,4 +22,4 @@
+ # Instead, in development branches, the version should have a suffix of the form ".devN"
+ #
+ # For example, when Kafka is at version 1.0.0-SNAPSHOT, this should be something like "1.0.0.dev0"
+-__version__ = '4.2.0'
++__version__ = '4.2.0-stackable0.0.0-dev'
+diff --git a/tests/kafkatest/version.py b/tests/kafkatest/version.py
+index 86d74eee12..3a782bb830 100644
+--- a/tests/kafkatest/version.py
++++ b/tests/kafkatest/version.py
+@@ -100,21 +100,21 @@ class KafkaVersion(LooseVersion):
+ def supports_command_config(self):
+ # According to KIP-1147, --producer.config and --consumer.config have been deprecated and will be removed in future versions
+ # For backward compatibility, we select the configuration based on node version:
+- # - For versions 4.2.0 and above, use --command-config
++ # - For versions 4.2.0-stackable0.0.0-dev and above, use --command-config
+ # - For older versions, continue using --producer.config or --consumer.config
+ return self >= V_4_2_0
+
+ def supports_command_property(self):
+ # According to KIP-1147, --producer-property and --consumer-property have been deprecated and will be removed in future versions
+ # For backward compatibility, we select the configuration based on node version:
+- # - For versions 4.2.0 and above, use --command-property
++ # - For versions 4.2.0-stackable0.0.0-dev and above, use --command-property
+ # - For older versions, continue using --producer-property or --consumer-property
+ return self >= V_4_2_0
+
+ def supports_formatter_property(self):
+ # According to KIP-1147, --property has been deprecated and will be removed in future versions
+ # For backward compatibility, we select the configuration based on node version:
+- # - For versions 4.2.0 and above, use --formatter-property
++ # - For versions 4.2.0-stackable0.0.0-dev and above, use --formatter-property
+ # - For older versions, continue using --property
+ return self >= V_4_2_0
+
+@@ -128,7 +128,7 @@ def get_version(node=None):
+ return DEV_BRANCH
+
+ DEV_BRANCH = KafkaVersion("dev")
+-DEV_VERSION = KafkaVersion("4.2.0-SNAPSHOT")
++DEV_VERSION = KafkaVersion("4.2.0-stackable0.0.0-dev-SNAPSHOT")
+
+ LATEST_STABLE_TRANSACTION_VERSION = 2
+ # This should match the LATEST_PRODUCTION version defined in MetadataVersion.java
+@@ -247,5 +247,5 @@ V_4_1_1 = KafkaVersion("4.1.1")
+ LATEST_4_1 = V_4_1_1
+
+ # 4.2.x version
+-V_4_2_0 = KafkaVersion("4.2.0")
++V_4_2_0 = KafkaVersion("4.2.0-stackable0.0.0-dev")
+ LATEST_4_2 = V_4_2_0
diff --git a/kafka/stackable/patches/4.2.0/0004-Include-jackson-dataformat-xml-dependency.patch b/kafka/stackable/patches/4.2.0/0004-Include-jackson-dataformat-xml-dependency.patch
new file mode 100644
index 000000000..2b1bb9767
--- /dev/null
+++ b/kafka/stackable/patches/4.2.0/0004-Include-jackson-dataformat-xml-dependency.patch
@@ -0,0 +1,131 @@
+From f77fdcc430c86388d9c575b8351488691915876b Mon Sep 17 00:00:00 2001
+From: Razvan-Daniel Mihai <84674+razvan@users.noreply.github.com>
+Date: Wed, 13 May 2026 17:25:07 +0300
+Subject: Include jackson dataformat xml dependency
+
+---
+ build.gradle | 15 ++++++++++++++-
+ gradle/dependencies.gradle | 1 +
+ 2 files changed, 15 insertions(+), 1 deletion(-)
+
+diff --git a/build.gradle b/build.gradle
+index 2bdfb80d85..0ed19d18bb 100644
+--- a/build.gradle
++++ b/build.gradle
+@@ -185,13 +185,15 @@ ext {
+ libs.slf4jLog4j2,
+ libs.junitPlatformLanucher,
+ libs.jacksonDatabindYaml,
++ libs.jacksonDatabindXml,
+ project(":test-common:test-common-util")
+ ]
+
+ log4jReleaseLibs = [
+ libs.slf4jLog4j2,
+ libs.log4j1Bridge2Api,
+- libs.jacksonDatabindYaml
++ libs.jacksonDatabindYaml,
++ libs.jacksonDatabindXml
+ ]
+
+ log4j2Libs = [
+@@ -1120,6 +1122,7 @@ project(':core') {
+ implementation libs.jacksonDataformatCsv
+ implementation libs.jacksonJDK8Datatypes
+ implementation libs.jacksonDatabindYaml
++ implementation libs.jacksonDatabindXml
+ implementation libs.joptSimple
+ implementation libs.jose4j
+ implementation libs.metrics
+@@ -1534,6 +1537,7 @@ project(':group-coordinator') {
+ testImplementation project(':server-common').sourceSets.test.output
+ testImplementation project(':coordinator-common').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoCore
+ testImplementation testLog4j2Libs
+@@ -1656,6 +1660,7 @@ project(':test-common:test-common-runtime') {
+ implementation libs.junitPlatformLanucher
+ implementation libs.junitJupiter
+ implementation libs.jacksonDatabindYaml
++ implementation libs.jacksonDatabindXml
+ implementation libs.slf4jApi
+
+ testImplementation libs.junitJupiter
+@@ -2130,6 +2135,7 @@ project(':raft') {
+ testImplementation project(':clients')
+ testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoCore
+ testImplementation libs.jqwik
+@@ -2227,6 +2233,7 @@ project(':server-common') {
+ testImplementation project(':clients')
+ testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoCore
+ testImplementation testLog4j2Libs
+@@ -2368,6 +2375,7 @@ project(':storage') {
+ testImplementation project(':transaction-coordinator')
+ testImplementation libs.hamcrest
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoCore
+ testImplementation libs.bcpkix
+@@ -2685,6 +2693,7 @@ project(':shell') {
+ testImplementation project(':server-common')
+ testImplementation project(':server-common').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation testLog4j2Libs
+
+@@ -2734,6 +2743,7 @@ project(':streams') {
+
+ testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.bcpkix
+ testImplementation libs.hamcrest
+@@ -2879,6 +2889,7 @@ project(':streams:streams-scala') {
+ testImplementation project(':streams:test-utils')
+
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
+ testImplementation testLog4j2Libs
+@@ -2983,6 +2994,7 @@ project(':streams:test-utils') {
+
+ testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoCore
+ testImplementation libs.hamcrest
+@@ -3678,6 +3690,7 @@ project(':connect:runtime') {
+ testImplementation project(':server-common').sourceSets.test.output
+
+ testImplementation libs.jacksonDatabindYaml
++ testImplementation libs.jacksonDatabindXml
+ testImplementation libs.junitJupiter
+ testImplementation libs.mockitoCore
+ testImplementation libs.mockitoJunitJupiter
+diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle
+index fb0465b66e..094d4545be 100644
+--- a/gradle/dependencies.gradle
++++ b/gradle/dependencies.gradle
+@@ -157,6 +157,7 @@ libs += [
+ jacksonAnnotations: "com.fasterxml.jackson.core:jackson-annotations:$versions.jackson",
+ jacksonDatabind: "com.fasterxml.jackson.core:jackson-databind:$versions.jackson",
+ jacksonDatabindYaml: "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$versions.jackson",
++ jacksonDatabindXml: "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:$versions.jackson",
+ jacksonDataformatCsv: "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:$versions.jackson",
+ jacksonJDK8Datatypes: "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:$versions.jackson",
+ jacksonBlackbird: "com.fasterxml.jackson.module:jackson-module-blackbird:$versions.jackson",
diff --git a/kafka/stackable/patches/4.2.0/patchable.toml b/kafka/stackable/patches/4.2.0/patchable.toml
new file mode 100644
index 000000000..ced9a66f7
--- /dev/null
+++ b/kafka/stackable/patches/4.2.0/patchable.toml
@@ -0,0 +1 @@
+base = "a18251bae0b825c69794a50dffd4c3100cf5ca5b"
|