diff --git a/CHANGELOG.md b/CHANGELOG.md index 4cc81015..210bc9fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,13 +12,15 @@ All notable changes to this project will be documented in this file. ### Changed - `vector` `0.26.0` -> `0.31.0` ([#269]). -- `operator-rs` `0.44.0` -> `0.45.1` ([#267]). +- `operator-rs` `0.44.0` -> `0.48.0` ([#267], [#275]). - Removed usages of SPARK_DAEMON_JAVA_OPTS since it's not a reliable way to pass extra JVM options ([#272]). +- [BREAKING] use product image selection instead of version ([#275]). [#267]: https://github.com/stackabletech/spark-k8s-operator/pull/267 [#268]: https://github.com/stackabletech/spark-k8s-operator/pull/268 [#269]: https://github.com/stackabletech/spark-k8s-operator/pull/269 [#272]: https://github.com/stackabletech/spark-k8s-operator/pull/272 +[#275]: https://github.com/stackabletech/spark-k8s-operator/pull/275 ## [23.7.0] - 2023-07-14 diff --git a/Cargo.lock b/Cargo.lock index ef6e506b..8359580f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -340,6 +340,16 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -465,69 +475,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] -name = "encoding" -version = "0.2.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" -dependencies = [ - "encoding-index-japanese", - "encoding-index-korean", - "encoding-index-simpchinese", - "encoding-index-singlebyte", - "encoding-index-tradchinese", -] - -[[package]] -name = "encoding-index-japanese" -version = "1.20141219.5" +name = "encoding_rs" +version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-korean" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-simpchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-singlebyte" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-tradchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" -dependencies = [ - "encoding_index_tests", + "cfg-if", ] -[[package]] -name = "encoding_index_tests" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" - [[package]] name = "equivalent" version = "1.0.1" @@ -557,9 +512,9 @@ dependencies = [ [[package]] name = "fancy-regex" -version = "0.8.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d95b4efe5be9104a4a18a9916e86654319895138be727b229820c39257c30dda" +checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2" dependencies = [ "bit-set", "regex", @@ -571,21 +526,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "form_urlencoded" version = "1.2.0" @@ -834,21 +774,19 @@ dependencies = [ ] [[package]] -name = "hyper-openssl" -version = "0.9.2" +name = "hyper-rustls" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ee5d7a8f718585d1c3c61dfde28ef5b0bb14734b4db13f5ada856cdc6c612b" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ + "futures-util", "http", "hyper", - "linked_hash_set", - "once_cell", - "openssl", - "openssl-sys", - "parking_lot", + "log", + "rustls", + "rustls-native-certs", "tokio", - "tokio-openssl", - "tower-layer", + "tokio-rustls", ] [[package]] @@ -956,11 +894,11 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "java-properties" -version = "1.4.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1904d8654a1ef51034d02d5a9411b50bf91bea15b0ab644ae179d1325976263" +checksum = "37bf6f484471c451f2b51eabd9e66b3fa7274550c5ec4b6c3d6070840945117f" dependencies = [ - "encoding", + "encoding_rs", "lazy_static", "regex", ] @@ -1049,18 +987,19 @@ dependencies = [ "http", "http-body", "hyper", - "hyper-openssl", + "hyper-rustls", "hyper-timeout", "jsonpath_lib", "k8s-openapi", "kube-core", - "openssl", "pem", "pin-project", + "rustls", + "rustls-pemfile", "secrecy", "serde", "serde_json", - "serde_yaml 0.9.25", + "serde_yaml", "thiserror", "tokio", "tokio-util", @@ -1162,21 +1101,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - -[[package]] -name = "linked_hash_set" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" -dependencies = [ - "linked-hash-map", -] - [[package]] name = "linux-raw-sys" version = "0.4.5" @@ -1285,42 +1209,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] -name = "openssl" -version = "0.10.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" -dependencies 
= [ - "bitflags 1.3.2", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" +name = "openssl-probe" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - -[[package]] -name = "openssl-sys" -version = "0.9.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "opentelemetry" @@ -1511,8 +1403,8 @@ dependencies = [ [[package]] name = "product-config" -version = "0.4.0" -source = "git+https://github.com/stackabletech/product-config.git?tag=0.4.0#e1e5938b4f6120f85a088194e86d22433fdba731" +version = "0.5.0" +source = "git+https://github.com/stackabletech/product-config.git?tag=0.5.0#439869d9e6a72fb6d912f6e494649a2f74f41d25" dependencies = [ "fancy-regex", "java-properties", @@ -1520,7 +1412,7 @@ dependencies = [ "semver", "serde", "serde_json", - "serde_yaml 0.8.26", + "serde_yaml", "thiserror", "xml-rs", ] @@ -1623,6 +1515,21 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bf2521270932c3c7bed1a59151222bd7643c79310f2916f01925e1e16255698" +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + [[package]] name = "rstest" version = "0.18.1" @@ -1680,6 +1587,49 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "rustls" +version = "0.21.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +dependencies = [ + "base64 0.21.2", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "261e9e0888cba427c3316e6322805653c9425240b6fd96cee7cb671ab70ab8d0" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.14" @@ -1692,6 +1642,15 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys", +] + [[package]] name = "schemars" version = "0.8.12" @@ -1722,6 +1681,16 @@ 
version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "secrecy" version = "0.8.0" @@ -1732,6 +1701,29 @@ dependencies = [ "zeroize", ] +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "1.0.18" @@ -1803,18 +1795,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_yaml" -version = "0.8.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" -dependencies = [ - "indexmap 1.9.3", - "ryu", - "serde", - "yaml-rust", -] - [[package]] name = "serde_yaml" version = "0.9.25" @@ -1893,10 +1873,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "stackable-operator" -version = "0.46.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=0.46.0#c88fbe2c5692b773af23b9680d42df00099cf074" +version = "0.48.0" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=0.48.0#ad0aed7c4df39dd6b2c4c81a8f6ca184025550cf" dependencies = [ "chrono", "clap", @@ -1916,7 +1902,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "serde_yaml 0.9.25", + "serde_yaml", "snafu", "stackable-operator-derive", "strum", @@ -1929,8 +1915,8 @@ dependencies = [ [[package]] name = "stackable-operator-derive" -version = "0.46.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=0.46.0#c88fbe2c5692b773af23b9680d42df00099cf074" +version = "0.48.0" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=0.48.0#ad0aed7c4df39dd6b2c4c81a8f6ca184025550cf" dependencies = [ "darling 0.20.3", "proc-macro2", @@ -1946,7 +1932,7 @@ dependencies = [ "semver", "serde", "serde_json", - "serde_yaml 0.9.25", + "serde_yaml", "snafu", "stackable-operator", "strum", @@ -1964,7 +1950,7 @@ dependencies = [ "semver", "serde", "serde_json", - "serde_yaml 0.9.25", + "serde_yaml", "snafu", "stackable-operator", "stackable-spark-k8s-crd", @@ -2133,14 +2119,12 @@ dependencies = [ ] [[package]] -name = "tokio-openssl" -version = "0.6.3" +name = "tokio-rustls" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08f9ffb7809f1b20c1b398d92acf4cc719874b3b2b2d9ea2f09b4a80350878a" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "futures-util", - "openssl", - "openssl-sys", + "rustls", "tokio", ] @@ -2391,6 +2375,12 @@ version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "url" version = "2.4.0" @@ -2501,6 +2491,16 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +[[package]] +name = "web-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "winapi" version = "0.3.9" @@ -2613,15 +2613,6 @@ version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47430998a7b5d499ccee752b41567bc3afc57e1327dc855b1a2aa44ce29b5fa1" -[[package]] -name = "yaml-rust" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" -dependencies = [ - "linked-hash-map", -] - [[package]] name = "zeroize" version = "1.6.0" diff --git a/Cargo.toml b/Cargo.toml index 0f382618..9c7f39f2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" serde_yaml = "0.9" snafu = "0.7" -stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", tag = "0.46.0" } +stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", tag = "0.48.0" } strum = { version = "0.25", features = ["derive"] } tokio = { version = "1.29", features = ["full"] } tracing = "0.1" diff --git a/deploy/helm/spark-k8s-operator/crds/crds.yaml b/deploy/helm/spark-k8s-operator/crds/crds.yaml index c519f368..8c4551ad 100644 --- a/deploy/helm/spark-k8s-operator/crds/crds.yaml +++ b/deploy/helm/spark-k8s-operator/crds/crds.yaml @@ -10122,25 +10122,47 @@ spec: nullable: true type: object sparkImage: - nullable: true - type: string - sparkImagePullPolicy: - enum: - - Always - - IfNotPresent - - Never - nullable: true - type: string - sparkImagePullSecrets: - items: - description: LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. - properties: - name: - description: 'Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' - type: string - type: object - nullable: true - type: array + anyOf: + - required: + - custom + - productVersion + - required: + - productVersion + properties: + custom: + description: Overwrite the docker image. Specify the full docker image name, e.g. `docker.stackable.tech/stackable/superset:1.4.1-stackable2.1.0` + type: string + productVersion: + description: Version of the product, e.g. `1.4.1`. 
+ type: string + pullPolicy: + default: Always + description: '[Pull policy](https://kubernetes.io/docs/concepts/containers/images/#image-pull-policy) used when pulling the Images' + enum: + - IfNotPresent + - Always + - Never + type: string + pullSecrets: + description: '[Image pull secrets](https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod) to pull images from a private registry' + items: + description: LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. + properties: + name: + description: 'Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' + type: string + type: object + nullable: true + type: array + repo: + description: Name of the docker repo, e.g. `docker.stackable.tech/stackable` + nullable: true + type: string + stackableVersion: + description: Stackable version of the product, e.g. `23.4`, `23.4.1` or `0.0.0-dev`. If not specified, the operator will use its own version, e.g. `23.4.1`. When using a nightly operator or a pr version, it will use the nightly `0.0.0-dev` image. + nullable: true + type: string + type: object stopped: nullable: true type: boolean @@ -11227,6 +11249,8 @@ spec: type: object nullable: true type: array + required: + - sparkImage type: object status: nullable: true diff --git a/docs/modules/spark-k8s/examples/example-encapsulated.yaml b/docs/modules/spark-k8s/examples/example-encapsulated.yaml index 1aa0305b..52a39f99 100644 --- a/docs/modules/spark-k8s/examples/example-encapsulated.yaml +++ b/docs/modules/spark-k8s/examples/example-encapsulated.yaml @@ -5,7 +5,8 @@ metadata: name: spark-pi spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev # <1> + sparkImage: + productVersion: 3.3.0 # <1> mode: cluster mainClass: org.apache.spark.examples.SparkPi mainApplicationFile: /stackable/spark/examples/jars/spark-examples.jar # <2> diff --git a/docs/modules/spark-k8s/examples/example-history-app.yaml b/docs/modules/spark-k8s/examples/example-history-app.yaml index e54c3824..2323ead9 100644 --- a/docs/modules/spark-k8s/examples/example-history-app.yaml +++ b/docs/modules/spark-k8s/examples/example-history-app.yaml @@ -5,8 +5,9 @@ metadata: name: spark-pi-s3-1 spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev - sparkImagePullPolicy: IfNotPresent + sparkImage: + productVersion: 3.3.0 + pullPolicy: IfNotPresent mode: cluster mainClass: org.apache.spark.examples.SparkPi mainApplicationFile: s3a://my-bucket/spark-examples.jar diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml index 262dcb14..6948e67a 100644 --- a/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml +++ b/docs/modules/spark-k8s/examples/example-sparkapp-configmap.yaml @@ -6,7 +6,8 @@ metadata: namespace: default spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.1.0.jar # <3> mainClass: tech.stackable.demo.spark.NYTLCReport diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml index fcdd502d..45838630 100644 --- 
a/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml +++ b/docs/modules/spark-k8s/examples/example-sparkapp-external-dependencies.yaml @@ -6,7 +6,8 @@ metadata: namespace: default spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny_tlc_report.py # <1> args: diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml index d88af990..0d74f282 100644 --- a/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml +++ b/docs/modules/spark-k8s/examples/example-sparkapp-image.yaml @@ -7,7 +7,8 @@ metadata: spec: version: "1.0" image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0 # <1> - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py # <2> args: diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml index 2703b440..120e09b6 100644 --- a/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml +++ b/docs/modules/spark-k8s/examples/example-sparkapp-pvc.yaml @@ -6,7 +6,8 @@ metadata: namespace: default spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny-tlc-report-1.0-SNAPSHOT.jar # <1> mainClass: org.example.App # <2> diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml index 174dbf31..e3c013e0 100644 --- a/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml +++ b/docs/modules/spark-k8s/examples/example-sparkapp-s3-private.yaml @@ -5,7 +5,8 @@ metadata: name: example-sparkapp-s3-private spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: s3a://my-bucket/spark-examples.jar # <1> mainClass: org.apache.spark.examples.SparkPi # <2> diff --git a/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml b/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml index 72007aa0..2cf0d96a 100644 --- a/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml +++ b/docs/modules/spark-k8s/examples/example-sparkapp-streaming.yaml @@ -6,7 +6,8 @@ metadata: namespace: default spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: local:///stackable/spark/examples/src/main/python/streaming/hdfs_wordcount.py args: diff --git a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh index 4a79f79a..5a2a37d5 100755 --- a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh +++ b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh @@ -58,7 +58,8 @@ metadata: namespace: default spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: 
local:///stackable/spark/examples/src/main/python/pi.py driver: diff --git a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2 b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2 index 671c9d9e..ac688ab2 100755 --- a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2 +++ b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh.j2 @@ -58,7 +58,8 @@ metadata: namespace: default spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable{{ versions.spark }} + sparkImage: + productVersion: 3.3.0 mode: cluster mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py driver: diff --git a/docs/modules/spark-k8s/pages/index.adoc b/docs/modules/spark-k8s/pages/index.adoc index 305a4605..0c12eec3 100644 --- a/docs/modules/spark-k8s/pages/index.adoc +++ b/docs/modules/spark-k8s/pages/index.adoc @@ -57,8 +57,3 @@ In the xref:stackablectl::demos/spark-k8s-anomaly-detection-taxi-data.adoc[] dem The Stackable Operator for Apache Spark on Kubernetes currently supports the following versions of Spark: include::partial$supported-versions.adoc[] - -The Spark images do not include Python. For PySpark users, there are separate images called `pyspark-k8s` instead of `spark-k8s`. These come with different Python versions as follows: - -- pyspark-k8s:3.3.0-hadoop3 : Python 3.9 -- pyspark-k8s:3.4.0-hadoop3 : Python 3.11 diff --git a/docs/modules/spark-k8s/partials/supported-versions.adoc b/docs/modules/spark-k8s/partials/supported-versions.adoc index 6918efc6..0b8fb2a7 100644 --- a/docs/modules/spark-k8s/partials/supported-versions.adoc +++ b/docs/modules/spark-k8s/partials/supported-versions.adoc @@ -2,5 +2,5 @@ // This is a separate file, since it is used by both the direct Spark documentation, and the overarching // Stackable Platform documentation. 
-- 3.3.0-hadoop3 (Hadoop 3.3.3, Scala 2.12, Java 11)
-- 3.4.0-hadoop3 (Hadoop 3.3.4, Scala 2.12, Java 11)
+- 3.3.0 (Hadoop 3.3.3, Scala 2.12, Python 3.9, Java 11)
+- 3.4.0 (Hadoop 3.3.4, Scala 2.12, Python 3.11, Java 11)
diff --git a/examples/ny-tlc-report-external-dependencies.yaml b/examples/ny-tlc-report-external-dependencies.yaml
index f69e1c9c..824fa8cf 100644
--- a/examples/ny-tlc-report-external-dependencies.yaml
+++ b/examples/ny-tlc-report-external-dependencies.yaml
@@ -6,9 +6,9 @@ metadata:
   namespace: default
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/pyspark-k8s:3.3.0-stackable0.0.0-dev
-  # Always | IfNotPresent | Never
-  sparkImagePullPolicy: IfNotPresent
+  sparkImage:
+    productVersion: 3.3.0
+    pullPolicy: IfNotPresent
   mode: cluster
   mainApplicationFile: s3a://my-bucket/ny_tlc_report.py
   args:
diff --git a/examples/ny-tlc-report.yaml b/examples/ny-tlc-report.yaml
index 8b58e4b4..c4c11cc4 100644
--- a/examples/ny-tlc-report.yaml
+++ b/examples/ny-tlc-report.yaml
@@ -13,7 +13,8 @@ metadata:
   name: spark-ny-cm
 spec:
   version: "1.0"
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:3.3.0-stackable0.0.0-dev
+  sparkImage:
+    productVersion: 3.3.0
   mode: cluster
   mainApplicationFile: s3a://my-bucket/ny-tlc-report-1.1.0-3.3.0.jar
   mainClass: tech.stackable.demo.spark.NYTLCReport
diff --git a/rust/crd/src/constants.rs b/rust/crd/src/constants.rs
index d9ca862a..59658051 100644
--- a/rust/crd/src/constants.rs
+++ b/rust/crd/src/constants.rs
@@ -68,7 +68,7 @@ pub const HISTORY_CONTROLLER_NAME: &str = "history";
 pub const HISTORY_ROLE_NAME: &str = "node";
-pub const HISTORY_IMAGE_BASE_NAME: &str = "spark-k8s";
+pub const SPARK_IMAGE_BASE_NAME: &str = "spark-k8s";
 pub const SPARK_DEFAULTS_FILE_NAME: &str = "spark-defaults.conf";
diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs
index 05f63574..515df0b4 100644
--- a/rust/crd/src/lib.rs
+++ b/rust/crd/src/lib.rs
@@ -22,6 +22,7 @@ use stackable_operator::{
     builder::{SecretOperatorVolumeSourceBuilder, VolumeBuilder},
     commons::{
         affinity::{StackableAffinity, StackableAffinityFragment},
+        product_image_selection::ProductImage,
         resources::{
             CpuLimits, CpuLimitsFragment, MemoryLimits, MemoryLimitsFragment, NoRuntimeLimits,
             NoRuntimeLimitsFragment, Resources, ResourcesFragment,
@@ -35,10 +36,7 @@ use stackable_operator::{
         merge::{Atomic, Merge},
     },
     k8s_openapi::{
-        api::core::v1::{
-            EmptyDirVolumeSource, EnvVar, LocalObjectReference, PodTemplateSpec, Volume,
-            VolumeMount,
-        },
+        api::core::v1::{EmptyDirVolumeSource, EnvVar, PodTemplateSpec, Volume, VolumeMount},
         apimachinery::pkg::api::resource::Quantity,
     },
     kube::{CustomResource, ResourceExt},
@@ -48,7 +46,7 @@ use stackable_operator::{
     role_utils::pod_overrides_schema,
     schemars::{self, JsonSchema},
 };
-use strum::{Display, EnumIter, EnumString};
+use strum::{Display, EnumIter};
 #[derive(Snafu, Debug)]
 pub enum Error {
@@ -150,7 +148,7 @@ impl SparkConfig {
     }
 }
-#[derive(Clone, CustomResource, Debug, Default, Deserialize, JsonSchema, Serialize)]
+#[derive(Clone, CustomResource, Debug, Deserialize, JsonSchema, Serialize)]
 #[kube(
     group = "spark.stackable.tech",
     version = "v1alpha1",
@@ -176,12 +174,7 @@ pub struct SparkApplicationSpec {
     pub main_application_file: Option<String>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub image: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub spark_image: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub spark_image_pull_policy: Option<ImagePullPolicy>,
-    #[serde(default, skip_serializing_if
= "Option::is_none")] - pub spark_image_pull_secrets: Option>, + pub spark_image: ProductImage, /// Name of the Vector aggregator discovery ConfigMap. /// It must contain the key `ADDRESS` with the address of the Vector aggregator. #[serde(skip_serializing_if = "Option::is_none")] @@ -210,13 +203,6 @@ pub struct SparkApplicationSpec { pub log_file_directory: Option, } -#[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize, Display, EnumString)] -pub enum ImagePullPolicy { - Always, - IfNotPresent, - Never, -} - #[derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Eq, Serialize)] #[serde(rename_all = "camelCase")] pub struct JobDependencies { @@ -247,14 +233,6 @@ impl SparkApplication { self.spec.image.as_deref() } - pub fn spark_image_pull_policy(&self) -> Option { - self.spec.spark_image_pull_policy.clone() - } - - pub fn spark_image_pull_secrets(&self) -> Option> { - self.spec.spark_image_pull_secrets.clone() - } - pub fn version(&self) -> Option<&str> { self.spec.version.as_deref() } @@ -477,6 +455,7 @@ impl SparkApplication { serviceaccount_name: &str, s3conn: &Option, s3_log_dir: &Option, + spark_image: &str, ) -> Result, Error> { // mandatory properties let mode = self.mode().context(ObjectHasNoDeployModeSnafu)?; @@ -495,8 +474,8 @@ impl SparkApplication { format!("--conf spark.kubernetes.driver.podTemplateContainerName={container_name}", container_name = SparkContainer::Spark), format!("--conf spark.kubernetes.executor.podTemplateContainerName={container_name}", container_name = SparkContainer::Spark), format!("--conf spark.kubernetes.namespace={}", self.metadata.namespace.as_ref().context(NoNamespaceSnafu)?), - format!("--conf spark.kubernetes.driver.container.image={}", self.spec.spark_image.as_ref().context(NoSparkImageSnafu)?), - format!("--conf spark.kubernetes.executor.container.image={}", self.spec.spark_image.as_ref().context(NoSparkImageSnafu)?), + format!("--conf spark.kubernetes.driver.container.image={}", spark_image.to_string()), + format!("--conf spark.kubernetes.executor.container.image={}", spark_image.to_string()), format!("--conf spark.kubernetes.authenticate.driver.serviceAccountName={}", serviceaccount_name), format!("--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"), format!("--conf spark.driver.extraClassPath=/stackable/spark/extra-jars/*"), @@ -1071,11 +1050,9 @@ impl ExecutorConfig { #[cfg(test)] mod tests { - use crate::{ - cores_from_quantity, resources_to_executor_props, ExecutorConfig, ImagePullPolicy, - }; + use crate::DriverConfig; + use crate::{cores_from_quantity, resources_to_executor_props, ExecutorConfig}; use crate::{resources_to_driver_props, SparkApplication}; - use crate::{DriverConfig, LocalObjectReference}; use crate::{Quantity, SparkStorageConfig}; use rstest::rstest; use stackable_operator::builder::ObjectMetaBuilder; @@ -1086,7 +1063,6 @@ mod tests { use stackable_operator::k8s_openapi::api::core::v1::PodTemplateSpec; use stackable_operator::product_logging::spec::Logging; use std::collections::{BTreeMap, HashMap}; - use std::str::FromStr; #[test] fn test_spark_examples_s3() { @@ -1099,7 +1075,8 @@ metadata: name: spark-examples-s3 spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.2.1-hadoop3.2-python39-aws1.11.375-stackable0.3.0 + sparkImage: + productVersion: 3.4.0 mode: cluster mainClass: org.apache.spark.examples.SparkPi mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/spark-examples.jar @@ -1131,8 
+1108,6 @@ spec: spark_application.spec.spark_conf.map(|m| m.keys().len()) ); - assert!(spark_application.spec.spark_image.is_some()); - assert!(spark_application.spec.mode.is_some()); assert!(spark_application.spec.driver.is_some()); assert!(spark_application.spec.executor.is_some()); @@ -1155,7 +1130,8 @@ metadata: spec: version: "1.0" image: docker.stackable.tech/stackable/ny-tlc-report:0.1.0 - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.2.1-hadoop3.2-python39-aws1.11.375-stackable0.3.0 + sparkImage: + productVersion: 3.2.1 mode: cluster mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py args: @@ -1186,7 +1162,6 @@ spec: ); assert!(spark_application.spec.image.is_some()); - assert!(spark_application.spec.spark_image.is_some()); assert!(spark_application.spec.mode.is_some()); assert!(spark_application.spec.args.is_some()); assert!(spark_application.spec.deps.is_some()); @@ -1209,7 +1184,8 @@ metadata: uid: 12345678asdfghj spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.2.1-hadoop3.2-python39-aws1.11.375-stackable0.3.0 + sparkImage: + productVersion: 3.4.0 mode: cluster mainApplicationFile: s3a://stackable-spark-k8s-jars/jobs/ny_tlc_report.py args: @@ -1247,7 +1223,6 @@ spec: spark_application.spec.spark_conf.map(|m| m.keys().len()) ); - assert!(spark_application.spec.spark_image.is_some()); assert!(spark_application.spec.mode.is_some()); assert!(spark_application.spec.args.is_some()); assert!(spark_application.spec.deps.is_some()); @@ -1258,74 +1233,6 @@ spec: assert!(spark_application.spec.image.is_none()); } - #[test] - fn test_image_actions() { - let spark_application = serde_yaml::from_str::( - r#" ---- -apiVersion: spark.stackable.tech/v1alpha1 -kind: SparkApplication -metadata: - name: spark-pi-local - namespace: default -spec: - version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:3.2.1-hadoop3.2-stackable0.4.0 - sparkImagePullPolicy: Always - sparkImagePullSecrets: - - name: myregistrykey - mode: cluster - mainClass: org.apache.spark.examples.SparkPi - mainApplicationFile: local:///stackable/spark/examples/jars/spark-examples.jar - sparkConf: - spark.kubernetes.node.selector.node: "2" - driver: - cores: 1 - coreLimit: "1200m" - memory: "512m" - executor: - cores: 1 - instances: 1 - memory: "512m" - "#, - ) - .unwrap(); - - assert_eq!( - Some(vec![LocalObjectReference { - name: Some("myregistrykey".to_string()) - }]), - spark_application.spark_image_pull_secrets() - ); - assert_eq!( - Some(ImagePullPolicy::Always), - spark_application.spark_image_pull_policy() - ); - } - - #[test] - fn test_image_pull_policy_ser() { - assert_eq!("Never", ImagePullPolicy::Never.to_string()); - assert_eq!("Always", ImagePullPolicy::Always.to_string()); - assert_eq!("IfNotPresent", ImagePullPolicy::IfNotPresent.to_string()); - } - - #[test] - fn test_image_pull_policy_de() { - assert_eq!( - ImagePullPolicy::Always, - ImagePullPolicy::from_str("Always").unwrap() - ); - assert_eq!( - ImagePullPolicy::Never, - ImagePullPolicy::from_str("Never").unwrap() - ); - assert_eq!( - ImagePullPolicy::IfNotPresent, - ImagePullPolicy::from_str("IfNotPresent").unwrap() - ); - } - #[test] fn test_default_resource_limits() { let spark_application = serde_yaml::from_str::( @@ -1336,6 +1243,8 @@ kind: SparkApplication metadata: name: spark-examples spec: + sparkImage: + productVersion: 1.2.3 executor: instances: 1 config: @@ -1367,6 +1276,8 @@ kind: SparkApplication metadata: name: spark-examples spec: + sparkImage: + productVersion: 1.2.3 
job: resources: cpu: diff --git a/rust/operator-binary/src/history_controller.rs b/rust/operator-binary/src/history_controller.rs index f72d2f1a..0942dcc2 100644 --- a/rust/operator-binary/src/history_controller.rs +++ b/rust/operator-binary/src/history_controller.rs @@ -33,12 +33,12 @@ use stackable_operator::{ }; use stackable_spark_k8s_crd::{ constants::{ - ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_IMAGE_BASE_NAME, - HISTORY_ROLE_NAME, JVM_SECURITY_PROPERTIES_FILE, LOG4J2_CONFIG_FILE, - MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME, SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE, - SPARK_DEFAULTS_FILE_NAME, SPARK_UID, STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE, - VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, - VOLUME_MOUNT_PATH_CONFIG, VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG, + ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_ROLE_NAME, + JVM_SECURITY_PROPERTIES_FILE, LOG4J2_CONFIG_FILE, MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME, + SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_IMAGE_BASE_NAME, + SPARK_UID, STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG, + VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG, + VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG, }, history, history::{HistoryConfig, SparkHistoryServer, SparkHistoryServerContainer}, @@ -163,10 +163,10 @@ pub async fn reconcile(shs: Arc, ctx: Arc) -> Result, ctx: Arc) .await .context(S3LogDirSnafu)?; + let resolved_product_image = spark_application + .spec + .spark_image + .resolve(SPARK_IMAGE_BASE_NAME, crate::built_info::CARGO_PKG_VERSION); + let (serviceaccount, rolebinding) = build_spark_role_serviceaccount(&spark_application)?; client .apply_patch(CONTROLLER_NAME, &serviceaccount, &serviceaccount) @@ -202,12 +205,6 @@ pub async fn reconcile(spark_application: Arc, ctx: Arc) .await .context(ResolveVectorAggregatorAddressSnafu)?; - let spark_image = spark_application - .spec - .spark_image - .as_deref() - .context(ObjectHasNoSparkImageSnafu)?; - let env_vars = spark_application.env(&opt_s3conn, &s3logdir); let driver_config = spark_application @@ -237,6 +234,7 @@ pub async fn reconcile(spark_application: Arc, ctx: Arc) &opt_s3conn, &s3logdir, vector_aggregator_address.as_deref(), + &resolved_product_image, )?; client .apply_patch( @@ -274,6 +272,7 @@ pub async fn reconcile(spark_application: Arc, ctx: Arc) &opt_s3conn, &s3logdir, vector_aggregator_address.as_deref(), + &resolved_product_image, )?; client .apply_patch( @@ -289,6 +288,7 @@ pub async fn reconcile(spark_application: Arc, ctx: Arc) serviceaccount.metadata.name.as_ref().unwrap(), &opt_s3conn, &s3logdir, + &resolved_product_image.image, ) .context(BuildCommandSnafu)?; @@ -305,7 +305,7 @@ pub async fn reconcile(spark_application: Arc, ctx: Arc) let job = spark_job( &spark_application, - spark_image, + &resolved_product_image, &serviceaccount, &env_vars, &job_commands, @@ -325,6 +325,7 @@ fn init_containers( logging: &Logging, s3conn: &Option, s3logdir: &Option, + spark_image: &ResolvedProductImage, ) -> Result> { let mut jcb = ContainerBuilder::new(&SparkContainer::Job.to_string()) .context(IllegalContainerNameSnafu)?; @@ -361,12 +362,6 @@ fn init_containers( .build() }); - let spark_image = spark_application - .spec - .spark_image - .as_deref() - .context(ObjectHasNoSparkImageSnafu)?; - let mut rcb = ContainerBuilder::new(&SparkContainer::Requirements.to_string()) .context(IllegalContainerNameSnafu)?; let 
requirements_container = spark_application.requirements().map(|req| { @@ -388,14 +383,12 @@ fn init_containers( "pip install --target={VOLUME_MOUNT_PATH_REQ} {req}" )); - rcb.image(spark_image) + rcb.image(&spark_image.image) .command(vec!["/bin/bash".to_string(), "-c".to_string()]) .args(vec![args.join(" && ")]) .add_volume_mount(VOLUME_MOUNT_NAME_REQ, VOLUME_MOUNT_PATH_REQ) - .add_volume_mount(VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_PATH_LOG); - if let Some(image_pull_policy) = spark_application.spark_image_pull_policy() { - rcb.image_pull_policy(image_pull_policy.to_string()); - } + .add_volume_mount(VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_PATH_LOG) + .image_pull_policy(&spark_image.image_pull_policy); rcb.resources( ResourceRequirementsBuilder::new() @@ -423,7 +416,7 @@ fn init_containers( format!("{STACKABLE_MOUNT_PATH_TLS}/{cert_secret}"), ); } - tcb.image(spark_image) + tcb.image(&spark_image.image) .command(vec!["/bin/bash".to_string(), "-c".to_string()]) .args(vec![args.join(" && ")]) .add_volume_mount(STACKABLE_TRUST_STORE_NAME, STACKABLE_TRUST_STORE) @@ -452,12 +445,14 @@ fn pod_template( env: &[EnvVar], s3conn: &Option, s3logdir: &Option, + spark_image: &ResolvedProductImage, ) -> Result { let container_name = SparkContainer::Spark.to_string(); let mut cb = ContainerBuilder::new(&container_name).context(IllegalContainerNameSnafu)?; cb.add_volume_mounts(config.volume_mounts.clone()) .add_env_vars(env.to_vec()) - .resources(config.resources.clone().into()); + .resources(config.resources.clone().into()) + .image_from_product_image(spark_image); if config.logging.enable_vector_agent { cb.add_env_var( @@ -471,10 +466,6 @@ fn pod_template( ); } - if let Some(image_pull_policy) = spark_application.spark_image_pull_policy() { - cb.image_pull_policy(image_pull_policy.to_string()); - } - let mut pb = PodBuilder::new(); pb.metadata( ObjectMetaBuilder::new() @@ -488,38 +479,26 @@ fn pod_template( ) .add_container(cb.build()) .add_volumes(volumes.to_vec()) - .security_context(security_context()); - - pb.affinity(&config.affinity); + .security_context(security_context()) + .image_pull_secrets_from_product_image(spark_image) + .affinity(&config.affinity); - let init_containers = - init_containers(spark_application, &config.logging, s3conn, s3logdir).unwrap(); + let init_containers = init_containers( + spark_application, + &config.logging, + s3conn, + s3logdir, + spark_image, + ) + .unwrap(); for init_container in init_containers { pb.add_init_container(init_container.clone()); } - if let Some(image_pull_secrets) = spark_application.spark_image_pull_secrets() { - pb.image_pull_secrets( - image_pull_secrets - .iter() - .flat_map(|secret| secret.name.clone()), - ); - } - if config.logging.enable_vector_agent { pb.add_container(vector_container( - &ResolvedProductImage { - product_version: "".into(), - app_version_label: "".into(), - image: spark_application - .spec - .spark_image - .clone() - .context(ObjectHasNoSparkImageSnafu)?, - image_pull_policy: "".into(), - pull_secrets: None, - }, + spark_image, VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, config.logging.containers.get(&SparkContainer::Vector), @@ -544,6 +523,7 @@ fn pod_template_config_map( s3conn: &Option, s3logdir: &Option, vector_aggregator_address: Option<&str>, + spark_image: &ResolvedProductImage, ) -> Result { let cm_name = spark_application.pod_template_config_map_name(config.role.clone()); @@ -573,6 +553,7 @@ fn pod_template_config_map( env, s3conn, s3logdir, + spark_image, )?; let mut cm_builder = ConfigMapBuilder::new(); @@ 
-654,7 +635,7 @@ fn submit_job_config_map( #[allow(clippy::too_many_arguments)] fn spark_job( spark_application: &SparkApplication, - spark_image: &str, + spark_image: &ResolvedProductImage, serviceaccount: &ServiceAccount, env: &[EnvVar], job_commands: &[String], @@ -689,7 +670,7 @@ fn spark_job( args.push(shutdown_vector_command(VOLUME_MOUNT_PATH_LOG)); } - cb.image(spark_image) + cb.image_from_product_image(spark_image) .command(vec!["/bin/bash".to_string(), "-c".to_string()]) .args(vec![args.join(" && ")]) .resources(job_config.resources.into()) @@ -705,10 +686,6 @@ fn spark_job( // TODO: move this to the image .add_env_var("SPARK_CONF_DIR", "/stackable/spark/conf"); - if let Some(image_pull_policy) = spark_application.spark_image_pull_policy() { - cb.image_pull_policy(image_pull_policy.to_string()); - } - let mut volumes = vec![ VolumeBuilder::new(VOLUME_MOUNT_NAME_CONFIG) .with_config_map(spark_application.submit_job_config_map_name()) @@ -730,17 +707,7 @@ fn spark_job( if job_config.logging.enable_vector_agent { containers.push(vector_container( - &ResolvedProductImage { - product_version: "".into(), - app_version_label: "".into(), - image: spark_application - .spec - .spark_image - .clone() - .context(ObjectHasNoSparkImageSnafu)?, - image_pull_policy: "".into(), - pull_secrets: None, - }, + spark_image, VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, job_config @@ -770,7 +737,7 @@ fn spark_job( restart_policy: Some("Never".to_string()), service_account_name: serviceaccount.metadata.name.clone(), volumes: Some(volumes), - image_pull_secrets: spark_application.spark_image_pull_secrets(), + image_pull_secrets: spark_image.pull_secrets.clone(), security_context: Some(security_context()), ..PodSpec::default() }), diff --git a/tests/templates/kuttl/logging/04-deploy-history-server.yaml.j2 b/tests/templates/kuttl/logging/04-deploy-history-server.yaml.j2 index 7cc64b78..1b11c903 100644 --- a/tests/templates/kuttl/logging/04-deploy-history-server.yaml.j2 +++ b/tests/templates/kuttl/logging/04-deploy-history-server.yaml.j2 @@ -22,8 +22,13 @@ metadata: name: spark-history spec: image: - productVersion: "{{ test_scenario['values']['spark'].split('-stackable')[0] }}" - stackableVersion: "{{ test_scenario['values']['spark'].split('-stackable')[1] }}" +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent vectorAggregatorConfigMapName: spark-vector-aggregator-discovery logFileDirectory: s3: diff --git a/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2 b/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2 index 16053929..a1f08592 100644 --- a/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2 +++ b/tests/templates/kuttl/logging/05-deploy-automatic-log-config-spark-app.yaml.j2 @@ -5,7 +5,14 @@ metadata: name: spark-automatic-log-config spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }} + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + 
productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent image: docker.stackable.tech/stackable/ny-tlc-report:{{ test_scenario['values']['ny-tlc-report'] }} vectorAggregatorConfigMapName: spark-vector-aggregator-discovery mode: cluster diff --git a/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2 b/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2 index b3ee4007..2015366d 100644 --- a/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2 +++ b/tests/templates/kuttl/logging/06-deploy-custom-log-config-spark-app.yaml.j2 @@ -39,7 +39,14 @@ metadata: name: spark-custom-log-config spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }} + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent image: docker.stackable.tech/stackable/ny-tlc-report:{{ test_scenario['values']['ny-tlc-report'] }} vectorAggregatorConfigMapName: spark-vector-aggregator-discovery mode: cluster diff --git a/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2 b/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2 index 939ecb7f..8110ab8b 100644 --- a/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2 +++ b/tests/templates/kuttl/logging/07-deploy-automatic-log-config-pyspark-app.yaml.j2 @@ -5,7 +5,14 @@ metadata: name: pyspark-automatic-log-config spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }} + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent vectorAggregatorConfigMapName: spark-vector-aggregator-discovery mode: cluster mainApplicationFile: local:///stackable/spark/examples/src/main/python/als.py diff --git a/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2 b/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2 index 3dcaa696..b6ab6f75 100644 --- a/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2 +++ b/tests/templates/kuttl/logging/08-deploy-custom-log-config-pyspark-app.yaml.j2 @@ -39,7 +39,14 @@ metadata: name: pyspark-custom-log-config spec: version: "1.0" - sparkImage: docker.stackable.tech/stackable/pyspark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }} + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent vectorAggregatorConfigMapName: 
spark-vector-aggregator-discovery mode: cluster mainApplicationFile: local:///stackable/spark/examples/src/main/python/als.py diff --git a/tests/templates/kuttl/pod_overrides/04-prepare-bucket.yaml.j2 b/tests/templates/kuttl/pod_overrides/04-prepare-bucket.yaml.j2 index 14374091..be3231ae 100644 --- a/tests/templates/kuttl/pod_overrides/04-prepare-bucket.yaml.j2 +++ b/tests/templates/kuttl/pod_overrides/04-prepare-bucket.yaml.j2 @@ -6,5 +6,5 @@ commands: - command: sleep 5 - script: | POD=$(kubectl -n $NAMESPACE get pod -l app.kubernetes.io/instance=test-minio -o name | head -n 1 | sed -e 's#pod/##') - kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar $POD:/tmp - kubectl -n $NAMESPACE exec $POD -- mc cp /tmp/spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar local/my-bucket + kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar $POD:/tmp + kubectl -n $NAMESPACE exec $POD -- mc cp /tmp/spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar local/my-bucket diff --git a/tests/templates/kuttl/pod_overrides/06-deploy-history-server.yaml.j2 b/tests/templates/kuttl/pod_overrides/06-deploy-history-server.yaml.j2 index cc3d7170..965dd18d 100644 --- a/tests/templates/kuttl/pod_overrides/06-deploy-history-server.yaml.j2 +++ b/tests/templates/kuttl/pod_overrides/06-deploy-history-server.yaml.j2 @@ -25,8 +25,13 @@ metadata: name: spark-history spec: image: - productVersion: "{{ test_scenario['values']['spark'].split('-stackable')[0] }}" - stackableVersion: "{{ test_scenario['values']['spark'].split('-stackable')[1] }}" +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent logFileDirectory: s3: prefix: eventlogs/ @@ -50,4 +55,4 @@ spec: memory: 512Mi limits: cpu: 1500m - memory: 1024Mi \ No newline at end of file + memory: 1024Mi diff --git a/tests/templates/kuttl/pod_overrides/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/pod_overrides/10-deploy-spark-app.yaml.j2 index 48d9fdd8..6668dd1e 100644 --- a/tests/templates/kuttl/pod_overrides/10-deploy-spark-app.yaml.j2 +++ b/tests/templates/kuttl/pod_overrides/10-deploy-spark-app.yaml.j2 @@ -5,11 +5,17 @@ metadata: name: spark-pi-s3-1 spec: version: "1.0" - sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}" - sparkImagePullPolicy: IfNotPresent + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent mode: cluster mainClass: org.apache.spark.examples.SparkPi - mainApplicationFile: "s3a://my-bucket/spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar" + mainApplicationFile: "s3a://my-bucket/spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar" s3connection: reference: spark-data-s3-connection logFileDirectory: @@ -53,4 +59,4 @@ spec: memory: 512Mi limits: cpu: 1500m - memory: 1024Mi \ No newline at end of file + memory: 
1024Mi diff --git a/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2 index 003c62c0..6676a058 100644 --- a/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2 +++ b/tests/templates/kuttl/pyspark-ny-public-s3-image/10-deploy-spark-app.yaml.j2 @@ -10,8 +10,14 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - sparkImage: "docker.stackable.tech/stackable/pyspark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}" - sparkImagePullPolicy: IfNotPresent + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent mode: cluster mainApplicationFile: local:///stackable/spark/jobs/ny_tlc_report.py args: diff --git a/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2 index e10c2c7a..a6dc7039 100644 --- a/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2 +++ b/tests/templates/kuttl/pyspark-ny-public-s3/10-deploy-spark-app.yaml.j2 @@ -9,8 +9,14 @@ spec: vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} # everything under /jobs will be copied to /stackable/spark/jobs - sparkImage: "docker.stackable.tech/stackable/pyspark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}" - sparkImagePullPolicy: IfNotPresent + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent mode: cluster mainApplicationFile: s3a://my-bucket/ny_tlc_report.py args: diff --git a/tests/templates/kuttl/resources/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/resources/10-deploy-spark-app.yaml.j2 index 90e458ec..1e7f81fd 100644 --- a/tests/templates/kuttl/resources/10-deploy-spark-app.yaml.j2 +++ b/tests/templates/kuttl/resources/10-deploy-spark-app.yaml.j2 @@ -8,7 +8,14 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}" + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent mode: cluster mainClass: org.apache.spark.examples.SparkALS mainApplicationFile: "local:///stackable/spark/examples/jars/spark-examples.jar" diff --git a/tests/templates/kuttl/resources/12-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/resources/12-deploy-spark-app.yaml.j2 index b0e8c3f5..a99f1537 100644 --- 
diff --git a/tests/templates/kuttl/resources/12-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/resources/12-deploy-spark-app.yaml.j2
index b0e8c3f5..a99f1537 100644
--- a/tests/templates/kuttl/resources/12-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/resources/12-deploy-spark-app.yaml.j2
@@ -8,7 +8,14 @@ spec:
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
-  sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}"
+  sparkImage:
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkALS
   mainApplicationFile: "local:///stackable/spark/examples/jars/spark-examples.jar"
diff --git a/tests/templates/kuttl/smoke/04-prepare-bucket.yaml.j2 b/tests/templates/kuttl/smoke/04-prepare-bucket.yaml.j2
index 14374091..be3231ae 100644
--- a/tests/templates/kuttl/smoke/04-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/smoke/04-prepare-bucket.yaml.j2
@@ -6,5 +6,5 @@ commands:
   - command: sleep 5
   - script: |
       POD=$(kubectl -n $NAMESPACE get pod -l app.kubernetes.io/instance=test-minio -o name | head -n 1 | sed -e 's#pod/##')
-      kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar $POD:/tmp
-      kubectl -n $NAMESPACE exec $POD -- mc cp /tmp/spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar local/my-bucket
+      kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar $POD:/tmp
+      kubectl -n $NAMESPACE exec $POD -- mc cp /tmp/spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar local/my-bucket
diff --git a/tests/templates/kuttl/smoke/06-deploy-history-server.yaml.j2 b/tests/templates/kuttl/smoke/06-deploy-history-server.yaml.j2
index e57d720c..967cdd7e 100644
--- a/tests/templates/kuttl/smoke/06-deploy-history-server.yaml.j2
+++ b/tests/templates/kuttl/smoke/06-deploy-history-server.yaml.j2
@@ -25,8 +25,13 @@ metadata:
   name: spark-history
 spec:
   image:
-    productVersion: "{{ test_scenario['values']['spark'].split('-stackable')[0] }}"
-    stackableVersion: "{{ test_scenario['values']['spark'].split('-stackable')[1] }}"
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
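One detail that keeps the prepare-bucket steps above simple: Jinja2 strings follow Python semantics, so split(',')[0] on a value that contains no comma returns the whole string. The same expression therefore yields the bare product version for both dimension forms. A minimal sketch, with illustrative values:

    {{ "3.4.0".split(',')[0] }}
    {# renders: 3.4.0 #}
    {{ "3.4.0,docker.stackable.tech/sandbox/spark-k8s:3.4.0-stackable0.0.0-dev".split(',')[0] }}
    {# renders: 3.4.0 #}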
diff --git a/tests/templates/kuttl/smoke/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/smoke/10-deploy-spark-app.yaml.j2
index bcf7dd9b..f8d1fe31 100644
--- a/tests/templates/kuttl/smoke/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/smoke/10-deploy-spark-app.yaml.j2
@@ -8,11 +8,17 @@ spec:
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
-  sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}"
-  sparkImagePullPolicy: IfNotPresent
+  sparkImage:
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
-  mainApplicationFile: "s3a://my-bucket/spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar"
+  mainApplicationFile: "s3a://my-bucket/spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar"
   s3connection:
     reference: spark-data-s3-connection
   logFileDirectory:
diff --git a/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
index 3c48ff6c..53220e9d 100644
--- a/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-examples/10-deploy-spark-app.yaml.j2
@@ -8,8 +8,14 @@ spec:
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
-  sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}"
-  sparkImagePullPolicy: IfNotPresent
+  sparkImage:
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkALS
   mainApplicationFile: "local:///stackable/spark/examples/jars/spark-examples.jar"
diff --git a/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2
index d9d05fea..b557eb6a 100644
--- a/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-history-server/04-prepare-bucket.yaml.j2
@@ -4,6 +4,6 @@
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/spark-examples.jar
+  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar minio-client:/tmp/spark-examples.jar
   - command: kubectl exec -n $NAMESPACE minio-client -- mc --insecure alias set test-minio http://test-minio:9000 spark sparkspark
   - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples.jar test-minio/my-bucket
test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} diff --git a/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2 index a8717ff3..8f0d046c 100644 --- a/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2 +++ b/tests/templates/kuttl/spark-history-server/10-deploy-spark-app.yaml.j2 @@ -8,8 +8,14 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}" - sparkImagePullPolicy: IfNotPresent + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent mode: cluster mainClass: org.apache.spark.examples.SparkPi mainApplicationFile: "s3a://my-bucket/spark-examples.jar" diff --git a/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2 index ef042659..3d01cb6a 100644 --- a/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2 +++ b/tests/templates/kuttl/spark-history-server/12-deploy-spark-app.yaml.j2 @@ -8,8 +8,14 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}" - sparkImagePullPolicy: IfNotPresent + sparkImage: +{% if test_scenario['values']['spark'].find(",") > 0 %} + custom: "{{ test_scenario['values']['spark'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}" +{% else %} + productVersion: "{{ test_scenario['values']['spark'] }}" +{% endif %} + pullPolicy: IfNotPresent mode: cluster mainClass: org.apache.spark.examples.SparkPi mainApplicationFile: "s3a://my-bucket/spark-examples.jar" diff --git a/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2 index 3fd04942..f836f05f 100644 --- a/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2 +++ b/tests/templates/kuttl/spark-ny-public-s3/03-prepare-bucket.yaml.j2 @@ -5,7 +5,7 @@ commands: # give minio enough time to start - command: sleep 10 - command: kubectl cp -n $NAMESPACE yellow_tripdata_2021-07.csv minio-client:/tmp - - command: kubectl cp -n $NAMESPACE ny-tlc-report-1.1.0-{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/ny-tlc-report.jar + - command: kubectl cp -n $NAMESPACE ny-tlc-report-1.1.0-{{ test_scenario['values']['spark'].split(',')[0] }}.jar minio-client:/tmp/ny-tlc-report.jar {% if test_scenario['values']['s3-use-tls'] == 'true' %} - command: kubectl exec -n $NAMESPACE minio-client -- mc --insecure alias set minio https://minio:9000 spark sparkspark {% else %} diff --git a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2 
diff --git a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
index ec9bcba4..16cd28cd 100644
--- a/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-ny-public-s3/10-deploy-spark-app.yaml.j2
@@ -16,8 +16,14 @@ spec:
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
-  sparkImage: "docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}"
-  sparkImagePullPolicy: IfNotPresent
+  sparkImage:
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
   mode: cluster
   mainClass: tech.stackable.demo.spark.NYTLCReport
   mainApplicationFile: "s3a://my-bucket/ny-tlc-report.jar"
diff --git a/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2
index 52d7bd3c..f892753f 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-private-s3/04-prepare-bucket.yaml.j2
@@ -4,7 +4,7 @@ kind: TestStep
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/spark-examples.jar
+  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar minio-client:/tmp/spark-examples.jar
   - command: kubectl exec -n $NAMESPACE minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE minio-client -- mc mb test-minio/my-bucket
   - command: kubectl exec -n $NAMESPACE minio-client -- mc cp /tmp/spark-examples.jar test-minio/my-bucket
diff --git a/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
index 32cd6028..db45ff67 100644
--- a/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-private-s3/10-deploy-spark-app.yaml.j2
@@ -8,8 +8,14 @@ spec:
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}
-  sparkImagePullPolicy: IfNotPresent
+  sparkImage:
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
   mainApplicationFile: s3a://my-bucket/spark-examples.jar
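A side note on the private-s3 prepare step above: kuttl expands $VARIABLES (such as $NAMESPACE) in test step commands, and the doubled $$ keeps the dollar sign literal, so the MinIO credentials are resolved by the shell inside the minio-client pod rather than by kuttl. Assuming that documented escape behaviour, the command kuttl actually runs looks roughly like:

    kubectl exec -n <namespace> minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $MINIO_SERVER_ACCESS_KEY $MINIO_SERVER_SECRET_KEY'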
diff --git a/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2 b/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2
index e06a76c8..6fb66bb7 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-public-s3/03-prepare-bucket.yaml.j2
@@ -4,7 +4,7 @@ kind: TestStep
 commands:
   # give minio enough time to start
   - command: sleep 10
-  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split('-stackable')[0] }}.jar minio-client:/tmp/spark-examples.jar
+  - command: kubectl cp -n $NAMESPACE spark-examples_{{ test_scenario['values']['spark'].split(',')[0] }}.jar minio-client:/tmp/spark-examples.jar
   - command: kubectl exec -n $NAMESPACE minio-client -- sh -c 'mc alias set test-minio http://test-minio:9000 $$MINIO_SERVER_ACCESS_KEY $$MINIO_SERVER_SECRET_KEY'
   - command: kubectl exec -n $NAMESPACE minio-client -- mc mb test-minio/my-bucket
   - command: kubectl exec -n $NAMESPACE minio-client -- mc policy set public test-minio/my-bucket
diff --git a/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2 b/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
index 0565659a..69f19582 100644
--- a/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
+++ b/tests/templates/kuttl/spark-pi-public-s3/10-deploy-spark-app.yaml.j2
@@ -8,8 +8,14 @@ spec:
 {% if lookup('env', 'VECTOR_AGGREGATOR') %}
   vectorAggregatorConfigMapName: vector-aggregator-discovery
 {% endif %}
-  sparkImage: docker.stackable.tech/stackable/spark-k8s:{{ test_scenario['values']['spark'].split('-stackable')[0] }}-stackable{{ test_scenario['values']['spark'].split('-stackable')[1] }}
-  sparkImagePullPolicy: IfNotPresent
+  sparkImage:
+{% if test_scenario['values']['spark'].find(",") > 0 %}
+    custom: "{{ test_scenario['values']['spark'].split(',')[1] }}"
+    productVersion: "{{ test_scenario['values']['spark'].split(',')[0] }}"
+{% else %}
+    productVersion: "{{ test_scenario['values']['spark'] }}"
+{% endif %}
+    pullPolicy: IfNotPresent
   mode: cluster
   mainClass: org.apache.spark.examples.SparkPi
   mainApplicationFile: s3a://my-bucket/spark-examples.jar
diff --git a/tests/test-definition.yaml b/tests/test-definition.yaml
index 95f8b8de..b8c0fedc 100644
--- a/tests/test-definition.yaml
+++ b/tests/test-definition.yaml
@@ -1,13 +1,3 @@
-# These tests can run against an OpenShift cluster, provided you note the following:
-#
-# 1. Set the "openshift" dimension below to "true" (with quotes)
-# 2. Comment out the "true" option in the "s3-use-tls" dimension
-#
-# Regarding point 2.: the bitnami chart is used for S3 on OpenShift as it correctly installs
-# a minio instance (the chart from minio does not correctly apply the service account to
-# the job that creates the bucket, and so the permissions are not sufficient). However, it
-# cannot correctly use self-signed TLS certificates due to a bug in libminioclient.sh: for
-# non-OpenShift clusters the minio chart is thus used instead.
 ---
 dimensions:
   - name: openshift
@@ -15,8 +5,11 @@ dimensions:
     - "false"
   - name: spark
     values:
-      - 3.3.0-stackable0.0.0-dev
-      - 3.4.0-stackable0.0.0-dev
+      - 3.3.0
+      - 3.4.0
+      # Alternatively, if you want to use a custom image, append a comma and the full image name to the product version
+      # as in the example below.
+      # - 3.4.0,docker.stackable.tech/sandbox/spark-k8s:3.4.0-stackable0.0.0-dev
   - name: ny-tlc-report
     values:
       - 0.1.0
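For background (a sketch, not part of this diff): after the switch to product image selection, user-facing manifests select the image in the same two ways the templates above generate, e.g.:

    ---
    apiVersion: spark.stackable.tech/v1alpha1
    kind: SparkApplication
    metadata:
      name: spark-pi
    spec:
      sparkImage:
        # either select a supported product version...
        productVersion: "3.4.0"
        # ...or additionally pin a custom image for that version (hypothetical image name):
        # custom: docker.stackable.tech/sandbox/spark-k8s:3.4.0-stackable0.0.0-dev
        pullPolicy: IfNotPresent
      mode: cluster
      mainClass: org.apache.spark.examples.SparkPi
      mainApplicationFile: "local:///stackable/spark/examples/jars/spark-examples.jar"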