@@ -19,7 +19,9 @@ use stackable_operator::{
         Resource, ResourceExt,
     },
     labels::{role_group_selector_labels, role_selector_labels, ObjectLabels},
-    product_config::ProductConfigManager,
+    product_config::{
+        types::PropertyNameKind, writer::to_java_properties_string, ProductConfigManager,
+    },
     product_logging::{
         framework::{calculate_log_volume_size_limit, vector_container},
         spec::{
@@ -32,19 +34,20 @@ use stackable_operator::{
 use stackable_spark_k8s_crd::{
     constants::{
         ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_IMAGE_BASE_NAME,
-        HISTORY_ROLE_NAME, LOG4J2_CONFIG_FILE, MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME,
-        SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_UID,
-        STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG,
-        VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_NAME_SPARK_DEFAULTS,
-        VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG, VOLUME_MOUNT_PATH_SPARK_DEFAULTS,
+        HISTORY_ROLE_NAME, JVM_SECURITY_PROPERTIES_FILE, LOG4J2_CONFIG_FILE,
+        MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME, SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE,
+        SPARK_DEFAULTS_FILE_NAME, SPARK_UID, STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE,
+        VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG,
+        VOLUME_MOUNT_NAME_SPARK_DEFAULTS, VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG,
+        VOLUME_MOUNT_PATH_SPARK_DEFAULTS,
     },
     history,
     history::{HistoryConfig, SparkHistoryServer, SparkHistoryServerContainer},
     s3logdir::S3LogDir,
     tlscerts,
 };
-use std::time::Duration;
 use std::{collections::BTreeMap, sync::Arc};
+use std::{collections::HashMap, time::Duration};

 use snafu::{OptionExt, ResultExt, Snafu};
 use stackable_operator::builder::resources::ResourceRequirementsBuilder;
@@ -129,6 +132,14 @@ pub enum Error {
     },
     #[snafu(display("cannot retrieve role group"))]
     CannotRetrieveRoleGroup { source: history::Error },
+    #[snafu(display(
+        "History server: failed to serialize [{JVM_SECURITY_PROPERTIES_FILE}] for group {}",
+        rolegroup
+    ))]
+    JvmSecurityProperties {
+        source: stackable_operator::product_config::writer::PropertiesWriterError,
+        rolegroup: String,
+    },
 }

 type Result<T, E = Error> = std::result::Result<T, E>;
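
A standalone sketch of how the new error variant's message renders, using the same snafu derive this file already relies on. `PROPS_FILE`, the variant without a `source` field, and the `main` harness are stand-ins for illustration only, not code from this change:

use snafu::Snafu;

const PROPS_FILE: &str = "security.properties";

#[derive(Debug, Snafu)]
enum Error {
    // Same display shape as the JvmSecurityProperties variant above: the file
    // name is captured from a const in scope, the group name from the field.
    #[snafu(display(
        "History server: failed to serialize [{PROPS_FILE}] for group {}",
        rolegroup
    ))]
    JvmSecurityProperties { rolegroup: String },
}

fn main() {
    let err = Error::JvmSecurityProperties {
        rolegroup: "default".to_string(),
    };
    // Prints: History server: failed to serialize [security.properties] for group default
    println!("{err}");
}
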
@@ -211,7 +222,7 @@ pub async fn reconcile(shs: Arc<SparkHistoryServer>, ctx: Arc<Ctx>) -> Result<Ac
             role_group: rolegroup_name.into(),
         };

-        let config = shs
+        let merged_config = shs
             .merged_config(&rgr)
             .context(FailedToResolveConfigSnafu)?;

@@ -228,7 +239,8 @@ pub async fn reconcile(shs: Arc<SparkHistoryServer>, ctx: Arc<Ctx>) -> Result<Ac

         let config_map = build_config_map(
             &shs,
-            &config,
+            _rolegroup_config,
+            &merged_config,
             &resolved_product_image.app_version_label,
             &rgr,
             s3_log_dir.as_ref().unwrap(),
@@ -244,7 +256,7 @@ pub async fn reconcile(shs: Arc<SparkHistoryServer>, ctx: Arc<Ctx>) -> Result<Ac
             &resolved_product_image,
             &rgr,
             s3_log_dir.as_ref().unwrap(),
-            &config,
+            &merged_config,
             &serviceaccount,
         )?;
         cluster_resources
@@ -268,7 +280,8 @@ pub fn error_policy(_obj: Arc<SparkHistoryServer>, _error: &Error, _ctx: Arc<Ctx

 fn build_config_map(
     shs: &SparkHistoryServer,
-    config: &HistoryConfig,
+    config: &HashMap<PropertyNameKind, BTreeMap<String, String>>,
+    merged_config: &HistoryConfig,
     app_version_label: &str,
     rolegroupref: &RoleGroupRef<SparkHistoryServer>,
     s3_log_dir: &S3LogDir,
@@ -278,6 +291,16 @@ fn build_config_map(

     let spark_defaults = spark_defaults(shs, s3_log_dir, rolegroupref)?;

+    let jvm_sec_props: BTreeMap<String, Option<String>> = config
+        .get(&PropertyNameKind::File(
+            JVM_SECURITY_PROPERTIES_FILE.to_string(),
+        ))
+        .cloned()
+        .unwrap_or_default()
+        .into_iter()
+        .map(|(k, v)| (k, Some(v)))
+        .collect();
+
     let mut cm_builder = ConfigMapBuilder::new();

     cm_builder
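
A rough, self-contained illustration of the lookup-and-convert step added above, without the operator crates: `PropertyNameKind` is re-declared locally as a stand-in, and the example property names and values are assumptions, not values taken from this change.

use std::collections::{BTreeMap, HashMap};

// Local stand-in for product_config::types::PropertyNameKind, for this sketch only.
#[derive(PartialEq, Eq, Hash)]
enum PropertyNameKind {
    File(String),
}

const JVM_SECURITY_PROPERTIES_FILE: &str = "security.properties";

fn main() {
    // Assumed shape of the role-group config handed to build_config_map:
    // properties grouped by the file they belong to.
    let mut config: HashMap<PropertyNameKind, BTreeMap<String, String>> = HashMap::new();
    config.insert(
        PropertyNameKind::File(JVM_SECURITY_PROPERTIES_FILE.to_string()),
        BTreeMap::from([
            ("networkaddress.cache.ttl".to_string(), "30".to_string()),
            ("networkaddress.cache.negative.ttl".to_string(), "0".to_string()),
        ]),
    );

    // Same transformation as in the hunk above: values become Option<String>,
    // the shape the Java-properties writer expects.
    let jvm_sec_props: BTreeMap<String, Option<String>> = config
        .get(&PropertyNameKind::File(
            JVM_SECURITY_PROPERTIES_FILE.to_string(),
        ))
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .map(|(k, v)| (k, Some(v)))
        .collect();

    // Roughly what to_java_properties_string serializes for these entries.
    for (k, v) in &jvm_sec_props {
        println!("{}={}", k, v.clone().unwrap_or_default());
    }
}
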
@@ -290,12 +313,20 @@ fn build_config_map(
                 .with_recommended_labels(labels(shs, app_version_label, &rolegroupref.role_group))
                 .build(),
         )
-        .add_data(SPARK_DEFAULTS_FILE_NAME, spark_defaults);
+        .add_data(SPARK_DEFAULTS_FILE_NAME, spark_defaults)
+        .add_data(
+            JVM_SECURITY_PROPERTIES_FILE,
+            to_java_properties_string(jvm_sec_props.iter()).with_context(|_| {
+                JvmSecurityPropertiesSnafu {
+                    rolegroup: rolegroupref.role_group.clone(),
+                }
+            })?,
+        );

     product_logging::extend_config_map(
         rolegroupref,
         vector_aggregator_address,
-        &config.logging,
+        &merged_config.logging,
         SparkHistoryServerContainer::SparkHistory,
         SparkHistoryServerContainer::Vector,
         &mut cm_builder,
@@ -593,9 +624,10 @@ fn env_vars(s3logdir: &S3LogDir) -> Vec<EnvVar> {
     });
     vars.push(EnvVar {
         name: "SPARK_HISTORY_OPTS".to_string(),
-        value: Some(format!(
-            "-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"
-        )),
+        value: Some(vec![
+            format!("-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"),
+            format!("-Djava.security.properties={VOLUME_MOUNT_PATH_LOG_CONFIG}/{JVM_SECURITY_PROPERTIES_FILE}"),
+        ].join(" ")),
         value_from: None,
     });
     // if TLS is enabled build truststore
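
A minimal sketch of the SPARK_HISTORY_OPTS assembly in the last hunk, with the crate constants replaced by assumed literal values; the actual mount path and file names are defined in the crate's constants module and may differ.

fn main() {
    // Assumed stand-ins for VOLUME_MOUNT_PATH_LOG_CONFIG, LOG4J2_CONFIG_FILE
    // and JVM_SECURITY_PROPERTIES_FILE.
    let volume_mount_path_log_config = "/stackable/log_config";
    let log4j2_config_file = "log4j2.properties";
    let jvm_security_properties_file = "security.properties";

    // Same join as in the hunk: both system properties end up in one
    // space-separated SPARK_HISTORY_OPTS value.
    let spark_history_opts = vec![
        format!("-Dlog4j.configurationFile={volume_mount_path_log_config}/{log4j2_config_file}"),
        format!(
            "-Djava.security.properties={volume_mount_path_log_config}/{jvm_security_properties_file}"
        ),
    ]
    .join(" ");

    println!("SPARK_HISTORY_OPTS={spark_history_opts}");
    // -> -Dlog4j.configurationFile=/stackable/log_config/log4j2.properties \
    //    -Djava.security.properties=/stackable/log_config/security.properties
}
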