Skip to content

Commit 105c92d

Browse files
committed
First working version
1 parent a11150f commit 105c92d

File tree

5 files changed

+148
-22
lines changed

5 files changed

+148
-22
lines changed

deploy/config-spec/properties.yaml

Lines changed: 38 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,41 @@
33
version: 0.1.0
44
spec:
55
units: []
6-
properties: []
6+
properties:
7+
- property: &jvmDnsCacheTtl
8+
propertyNames:
9+
- name: "networkaddress.cache.ttl"
10+
kind:
11+
type: "file"
12+
file: "security.properties"
13+
datatype:
14+
type: "integer"
15+
min: "0"
16+
recommendedValues:
17+
- fromVersion: "0.0.0"
18+
value: "30"
19+
roles:
20+
- name: "node"
21+
required: true
22+
asOfVersion: "0.0.0"
23+
comment: "History server - TTL for successfully resolved domain names."
24+
description: "History server - TTL for successfully resolved domain names."
25+
26+
- property: &jvmDnsCacheNegativeTtl
27+
propertyNames:
28+
- name: "networkaddress.cache.negative.ttl"
29+
kind:
30+
type: "file"
31+
file: "security.properties"
32+
datatype:
33+
type: "integer"
34+
min: "0"
35+
recommendedValues:
36+
- fromVersion: "0.0.0"
37+
value: "0"
38+
roles:
39+
- name: "node"
40+
required: true
41+
asOfVersion: "0.0.0"
42+
comment: "History server - TTL for domain names that cannot be resolved."
43+
description: "History server - TTL for domain names that cannot be resolved."

deploy/helm/spark-k8s-operator/configs/properties.yaml

Lines changed: 38 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,41 @@
33
version: 0.1.0
44
spec:
55
units: []
6-
properties: []
6+
properties:
7+
- property: &jvmDnsCacheTtl
8+
propertyNames:
9+
- name: "networkaddress.cache.ttl"
10+
kind:
11+
type: "file"
12+
file: "security.properties"
13+
datatype:
14+
type: "integer"
15+
min: "0"
16+
recommendedValues:
17+
- fromVersion: "0.0.0"
18+
value: "30"
19+
roles:
20+
- name: "node"
21+
required: true
22+
asOfVersion: "0.0.0"
23+
comment: "History server - TTL for successfully resolved domain names."
24+
description: "History server - TTL for successfully resolved domain names."
25+
26+
- property: &jvmDnsCacheNegativeTtl
27+
propertyNames:
28+
- name: "networkaddress.cache.negative.ttl"
29+
kind:
30+
type: "file"
31+
file: "security.properties"
32+
datatype:
33+
type: "integer"
34+
min: "0"
35+
recommendedValues:
36+
- fromVersion: "0.0.0"
37+
value: "0"
38+
roles:
39+
- name: "node"
40+
required: true
41+
asOfVersion: "0.0.0"
42+
comment: "History server - TTL for domain names that cannot be resolved."
43+
description: "History server - TTL for domain names that cannot be resolved."

rust/crd/src/history.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -200,7 +200,10 @@ impl SparkHistoryServer {
200200
> = vec![(
201201
HISTORY_ROLE_NAME.to_string(),
202202
(
203-
vec![PropertyNameKind::File(SPARK_DEFAULTS_FILE_NAME.to_string())],
203+
vec![
204+
PropertyNameKind::File(SPARK_DEFAULTS_FILE_NAME.to_string()),
205+
PropertyNameKind::File(JVM_SECURITY_PROPERTIES_FILE.to_string()),
206+
],
204207
self.spec.nodes.clone(),
205208
),
206209
)]

rust/crd/src/lib.rs

Lines changed: 20 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -538,11 +538,28 @@ impl SparkApplication {
538538
}
539539
}
540540

541-
// s3 with TLS
541+
// Extra JVM opts:
542+
// - java security properties
543+
// - s3 with TLS
544+
let mut extra_java_opts = vec![format!(
545+
"-Djava.security.properties={VOLUME_MOUNT_PATH_LOG_CONFIG}/{JVM_SECURITY_PROPERTIES_FILE}"
546+
)];
542547
if tlscerts::tls_secret_names(s3conn, s3_log_dir).is_some() {
543-
submit_cmd.push(format!("--conf spark.driver.extraJavaOptions=\"-Djavax.net.ssl.trustStore={STACKABLE_TRUST_STORE}/truststore.p12 -Djavax.net.ssl.trustStorePassword={STACKABLE_TLS_STORE_PASSWORD} -Djavax.net.ssl.trustStoreType=pkcs12 -Djavax.net.debug=ssl,handshake\""));
544-
submit_cmd.push(format!("--conf spark.executor.extraJavaOptions=\"-Djavax.net.ssl.trustStore={STACKABLE_TRUST_STORE}/truststore.p12 -Djavax.net.ssl.trustStorePassword={STACKABLE_TLS_STORE_PASSWORD} -Djavax.net.ssl.trustStoreType=pkcs12 -Djavax.net.debug=ssl,handshake\""));
548+
extra_java_opts.extend(
549+
vec![
550+
format!("-Djavax.net.ssl.trustStore={STACKABLE_TRUST_STORE}/truststore.p12"),
551+
format!("-Djavax.net.ssl.trustStorePassword={STACKABLE_TLS_STORE_PASSWORD}"),
552+
format!("-Djavax.net.ssl.trustStoreType=pkcs12"),
553+
format!("-Djavax.net.debug=ssl,handshake"),
554+
]
555+
.into_iter(),
556+
);
545557
}
558+
let str_extra_java_opts = extra_java_opts.join(" ");
559+
submit_cmd.extend(vec![
560+
format!("--conf spark.driver.extraJavaOptions=\"{str_extra_java_opts}\""),
561+
format!("--conf spark.executor.extraJavaOptions=\"{str_extra_java_opts}\""),
562+
]);
546563

547564
// repositories and packages arguments
548565
if let Some(deps) = self.spec.deps.clone() {

rust/operator-binary/src/history_controller.rs

Lines changed: 48 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,9 @@ use stackable_operator::{
1919
Resource, ResourceExt,
2020
},
2121
labels::{role_group_selector_labels, role_selector_labels, ObjectLabels},
22-
product_config::ProductConfigManager,
22+
product_config::{
23+
types::PropertyNameKind, writer::to_java_properties_string, ProductConfigManager,
24+
},
2325
product_logging::{
2426
framework::{calculate_log_volume_size_limit, vector_container},
2527
spec::{
@@ -32,19 +34,20 @@ use stackable_operator::{
3234
use stackable_spark_k8s_crd::{
3335
constants::{
3436
ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_IMAGE_BASE_NAME,
35-
HISTORY_ROLE_NAME, LOG4J2_CONFIG_FILE, MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME,
36-
SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_UID,
37-
STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG,
38-
VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_NAME_SPARK_DEFAULTS,
39-
VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG, VOLUME_MOUNT_PATH_SPARK_DEFAULTS,
37+
HISTORY_ROLE_NAME, JVM_SECURITY_PROPERTIES_FILE, LOG4J2_CONFIG_FILE,
38+
MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME, SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE,
39+
SPARK_DEFAULTS_FILE_NAME, SPARK_UID, STACKABLE_TLS_STORE_PASSWORD, STACKABLE_TRUST_STORE,
40+
VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG,
41+
VOLUME_MOUNT_NAME_SPARK_DEFAULTS, VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG,
42+
VOLUME_MOUNT_PATH_SPARK_DEFAULTS,
4043
},
4144
history,
4245
history::{HistoryConfig, SparkHistoryServer, SparkHistoryServerContainer},
4346
s3logdir::S3LogDir,
4447
tlscerts,
4548
};
46-
use std::time::Duration;
4749
use std::{collections::BTreeMap, sync::Arc};
50+
use std::{collections::HashMap, time::Duration};
4851

4952
use snafu::{OptionExt, ResultExt, Snafu};
5053
use stackable_operator::builder::resources::ResourceRequirementsBuilder;
@@ -129,6 +132,14 @@ pub enum Error {
129132
},
130133
#[snafu(display("cannot retrieve role group"))]
131134
CannotRetrieveRoleGroup { source: history::Error },
135+
#[snafu(display(
136+
"History server : failed to serialize [{JVM_SECURITY_PROPERTIES_FILE}] for group {}",
137+
rolegroup
138+
))]
139+
JvmSecurityProperties {
140+
source: stackable_operator::product_config::writer::PropertiesWriterError,
141+
rolegroup: String,
142+
},
132143
}
133144

134145
type Result<T, E = Error> = std::result::Result<T, E>;
@@ -211,7 +222,7 @@ pub async fn reconcile(shs: Arc<SparkHistoryServer>, ctx: Arc<Ctx>) -> Result<Ac
211222
role_group: rolegroup_name.into(),
212223
};
213224

214-
let config = shs
225+
let merged_config = shs
215226
.merged_config(&rgr)
216227
.context(FailedToResolveConfigSnafu)?;
217228

@@ -228,7 +239,8 @@ pub async fn reconcile(shs: Arc<SparkHistoryServer>, ctx: Arc<Ctx>) -> Result<Ac
228239

229240
let config_map = build_config_map(
230241
&shs,
231-
&config,
242+
_rolegroup_config,
243+
&merged_config,
232244
&resolved_product_image.app_version_label,
233245
&rgr,
234246
s3_log_dir.as_ref().unwrap(),
@@ -244,7 +256,7 @@ pub async fn reconcile(shs: Arc<SparkHistoryServer>, ctx: Arc<Ctx>) -> Result<Ac
244256
&resolved_product_image,
245257
&rgr,
246258
s3_log_dir.as_ref().unwrap(),
247-
&config,
259+
&merged_config,
248260
&serviceaccount,
249261
)?;
250262
cluster_resources
@@ -268,7 +280,8 @@ pub fn error_policy(_obj: Arc<SparkHistoryServer>, _error: &Error, _ctx: Arc<Ctx
268280

269281
fn build_config_map(
270282
shs: &SparkHistoryServer,
271-
config: &HistoryConfig,
283+
config: &HashMap<PropertyNameKind, BTreeMap<String, String>>,
284+
merged_config: &HistoryConfig,
272285
app_version_label: &str,
273286
rolegroupref: &RoleGroupRef<SparkHistoryServer>,
274287
s3_log_dir: &S3LogDir,
@@ -278,6 +291,16 @@ fn build_config_map(
278291

279292
let spark_defaults = spark_defaults(shs, s3_log_dir, rolegroupref)?;
280293

294+
let jvm_sec_props: BTreeMap<String, Option<String>> = config
295+
.get(&PropertyNameKind::File(
296+
JVM_SECURITY_PROPERTIES_FILE.to_string(),
297+
))
298+
.cloned()
299+
.unwrap_or_default()
300+
.into_iter()
301+
.map(|(k, v)| (k, Some(v)))
302+
.collect();
303+
281304
let mut cm_builder = ConfigMapBuilder::new();
282305

283306
cm_builder
@@ -290,12 +313,20 @@ fn build_config_map(
290313
.with_recommended_labels(labels(shs, app_version_label, &rolegroupref.role_group))
291314
.build(),
292315
)
293-
.add_data(SPARK_DEFAULTS_FILE_NAME, spark_defaults);
316+
.add_data(SPARK_DEFAULTS_FILE_NAME, spark_defaults)
317+
.add_data(
318+
JVM_SECURITY_PROPERTIES_FILE,
319+
to_java_properties_string(jvm_sec_props.iter()).with_context(|_| {
320+
JvmSecurityPropertiesSnafu {
321+
rolegroup: rolegroupref.role_group.clone(),
322+
}
323+
})?,
324+
);
294325

295326
product_logging::extend_config_map(
296327
rolegroupref,
297328
vector_aggregator_address,
298-
&config.logging,
329+
&merged_config.logging,
299330
SparkHistoryServerContainer::SparkHistory,
300331
SparkHistoryServerContainer::Vector,
301332
&mut cm_builder,
@@ -593,9 +624,10 @@ fn env_vars(s3logdir: &S3LogDir) -> Vec<EnvVar> {
593624
});
594625
vars.push(EnvVar {
595626
name: "SPARK_HISTORY_OPTS".to_string(),
596-
value: Some(format!(
597-
"-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"
598-
)),
627+
value: Some(vec![
628+
format!("-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"),
629+
format!("-Djava.security.properties={VOLUME_MOUNT_PATH_LOG_CONFIG}/{JVM_SECURITY_PROPERTIES_FILE}"),
630+
].join(" ")),
599631
value_from: None,
600632
});
601633
// if TLS is enabled build truststore

0 commit comments

Comments (0)