fixed envOverrides and extended test
adwk67 committed Aug 14, 2024
1 parent 7b25959 commit 4435584
Showing 5 changed files with 60 additions and 3 deletions.
30 changes: 30 additions & 0 deletions rust/crd/src/lib.rs
@@ -727,6 +727,36 @@ impl SparkApplication {
         }
     }
 
+    pub fn merged_env(&self, role: SparkApplicationRole, env: &[EnvVar]) -> Vec<EnvVar> {
+        // use a BTree internally to enable replacement of existing keys
+        let mut env_vars: BTreeMap<String, EnvVar> = BTreeMap::new();
+
+        for e in env {
+            env_vars.insert(e.clone().name, e.to_owned());
+        }
+
+        if let Some(env_map) = match role {
+            SparkApplicationRole::Submit => self.spec.job.clone().map(|j| j.env_overrides),
+            SparkApplicationRole::Driver => self.spec.driver.clone().map(|d| d.env_overrides),
+            SparkApplicationRole::Executor => {
+                self.spec.executor.clone().map(|r| r.config.env_overrides)
+            }
+        } {
+            for (k, v) in env_map {
+                env_vars.insert(
+                    k.clone(),
+                    EnvVar {
+                        name: k,
+                        value: Some(v),
+                        value_from: None,
+                    },
+                );
+            }
+        }
+
+        env_vars.into_values().collect()
+    }
+
     pub fn validated_role_config(
         &self,
         resolved_product_image: &ResolvedProductImage,
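Because the merge goes through a BTreeMap keyed on the variable name, an entry from envOverrides replaces a pod-level env entry of the same name, every other entry passes through untouched, and the result comes back sorted by name rather than in input order. A minimal standalone sketch of the same semantics (illustrative only: merge_env is a hypothetical free function over k8s-openapi's EnvVar, not part of the operator's API):

use std::collections::BTreeMap;

use k8s_openapi::api::core::v1::EnvVar;

// Sketch of the merge semantics: overrides win on name collisions.
fn merge_env(env: &[EnvVar], overrides: &BTreeMap<String, String>) -> Vec<EnvVar> {
    let mut env_vars: BTreeMap<String, EnvVar> = BTreeMap::new();
    for e in env {
        env_vars.insert(e.name.clone(), e.clone());
    }
    for (k, v) in overrides {
        // Same key: this insert replaces the pod-level entry.
        env_vars.insert(
            k.clone(),
            EnvVar {
                name: k.clone(),
                value: Some(v.clone()),
                value_from: None,
            },
        );
    }
    // BTreeMap iterates in key order, so the output is sorted by name.
    env_vars.into_values().collect()
}

With env = [TEST_SPARK_VAR_0=ORIGINAL, TEST_SPARK_VAR_1=DONOTREPLACE] and overrides = {TEST_SPARK_VAR_0: REPLACED}, this yields TEST_SPARK_VAR_0=REPLACED and TEST_SPARK_VAR_1=DONOTREPLACE, which is exactly what the new kuttl assertions below check.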
3 changes: 2 additions & 1 deletion rust/crd/src/s3logdir.rs
@@ -117,7 +117,8 @@ impl S3LogDir {
     /// * spark.hadoop.fs.s3a.aws.credentials.provider
     /// * spark.hadoop.fs.s3a.access.key
     /// * spark.hadoop.fs.s3a.secret.key
-    /// instead, the environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are set
+    ///
+    /// Instead, the environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are set
     /// on the container start command.
     pub fn history_server_spark_config(&self) -> BTreeMap<String, String> {
         let mut result = BTreeMap::new();
6 changes: 4 additions & 2 deletions rust/operator-binary/src/spark_k8s_controller.rs
@@ -479,9 +479,10 @@ fn pod_template(
 ) -> Result<PodTemplateSpec> {
     let container_name = SparkContainer::Spark.to_string();
     let mut cb = ContainerBuilder::new(&container_name).context(IllegalContainerNameSnafu)?;
+    let merged_env = spark_application.merged_env(role.clone(), env);
 
     cb.add_volume_mounts(config.volume_mounts(spark_application, s3conn, s3logdir))
-        .add_env_vars(env.to_vec())
+        .add_env_vars(merged_env)
         .resources(config.resources.clone().into())
         .image_from_product_image(spark_image);
 
@@ -716,13 +717,14 @@ fn spark_job(
         .context(IllegalContainerNameSnafu)?;
 
     let args = [job_commands.join(" ")];
+    let merged_env = spark_application.merged_env(SparkApplicationRole::Submit, env);
 
     cb.image_from_product_image(spark_image)
         .command(vec!["/bin/bash".to_string(), "-c".to_string()])
        .args(vec![args.join(" && ")])
        .resources(job_config.resources.clone().into())
        .add_volume_mounts(spark_application.spark_job_volume_mounts(s3conn, s3logdir))
-        .add_env_vars(env.to_vec())
+        .add_env_vars(merged_env)
        .add_env_var(
            "SPARK_SUBMIT_OPTS",
            format!(
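A note on the design choice: both call sites build the container from a single merged_env list, so each container spec carries exactly one EnvVar per name with the role's envOverrides already folded in. Appending the overrides as a separate add_env_vars call would instead leave duplicate names in the pod spec, whose precedence Kubernetes does not clearly define.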
9 changes: 9 additions & 0 deletions
@@ -24,7 +24,14 @@ spec:
       prefix: eventlogs/
       bucket:
         reference: spark-history-s3-bucket
+  env:
+    - name: TEST_SPARK_VAR_0
+      value: ORIGINAL
+    - name: TEST_SPARK_VAR_1
+      value: DONOTREPLACE
   job:
+    envOverrides: &envOverrides
+      TEST_SPARK_VAR_0: REPLACED
     podOverrides:
       spec:
         containers:
@@ -37,6 +44,7 @@ spec:
                 cpu: 1500m
                 memory: 1024Mi
   driver:
+    envOverrides: *envOverrides
     podOverrides:
       spec:
         containers:
@@ -50,6 +58,7 @@ spec:
                 memory: 1024Mi
   executor:
     replicas: 1
+    envOverrides: *envOverrides
     podOverrides:
       spec:
         containers:
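The &envOverrides anchor defined on the job role is reused via *envOverrides aliases on the driver and executor roles, so a single definition exercises all three match arms of merged_env (Submit, Driver, and Executor) with the same override map.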
15 changes: 15 additions & 0 deletions tests/templates/kuttl/pod_overrides/11-assert.yaml
@@ -0,0 +1,15 @@
+---
+apiVersion: kuttl.dev/v1beta1
+kind: TestAssert
+timeout: 30
+commands:
+- script: kubectl -n $NAMESPACE get job spark-pi-s3-1 -o yaml | yq '.spec.template.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_0").value' | grep 'REPLACED'
+- script: kubectl -n $NAMESPACE get job spark-pi-s3-1 -o yaml | yq '.spec.template.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_1").value' | grep 'DONOTREPLACE'
+- script: kubectl -n $NAMESPACE get cm spark-pi-s3-1-driver-pod-template -o json | jq -r '.data."template.yaml"' | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_0").value' | grep 'REPLACED'
+- script: kubectl -n $NAMESPACE get cm spark-pi-s3-1-driver-pod-template -o json | jq -r '.data."template.yaml"' | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_1").value' | grep 'DONOTREPLACE'
+- script: kubectl -n $NAMESPACE get cm spark-pi-s3-1-executor-pod-template -o json | jq -r '.data."template.yaml"' | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_0").value' | grep 'REPLACED'
+- script: kubectl -n $NAMESPACE get cm spark-pi-s3-1-executor-pod-template -o json | jq -r '.data."template.yaml"' | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_1").value' | grep 'DONOTREPLACE'
+- script: |
+    POD=$(kubectl -n $NAMESPACE get pod -l app.kubernetes.io/instance=spark-pi-s3-1 -o name | head -n 1 | sed -e 's#pod/##')
+    kubectl -n $NAMESPACE get pod $POD -o yaml | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_0").value' | grep 'REPLACED'
+    kubectl -n $NAMESPACE get pod $POD -o yaml | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_1").value' | grep 'DONOTREPLACE'
