Skip to content

Commit

Permalink
feat: nonProxyHosts config for spark
Browse files Browse the repository at this point in the history
  • Loading branch information
phlg committed Sep 5, 2023
1 parent 2c25701 commit ff46e95
Show file tree
Hide file tree
Showing 12 changed files with 147 additions and 24 deletions.
4 changes: 2 additions & 2 deletions charts/jupyter-pyspark/Chart.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,9 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 1.29.0
version: 1.30.0

dependencies:
- name: library-chart
version: 1.4.0
version: 1.4.1
repository: https://inseefrlab.github.io/helm-charts-interactive-services
11 changes: 7 additions & 4 deletions charts/jupyter-pyspark/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# jupyter-pyspark

![Version: 1.29.0](https://img.shields.io/badge/Version-1.29.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)
![Version: 1.30.0](https://img.shields.io/badge/Version-1.30.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)

The JupyterLab IDE with PySpark, an interface to use Apache Spark from Python.

Expand All @@ -15,7 +15,7 @@ The JupyterLab IDE with PySpark, an interface to use Apache Spark from Python.

| Repository | Name | Version |
|------------|------|---------|
| https://inseefrlab.github.io/helm-charts-interactive-services | library-chart | 1.4.0 |
| https://inseefrlab.github.io/helm-charts-interactive-services | library-chart | 1.4.1 |

## Values

Expand Down Expand Up @@ -70,6 +70,9 @@ The JupyterLab IDE with PySpark, an interface to use Apache Spark from Python.
| persistence.size | string | `"10Gi"` | |
| podAnnotations | object | `{}` | |
| podSecurityContext.fsGroup | int | `100` | |
| proxy.httpProxy | string | `nil` | |
| proxy.httpsProxy | string | `nil` | |
| proxy.noProxy | string | `nil` | |
| replicaCount | int | `1` | |
| repository.condaRepository | string | `""` | |
| repository.configMapName | string | `""` | |
Expand Down Expand Up @@ -102,8 +105,8 @@ The JupyterLab IDE with PySpark, an interface to use Apache Spark from Python.
| serviceAccount.annotations | object | `{}` | |
| serviceAccount.create | bool | `true` | |
| serviceAccount.name | string | `""` | |
| spark.config."spark.driver.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}"` | |
| spark.config."spark.executor.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}"` | |
| spark.config."spark.driver.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }} -Dhttps.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }}"` | |
| spark.config."spark.executor.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }} -Dhttps.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }}"` | |
| spark.config."spark.kubernetes.authenticate.driver.serviceAccountName" | string | `"{{ include \"library-chart.fullname\" . }}"` | |
| spark.config."spark.kubernetes.container.image" | string | `"{{ ternary .Values.service.image.custom.version .Values.service.image.version .Values.service.image.custom.enabled }}"` | |
| spark.config."spark.kubernetes.driver.pod.name" | string | `"{{ include \"library-chart.fullname\" . }}-0"` | |
Expand Down
33 changes: 33 additions & 0 deletions charts/jupyter-pyspark/values.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -804,6 +804,39 @@
}
}
},
"proxy": {
"description": "Proxy configuration",
"type": "object",
"properties": {
"httpProxy": {
"type": "string",
"description": "Proxy URL for HTTP requests",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.httpProxyUrl}}"
}
},
"httpsProxy": {
"type": "string",
"description": "Proxy URL for HTTPS requests",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.httpsProxyUrl}}"
}
},
"noProxy": {
"type": "string",
"description": "Comma separated list of hosts for which proxy is bypassed",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.noProxy}}"
}
}
}
},
"startupProbe": {
"type": "object",
"description": "Start up probe",
Expand Down
9 changes: 7 additions & 2 deletions charts/jupyter-pyspark/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ spark:
spark.kubernetes.driver.pod.name: '{{ include "library-chart.fullname" . }}-0'
spark.kubernetes.namespace: '{{ .Release.Namespace }}'
spark.kubernetes.container.image: '{{ ternary .Values.service.image.custom.version .Values.service.image.version .Values.service.image.custom.enabled }}'
spark.driver.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}'
spark.executor.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}'
spark.driver.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }} -Dhttps.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }}'
spark.executor.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }} -Dhttps.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }}'
userConfig:
spark.dynamicAllocation.enabled: "true"
spark.dynamicAllocation.initialExecutors: "1"
Expand Down Expand Up @@ -89,6 +89,11 @@ git:
repository: ""
branch: ""

proxy:
  noProxy: ""
  httpProxy: ""
  httpsProxy: ""

repository:
configMapName: ""
pipRepository: ""
Expand Down
4 changes: 2 additions & 2 deletions charts/rstudio-sparkr/Chart.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,9 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 1.14.0
version: 1.15.0

dependencies:
- name: library-chart
version: 1.4.0
version: 1.4.1
repository: https://inseefrlab.github.io/helm-charts-interactive-services
11 changes: 7 additions & 4 deletions charts/rstudio-sparkr/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# rstudio-sparkr

![Version: 1.14.0](https://img.shields.io/badge/Version-1.14.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)
![Version: 1.15.0](https://img.shields.io/badge/Version-1.15.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)

The RStudio IDE with a collection of standard data science packages. It includes SparkR, an R package that provides an interface to use Apache Spark from R.

Expand All @@ -15,7 +15,7 @@ The RStudio IDE with a collection of standard data science packages. It includes

| Repository | Name | Version |
|------------|------|---------|
| https://inseefrlab.github.io/helm-charts-interactive-services | library-chart | 1.4.0 |
| https://inseefrlab.github.io/helm-charts-interactive-services | library-chart | 1.4.1 |

## Values

Expand Down Expand Up @@ -66,6 +66,9 @@ The RStudio IDE with a collection of standard data science packages. It includes
| persistence.size | string | `"10Gi"` | |
| podAnnotations | object | `{}` | |
| podSecurityContext.fsGroup | int | `100` | |
| proxy.httpProxy | string | `nil` | |
| proxy.httpsProxy | string | `nil` | |
| proxy.noProxy | string | `nil` | |
| replicaCount | int | `1` | |
| repository.configMapName | string | `""` | |
| repository.mavenRepository | string | `""` | |
Expand Down Expand Up @@ -98,8 +101,8 @@ The RStudio IDE with a collection of standard data science packages. It includes
| serviceAccount.annotations | object | `{}` | |
| serviceAccount.create | bool | `true` | |
| serviceAccount.name | string | `""` | |
| spark.config."spark.driver.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}"` | |
| spark.config."spark.executor.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}"` | |
| spark.config."spark.driver.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }} -Dhttps.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }}"` | |
| spark.config."spark.executor.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }} -Dhttps.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }}"` | |
| spark.config."spark.kubernetes.authenticate.driver.serviceAccountName" | string | `"{{ include \"library-chart.fullname\" . }}"` | |
| spark.config."spark.kubernetes.container.image" | string | `"{{ ternary .Values.service.image.custom.version .Values.service.image.version .Values.service.image.custom.enabled }}"` | |
| spark.config."spark.kubernetes.driver.pod.name" | string | `"{{ include \"library-chart.fullname\" . }}-0"` | |
Expand Down
33 changes: 33 additions & 0 deletions charts/rstudio-sparkr/values.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -750,6 +750,39 @@
}
}
},
"proxy": {
"description": "Proxy configuration",
"type": "object",
"properties": {
"httpProxy": {
"type": "string",
"description": "Proxy URL for HTTP requests",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.httpProxyUrl}}"
}
},
"httpsProxy": {
"type": "string",
"description": "Proxy URL for HTTPS requests",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.httpsProxyUrl}}"
}
},
"noProxy": {
"type": "string",
"description": "Comma separated list of hosts for which proxy is bypassed",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.noProxy}}"
}
}
}
},
"startupProbe": {
"type": "object",
"description": "Start up probe",
Expand Down
9 changes: 7 additions & 2 deletions charts/rstudio-sparkr/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ spark:
spark.kubernetes.driver.pod.name: '{{ include "library-chart.fullname" . }}-0'
spark.kubernetes.namespace: '{{ .Release.Namespace }}'
spark.kubernetes.container.image: '{{ ternary .Values.service.image.custom.version .Values.service.image.version .Values.service.image.custom.enabled }}'
spark.driver.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}'
spark.executor.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}'
spark.driver.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }} -Dhttps.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }}'
spark.executor.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }} -Dhttps.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }}'
userConfig:
spark.dynamicAllocation.enabled: "true"
spark.dynamicAllocation.initialExecutors: "1"
Expand Down Expand Up @@ -208,6 +208,11 @@ tolerations: []

affinity: {}

proxy:
  noProxy: ""
  httpProxy: ""
  httpsProxy: ""

repository:
configMapName: ""
packageManagerUrl: ""
Expand Down
4 changes: 2 additions & 2 deletions charts/vscode-pyspark/Chart.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,9 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 1.1.0
version: 1.2.0

dependencies:
- name: library-chart
version: 1.4.0
version: 1.4.1
repository: https://inseefrlab.github.io/helm-charts-interactive-services
11 changes: 7 additions & 4 deletions charts/vscode-pyspark/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# vscode-pyspark

![Version: 1.1.0](https://img.shields.io/badge/Version-1.1.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)
![Version: 1.2.0](https://img.shields.io/badge/Version-1.2.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)

The Visual Studio Code IDE with PySpark, an interface to use Apache Spark from Python.

Expand All @@ -15,7 +15,7 @@ The Visual Studio Code IDE with PySpark, an interface to use Apache Spark from P

| Repository | Name | Version |
|------------|------|---------|
| https://inseefrlab.github.io/helm-charts-interactive-services | library-chart | 1.4.0 |
| https://inseefrlab.github.io/helm-charts-interactive-services | library-chart | 1.4.1 |

## Values

Expand Down Expand Up @@ -70,6 +70,9 @@ The Visual Studio Code IDE with PySpark, an interface to use Apache Spark from P
| persistence.size | string | `"10Gi"` | |
| podAnnotations | object | `{}` | |
| podSecurityContext.fsGroup | int | `100` | |
| proxy.httpProxy | string | `nil` | |
| proxy.httpsProxy | string | `nil` | |
| proxy.noProxy | string | `nil` | |
| replicaCount | int | `1` | |
| repository.condaRepository | string | `""` | |
| repository.configMapName | string | `""` | |
Expand Down Expand Up @@ -102,8 +105,8 @@ The Visual Studio Code IDE with PySpark, an interface to use Apache Spark from P
| serviceAccount.annotations | object | `{}` | |
| serviceAccount.create | bool | `true` | |
| serviceAccount.name | string | `""` | |
| spark.config."spark.driver.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}"` | |
| spark.config."spark.executor.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}"` | |
| spark.config."spark.driver.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }} -Dhttps.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }}"` | |
| spark.config."spark.executor.extraJavaOptions" | string | `"-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }} -Dhttps.nonProxyHosts={{ include \"library-chart.sparkNonProxyHosts\" . }}"` | |
| spark.config."spark.kubernetes.authenticate.driver.serviceAccountName" | string | `"{{ include \"library-chart.fullname\" . }}"` | |
| spark.config."spark.kubernetes.container.image" | string | `"{{ ternary .Values.service.image.custom.version .Values.service.image.version .Values.service.image.custom.enabled }}"` | |
| spark.config."spark.kubernetes.driver.pod.name" | string | `"{{ include \"library-chart.fullname\" . }}-0"` | |
Expand Down
33 changes: 33 additions & 0 deletions charts/vscode-pyspark/values.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -803,6 +803,39 @@
}
}
},
"proxy": {
"description": "Proxy configuration",
"type": "object",
"properties": {
"httpProxy": {
"type": "string",
"description": "Proxy URL for HTTP requests",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.httpProxyUrl}}"
}
},
"httpsProxy": {
"type": "string",
"description": "Proxy URL for HTTPS requests",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.httpsProxyUrl}}"
}
},
"noProxy": {
"type": "string",
"description": "Comma separated list of hosts for which proxy is bypassed",
"default": "",
"x-onyxia": {
"hidden": true,
"overwriteDefaultWith": "{{proxyInjection.noProxy}}"
}
}
}
},
"startupProbe": {
"type": "object",
"description": "Start up probe",
Expand Down
9 changes: 7 additions & 2 deletions charts/vscode-pyspark/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ spark:
spark.kubernetes.driver.pod.name: '{{ include "library-chart.fullname" . }}-0'
spark.kubernetes.namespace: '{{ .Release.Namespace }}'
spark.kubernetes.container.image: '{{ ternary .Values.service.image.custom.version .Values.service.image.version .Values.service.image.custom.enabled }}'
spark.driver.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}'
spark.executor.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }}'
spark.driver.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }} -Dhttps.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }}'
spark.executor.extraJavaOptions: '-Dcom.amazonaws.sdk.disableCertChecking={{ .Values.spark.disabledCertChecking }} -Dhttp.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }} -Dhttps.nonProxyHosts={{ include "library-chart.sparkNonProxyHosts" . }}'
userConfig:
spark.dynamicAllocation.enabled: "true"
spark.dynamicAllocation.initialExecutors: "1"
Expand Down Expand Up @@ -89,6 +89,11 @@ git:
repository: ""
branch: ""

proxy:
  noProxy: ""
  httpProxy: ""
  httpsProxy: ""

repository:
configMapName: ""
pipRepository: ""
Expand Down

0 comments on commit ff46e95

Please sign in to comment.