[DPE-3067] Changes following up entrypoint service refactoring (#57)
deusebio authored Nov 30, 2023
1 parent 8eff7ed commit 28cc6aa
Showing 6 changed files with 11 additions and 47 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,4 +1,4 @@
ARG BASE_IMAGE=base-charmed-spark:latest
FROM $BASE_IMAGE
# Provide Default Entrypoint for Pebble
-ENTRYPOINT [ "/bin/pebble", "enter", "--verbose", "--args", "entrypoint" ]
+ENTRYPOINT [ "/bin/pebble", "enter", "--verbose", "--args", "sparkd" ]
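For reference, a brief usage sketch of the new default (the image tag is taken from the README in this repository; the behaviour follows from the Pebble layer defined in rockcraft.yaml below):

```shell
# With no extra arguments the container enters Pebble and starts the default
# `sparkd` service (formerly named `entrypoint`):
docker run ghcr.io/canonical/charmed-spark:3.4.1-22.04_edge

# Optional services can still be started explicitly, as shown in the README:
docker run ghcr.io/canonical/charmed-spark:3.4.1-22.04_edge \; start history-server
```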
14 changes: 0 additions & 14 deletions README.md
@@ -71,20 +71,6 @@ Charmed Spark Rock Image is delivered with Pebble already included in order to m
docker run ghcr.io/canonical/charmed-spark:3.4.1-22.04_edge \; start history-server
```

-#### Starting Jupyter Notebook
-
-```shell
-docker run \
-    -v /path/to/kube/config:/var/lib/spark/.kube/config \
-    -p <port>:8888
-    ghcr.io/canonical/charmed-spark:3.4.1-22.04_edge \
-    \; --args jupyter --username <spark-service-account> --namespace <spark-namespace> \
-    \; start jupyter
-```
-
-Make sure to have created the `<spark-service-account>` in the `<spark-namespace>` with the `spark8t` CLI beforehand.
-You should be able to access the jupyter server at `http://0.0.0.0:<port>`.

## Developers and Contributing

Please see the [CONTRIBUTING.md](https://github.com/canonical/charmed-spark-rock/blob/3.4-22.04/edge/CONTRIBUTING.md) for guidelines and for developer guidance.
File renamed without changes.
File renamed without changes.
File renamed without changes.
42 changes: 10 additions & 32 deletions rockcraft.yaml
@@ -22,15 +22,15 @@ environment:
  SPARK_LOG_DIR: /var/log/spark

services:
-  entrypoint:
-    command: "/bin/bash /opt/pebble/charmed-spark-entrypoint.sh"
+  sparkd:
+    command: "/bin/bash /opt/pebble/sparkd.sh"
    summary: "This is the service to startup Spark processes using the Spark entrypoint"
    override: replace
    startup: enabled
    on-success: shutdown
    on-failure: shutdown
  history-server:
-    command: "/bin/bash /opt/pebble/charmed-spark-history-server.sh"
+    command: "/bin/bash /opt/pebble/history-server.sh"
    summary: "This is the Spark History Server service"
    override: replace
    startup: disabled
@@ -39,20 +39,12 @@ services:
    environment:
      SPARK_PROPERTIES_FILE: /etc/spark8t/conf/spark-defaults.conf
  thrift-server:
-    command: "/bin/bash /opt/pebble/charmed-spark-thrift-server.sh"
+    command: "/bin/bash /opt/pebble/thrift-server.sh"
    summary: "This is the Spark Thrift Server service"
    override: replace
    startup: disabled
    environment:
      SPARK_PROPERTIES_FILE: /etc/spark8t/conf/spark-defaults.conf
-  jupyter:
-    command: "spark-client.pyspark [ --username spark --namespace spark ]"
-    summary: "This is the Spark-powered Jupyter service"
-    override: replace
-    startup: disabled
-    environment:
-      PYSPARK_DRIVER_PYTHON: jupyter
-      PYSPARK_DRIVER_PYTHON_OPTS: "lab --no-browser --port=8888 --ip=0.0.0.0 --NotebookApp.token='' --notebook-dir=/var/lib/spark/notebook"

parts:
  spark:
@@ -146,20 +138,6 @@
    stage:
      - opt/spark8t/python/dist

-  jupyter:
-    plugin: python
-    source: .
-    python-packages:
-      - jupyterlab
-    stage-packages:
-      - python3-venv
-    organize:
-      lib: usr/lib
-      bin: usr/bin
-      share: usr/share
-    stage:
-      - usr

  kubectl:
    plugin: nil
    build-packages:
@@ -187,18 +165,18 @@
    source: files/spark
    organize:
      conf/spark-defaults.conf: etc/spark8t/conf/spark-defaults.conf
-      bin/charmed-spark-entrypoint.sh: opt/pebble/charmed-spark-entrypoint.sh
-      bin/charmed-spark-history-server.sh: opt/pebble/charmed-spark-history-server.sh
-      bin/charmed-spark-thrift-server.sh: opt/pebble/charmed-spark-thrift-server.sh
+      bin/sparkd.sh: opt/pebble/sparkd.sh
+      bin/history-server.sh: opt/pebble/history-server.sh
+      bin/thrift-server.sh: opt/pebble/thrift-server.sh
      bin/spark-client.pyspark: opt/spark-client/python/bin/spark-client.pyspark
      bin/spark-client.service-account-registry: opt/spark-client/python/bin/spark-client.service-account-registry
      bin/spark-client.spark-shell: opt/spark-client/python/bin/spark-client.spark-shell
      bin/spark-client.spark-submit: opt/spark-client/python/bin/spark-client.spark-submit
    stage:
      - etc/spark8t/conf/
-      - opt/pebble/charmed-spark-entrypoint.sh
-      - opt/pebble/charmed-spark-history-server.sh
-      - opt/pebble/charmed-spark-thrift-server.sh
+      - opt/pebble/sparkd.sh
+      - opt/pebble/history-server.sh
+      - opt/pebble/thrift-server.sh
      - opt/spark-client/python/bin/spark-client.pyspark
      - opt/spark-client/python/bin/spark-client.service-account-registry
      - opt/spark-client/python/bin/spark-client.spark-shell
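For orientation after the renames, a hedged sketch of inspecting these services from inside a running container (`pebble services`, `pebble start`, and `pebble logs` are standard Pebble CLI commands; this assumes `pebble` is on the PATH, as the Dockerfile entrypoint suggests):

```shell
# List the services defined by the layer: `sparkd` replaces the old
# `entrypoint` service, while `history-server` and `thrift-server` keep their
# names but now point at the renamed scripts under /opt/pebble/.
pebble services

# Start an optional service and follow its logs:
pebble start history-server
pebble logs history-server
```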
