FROM eoelab.org:1027/eoeair/jupyter:py-c AS builder

USER root

# Use the https://archive.apache.org/dist/ site if you need to download old Spark versions,
# but it seems slower, so the recommended download site is used instead
RUN apt-get update --yes && apt-get install --yes --no-install-recommends wget && \
    apt-get clean && rm -rf /var/lib/apt/lists/* && \
    wget -qO "spark.tgz" "https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz" && \
    tar xzf "spark.tgz" -C /usr/local --no-same-owner && rm "spark.tgz"
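
# Note: this multi-stage build keeps wget and the downloaded tarball out of the
# final image; only the unpacked /usr/local/spark-3.5.5-bin-hadoop3 tree is
# copied forward below.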

FROM eoelab.org:1027/eoeair/jupyter:py-c

USER root

# Configure Spark
ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
    SPARK_HOME=/usr/local/spark \
    PATH="${PATH}:/usr/local/spark/bin"
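
# To sanity-check these settings in a built container (a sketch; the image tag
# is a placeholder, not defined in this file):
#   docker run --rm <image> bash -c 'echo "$SPARK_HOME" && spark-submit --version'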

# Configure IPython system-wide
COPY ipython_kernel_config.py "/etc/ipython/"
COPY --chown="root:root" --from=builder /usr/local/spark-3.5.5-bin-hadoop3/ /usr/local/spark-3.5.5-bin-hadoop3/

RUN apt-get update --yes && \
    apt-get install --yes --no-install-recommends openjdk-17-jre-headless ca-certificates-java && apt-get clean && rm -rf /var/lib/apt/lists/* && \
    ln -s "spark-3.5.5-bin-hadoop3" "/usr/local/spark" && \
    # Link spark-config.sh into the before_notebook hook so that PYTHONPATH is sourced automatically
    mkdir -p /usr/local/bin/before-notebook.d && \
    ln -s "/usr/local/spark/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh
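
# The before-notebook.d hook relies on the Jupyter Docker Stacks startup logic,
# which sources every *.sh file in /usr/local/bin/before-notebook.d before the
# server starts; Spark's sbin/spark-config.sh uses that to put the pyspark and
# py4j libraries on PYTHONPATH. (Assumed from the upstream jupyter/docker-stacks
# convention that the jupyter:py-c base image appears to follow.)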

USER ${NB_UID}

# Install pyspark and findspark
RUN pip install 'pyspark' 'findspark' && \
    pip cache purge
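
# Smoke test for the finished image (a sketch; the image tag is a placeholder):
#   docker run --rm <image> python -c "import findspark; findspark.init(); from pyspark.sql import SparkSession; s = SparkSession.builder.master('local[1]').getOrCreate(); print(s.range(5).count()); s.stop()"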