ben0i0d 2025-03-11 00:44:01 +08:00
parent 7ccaedbddd
commit 5053e35c41
5 changed files with 9 additions and 10 deletions
.gitea/workflows
BigData/pyspark
Math/scilab
README.md
README_CN.md

@@ -51,7 +51,7 @@ jobs:
     steps:
       - name: checkout code
        uses: https://eoelab.org:1027/actions/checkout@v4
-      - name: scrpy-c build
+      - name: scrpy build
        uses: https://eoelab.org:1027/actions/build-push-action@v6
        with:
          context: BigData/scrpy
@@ -63,7 +63,7 @@ jobs:
     steps:
       - name: checkout code
        uses: https://eoelab.org:1027/actions/checkout@v4
-      - name: scrpy-c build
+      - name: sql build
        uses: https://eoelab.org:1027/actions/build-push-action@v6
        with:
          context: BigData/sql

@@ -5,7 +5,7 @@ USER root
 # But it seems to be slower, that's why we use recommended site for download
 RUN apt-get update --yes && apt-get install --yes --no-install-recommends wget && \
     apt-get clean && rm -rf /var/lib/apt/lists/* && \
-    wget -qO "spark.tgz" "https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-3.5.4/spark-3.5.4-bin-hadoop3.tgz" && \
+    wget -qO "spark.tgz" "https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz" && \
     tar xzf "spark.tgz" -C /usr/local --no-same-owner && rm "spark.tgz"
 FROM eoelab.org:1027/eoeair/jupyter:py-c
@@ -19,11 +19,11 @@ ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M
 # Configure IPython system-wide
 COPY ipython_kernel_config.py "/etc/ipython/"
-COPY --chown="root:root" --from=builder /usr/local/spark-3.5.4-bin-hadoop3/ /usr/local/spark-3.5.4-bin-hadoop3/
+COPY --chown="root:root" --from=builder /usr/local/spark-3.5.5-bin-hadoop3/ /usr/local/spark-3.5.5-bin-hadoop3/
 RUN apt-get update --yes && \
     apt-get install --yes --no-install-recommends openjdk-17-jre-headless ca-certificates-java && apt-get clean && rm -rf /var/lib/apt/lists/* && \
-    ln -s "spark-3.5.4-bin-hadoop3" "/usr/local/spark" && \
+    ln -s "spark-3.5.5-bin-hadoop3" "/usr/local/spark" && \
     # Add a link in the before_notebook hook in order to source automatically PYTHONPATH
     mkdir -p /usr/local/bin/before-notebook.d && \
     ln -s "/usr/local/spark/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh
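Because the Spark version string appears in three places (download URL, COPY path, and symlink target), a quick way to confirm the rebuilt image really picked up 3.5.5 is to ask Spark itself; the image name below is a placeholder, not the project's actual tag:

    docker run --rm <pyspark-image> /usr/local/spark/bin/spark-submit --version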

@@ -12,5 +12,4 @@ RUN apt-get update --yes && \
 USER $NB_USER
 RUN pip install scilab_kernel && \
-    python -m scilab_kernel install --prefix /opt/base/ && \
     pip cache purge
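The dropped line removes the explicit kernelspec registration, on the assumption that installing scilab_kernel via pip is enough for Jupyter to discover the kernel. One way to check in the built image (the image name is a placeholder):

    docker run --rm <scilab-image> jupyter kernelspec list

Output listing a Scilab kernel alongside python3 would confirm the explicit install step was redundant.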

@@ -120,9 +120,9 @@ M-->MF(Scilab)
 **Package version**
 * cuda 12.4.0
-* Python 3.12
+* Python 3.11
 * Julia latest
-* spark 3.5.4
+* spark 3.5.5
 * flink 1.20.0
 * jupyterlab 4
 * Matlab R2023b

@@ -127,9 +127,9 @@ M-->MF(Scilab)
 **软件包版本**
 * cuda 12.4.0
-* Python 3.12
+* Python 3.11
 * Julia latest
-* spark 3.5.4
+* spark 3.5.5
 * flink 1.20.0
 * jupyterlab 4
 * Matlab R2023b