From 5053e35c41abd2f52db2274a5125376b51c86c7a Mon Sep 17 00:00:00 2001
From: ben0i0d
Date: Tue, 11 Mar 2025 00:44:01 +0800
Subject: [PATCH] update

---
 .gitea/workflows/Flow.yaml | 4 ++--
 BigData/pyspark/Dockerfile | 6 +++---
 Math/scilab/Dockerfile     | 1 -
 README.md                  | 4 ++--
 README_CN.md               | 4 ++--
 5 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/.gitea/workflows/Flow.yaml b/.gitea/workflows/Flow.yaml
index d58cb94..ce0741c 100644
--- a/.gitea/workflows/Flow.yaml
+++ b/.gitea/workflows/Flow.yaml
@@ -51,7 +51,7 @@ jobs:
     steps:
       - name: checkout code
         uses: https://eoelab.org:1027/actions/checkout@v4
-      - name: scrpy-c build
+      - name: scrpy build
         uses: https://eoelab.org:1027/actions/build-push-action@v6
         with:
           context: BigData/scrpy
@@ -63,7 +63,7 @@ jobs:
     steps:
       - name: checkout code
         uses: https://eoelab.org:1027/actions/checkout@v4
-      - name: scrpy-c build
+      - name: sql build
         uses: https://eoelab.org:1027/actions/build-push-action@v6
         with:
           context: BigData/sql
diff --git a/BigData/pyspark/Dockerfile b/BigData/pyspark/Dockerfile
index b55f900..857440c 100644
--- a/BigData/pyspark/Dockerfile
+++ b/BigData/pyspark/Dockerfile
@@ -5,7 +5,7 @@ USER root
 # But it seems to be slower, that's why we use recommended site for download
 RUN apt-get update --yes && apt-get install --yes --no-install-recommends wget && \
     apt-get clean && rm -rf /var/lib/apt/lists/* && \
-    wget -qO "spark.tgz" "https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-3.5.4/spark-3.5.4-bin-hadoop3.tgz" && \
+    wget -qO "spark.tgz" "https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz" && \
     tar xzf "spark.tgz" -C /usr/local --no-same-owner && rm "spark.tgz"
 
 FROM eoelab.org:1027/eoeair/jupyter:py-c
@@ -19,11 +19,11 @@ ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M
 # Configure IPython system-wide
 COPY ipython_kernel_config.py "/etc/ipython/"
 
-COPY --chown="root:root" --from=builder /usr/local/spark-3.5.4-bin-hadoop3/ /usr/local/spark-3.5.4-bin-hadoop3/
+COPY --chown="root:root" --from=builder /usr/local/spark-3.5.5-bin-hadoop3/ /usr/local/spark-3.5.5-bin-hadoop3/
 
 RUN apt-get update --yes && \
     apt-get install --yes --no-install-recommends openjdk-17-jre-headless ca-certificates-java && apt-get clean && rm -rf /var/lib/apt/lists/* && \
-    ln -s "spark-3.5.4-bin-hadoop3" "/usr/local/spark" && \
+    ln -s "spark-3.5.5-bin-hadoop3" "/usr/local/spark" && \
     # Add a link in the before_notebook hook in order to source automatically PYTHONPATH
     mkdir -p /usr/local/bin/before-notebook.d && \
     ln -s "/usr/local/spark/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh
diff --git a/Math/scilab/Dockerfile b/Math/scilab/Dockerfile
index 467f04f..a62e038 100644
--- a/Math/scilab/Dockerfile
+++ b/Math/scilab/Dockerfile
@@ -12,5 +12,4 @@ RUN apt-get update --yes && \
 USER $NB_USER
 
 RUN pip install scilab_kernel && \
-    python -m scilab_kernel install --prefix /opt/base/ && \
     pip cache purge
\ No newline at end of file
diff --git a/README.md b/README.md
index bc8db25..6d7a97f 100644
--- a/README.md
+++ b/README.md
@@ -120,9 +120,9 @@ M-->MF(Scilab)
 **Package version**
 
 * cuda 12.4.0
-* Python 3.12
+* Python 3.11
 * Julia latest
-* spark 3.5.4
+* spark 3.5.5
 * flink 1.20.0
 * jupyterlab 4
 * Matlab R2023b
diff --git a/README_CN.md b/README_CN.md
index 6c81e46..90a4fd5 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -127,9 +127,9 @@ M-->MF(Scilab)
 **软件包版本**
 
 * cuda 12.4.0
-* Python 3.12
+* Python 3.11
 * Julia latest
-* spark 3.5.4
+* spark 3.5.5
 * flink 1.20.0
 * jupyterlab 4
 * Matlab R2023b