forked from eoeair/cenv

add llama-server

Update Dockerfile: fix the working directory path and add a health check and entrypoint
pdch 2024-12-23 15:57:27 +08:00
parent a4aff3ed8a
commit 4572633365
2 changed files with 39 additions and 0 deletions


@@ -274,6 +274,20 @@ jobs:
          context: GS/steam/steam_wine
          tags: eoelab.org:1027/${{ gitea.repository }}:steam_wine
  llama_cpu:
    runs-on: runner
    steps:
      - name: checkout code
        uses: https://eoelab.org:1027/actions/checkout@v4
      - name: llama_cpu build
        uses: https://eoelab.org:1027/actions/build-push-action@v6
        with:
          context: llama/cpu
          build-args: |
            http_proxy=http://192.168.2.2:7890
            https_proxy=http://192.168.2.2:7890
          tags: eoelab.org:1027/${{ gitea.repository }}:llama_cpu
  Push:
    runs-on: runner
    needs: [Steam_Wine,CUDA_Devel]
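Outside CI, roughly the same image can be built locally with plain docker. This is only a sketch of what the build-push-action step above does: the proxy values are copied from the build-args shown, the tag is illustrative, and <owner>/<repo> stands in for the expanded ${{ gitea.repository }}:

    docker build llama/cpu \
        --build-arg http_proxy=http://192.168.2.2:7890 \
        --build-arg https_proxy=http://192.168.2.2:7890 \
        -t eoelab.org:1027/<owner>/<repo>:llama_cpu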

llama/cpu/Dockerfile (new file)

@@ -0,0 +1,25 @@
FROM eoelab.org:1027/ben0i0d/cenv:base
USER root
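# build toolchain for compiling llama.cpp; the libcurl headers are needed because the CMake step below enables LLAMA_CURL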
RUN apt-get update --yes && apt-get install --yes gcc gdb cmake build-essential git libcurl4-openssl-dev && \
    apt-get clean && rm -rf /var/lib/apt/lists/*
WORKDIR "${HOME}"
# fetch and build llama.cpp; clone into /llama.cpp so the absolute paths used below resolve
RUN git clone https://github.com/ggerganov/llama.cpp.git /llama.cpp
WORKDIR /llama.cpp
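# GGML_BACKEND_DL=ON with GGML_CPU_ALL_VARIANTS=ON builds several CPU backend variants and picks one at runtime,
# keeping the image portable across hosts; GGML_NATIVE=OFF avoids baking in host-specific instructions and
# LLAMA_CURL=ON lets llama-server fetch models over HTTP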
RUN cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF && \
    cmake --build build -j $(nproc)
WORKDIR "${HOME}"
RUN mv /llama.cpp/build/bin/llama-server /usr/local/bin/llama-server && \
    rm -rf /llama.cpp
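# llama-server exposes a /health endpoint on its default port 8080; curl must be present in the base image for this check to work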
HEALTHCHECK CMD [ "curl", "-f", "http://localhost:8080/health" ]
ENTRYPOINT [ "llama-server" ]
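A minimal smoke test of the resulting image might look like the following; the image tag and the model path are placeholders, and --host 0.0.0.0 is needed so the server is reachable from outside the container. Because the entrypoint is llama-server, everything after the image name is passed to it as arguments:

    docker run --rm -p 8080:8080 -v /path/to/models:/models \
        eoelab.org:1027/<owner>/<repo>:llama_cpu \
        -m /models/model.gguf --host 0.0.0.0 --port 8080
    curl -f http://localhost:8080/health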