# llama.cpp/Dockerfile
# Last updated: 2024-01-24 17:42:34 +05:30
# Base image with Intel-optimized Python.
# TODO(review): pin to a specific tag/digest instead of :latest for reproducible builds.
FROM intel/python:latest

# Build-time only: suppress debconf prompts during apt installs (ARG, not ENV,
# so it does not leak into the runtime environment).
ARG DEBIAN_FRONTEND=noninteractive

# The RUN below pipes wget into gpg/tee; without pipefail an upstream failure
# (e.g. a 404 from the key server) would be silently masked by the pipe.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Single layer: install the oneAPI toolchain, build llama-cpp-python against
# SYCL/MKL, then remove the toolchain again. Everything must happen in ONE
# RUN so the compilers and apt cache never persist in a committed layer.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        apt-utils \
        git \
        gnupg \
        software-properties-common \
        wget && \
    # Register Intel's oneAPI apt repository with its signing key.
    wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \
    echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \
    apt-get update && \
    # Build dependencies: oneAPI DPC++ compiler + MKL, plus the usual CMake toolchain.
    apt-get install -y --no-install-recommends \
        cmake \
        g++ \
        gcc \
        intel-oneapi-compiler-dpcpp-cpp \
        intel-oneapi-mkl \
        intel-oneapi-mkl-devel \
        pkg-config && \
    # Load the oneAPI environment (icx/icpx/xiar on PATH, MKL vars set).
    . /opt/intel/oneapi/setvars.sh && \
    # Tell llama-cpp-python's build to use the Intel compilers and lld.
    export CMAKE_ARGS="-DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DCMAKE_AR=xiar -DCMAKE_CXX_FLAGS=-fuse-ld=lld -DCMAKE_C_FLAGS=-fuse-ld=lld" && \
    export FORCE_CMAKE=1 && \
    # NOTE(review): the git dependency and server extras are unpinned — builds
    # are not reproducible; consider pinning a commit/tag and versions.
    pip install --no-cache-dir git+https://git.baalajimaestro.me/baalajimaestro/llama.cpp.git fastapi uvicorn sse-starlette pydantic-settings starlette-context && \
    # Strip build-time packages back out of the same layer to keep the image small.
    apt-get remove -y \
        apt-utils \
        cmake \
        g++ \
        gcc \
        git \
        gnupg \
        intel-oneapi-compiler-dpcpp-cpp \
        intel-oneapi-mkl \
        intel-oneapi-mkl-devel \
        pkg-config \
        software-properties-common && \
    apt-get autoremove -y && \
    rm -rf /var/lib/apt/lists/*
# Documentation only: the llama_cpp.server API listens on 8000 by default.
EXPOSE 8000

# Bind to all interfaces so the server is reachable from outside the container.
# (key=value form; the legacy space-separated ENV syntax is deprecated.)
ENV HOST=0.0.0.0

# Exec form: the server runs as PID 1 and receives SIGTERM from `docker stop`.
# NOTE(review): container runs as root — consider adding a non-root USER.
ENTRYPOINT ["python", "-m", "llama_cpp.server"]