Use Intel MKL instead of OpenBLAS
Signed-off-by: baalajimaestro <me@baalajimaestro.me>
This commit is contained in:
parent
97c5ef3242
commit
ada8710817
28
Dockerfile
28
Dockerfile
|
@ -1,14 +1,26 @@
|
|||
FROM python:3-alpine
|
||||
# NOTE(review): untagged FROM resolves to :latest — pin a specific release tag
# (or digest) for reproducible builds; confirm which intelpython release is intended.
FROM intelpython/intelpython3_full
|
||||
|
||||
ARG CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS"
|
||||
ARG FORCE_CMAKE=1
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN apk update && apk add --no-cache cmake make gcc g++ openblas-dev openblas git && \
|
||||
pip install git+https://git.baalajimaestro.me/baalajimaestro/llama.cpp.git fastapi uvicorn sse-starlette pydantic-settings && \
|
||||
apk del make cmake gcc g++ openblas-dev git && \
|
||||
apk add --no-cache libstdc++
|
||||
# Register Intel's oneAPI APT repository (signed-by keyring) and install the MKL runtime.
# apt-get (not apt) is the stable scripting interface; --no-install-recommends keeps the layer lean.
# The apt lists are deliberately NOT removed here: the next RUN installs build packages
# from them without running its own update.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        apt-utils \
        gnupg \
        software-properties-common && \
    wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \
    echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \
    apt-get update && \
    apt-get install -y --no-install-recommends intel-oneapi-mkl
|
||||
|
||||
# Build and install the llama.cpp Python bindings against Intel MKL using the oneAPI
# icx/icpx compilers, then strip the build toolchain and apt lists in the SAME layer
# so none of it persists in the image.
# apt-get update is run here explicitly instead of relying on lists left by an
# earlier layer; pip uses --no-cache-dir to avoid baking the wheel cache into the layer.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        cmake \
        g++ \
        gcc \
        intel-oneapi-compiler-dpcpp-cpp \
        intel-oneapi-mkl-devel \
        pkg-config && \
    . /opt/intel/oneapi/setvars.sh && \
    export CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx" && \
    export FORCE_CMAKE=1 && \
    pip install --no-cache-dir git+https://git.baalajimaestro.me/baalajimaestro/llama.cpp.git fastapi uvicorn sse-starlette pydantic-settings && \
    apt-get remove -y cmake g++ gcc intel-oneapi-compiler-dpcpp-cpp intel-oneapi-mkl-devel pkg-config && \
    apt-get autoremove -y && \
    rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# COPY (not ADD) for plain local files — ADD's extra tar/URL semantics are not needed here.
COPY entrypoint.sh /
|
||||
|
||||
# Documents the server port for operators/tooling (EXPOSE does not publish it).
EXPOSE 8000

# Bind the server to all interfaces inside the container.
# key=value form — the legacy space-separated ENV syntax is deprecated.
ENV HOST=0.0.0.0
|
||||
|
||||
ENTRYPOINT ["python3", "-m", "llama_cpp.server"]
|
||||
# Exec-form entrypoint: the wrapper script loads the oneAPI environment before
# starting the llama.cpp server (see entrypoint.sh).
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
|
|
5
entrypoint.sh
Executable file
5
entrypoint.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
# Entrypoint: load the Intel oneAPI runtime environment (MKL libraries), then
# launch the llama.cpp server, forwarding any container arguments.
set -e

source /opt/intel/oneapi/setvars.sh

# exec replaces the shell so the server runs as PID 1 and receives SIGTERM
# from `docker stop`; "$@" is quoted to preserve arguments containing spaces.
exec python -m llama_cpp.server "$@"
|
||||
|
Loading…
Reference in a new issue