diff --git a/Dockerfile b/Dockerfile
index 031eec3..02b1470 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,7 +4,6 @@ LABEL maintainer "Gregor Betz and the Logikon AI Team"
 
 ARG VLLM_VERSION=0.3.2
 ARG LM_EVAL_VERSION=v0.4.1
-ARG FLASHINFER_VERSION=v0.1.3
 
 ENV APP_HOME .
 WORKDIR $APP_HOME
@@ -13,14 +12,13 @@ WORKDIR $APP_HOME
 
 RUN git clone https://github.com/logikon-ai/cot-eval.git
 RUN git clone --branch ${LM_EVAL_VERSION} https://github.com/EleutherAI/lm-evaluation-harness.git
-RUN git clone --branch ${FLASHINFER_VERSION} https://github.com/flashinfer-ai/flashinfer.git --recursive
 
 
 # Install python packages
 RUN pip install --upgrade pip
 RUN pip uninstall transformer-engine -y
 
-RUN cd lm-evaluation-harness && pip install -e ".[vllm]"
+RUN cd lm-evaluation-harness && pip install -e .
 RUN cd cot-eval && pip install -e .
 
 RUN pip install -U vllm==${VLLM_VERSION}
@@ -32,7 +30,7 @@ RUN pip uninstall -y flash-attn
 RUN pip install flash-attn --no-build-isolation
 
 # Install flashinfer backend
-RUN cd flashinfer/python && pip install -e .
+RUN pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.3
 
 # Run cot-eval script on startup
 