Mirror of https://github.com/modelscope/modelscope.git
Commit: force reinstall 1.9.2
@@ -53,7 +53,7 @@ RUN if [ "$USE_GPU" = "True" ] ; then \
 fi
 
 # torchmetrics==0.11.4 for ofa
-RUN pip install --no-cache-dir tiktoken torchmetrics==0.11.4 transformers_stream_generator 'protobuf<=3.20.0' bitsandbytes basicsr
+RUN pip install --no-cache-dir torchmetrics==0.11.4 tiktoken transformers_stream_generator 'protobuf<=3.20.0' bitsandbytes basicsr
 COPY docker/scripts/install_flash_attension.sh /tmp/install_flash_attension.sh
 RUN if [ "$USE_GPU" = "True" ] ; then \
     bash /tmp/install_flash_attension.sh; \
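Note: the commit title mentions forcing a reinstall of 1.9.2, while the hunk shown here only reorders the pinned pip packages. The lines below are a hedged sketch, not part of this commit, of how a Dockerfile layer could force-reinstall a pinned release; the package name modelscope and the ==1.9.2 pin are assumptions taken only from the commit title.

# Hypothetical sketch (not in this diff): force-reinstall a pinned release so a
# cached or previously installed copy cannot shadow the requested version.
RUN pip install --no-cache-dir --force-reinstall modelscope==1.9.2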
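The COPY/RUN pair above stages and runs docker/scripts/install_flash_attension.sh on GPU builds. Its contents are not part of this diff; the sketch below only illustrates the common pattern such a script follows, installing the flash-attn wheel with pip, and should not be read as the repository's actual script.

#!/bin/bash
# Hypothetical sketch of a flash-attention install step; the real
# docker/scripts/install_flash_attension.sh is not shown in this diff.
set -e
pip install --no-cache-dir flash-attn --no-build-isolation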