update install.sh for llm

This commit is contained in:
wangxingjun778
2025-09-15 21:50:16 +08:00
parent 208fd68427
commit 7f994b908c

View File

@@ -27,12 +27,12 @@ pip install --no-cache-dir triton auto-gptq==$autogptq_version -U && pip cache p
# Install vLLM only when the requested version is new enough.
# Version compare via sort -V: print "0.6.0" and $vllm_version, sort them
# version-aware, and take the smallest — if the smallest is "0.6.0", then
# $vllm_version >= 0.6.0. (NOTE(review): the previous unconditional
# `pip install vllm==$vllm_version` before this check defeated the gate
# and contradicted the "skipping installation" message below; removed.)
if [[ "$(printf '%s\n' "0.6.0" "$vllm_version" | sort -V | head -n1)" = "0.6.0" ]]; then
  # vllm_version is >= 0.6.0
  echo ">>Debug: Condition met: vllm_version >= 0.6.0, proceeding with installation..."
  pip install --no-cache-dir "vllm==$vllm_version" && pip cache purge
else
  echo ">>Debug: Condition not met: vllm_version < 0.6.0, skipping installation. (vllm_version = $vllm_version)"
fi
# pip uninstall -y torch-scatter && TORCH_CUDA_ARCH_LIST="6.0;6.1;6.2;7.0;7.5;8.0;8.6;8.9;9.0" pip install --no-cache-dir -U torch-scatter