Update setup_blender.sh
setup_blender.sh  CHANGED  (+10 -13)
@@ -23,8 +23,9 @@ TORCHVISION_VERSION="0.18.1"
 TARGET_CUDA_VERSION_SHORT="cu121" # PyTorch 2.3.1 wheels are available for cu121
 TORCH_INDEX_URL="https://download.pytorch.org/whl/${TARGET_CUDA_VERSION_SHORT}"
 
-#
-
+# Direct URL for the compatible flash-attn wheel for v2.5.8
+# Corresponds to Python 3.11 (cp311), PyTorch 2.3.1+cu121
+FLASH_ATTN_WHEEL_URL="https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+pt23cu121-cp311-cp311-linux_x86_64.whl"
 
 # --- Set Environment Variables for Build ---
 export CUDA_HOME=${CUDA_HOME:-/usr/local/cuda} # This might be nominal if nvcc isn't actually used
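Note: the wheel tags in FLASH_ATTN_WHEEL_URL must match the environment exactly; cp311 means CPython 3.11 and pt23cu121 means a torch 2.3.x build for CUDA 12.1. A minimal pre-flight sketch (to be run after Step 1 has installed PyTorch; it assumes only the script's own BLENDER_PY_EXEC variable) that surfaces a mismatch before the wheel is installed:

# Sketch: print the interpreter tag and torch build so a mismatch with the
# cp311 / pt23cu121 wheel tags is visible before Step 2 runs.
"${BLENDER_PY_EXEC}" -c "import sys; print('python tag: cp%d%d' % sys.version_info[:2])"
"${BLENDER_PY_EXEC}" -c "import torch; print('torch', torch.__version__, 'built for CUDA', torch.version.cuda)"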
@@ -45,7 +46,7 @@ echo "Blender Python Executable: ${BLENDER_PY_EXEC}"
 echo "Using CUDA_HOME=${CUDA_HOME}"
 echo "Targeting PyTorch for CUDA: ${TARGET_CUDA_VERSION_SHORT}"
 echo "TORCH_CUDA_ARCH_LIST: ${TORCH_CUDA_ARCH_LIST}"
-echo "Attempting to install flash-attn
+echo "Attempting to install flash-attn from direct wheel URL: ${FLASH_ATTN_WHEEL_URL}"
 
 # --- Download and Extract Blender ---
 mkdir -p "${BLENDER_INSTALL_BASE}"
@@ -106,17 +107,13 @@ echo "Step 1: Installing PyTorch ${TORCH_VERSION} (for CUDA ${TARGET_CUDA_VERSIO
     --index-url ${TORCH_INDEX_URL} -vvv
 echo "PyTorch and Torchvision installation attempted."
 
-echo "Step 2: Installing flash-attn
-#
-# --no-
-# --only-binary ":all:" tells pip to not attempt source builds for any package in this command.
-# --find-links adds PyTorch's index for this specific command, in case compatible wheels are there.
+echo "Step 2: Installing flash-attn from direct wheel URL..."
+# Install flash-attn from a direct wheel URL to ensure compatibility and avoid source build.
+# Using --no-deps as we manage other dependencies separately and assume the wheel is self-contained or relies on PyTorch.
 "${BLENDER_PY_EXEC}" -m pip install --no-cache-dir \
-    --no-
-
-
-    flash-attn==${FLASH_ATTN_VERSION_TO_INSTALL} -vvv
-echo "flash-attn installation attempted."
+    --no-deps \
+    "${FLASH_ATTN_WHEEL_URL}" -vvv
+echo "flash-attn installation attempted from wheel."
 
 echo "Step 3: Installing remaining dependencies from ${UNIRIG_REQS_FILE_IN_SPACE}..."
 # Ensure flash-attn is REMOVED from unirig_requirements.txt.
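Step 3 assumes flash-attn has already been stripped from unirig_requirements.txt; otherwise pip may try to re-resolve it and fall back to a source build. One hedged way to enforce that without editing the file by hand (UNIRIG_REQS_FILE_IN_SPACE is the script's own variable; the temporary file name is illustrative):

# Sketch: install the UniRig requirements minus any flash-attn pin, leaving the
# wheel installed in Step 2 untouched.
grep -vi '^flash[-_]attn' "${UNIRIG_REQS_FILE_IN_SPACE}" > /tmp/unirig_reqs_no_flash_attn.txt
"${BLENDER_PY_EXEC}" -m pip install --no-cache-dir -r /tmp/unirig_reqs_no_flash_attn.txt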