Upload lora-scripts/install.bash with huggingface_hub
Browse files- lora-scripts/install.bash +71 -0
lora-scripts/install.bash
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
#
# Install script for lora-scripts.
#
# Optionally creates and activates a Python venv next to this script, then
# installs a torch/torchvision/xformers build matched to the detected CUDA
# version, and finally the project requirements.
#
# Usage: install.bash [--disable-venv]
#   --disable-venv  skip creating/activating the venv

set -u

# Absolute directory containing this script, regardless of caller's CWD.
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
create_venv=true

# Parse arguments. Unknown arguments are silently ignored (original behavior).
# Using $# instead of [ -n "$1" ] so the loop is safe under `set -u`.
while (( $# > 0 )); do
  case "$1" in
    --disable-venv)
      create_venv=false
      ;;
  esac
  shift
done

if $create_venv; then
  echo "Creating python venv..."
  # Create the venv next to this script (not in the caller's CWD) so the
  # activation path below always matches the venv that was just created.
  python3 -m venv "$script_dir/venv" || { echo "venv creation failed" >&2; exit 1; }
  # shellcheck disable=SC1091
  source "$script_dir/venv/bin/activate"
  echo "active venv"
fi

echo "Installing torch & xformers..."

# Detect the CUDA version: prefer the driver report (nvidia-smi), fall back
# to the toolkit (nvcc). Guard with `command -v` so a missing tool does not
# spill "command not found" to stderr.
cuda_version=""
if command -v nvidia-smi >/dev/null 2>&1; then
  cuda_version=$(nvidia-smi | grep -oiP 'CUDA Version: \K[\d\.]+')
fi
if [[ -z "$cuda_version" ]] && command -v nvcc >/dev/null 2>&1; then
  cuda_version=$(nvcc --version | grep -oiP 'release \K[\d\.]+')
fi
if [[ -z "$cuda_version" ]]; then
  echo "Could not detect CUDA version (neither nvidia-smi nor nvcc usable)" >&2
  exit 1
fi
cuda_major_version=$(echo "$cuda_version" | awk -F'.' '{print $1}')
cuda_minor_version=$(echo "$cuda_version" | awk -F'.' '{print $2}')

echo "CUDA Version: $cuda_version"

if (( cuda_major_version >= 12 )); then
  echo "install torch 2.2.1+cu121"
  pip install torch==2.2.1+cu121 torchvision==0.17.1+cu121 --extra-index-url https://download.pytorch.org/whl/cu121
  # xformers 0.0.25 for the cu121 torch stack comes from the cu121 index
  # (the original pointed at cu118 here, which mismatches the torch build).
  pip install --no-deps xformers==0.0.25 --extra-index-url https://download.pytorch.org/whl/cu121
elif (( cuda_major_version == 11 && cuda_minor_version >= 8 )); then
  echo "install torch 2.2.1+cu118"
  pip install torch==2.2.1+cu118 torchvision==0.17.1+cu118 --extra-index-url https://download.pytorch.org/whl/cu118
  pip install --no-deps xformers==0.0.25+cu118 --extra-index-url https://download.pytorch.org/whl/cu118
elif (( cuda_major_version == 11 && cuda_minor_version >= 6 )); then
  echo "install torch 1.12.1+cu116"
  pip install torch==1.12.1+cu116 torchvision==0.13.1+cu116 --extra-index-url https://download.pytorch.org/whl/cu116
  # for RTX3090+cu113/cu116 xformers, we need to install this version from source. You can also try xformers==0.0.18
  pip install --upgrade git+https://github.com/facebookresearch/xformers.git@0bad001ddd56c080524d37c84ff58d9cd030ebfd
  pip install triton==2.0.0.dev20221202
elif (( cuda_major_version == 11 && cuda_minor_version >= 2 )); then
  echo "install torch 1.12.1+cu113"
  # +cu113 wheels are hosted on the cu113 index (the original requested them
  # from the cu116 index, which only carries +cu116 builds).
  pip install torch==1.12.1+cu113 torchvision==0.13.1+cu113 --extra-index-url https://download.pytorch.org/whl/cu113
  pip install --upgrade git+https://github.com/facebookresearch/xformers.git@0bad001ddd56c080524d37c84ff58d9cd030ebfd
  pip install triton==2.0.0.dev20221202
else
  echo "Unsupported cuda version:$cuda_version"
  exit 1
fi

echo "Installing deps..."
cd "$script_dir/sd-scripts" || { echo "missing directory: $script_dir/sd-scripts" >&2; exit 1; }

pip install --upgrade -r requirements.txt

cd "$script_dir" || exit 1

pip install --upgrade -r requirements.txt

echo "Install completed"