KoboldAI
GitHub Repository: KoboldAI/KoboldAI-Client
Path: blob/main/install_requirements.sh
#!/bin/bash
if [[ $1 = "cuda" || $1 = "CUDA" ]]; then
  # Download the latest micromamba release for linux-64 and extract only the bin/micromamba binary
  wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
  bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
  # Workaround: a micromamba bug can cause the first create to fail; running it twice is safe, and the second run is much faster
  bin/micromamba create -f environments/huggingface.yml -r runtime -n koboldai -y
  exit
fi
if [[ $1 = "rocm" || $1 = "ROCM" ]]; then
  # Download the latest micromamba release for linux-64 and extract only the bin/micromamba binary
  wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
  bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
  # Workaround: a micromamba bug can cause the first create to fail; running it twice is safe, and the second run is much faster
  bin/micromamba create -f environments/rocm.yml -r runtime -n koboldai-rocm -y
  exit
fi
echo "Please specify either CUDA or ROCM"
exit 1
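
A usage sketch, assuming the script is invoked from the root of the KoboldAI-Client checkout (where the environments/ directory lives); both environments are created under the local runtime prefix set by -r runtime:

./install_requirements.sh cuda   # NVIDIA GPUs: creates the koboldai environment from environments/huggingface.yml
./install_requirements.sh rocm   # AMD GPUs: creates the koboldai-rocm environment from environments/rocm.yml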