#!/bin/bash
# create the bin directory
mkdir -p bin
# download the Ollama binary and make it executable
wget "https://modelscope.cn/api/v1/models/issaccv/OllamaDeploy/repo?Revision=master&FilePath=ollama-linux-amd64" -O bin/ollama
chmod +x bin/ollama
# run ollama in the background
OLLAMA_FLASH_ATTENTION=1 bin/ollama serve &
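# optional sketch (assumption, not part of the original setup): wait until the
# server responds before `ollama create` is called further below
until bin/ollama list >/dev/null 2>&1; do sleep 1; done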
# download the Qwen2-7B-Instruct GGUF model
wget "https://www.modelscope.cn/api/v1/models/qwen/Qwen2-7B-Instruct-GGUF/repo?Revision=master&FilePath=qwen2-7b-instruct-q8_0.gguf" -O qwen-2-7b-instruct.gguf
# create the qwen model from the Modelfile
bin/ollama create qwen -f modelfile
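# note: `modelfile` is expected to sit next to this script; a minimal Modelfile
# for the downloaded GGUF (illustrative assumption, not the repo's actual file)
# could be as simple as:
#   FROM ./qwen-2-7b-instruct.gguf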
# download the embedding models
git clone https://www.modelscope.cn/AI-ModelScope/bge-small-zh-v1.5.git demo/BAAI/bge-small-zh-v1.5
git clone https://www.modelscope.cn/Xorbits/bge-large-zh-v1.5.git demo/BAAI/bge-large-zh-v1.5
# download the dataset
git clone https://www.modelscope.cn/datasets/issaccv/aiops2024-challenge-dataset.git demo/dataset
mv demo/dataset/question.jsonl demo/question.jsonl
unzip demo/dataset/data.zip -d demo/
# print info
echo "The Ollama server is running in the background. Please close current terminal and you can now play with demo!"