安装ollama

Linux

自动安装

1
# Official one-line installer: downloads install.sh and pipes it to sh
# (script handles arch detection, systemd setup, and GPU drivers).
curl -fsSL https://ollama.com/install.sh | sh

手工安装

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# Download the ollama Linux tarball and unpack it under /usr
# (provides /usr/bin/ollama and /usr/lib/ollama).
# Guard the cd so a failure doesn't make later commands run elsewhere.
cd /usr/local/src/ || exit 1
# -f: fail on HTTP errors instead of saving an error page as the tarball;
# -S: still print an error message on failure.
curl -fSL https://ollama.com/download/ollama-linux-amd64.tgz -o ollama-linux-amd64.tgz
tar -C /usr -xzf ollama-linux-amd64.tgz

# Dedicated system account for the service: no login shell, own group,
# home at /usr/share/ollama (where models are stored).
useradd -r -s /bin/false -U -m -d /usr/share/ollama ollama
# Add the invoking user to the ollama group; quote the substitution.
usermod -a -G ollama "$(whoami)"
# Write the systemd unit for the ollama daemon.
# NOTE: the EOF delimiter is deliberately unquoted so $PATH expands NOW,
# baking the invoking shell's PATH into the unit file.
# OLLAMA_HOST=0.0.0.0 makes the API listen on all interfaces, not just
# localhost — expose only on trusted networks.
cat >/etc/systemd/system/ollama.service<<EOF
[Unit]
Description=Ollama Service
After=network.target

[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3
Environment="PATH=$PATH"
Environment="OLLAMA_HOST=0.0.0.0"

[Install]
WantedBy=multi-user.target
EOF

# Pick up the new unit file, then start ollama now and at every boot.
systemctl daemon-reload
systemctl enable --now ollama

# Verify the API is listening on its default port 11434.
# ss (iproute2) replaces the deprecated netstat; net-tools is often
# not installed on modern distributions.
ss -tnlp | grep 11434
systemctl status ollama
systemctl restart ollama
# Tail the service log (jump to end) if anything looks wrong.
journalctl -e -u ollama
# Confirm the installed client/server version.
ollama -v

升级

自动升级

1
# Re-running the official installer upgrades an existing install in place.
curl -fsSL https://ollama.com/install.sh | sh

手动升级

1
2
# Re-download the latest tarball and unpack it over the existing install.
# -f: fail on HTTP errors instead of saving an error page as the tarball;
# -S: still report the error.
curl -fSL https://ollama.com/download/ollama-linux-amd64.tgz -o ollama-linux-amd64.tgz
tar -C /usr -xzf ollama-linux-amd64.tgz

卸载

1
2
3
4
5
6
7
8
# Full removal: stop the service, delete unit, binary, data, libraries,
# and the dedicated account.
systemctl stop ollama
systemctl disable ollama
rm /etc/systemd/system/ollama.service
# command -v is the portable replacement for the deprecated `which`;
# quoting plus `--` guard against an empty/odd result turning into a
# bare `rm` or option injection.
rm -- "$(command -v ollama)"
# Model store / service home.
rm -r /usr/share/ollama
# Bundled runtime libraries from the tarball.
rm -r /usr/lib/ollama
userdel ollama
groupdel ollama

测试

运行Llama 3.2模型

1
2
3
4
5
6
7
# Download the llama3.2 model weights.
ollama pull llama3.2
# Inspect model metadata (parameters, template, license).
ollama show llama3.2
# List locally installed models.
ollama list
# List currently loaded/running models.
ollama ps
# Delete the model when no longer needed (left commented on purpose).
#ollama rm llama3.2
# Start an interactive chat session with the model.
ollama run llama3.2
# Unload the model from memory.
ollama stop llama3.2

运行LLaVA模型

1
2
3
4
5
6
7
8
9
# Download the LLaVA (vision-language) model weights.
ollama pull llava
# Inspect model metadata (parameters, template, license).
ollama show llava
# List locally installed models.
ollama list
# List currently loaded/running models.
ollama ps
# Delete the model when no longer needed (left commented on purpose).
#ollama rm llava
# Start an interactive session with the model.
ollama run llava
# Unload the model from memory.
ollama stop llava

# Check GPU visibility/utilization (NVIDIA only).
nvidia-smi