|
| 1 | + |
| 2 | +## 背景 |
| 3 | +我忘記 ollama 是跟哪個 docker-compose 去跑的,所以我先去檢查:
| 4 | + |
| 5 | +``` |
| 6 | +docker inspect -f '{{json .NetworkSettings.Networks}}' ollama | jq docker inspect -f '{{json .NetworkSettings.Networks}}' n8n | jq docker inspect -f '{{json .NetworkSettings.Networks}}' open-webui | jq { "open-webui_default": { "IPAMConfig": null, "Links": null, "Aliases": [ "ollama", "ollama" ], "DriverOpts": null, "GwPriority": 0, "NetworkID": "1cfe167fb156ef45ad6eaeb87aa6d2dc96771a30a38dd8ae81860b47f2c6ca74", "EndpointID": "9e9d69ef8ccf2db163dd665b7b86e29ef6196b5f3de26e9769c5c1b8c3554a09", "Gateway": "172.20.0.1", "IPAddress": "172.20.0.2", "MacAddress": "62:09:20:3b:82:c1", "IPPrefixLen": 16, "IPv6Gateway": "", "GlobalIPv6Address": "", "GlobalIPv6PrefixLen": 0, "DNSNames": [ "ollama", "8eaa6f07c73f" ] } } { "n8n_default": { "IPAMConfig": null, "Links": null, "Aliases": [ "n8n", "n8n" ], "DriverOpts": null, "GwPriority": 0, "NetworkID": "11b0037a58a90c1943a219050e502bd9f9c29e170913035e8f0a250448fca17f", "EndpointID": "49b7e45e01efc3ee5ef334d0cc8fa930e0a1915fc37ee04e3a2e9f707fbc62dd", "Gateway": "172.19.0.1", "IPAddress": "172.19.0.2", "MacAddress": "da:09:74:e4:e8:f7", "IPPrefixLen": 16, "IPv6Gateway": "", "GlobalIPv6Address": "", "GlobalIPv6PrefixLen": 0, "DNSNames": [ "n8n", "b146ba4956b8" ] } } { "open-webui_default": { "IPAMConfig": null, "Links": null, "Aliases": [ "open-webui", "open-webui" ], "DriverOpts": null, "GwPriority": 0, "NetworkID": "1cfe167fb156ef45ad6eaeb87aa6d2dc96771a30a38dd8ae81860b47f2c6ca74", "EndpointID": "3bb710da086477263ac10125ed0ba5ab55183f89539881b228562b1024b087a7", "Gateway": "172.20.0.1", "IPAddress": "172.20.0.3", "MacAddress": "26:7d:2c:94:c4:43", "IPPrefixLen": 16, "IPv6Gateway": "", "GlobalIPv6Address": "", "GlobalIPv6PrefixLen": 0, "DNSNames": [ "open-webui", "8cb4dc565e4a" ] } } |
| 7 | +``` |
| 8 | +發現 ollama 及 open-webui 都在 172.20.0.x 這個網段。
| 9 | +然後我印象中之前是和 open-webui 一起設定的,用 labels 確認:
| 10 | +``` |
| 11 | +docker inspect open-webui --format '{{json .Config.Labels}}' | jq |
| 12 | +``` |
| 13 | + |
| 14 | +``` |
| 15 | +docker inspect open-webui --format '{{json .Config.Labels}}' | jq |
| 16 | +{ |
| 17 | + "com.docker.compose.config-hash": "7ac5135e913eb6c9fa443470acad7d6a48786cdff0458a32445f4067109f79dc", |
| 18 | + "com.docker.compose.container-number": "1", |
| 19 | + "com.docker.compose.depends_on": "ollama:service_started:false", |
| 20 | + "com.docker.compose.image": "sha256:9173df40b9879b99cd5332d38ec966973f8c0ffb77dbafb2ec96caea2548a8d3", |
| 21 | + "com.docker.compose.oneoff": "False", |
| 22 | + "com.docker.compose.project": "open-webui", |
| 23 | + "com.docker.compose.project.config_files": "/home/alanhc/workspace/open-webui/docker-compose.yaml,/home/alanhc/workspace/open-webui/docker-compose.gpu.yaml", |
| 24 | + "com.docker.compose.project.working_dir": "/home/alanhc/workspace/open-webui", |
| 25 | + "com.docker.compose.service": "open-webui", |
| 26 | + "com.docker.compose.version": "2.40.3", |
| 27 | + "org.opencontainers.image.created": "2025-12-02T22:29:30.612Z", |
| 28 | + "org.opencontainers.image.description": "User-friendly AI Interface (Supports Ollama, OpenAI API, ...)", |
| 29 | + "org.opencontainers.image.licenses": "NOASSERTION", |
| 30 | + "org.opencontainers.image.revision": "6f1486ffd0cb288d0e21f41845361924e0d742b3", |
| 31 | + "org.opencontainers.image.source": "https://github.com/open-webui/open-webui", |
| 32 | + "org.opencontainers.image.title": "open-webui", |
| 33 | + "org.opencontainers.image.url": "https://github.com/open-webui/open-webui", |
| 34 | + "org.opencontainers.image.version": "main" |
| 35 | +} |
| 36 | +``` |
| 37 | + |
| 38 | +接著我去看設定 |
| 39 | + |
| 40 | +``` |
| 41 | +cd /home/alanhc/workspace/open-webui |
| 42 | +docker compose -f docker-compose.yaml -f docker-compose.gpu.yaml config |
| 43 | +name: open-webui |
| 44 | +services: |
| 45 | + ollama: |
| 46 | + container_name: ollama |
| 47 | + deploy: |
| 48 | + resources: |
| 49 | + reservations: |
| 50 | + devices: |
| 51 | + - capabilities: |
| 52 | + - gpu |
| 53 | + driver: nvidia |
| 54 | + count: 1 |
| 55 | + image: ollama/ollama:latest |
| 56 | + networks: |
| 57 | + default: null |
| 58 | + pull_policy: always |
| 59 | + restart: unless-stopped |
| 60 | + tty: true |
| 61 | + volumes: |
| 62 | + - type: volume |
| 63 | + source: ollama |
| 64 | + target: /root/.ollama |
| 65 | + volume: {} |
| 66 | + open-webui: |
| 67 | + build: |
| 68 | + context: /home/alanhc/workspace/open-webui |
| 69 | + dockerfile: Dockerfile |
| 70 | + container_name: open-webui |
| 71 | + depends_on: |
| 72 | + ollama: |
| 73 | + condition: service_started |
| 74 | + required: true |
| 75 | + environment: |
| 76 | + OLLAMA_BASE_URL: http://ollama:11434 |
| 77 | + WEBUI_SECRET_KEY: "" |
| 78 | + WEBUI_URL: https://openwebui.0xfanslab.com |
| 79 | + extra_hosts: |
| 80 | + - host.docker.internal=host-gateway |
| 81 | + image: ghcr.io/open-webui/open-webui:main |
| 82 | + networks: |
| 83 | + default: null |
| 84 | + ports: |
| 85 | + - mode: ingress |
| 86 | + target: 8080 |
| 87 | + published: "8080" |
| 88 | + protocol: tcp |
| 89 | + restart: unless-stopped |
| 90 | + volumes: |
| 91 | + - type: volume |
| 92 | + source: open-webui |
| 93 | + target: /app/backend/data |
| 94 | + volume: {} |
| 95 | +networks: |
| 96 | + default: |
| 97 | + name: open-webui_default |
| 98 | +volumes: |
| 99 | + ollama: |
| 100 | + name: open-webui_ollama |
| 101 | + open-webui: |
| 102 | + name: open-webui_open-web |
| 103 | +``` |
| 104 | +再看看它的 entrypoint 在哪?
| 105 | +``` |
| 106 | +docker inspect open-webui --format 'Entrypoint={{json .Config.Entrypoint}} Cmd={{json .Config.Cmd}}' |
| 107 | +Entrypoint=null Cmd=["bash","start.sh"] |
| 108 | +``` |
| 109 | + |
| 110 | +現在我知道 docker-compose 檔案路徑了:`/home/alanhc/workspace/open-webui/docker-compose.yaml`。
| 111 | + |
| 112 | +我要使用 caddy 當作對外入口,這樣就可以用 token 驗證:
| 113 | +``` |
| 114 | +:8081 { |
| 115 | +
|
| 116 | + # (可選)讓 OPTIONS preflight 直接過,避免某些前端跨域卡住 |
| 117 | + @preflight method OPTIONS |
| 118 | + handle @preflight { |
| 119 | + respond 204 |
| 120 | + } |
| 121 | +
|
| 122 | + # Token 驗證:必須完全符合這個 header |
| 123 | + @authed header Authorization "Bearer {env.OLLAMA_PROXY_TOKEN}" |
| 124 | +
|
| 125 | + handle @authed { |
| 126 | + reverse_proxy ollama:11434 |
| 127 | + } |
| 128 | +
|
| 129 | + handle { |
| 130 | + respond "Unauthorized" 401 |
| 131 | + } |
| 132 | +} |
| 133 | +
|
| 134 | +``` |
| 135 | + |
| 136 | +我去改 docker-compose.yaml:
| 137 | +``` |
| 138 | +services: |
| 139 | + ollama: |
| 140 | + volumes: |
| 141 | + - ollama:/root/.ollama |
| 142 | + container_name: ollama |
| 143 | + pull_policy: always |
| 144 | + tty: true |
| 145 | + restart: unless-stopped |
| 146 | + image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest} |
| 147 | + environment: |
| 148 | + - OLLAMA_HOST=0.0.0.0:11434 |
| 149 | + # 不要 ports,讓 ollama 本體不要直接對外 |
| 150 | +
|
| 151 | + # ✅ 新增:對外入口(帶 token 才能用) |
| 152 | + ollama-proxy: |
| 153 | + image: caddy:2 |
| 154 | + container_name: ollama-proxy |
| 155 | + restart: unless-stopped |
| 156 | + depends_on: |
| 157 | + - ollama |
| 158 | + environment: |
| 159 | + - OLLAMA_PROXY_TOKEN=${OLLAMA_PROXY_TOKEN} |
| 160 | + volumes: |
| 161 | + - ./Caddyfile:/etc/caddy/Caddyfile:ro |
| 162 | + ports: |
| 163 | + - "11435:8081" # 對外用這個 port(之後也可掛網域) |
| 164 | +
|
| 165 | + open-webui: |
| 166 | + build: |
| 167 | + context: . |
| 168 | + dockerfile: Dockerfile |
| 169 | + image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main} |
| 170 | + container_name: open-webui |
| 171 | + volumes: |
| 172 | + - open-webui:/app/backend/data |
| 173 | + depends_on: |
| 174 | + - ollama |
| 175 | + ports: |
| 176 | + - "8080:8080" |
| 177 | + environment: |
| 178 | + - 'WEBUI_URL=https://openwebui.0xfanslab.com' |
| 179 | + - 'OLLAMA_BASE_URL=http://ollama:11434' |
| 180 | + - 'WEBUI_SECRET_KEY=' |
| 181 | + extra_hosts: |
| 182 | + - host.docker.internal:host-gateway |
| 183 | + restart: unless-stopped |
| 184 | +
|
| 185 | +volumes: |
| 186 | + ollama: {} |
| 187 | + open-webui: {} |
| 188 | +
|
| 189 | +``` |
| 190 | + |
| 191 | + |
| 192 | +產生密鑰:`openssl rand -base64 32`
| 193 | + |
| 194 | +在 `.env` 加上 `OLLAMA_PROXY_TOKEN=<前面產生的token>`
| 195 | + |
| 196 | +``` |
| 197 | +docker compose -f docker-compose.yaml -f docker-compose.gpu.yaml up -d |
| 198 | +``` |
| 199 | +測試 ollama 有沒有用 GPU:
| 200 | +``` |
| 201 | +docker inspect ollama --format '{{json .HostConfig.DeviceRequests}}' | jq |
| 202 | +
|
| 203 | +``` |
| 204 | + |
| 205 | +測試不帶 token 直接打(預期被擋下,回 401):
| 206 | +``` |
| 207 | +curl -i http://127.0.0.1:11435/api/tags |
| 208 | +HTTP/1.1 401 Unauthorized |
| 209 | +Content-Type: text/plain; charset=utf-8 |
| 210 | +Server: Caddy |
| 211 | +Date: Sat, 13 Dec 2025 07:37:21 GMT |
| 212 | +Content-Length: 12 |
| 213 | +
|
| 214 | +Unauthorized |
| 215 | +``` |
| 216 | + |
| 217 | +帶入 token 再打一次(預期成功,回 200):
| 218 | +``` |
| 219 | +curl -i http://127.0.0.1:11435/api/tags \ |
| 220 | + -H "Authorization: Bearer <token>" |
| 221 | +HTTP/1.1 200 OK |
| 222 | +Content-Length: 1030 |
| 223 | +Content-Type: application/json; charset=utf-8 |
| 224 | +Date: Sat, 13 Dec 2025 07:38:06 GMT |
| 225 | +Via: 1.1 Caddy |
| 226 | +``` |
0 commit comments