-
Notifications
You must be signed in to change notification settings - Fork 16
Expand file tree
/
Copy pathdocker-compose.prod.yml
More file actions
145 lines (133 loc) · 5.52 KB
/
docker-compose.prod.yml
File metadata and controls
145 lines (133 loc) · 5.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
---
# OpenTranscribe Production Overrides
# Use with: docker compose -f docker-compose.yml -f docker-compose.prod.yml up
#
# This file contains ONLY production-specific settings:
#   - Pull pre-built images from Docker Hub
#   - Production volume configurations
#   - Production database initialization path
services:
  # API backend. In production the pre-built image is pulled from Docker Hub
  # (pull_policy: always); the build: section is only used for a local
  # `docker compose build` with the production Dockerfile.
  backend:
    image: davidamacey/opentranscribe-backend:latest
    build:
      context: ./backend
      dockerfile: Dockerfile.prod  # Production build
    pull_policy: always
    # No volumes needed - temp files live in container only
celery-worker:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
volumes:
# Production: only model cache, no code mounts
- ${MODEL_CACHE_DIR:-./models}/huggingface:/home/appuser/.cache/huggingface
- ${MODEL_CACHE_DIR:-./models}/torch:/home/appuser/.cache/torch
- ${MODEL_CACHE_DIR:-./models}/nltk_data:/home/appuser/.cache/nltk_data
- ${MODEL_CACHE_DIR:-./models}/sentence-transformers:/home/appuser/.cache/sentence-transformers
# No temp volume needed - temp files live in container only
celery-download-worker:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
volumes:
# Production: only model cache for download worker
- ${MODEL_CACHE_DIR:-./models}/huggingface:/home/appuser/.cache/huggingface
- ${MODEL_CACHE_DIR:-./models}/torch:/home/appuser/.cache/torch
celery-cpu-worker:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
# No volumes needed - CPU worker doesn't need model cache
celery-nlp-worker:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
# No volumes needed - NLP worker doesn't need model cache
celery-embedding-worker:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
volumes:
# Production: sentence-transformers model cache
- ${MODEL_CACHE_DIR:-./models}/sentence-transformers:/home/appuser/.cache/sentence-transformers
celery-beat:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
# No volumes needed - scheduler state lives in container only
frontend:
image: davidamacey/opentranscribe-frontend:latest
build:
context: ./frontend
dockerfile: Dockerfile.prod # Production build with NGINX
pull_policy: always
ports:
- "${FRONTEND_PORT:-5173}:8080" # Production NGINX port
environment:
- NODE_ENV=production # Production mode
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://127.0.0.1:8080"]
interval: 10s
timeout: 5s
retries: 5
start_period: 30s
flower:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
# OpenSearch production security (enable security plugin with TLS + auth)
# Requires OPENSEARCH_ADMIN_PASSWORD set in .env
# Requires TLS certificates in config/opensearch/certs/ (or use demo certs for testing)
opensearch:
environment:
# Invert OPENSEARCH_SECURITY_ENABLED → DISABLE_SECURITY_PLUGIN
# When security is enabled (true), the plugin should NOT be disabled (false)
DISABLE_SECURITY_PLUGIN: "${OPENSEARCH_DISABLE_SECURITY:-true}"
OPENSEARCH_INITIAL_ADMIN_PASSWORD: "${OPENSEARCH_ADMIN_PASSWORD:-}"
plugins.security.ssl.transport.pemcert_filepath: "certs/esnode.pem"
plugins.security.ssl.transport.pemkey_filepath: "certs/esnode-key.pem"
plugins.security.ssl.transport.pemtrustedcas_filepath: "certs/root-ca.pem"
plugins.security.ssl.http.enabled: "${OPENSEARCH_USE_TLS:-false}"
plugins.security.ssl.http.pemcert_filepath: "certs/esnode.pem"
plugins.security.ssl.http.pemkey_filepath: "certs/esnode-key.pem"
plugins.security.ssl.http.pemtrustedcas_filepath: "certs/root-ca.pem"
plugins.security.allow_default_init_securityindex: "true"
docs:
image: davidamacey/opentranscribe-docs:latest
build:
context: ./docs-site
dockerfile: Dockerfile
pull_policy: always
ports:
- "${DOCS_PORT:-5183}:8080"
environment:
- NODE_ENV=production
# GPU Scaled Worker - Production overrides
# NOTE: No 'scale' parameter here - base has scale: 0, gpu-scale.yml sets scale: 1
# This overlay only provides production-specific settings (image, build, pull_policy)
celery-worker-gpu-scaled:
image: davidamacey/opentranscribe-backend:latest
build:
context: ./backend
dockerfile: Dockerfile.prod # Production build
pull_policy: always
volumes:
# Production: Model cache directories
- ${MODEL_CACHE_DIR:-./models}/huggingface:/home/appuser/.cache/huggingface
- ${MODEL_CACHE_DIR:-./models}/torch:/home/appuser/.cache/torch
- ${MODEL_CACHE_DIR:-./models}/nltk_data:/home/appuser/.cache/nltk_data
- ${MODEL_CACHE_DIR:-./models}/sentence-transformers:/home/appuser/.cache/sentence-transformers
# No temp volume needed - temp files live in container only