From 4cf6b1ee392df8bab999d12d3f15f1eb0c23277e Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Fri, 2 Jan 2026 01:53:57 -0500 Subject: [PATCH 1/9] feat: add one-click docker-compose deployment and setup script --- .env.example | 43 ++++++++++++++++++++++++++++ docker-compose.yml | 71 ++++++++++++++++++++++++++++++++++++++++++++++ setup.sh | 56 ++++++++++++++++++++++++++++++++++++ 3 files changed, 170 insertions(+) create mode 100644 .env.example create mode 100644 docker-compose.yml create mode 100755 setup.sh diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000..2626455ded --- /dev/null +++ b/.env.example @@ -0,0 +1,43 @@ +# Omi One-Click Deployment Configuration +# Copy this to .env and fill in the values + +# --- Core API Keys --- +# Required for transcription +DEEPGRAM_API_KEY= +# Required for LLM features +OPENAI_API_KEY= +# Optional: Other LLM providers +ANTHROPIC_API_KEY= + +# --- Backend Configuration --- +REDIS_DB_HOST=redis +REDIS_DB_PORT=6379 +REDIS_DB_PASSWORD= + +# Vector Database (Pinecone is default in current code) +PINECONE_API_KEY= +PINECONE_INDEX_NAME=memories-backend + +# Database (Firebase/Firestore is default) +# Provide the JSON content of your service account if using Firestore +SERVICE_ACCOUNT_JSON= + +# --- Service URLs (Internal Docker Networking) --- +HOSTED_VAD_API_URL=http://vad:8080/v1/vad +HOSTED_SPEAKER_EMBEDDING_API_URL=http://diarizer:8080 +HOSTED_PUSHER_API_URL=http://pusher:8080 + +# --- Frontend Configuration --- +# Public URL of the backend (use localhost for local dev) +NEXT_PUBLIC_API_URL=http://localhost:8080 +# Firebase config for the frontend +NEXT_PUBLIC_FIREBASE_API_KEY= +NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN= +NEXT_PUBLIC_FIREBASE_PROJECT_ID= +NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET= +NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID= +NEXT_PUBLIC_FIREBASE_APP_ID= + +# --- Development / Debugging --- +ADMIN_KEY=some_secret_key +ENCRYPTION_SECRET=omi_default_secret_change_me diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000..d5dab49edc --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,71 @@ +services: + backend: + build: + context: . + dockerfile: backend/Dockerfile + restart: always + ports: + - "8080:8080" + env_file: + - .env + depends_on: + - redis + + pusher: + build: + context: . + dockerfile: backend/pusher/Dockerfile + restart: always + ports: + - "8081:8080" + env_file: + - .env + depends_on: + - redis + + frontend: + build: + context: . + dockerfile: web/frontend/Dockerfile + args: + - API_URL=http://localhost:8080 + - NEXT_PUBLIC_FIREBASE_API_KEY=${NEXT_PUBLIC_FIREBASE_API_KEY:-fake_key_for_build} + - NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN=${NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN:-omi-app.firebaseapp.com} + - NEXT_PUBLIC_FIREBASE_PROJECT_ID=${NEXT_PUBLIC_FIREBASE_PROJECT_ID:-omi-app} + - NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET=${NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET:-omi-app.appspot.com} + - NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID=${NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID:-123456789} + - NEXT_PUBLIC_FIREBASE_APP_ID=${NEXT_PUBLIC_FIREBASE_APP_ID:-1:123456789:web:abcdef} + - NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID=${NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID:-G-ABCDEF} + restart: always + ports: + - "3000:3000" + env_file: + - .env + extra_hosts: + - "host.docker.internal:host-gateway" + depends_on: + - backend + + redis: + image: redis:alpine + restart: always + ports: + - "6379:6379" + + # Optional: Diarizer (Requires NVIDIA GPU and NVIDIA Container Toolkit) + # diarizer: + # build: + # context: . 
+ # dockerfile: backend/diarizer/Dockerfile + # restart: always + # ports: + # - "8082:8080" + # env_file: + # - .env + # deploy: + # resources: + # reservations: + # devices: + # - driver: nvidia + # count: 1 + # capabilities: [gpu] diff --git a/setup.sh b/setup.sh new file mode 100755 index 0000000000..2162dc4c4e --- /dev/null +++ b/setup.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +# Omi One-Click Setup Script +# Designed for backend developers and customers with low technical expertise. + +set -e + +echo "==========================================" +echo " 🚀 Omi One-Click Setup (Docker) " +echo "==========================================" + +# Check for Docker +if ! command -v docker &> /dev/null; then + echo "❌ Error: Docker is not installed. Please install Docker first: https://docs.docker.com/get-docker/" + exit 1 +fi + +# Check for Docker Compose +if ! docker compose version &> /dev/null; then + echo "❌ Error: Docker Compose is not installed. Please install it or use a newer version of Docker Desktop." + exit 1 +fi + +# Create .env if it doesn't exist +if [ ! -f .env ]; then + echo "📄 Creating .env from .env.example..." + cp .env.example .env + echo "⚠️ Action Required: Please edit the .env file and add your API keys." + echo " At minimum, you need: DEEPGRAM_API_KEY and OPENAI_API_KEY." + + # Optional: try to open the editor + if command -v nano &> /dev/null; then + read -p "Would you like to edit .env now? (y/n) " -n 1 -r + echo + if [[ $REPLY =~ ^[Yy]$ ]]; then + nano .env + fi + fi +fi + +# Build and Start +echo "🛠️ Building and starting Omi services..." +docker compose up -d --build + +echo "" +echo "==========================================" +echo "✅ Omi is now starting up!" +echo "" +echo "Services available at:" +echo "👉 Frontend: http://localhost:3000" +echo "👉 Backend: http://localhost:8080" +echo "👉 Pusher: http://localhost:8081" +echo "" +echo "To view logs, run: docker compose logs -f" +echo "To stop Omi, run: docker compose down" +echo "==========================================" From be9ebffaefe44741a8c36b5bd92b6647174814bd Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Fri, 2 Jan 2026 02:19:59 -0500 Subject: [PATCH 2/9] feat: implement mock fallbacks for local/offline deployment --- backend/database/_client.py | 38 ++++++++++++++++++- backend/database/vector_db.py | 29 ++++++++++++--- backend/main.py | 15 +++++--- backend/utils/conversations/search.py | 39 ++++++++++++++++---- backend/utils/other/storage.py | 53 ++++++++++++++++++++++++--- backend/utils/translation.py | 28 +++++++++++++- 6 files changed, 175 insertions(+), 27 deletions(-) diff --git a/backend/database/_client.py b/backend/database/_client.py index 943d6e60de..77c7dacb2e 100644 --- a/backend/database/_client.py +++ b/backend/database/_client.py @@ -4,6 +4,38 @@ import uuid from google.cloud import firestore +from google.auth.exceptions import DefaultCredentialsError + +class MockFirestore: + def collection(self, name): + return MockCollection() + +class MockCollection: + def stream(self): + return [] + def document(self, doc_id): + return MockDocument(doc_id) + def add(self, data): + return None + def where(self, field, op, value): + return self + +class MockDocument: + def __init__(self, doc_id): + self.id = doc_id + def set(self, data): + return None + def get(self): + return MockSnapshot() + def update(self, data): + return None + def delete(self): + return None + +class MockSnapshot: + exists = False + def to_dict(self): + return {} if os.environ.get('SERVICE_ACCOUNT_JSON'): service_account_info = 
json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) @@ -11,7 +43,11 @@ with open('google-credentials.json', 'w') as f: json.dump(service_account_info, f) -db = firestore.Client() +try: + db = firestore.Client() +except (DefaultCredentialsError, Exception) as e: + print(f"⚠️ Warning: Firestore connection failed ({e}). Using MockFirestore for local dev.") + db = MockFirestore() def get_users_uid(): diff --git a/backend/database/vector_db.py b/backend/database/vector_db.py index f9a883d5e4..ce501e5dae 100644 --- a/backend/database/vector_db.py +++ b/backend/database/vector_db.py @@ -9,11 +9,30 @@ from models.conversation import Conversation from utils.llm.clients import embeddings -if os.getenv('PINECONE_API_KEY') is not None: - pc = Pinecone(api_key=os.getenv('PINECONE_API_KEY', '')) - index = pc.Index(os.getenv('PINECONE_INDEX_NAME', '')) -else: - index = None +class MockIndex: + def upsert(self, vectors, namespace): + print(f"Mock upsert: {len(vectors)} vectors") + return {"upserted_count": len(vectors)} + def query(self, vector, top_k, include_metadata=False, filter=None, namespace=None, include_values=False): + print("Mock query") + return {"matches": []} + def update(self, id, set_metadata, namespace): + print(f"Mock update: {id}") + return {} + def delete(self, ids, namespace): + print(f"Mock delete: {ids}") + return {} + +try: + if os.getenv('PINECONE_API_KEY'): + pc = Pinecone(api_key=os.getenv('PINECONE_API_KEY')) + index = pc.Index(os.getenv('PINECONE_INDEX_NAME', '')) + else: + print("⚠️ Warning: PINECONE_API_KEY not set. Using MockIndex.") + index = MockIndex() +except Exception as e: + print(f"⚠️ Warning: Pinecone init failed ({e}). Using MockIndex.") + index = MockIndex() def _get_data(uid: str, conversation_id: str, vector: List[float]): diff --git a/backend/main.py b/backend/main.py index c1e5f713be..09c2247ca3 100644 --- a/backend/main.py +++ b/backend/main.py @@ -43,12 +43,15 @@ from utils.other.timeout import TimeoutMiddleware -if os.environ.get('SERVICE_ACCOUNT_JSON'): - service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) - credentials = firebase_admin.credentials.Certificate(service_account_info) - firebase_admin.initialize_app(credentials) -else: - firebase_admin.initialize_app() +try: + if os.environ.get('SERVICE_ACCOUNT_JSON'): + service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) + credentials = firebase_admin.credentials.Certificate(service_account_info) + firebase_admin.initialize_app(credentials) + else: + firebase_admin.initialize_app() +except Exception as e: + print(f"⚠️ Warning: Firebase Admin initialization failed ({e}). 
Auth & DB features may not work.") app = FastAPI() diff --git a/backend/utils/conversations/search.py b/backend/utils/conversations/search.py index 9892b8b4d1..4a4c3bd95e 100644 --- a/backend/utils/conversations/search.py +++ b/backend/utils/conversations/search.py @@ -5,13 +5,38 @@ import typesense -client = typesense.Client( - { - 'nodes': [{'host': os.getenv('TYPESENSE_HOST'), 'port': os.getenv('TYPESENSE_HOST_PORT'), 'protocol': 'https'}], - 'api_key': os.getenv('TYPESENSE_API_KEY'), - 'connection_timeout_seconds': 2, - } -) +class MockTypesenseClient: + def __init__(self): + self.collections = MockCollections() + +class MockCollections: + def __getitem__(self, key): + return MockDocuments() + +class MockDocuments: + @property + def documents(self): + return self + + def search(self, params): + print(f"Mock search with params: {params}") + return {'hits': [], 'found': 0} + +try: + if os.getenv('TYPESENSE_API_KEY'): + client = typesense.Client( + { + 'nodes': [{'host': os.getenv('TYPESENSE_HOST'), 'port': os.getenv('TYPESENSE_HOST_PORT'), 'protocol': 'https'}], + 'api_key': os.getenv('TYPESENSE_API_KEY'), + 'connection_timeout_seconds': 2, + } + ) + else: + print("⚠️ Warning: TYPESENSE_API_KEY not set. Using MockTypesenseClient.") + client = MockTypesenseClient() +except Exception as e: + print(f"⚠️ Warning: Typesense init failed ({e}). Using MockTypesenseClient.") + client = MockTypesenseClient() def search_conversations( diff --git a/backend/utils/other/storage.py b/backend/utils/other/storage.py index 8089b9a8fa..8b82b9488a 100644 --- a/backend/utils/other/storage.py +++ b/backend/utils/other/storage.py @@ -17,12 +17,53 @@ from utils import encryption from database import users as users_db -if os.environ.get('SERVICE_ACCOUNT_JSON'): - service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) - credentials = service_account.Credentials.from_service_account_info(service_account_info) - storage_client = storage.Client(credentials=credentials) -else: - storage_client = storage.Client() +class MockStorageClient: + def bucket(self, name): + return MockBucket(name) + +class MockBucket: + def __init__(self, name): + self.name = name + def blob(self, name): + return MockBlob(name, self.name) + def list_blobs(self, prefix=None): + return [] + +class MockBlob: + def __init__(self, name, bucket_name): + self.name = name + self.bucket_name = bucket_name + self.size = 0 + self.time_created = None + self.metadata = {} + self.cache_control = None + def upload_from_filename(self, filename): + print(f"Mock upload from filename: {filename} to {self.name}") + def upload_from_string(self, data, content_type=None): + print(f"Mock upload from string to {self.name}") + def download_to_filename(self, filename): + print(f"Mock download to {filename} from {self.name}") + def delete(self): + print(f"Mock delete {self.name}") + def exists(self): + return False + def generate_signed_url(self, **kwargs): + return f"http://localhost:8080/_mock_signed_url/{self.bucket_name}/{self.name}" + def reload(self): + pass + def download_as_bytes(self): + return b"" + +try: + if os.environ.get('SERVICE_ACCOUNT_JSON'): + service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) + credentials = service_account.Credentials.from_service_account_info(service_account_info) + storage_client = storage.Client(credentials=credentials) + else: + storage_client = storage.Client() +except Exception as e: + print(f"⚠️ Warning: Google Storage connection failed ({e}). 
Using MockStorageClient for local dev.") + storage_client = MockStorageClient() speech_profiles_bucket = os.getenv('BUCKET_SPEECH_PROFILES') postprocessing_audio_bucket = os.getenv('BUCKET_POSTPROCESSING') diff --git a/backend/utils/translation.py b/backend/utils/translation.py index 124887624b..eb7edd8a49 100644 --- a/backend/utils/translation.py +++ b/backend/utils/translation.py @@ -111,8 +111,32 @@ r'\b(' + '|'.join(re.escape(word) for word in _non_lexical_utterances) + r')\b', re.IGNORECASE ) -# Initialize the translation client globally -_client = translate_v3.TranslationServiceClient() +class MockTranslationServiceClient: + def detect_language(self, parent, content, mime_type): + print(f"Mock detect language: {content[:20]}...") + return MockDetectResponse() + + def translate_text(self, contents, parent, mime_type, target_language_code): + print(f"Mock translate text to {target_language_code}") + return MockTranslateResponse(contents[0]) + +class MockDetectResponse: + languages = [] + +class MockTranslateResponse: + def __init__(self, text): + self.translations = [MockTranslation(text)] + +class MockTranslation: + def __init__(self, text): + self.translated_text = text + +try: + _client = translate_v3.TranslationServiceClient() +except Exception as e: + print(f"⚠️ Warning: Google Translation init failed ({e}). Using MockTranslationServiceClient.") + _client = MockTranslationServiceClient() + _parent = f"projects/{PROJECT_ID}/locations/global" _mime_type = "text/plain" From 0868789ee0edfadde29e56cc0eed2ef5e23655b8 Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Fri, 2 Jan 2026 02:58:33 -0500 Subject: [PATCH 3/9] security: generate random secrets in setup.sh and refine exception handling --- backend/database/_client.py | 2 +- backend/database/vector_db.py | 4 ++-- backend/main.py | 4 +++- backend/utils/conversations/search.py | 4 +++- backend/utils/other/storage.py | 4 +++- backend/utils/translation.py | 4 +++- setup.sh | 10 ++++++++++ 7 files changed, 25 insertions(+), 7 deletions(-) diff --git a/backend/database/_client.py b/backend/database/_client.py index 77c7dacb2e..009e37fe9d 100644 --- a/backend/database/_client.py +++ b/backend/database/_client.py @@ -45,7 +45,7 @@ def to_dict(self): try: db = firestore.Client() -except (DefaultCredentialsError, Exception) as e: +except (DefaultCredentialsError, ValueError) as e: print(f"⚠️ Warning: Firestore connection failed ({e}). Using MockFirestore for local dev.") db = MockFirestore() diff --git a/backend/database/vector_db.py b/backend/database/vector_db.py index ce501e5dae..351eabe9d0 100644 --- a/backend/database/vector_db.py +++ b/backend/database/vector_db.py @@ -4,7 +4,7 @@ from datetime import datetime, timezone, timedelta from typing import List -from pinecone import Pinecone +from pinecone import Pinecone, PineconeConfigurationError from models.conversation import Conversation from utils.llm.clients import embeddings @@ -30,7 +30,7 @@ def delete(self, ids, namespace): else: print("⚠️ Warning: PINECONE_API_KEY not set. Using MockIndex.") index = MockIndex() -except Exception as e: +except (PineconeConfigurationError, ValueError, KeyError) as e: print(f"⚠️ Warning: Pinecone init failed ({e}). 
Using MockIndex.") index = MockIndex() diff --git a/backend/main.py b/backend/main.py index 09c2247ca3..3a0befe77a 100644 --- a/backend/main.py +++ b/backend/main.py @@ -43,6 +43,8 @@ from utils.other.timeout import TimeoutMiddleware +from google.auth.exceptions import DefaultCredentialsError + try: if os.environ.get('SERVICE_ACCOUNT_JSON'): service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) @@ -50,7 +52,7 @@ firebase_admin.initialize_app(credentials) else: firebase_admin.initialize_app() -except Exception as e: +except (DefaultCredentialsError, ValueError) as e: print(f"⚠️ Warning: Firebase Admin initialization failed ({e}). Auth & DB features may not work.") app = FastAPI() diff --git a/backend/utils/conversations/search.py b/backend/utils/conversations/search.py index 4a4c3bd95e..734c7e9f85 100644 --- a/backend/utils/conversations/search.py +++ b/backend/utils/conversations/search.py @@ -5,6 +5,8 @@ import typesense +from typesense.exceptions import ConfigError + class MockTypesenseClient: def __init__(self): self.collections = MockCollections() @@ -34,7 +36,7 @@ def search(self, params): else: print("⚠️ Warning: TYPESENSE_API_KEY not set. Using MockTypesenseClient.") client = MockTypesenseClient() -except Exception as e: +except (ConfigError, ValueError, KeyError) as e: print(f"⚠️ Warning: Typesense init failed ({e}). Using MockTypesenseClient.") client = MockTypesenseClient() diff --git a/backend/utils/other/storage.py b/backend/utils/other/storage.py index 8b82b9488a..81893c77b0 100644 --- a/backend/utils/other/storage.py +++ b/backend/utils/other/storage.py @@ -54,6 +54,8 @@ def reload(self): def download_as_bytes(self): return b"" +from google.auth.exceptions import DefaultCredentialsError + try: if os.environ.get('SERVICE_ACCOUNT_JSON'): service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) @@ -61,7 +63,7 @@ def download_as_bytes(self): storage_client = storage.Client(credentials=credentials) else: storage_client = storage.Client() -except Exception as e: +except (DefaultCredentialsError, ValueError, KeyError) as e: print(f"⚠️ Warning: Google Storage connection failed ({e}). Using MockStorageClient for local dev.") storage_client = MockStorageClient() diff --git a/backend/utils/translation.py b/backend/utils/translation.py index eb7edd8a49..8470cdb358 100644 --- a/backend/utils/translation.py +++ b/backend/utils/translation.py @@ -131,9 +131,11 @@ class MockTranslation: def __init__(self, text): self.translated_text = text +from google.auth.exceptions import DefaultCredentialsError + try: _client = translate_v3.TranslationServiceClient() -except Exception as e: +except (DefaultCredentialsError, ValueError, KeyError) as e: print(f"⚠️ Warning: Google Translation init failed ({e}). Using MockTranslationServiceClient.") _client = MockTranslationServiceClient() diff --git a/setup.sh b/setup.sh index 2162dc4c4e..fc44a0718b 100755 --- a/setup.sh +++ b/setup.sh @@ -25,6 +25,16 @@ fi if [ ! -f .env ]; then echo "📄 Creating .env from .env.example..." cp .env.example .env + + # Generate secure random secrets + ADMIN_KEY=$(openssl rand -hex 32) + ENCRYPTION_SECRET=$(openssl rand -hex 32) + + # Update .env with generated secrets + sed -i "s/ADMIN_KEY=.*/ADMIN_KEY=$ADMIN_KEY/" .env + sed -i "s/ENCRYPTION_SECRET=.*/ENCRYPTION_SECRET=$ENCRYPTION_SECRET/" .env + + echo "✅ Generated secure random keys for ADMIN_KEY and ENCRYPTION_SECRET." echo "⚠️ Action Required: Please edit the .env file and add your API keys." 
echo " At minimum, you need: DEEPGRAM_API_KEY and OPENAI_API_KEY." From 417c6c244c00d7f99274b6ffbba2b3f308f5f340 Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Sat, 3 Jan 2026 12:51:16 -0500 Subject: [PATCH 4/9] feat(dev): add data persistence to local docker setup --- .gitignore | 2 +- backend/database/_client.py | 190 ++++++++++++++++++++++++++++++++---- docker-compose.yml | 4 + 3 files changed, 176 insertions(+), 20 deletions(-) diff --git a/.gitignore b/.gitignore index 2c46fa6099..3493a05433 100644 --- a/.gitignore +++ b/.gitignore @@ -198,5 +198,5 @@ omiGlass/.expo app/.fvm/ app/android/.kotlin/ .playwright-mcp/ - CLAUDE.md +/data/ \ No newline at end of file diff --git a/backend/database/_client.py b/backend/database/_client.py index 009e37fe9d..2b2e8b47b8 100644 --- a/backend/database/_client.py +++ b/backend/database/_client.py @@ -2,40 +2,192 @@ import json import os import uuid +from typing import Dict, Any from google.cloud import firestore from google.auth.exceptions import DefaultCredentialsError -class MockFirestore: +# Constants for local persistence +DATA_DIR = '/app/data' +DB_FILE = os.path.join(DATA_DIR, 'firestore_mock.json') + +class PersistentMockFirestore: + _instance = None + _data: Dict[str, Dict[str, Any]] = {} + + def __new__(cls): + if cls._instance is None: + cls._instance = super(PersistentMockFirestore, cls).__new__(cls) + cls._instance._load() + return cls._instance + + def _load(self): + if os.path.exists(DB_FILE): + try: + with open(DB_FILE, 'r') as f: + self._data = json.load(f) + print(f"✅ Loaded persistent mock data from {DB_FILE}") + except Exception as e: + print(f"⚠️ Failed to load mock data: {e}") + self._data = {} + else: + self._data = {} + + def _save(self): + if not os.path.exists(DATA_DIR): + try: + os.makedirs(DATA_DIR) + except OSError: + # Might fail if not permission, but inside Docker usually OK + pass + try: + with open(DB_FILE, 'w') as f: + json.dump(self._data, f, default=str, indent=2) + except Exception as e: + print(f"⚠️ Failed to save mock data: {e}") + def collection(self, name): - return MockCollection() + if name not in self._data: + self._data[name] = {} + return MockCollection(self, name) + + def batch(self): + return MockBatch(self) + +class MockBatch: + def __init__(self, db): + self.db = db + + def set(self, ref, data): + ref.set(data) + + def update(self, ref, data): + ref.update(data) + + def delete(self, ref): + ref.delete() + + def commit(self): + pass # Changes happen immediately in this simple mock class MockCollection: + def __init__(self, db, name, parent_doc=None): + self.db = db + self.name = name + self.parent_doc = parent_doc # For subcollections + + def _get_data(self): + # Handle subcollections: parent_doc.data[col_name] + if self.parent_doc: + if self.name not in self.parent_doc._get_data(): + self.parent_doc._get_data()[self.name] = {} + return self.parent_doc._get_data()[self.name] + return self.db._data[self.name] + + def document(self, doc_id=None): + if doc_id is None: + doc_id = str(uuid.uuid4()) + return MockDocument(self.db, self, doc_id) + + def add(self, data, doc_id=None): + if doc_id is None: + doc_id = str(uuid.uuid4()) + doc = self.document(doc_id) + doc.set(data) + return None, doc + def stream(self): - return [] - def document(self, doc_id): - return MockDocument(doc_id) - def add(self, data): - return None - def where(self, field, op, value): + # Return all docs in this collection + data = self._get_data() + return [MockDocument(self.db, self, doc_id) for doc_id in data.keys()] + + def 
where(self, *args, **kwargs): + # Basic mock support for chaining, doesn't actually filter yet + return self + + def limit(self, count): + return self + + def order_by(self, field, direction=None): return self class MockDocument: - def __init__(self, doc_id): + def __init__(self, db, collection, doc_id): + self.db = db + self.collection = collection self.id = doc_id + + def _get_data(self): + col_data = self.collection._get_data() + if self.id not in col_data: + return None # Does not exist + return col_data[self.id] + def set(self, data): - return None - def get(self): - return MockSnapshot() + col_data = self.collection._get_data() + col_data[self.id] = data + self.db._save() + def update(self, data): - return None + current = self._get_data() + if current: + current.update(data) + self.db._save() + + def get(self): + data = self._get_data() + return MockSnapshot(self.id, data) + def delete(self): - return None + col_data = self.collection._get_data() + if self.id in col_data: + del col_data[self.id] + self.db._save() + + def collection(self, name): + # Subcollections require nested storage structure + # Simplified: storing subcollections in a special field '_collections' inside the doc data? + # Or simpler: Just return a dummy collection for now to prevent crashes, + # as implementing deep nested persistence in one file is complex. + # But wait, we want persistence. + # Let's try to store it in the doc data under `__collections__` key + current = self._get_data() + if current is None: + # Create doc implicitly? No, usually errors. + # But for mock, let's allow it + self.set({}) + current = self._get_data() + + if '__collections__' not in current: + current['__collections__'] = {} + + return MockSubCollection(self.db, name, current['__collections__']) + +class MockSubCollection(MockCollection): + def __init__(self, db, name, storage): + self.db = db + self.name = name + self.storage = storage # Reference to the dict holding this collection's data + + def _get_data(self): + if self.name not in self.storage: + self.storage[self.name] = {} + return self.storage[self.name] class MockSnapshot: - exists = False + def __init__(self, doc_id, data): + self.id = doc_id + self._data = data + self.exists = data is not None + self.reference = None # Placeholder + def to_dict(self): - return {} + if self._data and '__collections__' in self._data: + # Hide internal storage + d = self._data.copy() + del d['__collections__'] + return d + return self._data or {} if os.environ.get('SERVICE_ACCOUNT_JSON'): service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) @@ -46,8 +198,8 @@ def to_dict(self): try: db = firestore.Client() except (DefaultCredentialsError, ValueError) as e: - print(f"⚠️ Warning: Firestore connection failed ({e}). Using MockFirestore for local dev.") - db = MockFirestore() + print(f"⚠️ Warning: Firestore connection failed ({e}). 
Using PersistentMockFirestore for local dev.") + db = PersistentMockFirestore() def get_users_uid(): @@ -59,4 +211,4 @@ def document_id_from_seed(seed: str) -> uuid.UUID: """Avoid repeating the same data""" seed_hash = hashlib.sha256(seed.encode('utf-8')).digest() generated_uuid = uuid.UUID(bytes=seed_hash[:16], version=4) - return str(generated_uuid) + return str(generated_uuid) \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index d5dab49edc..fee1bac949 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,6 +10,8 @@ services: - .env depends_on: - redis + volumes: + - ./data/backend:/app/data pusher: build: @@ -51,6 +53,8 @@ services: restart: always ports: - "6379:6379" + volumes: + - ./data/redis:/data # Optional: Diarizer (Requires NVIDIA GPU and NVIDIA Container Toolkit) # diarizer: From 81a99651fdbfebcd4de258aa58eaf23de1a0e55c Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Sat, 3 Jan 2026 13:22:45 -0500 Subject: [PATCH 5/9] fix(setup): improve macos compatibility and harden firestore mocks --- backend/database/_client.py | 132 +++++++++++++++++++++++++++++------- setup.sh | 6 +- 2 files changed, 109 insertions(+), 29 deletions(-) diff --git a/backend/database/_client.py b/backend/database/_client.py index 2b2e8b47b8..941b2347e9 100644 --- a/backend/database/_client.py +++ b/backend/database/_client.py @@ -2,10 +2,11 @@ import json import os import uuid -from typing import Dict, Any +from typing import Dict, Any, List from google.cloud import firestore from google.auth.exceptions import DefaultCredentialsError +from google.cloud.firestore_v1.base_query import FieldFilter, BaseCompositeFilter # Constants for local persistence DATA_DIR = '/app/data' @@ -38,7 +39,6 @@ def _save(self): try: os.makedirs(DATA_DIR) except OSError: - # Might fail if not permission, but inside Docker usually OK pass try: with open(DB_FILE, 'w') as f: @@ -68,16 +68,19 @@ def delete(self, ref): ref.delete() def commit(self): - pass # Changes happen immediately in this simple mock + pass class MockCollection: def __init__(self, db, name, parent_doc=None): self.db = db self.name = name - self.parent_doc = parent_doc # For subcollections + self.parent_doc = parent_doc + self._filters = [] + self._limit = None + self._offset = 0 + self._order_by = [] def _get_data(self): - # Handle subcollections: parent_doc.data[col_name] if self.parent_doc: if self.name not in self.parent_doc._get_data(): self.parent_doc._get_data()[self.name] = {} @@ -96,20 +99,104 @@ def add(self, data, doc_id=None): doc.set(data) return None, doc + def _apply_filters(self, docs): + filtered_docs = [] + for doc in docs: + data = doc._get_data() + if not data: continue + + match = True + for f in self._filters: + field, op, value = f + # Handle dot notation for nested fields + val = data + for part in field.split('.'): + if isinstance(val, dict): + val = val.get(part) + else: + val = None + break + + if op == '==' and val != value: match = False + elif op == '!=' and val == value: match = False + elif op == '>' and not (val > value if val is not None else False): match = False + elif op == '>=' and not (val >= value if val is not None else False): match = False + elif op == '<' and not (val < value if val is not None else False): match = False + elif op == '<=' and not (val <= value if val is not None else False): match = False + elif op == 'in' and val not in value: match = False + elif op == 'array_contains' and (val is None or value not in val): match = False + + if not match: break + + if 
match: filtered_docs.append(doc) + return filtered_docs + def stream(self): - # Return all docs in this collection data = self._get_data() - return [MockDocument(self.db, self, doc_id) for doc_id in data.keys()] + docs = [MockDocument(self.db, self, doc_id) for doc_id in data.keys()] + + # Filter + docs = self._apply_filters(docs) + + # Sort + for field, direction in self._order_by: + reverse = direction == 'DESCENDING' + docs.sort(key=lambda x: x._get_data().get(field, ""), reverse=reverse) + + # Offset & Limit + if self._offset: + docs = docs[self._offset:] + if self._limit: + docs = docs[:self._limit] + + return docs + def get(self): + return [doc.get() for doc in self.stream()] + def where(self, *args, **kwargs): - # Basic mock support for chaining, doesn't actually filter yet + # Support both .where("field", "==", "value") and .where(filter=FieldFilter(...)) + if 'filter' in kwargs: + f = kwargs['filter'] + if isinstance(f, FieldFilter): + self._filters.append((f.field.field_path, f.op, f.value)) + elif isinstance(f, BaseCompositeFilter): + # Basic composite handling (AND only for now) + for sub_filter in f.filters: + if isinstance(sub_filter, FieldFilter): + self._filters.append((sub_filter.field.field_path, sub_filter.op, sub_filter.value)) + elif len(args) == 3: + self._filters.append(args) return self def limit(self, count): + self._limit = count + return self + + def offset(self, count): + self._offset = count return self - def order_by(self, field, direction=None): + def order_by(self, field, direction='ASCENDING'): + self._order_by.append((field, direction)) return self + + def count(self): + return MockCountQuery(self) + +class MockCountQuery: + def __init__(self, query): + self.query = query + + def get(self): + # Return a list containing a list containing an object with a value property + # Firestore count query structure: [[Aggregation(value=count)]] + count = len(self.query.stream()) + return [[MockAggregation(count)]] + +class MockAggregation: + def __init__(self, value): + self.value = value class MockDocument: def __init__(self, db, collection, doc_id): @@ -120,7 +207,7 @@ def __init__(self, db, collection, doc_id): def _get_data(self): col_data = self.collection._get_data() if self.id not in col_data: - return None # Does not exist + return None return col_data[self.id] def set(self, data): @@ -144,17 +231,13 @@ def delete(self): del col_data[self.id] self.db._save() + @property + def reference(self): + return self + def collection(self, name): - # Subcollections require nested storage structure - # Simplified: storing subcollections in a special field '_collections' inside the doc data? - # Or simpler: Just return a dummy collection for now to prevent crashes, - # as implementing deep nested persistence in one file is complex. - # But wait, we want persistence. - # Let's try to store it in the doc data under `__collections__` key current = self._get_data() if current is None: - # Create doc implicitly? No, usually errors. 
- # But for mock, let's allow it self.set({}) current = self._get_data() @@ -165,9 +248,8 @@ def collection(self, name): class MockSubCollection(MockCollection): def __init__(self, db, name, storage): - self.db = db - self.name = name - self.storage = storage # Reference to the dict holding this collection's data + super().__init__(db, name) # Init base filtering/sorting + self.storage = storage def _get_data(self): if self.name not in self.storage: @@ -179,11 +261,10 @@ def __init__(self, doc_id, data): self.id = doc_id self._data = data self.exists = data is not None - self.reference = None # Placeholder + self.reference = None def to_dict(self): if self._data and '__collections__' in self._data: - # Hide internal storage d = self._data.copy() del d['__collections__'] return d @@ -191,13 +272,12 @@ def to_dict(self): if os.environ.get('SERVICE_ACCOUNT_JSON'): service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) - # create google-credentials.json with open('google-credentials.json', 'w') as f: json.dump(service_account_info, f) try: db = firestore.Client() -except (DefaultCredentialsError, ValueError) as e: +except (DefaultCredentialsError, ValueError, ImportError) as e: print(f"⚠️ Warning: Firestore connection failed ({e}). Using PersistentMockFirestore for local dev.") db = PersistentMockFirestore() @@ -211,4 +291,4 @@ def document_id_from_seed(seed: str) -> uuid.UUID: """Avoid repeating the same data""" seed_hash = hashlib.sha256(seed.encode('utf-8')).digest() generated_uuid = uuid.UUID(bytes=seed_hash[:16], version=4) - return str(generated_uuid) \ No newline at end of file + return str(generated_uuid) diff --git a/setup.sh b/setup.sh index fc44a0718b..91cd9e9ae4 100755 --- a/setup.sh +++ b/setup.sh @@ -30,9 +30,9 @@ if [ ! -f .env ]; then ADMIN_KEY=$(openssl rand -hex 32) ENCRYPTION_SECRET=$(openssl rand -hex 32) - # Update .env with generated secrets - sed -i "s/ADMIN_KEY=.*/ADMIN_KEY=$ADMIN_KEY/" .env - sed -i "s/ENCRYPTION_SECRET=.*/ENCRYPTION_SECRET=$ENCRYPTION_SECRET/" .env + # Update .env with generated secrets (cross-platform compatible) + sed "s/ADMIN_KEY=.*/ADMIN_KEY=$ADMIN_KEY/" .env > .env.tmp && mv .env.tmp .env + sed "s/ENCRYPTION_SECRET=.*/ENCRYPTION_SECRET=$ENCRYPTION_SECRET/" .env > .env.tmp && mv .env.tmp .env echo "✅ Generated secure random keys for ADMIN_KEY and ENCRYPTION_SECRET." echo "⚠️ Action Required: Please edit the .env file and add your API keys." 
From 2b48806339044dccbb2805e569a7caed4aea2607 Mon Sep 17 00:00:00 2001
From: fayerman-source
Date: Sat, 3 Jan 2026 17:49:22 -0500
Subject: [PATCH 6/9] fix(mock): improve PersistentMockFirestore security and batch atomicity

---
 backend/database/_client.py | 55 ++++++++++++++++++++++++-------------
 1 file changed, 36 insertions(+), 19 deletions(-)

diff --git a/backend/database/_client.py b/backend/database/_client.py
index 941b2347e9..711149e928 100644
--- a/backend/database/_client.py
+++ b/backend/database/_client.py
@@ -7,6 +7,7 @@
 from google.cloud import firestore
 from google.auth.exceptions import DefaultCredentialsError
 from google.cloud.firestore_v1.base_query import FieldFilter, BaseCompositeFilter
+from google.oauth2 import service_account
 
 # Constants for local persistence
 DATA_DIR = '/app/data'
@@ -57,18 +58,26 @@ def batch(self):
 class MockBatch:
     def __init__(self, db):
         self.db = db
-
+        self._operations = []
+
     def set(self, ref, data):
-        ref.set(data)
-
+        self._operations.append(('set', ref, data))
+
     def update(self, ref, data):
-        ref.update(data)
-
+        self._operations.append(('update', ref, data))
+
     def delete(self, ref):
-        ref.delete()
-
+        self._operations.append(('delete', ref, None))
+
     def commit(self):
-        pass
+        for op_type, ref, data in self._operations:
+            if op_type == 'set':
+                ref.set(data)
+            elif op_type == 'update':
+                ref.update(data)
+            elif op_type == 'delete':
+                ref.delete()
+        self._operations = []  # Clear operations after commit
 
 class MockCollection:
     def __init__(self, db, name, parent_doc=None):
@@ -248,7 +257,7 @@ def collection(self, name):
 
 class MockSubCollection(MockCollection):
     def __init__(self, db, name, storage):
-        super().__init__(db, name)  # Init base filtering/sorting
+        super().__init__(db, name)
         self.storage = storage
 
     def _get_data(self):
@@ -270,16 +279,24 @@ def to_dict(self):
             return d
         return self._data or {}
 
+# Removed: Writing SERVICE_ACCOUNT_JSON to a file
+# Initialize Firestore client directly from JSON string if available
 if os.environ.get('SERVICE_ACCOUNT_JSON'):
-    service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"])
-    with open('google-credentials.json', 'w') as f:
-        json.dump(service_account_info, f)
-
-try:
-    db = firestore.Client()
-except (DefaultCredentialsError, ValueError, ImportError) as e:
-    print(f"⚠️ Warning: Firestore connection failed ({e}). Using PersistentMockFirestore for local dev.")
-    db = PersistentMockFirestore()
+    try:
+        service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"])
+        # Use service_account_info directly to build google-auth credentials
+        credentials = service_account.Credentials.from_service_account_info(service_account_info)
+        db = firestore.Client(credentials=credentials)
+    except (json.JSONDecodeError, ValueError, DefaultCredentialsError) as e:
+        print(f"⚠️ Error initializing Firestore with SERVICE_ACCOUNT_JSON: {e}. Falling back to default/mock.")
+        db = PersistentMockFirestore()
+else:
+    try:
+        # Attempt to initialize with default credentials (e.g., gcloud auth application-default login)
+        db = firestore.Client()
+    except (DefaultCredentialsError, ValueError) as e:
+        print(f"⚠️ Warning: Firestore connection failed ({e}). 
Using PersistentMockFirestore for local dev.")
+        db = PersistentMockFirestore()
 
 
 def get_users_uid():
@@ -291,4 +308,4 @@ def document_id_from_seed(seed: str) -> uuid.UUID:
     """Avoid repeating the same data"""
     seed_hash = hashlib.sha256(seed.encode('utf-8')).digest()
     generated_uuid = uuid.UUID(bytes=seed_hash[:16], version=4)
-    return str(generated_uuid)
+    return str(generated_uuid)
\ No newline at end of file

From ea48ac9d755f0972505f14cc7cd0b34f96384af0 Mon Sep 17 00:00:00 2001
From: fayerman-source
Date: Sat, 3 Jan 2026 17:51:14 -0500
Subject: [PATCH 7/9] feat(dev): add windows setup script and update docs for one-click deployment

---
 backend/setup.ps1                      | 64 ++++++++++++++++++++++++++
 docs/doc/developer/one-click-setup.mdx | 65 ++++++++++++++++++++++++++
 2 files changed, 129 insertions(+)
 create mode 100755 backend/setup.ps1
 create mode 100644 docs/doc/developer/one-click-setup.mdx

diff --git a/backend/setup.ps1 b/backend/setup.ps1
new file mode 100755
index 0000000000..6ae6d2ffb6
--- /dev/null
+++ b/backend/setup.ps1
@@ -0,0 +1,64 @@
+#!/usr/bin/env pwsh
+
+# Omi One-Click Setup Script for Windows (PowerShell)
+# Designed for backend developers and customers with low technical expertise.
+
+$ErrorActionPreference = "Stop"
+
+Write-Host "==========================================" -ForegroundColor Cyan
+Write-Host "   🚀 Omi One-Click Setup (Docker)      " -ForegroundColor Cyan
+Write-Host "==========================================" -ForegroundColor Cyan
+
+# Check for Docker
+if (-Not (Get-Command docker -ErrorAction SilentlyContinue)) {
+    Write-Host "❌ Error: Docker is not installed. Please install Docker Desktop: https://docs.docker.com/get-docker/" -ForegroundColor Red
+    exit 1
+}
+
+# Check for Docker Compose (the v2 plugin bundled with recent Docker Desktop releases)
+docker compose version | Out-Null
+if ($LASTEXITCODE -ne 0) {
+    Write-Host "❌ Error: Docker Compose is not available. Please update Docker Desktop or install the Compose plugin." -ForegroundColor Red
+    exit 1
+}
+
+# Create .env if it doesn't exist
+if (-Not (Test-Path .env)) {
+    Write-Host "📄 Creating .env from .env.example..." -ForegroundColor Yellow
+    Copy-Item .env.example .env
+
+    # Generate secure random secrets (64 hex characters, matching `openssl rand -hex 32`)
+    $ADMIN_KEY = -join ((1..32) | ForEach-Object { '{0:x2}' -f (Get-Random -Minimum 0 -Maximum 256) })
+    $ENCRYPTION_SECRET = -join ((1..32) | ForEach-Object { '{0:x2}' -f (Get-Random -Minimum 0 -Maximum 256) })
+
+    # Update .env with generated secrets
+    (Get-Content .env) -replace "ADMIN_KEY=.*", "ADMIN_KEY=$ADMIN_KEY" | Set-Content .env
+    (Get-Content .env) -replace "ENCRYPTION_SECRET=.*", "ENCRYPTION_SECRET=$ENCRYPTION_SECRET" | Set-Content .env
+
+    Write-Host "✅ Generated secure random keys for ADMIN_KEY and ENCRYPTION_SECRET." -ForegroundColor Green
+    Write-Host "⚠️  Action Required: Please edit the .env file and add your API keys." -ForegroundColor Yellow
+    Write-Host "   At minimum, you need: DEEPGRAM_API_KEY and OPENAI_API_KEY." -ForegroundColor Yellow
+
+    # Optional: try to open the editor (notepad for Windows)
+    $response = Read-Host "Would you like to edit .env now? (y/n)"
+    if ($response -eq "y" -or $response -eq "Y") {
+        notepad .env
+    }
+}
+
+# Build and Start
+Write-Host "🛠️  Building and starting Omi services..." -ForegroundColor Blue
+docker compose up -d --build
+
+Write-Host "" -ForegroundColor Cyan
+Write-Host "==========================================" -ForegroundColor Cyan
+Write-Host "✅ Omi is now starting up!" 
-ForegroundColor Green +Write-Host "" -ForegroundColor Cyan +Write-Host "Services available at:" -ForegroundColor Cyan +Write-Host "👉 Frontend: http://localhost:3000" -ForegroundColor Cyan +Write-Host "👉 Backend: http://localhost:8080" -ForegroundColor Cyan +Write-Host "👉 Pusher: http://localhost:8081" -ForegroundColor Cyan +Write-Host "" -ForegroundColor Cyan +Write-Host "To view logs, run: docker compose logs -f" -ForegroundColor DarkGray +Write-Host "To stop Omi, run: docker compose down" -ForegroundColor DarkGray +Write-Host "==========================================" -ForegroundColor Cyan diff --git a/docs/doc/developer/one-click-setup.mdx b/docs/doc/developer/one-click-setup.mdx new file mode 100644 index 0000000000..4ff73682d5 --- /dev/null +++ b/docs/doc/developer/one-click-setup.mdx @@ -0,0 +1,65 @@ +--- +title: 'One-Click Local Development Setup' +description: 'Run the entire Omi backend locally with Docker, no API keys required.' +--- + +This guide provides instructions for setting up the Omi backend locally using Docker Compose, +with mock services for external dependencies. This allows for a full-featured development +environment without needing to configure cloud API keys. + +## Prerequisites + +* **Docker Desktop:** Ensure Docker is installed and running on your system. + * [Install Docker for Windows](https://docs.docker.com/desktop/install/windows-install/) + * [Install Docker for Mac](https://docs.docker.com/desktop/install/mac-install/) + * [Install Docker for Linux](https://docs.docker.com/desktop/install/linux-install/) + +## Setup + +Navigate to the `backend/` directory within your Omi repository: + +```bash +cd projects/omi/backend +``` + +### Linux / macOS + +Run the setup script: + +```bash +./setup.sh +``` + +### Windows (PowerShell) + +Open PowerShell in the `backend/` directory and run the setup script: + +```powershell +./setup.ps1 +``` + +--- + +The setup script will: +1. Check for Docker and Docker Compose. +2. If `.env` doesn't exist, it will copy `backend/.env.example` to `backend/.env` and generate secure random keys for `ADMIN_KEY` and `ENCRYPTION_SECRET`. +3. Prompt you to edit `.env` to add required API keys for Deepgram and OpenAI (if you want to use real services). +4. Build and start all Omi services using Docker Compose. + +## Accessing Services + +Once the services are up, you can access: + +* **Frontend:** `http://localhost:3000` +* **Backend API:** `http://localhost:8080` +* **Pusher Service:** `http://localhost:8081` + +## Data Persistence + +Local data (memories, tasks, etc.) for `MockFirestore` is persisted to `./data/backend/firestore_mock.json` on your host machine. Redis data is persisted to `./data/redis`. These volumes are managed by Docker. + +## Managing Services + +* **View Logs:** `docker compose logs -f` +* **Stop Services:** `docker compose down` +* **Restart Services:** `docker compose restart` From 76dcb74e945b73e2cbe8b5ae43504ab03bd3bf4d Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Sat, 3 Jan 2026 18:06:04 -0500 Subject: [PATCH 8/9] fix(docker): resolve port conflicts and build context paths for local deployment --- backend/docker-compose.yml | 57 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 backend/docker-compose.yml diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml new file mode 100644 index 0000000000..3ba0cc985b --- /dev/null +++ b/backend/docker-compose.yml @@ -0,0 +1,57 @@ +services: + backend: + build: + context: .. 
+ dockerfile: backend/Dockerfile + restart: always + ports: + - "8088:8080" + env_file: + - .env + depends_on: + - redis + volumes: + - ./data/backend:/app/data + + pusher: + build: + context: .. + dockerfile: backend/pusher/Dockerfile + restart: always + ports: + - "8089:8080" + env_file: + - .env + depends_on: + - redis + + frontend: + build: + context: .. + dockerfile: web/frontend/Dockerfile + args: + - API_URL=http://localhost:8088 + - NEXT_PUBLIC_FIREBASE_API_KEY=${NEXT_PUBLIC_FIREBASE_API_KEY:-fake_key_for_build} + - NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN=${NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN:-omi-app.firebaseapp.com} + - NEXT_PUBLIC_FIREBASE_PROJECT_ID=${NEXT_PUBLIC_FIREBASE_PROJECT_ID:-omi-app} + - NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET=${NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET:-omi-app.appspot.com} + - NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID=${NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID:-123456789} + - NEXT_PUBLIC_FIREBASE_APP_ID=${NEXT_PUBLIC_FIREBASE_APP_ID:-1:123456789:web:abcdef} + - NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID=${NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID:-G-ABCDEF} + restart: always + ports: + - "3001:3000" + env_file: + - .env + extra_hosts: + - "host.docker.internal:host-gateway" + depends_on: + - backend + + redis: + image: redis:alpine + restart: always + ports: + - "6380:6379" + volumes: + - ../data/redis:/data \ No newline at end of file From 61b2a745569ca4b1758d24b395a91b05e32cfade Mon Sep 17 00:00:00 2001 From: fayerman-source Date: Sat, 3 Jan 2026 18:23:01 -0500 Subject: [PATCH 9/9] fix(docker): move docker setup to root to respect .dockerignore --- backend/docker-compose.yml | 57 -------------------------- docker-compose.yml | 28 +++---------- docs/doc/developer/one-click-setup.mdx | 10 ++--- backend/setup.ps1 => setup.ps1 | 0 4 files changed, 10 insertions(+), 85 deletions(-) delete mode 100644 backend/docker-compose.yml rename backend/setup.ps1 => setup.ps1 (100%) diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml deleted file mode 100644 index 3ba0cc985b..0000000000 --- a/backend/docker-compose.yml +++ /dev/null @@ -1,57 +0,0 @@ -services: - backend: - build: - context: .. - dockerfile: backend/Dockerfile - restart: always - ports: - - "8088:8080" - env_file: - - .env - depends_on: - - redis - volumes: - - ./data/backend:/app/data - - pusher: - build: - context: .. - dockerfile: backend/pusher/Dockerfile - restart: always - ports: - - "8089:8080" - env_file: - - .env - depends_on: - - redis - - frontend: - build: - context: .. 
- dockerfile: web/frontend/Dockerfile - args: - - API_URL=http://localhost:8088 - - NEXT_PUBLIC_FIREBASE_API_KEY=${NEXT_PUBLIC_FIREBASE_API_KEY:-fake_key_for_build} - - NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN=${NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN:-omi-app.firebaseapp.com} - - NEXT_PUBLIC_FIREBASE_PROJECT_ID=${NEXT_PUBLIC_FIREBASE_PROJECT_ID:-omi-app} - - NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET=${NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET:-omi-app.appspot.com} - - NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID=${NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID:-123456789} - - NEXT_PUBLIC_FIREBASE_APP_ID=${NEXT_PUBLIC_FIREBASE_APP_ID:-1:123456789:web:abcdef} - - NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID=${NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID:-G-ABCDEF} - restart: always - ports: - - "3001:3000" - env_file: - - .env - extra_hosts: - - "host.docker.internal:host-gateway" - depends_on: - - backend - - redis: - image: redis:alpine - restart: always - ports: - - "6380:6379" - volumes: - - ../data/redis:/data \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index fee1bac949..6162fd808b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ services: dockerfile: backend/Dockerfile restart: always ports: - - "8080:8080" + - "8088:8080" env_file: - .env depends_on: @@ -19,7 +19,7 @@ services: dockerfile: backend/pusher/Dockerfile restart: always ports: - - "8081:8080" + - "8089:8080" env_file: - .env depends_on: @@ -30,7 +30,7 @@ services: context: . dockerfile: web/frontend/Dockerfile args: - - API_URL=http://localhost:8080 + - API_URL=http://localhost:8088 - NEXT_PUBLIC_FIREBASE_API_KEY=${NEXT_PUBLIC_FIREBASE_API_KEY:-fake_key_for_build} - NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN=${NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN:-omi-app.firebaseapp.com} - NEXT_PUBLIC_FIREBASE_PROJECT_ID=${NEXT_PUBLIC_FIREBASE_PROJECT_ID:-omi-app} @@ -40,7 +40,7 @@ services: - NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID=${NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID:-G-ABCDEF} restart: always ports: - - "3000:3000" + - "3001:3000" env_file: - .env extra_hosts: @@ -52,24 +52,6 @@ services: image: redis:alpine restart: always ports: - - "6379:6379" + - "6380:6379" volumes: - ./data/redis:/data - - # Optional: Diarizer (Requires NVIDIA GPU and NVIDIA Container Toolkit) - # diarizer: - # build: - # context: . - # dockerfile: backend/diarizer/Dockerfile - # restart: always - # ports: - # - "8082:8080" - # env_file: - # - .env - # deploy: - # resources: - # reservations: - # devices: - # - driver: nvidia - # count: 1 - # capabilities: [gpu] diff --git a/docs/doc/developer/one-click-setup.mdx b/docs/doc/developer/one-click-setup.mdx index 4ff73682d5..75272e4caf 100644 --- a/docs/doc/developer/one-click-setup.mdx +++ b/docs/doc/developer/one-click-setup.mdx @@ -16,15 +16,15 @@ environment without needing to configure cloud API keys. ## Setup -Navigate to the `backend/` directory within your Omi repository: +Navigate to the root directory of your Omi repository: ```bash -cd projects/omi/backend +cd /path/to/omi ``` ### Linux / macOS -Run the setup script: +Run the setup script from the root: ```bash ./setup.sh @@ -32,7 +32,7 @@ Run the setup script: ### Windows (PowerShell) -Open PowerShell in the `backend/` directory and run the setup script: +Open PowerShell in the root directory and run the setup script: ```powershell ./setup.ps1 @@ -42,7 +42,7 @@ Open PowerShell in the `backend/` directory and run the setup script: The setup script will: 1. Check for Docker and Docker Compose. -2. 
If `.env` doesn't exist, it will copy `backend/.env.example` to `backend/.env` and generate secure random keys for `ADMIN_KEY` and `ENCRYPTION_SECRET`. +2. If `.env` doesn't exist, it will copy `.env.example` to `.env` and generate secure random keys for `ADMIN_KEY` and `ENCRYPTION_SECRET`. 3. Prompt you to edit `.env` to add required API keys for Deepgram and OpenAI (if you want to use real services). 4. Build and start all Omi services using Docker Compose. diff --git a/backend/setup.ps1 b/setup.ps1 similarity index 100% rename from backend/setup.ps1 rename to setup.ps1
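A quick way to sanity-check the persistent mock introduced in this series is to exercise it from a Python shell inside the backend container. This is a minimal sketch, not part of the patches: it assumes the module path `database._client` used by the backend image, that a Python REPL is reachable (for example via `docker compose exec backend python`), and that no Google credentials are configured so `db` falls back to `PersistentMockFirestore`.

```python
# Assumes a Python shell inside the backend container, e.g. `docker compose exec backend python`.
from database._client import db  # falls back to PersistentMockFirestore when no credentials are set

# Writes are flushed to /app/data/firestore_mock.json (bind-mounted to ./data/backend on the host)
doc_ref = db.collection('users').document('local-user')
doc_ref.set({'name': 'Test User'})

# Reads come back as MockSnapshot objects
snap = doc_ref.get()
print(snap.exists, snap.to_dict())

# Simple equality filters go through the where()/stream() chain
for doc in db.collection('users').where('name', '==', 'Test User').stream():
    print(doc.id, doc.get().to_dict())
```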