diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000..2626455ded --- /dev/null +++ b/.env.example @@ -0,0 +1,43 @@ +# Omi One-Click Deployment Configuration +# Copy this to .env and fill in the values + +# --- Core API Keys --- +# Required for transcription +DEEPGRAM_API_KEY= +# Required for LLM features +OPENAI_API_KEY= +# Optional: Other LLM providers +ANTHROPIC_API_KEY= + +# --- Backend Configuration --- +REDIS_DB_HOST=redis +REDIS_DB_PORT=6379 +REDIS_DB_PASSWORD= + +# Vector Database (Pinecone is default in current code) +PINECONE_API_KEY= +PINECONE_INDEX_NAME=memories-backend + +# Database (Firebase/Firestore is default) +# Provide the JSON content of your service account if using Firestore +SERVICE_ACCOUNT_JSON= + +# --- Service URLs (Internal Docker Networking) --- +HOSTED_VAD_API_URL=http://vad:8080/v1/vad +HOSTED_SPEAKER_EMBEDDING_API_URL=http://diarizer:8080 +HOSTED_PUSHER_API_URL=http://pusher:8080 + +# --- Frontend Configuration --- +# Public URL of the backend (use localhost for local dev) +NEXT_PUBLIC_API_URL=http://localhost:8080 +# Firebase config for the frontend +NEXT_PUBLIC_FIREBASE_API_KEY= +NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN= +NEXT_PUBLIC_FIREBASE_PROJECT_ID= +NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET= +NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID= +NEXT_PUBLIC_FIREBASE_APP_ID= + +# --- Development / Debugging --- +ADMIN_KEY=some_secret_key +ENCRYPTION_SECRET=omi_default_secret_change_me diff --git a/.gitignore b/.gitignore index 2c46fa6099..3493a05433 100644 --- a/.gitignore +++ b/.gitignore @@ -198,5 +198,5 @@ omiGlass/.expo app/.fvm/ app/android/.kotlin/ .playwright-mcp/ - CLAUDE.md +/data/ \ No newline at end of file diff --git a/backend/database/_client.py b/backend/database/_client.py index 943d6e60de..711149e928 100644 --- a/backend/database/_client.py +++ b/backend/database/_client.py @@ -2,16 +2,301 @@ import json import os import uuid +from typing import Dict, Any, List from google.cloud import firestore +from google.auth.exceptions import DefaultCredentialsError +from google.cloud.firestore_v1.base_query import FieldFilter, BaseCompositeFilter +from firebase_admin import credentials -if os.environ.get('SERVICE_ACCOUNT_JSON'): - service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) - # create google-credentials.json - with open('google-credentials.json', 'w') as f: - json.dump(service_account_info, f) +# Constants for local persistence +DATA_DIR = '/app/data' +DB_FILE = os.path.join(DATA_DIR, 'firestore_mock.json') + +class PersistentMockFirestore: + _instance = None + _data: Dict[str, Dict[str, Any]] = {} + + def __new__(cls): + if cls._instance is None: + cls._instance = super(PersistentMockFirestore, cls).__new__(cls) + cls._instance._load() + return cls._instance + + def _load(self): + if os.path.exists(DB_FILE): + try: + with open(DB_FILE, 'r') as f: + self._data = json.load(f) + print(f"✅ Loaded persistent mock data from {DB_FILE}") + except Exception as e: + print(f"⚠️ Failed to load mock data: {e}") + self._data = {} + else: + self._data = {} + + def _save(self): + if not os.path.exists(DATA_DIR): + try: + os.makedirs(DATA_DIR) + except OSError: + pass + try: + with open(DB_FILE, 'w') as f: + json.dump(self._data, f, default=str, indent=2) + except Exception as e: + print(f"⚠️ Failed to save mock data: {e}") + + def collection(self, name): + if name not in self._data: + self._data[name] = {} + return MockCollection(self, name) + + def batch(self): + return MockBatch(self) + +class MockBatch: + def __init__(self, db): + 
self.db = db
+        self._operations = []
+
+    def set(self, ref, data):
+        self._operations.append(('set', ref, data))
+
+    def update(self, ref, data):
+        self._operations.append(('update', ref, data))
+
+    def delete(self, ref):
+        self._operations.append(('delete', ref, None))
+
+    def commit(self):
+        for op_type, ref, data in self._operations:
+            if op_type == 'set':
+                ref.set(data)
+            elif op_type == 'update':
+                ref.update(data)
+            elif op_type == 'delete':
+                ref.delete()
+        self._operations = []  # Clear operations after commit
+
+class MockCollection:
+    def __init__(self, db, name, parent_doc=None):
+        self.db = db
+        self.name = name
+        self.parent_doc = parent_doc
+        self._filters = []
+        self._limit = None
+        self._offset = 0
+        self._order_by = []
+
+    def _get_data(self):
+        if self.parent_doc:
+            if self.name not in self.parent_doc._get_data():
+                self.parent_doc._get_data()[self.name] = {}
+            return self.parent_doc._get_data()[self.name]
+        return self.db._data[self.name]
+
+    def document(self, doc_id=None):
+        if doc_id is None:
+            doc_id = str(uuid.uuid4())
+        return MockDocument(self.db, self, doc_id)
+
+    def add(self, data, doc_id=None):
+        if doc_id is None:
+            doc_id = str(uuid.uuid4())
+        doc = self.document(doc_id)
+        doc.set(data)
+        return None, doc
+
+    def _apply_filters(self, docs):
+        filtered_docs = []
+        for doc in docs:
+            data = doc._get_data()
+            if not data: continue
+
+            match = True
+            for f in self._filters:
+                field, op, value = f
+                # Handle dot notation for nested fields
+                val = data
+                for part in field.split('.'):
+                    if isinstance(val, dict):
+                        val = val.get(part)
+                    else:
+                        val = None
+                        break
+
+                if op == '==' and val != value: match = False
+                elif op == '!=' and val == value: match = False
+                elif op == '>' and not (val > value if val is not None else False): match = False
+                elif op == '>=' and not (val >= value if val is not None else False): match = False
+                elif op == '<' and not (val < value if val is not None else False): match = False
+                elif op == '<=' and not (val <= value if val is not None else False): match = False
+                elif op == 'in' and val not in value: match = False
+                elif op == 'array_contains' and (val is None or value not in val): match = False
+
+                if not match: break
+
+            if match: filtered_docs.append(doc)
+        return filtered_docs
+
+    def stream(self):
+        data = self._get_data()
+        docs = [MockDocument(self.db, self, doc_id) for doc_id in data.keys()]
+
+        # Filter
+        docs = self._apply_filters(docs)
+
+        # Sort
+        for field, direction in self._order_by:
+            reverse = direction == 'DESCENDING'
+            docs.sort(key=lambda x: x._get_data().get(field, ""), reverse=reverse)
+
+        # Offset & Limit
+        if self._offset:
+            docs = docs[self._offset:]
+        if self._limit:
+            docs = docs[:self._limit]
+
+        return docs
+
+    def get(self):
+        return [doc.get() for doc in self.stream()]
-db = firestore.Client()
+    def where(self, *args, **kwargs):
+        # Support both .where("field", "==", "value") and .where(filter=FieldFilter(...))
+        if 'filter' in kwargs:
+            f = kwargs['filter']
+            if isinstance(f, FieldFilter):
+                # FieldFilter exposes field_path / op_string / value
+                self._filters.append((f.field_path, f.op_string, f.value))
+            elif isinstance(f, BaseCompositeFilter):
+                # Basic composite handling (AND only for now)
+                for sub_filter in f.filters:
+                    if isinstance(sub_filter, FieldFilter):
+                        self._filters.append((sub_filter.field_path, sub_filter.op_string, sub_filter.value))
+        elif len(args) == 3:
+            self._filters.append(args)
+        return self
+
+    def limit(self, count):
+        self._limit = count
+        return self
+
+    def offset(self, count):
+        self._offset = count
+        return self
+
+    def order_by(self, field, direction='ASCENDING'):
+        self._order_by.append((field, direction))
+        return self
+
+    def count(self):
+        return MockCountQuery(self)
+
+class MockCountQuery:
+    def __init__(self, query):
+        self.query = query
+
+    def get(self):
+        # Return a list containing a list containing an object with a value property
+        # Firestore count query structure: [[Aggregation(value=count)]]
+        count = len(self.query.stream())
+        return [[MockAggregation(count)]]
+
+class MockAggregation:
+    def __init__(self, value):
+        self.value = value
+
+class MockDocument:
+    def __init__(self, db, collection, doc_id):
+        self.db = db
+        # Stored as _collection so it does not shadow the collection() method below
+        self._collection = collection
+        self.id = doc_id
+
+    def _get_data(self):
+        col_data = self._collection._get_data()
+        if self.id not in col_data:
+            return None
+        return col_data[self.id]
+
+    def set(self, data):
+        col_data = self._collection._get_data()
+        col_data[self.id] = data
+        self.db._save()
+
+    def update(self, data):
+        current = self._get_data()
+        if current:
+            current.update(data)
+            self.db._save()
+
+    def get(self):
+        data = self._get_data()
+        return MockSnapshot(self.id, data)
+
+    def delete(self):
+        col_data = self._collection._get_data()
+        if self.id in col_data:
+            del col_data[self.id]
+            self.db._save()
+
+    @property
+    def reference(self):
+        return self
+
+    def collection(self, name):
+        current = self._get_data()
+        if current is None:
+            self.set({})
+            current = self._get_data()
+
+        if '__collections__' not in current:
+            current['__collections__'] = {}
+
+        return MockSubCollection(self.db, name, current['__collections__'])
+
+class MockSubCollection(MockCollection):
+    def __init__(self, db, name, storage):
+        super().__init__(db, name)
+        self.storage = storage
+
+    def _get_data(self):
+        if self.name not in self.storage:
+            self.storage[self.name] = {}
+        return self.storage[self.name]
+
+class MockSnapshot:
+    def __init__(self, doc_id, data):
+        self.id = doc_id
+        self._data = data
+        self.exists = data is not None
+        self.reference = None
+
+    def to_dict(self):
+        if self._data and '__collections__' in self._data:
+            d = self._data.copy()
+            del d['__collections__']
+            return d
+        return self._data or {}
+
+# Removed: Writing SERVICE_ACCOUNT_JSON to a file
+# Initialize the Firestore client directly from the JSON string if available
+if os.environ.get('SERVICE_ACCOUNT_JSON'):
+    try:
+        service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"])
+        # Use service_account_info directly to create credentials
+        cred = credentials.Certificate(service_account_info)
+        # firestore.Client expects google-auth credentials, so unwrap the firebase_admin certificate
+        db = firestore.Client(project=service_account_info.get('project_id'), credentials=cred.get_credential())
+    except (json.JSONDecodeError, ValueError, Exception) as e:
+        print(f"⚠️ Error initializing Firestore with SERVICE_ACCOUNT_JSON: {e}. Falling back to default/mock.")
+        db = PersistentMockFirestore()
+else:
+    try:
+        # Attempt to initialize with default credentials (e.g., gcloud auth application-default login)
+        db = firestore.Client()
+    except (DefaultCredentialsError, ValueError, Exception) as e:
+        print(f"⚠️ Warning: Firestore connection failed ({e}). 
Using PersistentMockFirestore for local dev.") + db = PersistentMockFirestore() def get_users_uid(): @@ -23,4 +308,4 @@ def document_id_from_seed(seed: str) -> uuid.UUID: """Avoid repeating the same data""" seed_hash = hashlib.sha256(seed.encode('utf-8')).digest() generated_uuid = uuid.UUID(bytes=seed_hash[:16], version=4) - return str(generated_uuid) + return str(generated_uuid) \ No newline at end of file diff --git a/backend/database/vector_db.py b/backend/database/vector_db.py index f9a883d5e4..351eabe9d0 100644 --- a/backend/database/vector_db.py +++ b/backend/database/vector_db.py @@ -4,16 +4,35 @@ from datetime import datetime, timezone, timedelta from typing import List -from pinecone import Pinecone +from pinecone import Pinecone, PineconeConfigurationError from models.conversation import Conversation from utils.llm.clients import embeddings -if os.getenv('PINECONE_API_KEY') is not None: - pc = Pinecone(api_key=os.getenv('PINECONE_API_KEY', '')) - index = pc.Index(os.getenv('PINECONE_INDEX_NAME', '')) -else: - index = None +class MockIndex: + def upsert(self, vectors, namespace): + print(f"Mock upsert: {len(vectors)} vectors") + return {"upserted_count": len(vectors)} + def query(self, vector, top_k, include_metadata=False, filter=None, namespace=None, include_values=False): + print("Mock query") + return {"matches": []} + def update(self, id, set_metadata, namespace): + print(f"Mock update: {id}") + return {} + def delete(self, ids, namespace): + print(f"Mock delete: {ids}") + return {} + +try: + if os.getenv('PINECONE_API_KEY'): + pc = Pinecone(api_key=os.getenv('PINECONE_API_KEY')) + index = pc.Index(os.getenv('PINECONE_INDEX_NAME', '')) + else: + print("⚠️ Warning: PINECONE_API_KEY not set. Using MockIndex.") + index = MockIndex() +except (PineconeConfigurationError, ValueError, KeyError) as e: + print(f"⚠️ Warning: Pinecone init failed ({e}). Using MockIndex.") + index = MockIndex() def _get_data(uid: str, conversation_id: str, vector: List[float]): diff --git a/backend/main.py b/backend/main.py index c1e5f713be..3a0befe77a 100644 --- a/backend/main.py +++ b/backend/main.py @@ -43,12 +43,17 @@ from utils.other.timeout import TimeoutMiddleware -if os.environ.get('SERVICE_ACCOUNT_JSON'): - service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) - credentials = firebase_admin.credentials.Certificate(service_account_info) - firebase_admin.initialize_app(credentials) -else: - firebase_admin.initialize_app() +from google.auth.exceptions import DefaultCredentialsError + +try: + if os.environ.get('SERVICE_ACCOUNT_JSON'): + service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) + credentials = firebase_admin.credentials.Certificate(service_account_info) + firebase_admin.initialize_app(credentials) + else: + firebase_admin.initialize_app() +except (DefaultCredentialsError, ValueError) as e: + print(f"⚠️ Warning: Firebase Admin initialization failed ({e}). 
Auth & DB features may not work.") app = FastAPI() diff --git a/backend/utils/conversations/search.py b/backend/utils/conversations/search.py index 9892b8b4d1..734c7e9f85 100644 --- a/backend/utils/conversations/search.py +++ b/backend/utils/conversations/search.py @@ -5,13 +5,40 @@ import typesense -client = typesense.Client( - { - 'nodes': [{'host': os.getenv('TYPESENSE_HOST'), 'port': os.getenv('TYPESENSE_HOST_PORT'), 'protocol': 'https'}], - 'api_key': os.getenv('TYPESENSE_API_KEY'), - 'connection_timeout_seconds': 2, - } -) +from typesense.exceptions import ConfigError + +class MockTypesenseClient: + def __init__(self): + self.collections = MockCollections() + +class MockCollections: + def __getitem__(self, key): + return MockDocuments() + +class MockDocuments: + @property + def documents(self): + return self + + def search(self, params): + print(f"Mock search with params: {params}") + return {'hits': [], 'found': 0} + +try: + if os.getenv('TYPESENSE_API_KEY'): + client = typesense.Client( + { + 'nodes': [{'host': os.getenv('TYPESENSE_HOST'), 'port': os.getenv('TYPESENSE_HOST_PORT'), 'protocol': 'https'}], + 'api_key': os.getenv('TYPESENSE_API_KEY'), + 'connection_timeout_seconds': 2, + } + ) + else: + print("⚠️ Warning: TYPESENSE_API_KEY not set. Using MockTypesenseClient.") + client = MockTypesenseClient() +except (ConfigError, ValueError, KeyError) as e: + print(f"⚠️ Warning: Typesense init failed ({e}). Using MockTypesenseClient.") + client = MockTypesenseClient() def search_conversations( diff --git a/backend/utils/other/storage.py b/backend/utils/other/storage.py index 8089b9a8fa..81893c77b0 100644 --- a/backend/utils/other/storage.py +++ b/backend/utils/other/storage.py @@ -17,12 +17,55 @@ from utils import encryption from database import users as users_db -if os.environ.get('SERVICE_ACCOUNT_JSON'): - service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) - credentials = service_account.Credentials.from_service_account_info(service_account_info) - storage_client = storage.Client(credentials=credentials) -else: - storage_client = storage.Client() +class MockStorageClient: + def bucket(self, name): + return MockBucket(name) + +class MockBucket: + def __init__(self, name): + self.name = name + def blob(self, name): + return MockBlob(name, self.name) + def list_blobs(self, prefix=None): + return [] + +class MockBlob: + def __init__(self, name, bucket_name): + self.name = name + self.bucket_name = bucket_name + self.size = 0 + self.time_created = None + self.metadata = {} + self.cache_control = None + def upload_from_filename(self, filename): + print(f"Mock upload from filename: {filename} to {self.name}") + def upload_from_string(self, data, content_type=None): + print(f"Mock upload from string to {self.name}") + def download_to_filename(self, filename): + print(f"Mock download to {filename} from {self.name}") + def delete(self): + print(f"Mock delete {self.name}") + def exists(self): + return False + def generate_signed_url(self, **kwargs): + return f"http://localhost:8080/_mock_signed_url/{self.bucket_name}/{self.name}" + def reload(self): + pass + def download_as_bytes(self): + return b"" + +from google.auth.exceptions import DefaultCredentialsError + +try: + if os.environ.get('SERVICE_ACCOUNT_JSON'): + service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"]) + credentials = service_account.Credentials.from_service_account_info(service_account_info) + storage_client = storage.Client(credentials=credentials) + else: + storage_client = storage.Client() 
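+    # Either branch raises if credentials are missing or invalid; the except clause below falls back to MockStorageClient.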
+except (DefaultCredentialsError, ValueError, KeyError) as e: + print(f"⚠️ Warning: Google Storage connection failed ({e}). Using MockStorageClient for local dev.") + storage_client = MockStorageClient() speech_profiles_bucket = os.getenv('BUCKET_SPEECH_PROFILES') postprocessing_audio_bucket = os.getenv('BUCKET_POSTPROCESSING') diff --git a/backend/utils/translation.py b/backend/utils/translation.py index 124887624b..8470cdb358 100644 --- a/backend/utils/translation.py +++ b/backend/utils/translation.py @@ -111,8 +111,34 @@ r'\b(' + '|'.join(re.escape(word) for word in _non_lexical_utterances) + r')\b', re.IGNORECASE ) -# Initialize the translation client globally -_client = translate_v3.TranslationServiceClient() +class MockTranslationServiceClient: + def detect_language(self, parent, content, mime_type): + print(f"Mock detect language: {content[:20]}...") + return MockDetectResponse() + + def translate_text(self, contents, parent, mime_type, target_language_code): + print(f"Mock translate text to {target_language_code}") + return MockTranslateResponse(contents[0]) + +class MockDetectResponse: + languages = [] + +class MockTranslateResponse: + def __init__(self, text): + self.translations = [MockTranslation(text)] + +class MockTranslation: + def __init__(self, text): + self.translated_text = text + +from google.auth.exceptions import DefaultCredentialsError + +try: + _client = translate_v3.TranslationServiceClient() +except (DefaultCredentialsError, ValueError, KeyError) as e: + print(f"⚠️ Warning: Google Translation init failed ({e}). Using MockTranslationServiceClient.") + _client = MockTranslationServiceClient() + _parent = f"projects/{PROJECT_ID}/locations/global" _mime_type = "text/plain" diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000..6162fd808b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,57 @@ +services: + backend: + build: + context: . + dockerfile: backend/Dockerfile + restart: always + ports: + - "8088:8080" + env_file: + - .env + depends_on: + - redis + volumes: + - ./data/backend:/app/data + + pusher: + build: + context: . + dockerfile: backend/pusher/Dockerfile + restart: always + ports: + - "8089:8080" + env_file: + - .env + depends_on: + - redis + + frontend: + build: + context: . 
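+      # NEXT_PUBLIC_* values are inlined by Next.js at build time, so they are passed as build args; the ${VAR:-placeholder} defaults let the image build without a real Firebase project.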
+      dockerfile: web/frontend/Dockerfile
+      args:
+        - API_URL=http://localhost:8088
+        - NEXT_PUBLIC_FIREBASE_API_KEY=${NEXT_PUBLIC_FIREBASE_API_KEY:-fake_key_for_build}
+        - NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN=${NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN:-omi-app.firebaseapp.com}
+        - NEXT_PUBLIC_FIREBASE_PROJECT_ID=${NEXT_PUBLIC_FIREBASE_PROJECT_ID:-omi-app}
+        - NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET=${NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET:-omi-app.appspot.com}
+        - NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID=${NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID:-123456789}
+        - NEXT_PUBLIC_FIREBASE_APP_ID=${NEXT_PUBLIC_FIREBASE_APP_ID:-1:123456789:web:abcdef}
+        - NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID=${NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID:-G-ABCDEF}
+    restart: always
+    ports:
+      - "3001:3000"
+    env_file:
+      - .env
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    depends_on:
+      - backend
+
+  redis:
+    image: redis:alpine
+    restart: always
+    ports:
+      - "6380:6379"
+    volumes:
+      - ./data/redis:/data
diff --git a/docs/doc/developer/one-click-setup.mdx b/docs/doc/developer/one-click-setup.mdx
new file mode 100644
index 0000000000..75272e4caf
--- /dev/null
+++ b/docs/doc/developer/one-click-setup.mdx
@@ -0,0 +1,65 @@
+---
+title: 'One-Click Local Development Setup'
+description: 'Run the entire Omi backend locally with Docker, no API keys required.'
+---
+
+This guide provides instructions for setting up the Omi backend locally using Docker Compose,
+with mock services for external dependencies. This allows for a full-featured development
+environment without needing to configure cloud API keys.
+
+## Prerequisites
+
+* **Docker Desktop:** Ensure Docker is installed and running on your system.
+  * [Install Docker for Windows](https://docs.docker.com/desktop/install/windows-install/)
+  * [Install Docker for Mac](https://docs.docker.com/desktop/install/mac-install/)
+  * [Install Docker for Linux](https://docs.docker.com/desktop/install/linux-install/)
+
+## Setup
+
+Navigate to the root directory of your Omi repository:
+
+```bash
+cd /path/to/omi
+```
+
+### Linux / macOS
+
+Run the setup script from the root:
+
+```bash
+./setup.sh
+```
+
+### Windows (PowerShell)
+
+Open PowerShell in the root directory and run the setup script:
+
+```powershell
+./setup.ps1
+```
+
+---
+
+The setup script will:
+1. Check for Docker and Docker Compose.
+2. If `.env` doesn't exist, copy `.env.example` to `.env` and generate secure random keys for `ADMIN_KEY` and `ENCRYPTION_SECRET`.
+3. Prompt you to edit `.env` and add API keys for Deepgram and OpenAI (required for real transcription and LLM features).
+4. Build and start all Omi services with Docker Compose.
+
+## Accessing Services
+
+Once the services are up, you can access:
+
+* **Frontend:** `http://localhost:3001`
+* **Backend API:** `http://localhost:8088`
+* **Pusher Service:** `http://localhost:8089`
+
+## Data Persistence
+
+Local data (memories, tasks, etc.) for `PersistentMockFirestore` is persisted to `./data/backend/firestore_mock.json` on your host machine. Redis data is persisted to `./data/redis`. Both paths are bind mounts defined in `docker-compose.yml`.
+
+## Managing Services
+
+* **View Logs:** `docker compose logs -f`
+* **Stop Services:** `docker compose down`
+* **Restart Services:** `docker compose restart`
diff --git a/setup.ps1 b/setup.ps1
new file mode 100755
index 0000000000..6ae6d2ffb6
--- /dev/null
+++ b/setup.ps1
@@ -0,0 +1,63 @@
+#!/usr/bin/env pwsh
+
+# Omi One-Click Setup Script for Windows (PowerShell)
+# Designed for backend developers and customers with low technical expertise.
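+# Note: the random key generation below relies on [System.Convert]::ToHexString and Get-Random -Count, which require PowerShell 7.1+ (pwsh); it will not work in Windows PowerShell 5.1.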
+
+$ErrorActionPreference = "Stop"
+
+Write-Host "==========================================" -ForegroundColor Cyan
+Write-Host "    🚀 Omi One-Click Setup (Docker)       " -ForegroundColor Cyan
+Write-Host "==========================================" -ForegroundColor Cyan
+
+# Check for Docker
+if (-Not (Get-Command docker -ErrorAction SilentlyContinue)) {
+    Write-Host "❌ Error: Docker is not installed. Please install Docker Desktop: https://docs.docker.com/get-docker/" -ForegroundColor Red
+    exit 1
+}
+
+# Check for Docker Compose (the "docker compose" v2 plugin used below)
+if (-Not (docker compose version)) {
+    Write-Host "❌ Error: Docker Compose is not available. Please install or update Docker Desktop." -ForegroundColor Red
+    exit 1
+}
+
+# Create .env if it doesn't exist
+if (-Not (Test-Path .env)) {
+    Write-Host "📄 Creating .env from .env.example..." -ForegroundColor Yellow
+    Copy-Item .env.example .env
+
+    # Generate secure random secrets
+    $ADMIN_KEY = [System.Convert]::ToHexString((Get-Random -Count 32 -Minimum 0 -Maximum 256 | ForEach-Object { [byte]$_ }))
+    $ENCRYPTION_SECRET = [System.Convert]::ToHexString((Get-Random -Count 32 -Minimum 0 -Maximum 256 | ForEach-Object { [byte]$_ }))
+
+    # Update .env with generated secrets
+    (Get-Content .env) -replace "ADMIN_KEY=.*", "ADMIN_KEY=$ADMIN_KEY" | Set-Content .env
+    (Get-Content .env) -replace "ENCRYPTION_SECRET=.*", "ENCRYPTION_SECRET=$ENCRYPTION_SECRET" | Set-Content .env
+
+    Write-Host "✅ Generated secure random keys for ADMIN_KEY and ENCRYPTION_SECRET." -ForegroundColor Green
+    Write-Host "⚠️ Action Required: Please edit the .env file and add your API keys." -ForegroundColor Yellow
+    Write-Host "   At minimum, you need: DEEPGRAM_API_KEY and OPENAI_API_KEY." -ForegroundColor Yellow
+
+    # Optional: try to open the editor (notepad for Windows)
+    $response = Read-Host "Would you like to edit .env now? (y/n)"
+    if ($response -eq "y" -or $response -eq "Y") {
+        notepad .env
+    }
+}
+
+# Build and Start
+Write-Host "🛠️ Building and starting Omi services..." -ForegroundColor Blue
+docker compose up -d --build
+
+Write-Host "" -ForegroundColor Cyan
+Write-Host "==========================================" -ForegroundColor Cyan
+Write-Host "✅ Omi is now starting up!" -ForegroundColor Green
+Write-Host "" -ForegroundColor Cyan
+Write-Host "Services available at:" -ForegroundColor Cyan
+Write-Host "👉 Frontend: http://localhost:3001" -ForegroundColor Cyan
+Write-Host "👉 Backend: http://localhost:8088" -ForegroundColor Cyan
+Write-Host "👉 Pusher: http://localhost:8089" -ForegroundColor Cyan
+Write-Host "" -ForegroundColor Cyan
+Write-Host "To view logs, run: docker compose logs -f" -ForegroundColor DarkGray
+Write-Host "To stop Omi, run: docker compose down" -ForegroundColor DarkGray
+Write-Host "==========================================" -ForegroundColor Cyan
diff --git a/setup.sh b/setup.sh
new file mode 100755
index 0000000000..91cd9e9ae4
--- /dev/null
+++ b/setup.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+
+# Omi One-Click Setup Script
+# Designed for backend developers and customers with low technical expertise.
+
+set -e
+
+echo "=========================================="
+echo "    🚀 Omi One-Click Setup (Docker)       "
+echo "=========================================="
+
+# Check for Docker
+if ! command -v docker &> /dev/null; then
+    echo "❌ Error: Docker is not installed. Please install Docker first: https://docs.docker.com/get-docker/"
+    exit 1
+fi
+
+# Check for Docker Compose
+if ! docker compose version &> /dev/null; then
+    echo "❌ Error: Docker Compose is not installed. Please install it or use a newer version of Docker Desktop."
+    exit 1
+fi
+
+# Create .env if it doesn't exist
+if [ ! -f .env ]; then
+    echo "📄 Creating .env from .env.example..."
+    cp .env.example .env
+
+    # Generate secure random secrets
+    ADMIN_KEY=$(openssl rand -hex 32)
+    ENCRYPTION_SECRET=$(openssl rand -hex 32)
+
+    # Update .env with generated secrets (cross-platform compatible)
+    sed "s/ADMIN_KEY=.*/ADMIN_KEY=$ADMIN_KEY/" .env > .env.tmp && mv .env.tmp .env
+    sed "s/ENCRYPTION_SECRET=.*/ENCRYPTION_SECRET=$ENCRYPTION_SECRET/" .env > .env.tmp && mv .env.tmp .env
+
+    echo "✅ Generated secure random keys for ADMIN_KEY and ENCRYPTION_SECRET."
+    echo "⚠️ Action Required: Please edit the .env file and add your API keys."
+    echo "   At minimum, you need: DEEPGRAM_API_KEY and OPENAI_API_KEY."
+
+    # Optional: try to open the editor
+    if command -v nano &> /dev/null; then
+        read -p "Would you like to edit .env now? (y/n) " -n 1 -r
+        echo
+        if [[ $REPLY =~ ^[Yy]$ ]]; then
+            nano .env
+        fi
+    fi
+fi
+
+# Build and Start
+echo "🛠️ Building and starting Omi services..."
+docker compose up -d --build
+
+echo ""
+echo "=========================================="
+echo "✅ Omi is now starting up!"
+echo ""
+echo "Services available at:"
+echo "👉 Frontend: http://localhost:3001"
+echo "👉 Backend: http://localhost:8088"
+echo "👉 Pusher: http://localhost:8089"
+echo ""
+echo "To view logs, run: docker compose logs -f"
+echo "To stop Omi, run: docker compose down"
+echo "=========================================="