43 changes: 43 additions & 0 deletions .env.example
@@ -0,0 +1,43 @@
# Omi One-Click Deployment Configuration
# Copy this to .env and fill in the values

# --- Core API Keys ---
# Required for transcription
DEEPGRAM_API_KEY=
# Required for LLM features
OPENAI_API_KEY=
# Optional: Other LLM providers
ANTHROPIC_API_KEY=

# --- Backend Configuration ---
REDIS_DB_HOST=redis
REDIS_DB_PORT=6379
REDIS_DB_PASSWORD=

# Vector Database (Pinecone is default in current code)
PINECONE_API_KEY=
PINECONE_INDEX_NAME=memories-backend

# Database (Firebase/Firestore is default)
# Provide the JSON content of your service account if using Firestore
SERVICE_ACCOUNT_JSON=

# --- Service URLs (Internal Docker Networking) ---
HOSTED_VAD_API_URL=http://vad:8080/v1/vad
HOSTED_SPEAKER_EMBEDDING_API_URL=http://diarizer:8080
HOSTED_PUSHER_API_URL=http://pusher:8080

# --- Frontend Configuration ---
# Public URL of the backend (use localhost for local dev)
NEXT_PUBLIC_API_URL=http://localhost:8080
# Firebase config for the frontend
NEXT_PUBLIC_FIREBASE_API_KEY=
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN=
NEXT_PUBLIC_FIREBASE_PROJECT_ID=
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET=
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID=
NEXT_PUBLIC_FIREBASE_APP_ID=

# --- Development / Debugging ---
ADMIN_KEY=some_secret_key
ENCRYPTION_SECRET=omi_default_secret_change_me
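
Not part of the diff: a minimal sketch of how a backend service would read these values once Docker Compose (or a local shell) has exported them. The variable names and defaults come from the file above; everything else is illustrative.

# Illustrative only -- reads the variables defined in .env.example from the environment.
import json
import os

deepgram_key = os.environ.get("DEEPGRAM_API_KEY", "")
redis_host = os.environ.get("REDIS_DB_HOST", "redis")
redis_port = int(os.environ.get("REDIS_DB_PORT", "6379"))
vad_url = os.environ.get("HOSTED_VAD_API_URL", "http://vad:8080/v1/vad")

# SERVICE_ACCOUNT_JSON carries the service-account JSON *content*, not a file path,
# so it is parsed directly (mirroring backend/database/_client.py below).
raw_sa = os.environ.get("SERVICE_ACCOUNT_JSON")
service_account_info = json.loads(raw_sa) if raw_sa else None
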
2 changes: 1 addition & 1 deletion .gitignore
@@ -198,5 +198,5 @@ omiGlass/.expo
app/.fvm/
app/android/.kotlin/
.playwright-mcp/

CLAUDE.md
/data/
299 changes: 292 additions & 7 deletions backend/database/_client.py
@@ -2,16 +2,301 @@
import json
import os
import uuid
from typing import Dict, Any, List

from google.cloud import firestore
from google.auth.exceptions import DefaultCredentialsError
from google.cloud.firestore_v1.base_query import FieldFilter, BaseCompositeFilter
from firebase_admin import credentials

if os.environ.get('SERVICE_ACCOUNT_JSON'):
service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"])
# create google-credentials.json
with open('google-credentials.json', 'w') as f:
json.dump(service_account_info, f)
# Constants for local persistence
DATA_DIR = '/app/data'
DB_FILE = os.path.join(DATA_DIR, 'firestore_mock.json')

class PersistentMockFirestore:
_instance = None
_data: Dict[str, Dict[str, Any]] = {}

def __new__(cls):
if cls._instance is None:
cls._instance = super(PersistentMockFirestore, cls).__new__(cls)
cls._instance._load()
return cls._instance

def _load(self):
if os.path.exists(DB_FILE):
try:
with open(DB_FILE, 'r') as f:
self._data = json.load(f)
print(f"✅ Loaded persistent mock data from {DB_FILE}")
except Exception as e:
print(f"⚠️ Failed to load mock data: {e}")
self._data = {}
else:
self._data = {}

def _save(self):
if not os.path.exists(DATA_DIR):
try:
os.makedirs(DATA_DIR)
except OSError:
pass
try:
with open(DB_FILE, 'w') as f:
json.dump(self._data, f, default=str, indent=2)
except Exception as e:
print(f"⚠️ Failed to save mock data: {e}")

def collection(self, name):
if name not in self._data:
self._data[name] = {}
return MockCollection(self, name)

def batch(self):
return MockBatch(self)

class MockBatch:
def __init__(self, db):
self.db = db
self._operations = []

def set(self, ref, data):
self._operations.append(('set', ref, data))

def update(self, ref, data):
self._operations.append(('update', ref, data))

def delete(self, ref):
self._operations.append(('delete', ref, None))

def commit(self):
for op_type, ref, data in self._operations:
if op_type == 'set':
ref.set(data)
elif op_type == 'update':
ref.update(data)
elif op_type == 'delete':
ref.delete()
self._operations = [] # Clear operations after commit

class MockCollection:
def __init__(self, db, name, parent_doc=None):
self.db = db
self.name = name
self.parent_doc = parent_doc
self._filters = []
self._limit = None
self._offset = 0
self._order_by = []

def _get_data(self):
if self.parent_doc:
if self.name not in self.parent_doc._get_data():
self.parent_doc._get_data()[self.name] = {}
return self.parent_doc._get_data()[self.name]
return self.db._data[self.name]

def document(self, doc_id=None):
if doc_id is None:
doc_id = str(uuid.uuid4())
return MockDocument(self.db, self, doc_id)

def add(self, data, doc_id=None):
if doc_id is None:
doc_id = str(uuid.uuid4())
doc = self.document(doc_id)
doc.set(data)
return None, doc

def _apply_filters(self, docs):
filtered_docs = []
for doc in docs:
data = doc._get_data()
if not data: continue

match = True
for f in self._filters:
field, op, value = f
# Handle dot notation for nested fields
val = data
for part in field.split('.'):
if isinstance(val, dict):
val = val.get(part)
else:
val = None
break

if op == '==' and val != value: match = False
elif op == '!=' and val == value: match = False
elif op == '>' and not (val > value if val is not None else False): match = False
elif op == '>=' and not (val >= value if val is not None else False): match = False
elif op == '<' and not (val < value if val is not None else False): match = False
elif op == '<=' and not (val <= value if val is not None else False): match = False
elif op == 'in' and val not in value: match = False
elif op == 'array_contains' and (val is None or value not in val): match = False

if not match: break

if match: filtered_docs.append(doc)
return filtered_docs

def stream(self):
data = self._get_data()
docs = [MockDocument(self.db, self, doc_id) for doc_id in data.keys()]

# Filter
docs = self._apply_filters(docs)

        # Sort: apply the least-significant key first so the first order_by() call wins
        for field, direction in reversed(self._order_by):
            reverse = direction == 'DESCENDING'
            docs.sort(key=lambda x: x._get_data().get(field, ""), reverse=reverse)

# Offset & Limit
if self._offset:
docs = docs[self._offset:]
if self._limit:
docs = docs[:self._limit]

return docs

def get(self):
return [doc.get() for doc in self.stream()]

db = firestore.Client()
def where(self, *args, **kwargs):
# Support both .where("field", "==", "value") and .where(filter=FieldFilter(...))
if 'filter' in kwargs:
f = kwargs['filter']
if isinstance(f, FieldFilter):
                self._filters.append((f.field_path, f.op_string, f.value))
elif isinstance(f, BaseCompositeFilter):
# Basic composite handling (AND only for now)
for sub_filter in f.filters:
if isinstance(sub_filter, FieldFilter):
                        self._filters.append((sub_filter.field_path, sub_filter.op_string, sub_filter.value))
elif len(args) == 3:
self._filters.append(args)
return self

def limit(self, count):
self._limit = count
return self

def offset(self, count):
self._offset = count
return self

def order_by(self, field, direction='ASCENDING'):
self._order_by.append((field, direction))
return self

def count(self):
return MockCountQuery(self)

class MockCountQuery:
def __init__(self, query):
self.query = query

def get(self):
# Return a list containing a list containing an object with a value property
# Firestore count query structure: [[Aggregation(value=count)]]
count = len(self.query.stream())
return [[MockAggregation(count)]]

class MockAggregation:
def __init__(self, value):
self.value = value

class MockDocument:
def __init__(self, db, collection, doc_id):
self.db = db
self.collection = collection
self.id = doc_id

def _get_data(self):
col_data = self.collection._get_data()
if self.id not in col_data:
return None
return col_data[self.id]

def set(self, data):
col_data = self.collection._get_data()
col_data[self.id] = data
self.db._save()

def update(self, data):
current = self._get_data()
if current:
current.update(data)
self.db._save()

def get(self):
data = self._get_data()
return MockSnapshot(self.id, data)

def delete(self):
col_data = self.collection._get_data()
if self.id in col_data:
del col_data[self.id]
self.db._save()

@property
def reference(self):
return self

def collection(self, name):
current = self._get_data()
if current is None:
self.set({})
current = self._get_data()

if '__collections__' not in current:
current['__collections__'] = {}

return MockSubCollection(self.db, name, current['__collections__'])

class MockSubCollection(MockCollection):
def __init__(self, db, name, storage):
super().__init__(db, name)
self.storage = storage

def _get_data(self):
if self.name not in self.storage:
self.storage[self.name] = {}
return self.storage[self.name]

class MockSnapshot:
def __init__(self, doc_id, data):
self.id = doc_id
self._data = data
self.exists = data is not None
self.reference = None

def to_dict(self):
if self._data and '__collections__' in self._data:
d = self._data.copy()
del d['__collections__']
return d
return self._data or {}

# Removed: Writing SERVICE_ACCOUNT_JSON to a file
# Initialize Firestore client directly from JSON string if available
if os.environ.get('SERVICE_ACCOUNT_JSON'):
    try:
        service_account_info = json.loads(os.environ["SERVICE_ACCOUNT_JSON"])
        # firestore.Client needs google.auth credentials; firebase_admin's
        # Certificate wraps one, exposed via get_credential()
        cred = credentials.Certificate(service_account_info)
        db = firestore.Client(
            project=service_account_info.get('project_id'),
            credentials=cred.get_credential(),
        )
    except Exception as e:
        print(f"⚠️ Error initializing Firestore with SERVICE_ACCOUNT_JSON: {e}. Falling back to default/mock.")
        db = PersistentMockFirestore()
else:
    try:
        # Attempt to initialize with default credentials (e.g., gcloud auth application-default login)
        db = firestore.Client()
    except Exception as e:
        print(f"⚠️ Warning: Firestore connection failed ({e}). Using PersistentMockFirestore for local dev.")
        db = PersistentMockFirestore()


def get_users_uid():
@@ -23,4 +308,4 @@ def document_id_from_seed(seed: str) -> uuid.UUID:
"""Avoid repeating the same data"""
seed_hash = hashlib.sha256(seed.encode('utf-8')).digest()
generated_uuid = uuid.UUID(bytes=seed_hash[:16], version=4)
return str(generated_uuid)
return str(generated_uuid)
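
Not part of the diff: a minimal usage sketch of the drop-in db object exported by this module, whether it resolves to the real Firestore client or the PersistentMockFirestore fallback. It assumes the snippet runs from the backend/ directory so the module imports as database._client; results are read via .get(), which returns snapshot-like objects on both paths.

# Illustrative only -- exercises the `db` object from backend/database/_client.py.
from google.cloud.firestore_v1.base_query import FieldFilter

from database._client import db, document_id_from_seed  # assumes backend/ is the working dir

uid = document_id_from_seed("local-dev-user")
users = db.collection("users")

# With the mock active, these writes persist to /app/data/firestore_mock.json.
users.document(uid).set({"name": "Local Dev", "plan": "free"})
users.document(uid).update({"plan": "pro"})

# .get() returns snapshot-like objects (with .id and .to_dict()) from both backends.
matches = users.where(filter=FieldFilter("plan", "==", "pro")).order_by("name").limit(10).get()
for snap in matches:
    print(snap.id, snap.to_dict())

# Aggregation counts follow the same [[result]] shape in both cases.
print("total users:", db.collection("users").count().get()[0][0].value)

Note that the mock's stream() yields document references rather than snapshots, which is why the sketch reads results through .get().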