Skip to content
48 changes: 48 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# ─── Server ──────────────────────────────────────────────────────────────────
PORT=3000

# Set to "true" to enable /audit-log and /audit-log/schema endpoints
AUDIT_LOG_ENABLED=false

# ─── Azure OpenAI ─────────────────────────────────────────────────────────────
# Your Azure OpenAI resource endpoint (e.g. https://<resource>.openai.azure.com)
AZURE_OPENAI_ENDPOINT=https://<your-resource>.openai.azure.com
# API key from your Azure OpenAI resource > Keys and Endpoint
AZURE_OPENAI_KEY=<your-azure-openai-key>
# Chat/completions deployment name (must match the deployment name in Azure AI Studio, not the base model name)
AZURE_OPENAI_DEPLOYMENT=gpt-4o
# Embeddings deployment name
AZURE_OPENAI_EMBEDDING_DEPLOYMENT=text-embedding-ada-002
# Azure OpenAI REST API version
AZURE_OPENAI_API_VERSION=2024-12-01-preview

# ─── Azure AI Search ──────────────────────────────────────────────────────────
# Endpoint for your Azure AI Search service (e.g. https://<service>.search.windows.net)
AZURE_SEARCH_ENDPOINT=https://<your-search-service>.search.windows.net
# Admin or query API key from your Search service > Keys
AZURE_SEARCH_KEY=<your-search-api-key>
# Name of the search index (must match between runtime and ingest script)
AZURE_SEARCH_INDEX=cg-knowledge-index
# Azure AI Search REST API version (used by root searchService.js)
AZURE_SEARCH_API_VERSION=2023-11-01

# ─── Azure Blob Storage ───────────────────────────────────────────────────────
# Full connection string from Storage Account > Access Keys
AZURE_BLOB_CONNECTION_STRING=DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>;EndpointSuffix=core.windows.net
# Container name (used by both document storage and audit log service)
# Document upload/ingest uses this container for raw documents; audit service appends to audit_log_live.jsonl here
AZURE_BLOB_CONTAINER_NAME=<your-container-name>

# ─── Azure Document Intelligence ─────────────────────────────────────────────
# Required for PDF text extraction during document ingestion
AZURE_DOCUMENT_INTELLIGENCE_ENDPOINT=https://<your-resource>.cognitiveservices.azure.com
AZURE_DOCUMENT_INTELLIGENCE_API_KEY=<your-doc-intelligence-key>

# ─── Audit log blob name (optional) ──────────────────────────────────────────
# Defaults to "audit_log_live.jsonl" — matches the blob already in the storage container
AUDIT_LOG_BLOB=audit_log_live.jsonl

# ─── Frontend ─────────────────────────────────────────────────────────────────
# Backend API base URL consumed by Vite (frontend/src/App.jsx via import.meta.env.VITE_API_URL)
# Set this in frontend/.env for local dev, or in your deployment environment
VITE_API_URL=http://localhost:3000
39 changes: 27 additions & 12 deletions backend/config/azureConfig.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,26 @@ function warnMissing(service, vars) {
}

// ─── Blob ─────────────────────────────────────────────────────────────────────
const blobConnString =
process.env.AZURE_BLOB_CONNECTION_STRING ||
process.env.AZURE_STORAGE_CONNECTION_STRING;

let blobServiceClient = null;
let containerClient = null;
if (process.env.AZURE_STORAGE_CONNECTION_STRING) {
blobServiceClient = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
containerClient = blobServiceClient.getContainerClient(process.env.AZURE_BLOB_CONTAINER_NAME || "raw-documents");

if (blobConnString) {
blobServiceClient = BlobServiceClient.fromConnectionString(blobConnString);
containerClient = blobServiceClient.getContainerClient(
process.env.AZURE_BLOB_CONTAINER_NAME || "raw-documents"
);
} else {
warnMissing("Blob Storage", ["AZURE_STORAGE_CONNECTION_STRING"]);
warnMissing("Azure Blob Storage", ["AZURE_BLOB_CONNECTION_STRING", "AZURE_STORAGE_CONNECTION_STRING"]);
}

// ─── Search ───────────────────────────────────────────────────────────────────
let searchIndexClient = null;
let searchClient = null;

if (process.env.AZURE_SEARCH_ENDPOINT && process.env.AZURE_SEARCH_KEY) {
searchIndexClient = new SearchIndexClient(
process.env.AZURE_SEARCH_ENDPOINT,
Expand All @@ -37,22 +45,29 @@ if (process.env.AZURE_SEARCH_ENDPOINT && process.env.AZURE_SEARCH_KEY) {
new AzureKeyCredential(process.env.AZURE_SEARCH_KEY)
);
} else {
warnMissing("Azure Search", ["AZURE_SEARCH_ENDPOINT", "AZURE_SEARCH_KEY"]);
warnMissing("Azure Cognitive Search", ["AZURE_SEARCH_ENDPOINT", "AZURE_SEARCH_KEY"]);
}

// ─── OpenAI ───────────────────────────────────────────────────────────────────
// Support both AZURE_OPENAI_API_KEY (Azure SDK convention) and AZURE_OPENAI_KEY (repo legacy)
const azureOpenAiKey = process.env.AZURE_OPENAI_API_KEY || process.env.AZURE_OPENAI_KEY;
let openaiClient = null;
if (process.env.AZURE_OPENAI_ENDPOINT && azureOpenAiKey) {
const openaiApiKey = process.env.AZURE_OPENAI_KEY || process.env.AZURE_OPENAI_API_KEY;

if (process.env.AZURE_OPENAI_ENDPOINT && openaiApiKey) {
openaiClient = new OpenAIClient(
process.env.AZURE_OPENAI_ENDPOINT,
new OAICredential(azureOpenAiKey)
new OAICredential(openaiApiKey)
);
} else {
warnMissing("Azure OpenAI", ["AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_API_KEY (or AZURE_OPENAI_KEY)"]);
const missingVars = [];
if (!process.env.AZURE_OPENAI_ENDPOINT) {
missingVars.push("AZURE_OPENAI_ENDPOINT");
}
if (!process.env.AZURE_OPENAI_KEY && !process.env.AZURE_OPENAI_API_KEY) {
missingVars.push("AZURE_OPENAI_KEY or AZURE_OPENAI_API_KEY");
}
if (missingVars.length > 0) {
warnMissing("Azure OpenAI", missingVars);
}
}

// ─── Document Intelligence ────────────────────────────────────────────────────
let docIntelligenceClient = null;
if (process.env.AZURE_DOCUMENT_INTELLIGENCE_ENDPOINT && process.env.AZURE_DOCUMENT_INTELLIGENCE_API_KEY) {
Expand Down
2 changes: 1 addition & 1 deletion backend/routes/documents.js
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ router.post('/upload', upload.single('file'), async (req, res) => {
if (!blobServiceClient) {
return res.status(503).json({
success: false,
error: "Blob Storage service is not configured. Set AZURE_STORAGE_CONNECTION_STRING in your environment."
error: "Blob Storage service is not configured. Set AZURE_BLOB_CONNECTION_STRING (or AZURE_STORAGE_CONNECTION_STRING) in your environment."
});
}

Expand Down
2 changes: 1 addition & 1 deletion backend/services/auditService.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ const LOCAL_AUDIT_SCHEMA = {
let cachedSchema = null;

/**
 * Determine whether the audit service should fall back to the local JSONL file
 * instead of Azure Blob Storage.
 *
 * Blob Storage is considered configured when EITHER the current
 * AZURE_BLOB_CONNECTION_STRING or the legacy AZURE_STORAGE_CONNECTION_STRING
 * environment variable is set — the same dual-variable fallback the blob and
 * audit services use when building their clients. Checking only the new
 * variable would silently bypass Blob Storage for environments still on the
 * legacy name.
 *
 * @returns {boolean} true when no blob connection string is configured and the
 *   local-file fallback should be used.
 */
function isLocalFallbackEnabled() {
  return !(
    process.env.AZURE_BLOB_CONNECTION_STRING ||
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
}

async function ensureLocalDataDir() {
Expand Down
6 changes: 4 additions & 2 deletions backend/services/blobService.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
const { BlobServiceClient } = require("@azure/storage-blob");

const connectionString = process.env.AZURE_STORAGE_CONNECTION_STRING;
const connectionString =
process.env.AZURE_BLOB_CONNECTION_STRING ||
process.env.AZURE_STORAGE_CONNECTION_STRING;
const containerName = process.env.AZURE_BLOB_CONTAINER_NAME || "audit";

/**
 * Create a BlobServiceClient from the module-level connection string
 * (AZURE_BLOB_CONNECTION_STRING, falling back to the legacy
 * AZURE_STORAGE_CONNECTION_STRING).
 *
 * @returns {BlobServiceClient} client bound to the configured account.
 * @throws {Error} when neither connection-string env var is set.
 */
function getBlobServiceClient() {
  if (connectionString) {
    return BlobServiceClient.fromConnectionString(connectionString);
  }
  throw new Error("AZURE_BLOB_CONNECTION_STRING (or AZURE_STORAGE_CONNECTION_STRING) is required");
}
Expand Down
2 changes: 1 addition & 1 deletion backend/services/openaiService.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ class OpenAIService {
constructor() {
this.client = openaiClient;
this.embeddingDeployment = process.env.AZURE_OPENAI_EMBEDDING_DEPLOYMENT || "text-embedding-ada-002";
this.completionDeployment = process.env.AZURE_OPENAI_DEPLOYMENT || "gpt-4";
this.completionDeployment = process.env.AZURE_OPENAI_DEPLOYMENT || "gpt-4o";
Copy link

Copilot AI Mar 30, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The default AZURE_OPENAI_DEPLOYMENT fallback is now gpt-4o, but this service calls OpenAIClient.getCompletions(...) (legacy completions API). GPT‑4o deployments are intended for chat-style APIs and are not compatible with the completions endpoint; this default is likely to fail at runtime. Either switch this implementation to getChatCompletions(...) with messages, or keep the default aligned with a completions-compatible instruct deployment (e.g. gpt-35-turbo-instruct).

Suggested change
this.completionDeployment = process.env.AZURE_OPENAI_DEPLOYMENT || "gpt-4o";
this.completionDeployment = process.env.AZURE_OPENAI_DEPLOYMENT || "gpt-35-turbo-instruct";

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@megan-nepshinsky will you check this comment please. GPT‑4o is the correct deployment but I am not sure how your code is setup and don't want to break anything. I am also not sure as to how @steffahv may have wired the chat deployments in the background either. So I just want to make sure before making any changes.

}

/**
Expand Down
3 changes: 3 additions & 0 deletions frontend/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Backend API base URL — used by Vite via import.meta.env.VITE_API_URL
# Copy this file to frontend/.env and update the value for your environment
VITE_API_URL=http://localhost:3000
2 changes: 1 addition & 1 deletion frontend/src/App.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ function App() {
const [loading, setLoading] = useState(false)
const [error, setError] = useState('')

const API_BASE = 'http://localhost:3000'
const API_BASE = import.meta.env.VITE_API_URL || 'http://localhost:3000'

const handleSearch = async () => {
if (!query.trim()) return
Expand Down
6 changes: 4 additions & 2 deletions services/auditService.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@ const fs = require("fs/promises");
const path = require("path");
const { BlobServiceClient } = require("@azure/storage-blob");

const CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
const CONNECTION_STRING =
process.env.AZURE_BLOB_CONNECTION_STRING ||
process.env.AZURE_STORAGE_CONNECTION_STRING;
const CONTAINER_NAME = process.env.AZURE_BLOB_CONTAINER_NAME || "audit";
const AUDIT_LOG_BLOB = process.env.AUDIT_LOG_BLOB || "audit_log_live.jsonl";
const AUDIT_SCHEMA_BLOB = "audit_log_schema.json";
Expand Down Expand Up @@ -66,7 +68,7 @@ async function appendLocalAuditLine(line) {

function getContainerClient() {
if (!CONNECTION_STRING) {
throw new Error("AZURE_STORAGE_CONNECTION_STRING is required");
throw new Error("AZURE_BLOB_CONNECTION_STRING (or AZURE_STORAGE_CONNECTION_STRING) is required");
}

const blobServiceClient = BlobServiceClient.fromConnectionString(CONNECTION_STRING);
Expand Down