Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,093 changes: 1,093 additions & 0 deletions package-lock.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
"node": ">=20.0.0"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.27.1",
"chalk": "^5.6.2",
"commander": "^14.0.3",
"dotenv": "^17.3.1",
Expand Down
154 changes: 154 additions & 0 deletions soshi.manifest.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
{
"id": "dev",
"name": "Coding",
"icon": "code",
"color": "#64D2FF",
"description": "AI-powered web development toolkit: Lighthouse audits, SEO analysis, site crawling, deployment management, and issue tracking",
"configSchema": "./src/config/schema.ts",
"defaults": "./src/config/defaults.ts",
"mcpServer": "./src/mcp/server.ts",
"toolsPath": "./ui/src/lib/ai/tools/index.ts",
"dataDir": "~/.dev/data/",

"configFields": [
{ "key": "defaultCategories", "label": "Default Audit Categories", "type": "multiselect", "group": "General", "options": ["performance", "seo", "accessibility", "best-practices"], "defaultValue": ["performance", "seo", "accessibility", "best-practices"] },
{ "key": "crawlMaxPages", "label": "Crawl Max Pages", "type": "slider", "group": "General", "min": 1, "max": 200, "step": 10, "defaultValue": 50 },
{ "key": "crawlRateLimit", "label": "Crawl Rate Limit (ms)", "type": "slider", "group": "General", "min": 100, "max": 5000, "step": 100, "defaultValue": 1000 },
{ "key": "respectRobotsTxt", "label": "Respect robots.txt", "type": "boolean", "group": "General", "defaultValue": true }
],

"credentials": [
{ "key": "anthropicApiKey", "label": "Claude API Key", "env": "ANTHROPIC_API_KEY" },
{ "key": "googleApiKey", "label": "Google API Key", "env": "GOOGLE_API_KEY" }
],

"events": [
{
"name": "audit_complete",
"description": "A Lighthouse or SEO audit finished",
"schema": {
"url": "string",
"auditType": "string",
"score": "number",
"projectId": "string"
}
},
{
"name": "deploy_triggered",
"description": "A deployment was triggered for a project",
"schema": {
"deploymentId": "string",
"projectId": "string",
"environment": "string",
"status": "string"
}
},
{
"name": "crawl_complete",
"description": "A site crawl finished discovering pages",
"schema": {
"url": "string",
"pageCount": "number",
"brokenLinks": "number",
"projectId": "string"
}
},
{
"name": "issue_created",
"description": "A new issue or ticket was created",
"schema": {
"issueId": "string",
"title": "string",
"status": "string",
"priority": "string",
"projectId": "string"
}
},
{
"name": "deploy_failed",
"description": "A deployment failed",
"schema": {
"deploymentId": "string",
"projectId": "string",
"environment": "string",
"error": "string"
}
}
],

"actions": [
{
"name": "run_lighthouse",
"description": "Run a full Lighthouse audit via PageSpeed Insights",
"tool": "run_lighthouse",
"params": {
"url": "string",
"projectId?": "string"
}
},
{
"name": "trigger_deploy",
"description": "Trigger a new deployment for a project",
"tool": "trigger_deploy",
"params": {
"projectId": "string",
"environment?": "string",
"branch?": "string"
}
},
{
"name": "crawl_site",
"description": "Crawl a site to discover pages, check status codes and links",
"tool": "crawl_site",
"params": {
"url": "string",
"maxPages?": "number",
"projectId?": "string"
}
},
{
"name": "analyze_seo",
"description": "Full SEO analysis including meta tags, OG, headings, and structured data",
"tool": "analyze_seo",
"params": {
"url": "string"
}
},
{
"name": "create_issue",
"description": "Create a new issue or ticket in a project",
"tool": "create_issue",
"params": {
"title": "string",
"description?": "string",
"projectId?": "string",
"priority?": "string",
"labels?": "string[]"
}
},
{
"name": "find_broken_links",
"description": "Check every link on a page for 404s and timeouts",
"tool": "find_broken_links",
"params": {
"url": "string"
}
}
],

"routerKeywords": [
"audit", "audits",
"deploy", "deployment", "deployments",
"lighthouse", "pagespeed",
"crawl", "crawling", "spider",
"repo", "repos", "repository",
"issue", "issues", "ticket", "bug",
"seo", "meta", "headings",
"site", "website",
"performance", "vitals", "lcp", "cls",
"build", "builds",
"broken", "links", "404",
"sitemap", "robots",
"branch", "commit", "pr"
]
}
18 changes: 18 additions & 0 deletions src/events/emitter.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
type EventCallback = (eventName: string, data: Record<string, unknown>) => void;

/**
 * Minimal synchronous pub/sub hub used to bridge store mutations
 * (audits, crawls, deployments, issues) to the MCP server layer.
 */
class SoshiEventEmitter {
  // Each subscribe() call owns its own slot: the same callback may be
  // registered more than once, and each subscription is removable on its own.
  private listeners: EventCallback[] = [];

  /**
   * Register a listener for all events.
   *
   * @returns an unsubscribe function that removes only THIS subscription and
   *          is safe to call multiple times. (The previous filter-based
   *          removal dropped every occurrence of a callback that had been
   *          subscribed more than once.)
   */
  subscribe(cb: EventCallback): () => void {
    this.listeners.push(cb);
    let active = true;
    return () => {
      if (!active) return; // idempotent: a second call must not remove a sibling subscription
      active = false;
      const idx = this.listeners.indexOf(cb);
      if (idx !== -1) this.listeners.splice(idx, 1);
    };
  }

  /**
   * Synchronously invoke every listener with the event name and payload.
   * A throwing listener is logged and does not block delivery to the rest.
   */
  emit(eventName: string, data: Record<string, unknown>): void {
    // Iterate over a snapshot so a listener that (un)subscribes during emit
    // cannot perturb this delivery pass.
    for (const cb of [...this.listeners]) {
      try {
        cb(eventName, data);
      } catch (e) {
        console.error('[SoshiEvents] Listener error:', e);
      }
    }
  }
}

// Module-level singleton shared by the stores and the MCP server bridge.
export const soshiEvents = new SoshiEventEmitter();
51 changes: 51 additions & 0 deletions src/mcp/server.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
#!/usr/bin/env node
/**
 * Dev MCP Server
 *
 * Thin wrapper that re-exports Vercel AI SDK tools as MCP tools.
 * Runs as a child process over stdio transport.
 *
 * Usage:
 *   npx tsx src/mcp/server.ts
 */

import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { allTools } from "../../ui/src/lib/ai/tools/index.js";
import { soshiEvents } from '../events/emitter.js';

const server = new McpServer({
  name: "dev",
  version: "1.0.0",
});

// Register every Vercel AI SDK tool as an MCP tool. The AI SDK tool shape
// (description / parameters / execute) maps onto the MCP handler contract.
// NOTE(review): `parameters?.shape` assumes each tool schema is a zod object
// whose `.shape` the SDK accepts as a raw schema — confirm for every tool.
for (const [name, t] of Object.entries(allTools)) {
  const aiTool = t as any;
  server.tool(
    name,
    aiTool.description || name,
    aiTool.parameters?.shape ?? {},
    async (args: Record<string, unknown>) => {
      try {
        const result = await aiTool.execute(args);
        return {
          content: [{ type: "text", text: JSON.stringify(result) }],
        };
      } catch (err) {
        // Report tool failures as MCP tool errors (isError) instead of
        // letting the exception escape and fail the request at the
        // protocol level.
        const message = err instanceof Error ? err.message : String(err);
        return {
          content: [{ type: "text", text: `Tool '${name}' failed: ${message}` }],
          isError: true,
        };
      }
    }
  );
}

// Bridge soshi events to MCP notifications. notification() is async; log and
// swallow failures (e.g. an emit before the transport is connected) so an
// unhandled rejection cannot take down the server process.
soshiEvents.subscribe((eventName, data) => {
  void server.server
    .notification({ method: 'notifications/event', params: { event: eventName, data } })
    .catch((err) => {
      console.error('[MCP] Failed to forward event notification:', err);
    });
});

/** Connect the server to stdio and start serving requests. */
async function main(): Promise<void> {
  const transport = new StdioServerTransport();
  await server.connect(transport);
}

main().catch((err) => {
  console.error("Dev MCP server failed to start:", err);
  process.exit(1);
});
6 changes: 5 additions & 1 deletion ui/src/lib/ai/tools/audit.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,11 @@ async function fetchPageSpeed(url: string, categories: string[]) {

const resp = await fetch(`${PAGESPEED_API}?${params}`);
if (!resp.ok) throw new Error(`PageSpeed API error: ${resp.status} ${resp.statusText}`);
return resp.json();
try {
return await resp.json();
} catch {
throw new Error(`PageSpeed API returned invalid JSON for ${url}`);
}
}

function extractScores(data: Record<string, unknown>): { performance: number; seo: number; accessibility: number; bestPractices: number } {
Expand Down
16 changes: 16 additions & 0 deletions ui/src/lib/ai/tools/crawl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,14 @@ async function fetchWithStatus(url: string, maxRedirects = 5): Promise<{ finalUr
let currentUrl = url;

for (let i = 0; i < maxRedirects; i++) {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 15000);
const resp = await fetch(currentUrl, {
redirect: "manual",
headers: { "User-Agent": "DevClient-Crawler/1.0", Accept: "text/html" },
signal: controller.signal,
});
clearTimeout(timeout);

if (resp.status >= 300 && resp.status < 400) {
redirectChain.push(currentUrl);
Expand Down Expand Up @@ -187,11 +191,15 @@ export const crawlTools = {

for (const link of uniqueLinks.slice(0, 50)) {
try {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 15000);
const resp = await fetch(link.href, {
method: "HEAD",
headers: { "User-Agent": "DevClient-LinkChecker/1.0" },
redirect: "follow",
signal: controller.signal,
});
clearTimeout(timeout);
if (resp.status >= 400) {
broken.push({
url: link.href,
Expand Down Expand Up @@ -239,10 +247,14 @@ export const crawlTools = {
}
seen.add(currentUrl);

const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 15000);
const resp = await fetch(currentUrl, {
redirect: "manual",
headers: { "User-Agent": "DevClient-RedirectChecker/1.0" },
signal: controller.signal,
});
clearTimeout(timeout);

chain.push({ url: currentUrl, status: resp.status });

Expand Down Expand Up @@ -286,9 +298,13 @@ export const crawlTools = {
visited.add(normalized);

try {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 15000);
const resp = await fetch(currentUrl, {
headers: { "User-Agent": "DevClient-Sitemap/1.0", Accept: "text/html" },
signal: controller.signal,
});
clearTimeout(timeout);
if (!resp.ok) continue;

urls.push(normalized);
Expand Down
2 changes: 2 additions & 0 deletions ui/src/lib/stores/audits.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import { getDataDir } from "./paths";
import { soshiEvents } from '../../../../src/events/emitter.js';

export interface AuditScores {
performance: number;
Expand Down Expand Up @@ -59,6 +60,7 @@ export async function addAudit(data: Omit<AuditResult, "id" | "timestamp">): Pro
};
items.unshift(audit);
await saveAll(items);
soshiEvents.emit('audit_complete', { url: audit.url, auditType: audit.source ?? 'lighthouse', score: audit.scores?.performance ?? 0, projectId: audit.projectId ?? '' });
return audit;
}

Expand Down
2 changes: 2 additions & 0 deletions ui/src/lib/stores/crawls.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import { getDataDir } from "./paths";
import { soshiEvents } from '../../../../src/events/emitter.js';

export interface CrawledPage {
url: string;
Expand Down Expand Up @@ -61,6 +62,7 @@ export async function addCrawl(data: Omit<SiteCrawl, "id" | "timestamp">): Promi
};
items.unshift(crawl);
await saveAll(items);
soshiEvents.emit('crawl_complete', { url: crawl.rootUrl, pageCount: crawl.totalPages ?? crawl.pages?.length ?? 0, brokenLinks: crawl.brokenLinks?.length ?? 0, projectId: crawl.projectId ?? '' });
return crawl;
}

Expand Down
6 changes: 6 additions & 0 deletions ui/src/lib/stores/deployments.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import { getDataDir } from "./paths";
import { soshiEvents } from '../../../../src/events/emitter.js';

export type DeployEnvironment = "production" | "staging" | "preview" | "dev";
export type DeployStatus = "building" | "deploying" | "live" | "failed" | "rolled-back";
Expand Down Expand Up @@ -56,6 +57,7 @@ export async function createDeployment(data: {
};
items.push(deployment);
await saveAll(items);
soshiEvents.emit('deploy_triggered', { deploymentId: deployment.id, projectId: deployment.projectId, environment: deployment.environment, status: deployment.status });
return deployment;
}

Expand All @@ -78,6 +80,10 @@ export async function updateDeployment(id: string, updates: Partial<Omit<Deploym
if (idx === -1) return undefined;
items[idx] = { ...items[idx], ...updates, updatedAt: new Date().toISOString() };
await saveAll(items);
const updated = items[idx];
if (updates.status === 'failed') {
soshiEvents.emit('deploy_failed', { deploymentId: id, projectId: updated.projectId, environment: updated.environment, error: updates.failureReason ?? 'Unknown error' });
}
return items[idx];
}

Expand Down
2 changes: 2 additions & 0 deletions ui/src/lib/stores/issues.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import { getDataDir } from "./paths";
import { soshiEvents } from '../../../../src/events/emitter.js';

export type IssueStatus = "open" | "in-progress" | "closed";
export type IssuePriority = "low" | "medium" | "high" | "critical";
Expand Down Expand Up @@ -68,6 +69,7 @@ export async function createIssue(data: {
};
items.push(issue);
await saveAll(items);
soshiEvents.emit('issue_created', { issueId: issue.id, title: issue.title, status: issue.status, priority: issue.priority, projectId: issue.projectId ?? '' });
return issue;
}

Expand Down