diff --git a/build/darwin/Taskfile.yml b/build/darwin/Taskfile.yml
index 6f21428..9ed3798 100644
--- a/build/darwin/Taskfile.yml
+++ b/build/darwin/Taskfile.yml
@@ -165,6 +165,7 @@ tasks:
- cp build/darwin/icons.icns {{.BIN_DIR}}/{{.APP_NAME}}.dev.app/Contents/Resources
- cp {{.BIN_DIR}}/{{.APP_NAME}} {{.BIN_DIR}}/{{.APP_NAME}}.dev.app/Contents/MacOS
- cp build/darwin/Info.dev.plist {{.BIN_DIR}}/{{.APP_NAME}}.dev.app/Contents/Info.plist
+ - rm -f {{.BIN_DIR}}/{{.APP_NAME}}.dev.app/Contents/MacOS/*.cstemp
- codesign --force --deep --sign - {{.BIN_DIR}}/{{.APP_NAME}}.dev.app
- "{{.BIN_DIR}}/{{.APP_NAME}}.dev.app/Contents/MacOS/{{.APP_NAME}}"
diff --git a/cmd/serve/serve.go b/cmd/serve/serve.go
index 067e0d4..8eca76f 100644
--- a/cmd/serve/serve.go
+++ b/cmd/serve/serve.go
@@ -84,13 +84,98 @@ var Command = &cli.Command{
mux.HandleFunc(webhookPath, api.NewPolarWebhookHandler(apiService))
slog.Info("serving polar webhook handler", "path", webhookPath)
- // Gemini API proxy endpoint
+ geminiProxyConfig := llmProxyConfig{
+ Provider: "gemini",
+ BaseURL: "https://generativelanguage.googleapis.com",
+ PathPrefix: "/api/v1/gemini",
+ SetupRequest: func(_ *http.Request, targetURL *url.URL, _ *http.Request) error {
+ apiKey := os.Getenv("GEMINI_API_KEY")
+ if apiKey == "" {
+ return fmt.Errorf("missing GEMINI_API_KEY")
+ }
+
+ query := targetURL.Query()
+ query.Set("key", apiKey)
+ targetURL.RawQuery = query.Encode()
+ return nil
+ },
+ ExtractUsage: extractGeminiUsageMetadata,
+ }
+
+ openAIProxyConfig := llmProxyConfig{
+ Provider: "openai",
+ BaseURL: "https://api.openai.com",
+ PathPrefix: "/api/v1/openai",
+ SetupRequest: func(_ *http.Request, _ *url.URL, proxyReq *http.Request) error {
+ apiKey := os.Getenv("OPENAI_API_KEY")
+ if apiKey == "" {
+ return fmt.Errorf("missing OPENAI_API_KEY")
+ }
+
+ proxyReq.Header.Set("Authorization", "Bearer "+apiKey)
+ return nil
+ },
+ ExtractUsage: extractOpenAIUsageMetadata,
+ }
+
+ anthropicProxyConfig := llmProxyConfig{
+ Provider: "anthropic",
+ BaseURL: "https://api.anthropic.com",
+ PathPrefix: "/api/v1/anthropic",
+ SetupRequest: func(_ *http.Request, _ *url.URL, proxyReq *http.Request) error {
+ apiKey := os.Getenv("ANTHROPIC_API_KEY")
+ if apiKey == "" {
+ return fmt.Errorf("missing ANTHROPIC_API_KEY")
+ }
+
+ version := os.Getenv("ANTHROPIC_VERSION")
+ if version == "" {
+ version = "2023-06-01"
+ }
+
+ proxyReq.Header.Set("x-api-key", apiKey)
+ proxyReq.Header.Set("anthropic-version", version)
+ if proxyReq.Header.Get("Content-Type") == "" {
+ proxyReq.Header.Set("Content-Type", "application/json")
+ }
+ return nil
+ },
+ ExtractUsage: extractAnthropicUsageMetadata,
+ }
+
+ grokProxyConfig := llmProxyConfig{
+ Provider: "grok",
+ BaseURL: "https://api.x.ai",
+ PathPrefix: "/api/v1/grok",
+ SetupRequest: func(_ *http.Request, _ *url.URL, proxyReq *http.Request) error {
+ apiKey := os.Getenv("GROK_API_KEY")
+ if apiKey == "" {
+ return fmt.Errorf("missing GROK_API_KEY")
+ }
+
+ proxyReq.Header.Set("Authorization", "Bearer "+apiKey)
+ return nil
+ },
+ ExtractUsage: extractGrokUsageMetadata,
+ }
+
+ // LLM API proxy endpoints
geminiProxyPath := "/api/v1/gemini/"
- mux.HandleFunc(geminiProxyPath, func(w http.ResponseWriter, r *http.Request) {
- geminiProxyHandler(w, r, gormDB)
- })
+ mux.HandleFunc(geminiProxyPath, newLLMProxyHandler(gormDB, geminiProxyConfig))
slog.Info("serving gemini proxy handler", "path", geminiProxyPath)
+ openAIProxyPath := "/api/v1/openai/"
+ mux.HandleFunc(openAIProxyPath, newLLMProxyHandler(gormDB, openAIProxyConfig))
+ slog.Info("serving openai proxy handler", "path", openAIProxyPath)
+
+ anthropicProxyPath := "/api/v1/anthropic/"
+ mux.HandleFunc(anthropicProxyPath, newLLMProxyHandler(gormDB, anthropicProxyConfig))
+ slog.Info("serving anthropic proxy handler", "path", anthropicProxyPath)
+
+ grokProxyPath := "/api/v1/grok/"
+ mux.HandleFunc(grokProxyPath, newLLMProxyHandler(gormDB, grokProxyConfig))
+ slog.Info("serving grok proxy handler", "path", grokProxyPath)
+
slog.Info("serving rpc handler for api v1 service", "path", apiPath)
h2Handler := h2c.NewHandler(mux, &http2.Server{})
@@ -153,152 +238,172 @@ func setupDatabase(url, token string) (*gorm.DB, error) {
-// geminiProxyHandler proxies requests to Google's Generative Language API
-// Requests to /api/v1/gemini/* are forwarded to https://generativelanguage.googleapis.com/*
+// llmProxyConfig describes how to proxy requests to an upstream LLM provider:
+// the base URL to forward to, the path prefix to strip, how to authenticate
+// the outgoing request, and how to extract token usage from the response.
-func geminiProxyHandler(w http.ResponseWriter, r *http.Request, db *gorm.DB) {
- const geminiBaseURL = "https://generativelanguage.googleapis.com"
-
- // Fast-fail: Authenticate
- authHeader := r.Header.Get("Authorization")
- token := strings.TrimSpace(strings.TrimPrefix(authHeader, "Bearer "))
- if token == "" {
- http.Error(w, "Unauthorized: missing token", http.StatusUnauthorized)
- return
- }
+type llmProxyConfig struct {
+ Provider string
+ BaseURL string
+ PathPrefix string
+ SetupRequest func(incomingReq *http.Request, targetURL *url.URL, proxyReq *http.Request) error
+ ExtractUsage func(body []byte) (input int, output int, total int)
+}
- claims, err := api.ValidateToken(token)
- if err != nil {
- http.Error(w, "Unauthorized: invalid token", http.StatusUnauthorized)
- return
- }
+// newLLMProxyHandler proxies requests to a configured upstream LLM provider.
+func newLLMProxyHandler(db *gorm.DB, config llmProxyConfig) http.HandlerFunc {
+ return func(w http.ResponseWriter, r *http.Request) {
- // Rate Limiting
- var requestCount int64
- todayUnix := time.Now().Add(-24 * time.Hour).Unix()
- if err := db.Model(&api.LLMProxyUsage{}).Where("user_id = ? AND created_at >= ?", claims.UserID, todayUnix).Count(&requestCount).Error; err != nil {
- slog.Error("failed to query proxy usage count", "error", err)
- http.Error(w, "Internal Server Error", http.StatusInternalServerError)
- return
- }
+ // Fast-fail: Authenticate
+ authHeader := r.Header.Get("Authorization")
+ token := strings.TrimSpace(strings.TrimPrefix(authHeader, "Bearer "))
+ if token == "" {
+ http.Error(w, "Unauthorized: missing token", http.StatusUnauthorized)
+ return
+ }
- if requestCount >= 5000 {
- slog.Warn("user exceeded daily proxy limit", "user_id", claims.UserID)
- http.Error(w, "Too Many Requests", http.StatusTooManyRequests)
- return
- }
+ claims, err := api.ValidateToken(token)
+ if err != nil {
+ http.Error(w, "Unauthorized: invalid token", http.StatusUnauthorized)
+ return
+ }
- // Strip the proxy prefix to get the target path
- targetPath := strings.TrimPrefix(r.URL.Path, "/api/v1/gemini")
- if targetPath == "" {
- targetPath = "/"
- }
+ // Rate Limiting
+ var requestCount int64
+ todayUnix := time.Now().Add(-24 * time.Hour).Unix()
+ if err := db.Model(&api.LLMProxyUsage{}).Where("user_id = ? AND created_at >= ?", claims.UserID, todayUnix).Count(&requestCount).Error; err != nil {
+ slog.Error("failed to query proxy usage count", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ return
+ }
- // Build the target URL
- targetURL, err := url.Parse(geminiBaseURL + targetPath)
- if err != nil {
- slog.Error("failed to parse target URL", "error", err)
- http.Error(w, "Internal Server Error", http.StatusInternalServerError)
- return
- }
+ if requestCount >= 5000 {
+ slog.Warn("user exceeded daily proxy limit", "user_id", claims.UserID)
+ http.Error(w, "Too Many Requests", http.StatusTooManyRequests)
+ return
+ }
- // Preserve query parameters and append API key
- query := targetURL.Query()
- if r.URL.RawQuery != "" {
- query, _ = url.ParseQuery(r.URL.RawQuery)
- }
- query.Set("key", os.Getenv("GEMINI_API_KEY"))
- targetURL.RawQuery = query.Encode()
+ // Strip the proxy prefix to get the target path
+ targetPath := strings.TrimPrefix(r.URL.Path, config.PathPrefix)
+ if targetPath == "" {
+ targetPath = "/"
+ }
+ if !strings.HasPrefix(targetPath, "/") {
+ targetPath = "/" + targetPath
+ }
- slog.Info("proxying request to Gemini API", "method", r.Method, "target", targetURL.String())
+ // Build the target URL
+ targetURL, err := url.Parse(config.BaseURL + targetPath)
+ if err != nil {
+ slog.Error("failed to parse target URL", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ return
+ }
- // Create the proxy request
- // Create the proxy request using a detached context so the upstream request
- // to Google isn't canceled if the client disconnects mid-flight.
- proxyReq, err := http.NewRequestWithContext(context.WithoutCancel(r.Context()), r.Method, targetURL.String(), r.Body)
- if err != nil {
- slog.Error("failed to create proxy request", "error", err)
- http.Error(w, "Internal Server Error", http.StatusInternalServerError)
- return
- }
+ // Preserve query parameters
+ targetURL.RawQuery = r.URL.RawQuery
- // Copy headers from original request
- for key, values := range r.Header {
- for _, value := range values {
- proxyReq.Header.Add(key, value)
+ slog.Info("proxying request to LLM API", "provider", config.Provider, "method", r.Method, "target", targetURL.String())
+
+ // Create the proxy request using a detached context so the upstream
+ // request to the provider isn't canceled if the client disconnects
+ // mid-flight.
+ proxyReq, err := http.NewRequestWithContext(context.WithoutCancel(r.Context()), r.Method, targetURL.String(), r.Body)
+ if err != nil {
+ slog.Error("failed to create proxy request", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ return
}
- }
- // Remove hop-by-hop headers
- proxyReq.Header.Del("Connection")
- proxyReq.Header.Del("Keep-Alive")
- proxyReq.Header.Del("Proxy-Authenticate")
- proxyReq.Header.Del("Proxy-Authorization")
- proxyReq.Header.Del("Te")
- proxyReq.Header.Del("Trailers")
- proxyReq.Header.Del("Transfer-Encoding")
- proxyReq.Header.Del("Upgrade")
- proxyReq.Header.Del("Authorization")
-
- // Execute the proxy request
- client := &http.Client{Timeout: 120 * time.Second}
- resp, err := client.Do(proxyReq)
- if err != nil {
- slog.Error("failed to execute proxy request", "error", err)
- http.Error(w, "Bad Gateway", http.StatusBadGateway)
- return
- }
- defer resp.Body.Close()
+ // Copy headers from original request
+ for key, values := range r.Header {
+ for _, value := range values {
+ proxyReq.Header.Add(key, value)
+ }
+ }
- // Copy response headers
- for key, values := range resp.Header {
- for _, value := range values {
- w.Header().Add(key, value)
+ // Remove hop-by-hop headers
+ proxyReq.Header.Del("Connection")
+ proxyReq.Header.Del("Keep-Alive")
+ proxyReq.Header.Del("Proxy-Authenticate")
+ proxyReq.Header.Del("Proxy-Authorization")
+ proxyReq.Header.Del("Te")
+ proxyReq.Header.Del("Trailers")
+ proxyReq.Header.Del("Transfer-Encoding")
+ proxyReq.Header.Del("Upgrade")
+ proxyReq.Header.Del("Authorization")
+
+ if config.SetupRequest != nil {
+ if err := config.SetupRequest(r, targetURL, proxyReq); err != nil {
+ slog.Error("failed to setup provider request", "provider", config.Provider, "error", err)
+ http.Error(w, "Bad Gateway", http.StatusBadGateway)
+ return
+ }
+ proxyReq.URL = targetURL
}
- }
- // Copy the response body
- capturedBody, err := io.ReadAll(resp.Body)
- if err != nil {
- slog.Error("failed to read response body", "error", err)
- }
+ // Execute the proxy request
+ client := &http.Client{Timeout: 120 * time.Second}
+ resp, err := client.Do(proxyReq)
+ if err != nil {
+ slog.Error("failed to execute proxy request", "error", err)
+ http.Error(w, "Bad Gateway", http.StatusBadGateway)
+ return
+ }
+ defer resp.Body.Close()
+
+ // Copy response headers
+ for key, values := range resp.Header {
+ for _, value := range values {
+ w.Header().Add(key, value)
+ }
+ }
+
+ // Copy the response body
+ capturedBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ slog.Error("failed to read response body", "error", err)
+ }
- // if the status code is anything >= 400, print an error
- if resp.StatusCode >= 400 {
- logBody := capturedBody
- if resp.Header.Get("Content-Encoding") == "gzip" {
- if gr, err := gzip.NewReader(bytes.NewReader(capturedBody)); err == nil {
- if decompressed, err := io.ReadAll(gr); err == nil {
- logBody = decompressed
+ // if the status code is anything >= 400, print an error
+ if resp.StatusCode >= 400 {
+ logBody := capturedBody
+ if resp.Header.Get("Content-Encoding") == "gzip" {
+ if gr, err := gzip.NewReader(bytes.NewReader(capturedBody)); err == nil {
+ if decompressed, err := io.ReadAll(gr); err == nil {
+ logBody = decompressed
+ }
}
}
+ slog.Error("proxy request failed", "status code", resp.StatusCode, "body", string(logBody))
}
- slog.Error("proxy request failed", "status code", resp.StatusCode, "body", string(logBody))
- }
- // Set the status code
- w.WriteHeader(resp.StatusCode)
+ // Set the status code
+ w.WriteHeader(resp.StatusCode)
- // Synchronously parse tokens and save usage
- inputTokens, outputTokens, totalTokens := extractUsageMetadata(capturedBody)
+ // Synchronously parse tokens and save usage
+ inputTokens, outputTokens, totalTokens := 0, 0, 0
+ if config.ExtractUsage != nil {
+ inputTokens, outputTokens, totalTokens = config.ExtractUsage(capturedBody)
+ }
- usage := api.LLMProxyUsage{
- UserID: claims.UserID,
- CreatedAt: time.Now().Unix(),
- Provider: "gemini",
- InputTokens: inputTokens,
- OutputTokens: outputTokens,
- TotalTokens: totalTokens,
- }
+ usage := api.LLMProxyUsage{
+ UserID: claims.UserID,
+ CreatedAt: time.Now().Unix(),
+ Provider: config.Provider,
+ InputTokens: inputTokens,
+ OutputTokens: outputTokens,
+ TotalTokens: totalTokens,
+ }
- if err := db.Create(&usage).Error; err != nil {
- slog.Error("failed to save LLM proxy usage log", "error", err)
- }
+ if err := db.Create(&usage).Error; err != nil {
+ slog.Error("failed to save LLM proxy usage log", "error", err)
+ }
- // Write the captured body back to the client
- if _, err := w.Write(capturedBody); err != nil {
- slog.Error("failed to write response body", "error", err)
+ // Write the captured body back to the client
+ if _, err := w.Write(capturedBody); err != nil {
+ slog.Error("failed to write response body", "error", err)
+ }
}
}
-func extractUsageMetadata(body []byte) (input int, output int, total int) {
+func extractGeminiUsageMetadata(body []byte) (input int, output int, total int) {
type geminiResponse struct {
UsageMetadata struct {
PromptTokenCount int `json:"promptTokenCount"`
@@ -341,3 +446,93 @@ func extractUsageMetadata(body []byte) (input int, output int, total int) {
return input, output, total
}
+
+func extractOpenAIUsageMetadata(body []byte) (input int, output int, total int) {
+ type openAIResponse struct {
+ Usage struct {
+ PromptTokens int `json:"prompt_tokens"`
+ CompletionTokens int `json:"completion_tokens"`
+ TotalTokens int `json:"total_tokens"`
+ } `json:"usage"`
+ }
+
+ // Try to parse the whole body as a single JSON object (non-streaming)
+ var resp openAIResponse
+ if err := json.Unmarshal(body, &resp); err == nil && resp.Usage.TotalTokens > 0 {
+ return resp.Usage.PromptTokens, resp.Usage.CompletionTokens, resp.Usage.TotalTokens
+ }
+
+ // If it failed or has 0 tokens, it might be an SSE stream.
+ // SSE chunks start with "data: " and end with "\n\n"
+ scanner := bufio.NewScanner(bytes.NewReader(body))
+ buf := make([]byte, 0, 64*1024)
+ scanner.Buffer(buf, 1024*1024)
+
+ for scanner.Scan() {
+ line := scanner.Text()
+ if after, ok := strings.CutPrefix(line, "data: "); ok {
+ jsonStr := strings.TrimSpace(after)
+ if jsonStr == "" || jsonStr == "[DONE]" {
+ continue
+ }
+ var streamResp openAIResponse
+ if err := json.Unmarshal([]byte(jsonStr), &streamResp); err == nil {
+ if streamResp.Usage.TotalTokens > total {
+ input = streamResp.Usage.PromptTokens
+ output = streamResp.Usage.CompletionTokens
+ total = streamResp.Usage.TotalTokens
+ }
+ }
+ }
+ }
+
+ return input, output, total
+}
+
+func extractAnthropicUsageMetadata(body []byte) (input int, output int, total int) {
+ type anthropicResponse struct {
+ Usage struct {
+ InputTokens int `json:"input_tokens"`
+ OutputTokens int `json:"output_tokens"`
+ } `json:"usage"`
+ }
+
+ // Try to parse the whole body as a single JSON object (non-streaming)
+ var resp anthropicResponse
+ if err := json.Unmarshal(body, &resp); err == nil {
+ totalTokens := resp.Usage.InputTokens + resp.Usage.OutputTokens
+ if totalTokens > 0 {
+ return resp.Usage.InputTokens, resp.Usage.OutputTokens, totalTokens
+ }
+ }
+
+ // If it failed or has 0 tokens, it might be an SSE stream.
+ scanner := bufio.NewScanner(bytes.NewReader(body))
+ buf := make([]byte, 0, 64*1024)
+ scanner.Buffer(buf, 1024*1024)
+
+ for scanner.Scan() {
+ line := scanner.Text()
+ if strings.HasPrefix(line, "data: ") {
+ jsonStr := strings.TrimSpace(strings.TrimPrefix(line, "data: "))
+ if jsonStr == "" || jsonStr == "[DONE]" {
+ continue
+ }
+ var streamResp anthropicResponse
+ if err := json.Unmarshal([]byte(jsonStr), &streamResp); err == nil {
+ streamTotal := streamResp.Usage.InputTokens + streamResp.Usage.OutputTokens
+ if streamTotal > total {
+ input = streamResp.Usage.InputTokens
+ output = streamResp.Usage.OutputTokens
+ total = streamTotal
+ }
+ }
+ }
+ }
+
+ return input, output, total
+}
+
+func extractGrokUsageMetadata(body []byte) (input int, output int, total int) {
+ return extractOpenAIUsageMetadata(body)
+}
diff --git a/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventcreate.js b/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventcreate.js
index 444cb31..493eb33 100644
--- a/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventcreate.js
+++ b/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventcreate.js
@@ -14,7 +14,7 @@ function configure() {
Object.freeze(Object.assign($Create.Events, {
"daily-summary:ready": $$createType0,
"protection:status": $$createType1,
- "usage:update": $$createType2,
+ "usage:update": $$createType3,
}));
}
@@ -22,5 +22,6 @@ function configure() {
const $$createType0 = usage$0.LLMDailySummary.createFrom;
const $$createType1 = usage$0.ProtectionPause.createFrom;
const $$createType2 = usage$0.ApplicationUsage.createFrom;
+const $$createType3 = $Create.Nullable($$createType2);
configure();
diff --git a/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventdata.d.ts b/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventdata.d.ts
index 83e18a2..1c39dfd 100644
--- a/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventdata.d.ts
+++ b/frontend/bindings/github.com/wailsapp/wails/v3/internal/eventdata.d.ts
@@ -15,7 +15,7 @@ declare module "@wailsio/runtime" {
"authctx:updated": any;
"daily-summary:ready": usage$0.LLMDailySummary;
"protection:status": usage$0.ProtectionPause;
- "usage:update": usage$0.ApplicationUsage;
+ "usage:update": usage$0.ApplicationUsage | null;
}
}
}
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index fb70eca..330179e 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -27,7 +27,7 @@
"@tanstack/router-plugin": "^1.132.0",
"@types/humanize-duration": "^3.27.4",
"@typescript/vfs": "^1.6.2",
- "@wailsio/runtime": "^3.0.0-alpha.77",
+ "@wailsio/runtime": "^3.0.0-alpha.74",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"comlink": "^4.4.2",
@@ -4035,9 +4035,9 @@
}
},
"node_modules/@wailsio/runtime": {
- "version": "3.0.0-alpha.77",
- "resolved": "https://registry.npmjs.org/@wailsio/runtime/-/runtime-3.0.0-alpha.77.tgz",
- "integrity": "sha512-DMWjT8VFCk8O818mnw2dbrgZilOf1TzmGGp5lemZyGej7g+SSqAhMFOHp9eCiGQ32EbxmGOdTO4aNZVA00j9Nw==",
+ "version": "3.0.0-alpha.74",
+ "resolved": "https://registry.npmjs.org/@wailsio/runtime/-/runtime-3.0.0-alpha.74.tgz",
+ "integrity": "sha512-6N3F6MpLDgLfTRIwgwAzxSrIVtlPICxMYDrs0bz5uUJ58IPCQjcqxWOedoMysFdqVaogi5VmCxXLKRJzI5hW2A==",
"license": "MIT"
},
"node_modules/acorn": {
diff --git a/frontend/package.json b/frontend/package.json
index 2ba5937..f45163a 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -31,7 +31,7 @@
"@tanstack/router-plugin": "^1.132.0",
"@types/humanize-duration": "^3.27.4",
"@typescript/vfs": "^1.6.2",
- "@wailsio/runtime": "^3.0.0-alpha.77",
+ "@wailsio/runtime": "^3.0.0-alpha.74",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"comlink": "^4.4.2",
diff --git a/frontend/src/components/app-sidebar.tsx b/frontend/src/components/app-sidebar.tsx
index c5755cf..3b1bfec 100644
--- a/frontend/src/components/app-sidebar.tsx
+++ b/frontend/src/components/app-sidebar.tsx
@@ -6,7 +6,7 @@ import {
} from "@tabler/icons-react";
import { Link, useMatchRoute } from "@tanstack/react-router";
import { useQuery } from "@tanstack/react-query";
-import { GetVersion } from "../../bindings/github.com/focusd-so/focusd/internal/settings/service";
+import { GetCurrentVersion } from "../../bindings/github.com/focusd-so/focusd/internal/updater/service";
import {
Sidebar,
@@ -44,7 +44,7 @@ export function AppSidebar() {
const matchRoute = useMatchRoute();
const { data: version } = useQuery({
queryKey: ["app-version"],
- queryFn: GetVersion,
+ queryFn: () => (import.meta.env.DEV ? Promise.resolve("dev") : GetCurrentVersion()),
});
return (
diff --git a/frontend/src/components/custom-rules.tsx b/frontend/src/components/custom-rules.tsx
index 7c24397..c2194ab 100644
--- a/frontend/src/components/custom-rules.tsx
+++ b/frontend/src/components/custom-rules.tsx
@@ -7,15 +7,7 @@ import { useAccountStore } from "@/stores/account-store";
import { DeviceHandshakeResponse_AccountTier } from "../../bindings/github.com/focusd-so/focusd/gen/api/v1/models";
import { Browser } from "@wailsio/runtime";
import { Button } from "@/components/ui/button";
-import {
- DropdownMenu,
- DropdownMenuContent,
- DropdownMenuItem,
- DropdownMenuLabel,
- DropdownMenuSeparator,
- DropdownMenuTrigger,
-} from "@/components/ui/dropdown-menu";
-import { IconDeviceFloppy, IconHistory, IconFileText, IconTerminal, IconTestPipe, IconCrown } from "@tabler/icons-react";
+import { IconDeviceFloppy, IconFileText, IconTerminal, IconTestPipe, IconCrown } from "@tabler/icons-react";
import { toast } from "sonner";
import { cn } from "@/lib/utils";
import { ExecutionLogsSheet } from "@/components/execution-logs";
@@ -315,22 +307,10 @@ export function terminationMode(ctx: Context): TerminationDecision | undefined {
}
`;
-function formatDate(timestamp: number): string {
- const date = new Date(timestamp * 1000);
- return date.toLocaleDateString(undefined, {
- month: "short",
- day: "numeric",
- hour: "2-digit",
- minute: "2-digit",
- });
-}
-
export function CustomRules() {
const {
customRules,
- customRulesHistory,
updateSetting,
- fetchCustomRulesHistory,
} = useSettingsStore();
const { checkoutLink, fetchAccountTier } = useAccountStore();
@@ -440,17 +420,6 @@ export function CustomRules() {
[]
);
- const handleHistoryOpen = (open: boolean) => {
- if (open) {
- fetchCustomRulesHistory(10);
- }
- };
-
- const handleRestoreVersion = (value: string) => {
- setDraft(value);
- toast.info("Version restored. Click Save to apply changes.");
- };
-
const handleEditorWillMount = useCallback((monaco: Monaco) => {
monacoRef.current = monaco;
@@ -513,41 +482,6 @@ export function CustomRules() {