Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,4 @@ coverage/
# Optional local tooling
.vercel
.netlify
skills/watch-comments/state/*.json
27 changes: 16 additions & 11 deletions bin/share/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,10 @@ import { SOCKET_PATH } from "../../src/rpc.js";
// ── Constants ─────────────────────────────────────────────────────────────────

const DEFAULT_PORT = parseInt(process.env["SHARE_PORT"] ?? "3001");
const DEFAULT_MINUTES = 10;
const DEFAULT_MINUTES = 5;
const MAX_MINUTES = 20;
const DEFAULT_TIMES = 1;
const MAX_TIMES = 3;
const DEFAULT_MAX_BYTES = 100 * 1024 * 1024; // 100 MB
const CLEANUP_INTERVAL_MS = 60_000;

Expand All @@ -45,13 +47,13 @@ const MIME: Record<string, string> = {
".jpeg": "image/jpeg",
".gif": "image/gif",
".webp": "image/webp",
".svg": "image/svg+xml",
".svg": "application/octet-stream", // SVG can contain JS; force download
// Archives
".zip": "application/zip",
".tar": "application/x-tar",
".gz": "application/gzip",
// Data / web (non-sensitive formats only)
".html": "text/html",
".html": "application/octet-stream", // force download; prevent JS execution in browser
".csv": "text/csv",
// Media
".mp4": "video/mp4",
Expand Down Expand Up @@ -123,7 +125,7 @@ USAGE
bun /app/bin/share rm <token|path> Revoke a link by token prefix or file path

ADD FLAGS
--minutes <n> Link lifetime in minutes (default: ${String(DEFAULT_MINUTES)}, max: 60)
--minutes <n> Link lifetime in minutes (default: ${String(DEFAULT_MINUTES)}, max: ${String(MAX_MINUTES)})
--times <n> Max downloads before expiry (default: ${String(DEFAULT_TIMES)})
--delete-after Delete source file after final download
--max-size <bytes> Reject files larger than this (default: 100MB)
Expand All @@ -139,7 +141,7 @@ EXAMPLES
bun /app/bin/share add /tmp/report.pdf --minutes 10

# Share a file 3 times over 30 minutes, delete after last download
bun /app/bin/share add /tmp/data.zip --minutes 30 --times 3 --delete-after
bun /app/bin/share add /tmp/data.zip --minutes 20 --times 3 --delete-after

# List active links
bun /app/bin/share list
Expand Down Expand Up @@ -210,6 +212,8 @@ function handleRequest(req: Request): Response {
}

const token = match[1]!;

// Single read — used for all checks and the decrement write
const store = readStore();
const entry = store[token];

Expand Down Expand Up @@ -248,7 +252,9 @@ function handleRequest(req: Request): Response {
try { unlinkSync(entry.filePath); } catch { /* best effort */ }
}

const fileName = entry.filePath.split("/").pop() ?? "file";
const rawName = entry.filePath.split("/").pop() ?? "file";
// Strip quotes and control chars to prevent Content-Disposition header injection
const fileName = rawName.replace(/[\x00-\x1f"\\]/g, "_");
const file = Bun.file(entry.filePath);

return new Response(file, {
Expand All @@ -257,7 +263,6 @@ function handleRequest(req: Request): Response {
...SECURE_HEADERS,
"Content-Type": mimeFor(entry.filePath),
"Content-Disposition": `attachment; filename="${fileName}"`,
"X-Uses-Remaining": String(entry.usesRemaining),
},
});
}
Expand Down Expand Up @@ -310,13 +315,13 @@ async function cmdAdd(argv: string[]): Promise<void> {
process.exit(1);
}

if (minutes < 1 || minutes > 60) {
console.error("Error: --minutes must be between 1 and 60");
if (minutes < 1 || minutes > MAX_MINUTES) {
console.error(`Error: --minutes must be between 1 and ${String(MAX_MINUTES)}`);
process.exit(1);
}

if (times < 1) {
console.error("Error: --times must be at least 1");
if (times < 1 || times > MAX_TIMES) {
console.error(`Error: --times must be between 1 and ${String(MAX_TIMES)}`);
process.exit(1);
}

Expand Down
2 changes: 1 addition & 1 deletion bin/share/store.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { readFileSync, writeFileSync, existsSync, renameSync } from "node:fs";

export const STORE_PATH = process.env["SHARE_STORE_PATH"] ?? "/data/sharing/store.json";
export const STORE_PATH = process.env["SHARE_STORE_PATH"] ?? "/tmp/sharing/store.json";
export const ALLOWED_PREFIXES = ["/data/", "/tmp/", "/var/tmp/"];

export interface TokenEntry {
Expand Down
118 changes: 118 additions & 0 deletions skills/gh-comments/SKILL.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
---
name: gh-comments
description: >
Use this skill to fetch or watch GitHub comments on issues and PRs.
Triggers include: "get comments in X repo", "watch for comments in X repo",
"any new comments on PR #N?", "keep an eye on issue #N", "start watching
owner/repo", "notify me of new comments". Default mode fetches once and exits;
use --watch to poll continuously.
---

# gh-comments Skill

Fetch new comments once, or watch continuously. State is persisted between runs
so the same comment is never reported twice.

---

## Usage

```bash
# Fetch new comments once and exit (default)
bun gh-comments.ts <owner/repo>

# Watch continuously, polling every 60s
bun gh-comments.ts <owner/repo> --watch

# Scope to a specific PR or issue
bun gh-comments.ts <owner/repo> --pr 42
bun gh-comments.ts <owner/repo> --issue 7

# Custom poll interval (watch mode only)
bun gh-comments.ts <owner/repo> --watch --interval 120

# Show all comments, ignoring saved state (includes comments already reported)
bun gh-comments.ts <owner/repo> --all
```

Always set env vars:
```bash
SSL_CERT_FILE=/data/cacert.pem GH_TOKEN="$GITHUB_TOKEN" bun gh-comments.ts ...
```

---

## Running in the Background (watch mode)

```bash
SSL_CERT_FILE=/data/cacert.pem GH_TOKEN="$GITHUB_TOKEN" \
bun /app/skills/gh-comments/scripts/gh-comments.ts <owner/repo> --watch \
>> /data/skills/gh-comments/<owner>-<repo>.log 2>&1 &

echo $! > /data/skills/gh-comments/<owner>-<repo>.pid
echo "Watcher started"
```

Check on it:
```bash
tail -f /data/skills/gh-comments/<owner>-<repo>.log
kill -0 $(cat /data/skills/gh-comments/<owner>-<repo>.pid) && echo running || echo stopped
```

Stop it:
```bash
kill $(cat /data/skills/gh-comments/<owner>-<repo>.pid)
```

---

## Telegram Notifications

Set these once in your shell; the watcher reads them from the environment on each run:
```bash
export TELEGRAM_BOT_TOKEN=<token>
export TELEGRAM_CHAT_ID=<chat_id>
```

When set, every new comment is also sent as a Telegram message.

---

## What Gets Checked

For each open issue or PR, all three GitHub comment streams are checked:

| Stream | Covers |
|---|---|
| `issues/:n/comments` | Top-level thread (issues + PR conversation) |
| `pulls/:n/comments` | Inline review comments |
| `pulls/:n/reviews` | Submitted review bodies |

---

## State Files

Seen comment IDs are stored per-repo at:
```
/data/skills/gh-comments/state/<owner>-<repo>.json
```

Reset (re-report all existing comments on next run):
```bash
rm /data/skills/gh-comments/state/<owner>-<repo>.json
```

Or use `--all` for a one-off full fetch without touching the state file.

---

## Options

| Flag | Default | Description |
|---|---|---|
| `--watch` | — | Poll continuously instead of fetching once |
| `--pr <n>` | — | Scope to one PR |
| `--issue <n>` | — | Scope to one issue |
| `--interval <n>` | `60` | Seconds between polls (watch mode only) |
| `--all` | — | Ignore state; show all comments |
| `--state-dir <path>` | `/data/skills/gh-comments/state` | Custom state location |
Loading
Loading