Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion .github/workflows/claude-review.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: Claude Code Review

on:
pull_request:
types: [opened, ready_for_review]
types: [opened, synchronize, ready_for_review]
issue_comment:
types: [created]

Expand All @@ -16,6 +16,7 @@ jobs:
review-pr:
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 15

steps:
- name: Checkout
Expand All @@ -25,6 +26,10 @@ jobs:
uses: anthropics/claude-code-action@v1
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
claude_args: >-
--allowedTools
"mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*)"
--max-turns 10
prompt: |
REPO: ${{ github.repository }}
PR NUMBER: ${{ github.event.pull_request.number }}
Expand All @@ -39,6 +44,7 @@ jobs:
github.event.issue.pull_request &&
contains(github.event.comment.body, '@claude')
runs-on: ubuntu-latest
timeout-minutes: 15

steps:
- name: Checkout
Expand All @@ -48,3 +54,7 @@ jobs:
uses: anthropics/claude-code-action@v1
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
claude_args: >-
--allowedTools
"mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*)"
--max-turns 10
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ node dist/cli/index.js --help
- `linkledger index-rebuild [--json]`
- `linkledger worker [--limit N] [--max-attempts N] [--base-backoff-ms N] [--json]`

## Ingestion adapters (M2)
## Ingestion adapters

- `article`: HTML extraction and chunking.
- `x`: oEmbed-based extraction with fallback to article adapter.
Expand All @@ -47,7 +47,7 @@ node dist/cli/index.js --help

Retryable adapter failures are requeued with exponential backoff in `worker`.

In M3, successful ingest also creates enrichment artifacts and moves items to `enriched`.
Successful ingest also creates enrichment artifacts (summary, key claims) and moves items to `enriched` status.

## Database path

Expand Down
16 changes: 6 additions & 10 deletions src/adapters/reddit-adapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ import type { AdapterParseResult, SourceAdapter } from './source-adapter.js';

const MAX_POST_CHARS = 1800;
const MAX_COMMENT_CHARS = 900;
// limit=8 in the fetch URL over-fetches to buffer for non-comment children
// (e.g. kind:'more' stubs) that get filtered out, ensuring we still collect
// up to MAX_COMMENTS actual t1 comments.
const MAX_COMMENTS = 5;

interface RedditListingChild {
Expand Down Expand Up @@ -59,15 +62,6 @@ const toPublishedAt = (value: unknown): string | undefined => {
return new Date(value * 1000).toISOString();
};

// True when `value` parses as a URL whose hostname is a Reddit domain
// (per the project's `isRedditHost` helper); false for malformed URLs.
const isRedditPageUrl = (value: string): boolean => {
  let hostname: string;
  try {
    hostname = new URL(value).hostname;
  } catch {
    // Not a parseable URL — cannot be a Reddit page.
    return false;
  }
  return isRedditHost(hostname.toLowerCase());
};

export class RedditAdapter implements SourceAdapter {
supports(url: string): boolean {
return this.detectType(url) === 'reddit';
Expand Down Expand Up @@ -109,13 +103,15 @@ export class RedditAdapter implements SourceAdapter {
const subreddit = compact(toText(postData.subreddit));
const selfText = compact(toText(postData.selftext));
const linkedUrl = compact(toText(postData.url));
let isLinkedReddit = false;
try { isLinkedReddit = isRedditHost(new URL(linkedUrl).hostname.toLowerCase()); } catch { /* malformed URL */ }

const postParts = [
title ? `Title: ${title}` : '',
subreddit ? `Subreddit: r/${subreddit}` : '',
author ? `Author: u/${author}` : '',
selfText ? `Body:\n${selfText}` : '',
linkedUrl && !isRedditPageUrl(linkedUrl) ? `Linked URL: ${linkedUrl}` : ''
linkedUrl && !isLinkedReddit ? `Linked URL: ${linkedUrl}` : ''
].filter(Boolean);

const chunks: Array<{ text: string; tokenCount: number }> = [];
Expand Down
26 changes: 26 additions & 0 deletions test/unit/reddit-adapter.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import assert from 'node:assert/strict';
import { readFileSync } from 'node:fs';
import path from 'node:path';
import test from 'node:test';
import { AppError } from '../../src/lib/errors.js';
import { RedditAdapter } from '../../src/adapters/reddit-adapter.js';

const fixture = (name: string): string =>
Expand Down Expand Up @@ -57,3 +58,28 @@ test('RedditAdapter marks upstream failures as retryable for 5xx', async () => {
globalThis.fetch = originalFetch;
}
});

test('RedditAdapter marks 404 as non-retryable', async () => {
  const adapter = new RedditAdapter();
  const savedFetch = globalThis.fetch;

  // Stub the global fetch so the adapter observes a missing post (HTTP 404).
  globalThis.fetch = async () =>
    new Response('not found', {
      status: 404,
      headers: { 'content-type': 'text/plain' }
    });

  try {
    let caught: unknown;
    try {
      await adapter.fetchAndParse({ url: 'https://www.reddit.com/comments/abc123' });
    } catch (error: unknown) {
      caught = error;
    }
    // The adapter must surface a non-retryable FETCH_FAILED AppError.
    assert.ok(caught instanceof AppError);
    assert.equal(caught.code, 'FETCH_FAILED');
    assert.equal(caught.retryable, false);
  } finally {
    // Always restore the real fetch so later tests are unaffected.
    globalThis.fetch = savedFetch;
  }
});
Loading