feat: unified feed bots for discord

combines reddit digest, github trending, new ai repos,
and claude code release tracking into one CLI tool.

usage: bun run feed <reddit|trending|new-repos|claude-releases|all>
This commit is contained in:
Nicholai Vogel 2026-01-24 01:39:36 -07:00
commit bb90d15209
9 changed files with 487 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
node_modules/
bun.lock
.last-version

12
package.json Normal file
View File

@ -0,0 +1,12 @@
{
"name": "discord-feed-bots",
"version": "1.0.0",
"description": "Discord feed bots: reddit digest, github trending, new AI repos, claude-code releases",
"type": "module",
"scripts": {
"feed": "bun run src/index.ts"
},
"devDependencies": {
"@types/bun": "latest"
}
}

54
src/config.ts Normal file
View File

@ -0,0 +1,54 @@
// --- Reddit Digest ---
// Subreddits scraped by the reddit digest feed.
export const REDDIT_SUBREDDITS = [
  'LocalLLaMA',
  'MachineLearning',
  'ClaudeAI',
  'ChatGPT',
  'artificial',
  'LangChain',
  'AutoGPT',
];
// Listings fetched per subreddit; posts appearing in both are deduped.
export const REDDIT_LISTINGS = ['hot', 'rising'] as const;
// Minimum post score (upvotes) for inclusion in the digest.
export const REDDIT_MIN_SCORE = 50;
// Ignore posts older than this many hours.
export const REDDIT_MAX_AGE_HOURS = 24;
// Cap on posts shown per subreddit section.
export const REDDIT_MAX_POSTS_PER_SUB = 10;
// Max characters of selftext quoted under a post.
export const REDDIT_SNIPPET_LENGTH = 200;
// Pause between unauthenticated reddit requests.
export const REDDIT_REQUEST_DELAY_MS = 1500;
// Posts requested per listing page.
export const REDDIT_POSTS_PER_PAGE = 100;
// Identifies the bot to reddit.
export const REDDIT_USER_AGENT =
  'discord-feed-bots/1.0 (reddit digest bot)';
// --- GitHub Trending ---
// Search queries whose results are merged and deduped for the trending feed.
export const TRENDING_QUERIES = [
  'topic:ai-agent topic:llm-agent topic:ai-agents',
  '"agent framework" language:python,typescript',
  '"autonomous agent" stars:>50',
  'topic:langchain topic:autogpt',
];
// Extra qualifiers applied to every trending query.
export const TRENDING_MIN_STARS = 10;
export const TRENDING_PUSHED_DAYS_AGO = 7;
export const TRENDING_MAX_RESULTS = 15;
// --- New AI Repos ---
// Topics searched for recently created repos.
export const NEW_REPO_TOPICS = [
  'ai-agent',
  'llm-agent',
  'autonomous-agent',
  'ai-agents',
  'agent-framework',
];
export const NEW_REPOS_MIN_STARS = 100;
export const NEW_REPOS_CREATED_DAYS_AGO = 7;
export const NEW_REPOS_MAX_RESULTS = 15;
// --- Claude Code Releases ---
// NOTE(review): GITHUB_API is not referenced by the visible code
// (src/utils.ts ghSearch hard-codes the host) — confirm intended use.
export const GITHUB_API = 'https://api.github.com';
export const NPM_REGISTRY = 'https://registry.npmjs.org';
export const CLAUDE_CODE_PACKAGE = '@anthropic-ai/claude-code';
// File recording the last announced version, resolved relative to this
// module (repo root).
// NOTE(review): URL.pathname is percent-encoded and mishandles Windows
// drive-letter paths — consider node:url fileURLToPath; confirm target OS.
export const LAST_VERSION_FILE = new URL(
  '../../.last-version',
  import.meta.url
).pathname;

View File

@ -0,0 +1,65 @@
import { existsSync, readFileSync, writeFileSync } from 'fs';
import {
CLAUDE_CODE_PACKAGE,
NPM_REGISTRY,
LAST_VERSION_FILE,
} from '../config';
import { log } from '../utils';
/**
 * Checks the npm registry for a new @anthropic-ai/claude-code release.
 *
 * Returns a Markdown announcement when the latest published version
 * differs from the one recorded in LAST_VERSION_FILE, an empty string
 * when nothing changed, and a short failure section on registry errors.
 * Side effect: persists the newly seen version to LAST_VERSION_FILE.
 */
export async function run(): Promise<string> {
  log('[claude-releases] Checking npm for claude-code updates...');
  const registryUrl = `${NPM_REGISTRY}/${CLAUDE_CODE_PACKAGE}`;
  const response = await fetch(registryUrl, {
    headers: { Accept: 'application/json' },
  });
  if (!response.ok) {
    log(` ⚠ npm registry ${response.status}`);
    return '## Claude Code Releases\n\n_Failed to check registry._\n';
  }

  const meta = (await response.json()) as {
    'dist-tags': { latest: string };
    time: Record<string, string>;
  };
  const latest = meta['dist-tags'].latest;
  const publishedAt = meta.time?.[latest] ?? 'unknown';

  // Version we announced last time, persisted on disk between runs.
  const previous = existsSync(LAST_VERSION_FILE)
    ? readFileSync(LAST_VERSION_FILE, 'utf-8').trim()
    : '';
  if (previous === latest) {
    log(`[claude-releases] No new version (current: ${latest})`);
    return '';
  }

  // Record the new version before building the announcement.
  writeFileSync(LAST_VERSION_FILE, latest, 'utf-8');
  log(`[claude-releases] New version: ${latest} (was: ${previous || 'none'})`);

  const announcement = [
    '## 🚀 Claude Code — New Release\n',
    `**Version:** \`${latest}\``,
    `**Published:** ${publishedAt}\n`,
    `📦 [npm](https://www.npmjs.com/package/${CLAUDE_CODE_PACKAGE}) | 📝 [Releases](https://github.com/anthropics/claude-code/releases)`,
  ];
  if (previous) announcement.push(`\n_Previous: \`${previous}\`_`);
  return announcement.join('\n');
}

// Allow running this feed directly: `bun run src/feeds/claude-code-releases.ts`.
if (import.meta.main) {
  console.log(await run());
}

View File

@ -0,0 +1,40 @@
import {
TRENDING_QUERIES,
TRENDING_MIN_STARS,
TRENDING_PUSHED_DAYS_AGO,
TRENDING_MAX_RESULTS,
} from '../config';
import {
daysAgo,
log,
ghSearch,
formatRepos,
dedupeRepos,
type GHRepo,
} from '../utils';
/**
 * Builds the "Trending AI/Agent Repos" feed: runs every configured GitHub
 * search query, merges and dedupes the hits, and keeps the most-starred.
 */
export async function run(): Promise<string> {
  log('[trending] Searching GitHub for trending AI/agent repos...');
  const pushedSince = daysAgo(TRENDING_PUSHED_DAYS_AGO);
  // Qualifiers shared by every query: star floor + recent-activity window.
  const commonFilter = `stars:>${TRENDING_MIN_STARS} pushed:>${pushedSince}`;

  const hits: GHRepo[] = [];
  for (const base of TRENDING_QUERIES) {
    hits.push(...(await ghSearch(`${base} ${commonFilter}`)));
  }

  const top = dedupeRepos(hits)
    .sort((x, y) => y.stargazers_count - x.stargazers_count)
    .slice(0, TRENDING_MAX_RESULTS);
  log(`[trending] Found ${top.length} repos`);
  return formatRepos(top, '🔥 Trending AI/Agent Repos');
}

// Allow running this feed directly.
if (import.meta.main) {
  console.log(await run());
}

40
src/feeds/new-ai-repos.ts Normal file
View File

@ -0,0 +1,40 @@
import {
NEW_REPO_TOPICS,
NEW_REPOS_MIN_STARS,
NEW_REPOS_CREATED_DAYS_AGO,
NEW_REPOS_MAX_RESULTS,
} from '../config';
import {
daysAgo,
log,
ghSearch,
formatRepos,
dedupeRepos,
type GHRepo,
} from '../utils';
/**
 * Builds the "New AI/Agent Repos" feed: searches GitHub for repos created
 * within the last NEW_REPOS_CREATED_DAYS_AGO days under each configured
 * topic, then dedupes and keeps the most-starred results.
 */
export async function run(): Promise<string> {
  // Derive the window from config instead of hard-coding "7 days" so the
  // log line and section title stay correct if the constant changes.
  log(`[new-repos] Searching for new AI repos (last ${NEW_REPOS_CREATED_DAYS_AGO} days)...`);
  const since = daysAgo(NEW_REPOS_CREATED_DAYS_AGO);
  const baseFilter = `created:>${since} stars:>${NEW_REPOS_MIN_STARS}`;
  const all: GHRepo[] = [];
  for (const topic of NEW_REPO_TOPICS) {
    const query = `topic:${topic} ${baseFilter}`;
    const repos = await ghSearch(query);
    all.push(...repos);
  }
  const unique = dedupeRepos(all)
    .sort((a, b) => b.stargazers_count - a.stargazers_count)
    .slice(0, NEW_REPOS_MAX_RESULTS);
  log(`[new-repos] Found ${unique.length} new repos`);
  return formatRepos(unique, `🆕 New AI/Agent Repos (Past ${NEW_REPOS_CREATED_DAYS_AGO} Days)`);
}

// Allow running this feed directly.
if (import.meta.main) {
  console.log(await run());
}

152
src/feeds/reddit-digest.ts Normal file
View File

@ -0,0 +1,152 @@
import {
REDDIT_SUBREDDITS,
REDDIT_LISTINGS,
REDDIT_MIN_SCORE,
REDDIT_MAX_AGE_HOURS,
REDDIT_MAX_POSTS_PER_SUB,
REDDIT_SNIPPET_LENGTH,
REDDIT_REQUEST_DELAY_MS,
REDDIT_POSTS_PER_PAGE,
REDDIT_USER_AGENT,
} from '../config';
import { log, sleep } from '../utils';
// --- Types ---
// Subset of reddit's post payload ("t3" thing) consumed by this feed.
interface RedditPost {
  id: string;          // used for de-duplication across listings
  title: string;
  selftext: string;    // body of self (text) posts; empty for links
  score: number;       // net upvotes, filtered against REDDIT_MIN_SCORE
  num_comments: number;
  created_utc: number; // unix seconds, filtered against REDDIT_MAX_AGE_HOURS
  permalink: string;   // path only; prefixed with https://reddit.com when rendered
  subreddit: string;
  is_self: boolean;    // true for text posts (selftext is quoted in output)
}
// Shape of a reddit listing response (/r/<sub>/<listing>.json).
interface RedditListing {
  data: {
    children: Array<{ kind: string; data: RedditPost }>;
  };
}
// --- Scraper ---
/**
 * Fetches one page of a subreddit listing via reddit's public JSON
 * endpoint. Returns only "t3" (post) children; on HTTP errors it logs a
 * warning and returns an empty array so the digest can continue.
 */
async function fetchListing(
  subreddit: string,
  listing: string
): Promise<RedditPost[]> {
  const endpoint =
    `https://www.reddit.com/r/${subreddit}/${listing}.json` +
    `?limit=${REDDIT_POSTS_PER_PAGE}&raw_json=1`;
  const response = await fetch(endpoint, {
    headers: {
      'User-Agent': REDDIT_USER_AGENT,
      Accept: 'application/json',
    },
  });
  if (!response.ok) {
    log(` [warn] r/${subreddit}/${listing}: ${response.status}`);
    return [];
  }
  const payload = (await response.json()) as RedditListing;
  // Keep only "t3" children — other kinds are not posts.
  const posts: RedditPost[] = [];
  for (const child of payload.data.children) {
    if (child.kind === 't3') posts.push(child.data);
  }
  return posts;
}
/**
 * Pulls every configured listing for one subreddit, de-duplicating posts
 * that appear in more than one listing (first occurrence wins). Sleeps
 * between requests to throttle unauthenticated scraping.
 */
async function scrapeSubreddit(
  subreddit: string
): Promise<RedditPost[]> {
  const byId = new Map<string, RedditPost>();
  for (const listing of REDDIT_LISTINGS) {
    for (const post of await fetchListing(subreddit, listing)) {
      if (!byId.has(post.id)) byId.set(post.id, post);
    }
    await sleep(REDDIT_REQUEST_DELAY_MS);
  }
  // Map preserves insertion order, matching the original listing order.
  return [...byId.values()];
}
// --- Filter ---
/**
 * Keeps posts at or above the score threshold and newer than the max age,
 * sorted by score (highest first) and capped per subreddit.
 */
function filterPosts(posts: RedditPost[]): RedditPost[] {
  const oldestAllowed = Date.now() / 1000 - REDDIT_MAX_AGE_HOURS * 3600;
  const kept = posts.filter(
    (p) => p.score >= REDDIT_MIN_SCORE && p.created_utc >= oldestAllowed
  );
  kept.sort((a, b) => b.score - a.score);
  return kept.slice(0, REDDIT_MAX_POSTS_PER_SUB);
}
// --- Format ---
/** Collapses newlines to spaces and truncates selftext to the configured length. */
function snippet(text: string): string {
  if (!text) return '';
  const flattened = text.replace(/\n+/g, ' ').trim();
  return flattened.length <= REDDIT_SNIPPET_LENGTH
    ? flattened
    : flattened.slice(0, REDDIT_SNIPPET_LENGTH).trimEnd() + '…';
}
/**
 * Renders one post as Markdown: linked title, score/comment counts, and —
 * for self posts with a body — a quoted snippet.
 */
function formatPost(post: RedditPost): string {
  const url = `https://reddit.com${post.permalink}`;
  const stats = `${post.score} | 💬 ${post.num_comments}`;
  const parts = [`**[${post.title}](${url})**`, stats];
  // Only self (text) posts carry a body worth quoting.
  if (post.is_self) {
    const body = snippet(post.selftext);
    if (body) parts.push(`> ${body}`);
  }
  return parts.join('\n');
}
/**
 * Assembles the full digest: a timestamped header followed by one section
 * per subreddit, separated by horizontal rules. Empty input yields a
 * short "nothing matched" notice instead.
 */
function formatDigest(grouped: Map<string, RedditPost[]>): string {
  if (grouped.size === 0) {
    return '*No posts matched the filters in the last 24h.*';
  }
  const sections = [...grouped].map(
    ([subreddit, posts]) =>
      `## r/${subreddit}\n${posts.map(formatPost).join('\n\n')}`
  );
  // Timestamp to minute precision, e.g. "2026-01-24 08:39".
  const stamp = new Date().toISOString().slice(0, 16).replace('T', ' ');
  return (
    `# 🤖 AI Reddit Digest — ${stamp} UTC\n\n` +
    sections.join('\n\n---\n\n') +
    '\n'
  );
}
// --- Entry ---
/**
 * Scrapes every configured subreddit, filters for high-signal recent
 * posts, and renders the combined Markdown digest.
 */
export async function run(): Promise<string> {
  log('[reddit] Fetching AI subreddit digest...');
  const grouped = new Map<string, RedditPost[]>();
  for (const name of REDDIT_SUBREDDITS) {
    log(` r/${name}...`);
    const fetched = await scrapeSubreddit(name);
    const kept = filterPosts(fetched);
    // Subreddits with no qualifying posts get no section at all.
    if (kept.length > 0) grouped.set(name, kept);
    log(` ${fetched.length} fetched, ${kept.length} kept`);
  }
  return formatDigest(grouped);
}

// Allow running this feed directly.
if (import.meta.main) {
  console.log(await run());
}

51
src/index.ts Normal file
View File

@ -0,0 +1,51 @@
import { run as reddit } from './feeds/reddit-digest';
import { run as trending } from './feeds/github-trending';
import { run as newRepos } from './feeds/new-ai-repos';
import { run as claudeReleases } from './feeds/claude-code-releases';
import { log } from './utils';
// Maps CLI subcommand names to feed runners. Insertion order determines
// the order feeds execute in `all` mode.
const COMMANDS: Record<string, () => Promise<string>> = {
  reddit,
  trending,
  'new-repos': newRepos,
  'claude-releases': claudeReleases,
};

/**
 * CLI entry point: runs one named feed (printing its Markdown to stdout),
 * or every feed in sequence when given `all`. Exits 1 on missing/unknown
 * commands.
 */
async function main() {
  const cmd = process.argv[2];
  if (!cmd || cmd === '--help' || cmd === '-h') {
    console.error(
      'Usage: bun run feed ' +
        '<reddit|trending|new-repos|claude-releases|all>'
    );
    process.exit(1);
  }
  if (cmd === 'all') {
    for (const [name, fn] of Object.entries(COMMANDS)) {
      log(`\n--- ${name} ---`);
      // Isolate failures so one broken feed (e.g. a network error)
      // doesn't abort the remaining feeds.
      try {
        const output = await fn();
        if (output) console.log(output);
      } catch (e) {
        log(`[${name}] failed:`, e);
      }
    }
    return;
  }
  const fn = COMMANDS[cmd];
  if (!fn) {
    console.error(`Unknown command: ${cmd}`);
    console.error(
      `Available: ${Object.keys(COMMANDS).join(', ')}, all`
    );
    process.exit(1);
  }
  const output = await fn();
  if (output) console.log(output);
  else log('(no output)');
}

main().catch((e) => {
  console.error(e);
  process.exit(1);
});

70
src/utils.ts Normal file
View File

@ -0,0 +1,70 @@
/**
 * Returns the date `n` days before now as an ISO `YYYY-MM-DD` string
 * (used to build GitHub search `created:`/`pushed:` qualifiers).
 */
export function daysAgo(n: number): string {
  const when = new Date();
  // setDate handles month/year rollover for us.
  when.setDate(when.getDate() - n);
  return when.toISOString().slice(0, 10);
}
// Stderr-based logger: keeps diagnostics separate from the feed Markdown
// that the CLI prints on stdout.
export function log(...args: unknown[]) {
  console.error(...args);
}
/** Resolves after `ms` milliseconds; used to throttle scraper requests. */
export function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}
/**
 * Runs a GitHub repository search and returns the raw items (up to 30,
 * sorted by stars descending). On API errors it logs the status/body and
 * returns [] so one failed query never aborts a whole feed.
 */
export async function ghSearch(query: string): Promise<GHRepo[]> {
  const url =
    `https://api.github.com/search/repositories` +
    `?q=${encodeURIComponent(query)}&sort=stars&order=desc&per_page=30`;
  log(` fetching: ${query}`);
  const headers: Record<string, string> = {
    Accept: 'application/vnd.github.v3+json',
  };
  // Optional auth: an unauthenticated client gets a much lower search
  // rate limit; honor GITHUB_TOKEN when the environment provides one.
  const token = process.env.GITHUB_TOKEN;
  if (token) headers.Authorization = `Bearer ${token}`;
  const res = await fetch(url, { headers });
  if (!res.ok) {
    log(` ⚠ GitHub API ${res.status}: ${await res.text()}`);
    return [];
  }
  const data = (await res.json()) as { items: GHRepo[] };
  // Defensive: search responses normally include `items`, but fall back
  // to an empty list rather than returning undefined.
  return data.items ?? [];
}
/** Subset of the GitHub search-API repository payload used by the feeds. */
export interface GHRepo {
  full_name: string;
  description: string | null;
  stargazers_count: number;
  language: string | null;
  html_url: string;
  created_at: string;
  pushed_at: string;
  topics?: string[];
}

/**
 * Renders a repo list as a Markdown section: a heading, then one linked
 * entry per repo with star count, language, and a short description.
 * An empty list yields a "_No results found._" placeholder.
 */
export function formatRepos(repos: GHRepo[], title: string): string {
  if (!repos.length) return `## ${title}\n\n_No results found._\n`;
  const out: string[] = [`## ${title}\n`];
  for (const repo of repos) {
    const stars = repo.stargazers_count.toLocaleString();
    const lang = repo.language ?? '?';
    out.push(`**[${repo.full_name}](${repo.html_url})** ${stars} | \`${lang}\``);
    // Cap descriptions so one verbose repo can't dominate the section.
    const blurb = repo.description
      ? repo.description.slice(0, 120)
      : '_No description_';
    out.push(`> ${blurb}\n`);
  }
  return out.join('\n');
}
/**
 * Removes duplicate repos (same `full_name`), keeping the first
 * occurrence and preserving input order.
 */
export function dedupeRepos(repos: GHRepo[]): GHRepo[] {
  const byName = new Map<string, GHRepo>();
  for (const repo of repos) {
    if (!byName.has(repo.full_name)) byName.set(repo.full_name, repo);
  }
  return [...byName.values()];
}