commit bb90d15209923cf2e2a9d4749215ec3c164cc896
Author: Nicholai
Date:   Sat Jan 24 01:39:36 2026 -0700

    feat: unified feed bots for discord

    combines reddit digest, github trending, new ai repos, and claude code
    release tracking into one CLI tool. usage: bun run feed <command>

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c95c8f1
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+node_modules/
+bun.lock
+.last-version
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..94f67fe
--- /dev/null
+++ b/package.json
@@ -0,0 +1,12 @@
+{
+  "name": "discord-feed-bots",
+  "version": "1.0.0",
+  "description": "Discord feed bots: reddit digest, github trending, new AI repos, claude-code releases",
+  "type": "module",
+  "scripts": {
+    "feed": "bun run src/index.ts"
+  },
+  "devDependencies": {
+    "@types/bun": "latest"
+  }
+}
diff --git a/src/config.ts b/src/config.ts
new file mode 100644
index 0000000..b76c5b0
--- /dev/null
+++ b/src/config.ts
@@ -0,0 +1,55 @@
+// --- Reddit Digest ---
+export const REDDIT_SUBREDDITS = [
+  'LocalLLaMA',
+  'MachineLearning',
+  'ClaudeAI',
+  'ChatGPT',
+  'artificial',
+  'LangChain',
+  'AutoGPT',
+];
+
+export const REDDIT_LISTINGS = ['hot', 'rising'] as const;
+export const REDDIT_MIN_SCORE = 50;
+export const REDDIT_MAX_AGE_HOURS = 24;
+export const REDDIT_MAX_POSTS_PER_SUB = 10;
+export const REDDIT_SNIPPET_LENGTH = 200;
+export const REDDIT_REQUEST_DELAY_MS = 1500;
+export const REDDIT_POSTS_PER_PAGE = 100;
+export const REDDIT_USER_AGENT =
+  'discord-feed-bots/1.0 (reddit digest bot)';
+
+// --- GitHub Trending ---
+export const TRENDING_QUERIES = [
+  'topic:ai-agent topic:llm-agent topic:ai-agents',
+  '"agent framework" language:python,typescript',
+  '"autonomous agent" stars:>50',
+  'topic:langchain topic:autogpt',
+];
+
+export const TRENDING_MIN_STARS = 10;
+export const TRENDING_PUSHED_DAYS_AGO = 7;
+export const TRENDING_MAX_RESULTS = 15;
+
+// --- New AI Repos ---
+export const NEW_REPO_TOPICS = [
+  'ai-agent',
+  'llm-agent',
+  'autonomous-agent',
+  'ai-agents',
+  'agent-framework',
+];
+
+export const NEW_REPOS_MIN_STARS = 100;
+export const NEW_REPOS_CREATED_DAYS_AGO = 7;
+export const NEW_REPOS_MAX_RESULTS = 15;
+
+// --- Claude Code Releases ---
+export const GITHUB_API = 'https://api.github.com';
+export const NPM_REGISTRY = 'https://registry.npmjs.org';
+export const CLAUDE_CODE_PACKAGE = '@anthropic-ai/claude-code';
+// Marker file at the repo root: config.ts lives in src/, so one '..' reaches it.
+export const LAST_VERSION_FILE = new URL(
+  '../.last-version',
+  import.meta.url
+).pathname;
diff --git a/src/feeds/claude-code-releases.ts b/src/feeds/claude-code-releases.ts
new file mode 100644
index 0000000..f4b79dd
--- /dev/null
+++ b/src/feeds/claude-code-releases.ts
@@ -0,0 +1,65 @@
+import { existsSync, readFileSync, writeFileSync } from 'fs';
+import {
+  CLAUDE_CODE_PACKAGE,
+  NPM_REGISTRY,
+  LAST_VERSION_FILE,
+} from '../config';
+import { log } from '../utils';
+
+export async function run(): Promise<string> {
+  log('[claude-releases] Checking npm for claude-code updates...');
+
+  const url = `${NPM_REGISTRY}/${CLAUDE_CODE_PACKAGE}`;
+  const res = await fetch(url, {
+    headers: { Accept: 'application/json' },
+  });
+
+  if (!res.ok) {
+    log(`  ⚠ npm registry ${res.status}`);
+    return '## Claude Code Releases\n\n_Failed to check registry._\n';
+  }
+
+  const data = (await res.json()) as {
+    'dist-tags': { latest: string };
+    time: Record<string, string>;
+  };
+
+  const latest = data['dist-tags'].latest;
+  const publishedAt =
+    data.time?.[latest] ?? 'unknown';
+
+  let lastSeen = '';
+  if (existsSync(LAST_VERSION_FILE)) {
+    lastSeen = readFileSync(LAST_VERSION_FILE, 'utf-8').trim();
+  }
+
+  if (latest === lastSeen) {
+    log(`[claude-releases] No new version (current: ${latest})`);
+    return '';
+  }
+
+  writeFileSync(LAST_VERSION_FILE, latest, 'utf-8');
+  log(
+    `[claude-releases] New version: ${latest} ` +
+      `(was: ${lastSeen || 'none'})`
+  );
+
+  const changelogUrl =
+    'https://github.com/anthropics/claude-code/releases';
+  const npmUrl =
+    `https://www.npmjs.com/package/${CLAUDE_CODE_PACKAGE}`;
+
+  const lines = [
+    '## 🚀 Claude Code — New Release\n',
+    `**Version:** \`${latest}\``,
+    `**Published:** ${publishedAt}\n`,
+    `📦 [npm](${npmUrl}) | 📝 [Releases](${changelogUrl})`,
+  ];
+
+  if (lastSeen) lines.push(`\n_Previous: \`${lastSeen}\`_`);
+
+  return lines.join('\n');
+}
+
+if (import.meta.main) {
+  console.log(await run());
+}
diff --git a/src/feeds/github-trending.ts b/src/feeds/github-trending.ts
new file mode 100644
index 0000000..3772555
--- /dev/null
+++ b/src/feeds/github-trending.ts
@@ -0,0 +1,40 @@
+import {
+  TRENDING_QUERIES,
+  TRENDING_MIN_STARS,
+  TRENDING_PUSHED_DAYS_AGO,
+  TRENDING_MAX_RESULTS,
+} from '../config';
+import {
+  daysAgo,
+  log,
+  ghSearch,
+  formatRepos,
+  dedupeRepos,
+  type GHRepo,
+} from '../utils';
+
+export async function run(): Promise<string> {
+  log('[trending] Searching GitHub for trending AI/agent repos...');
+
+  const since = daysAgo(TRENDING_PUSHED_DAYS_AGO);
+  const baseFilter = `stars:>${TRENDING_MIN_STARS} pushed:>${since}`;
+
+  const all: GHRepo[] = [];
+
+  for (const q of TRENDING_QUERIES) {
+    const query = `${q} ${baseFilter}`;
+    const repos = await ghSearch(query);
+    all.push(...repos);
+  }
+
+  const unique = dedupeRepos(all)
+    .sort((a, b) => b.stargazers_count - a.stargazers_count)
+    .slice(0, TRENDING_MAX_RESULTS);
+
+  log(`[trending] Found ${unique.length} repos`);
+  return formatRepos(unique, '🔥 Trending AI/Agent Repos');
+}
+
+if (import.meta.main) {
+  console.log(await run());
+}
diff --git a/src/feeds/new-ai-repos.ts b/src/feeds/new-ai-repos.ts
new file mode 100644
index 0000000..a382488
--- /dev/null
+++ b/src/feeds/new-ai-repos.ts
@@ -0,0 +1,40 @@
+import {
+  NEW_REPO_TOPICS,
+  NEW_REPOS_MIN_STARS,
+  NEW_REPOS_CREATED_DAYS_AGO,
+  NEW_REPOS_MAX_RESULTS,
+} from '../config';
+import {
+  daysAgo,
+  log,
+  ghSearch,
+  formatRepos,
+  dedupeRepos,
+  type GHRepo,
+} from '../utils';
+
+export async function run(): Promise<string> {
+  log('[new-repos] Searching for new AI repos (last 7 days)...');
+
+  const since = daysAgo(NEW_REPOS_CREATED_DAYS_AGO);
+  const baseFilter = `created:>${since} stars:>${NEW_REPOS_MIN_STARS}`;
+
+  const all: GHRepo[] = [];
+
+  for (const topic of NEW_REPO_TOPICS) {
+    const query = `topic:${topic} ${baseFilter}`;
+    const repos = await ghSearch(query);
+    all.push(...repos);
+  }
+
+  const unique = dedupeRepos(all)
+    .sort((a, b) => b.stargazers_count - a.stargazers_count)
+    .slice(0, NEW_REPOS_MAX_RESULTS);
+
+  log(`[new-repos] Found ${unique.length} new repos`);
+  return formatRepos(unique, '🆕 New AI/Agent Repos (Past 7 Days)');
+}
+
+if (import.meta.main) {
+  console.log(await run());
+}
diff --git a/src/feeds/reddit-digest.ts b/src/feeds/reddit-digest.ts
new file mode 100644
index 0000000..9035bf4
--- /dev/null
+++ b/src/feeds/reddit-digest.ts
@@ -0,0 +1,152 @@
+import {
+  REDDIT_SUBREDDITS,
+  REDDIT_LISTINGS,
+  REDDIT_MIN_SCORE,
+  REDDIT_MAX_AGE_HOURS,
+  REDDIT_MAX_POSTS_PER_SUB,
+  REDDIT_SNIPPET_LENGTH,
+  REDDIT_REQUEST_DELAY_MS,
+  REDDIT_POSTS_PER_PAGE,
+  REDDIT_USER_AGENT,
+} from '../config';
+import { log, sleep } from '../utils';
+
+// --- Types ---
+
+interface RedditPost {
+  id: string;
+  title: string;
+  selftext: string;
+  score: number;
+  num_comments: number;
+  created_utc: number;
+  permalink: string;
+  subreddit: string;
+  is_self: boolean;
+}
+
+interface RedditListing {
+  data: {
+    children: Array<{ kind: string; data: RedditPost }>;
+  };
+}
+
+// --- Scraper ---
+
+async function fetchListing(
+  subreddit: string,
+  listing: string
+): Promise<RedditPost[]> {
+  const url =
+    `https://www.reddit.com/r/${subreddit}/${listing}.json` +
+    `?limit=${REDDIT_POSTS_PER_PAGE}&raw_json=1`;
+
+  const res = await fetch(url, {
+    headers: {
+      'User-Agent': REDDIT_USER_AGENT,
+      Accept: 'application/json',
+    },
+  });
+
+  if (!res.ok) {
+    log(`  [warn] r/${subreddit}/${listing}: ${res.status}`);
+    return [];
+  }
+
+  const json = (await res.json()) as RedditListing;
+  return json.data.children
+    .filter((c) => c.kind === 't3')
+    .map((c) => c.data);
+}
+
+async function scrapeSubreddit(
+  subreddit: string
+): Promise<RedditPost[]> {
+  const seen = new Set<string>();
+  const posts: RedditPost[] = [];
+
+  for (const listing of REDDIT_LISTINGS) {
+    const fetched = await fetchListing(subreddit, listing);
+    for (const post of fetched) {
+      if (!seen.has(post.id)) {
+        seen.add(post.id);
+        posts.push(post);
+      }
+    }
+    await sleep(REDDIT_REQUEST_DELAY_MS);
+  }
+
+  return posts;
+}
+
+// --- Filter ---
+
+function filterPosts(posts: RedditPost[]): RedditPost[] {
+  const cutoff = Date.now() / 1000 - REDDIT_MAX_AGE_HOURS * 3600;
+
+  return posts
+    .filter((p) => p.score >= REDDIT_MIN_SCORE && p.created_utc >= cutoff)
+    .sort((a, b) => b.score - a.score)
+    .slice(0, REDDIT_MAX_POSTS_PER_SUB);
+}
+
+// --- Format ---
+
+function snippet(text: string): string {
+  if (!text) return '';
+  const clean = text.replace(/\n+/g, ' ').trim();
+  if (clean.length <= REDDIT_SNIPPET_LENGTH) return clean;
+  return clean.slice(0, REDDIT_SNIPPET_LENGTH).trimEnd() + '…';
+}
+
+function formatPost(post: RedditPost): string {
+  const link = `https://reddit.com${post.permalink}`;
+  const meta = `⬆ ${post.score} | 💬 ${post.num_comments}`;
+  const selfSnippet = post.is_self ? snippet(post.selftext) : '';
+
+  let out = `**[${post.title}](${link})**\n${meta}`;
+  if (selfSnippet) out += `\n> ${selfSnippet}`;
+  return out;
+}
+
+function formatDigest(grouped: Map<string, RedditPost[]>): string {
+  if (grouped.size === 0) {
+    return '*No posts matched the filters in the last 24h.*';
+  }
+
+  const sections: string[] = [];
+  for (const [subreddit, posts] of grouped) {
+    const header = `## r/${subreddit}`;
+    const body = posts.map(formatPost).join('\n\n');
+    sections.push(`${header}\n${body}`);
+  }
+
+  const now = new Date().toISOString().slice(0, 16).replace('T', ' ');
+  return `# 🤖 AI Reddit Digest — ${now} UTC\n\n` +
+    sections.join('\n\n---\n\n') + '\n';
+}
+
+// --- Entry ---
+
+export async function run(): Promise<string> {
+  log('[reddit] Fetching AI subreddit digest...');
+
+  const grouped = new Map<string, RedditPost[]>();
+
+  for (const subreddit of REDDIT_SUBREDDITS) {
+    log(`  r/${subreddit}...`);
+    const raw = await scrapeSubreddit(subreddit);
+    const filtered = filterPosts(raw);
+
+    if (filtered.length > 0) {
+      grouped.set(subreddit, filtered);
+    }
+    log(`    ${raw.length} fetched, ${filtered.length} kept`);
+  }
+
+  return formatDigest(grouped);
+}
+
+if (import.meta.main) {
+  console.log(await run());
+}
diff --git a/src/index.ts b/src/index.ts
new file mode 100644
index 0000000..6c9fb5c
--- /dev/null
+++ b/src/index.ts
@@ -0,0 +1,51 @@
+import { run as reddit } from './feeds/reddit-digest';
+import { run as trending } from './feeds/github-trending';
+import { run as newRepos } from './feeds/new-ai-repos';
+import { run as claudeReleases } from './feeds/claude-code-releases';
+import { log } from './utils';
+
+const COMMANDS: Record<string, () => Promise<string>> = {
+  reddit,
+  trending,
+  'new-repos': newRepos,
+  'claude-releases': claudeReleases,
+};
+
+async function main() {
+  const cmd = process.argv[2];
+
+  if (!cmd || cmd === '--help' || cmd === '-h') {
+    console.error(
+      'Usage: bun run feed ' +
+        '<reddit|trending|new-repos|claude-releases|all>'
+    );
+    process.exit(1);
+  }
+
+  if (cmd === 'all') {
+    for (const [name, fn] of Object.entries(COMMANDS)) {
+      log(`\n--- ${name} ---`);
+      const output = await fn();
+      if (output) console.log(output);
+    }
+    return;
+  }
+
+  const fn = COMMANDS[cmd];
+  if (!fn) {
+    console.error(`Unknown command: ${cmd}`);
+    console.error(
+      `Available: ${Object.keys(COMMANDS).join(', ')}, all`
+    );
+    process.exit(1);
+  }
+
+  const output = await fn();
+  if (output) console.log(output);
+  else log('(no output)');
+}
+
+main().catch((e) => {
+  console.error(e);
+  process.exit(1);
+});
diff --git a/src/utils.ts b/src/utils.ts
new file mode 100644
index 0000000..899cd59
--- /dev/null
+++ b/src/utils.ts
@@ -0,0 +1,70 @@
+export function daysAgo(n: number): string {
+  const d = new Date();
+  d.setDate(d.getDate() - n);
+  return d.toISOString().split('T')[0];
+}
+
+export function log(...args: unknown[]) {
+  console.error(...args);
+}
+
+export function sleep(ms: number): Promise<void> {
+  return new Promise((r) => setTimeout(r, ms));
+}
+
+export async function ghSearch(query: string): Promise<GHRepo[]> {
+  const url =
+    `https://api.github.com/search/repositories` +
+    `?q=${encodeURIComponent(query)}&sort=stars&order=desc&per_page=30`;
+  log(`  fetching: ${query}`);
+
+  const res = await fetch(url, {
+    headers: { Accept: 'application/vnd.github.v3+json' },
+  });
+
+  if (!res.ok) {
+    log(`  ⚠ GitHub API ${res.status}: ${await res.text()}`);
+    return [];
+  }
+
+  const data = (await res.json()) as { items: GHRepo[] };
+  return data.items ?? [];
+}
+
+export interface GHRepo {
+  full_name: string;
+  description: string | null;
+  stargazers_count: number;
+  language: string | null;
+  html_url: string;
+  created_at: string;
+  pushed_at: string;
+  topics?: string[];
+}
+
+export function formatRepos(repos: GHRepo[], title: string): string {
+  if (!repos.length) return `## ${title}\n\n_No results found._\n`;
+
+  const lines = [`## ${title}\n`];
+  for (const r of repos) {
+    const desc = r.description
+      ? r.description.slice(0, 120)
+      : '_No description_';
+    const lang = r.language ?? '?';
+    lines.push(
+      `**[${r.full_name}](${r.html_url})** ` +
+        `⭐ ${r.stargazers_count.toLocaleString()} | \`${lang}\``
+    );
+    lines.push(`> ${desc}\n`);
+  }
+  return lines.join('\n');
+}
+
+export function dedupeRepos(repos: GHRepo[]): GHRepo[] {
+  const seen = new Set<string>();
+  return repos.filter((r) => {
+    if (seen.has(r.full_name)) return false;
+    seen.add(r.full_name);
+    return true;
+  });
+}