feat(twitter): add X/Twitter AI feed scraper
Uses the Twitter API v2 `search/recent` endpoint with a bearer token. Searches for AI agent, Claude Code, and LLM agent topics. Also adds `.env` to `.gitignore`.
This commit is contained in:
parent
bb90d15209
commit
d13032d515
1
.gitignore
vendored
1
.gitignore
vendored
@ -1,3 +1,4 @@
|
||||
node_modules/
|
||||
bun.lock
|
||||
.last-version
|
||||
.env
|
||||
|
||||
@ -44,6 +44,17 @@ export const NEW_REPOS_MIN_STARS = 100;
|
||||
export const NEW_REPOS_CREATED_DAYS_AGO = 7;
|
||||
export const NEW_REPOS_MAX_RESULTS = 15;
|
||||
|
||||
// --- Twitter AI ---
// Base URL for Twitter (X) API v2 endpoints.
export const TWITTER_API = 'https://api.twitter.com/2';
// Query strings for the v2 recent-search endpoint. Operators:
// -is:retweet / -is:reply exclude retweets and replies; lang:en
// restricts results to English tweets.
export const TWITTER_SEARCH_QUERIES = [
  '(AI agents OR "claude code" OR "LLM agents" OR ' +
    '"autonomous agents" OR "agentic AI") ' +
    '-is:retweet -is:reply lang:en',
];
// Only fetch tweets newer than this many hours (drives start_time).
export const TWITTER_MAX_AGE_HOURS = 24;
// Maximum number of tweets kept in the final digest.
export const TWITTER_MAX_RESULTS = 15;
// Tweet text is truncated to this many characters in the digest.
export const TWITTER_SNIPPET_LENGTH = 280;
|
||||
|
||||
// --- Claude Code Releases ---
// Base URL for the GitHub REST API.
export const GITHUB_API = 'https://api.github.com';
// Base URL for the npm package registry.
export const NPM_REGISTRY = 'https://registry.npmjs.org';
|
||||
|
||||
178
src/feeds/twitter-ai.ts
Normal file
178
src/feeds/twitter-ai.ts
Normal file
@ -0,0 +1,178 @@
|
||||
import {
|
||||
TWITTER_API,
|
||||
TWITTER_SEARCH_QUERIES,
|
||||
TWITTER_MAX_AGE_HOURS,
|
||||
TWITTER_MAX_RESULTS,
|
||||
TWITTER_SNIPPET_LENGTH,
|
||||
} from '../config';
|
||||
import { log } from '../utils';
|
||||
|
||||
// --- Types ---

// Engagement counters returned under tweet.public_metrics.
interface TweetMetrics {
  like_count: number;
  retweet_count: number;
  reply_count: number;
  impression_count: number;
}

// Subset of the v2 tweet object requested via tweet.fields.
interface Tweet {
  id: string;
  text: string;
  author_id: string;
  public_metrics: TweetMetrics;
  created_at: string;
}

// Subset of the v2 user object requested via user.fields.
interface TwitterUser {
  id: string;
  name: string;
  username: string;
}

// Top-level shape of a recent-search response.
interface TwitterSearchResponse {
  data?: Tweet[]; // absent when the search returns no matches
  includes?: { users?: TwitterUser[] }; // author objects from expansions
  meta?: { result_count: number; next_token?: string };
}
|
||||
|
||||
// --- API ---
|
||||
|
||||
function getToken(): string {
|
||||
const token = process.env.TWITTER_BEARER_TOKEN;
|
||||
if (!token) {
|
||||
throw new Error(
|
||||
'TWITTER_BEARER_TOKEN not set in environment'
|
||||
);
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
function buildStartTime(): string {
|
||||
const d = new Date();
|
||||
d.setHours(d.getHours() - TWITTER_MAX_AGE_HOURS);
|
||||
return d.toISOString();
|
||||
}
|
||||
|
||||
async function searchTweets(
|
||||
query: string
|
||||
): Promise<TwitterSearchResponse> {
|
||||
const params = new URLSearchParams({
|
||||
query,
|
||||
max_results: '100',
|
||||
'tweet.fields': 'public_metrics,created_at,author_id',
|
||||
expansions: 'author_id',
|
||||
'user.fields': 'name,username',
|
||||
start_time: buildStartTime(),
|
||||
});
|
||||
|
||||
const url = `${TWITTER_API}/tweets/search/recent?${params}`;
|
||||
log(` fetching: ${query.slice(0, 60)}...`);
|
||||
|
||||
const res = await fetch(url, {
|
||||
headers: { Authorization: `Bearer ${getToken()}` },
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
const body = await res.text();
|
||||
log(` ⚠ Twitter API ${res.status}: ${body}`);
|
||||
return {};
|
||||
}
|
||||
|
||||
return (await res.json()) as TwitterSearchResponse;
|
||||
}
|
||||
|
||||
// --- Filter & Sort ---
|
||||
|
||||
function engagementScore(t: Tweet): number {
|
||||
const m = t.public_metrics;
|
||||
return m.like_count + m.retweet_count * 2;
|
||||
}
|
||||
|
||||
function topTweets(tweets: Tweet[]): Tweet[] {
|
||||
return [...tweets]
|
||||
.sort((a, b) => engagementScore(b) - engagementScore(a))
|
||||
.slice(0, TWITTER_MAX_RESULTS);
|
||||
}
|
||||
|
||||
// --- Format ---
|
||||
|
||||
function snippet(text: string): string {
|
||||
const clean = text.replace(/\n+/g, ' ').trim();
|
||||
if (clean.length <= TWITTER_SNIPPET_LENGTH) return clean;
|
||||
return clean.slice(0, TWITTER_SNIPPET_LENGTH).trimEnd() + '…';
|
||||
}
|
||||
|
||||
function tweetUrl(username: string, id: string): string {
|
||||
return `https://x.com/${username}/status/${id}`;
|
||||
}
|
||||
|
||||
function formatTweet(
|
||||
tweet: Tweet,
|
||||
users: Map<string, TwitterUser>
|
||||
): string {
|
||||
const user = users.get(tweet.author_id);
|
||||
const name = user?.name ?? 'Unknown';
|
||||
const handle = user?.username ?? 'unknown';
|
||||
const m = tweet.public_metrics;
|
||||
const link = tweetUrl(handle, tweet.id);
|
||||
|
||||
return (
|
||||
`**${name}** ([@${handle}](${link}))\n` +
|
||||
`> ${snippet(tweet.text)}\n` +
|
||||
`❤️ ${m.like_count.toLocaleString()} | ` +
|
||||
`🔁 ${m.retweet_count.toLocaleString()}`
|
||||
);
|
||||
}
|
||||
|
||||
function formatDigest(
|
||||
tweets: Tweet[],
|
||||
users: Map<string, TwitterUser>
|
||||
): string {
|
||||
if (!tweets.length) {
|
||||
return '*No trending AI tweets found in the last 24h.*';
|
||||
}
|
||||
|
||||
const body = tweets.map((t) => formatTweet(t, users)).join('\n\n');
|
||||
const now = new Date().toISOString().slice(0, 16).replace('T', ' ');
|
||||
return `# 🐦 Trending AI Tweets — ${now} UTC\n\n${body}\n`;
|
||||
}
|
||||
|
||||
// --- Entry ---
|
||||
|
||||
export async function run(): Promise<string> {
|
||||
log('[twitter] Searching for trending AI tweets...');
|
||||
|
||||
const allTweets: Tweet[] = [];
|
||||
const userMap = new Map<string, TwitterUser>();
|
||||
|
||||
for (const query of TWITTER_SEARCH_QUERIES) {
|
||||
const resp = await searchTweets(query);
|
||||
|
||||
if (resp.data) allTweets.push(...resp.data);
|
||||
if (resp.includes?.users) {
|
||||
for (const u of resp.includes.users) {
|
||||
userMap.set(u.id, u);
|
||||
}
|
||||
}
|
||||
|
||||
log(` ${resp.meta?.result_count ?? 0} results`);
|
||||
}
|
||||
|
||||
// Dedupe by tweet id
|
||||
const seen = new Set<string>();
|
||||
const unique = allTweets.filter((t) => {
|
||||
if (seen.has(t.id)) return false;
|
||||
seen.add(t.id);
|
||||
return true;
|
||||
});
|
||||
|
||||
const top = topTweets(unique);
|
||||
log(`[twitter] ${unique.length} total, ${top.length} top tweets`);
|
||||
|
||||
return formatDigest(top, userMap);
|
||||
}
|
||||
|
||||
// When this module is executed directly (e.g. `bun src/feeds/twitter-ai.ts`),
// print the digest to stdout; as an import, it only exposes run().
if (import.meta.main) {
  console.log(await run());
}
|
||||
@ -2,6 +2,7 @@ import { run as reddit } from './feeds/reddit-digest';
|
||||
import { run as trending } from './feeds/github-trending';
|
||||
import { run as newRepos } from './feeds/new-ai-repos';
|
||||
import { run as claudeReleases } from './feeds/claude-code-releases';
|
||||
import { run as twitter } from './feeds/twitter-ai';
|
||||
import { log } from './utils';
|
||||
|
||||
const COMMANDS: Record<string, () => Promise<string>> = {
|
||||
@ -9,6 +10,7 @@ const COMMANDS: Record<string, () => Promise<string>> = {
|
||||
trending,
|
||||
'new-repos': newRepos,
|
||||
'claude-releases': claudeReleases,
|
||||
twitter,
|
||||
};
|
||||
|
||||
async function main() {
|
||||
@ -17,7 +19,7 @@ async function main() {
|
||||
if (!cmd || cmd === '--help' || cmd === '-h') {
|
||||
console.error(
|
||||
'Usage: bun run feed ' +
|
||||
'<reddit|trending|new-repos|claude-releases|all>'
|
||||
'<reddit|trending|new-repos|claude-releases|twitter|all>'
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user