'use client'

// NOTE(review): this file was recovered from a copy that had been run through
// an HTML tag stripper — every `<...>` span (all JSX tags AND all TypeScript
// generic arguments) had been deleted. The hook logic, data transforms, and
// visible text below are reproduced verbatim from the stripped source; the
// JSX markup is a reconstruction and should be verified against git history.

import React, { useEffect, useState, useMemo, useCallback } from 'react'
import { useToast } from '@/components/ui/toast'
import {
  BarChart,
  Bar,
  PieChart,
  Pie,
  Cell,
  XAxis,
  YAxis,
  CartesianGrid,
  Tooltip,
  Legend,
  ResponsiveContainer,
} from 'recharts'

/** A problem cluster as returned by GET /api/clusters. */
interface Problem {
  clusterId: number
  problem: string
  description: string
  size: number
  totalEngagement: number
  lastActive: number // unix seconds (multiplied by 1000 before Date construction below)
  subreddits: string[]
  sampleQuestions: string[]
  impactScore?: number // computed client-side in sortedProblems, not sent by the API
}

/** One sampled Reddit post or comment belonging to a cluster. */
interface DiscussionSample {
  id: string
  type: 'post' | 'comment'
  subreddit: string
  title?: string
  author: string
  body: string
  score: number
  created: number // unix seconds
  permalink: string
  parent_id?: string
}

/**
 * Lazy-loaded list of raw Reddit discussions for one cluster.
 * Fetches /api/clusters/{clusterId} on mount and whenever clusterId changes.
 * Bodies longer than 300 chars are truncated with a per-sample expand toggle.
 */
function DiscussionSamples({ clusterId }: { clusterId: number }) {
  const [samples, setSamples] = useState<DiscussionSample[]>([])
  const [loading, setLoading] = useState(true)
  // ids of samples whose full body is currently shown
  const [expandedBodies, setExpandedBodies] = useState<Set<string>>(new Set())

  useEffect(() => {
    fetch(`/api/clusters/${clusterId}`)
      .then(res => res.json())
      .then(data => {
        setSamples(data.samples || [])
        setLoading(false)
      })
      .catch(() => setLoading(false)) // best-effort: render the empty state on failure
  }, [clusterId])

  // Toggle a sample id in/out of the expanded set (immutably, so React re-renders).
  const toggleBody = useCallback((id: string) => {
    setExpandedBodies(prev => {
      const next = new Set(prev)
      if (next.has(id)) {
        next.delete(id)
      } else {
        next.add(id)
      }
      return next
    })
  }, [])

  const formatDate = (timestamp: number) => {
    const date = new Date(timestamp * 1000)
    return date.toLocaleDateString()
  }

  if (loading) {
    return <div className="text-sm text-muted-foreground">Loading discussions...</div>
  }

  if (samples.length === 0) {
    return (
      <div className="text-sm text-muted-foreground">
        No discussion samples available. Re-run clustering to populate.
      </div>
    )
  }

  return (
    <div className="space-y-3">
      <div className="text-sm font-medium">Discussion Samples ({samples.length})</div>
      {samples.map(sample => {
        const isExpanded = expandedBodies.has(sample.id)
        const bodyTruncated = sample.body.length > 300
        const displayBody = isExpanded ? sample.body : sample.body.slice(0, 300)
        return (
          <div key={sample.id} className="rounded border border-border p-3 text-sm">
            <div className="flex flex-wrap items-center gap-2 text-xs text-muted-foreground">
              <span>{sample.type}</span>
              <span>r/{sample.subreddit}</span>
              <span>u/{sample.author}</span>
              <span>{sample.score} pts</span>
              <span>{formatDate(sample.created)}</span>
              {/* NOTE(review): href reconstructed — permalink is relative in the Reddit API */}
              <a
                href={`https://reddit.com${sample.permalink}`}
                target="_blank"
                rel="noopener noreferrer"
                className="ml-auto text-primary hover:underline"
              >
                view on reddit →
              </a>
            </div>
            {sample.title && (
              <div className="mt-1 font-medium">{sample.title}</div>
            )}
            <div className="mt-1 whitespace-pre-wrap">
              {displayBody}
              {bodyTruncated && !isExpanded && '...'}
            </div>
            {bodyTruncated && (
              <button
                type="button"
                onClick={() => toggleBody(sample.id)}
                className="mt-1 text-xs text-primary hover:underline"
              >
                {isExpanded ? 'show less' : 'show more'}
              </button>
            )}
          </div>
        )
      })}
    </div>
  )
}

/** Relative weights for the client-side impact score (intended to sum to 1). */
interface Weights {
  engagement: number
  velocity: number
  sentiment: number
}

/** Pairwise cluster similarity as returned by GET /api/clusters/similarity. */
interface SimilarityData {
  matrix: number[][] // square, values in [0, 1]
  labels: string[]
  clusterIds: number[]
}

/**
 * CSS-grid heatmap of pairwise cluster keyword similarity.
 * Hovering a cell shows its percentage; clicking reports the two cluster ids.
 */
function CorrelationHeatmap({
  onCellClick,
}: {
  onCellClick?: (clusterIds: [number, number]) => void
}) {
  const [data, setData] = useState<SimilarityData | null>(null)
  const [loading, setLoading] = useState(true)
  const [hoveredCell, setHoveredCell] = useState<{ i: number; j: number } | null>(null)

  useEffect(() => {
    fetch('/api/clusters/similarity')
      .then(res => res.json())
      .then(d => {
        setData(d)
        setLoading(false)
      })
      .catch(() => setLoading(false))
  }, [])

  if (loading) {
    return <div className="text-sm text-muted-foreground">Loading correlation data...</div>
  }

  if (!data || data.matrix.length === 0) {
    return <div className="text-sm text-muted-foreground">No clusters to compare</div>
  }

  const getColor = (value: number) => {
    // white (0) -> indigo (1)
    const intensity = Math.round(value * 255)
    return `rgb(${255 - intensity * 0.6}, ${255 - intensity * 0.62}, ${255 - intensity * 0.05})`
  }

  const n = data.matrix.length
  // keep cells between 24px and 40px, shrinking as the matrix grows past ~10 clusters
  const cellSize = Math.max(24, Math.min(40, 400 / n))

  return (
    <div className="overflow-x-auto">
      <div
        className="inline-grid"
        style={{ gridTemplateColumns: `auto repeat(${n}, ${cellSize}px)` }}
      >
        {/* empty corner cell */}
        <div />
        {/* column headers */}
        {data.labels.map((label, j) => (
          <div
            key={`col-${j}`}
            className="truncate text-[10px] text-muted-foreground"
            style={{ width: cellSize }}
            title={label}
          >
            {label.slice(0, 15)}
          </div>
        ))}
        {/* rows */}
        {data.matrix.map((row, i) => (
          <React.Fragment key={`row-${i}`}>
            {/* row label */}
            <div
              className="pr-2 text-right text-[10px] leading-none text-muted-foreground"
              style={{ height: cellSize }}
              title={data.labels[i]}
            >
              {data.labels[i].slice(0, 12)}
            </div>
            {/* cells */}
            {row.map((value, j) => {
              const isHovered = hoveredCell?.i === i && hoveredCell?.j === j
              return (
                <div
                  key={`cell-${i}-${j}`}
                  className="relative cursor-pointer border border-background"
                  style={{
                    width: cellSize,
                    height: cellSize,
                    backgroundColor: getColor(value),
                  }}
                  onMouseEnter={() => setHoveredCell({ i, j })}
                  onMouseLeave={() => setHoveredCell(null)}
                  onClick={() => onCellClick?.([data.clusterIds[i], data.clusterIds[j]])}
                  title={`${data.labels[i]} ↔ ${data.labels[j]}: ${(value * 100).toFixed(0)}%`}
                >
                  {isHovered && (
                    <div className="absolute -top-6 left-1/2 z-10 -translate-x-1/2 whitespace-nowrap rounded bg-foreground px-1.5 py-0.5 text-[10px] text-background">
                      {(value * 100).toFixed(0)}% similar
                    </div>
                  )}
                </div>
              )
            })}
          </React.Fragment>
        ))}
      </div>
      {/* legend */}
      <div className="mt-2 flex items-center gap-2 text-xs text-muted-foreground">
        <span>0%</span>
        <div
          className="h-2 w-24 rounded"
          style={{ background: `linear-gradient(to right, ${getColor(0)}, ${getColor(1)})` }}
        />
        <span>100% keyword overlap</span>
      </div>
    </div>
  )
}

// chart colors - recharts can't parse CSS variables at runtime
const CHART_COLORS = {
  primary: '#6366f1', // indigo
  secondary: '#8b5cf6', // violet
  accent: '#06b6d4', // cyan
  muted: '#94a3b8', // slate
  grid: '#e2e8f0', // light grid
  text: '#64748b', // muted text
  palette: ['#6366f1', '#8b5cf6', '#06b6d4', '#10b981', '#f59e0b', '#ef4444', '#ec4899', '#84cc16'],
}

/**
 * Problem Explorer page: lists problem clusters mined from Reddit discussions,
 * with tunable impact-score weights, clustering controls, analytics charts,
 * a cluster-correlation heatmap, and expandable per-cluster detail rows.
 */
export default function ProblemsPage() {
  const [problems, setProblems] = useState<Problem[]>([])
  const [loading, setLoading] = useState(true)
  // clusterId of the currently expanded table row, or null
  const [expanded, setExpanded] = useState<number | null>(null)
  const [clustering, setClustering] = useState(false)
  const { addToast, updateToast } = useToast()
  const [similarityThreshold, setSimilarityThreshold] = useState(0.5)
  const [minClusterSize, setMinClusterSize] = useState(2)
  const [weights, setWeights] = useState<Weights>({
    engagement: 0.5,
    velocity: 0.3,
    sentiment: 0.2,
  })

  const fetchClusters = () => {
    fetch('/api/clusters')
      .then(res => res.json())
      .then(data => {
        setProblems(data.clusters || [])
        setLoading(false)
      })
      .catch(() => setLoading(false))
  }

  useEffect(() => {
    fetchClusters()
  }, [])

  // Recompute impact scores whenever the raw clusters or the weights change:
  //   engagement — total upvotes normalized against the max cluster
  //   velocity   — linear decay from 1 (active now) to 0 (inactive for a week+)
  //   sentiment  — fixed 0.5 placeholder (no sentiment data wired up yet)
  const sortedProblems = useMemo(() => {
    if (problems.length === 0) return []
    const now = Math.floor(Date.now() / 1000)
    const oneWeek = 7 * 24 * 60 * 60
    const maxEngagement = Math.max(...problems.map(p => p.totalEngagement))
    return [...problems]
      .map(p => {
        const engagementScore = maxEngagement > 0 ? p.totalEngagement / maxEngagement : 0
        const age = now - p.lastActive
        const velocityScore = Math.max(0, 1 - age / oneWeek)
        const sentimentScore = 0.5
        const impactScore =
          weights.engagement * engagementScore +
          weights.velocity * velocityScore +
          weights.sentiment * sentimentScore
        return { ...p, impactScore }
      })
      .sort((a, b) => (b.impactScore || 0) - (a.impactScore || 0))
  }, [problems, weights])

  // Derive the three chart datasets (top-10 impact bars, subreddit pie, size histogram).
  const chartData = useMemo(() => {
    if (sortedProblems.length === 0) return { impact: [], subreddits: [], sizes: [] }

    const impactData = sortedProblems.slice(0, 10).map(p => ({
      name: p.problem.slice(0, 30) + (p.problem.length > 30 ? '...' : ''),
      impact: Math.round((p.impactScore || 0) * 100),
      engagement: p.totalEngagement,
      discussions: p.size,
    }))

    const subredditCounts = new Map<string, number>()
    sortedProblems.forEach(p => {
      p.subreddits.forEach(sub => {
        subredditCounts.set(sub, (subredditCounts.get(sub) || 0) + 1)
      })
    })
    const subredditData = Array.from(subredditCounts.entries())
      .map(([name, value]) => ({ name: `r/${name}`, value }))
      .sort((a, b) => b.value - a.value)
      .slice(0, 8)

    const sizeDistribution = sortedProblems.reduce((acc, p) => {
      const bucket = p.size < 5 ? '2-4' : p.size < 10 ? '5-9' : p.size < 20 ? '10-19' : '20+'
      acc[bucket] = (acc[bucket] || 0) + 1
      return acc
    }, {} as Record<string, number>)
    const sizeData = Object.entries(sizeDistribution).map(([name, value]) => ({
      name: `${name} discussions`,
      value,
    }))

    return { impact: impactData, subreddits: subredditData, sizes: sizeData }
  }, [sortedProblems])

  // POST the current clustering settings and refresh on success,
  // reporting progress through a single updatable toast.
  const handleRecluster = async () => {
    setClustering(true)
    const toastId = addToast('Clustering discussions...', 'loading')
    try {
      const res = await fetch('/api/clusters', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ similarityThreshold, minClusterSize }),
      })
      const data = await res.json()
      if (data.success) {
        updateToast(toastId, `Found ${data.clusters?.length || 0} problem clusters`, 'success')
        fetchClusters()
      } else {
        updateToast(toastId, data.error || 'Clustering failed', 'error')
      }
    } catch (e) {
      updateToast(toastId, 'Clustering failed - check console', 'error')
    } finally {
      setClustering(false)
    }
  }

  const updateWeight = (key: keyof Weights, value: number) => {
    setWeights(prev => ({ ...prev, [key]: value }))
  }

  const formatDate = (timestamp: number) => {
    const date = new Date(timestamp * 1000)
    return date.toLocaleDateString()
  }

  if (loading) {
    return <div className="p-8 text-muted-foreground">Loading...</div>
  }

  const clusterControls = (
    <div className="rounded-lg border border-border p-4">
      <h3 className="mb-3 text-sm font-medium">Clustering Settings</h3>
      <label className="block text-xs text-muted-foreground">
        Similarity threshold: {similarityThreshold.toFixed(2)}
      </label>
      <input
        type="range"
        min={0.3}
        max={0.9}
        step={0.05}
        value={similarityThreshold}
        onChange={e => setSimilarityThreshold(parseFloat(e.target.value))}
        className="w-full accent-primary"
      />
      <div className="flex justify-between text-xs text-muted-foreground">
        <span>Loose (0.3)</span>
        <span>Strict (0.9)</span>
      </div>
      {/* NOTE(review): the min-cluster-size input and the recluster button were
          lost in the stripped source (the state and handler survive); markup
          below is a minimal reconstruction. */}
      <label className="mt-3 block text-xs text-muted-foreground">
        Min cluster size: {minClusterSize}
      </label>
      <input
        type="number"
        min={2}
        value={minClusterSize}
        onChange={e => setMinClusterSize(parseInt(e.target.value, 10) || 2)}
        className="w-full rounded border border-border bg-background px-2 py-1 text-sm"
      />
      <button
        type="button"
        onClick={handleRecluster}
        disabled={clustering}
        className="mt-3 rounded bg-primary px-3 py-1.5 text-sm text-primary-foreground disabled:opacity-50"
      >
        {clustering ? 'Clustering...' : 'Run Clustering'}
      </button>
    </div>
  )

  const weightControls = (
    <div className="rounded-lg border border-border p-4">
      <h3 className="mb-3 text-sm font-medium">Impact Score Weights</h3>
      <label className="block text-xs text-muted-foreground">
        Engagement: {weights.engagement.toFixed(2)}
      </label>
      <input
        type="range"
        min={0}
        max={1}
        step={0.05}
        value={weights.engagement}
        onChange={e => updateWeight('engagement', parseFloat(e.target.value))}
        className="w-full accent-primary"
      />
      <label className="mt-2 block text-xs text-muted-foreground">
        Velocity: {weights.velocity.toFixed(2)}
      </label>
      <input
        type="range"
        min={0}
        max={1}
        step={0.05}
        value={weights.velocity}
        onChange={e => updateWeight('velocity', parseFloat(e.target.value))}
        className="w-full accent-primary"
      />
      <label className="mt-2 block text-xs text-muted-foreground">
        Sentiment: {weights.sentiment.toFixed(2)}
      </label>
      <input
        type="range"
        min={0}
        max={1}
        step={0.05}
        value={weights.sentiment}
        onChange={e => updateWeight('sentiment', parseFloat(e.target.value))}
        className="w-full accent-primary"
      />
    </div>
  )

  if (problems.length === 0) {
    return (
      <div className="p-8">
        <h1 className="text-2xl font-semibold">Problem Explorer</h1>
        <p className="mt-1 text-muted-foreground">View and analyze problem clusters</p>
        <div className="mt-6 max-w-md">{clusterControls}</div>
        <p className="mt-6 text-muted-foreground">
          No problems found. Adjust settings and run clustering.
        </p>
      </div>
    )
  }

  return (
    <div className="p-8">
      <h1 className="text-2xl font-semibold">Problem Explorer</h1>
      <p className="mt-1 text-muted-foreground">
        {sortedProblems.length} problem clusters identified
      </p>

      <div className="mt-6 grid gap-4 md:grid-cols-2">
        {clusterControls}
        {weightControls}
      </div>

      {sortedProblems.length > 0 && (
        <div className="mt-8 space-y-6">
          <h2 className="text-lg font-semibold">Problem Analytics</h2>
          <div className="grid gap-6 lg:grid-cols-2">
            <div className="rounded-lg border border-border p-4">
              <h3 className="mb-2 text-sm font-medium">Top Problems by Impact Score</h3>
              <ResponsiveContainer width="100%" height={300}>
                <BarChart data={chartData.impact} layout="vertical">
                  <CartesianGrid stroke={CHART_COLORS.grid} />
                  <XAxis type="number" tick={{ fill: CHART_COLORS.text, fontSize: 12 }} />
                  <YAxis
                    type="category"
                    dataKey="name"
                    width={150}
                    tick={{ fill: CHART_COLORS.text, fontSize: 11 }}
                  />
                  <Tooltip />
                  <Bar dataKey="impact" fill={CHART_COLORS.primary} />
                </BarChart>
              </ResponsiveContainer>
            </div>

            <div className="rounded-lg border border-border p-4">
              <h3 className="mb-2 text-sm font-medium">Discussion Distribution by Subreddit</h3>
              <ResponsiveContainer width="100%" height={300}>
                <PieChart>
                  <Pie
                    data={chartData.subreddits}
                    label={({ name, percent }) =>
                      `${name} (${(percent * 100).toFixed(0)}%)`
                    }
                    outerRadius={80}
                    dataKey="value"
                  >
                    {chartData.subreddits.map((_, index) => (
                      <Cell
                        key={`cell-${index}`}
                        fill={CHART_COLORS.palette[index % CHART_COLORS.palette.length]}
                      />
                    ))}
                  </Pie>
                  <Tooltip />
                  <Legend />
                </PieChart>
              </ResponsiveContainer>
            </div>

            <div className="rounded-lg border border-border p-4">
              <h3 className="mb-2 text-sm font-medium">Cluster Size Distribution</h3>
              <ResponsiveContainer width="100%" height={300}>
                <BarChart data={chartData.sizes}>
                  <CartesianGrid stroke={CHART_COLORS.grid} />
                  <XAxis dataKey="name" tick={{ fill: CHART_COLORS.text, fontSize: 12 }} />
                  <YAxis tick={{ fill: CHART_COLORS.text, fontSize: 12 }} />
                  <Tooltip />
                  <Bar dataKey="value" fill={CHART_COLORS.secondary} />
                </BarChart>
              </ResponsiveContainer>
            </div>

            <div className="rounded-lg border border-border p-4">
              <h3 className="mb-3 text-sm font-medium">Key Metrics</h3>
              <div className="grid grid-cols-2 gap-4">
                <div>
                  <div className="text-2xl font-semibold">{sortedProblems.length}</div>
                  <div className="text-xs text-muted-foreground">Total Clusters</div>
                </div>
                <div>
                  <div className="text-2xl font-semibold">
                    {sortedProblems.reduce((sum, p) => sum + p.size, 0).toLocaleString()}
                  </div>
                  <div className="text-xs text-muted-foreground">Total Discussions</div>
                </div>
                <div>
                  <div className="text-2xl font-semibold">
                    {sortedProblems
                      .reduce((sum, p) => sum + p.totalEngagement, 0)
                      .toLocaleString()}
                  </div>
                  <div className="text-xs text-muted-foreground">Total Upvotes</div>
                </div>
                <div>
                  <div className="text-2xl font-semibold">
                    {new Set(sortedProblems.flatMap(p => p.subreddits)).size}
                  </div>
                  <div className="text-xs text-muted-foreground">Unique Subreddits</div>
                </div>
              </div>
            </div>
          </div>

          {/* correlation heatmap */}
          <div className="rounded-lg border border-border p-4">
            <h3 className="mb-3 text-sm font-medium">Problem Cluster Correlation</h3>
            <CorrelationHeatmap
              onCellClick={([id1, id2]) => {
                // expand first cluster that isn't already expanded
                if (expanded !== id1) {
                  setExpanded(id1)
                } else if (expanded !== id2) {
                  setExpanded(id2)
                }
              }}
            />
          </div>
        </div>
      )}

      <div className="mt-8 overflow-x-auto rounded-lg border border-border">
        <table className="w-full text-sm">
          <thead>
            <tr className="text-left text-xs text-muted-foreground">
              <th className="p-3 font-medium">Problem</th>
              <th className="p-3 font-medium">Impact</th>
              <th className="p-3 font-medium">Discussions</th>
              <th className="p-3 font-medium">Upvotes</th>
              <th className="p-3 font-medium">Last Active</th>
            </tr>
          </thead>
          <tbody>
            {sortedProblems.map(problem => (
              <React.Fragment key={problem.clusterId}>
                <tr
                  onClick={() =>
                    setExpanded(expanded === problem.clusterId ? null : problem.clusterId)
                  }
                  className="border-t border-border hover:bg-accent/50 cursor-pointer"
                >
                  <td className="p-3">
                    <div className="font-medium">{problem.problem}</div>
                    <div className="text-xs text-muted-foreground">
                      {problem.subreddits.map(s => `r/${s}`).join(', ')}
                    </div>
                  </td>
                  <td className="p-3">{((problem.impactScore || 0) * 100).toFixed(0)}</td>
                  <td className="p-3">{problem.size}</td>
                  <td className="p-3">{problem.totalEngagement.toLocaleString()}</td>
                  <td className="p-3">{formatDate(problem.lastActive)}</td>
                </tr>
                {expanded === problem.clusterId && (
                  <tr className="border-t border-border bg-accent/20">
                    <td colSpan={5} className="p-4">
                      <p className="text-sm text-muted-foreground">{problem.description}</p>
                      {problem.sampleQuestions.length > 0 && (
                        <div className="mt-3">
                          <div className="text-sm font-medium">Sample Questions:</div>
                          <ul className="mt-1 space-y-1 text-sm">
                            {problem.sampleQuestions.map((q, i) => (
                              <li key={i}>• {q}</li>
                            ))}
                          </ul>
                        </div>
                      )}
                      <div className="mt-4">
                        <DiscussionSamples clusterId={problem.clusterId} />
                      </div>
                    </td>
                  </tr>
                )}
              </React.Fragment>
            ))}
          </tbody>
        </table>
      </div>
    </div>
  )
}