#!/bin/bash
# Reonomy Scraper Runner
# Usage: ./reonomy-run.sh [max_properties] [search_id]
# Output: reonomy-export-TIMESTAMP.csv
#
# Env consumed by the node scripts: MAX_PROPERTIES, REONOMY_SEARCH_ID.
set -euo pipefail

WORKSPACE="$HOME/.clawdbot/workspace"
MAX_PROPS="${1:-10}"
SEARCH_ID="${2:-}"
TIMESTAMP=$(date +%Y%m%d-%H%M%S)
EXPORT_CSV="$WORKSPACE/reonomy-export-${TIMESTAMP}.csv"
FRESH_JSON="$WORKSPACE/reonomy-leads-run-${TIMESTAMP}.json"

# Fail fast BEFORE clobbering the leads file: a missing workspace or helper
# script would otherwise wipe reonomy-leads-v13.json and then die anyway.
[ -d "$WORKSPACE" ] || { echo "Error: workspace not found: $WORKSPACE" >&2; exit 1; }
for script in reonomy-scraper-v13.js reonomy-to-csv.js; do
  [ -f "$WORKSPACE/$script" ] || { echo "Error: missing $WORKSPACE/$script" >&2; exit 1; }
done

echo "=== Reonomy Scraper Run ==="
echo "Max properties: $MAX_PROPS"
echo "Search ID: ${SEARCH_ID:-default}"
echo "Output: $EXPORT_CSV"
echo ""

# Clear the leads file for a fresh run
echo '{"lastUpdated":"","searchId":"","totalLeads":0,"leads":[]}' > "$WORKSPACE/reonomy-leads-v13.json"

# Run scraper (configured via environment variables)
export MAX_PROPERTIES="$MAX_PROPS"
if [ -n "$SEARCH_ID" ]; then
  export REONOMY_SEARCH_ID="$SEARCH_ID"
fi

echo "Starting scraper..."
node "$WORKSPACE/reonomy-scraper-v13.js"

# Keep a timestamped copy of the raw JSON so the next run can't clobber it
cp "$WORKSPACE/reonomy-leads-v13.json" "$FRESH_JSON"

# Convert to CSV
echo ""
echo "Converting to CSV..."
node "$WORKSPACE/reonomy-to-csv.js" "$FRESH_JSON" "$EXPORT_CSV"

# Summary. Pass the path via process.argv instead of interpolating it into
# the JS source text, so quotes/special characters in the path can't break
# (or inject into) the one-liner.
LEAD_COUNT=$(node -e "const d=JSON.parse(require('fs').readFileSync(process.argv[1],'utf8'));console.log(d.totalLeads)" "$FRESH_JSON")
echo ""
echo "=== Done ==="
echo "Leads: $LEAD_COUNT"
echo "JSON: $FRESH_JSON"
echo "CSV: $EXPORT_CSV"