#!/bin/bash
#
# Reonomy v14 Runner — self-configuring search + scrape.
#
# Usage:
#   REONOMY_STATE="New Jersey" REONOMY_TYPES="Industrial" \
#   REONOMY_MIN_SF="50000" REONOMY_SALE_FILTER="not_within_10y" \
#   MAX_PROPERTIES=20 ./reonomy-run-v14.sh
#
# Reads:  REONOMY_STATE, REONOMY_TYPES, REONOMY_MIN_SF, REONOMY_SALE_FILTER,
#         MAX_PROPERTIES — all optional. NOTE(review): this script only echoes
#         their effective defaults; the scraper presumably reads them from the
#         environment itself — confirm against reonomy-scraper-v14.js.
# Writes: $WORKSPACE/reonomy-leads-v14.json   (reset at the start of each run)
#         $WORKSPACE/reonomy-leads-v14-run-<ts>.json  (timestamped snapshot)
#         $WORKSPACE/reonomy-export-<ts>.csv          (CSV export)

# -e: exit on error; -u: error on unset vars; pipefail: fail whole pipeline
# if any stage fails (plain -e misses mid-pipeline failures).
set -euo pipefail

readonly WORKSPACE="$HOME/.clawdbot/workspace"
TIMESTAMP=$(date +%Y%m%d-%H%M%S)
readonly TIMESTAMP
readonly EXPORT_CSV="$WORKSPACE/reonomy-export-${TIMESTAMP}.csv"
readonly FRESH_JSON="$WORKSPACE/reonomy-leads-v14-run-${TIMESTAMP}.json"

# Fail fast with a clear message rather than mid-run when node is missing.
command -v node >/dev/null 2>&1 || { echo "error: node not found in PATH" >&2; exit 1; }

echo "=== Reonomy v14 Run ==="
echo "State: ${REONOMY_STATE:-New Jersey}"
echo "Types: ${REONOMY_TYPES:-Industrial}"
echo "Min SF: ${REONOMY_MIN_SF:-none}"
echo "Sale filter: ${REONOMY_SALE_FILTER:-none}"
echo "Max: ${MAX_PROPERTIES:-20}"
echo ""

# Clear leads for a fresh run so the scraper starts from an empty result set.
echo '{"lastUpdated":"","searchId":"","totalLeads":0,"leads":[]}' > "$WORKSPACE/reonomy-leads-v14.json"

# Run scraper (aborts the script on failure via set -e).
node "$WORKSPACE/reonomy-scraper-v14.js"

# Snapshot the raw JSON under a timestamped name so reruns don't clobber it.
cp "$WORKSPACE/reonomy-leads-v14.json" "$FRESH_JSON"

# Convert to CSV.
echo ""
echo "Converting to CSV..."
node "$WORKSPACE/reonomy-to-csv.js" "$FRESH_JSON" "$EXPORT_CSV"

# Pass the path via argv instead of interpolating it into the JS source:
# interpolation breaks (and is injectable) if the path contains quotes.
LEAD_COUNT=$(node -e 'const d=JSON.parse(require("fs").readFileSync(process.argv[1],"utf8"));console.log(d.totalLeads)' "$FRESH_JSON")

echo ""
echo "=== Done ==="
echo "Leads: $LEAD_COUNT"
echo "CSV: $EXPORT_CSV"