Real Estate Property Monitoring
Track property listings and market changes automatically. Monitor listings, price changes, and new properties across multiple platforms so your market research runs on a schedule instead of by hand.
Quick Example
```python
import os
import requests

api_key = os.environ['SUPACRAWLER_API_KEY']

# Search pages to watch on each platform, plus the CSS selector for prices
properties = [
    {
        "site": "Zillow",
        "url": "https://www.zillow.com/san-francisco-ca/",
        "selector": ".property-card-price"
    },
    {
        "site": "Realtor.com",
        "url": "https://www.realtor.com/realestateandhomes-search/San-Francisco_CA",
        "selector": ".listing-price"
    }
]

# Create a daily watch for each platform
for prop in properties:
    requests.post(
        "https://api.supacrawler.com/api/v1/watch",
        headers={"Authorization": f"Bearer {api_key}"},
        json={
            "url": prop["url"],
            "frequency": "daily",
            "selector": prop["selector"],
            "notify_email": "you@example.com",
            "webhook_url": "https://your-api.com/property-updates"
        }
    )
```
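If you point `webhook_url` at your own service, you need an endpoint that accepts the POST notifications. Below is a minimal sketch of such a receiver using Flask; the payload fields it reads (`url`, `previous`, `current`) are assumptions for illustration, not the documented watch schema, so log and inspect a real payload before relying on them.

```python
# Minimal sketch of a webhook receiver for watch notifications.
# The payload fields used here (url, previous, current) are assumptions,
# not the documented schema -- log the raw body and adapt as needed.
from flask import Flask, request

app = Flask(__name__)

@app.route("/property-updates", methods=["POST"])
def property_updates():
    event = request.get_json(force=True)
    print("Watch notification received:", event)

    # Hypothetical fields; inspect a real payload before relying on them.
    url = event.get("url")
    old_value = event.get("previous")
    new_value = event.get("current")
    if old_value and new_value and old_value != new_value:
        print(f"Price change on {url}: {old_value} -> {new_value}")
    return {"status": "ok"}

if __name__ == "__main__":
    app.run(port=8000)
```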
Price Change Tracking
```python
import os
import re
from datetime import datetime

from supacrawler import SupacrawlerClient

client = SupacrawlerClient(api_key=os.environ['SUPACRAWLER_API_KEY'])

def track_price_changes(url):
    """Scrape a listing page and return every price found on it."""
    result = client.scrape(url, format="markdown")

    # Extract dollar amounts such as $1,250,000 from the page content
    prices = re.findall(r'\$[\d,]+', result.content)

    return {
        'url': url,
        'prices': prices,
        'timestamp': datetime.now().isoformat()
    }
```
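A single snapshot is not very useful on its own; the value comes from comparing it with the previous run. The sketch below keeps the last snapshot per URL in a local JSON file (`price_history.json` is a hypothetical name used only for illustration) and reports any listings whose extracted prices changed.

```python
import json
from pathlib import Path

HISTORY_FILE = Path("price_history.json")  # hypothetical local store

def detect_changes(urls):
    # Load the previous snapshots, if any
    history = json.loads(HISTORY_FILE.read_text()) if HISTORY_FILE.exists() else {}
    changes = []

    for url in urls:
        snapshot = track_price_changes(url)
        previous = history.get(url, {}).get('prices')
        if previous is not None and previous != snapshot['prices']:
            changes.append({'url': url, 'before': previous, 'after': snapshot['prices']})
        history[url] = snapshot

    # Persist the latest snapshots for the next run
    HISTORY_FILE.write_text(json.dumps(history, indent=2))
    return changes

for change in detect_changes(["https://www.zillow.com/san-francisco-ca/"]):
    print(f"{change['url']}: {change['before']} -> {change['after']}")
```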
Market Analysis
```python
# Reuses the client and imports from the previous example.
# Crawl individual listing pages discovered from the search page.
job = client.create_crawl_job(
    url="https://www.zillow.com/san-francisco-ca/",
    depth=2,
    link_limit=50,
    patterns=["/homedetails/*"]
)
result = client.wait_for_crawl(job.job_id)

# Pull the first dollar amount from each crawled listing page
prices = []
for url, data in result.data.crawl_data.items():
    price_match = re.search(r'\$[\d,]+', data.markdown)
    if price_match:
        prices.append(int(price_match.group().replace('$', '').replace(',', '')))

if prices:
    avg_price = sum(prices) / len(prices)
    print(f"Average price: ${avg_price:,.0f} across {len(prices)} listings")
```
Best Practices
- Monitor daily to catch listing updates promptly
- Track price history over time rather than relying on single snapshots
- Monitor multiple platforms for comprehensive coverage
- Use webhooks for instant alerts on new listings and price changes
- Store data in a database for trend analysis (see the sketch below)
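As a starting point for that last item, here is a minimal sketch that persists each snapshot from `track_price_changes` into a local SQLite table. The database file, table name, and columns are illustrative choices, not part of any Supacrawler schema.

```python
import json
import sqlite3

conn = sqlite3.connect("property_prices.db")  # hypothetical local database
conn.execute("""
    CREATE TABLE IF NOT EXISTS price_snapshots (
        url TEXT NOT NULL,
        prices TEXT NOT NULL,       -- JSON-encoded list of extracted prices
        captured_at TEXT NOT NULL   -- ISO-8601 timestamp
    )
""")

def save_snapshot(snapshot):
    # Store one row per scrape so price trends can be queried later
    conn.execute(
        "INSERT INTO price_snapshots (url, prices, captured_at) VALUES (?, ?, ?)",
        (snapshot['url'], json.dumps(snapshot['prices']), snapshot['timestamp'])
    )
    conn.commit()

save_snapshot(track_price_changes("https://www.zillow.com/san-francisco-ca/"))
```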