1. Scraping search results
Realtor.com renders listing cards via JavaScript. Use SnapRender to extract structured data from the rendered page:
# Stdlib first, third-party second (PEP 8 import grouping).
import json

import requests

# SnapRender API key — replace with your own key before running.
API_KEY = "sr_live_YOUR_KEY"
def scrape_realtor_search(city, state, page=1):
    """Scrape one page of Realtor.com search results for a city.

    Args:
        city: City name as it appears in Realtor.com URLs (e.g. "Austin").
        state: Two-letter state abbreviation (e.g. "TX").
        page: 1-based results page number.

    Returns:
        dict mapping selector names ("addresses", "prices", "beds", "baths",
        "sqft", "links") to lists of extracted strings, as returned by the
        SnapRender extract API.

    Raises:
        requests.HTTPError: if the SnapRender API responds with an error status.
    """
    url = f"https://www.realtor.com/realestateandhomes-search/{city}_{state}/pg-{page}"
    resp = requests.post(
        "https://api.snaprender.dev/v1/extract",
        headers={
            "x-api-key": API_KEY,
            "Content-Type": "application/json",
        },
        json={
            "url": url,
            "selectors": {
                "addresses": "[data-testid='card-address']",
                "prices": "[data-testid='card-price']",
                "beds": "[data-testid='property-meta-beds'] span",
                "baths": "[data-testid='property-meta-baths'] span",
                "sqft": "[data-testid='property-meta-sqft'] span",
                "links": "[data-testid='card-link']::attr(href)",
            },
        },
        # requests has no default timeout; rendered extraction can take
        # tens of seconds, but the call must not hang forever.
        timeout=60,
    )
    # Fail loudly on an API error instead of a confusing KeyError below.
    resp.raise_for_status()
    return resp.json()["data"]
# Scrape the first 5 pages of Austin, TX listings into a flat list of dicts.
# Maps output field name -> selector key in the SnapRender response.
_FIELD_KEYS = {
    "address": "addresses",
    "price": "prices",
    "beds": "beds",
    "baths": "baths",
    "sqft": "sqft",
}

all_listings = []
for page in range(1, 6):
    data = scrape_realtor_search("Austin", "TX", page)
    addresses = data.get("addresses", [])
    for i in range(len(addresses)):
        # Selector lists can come back with unequal lengths (or a key can be
        # missing entirely); pad missing values with "" instead of raising.
        all_listings.append({
            field: data.get(key, [])[i] if i < len(data.get(key, [])) else ""
            for field, key in _FIELD_KEYS.items()
        })
    print(f"Page {page}: {len(addresses)} listings")
print(f"Total: {len(all_listings)} listings scraped")

2. Extracting listing details
Individual listing pages have richer data: description, property history, agent info, and more:
def scrape_listing_detail(listing_url):
    """Scrape detailed data from a single Realtor.com listing page.

    Args:
        listing_url: Site-relative listing path, e.g.
            "/realestateandhomes-detail/123-Main-St".

    Returns:
        dict mapping selector names ("price", "address", "description", ...)
        to extracted strings, as returned by the SnapRender extract API.

    Raises:
        requests.HTTPError: if the SnapRender API responds with an error status.
    """
    full_url = f"https://www.realtor.com{listing_url}"
    resp = requests.post(
        "https://api.snaprender.dev/v1/extract",
        headers={
            "x-api-key": API_KEY,
            "Content-Type": "application/json",
        },
        json={
            "url": full_url,
            "selectors": {
                "price": "[data-testid='list-price']",
                "address": "[data-testid='address-line']",
                "beds": "[data-testid='bed-count']",
                "baths": "[data-testid='bath-count']",
                "sqft": "[data-testid='sqft-count']",
                "lot_size": "[data-testid='lot-size']",
                "year_built": "[data-testid='year-built']",
                "property_type": "[data-testid='property-type']",
                "description": ".ldp-description-text",
                "days_on_market": "[data-testid='dom']",
                "hoa": "[data-testid='hoa-fees']",
                "agent_name": "[data-testid='agent-name']",
            },
        },
        # requests has no default timeout; don't let the call hang forever.
        timeout=60,
    )
    # Surface API errors instead of failing later on a missing "data" key.
    resp.raise_for_status()
    return resp.json()["data"]
# Example: fetch the full detail record for one specific listing path.
details = scrape_listing_detail("/realestateandhomes-detail/123-Main-St")
print(json.dumps(details, indent=2))

3. Market analysis with pandas
Clean the scraped data and run market analysis:
import pandas as pd

# Load the scraped listings into a DataFrame for cleaning and analysis.
df = pd.DataFrame(all_listings)

# Clean price strings like "$450,000" into floats.
df["price_clean"] = (
    df["price"]
    .str.replace("$", "", regex=False)
    .str.replace(",", "", regex=False)
    .astype(float)
)

# Clean sqft strings like "1,850 sqft" into floats.
# BUG FIX: the pattern must be r"(\d+)" — r"(d+)" matched the literal
# letter "d", not digits. expand=False keeps the result a Series.
df["sqft_clean"] = (
    df["sqft"]
    .str.replace(",", "", regex=False)
    .str.extract(r"(\d+)", expand=False)
    .astype(float)
)

# Calculate price per square foot.
df["price_per_sqft"] = df["price_clean"] / df["sqft_clean"]

# Market summary statistics.
print("=== Austin TX Market Summary ===")
print(f"Total listings: {len(df)}")
print(f"Median price: ${df['price_clean'].median():,.0f}")
print(f"Mean price: ${df['price_clean'].mean():,.0f}")
print(f"Median $/sqft: ${df['price_per_sqft'].median():,.0f}")
print(f"Median sqft: {df['sqft_clean'].median():,.0f}")

# Bucket listings into price bands for a distribution view.
price_ranges = pd.cut(
    df["price_clean"],
    bins=[0, 200000, 400000, 600000, 800000, 1000000, float("inf")],
    labels=["<200K", "200-400K", "400-600K", "600-800K", "800K-1M", "1M+"],
)
print("\n=== Price Distribution ===")
print(price_ranges.value_counts().sort_index())

# Export
df.to_csv("austin_listings.csv", index=False)

4. Multi-city comparison
Compare real estate markets across multiple cities:
import time

# Target markets: (city, two-letter state) pairs.
cities = [
    ("Austin", "TX"),
    ("Denver", "CO"),
    ("Nashville", "TN"),
    ("Phoenix", "AZ"),
    ("Raleigh", "NC"),
    ("Tampa", "FL"),
]

all_markets = []
for city, state in cities:
    print(f"Scraping {city}, {state}...")
    listings = []
    # Three result pages per city keeps the comparison quick.
    for page in range(1, 4):
        data = scrape_realtor_search(city, state, page)
        addresses = data.get("addresses", [])
        prices = data.get("prices", [])
        for idx, addr in enumerate(addresses):
            # Price list can be shorter than the address list; pad with "".
            price = prices[idx] if idx < len(prices) else ""
            listings.append({
                "city": city,
                "state": state,
                "address": addr,
                "price": price,
            })
        time.sleep(2)  # polite delay between requests
    all_markets.extend(listings)
    print(f" {len(listings)} listings found")

# Combine every market into one table and export it.
df = pd.DataFrame(all_markets)
df.to_csv("multi_market_comparison.csv", index=False)
print(f"Total: {len(df)} listings across {len(cities)} markets")

Scrape real estate data at scale
SnapRender handles JavaScript rendering and anti-bot protection. Extract listing data from Realtor.com with a simple API call.
Get Your API Key — Free

Frequently asked questions
Publicly available listing data is generally considered fair game for personal use and analysis. However, Realtor.com's Terms of Service prohibit automated scraping. Use the data for research and analysis, not for republishing or building a competing listings site. Always consult a lawyer for your specific use case.
Realtor.com is a React-based SPA. Standard HTTP requests return a mostly-empty HTML shell because listing data is loaded via JavaScript. You need either a headless browser or an API like SnapRender that renders JavaScript before extracting content.
New listings typically appear within hours. Price changes, status updates (pending, sold), and delisting usually reflect within 24-48 hours. For time-sensitive analysis, scrape daily. For market trend analysis, weekly or monthly scrapes are sufficient.
Realtor.com shows limited price history on individual listing pages (price changes, original list price). For comprehensive historical data, combine scraping with third-party datasets. Scrape current listings regularly and build your own historical database over time.
Key data points include: address, list price, beds/baths, square footage, lot size, year built, property type, listing status, days on market, price per square foot, HOA fees, property tax, listing agent, photos, and description. Some fields require visiting the individual listing page.