1. Setting up the price scraper
Start with a SQLite database for price history and a generic scraper function that works across any ecommerce site:
#E8A0BF">import requests
#E8A0BF">import sqlite3
#E8A0BF">import json
#E8A0BF">from datetime #E8A0BF">import datetime
API_KEY = "sr_live_YOUR_KEY"

# Open (or create) the local SQLite database that stores every observation.
# The connection is module-level: the scraper, alerting, and reporting
# functions below all share it.
conn = sqlite3.connect("price_monitor.db")

# One row per (product, site, check). `price`/`original_price` are numeric
# (REAL); `in_stock` is stored as 0/1.
conn.execute(
    """
    CREATE TABLE IF NOT EXISTS prices (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        timestamp TEXT,
        site TEXT,
        product_url TEXT,
        product_name TEXT,
        price REAL,
        original_price REAL,
        in_stock INTEGER
    )
    """
)
conn.commit()
def scrape_product_price(site, url, selectors):
    """Scrape a single product price from any site.

    Args:
        site: Short site label (e.g. "amazon") echoed back in the result.
        url: Product page URL to render and extract from.
        selectors: Mapping of field name -> CSS selector for the extract API.

    Returns:
        dict with keys: site, url, name, price, original_price, in_stock.
        Values are the raw strings the API extracted ("" when missing);
        parsing to numbers is the caller's job.

    Raises:
        requests.HTTPError: if the extraction API returns an error status.
        requests.Timeout: if the API does not respond within 60 seconds.
    """
    resp = requests.post(
        "https://api.snaprender.dev/v1/extract",
        headers={
            "x-api-key": API_KEY,
            "Content-Type": "application/json",
        },
        json={
            "url": url,
            "selectors": selectors,
            "use_flaresolverr": True,
        },
        # Rendered extractions behind bot protection can be slow, but a
        # request with no timeout can hang a scheduled run forever.
        timeout=60,
    )
    # Fail loudly on API errors instead of a cryptic KeyError('data') below.
    resp.raise_for_status()
    data = resp.json()["data"]
    return {
        "site": site,
        "url": url,
        "name": data.get("name", ""),
        "price": data.get("price", ""),
        "original_price": data.get("original_price", ""),
        "in_stock": data.get("availability", ""),
    }
2. Multi-site monitoring

Define products to track and check prices across Amazon, Best Buy, and Walmart in a single run:
# Products to monitor across retailers. Each entry maps one product to the
# per-site page URL plus the CSS selectors needed to pull its name, current
# price, strike-through (original) price, and availability text off that page.
WATCH_LIST = [
    {
        "name": "Sony WH-1000XM5",
        "sites": {
            "amazon": {
                "url": "https://www.amazon.com/dp/B0BX2L8PZ2",
                "selectors": {
                    "name": "#productTitle",
                    "price": ".a-price .a-offscreen",
                    "original_price": ".a-price[data-a-strike] .a-offscreen",
                    "availability": "#availability span",
                },
            },
            "bestbuy": {
                "url": "https://www.bestbuy.com/site/sony-wh1000xm5/6505727.p",
                "selectors": {
                    "name": ".sku-title h1",
                    "price": ".priceView-hero-price span:first-child",
                    "original_price": ".pricing-price__regular-price",
                    "availability": ".fulfillment-add-to-cart-button",
                },
            },
            "walmart": {
                "url": "https://www.walmart.com/ip/Sony-WH-1000XM5/example",
                "selectors": {
                    "name": "[itemprop='name']",
                    "price": "[itemprop='price']",
                    "original_price": ".was-price",
                    "availability": "[data-testid='fulfillment-badge']",
                },
            },
        },
    },
]
import time
import re


def _parse_price(text):
    """Extract the first numeric value from a price string.

    "$1,299.99" -> 1299.99. Returns 0.0 when no number is found (e.g. the
    scrape came back empty or the selector missed).
    """
    match = re.search(r"\d+\.?\d*", str(text).replace(",", ""))
    return float(match.group()) if match else 0.0


def run_price_check():
    """Check all products across all sites and record the results.

    Inserts one row per (product, site) into the `prices` table. A single
    timestamp is taken for the whole run so rows from one sweep group
    together. A scrape failure on one site is reported but never aborts
    the rest of the run.
    """
    timestamp = datetime.now().isoformat()
    for product in WATCH_LIST:
        print(f"\nChecking: {product['name']}")
        for site, config in product["sites"].items():
            try:
                result = scrape_product_price(
                    site, config["url"], config["selectors"]
                )
                price_num = _parse_price(result["price"])
                # The original_price column is REAL: store a parsed number,
                # or NULL when the site showed no strike-through price
                # (the raw string "" would otherwise land in a REAL column).
                original_raw = result.get("original_price", "")
                original_num = _parse_price(original_raw) if original_raw else None
                conn.execute(
                    """INSERT INTO prices
                       (timestamp, site, product_url, product_name, price,
                        original_price, in_stock)
                       VALUES (?, ?, ?, ?, ?, ?, ?)""",
                    (timestamp, site, config["url"],
                     result["name"], price_num,
                     original_num,
                     1 if "in stock" in result.get("in_stock", "").lower() else 0),
                )
                print(f"  {site}: ${price_num:.2f}")
                time.sleep(2)  # be polite: space out requests between sites
            except Exception as e:
                # Report and continue with the remaining sites/products.
                print(f"  {site}: Error - {e}")
    conn.commit()


run_price_check()
3. Price drop alerts

Automatically detect significant price drops and send alerts:
import smtplib
from email.mime.text import MIMEText


def check_for_drops(threshold_pct=5):
    """Detect price drops and send alerts.

    Compares the two most recent recorded prices for every (site, product)
    pair and collects those that fell by at least `threshold_pct` percent.

    Args:
        threshold_pct: Minimum drop, as a percentage of the previous price.

    Returns:
        List of alert dicts with keys: site, name, current, previous,
        drop_pct, url. Empty list when nothing dropped enough.
    """
    cursor = conn.execute("""
        WITH ranked AS (
            SELECT *,
                   ROW_NUMBER() OVER (
                       PARTITION BY site, product_url
                       ORDER BY timestamp DESC
                   ) as rn
            FROM prices
        )
        SELECT
            a.site, a.product_name, a.price as current_price,
            b.price as previous_price, a.product_url
        FROM ranked a
        JOIN ranked b ON a.site = b.site
            AND a.product_url = b.product_url
            AND a.rn = 1 AND b.rn = 2
        WHERE a.price < b.price
    """)
    alerts = []
    for site, name, current, previous, url in cursor:
        # A previous price of 0 means the earlier scrape failed to parse
        # (run_price_check stores 0.0 then). Skip it rather than divide by
        # zero / report a bogus 100% drop.
        if previous <= 0:
            continue
        drop_pct = ((previous - current) / previous) * 100
        if drop_pct >= threshold_pct:
            alerts.append({
                "site": site,
                "name": name,
                "current": current,
                "previous": previous,
                "drop_pct": drop_pct,
                "url": url,
            })
    if alerts:
        msg_body = "Price Drop Alerts:\n\n"
        for a in alerts:
            msg_body += f"{a['name']} ({a['site']})\n"
            msg_body += f"  ${a['previous']:.2f} -> ${a['current']:.2f}"
            msg_body += f" (-{a['drop_pct']:.1f}%)\n"
            msg_body += f"  {a['url']}\n\n"
        print(msg_body)
        # Send email alert (configure SMTP)
        # send_alert_email(msg_body)
    return alerts


drops = check_for_drops(threshold_pct=5)
print(f"Found {len(drops)} significant price drops")
Pro tip

Schedule price checks with cron: run daily checks at off-peak hours (3-5 AM) for consistent data. For time-sensitive products (electronics, holiday items), add a second check during business hours.
4. Reporting and analysis
Generate price comparison reports and export trend data:
import pandas as pd


def generate_price_report():
    """Generate a comprehensive price comparison report.

    Prints the most recent price per (product, site), highlights the
    cheapest site for each product, and exports daily average prices
    to price_trends.csv for trend analysis.
    """
    df = pd.read_sql_query(
        "SELECT * FROM prices ORDER BY timestamp DESC",
        conn,
    )
    # Latest observation per (product, site). drop_duplicates keeps whole
    # rows; groupby(...).first() would take the first NON-NULL value per
    # column and could silently mix fields from different scrape rows.
    latest = df.drop_duplicates(subset=["product_name", "site"], keep="first")
    print("=== Price Comparison Report ===")
    print(f"Generated: {datetime.now():%Y-%m-%d %H:%M}\n")
    for product_name in latest["product_name"].unique():
        product_data = latest[latest["product_name"] == product_name]
        print(f"Product: {product_name}")
        for _, row in product_data.iterrows():
            status = "In Stock" if row["in_stock"] else "Out of Stock"
            print(f"  {row['site']:12s} ${row['price']:>8.2f} {status}")
        best = product_data.loc[product_data["price"].idxmin()]
        print(f"  BEST PRICE -> {best['site']} at ${best['price']:.2f}")
        print()
    # Daily average price per product/site, exported for charting.
    df["date"] = pd.to_datetime(df["timestamp"]).dt.date
    daily = df.groupby(["date", "product_name", "site"])["price"].mean()
    daily.to_csv("price_trends.csv")
    print("Price history saved to price_trends.csv")


generate_price_report()
Monitor prices across every retailer

SnapRender handles bot protection, JavaScript rendering, and data extraction across all major ecommerce sites. Build your price monitor with a single API.
Get Your API Key — Free

Frequently asked questions
Ecommerce price monitoring is the automated process of tracking competitor prices, stock levels, and promotional activity across online retail sites. Businesses use it to adjust their own pricing strategy, match competitor deals, and identify market trends.
It depends on your market. For consumer electronics, daily monitoring catches flash sales and price drops. For fashion and home goods, 2-3 times per week is sufficient. For grocery and CPG, real-time monitoring may be needed during promotional periods.
Monitor your direct competitors and the major marketplaces where your products appear: Amazon, Walmart, Target, Best Buy, and any niche retailers in your category. Also monitor comparison shopping engines like Google Shopping for aggregate pricing data.
Store historical price data in a database (SQLite or PostgreSQL), then compare each new scrape against the previous price. Trigger alerts via email, Slack, or SMS when prices drop below a threshold. Common thresholds are 5%, 10%, or a fixed dollar amount.
Monitoring publicly displayed prices is generally permissible for competitive research. The data is publicly available to any consumer. However, respect each site's Terms of Service, use reasonable scraping rates, and do not republish raw scraped data as your own.