Tracking your search engine rankings manually is tedious and doesn't scale. If you manage more than a handful of keywords, you need an automated SEO monitoring tool that checks positions, tracks changes over time, and alerts you to significant movements.
This tutorial shows how to build a complete SEO monitoring tool with Python and the SearchHive API -- no expensive SaaS subscriptions required.
Key Takeaways
- SearchHive's SwiftSearch API returns organic search results with position data for any keyword
- A complete SEO monitor needs: keyword tracking, historical storage, change detection, and alerting
- SQLite handles storage for thousands of keywords without needing a database server
- The full tool runs in under 200 lines of Python
- SearchHive's free tier (500 credits) lets you track ~100 keywords daily
Prerequisites
- Python 3.8+
- SearchHive API key (free at searchhive.dev)
- Basic SQL knowledge (for SQLite queries)
pip install requests searchhive
Step 1: Set Up the Database
SQLite is perfect for SEO monitoring -- zero configuration, file-based, and handles the read/write patterns of daily ranking checks easily.
import sqlite3
from datetime import datetime


def init_db(db_path="seo_monitor.db"):
    """Initialize the SQLite database for SEO tracking.

    Creates the ``keywords`` and ``rankings`` tables plus an index used by
    the history queries. Safe to call repeatedly (``IF NOT EXISTS``).

    Args:
        db_path: Path of the SQLite file to create/open.
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    # One row per tracked keyword/URL pair. The UNIQUE constraint on
    # keyword is what lets add_keyword() detect duplicates.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS keywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword TEXT UNIQUE NOT NULL,
            url TEXT NOT NULL,
            search_engine TEXT DEFAULT 'google',
            location TEXT DEFAULT 'us',
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    """)

    # One row per ranking check. position is NULL when the URL was not
    # found, so "not ranked" is still recorded in the history.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS rankings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword_id INTEGER NOT NULL,
            position INTEGER,
            url_found TEXT,
            title TEXT,
            checked_at TEXT DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (keyword_id) REFERENCES keywords(id)
        )
    """)

    # Composite index matches the (keyword_id, checked_at) access pattern
    # of the history and movers queries.
    cursor.execute("""
        CREATE INDEX IF NOT EXISTS idx_rankings_keyword
        ON rankings(keyword_id, checked_at)
    """)

    conn.commit()
    conn.close()
    print("Database initialized")


init_db()
Step 2: Add Keywords to Track
import sqlite3
def add_keyword(keyword, target_url, db_path="seo_monitor.db"):
    """Register a single keyword/URL pair for rank monitoring.

    Prints a confirmation on success, or a notice when the keyword is
    already tracked (the UNIQUE constraint on keywords.keyword rejects
    duplicate inserts).
    """
    conn = sqlite3.connect(db_path)
    try:
        conn.execute(
            "INSERT INTO keywords (keyword, url) VALUES (?, ?)",
            (keyword, target_url),
        )
        conn.commit()
        print(f"Added: {keyword} -> {target_url}")
    except sqlite3.IntegrityError:
        print(f"Already tracking: {keyword}")
    finally:
        conn.close()
def add_keywords_bulk(pairs, db_path="seo_monitor.db"):
    """Register many (keyword, url) tuples in a single connection.

    Keywords that are already tracked are skipped silently; prints the
    number of rows actually inserted.
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    added = 0
    for keyword, url in pairs:
        try:
            cursor.execute(
                "INSERT INTO keywords (keyword, url) VALUES (?, ?)",
                (keyword, url),
            )
        except sqlite3.IntegrityError:
            # Duplicate keyword -- already being monitored.
            continue
        added += 1
    conn.commit()
    conn.close()
    print(f"Added {added} keywords (skipped duplicates)")
# Example: add keywords for your site
# (each call pairs a search query with the page you want it to rank for)
add_keyword("best project management tools 2026", "https://your-site.com/blog/project-management-tools")
add_keyword("python web scraping tutorial", "https://your-site.com/tutorials/python-scraping")
# Bulk add from a list of (keyword, url) tuples; duplicates are skipped
add_keywords_bulk([
("search api comparison", "https://your-site.com/compare/search-apis"),
("scrape google results python", "https://your-site.com/tutorials/scrape-google"),
("serp api alternatives", "https://your-site.com/compare/serpapi"),
])
Step 3: Check Rankings with SwiftSearch
This is the core of the tool -- querying SearchHive's SwiftSearch API to find where your URL ranks for each tracked keyword.
import requests
import sqlite3
import time
API_KEY="***"
BASE_URL = "https://api.searchhive.dev/v1"
def check_ranking(keyword, target_url, max_position=50, db_path="seo_monitor.db"):
    """Search `keyword` via SwiftSearch and record where `target_url` ranks.

    Args:
        keyword: Search query to run.
        target_url: URL (or URL fragment) to look for in the results.
        max_position: How many organic results to fetch and scan.
        db_path: SQLite database created by init_db().

    Returns:
        The 1-based position when the URL is found, otherwise None
        (also returned on an API error). Every successful check -- found
        or not -- is appended to the rankings table (NULL position means
        "not in the top max_position").
    """
    response = requests.post(
        f"{BASE_URL}/search",
        headers={"Authorization": f"Bearer {API_KEY}"},
        json={
            "query": keyword,
            "num_results": max_position,
            "engine": "google"
        },
        timeout=30,  # never let a stuck connection hang a daily run
    )
    if response.status_code != 200:
        print(f"API error for '{keyword}': {response.status_code}")
        return None

    results = response.json().get("results", [])

    # Find our URL in the results. Substring match, so tracking
    # "example.com/page" also matches "https://www.example.com/page?ref=x".
    position = None
    url_found = None
    title = None
    for i, result in enumerate(results):
        result_url = result.get("url", "")
        if target_url in result_url:
            position = i + 1
            url_found = result_url
            title = result.get("title", "")
            break

    # Save to database; try/finally guarantees the connection is closed
    # even if the INSERT fails.
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        cursor.execute(
            "SELECT id FROM keywords WHERE keyword = ?",
            (keyword,)
        )
        row = cursor.fetchone()
        if row:
            cursor.execute(
                "INSERT INTO rankings (keyword_id, position, url_found, title) VALUES (?, ?, ?, ?)",
                (row[0], position, url_found, title)
            )
            conn.commit()
    finally:
        conn.close()

    if position:
        print(f" #{position}: {keyword}")
    else:
        print(f" >{max_position}: {keyword} (not found)")
    return position
def check_all_keywords(db_path="seo_monitor.db"):
    """Run a ranking check for every tracked keyword.

    Returns a dict mapping keyword -> position (None when not found).
    """
    conn = sqlite3.connect(db_path)
    tracked = conn.execute("SELECT keyword, url FROM keywords").fetchall()
    conn.close()

    print(f"Checking {len(tracked)} keywords...")
    results = {}
    for keyword, url in tracked:
        results[keyword] = check_ranking(keyword, url)
        time.sleep(2)  # Rate limiting
    return results


# Run a full check
rankings = check_all_keywords()
Step 4: Track Ranking Changes Over Time
import sqlite3
from datetime import datetime, timedelta
def get_ranking_history(keyword, days=30, db_path="seo_monitor.db"):
    """Return ranking history for `keyword` over the past `days` days.

    Returns:
        A list of {"position": int | None, "date": str} dicts in
        chronological order. A None position means the URL was not found
        in the results for that check.
    """
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    # checked_at is stored as 'YYYY-MM-DD HH:MM:SS', so a lexicographic
    # compare against date('now', '-N days') is a valid cutoff.
    cursor.execute(
        """
        SELECT r.position, r.checked_at
        FROM rankings r
        JOIN keywords k ON r.keyword_id = k.id
        WHERE k.keyword = ?
          AND r.checked_at >= date('now', ?)
        ORDER BY r.checked_at
        """,
        (keyword, f"-{days} days"),
    )
    history = cursor.fetchall()
    conn.close()
    return [{"position": pos, "date": date} for pos, date in history]
def get_biggest_movers(db_path="seo_monitor.db", days=7):
# Find keywords with the biggest ranking changes
conn = sqlite3.connect(db_path)
cursor = conn.cursor()
cursor.execute("246| WITH latest AS ( 247| SELECT k.keyword, k.url, r.position, 248| ROW_NUMBER() OVER (PARTITION BY k.id ORDER BY r.checked_at DESC) as rn 249| FROM rankings r JOIN keywords k ON r.keyword_id = k.id 250| ), 251| previous AS ( 252| SELECT k.keyword, r.position, 253| ROW_NUMBER() OVER (PARTITION BY k.id ORDER BY r.checked_at DESC) as rn 254| FROM rankings r JOIN keywords k ON r.keyword_id = k.id 255| ) 256| SELECT 257| l.keyword, 258| l.url, 259| l.position as current_pos, 260| p.position as prev_pos, 261| (p.position - l.position) as change 262| FROM latest l 263| JOIN previous p ON l.keyword = p.keyword AND p.rn = 2 264| WHERE l.rn = 1 265| AND p.position IS NOT NULL AND l.position IS NOT NULL 266| ORDER BY ABS(p.position - l.position) DESC 267| LIMIT 20 268|"))
movers = []
for row in cursor.fetchall():
movers.append({
"keyword": row[0],
"url": row[1],
"current": row[2],
"previous": row[3],
"change": row[4]
})
conn.close()
return movers
# Example: show ranking history for one keyword
history = get_ranking_history("best project management tools 2026", days=14)
for snapshot in history:
    label = "Not found" if not snapshot['position'] else f"#{snapshot['position']}"
    print(f" {snapshot['date']}: {label}")

# Example: show biggest movers over the last week
print("\nBiggest ranking changes this week:")
for mover in get_biggest_movers(days=7):
    sign = "+" if mover["change"] > 0 else ""
    print(f" {mover['keyword']}: {sign}{mover['change']} (#{mover['previous']} -> #{mover['current']})")
Step 5: Set Up Alerts for Significant Changes
import sqlite3
def send_alert(keyword, old_pos, new_pos, url):
    """Dispatch an alert for a ranking change (placeholder: prints it).

    Args:
        keyword: The tracked keyword that moved.
        old_pos: Previous position (int).
        new_pos: New position, or None if the URL dropped out of results.
        url: The tracked URL (available for richer integrations).
    """
    # Bug fix: the None case must be handled BEFORE any numeric comparison --
    # the original evaluated `new_pos < old_pos` first, which raises
    # TypeError on Python 3 when new_pos is None.
    if new_pos is None:
        direction = "dropped out of results"
    else:
        direction = "improved" if new_pos < old_pos else "dropped"
    message = f"SEO Alert: '{keyword}' {direction}"
    if new_pos:
        message += f" (#{old_pos} -> #{new_pos})"
    else:
        message += f" (was #{old_pos})"
    print(f"ALERT: {message}")
    # Add your integration: email, Slack webhook, Telegram, etc.
    # requests.post("https://hooks.slack.com/...", json={"text": message})
def check_for_alerts(threshold=5, db_path="seo_monitor.db"):
    """Fire an alert for every keyword whose latest move is >= threshold positions."""
    for mover in get_biggest_movers(days=1, db_path=db_path):
        if abs(mover["change"]) >= threshold:
            send_alert(mover["keyword"], mover["previous"],
                       mover["current"], mover["url"])


# Run alerts after each ranking check
check_for_alerts(threshold=5)
Step 6: Complete Daily Monitoring Script
Combine everything into a single script you can run as a daily cron job:
import sqlite3
import requests
import time
from datetime import datetime
API_KEY="***"
BASE_URL = "https://api.searchhive.dev/v1"
DB_PATH = "seo_monitor.db"
def daily_monitor():
    """Full daily SEO monitoring cycle.

    Checks every tracked keyword against SwiftSearch, stores the result in
    the rankings table, then fires alerts for significant movements.
    Designed to be run once a day from cron.
    """
    print(f"=== SEO Monitor - {datetime.now().strftime('%Y-%m-%d %H:%M')} ===")

    # 1. Check all keywords
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute("SELECT keyword, url FROM keywords")
    keywords = cursor.fetchall()
    conn.close()
    print(f"Tracking {len(keywords)} keywords")

    for keyword, url in keywords:
        response = requests.post(
            f"{BASE_URL}/search",
            headers={"Authorization": f"Bearer {API_KEY}"},
            json={"query": keyword, "num_results": 30},
            timeout=30,  # one stuck request must not stall the whole cron run
        )
        if response.status_code == 200:
            results = response.json().get("results", [])
            position = None
            url_found = None
            title = None
            for i, r in enumerate(results):
                result_url = r.get("url", "")
                if url in result_url:
                    position = i + 1
                    url_found = result_url
                    title = r.get("title", "")
                    break
            # Save ranking (url_found included for parity with the
            # rankings schema and with Step 3's check_ranking)
            conn = sqlite3.connect(DB_PATH)
            cursor = conn.cursor()
            cursor.execute(
                "SELECT id FROM keywords WHERE keyword = ?", (keyword,)
            )
            row = cursor.fetchone()
            if row:
                cursor.execute(
                    "INSERT INTO rankings (keyword_id, position, url_found, title) VALUES (?, ?, ?, ?)",
                    (row[0], position, url_found, title)
                )
                conn.commit()
            conn.close()
        time.sleep(2)

    # 2. Check for alerts
    check_for_alerts(threshold=5, db_path=DB_PATH)
    print("=== Done ===")


if __name__ == "__main__":
    daily_monitor()
Set up the cron job:
# Run daily at 8 AM
0 8 * * * /usr/bin/python3 /path/to/seo_monitor.py >> /var/log/seo_monitor.log 2>&1
Common Issues
Inconsistent results: Search engines personalize results. Use the location parameter in SwiftSearch to set a consistent location, and consider using a proxy for consistent IP-based results.
Missing rankings: If your URL isn't in the top 30 results, increase num_results. Each additional 10 results costs more credits, so balance coverage with cost.
Database growth: The rankings table grows daily. Archive old data monthly by exporting and deleting records older than 90 days.
API rate limits: SearchHive's free tier has rate limits. The 2-second delay between requests keeps you well within limits. The Builder plan ($49/mo, 100K credits) supports tracking thousands of keywords daily.
Cost Comparison
| Approach | Cost for 100 Keywords/Day |
|---|---|
| Ahrefs | $99-$199/month |
| SEMrush | $119-$229/month |
| SERPWatcher (Mangools) | $29/month (limited) |
| SearchHive Free | $0 (500 credits = ~16 checks) |
| SearchHive Starter | $9/month (5K credits = ~160 checks) |
| SearchHive Builder | $49/month (100K credits = ~3,300 checks) |
For most small-to-medium sites, the SearchHive Builder plan at $49/month tracks 100+ keywords daily -- a fraction of what enterprise SEO tools charge.
Next Steps
- Add competitor tracking (check rankings for competitor URLs on your target keywords)
- Build a simple dashboard with Flask or Streamlit
- Track featured snippets and SERP features, not just organic positions
- Add keyword research with SwiftSearch to discover new ranking opportunities
Get started free with 500 credits at searchhive.dev -- no credit card required. Check the docs for the full API reference.
Related tutorials: /tutorials/how-to-monitor-brand-mentions-across-the-web-with-python | /tutorials/how-to-scrape-e-commerce-pricing-data-with-python
Compare: /compare/serpapi | /compare/serper