How to Build a Crypto Price Tracker with Python APIs
Cryptocurrency prices move fast. If you are building trading bots, portfolio dashboards, or alert systems, you need reliable real-time price data. This tutorial shows you how to build a cryptocurrency price tracker using Python -- from fetching live prices to tracking historical trends and setting up price alerts.
We will use SearchHive SwiftSearch for fetching live data from multiple sources and ScrapeForge for extracting data from exchange pages that lack public APIs.
Prerequisites
- Python 3.8+
- A SearchHive API key (free with 500 credits)
- pip install requests
Step 1: Set Up Your API Connection
Create a configuration file and initialize the SearchHive client.
import requests
import time
import json
from datetime import datetime

# SearchHive API configuration.
# NOTE(review): avoid committing real keys in source; prefer reading this
# from an environment variable in production code.
API_KEY = "your_searchhive_api_key"
BASE_URL = "https://api.searchhive.dev/v1"
# Shared headers for every SearchHive request (bearer auth + JSON body).
HEADERS = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json"
}
def search_web(query):
    """Use SwiftSearch to find real-time crypto data.

    Args:
        query: Free-text search query string.

    Returns:
        Parsed JSON response (dict) from the SwiftSearch endpoint.

    Raises:
        requests.HTTPError: On a non-2xx API response.
        requests.Timeout: If the request exceeds the timeout.
    """
    response = requests.post(
        f"{BASE_URL}/search",
        headers=HEADERS,
        json={"query": query, "num_results": 10},
        timeout=30,  # never hang forever on a stalled connection
    )
    # Fail loudly on API errors instead of returning an error payload
    # that downstream code would misparse as search results.
    response.raise_for_status()
    return response.json()
Step 2: Fetch Live Prices from Multiple Sources
Different sources report slightly different prices. A robust tracker aggregates from multiple sources.
def get_crypto_price_from_web(coin):
    """Estimate a coin's live USD price from web search results.

    Scans titles and snippets of SwiftSearch results for dollar amounts
    and averages the plausible ones.

    Args:
        coin: Coin display name, e.g. "Bitcoin".

    Returns:
        Average of the plausible prices found, or None if none were found.
    """
    import re

    # Compile once; the original re-imported `re` and rescanned the
    # pattern on every result.
    price_re = re.compile(r'\$([0-9,]+\.?[0-9]*)')
    results = search_web(f"{coin} price USD today")
    prices = []
    for result in results.get("results", []):
        text = f"{result.get('title', '')} {result.get('snippet', '')}"
        for raw in price_re.findall(text):
            try:
                value = float(raw.replace(",", ""))
            except ValueError:
                # Pattern can match a bare comma (e.g. "$,"), which
                # previously crashed float(); skip such junk.
                continue
            # Sanity bounds: discard figures that are clearly not a
            # per-coin price (market caps, cents-level noise).
            if 0.01 < value < 1000000:
                prices.append(value)
    if prices:
        return sum(prices) / len(prices)
    return None
Step 3: Scrape Exchange Pages with ScrapeForge
Many crypto data pages require JavaScript rendering. ScrapeForge handles this automatically.
def scrape_coinmarketcap(coin_slug="bitcoin"):
    """Extract price data from CoinMarketCap using ScrapeForge.

    Args:
        coin_slug: CoinMarketCap URL slug, e.g. "bitcoin".

    Returns:
        First dollar amount found on the rendered page as a float, or
        None if no price-like text was found.

    Raises:
        requests.HTTPError: On a non-2xx ScrapeForge response.
    """
    import re

    response = requests.post(
        f"{BASE_URL}/scrape",
        headers=HEADERS,
        json={
            "url": f"https://coinmarketcap.com/currencies/{coin_slug}/",
            "format": "text",
            "render_js": True  # CMC renders prices client-side
        },
        timeout=60,  # JS rendering can be slow; still bound the wait
    )
    response.raise_for_status()
    content = response.json().get("content", "")
    # NOTE(review): takes the FIRST $-amount on the page and assumes it
    # is the headline price — verify against the live page layout.
    price_match = re.search(r'(\$[0-9,]+\.?[0-9]*)', content)
    if price_match:
        try:
            return float(price_match.group(1).replace("$", "").replace(",", ""))
        except ValueError:
            return None  # matched "$," or similar junk
    return None
def scrape_coin_gecko(coin_id="bitcoin"):
    """Extract data from CoinGecko using ScrapeForge.

    Args:
        coin_id: CoinGecko URL identifier, e.g. "bitcoin".

    Returns:
        First dollar amount found on the rendered page as a float, or
        None if no price-like text was found.

    Raises:
        requests.HTTPError: On a non-2xx ScrapeForge response.
    """
    import re

    response = requests.post(
        f"{BASE_URL}/scrape",
        headers=HEADERS,
        json={
            "url": f"https://www.coingecko.com/en/coins/{coin_id}",
            "format": "text",
            "render_js": True  # CoinGecko renders prices client-side
        },
        timeout=60,  # JS rendering can be slow; still bound the wait
    )
    response.raise_for_status()
    content = response.json().get("content", "")
    # NOTE(review): takes the FIRST $-amount on the page and assumes it
    # is the headline price — verify against the live page layout.
    price_match = re.search(r'(\$[0-9,]+\.?[0-9]*)', content)
    if price_match:
        try:
            return float(price_match.group(1).replace("$", "").replace(",", ""))
        except ValueError:
            return None  # matched "$," or similar junk
    return None
Step 4: Build the Price Aggregator
Combine multiple sources into a single aggregated price with confidence scoring.
def get_aggregated_price(coin_name, coin_slug="bitcoin", coin_gecko_id="bitcoin"):
    """Aggregate a coin's price across web search, CoinMarketCap and CoinGecko.

    Args:
        coin_name: Display name for web search (e.g. "Bitcoin").
        coin_slug: CoinMarketCap slug.
        coin_gecko_id: CoinGecko identifier.

    Returns:
        (avg_price, sources): unweighted mean of all successful sources
        and a dict mapping source name -> price. (None, {}) if every
        source failed.
    """
    sources = {}

    # Source 1: SwiftSearch web results. Errors here propagate, matching
    # the original behavior; the scrapers below are individually best-effort.
    web_price = get_crypto_price_from_web(coin_name)
    if web_price is not None:
        sources["web_search"] = web_price

    # Source 2: CoinMarketCap (best-effort; failures are logged, not fatal).
    try:
        cmc_price = scrape_coinmarketcap(coin_slug)
        if cmc_price is not None:
            sources["coinmarketcap"] = cmc_price
    except Exception as e:
        print(f"CoinMarketCap error: {e}")

    # Source 3: CoinGecko (best-effort).
    try:
        cg_price = scrape_coin_gecko(coin_gecko_id)
        if cg_price is not None:
            sources["coingecko"] = cg_price
    except Exception as e:
        print(f"CoinGecko error: {e}")

    if not sources:
        return None, {}
    # Unweighted mean across whichever sources responded; with up to three
    # independent sources this damps single-source outliers. (The original
    # computed a `confidence` score here and discarded it — derive it from
    # len(sources) at the call site if needed.)
    return sum(sources.values()) / len(sources), sources
# Quick smoke test: aggregate BTC across all sources.
price, sources = get_aggregated_price("Bitcoin", "bitcoin", "bitcoin")
if price is not None:
    # Confidence grows with the number of agreeing sources (max 3).
    # The original printed the raw `sources` dict mislabeled as
    # "confidence", and crashed formatting None when all sources failed.
    confidence = min(len(sources) / 3, 1.0)
    print(f"BTC Price: ${price:,.2f} (confidence: {confidence:.0%})")
else:
    print("BTC Price: unavailable (all sources failed)")
Step 5: Add Historical Tracking
Store prices over time in a simple JSON file for trend analysis.
import os  # NOTE(review): unused in the snippets shown — confirm before removing
from pathlib import Path

# On-disk JSON file holding every recorded price observation (append-only list).
HISTORY_FILE = Path("crypto_history.json")
def save_price(coin, price, sources):
    """Append one price observation to the JSON history file.

    Args:
        coin: Coin display name (e.g. "Bitcoin").
        price: Aggregated USD price.
        sources: Mapping of source name -> price used in the aggregate.
    """
    history = []
    if HISTORY_FILE.exists():
        try:
            with open(HISTORY_FILE) as f:
                history = json.load(f)
        except (json.JSONDecodeError, OSError):
            # A truncated/corrupt file (e.g. the process was killed
            # mid-write) previously crashed the tracker on the next
            # cycle; start a fresh history instead.
            history = []
    history.append({
        "coin": coin,
        "price": price,
        "sources": sources,
        # Naive UTC ISO-8601 timestamp, matching get_price_history's parsing.
        "timestamp": datetime.utcnow().isoformat()
    })
    with open(HISTORY_FILE, "w") as f:
        json.dump(history, f, indent=2)
def get_price_history(coin, hours=24):
    """Return this coin's history entries recorded within the last `hours`.

    Args:
        coin: Coin display name to filter on.
        hours: Look-back window in hours.

    Returns:
        List of history entry dicts, oldest first (file order).
    """
    from datetime import timezone

    if not HISTORY_FILE.exists():
        return []
    with open(HISTORY_FILE) as f:
        history = json.load(f)
    # Compare in UTC explicitly. The original called .timestamp() on naive
    # datetimes, which Python interprets as LOCAL time on both sides of the
    # comparison — the errors cancelled except across DST transitions.
    cutoff = datetime.now(timezone.utc).timestamp() - hours * 3600
    recent = []
    for entry in history:
        if entry["coin"] != coin:
            continue
        ts = datetime.fromisoformat(entry["timestamp"])
        if ts.tzinfo is None:
            # Stored timestamps are naive UTC (see save_price).
            ts = ts.replace(tzinfo=timezone.utc)
        if ts.timestamp() > cutoff:
            recent.append(entry)
    return recent
Step 6: Set Up Price Alerts
Get notified when a coin crosses a threshold.
class PriceAlert:
    """Threshold-based price alerting for tracked coins.

    One rule per coin; setting a new alert for a coin replaces its
    previous rule.
    """

    def __init__(self):
        # coin name -> {"condition": "above"|"below", "threshold": number}
        self.alerts = {}

    def set_alert(self, coin, condition, threshold):
        """Set a price alert. condition: 'above' or 'below'."""
        self.alerts[coin] = {"condition": condition, "threshold": threshold}

    def check(self, coin, price):
        """Return an alert message if `price` crosses the coin's threshold.

        Returns None when no rule exists for the coin, when the rule's
        condition is unrecognized, or when the threshold is not crossed.
        """
        rule = self.alerts.get(coin)
        if rule is None:
            return None
        threshold = rule["threshold"]
        if rule["condition"] == "above" and price > threshold:
            return f"ALERT: {coin} rose above ${threshold:,.2f} (current: ${price:,.2f})"
        if rule["condition"] == "below" and price < threshold:
            return f"ALERT: {coin} dropped below ${threshold:,.2f} (current: ${price:,.2f})"
        return None
# Usage: register one alert in each direction.
alerts = PriceAlert()
alerts.set_alert("Ethereum", "below", 2000)   # warn on a dip under $2k
alerts.set_alert("Bitcoin", "above", 100000)  # warn when BTC breaks $100k
Step 7: The Main Loop
Put it all together into a continuously running tracker.
def run_tracker(coins, interval_seconds=300, coin_configs=None):
    """Continuously poll aggregated prices for `coins` and record/alert.

    Args:
        coins: List of coin display names.
        interval_seconds: Seconds to sleep between polling rounds.
        coin_configs: Optional mapping of coin name -> {"slug", "gecko_id"}
            overriding the built-in defaults (generalizes the previously
            hard-coded table). Unknown coins fall back to the lowercased
            coin name for both identifiers.

    Runs until interrupted (Ctrl-C); each round prints prices, appends
    them to the history file, and checks alert thresholds.
    """
    alert_system = PriceAlert()
    # Configure alerts
    alert_system.set_alert("Bitcoin", "above", 100000)
    if coin_configs is None:
        # Default identifiers for the coins used in this tutorial.
        coin_configs = {
            "Bitcoin": {"slug": "bitcoin", "gecko_id": "bitcoin"},
            "Ethereum": {"slug": "ethereum", "gecko_id": "ethereum"},
            "Solana": {"slug": "solana", "gecko_id": "solana"},
        }
    print(f"Tracking {len(coins)} coins every {interval_seconds}s...")
    print("-" * 50)
    while True:
        for coin in coins:
            config = coin_configs.get(coin, {"slug": coin.lower(), "gecko_id": coin.lower()})
            price, sources = get_aggregated_price(coin, config["slug"], config["gecko_id"])
            stamp = datetime.now().strftime('%H:%M:%S')
            # `is not None` rather than truthiness: a 0.0 price should not
            # be reported as a fetch failure.
            if price is not None:
                print(f"[{stamp}] {coin}: ${price:,.2f} ({len(sources)} sources)")
                save_price(coin, price, sources)
                message = alert_system.check(coin, price)
                if message:
                    print(f" >> {message}")
            else:
                print(f"[{stamp}] {coin}: FAILED to fetch price")
        print("-" * 50)
        time.sleep(interval_seconds)
# Start tracking only when executed as a script. The original called
# run_tracker unconditionally, so merely importing this module would
# enter an infinite polling loop.
if __name__ == "__main__":
    run_tracker(["Bitcoin", "Ethereum", "Solana"], interval_seconds=300)
Common Issues
- Rate limiting: CoinMarketCap and CoinGecko may block frequent requests. ScrapeForge handles proxy rotation, so your requests go through different IPs automatically.
- JavaScript rendering: Many crypto dashboards load data via client-side JavaScript. Always set `render_js: True` in ScrapeForge requests.
- Price discrepancies: Different exchanges report different prices. Use the aggregated approach above to get a more reliable estimate.
- Historical data gaps: If the tracker crashes, you miss data points. Add error handling and consider running it as a systemd service or Docker container.
Next Steps
- Add WebSocket support for real-time exchange feeds
- Build a dashboard with Streamlit or Plotly Dash
- Add technical indicators (moving averages, RSI) using the historical data
- Deploy to a cloud server for 24/7 monitoring
Get your free SearchHive API key with 500 credits to start building your crypto tracker today. The SwiftSearch and ScrapeForge APIs give you access to real-time data from any source on the web.