import requests
import pandas as pd
from datetime import datetime

# Ping Network proxy (replace with your own credentials)
PROXY = "http://username:password@HOST:PORT"
proxies = {"http": PROXY, "https": PROXY}

headers = {"User-Agent": "Mozilla/5.0", "Accept": "application/json, */*"}
url = "https://example-nasdaq-endpoint.com/api/quote?ticker=AAPL"

r = requests.get(url, headers=headers, proxies=proxies, timeout=20)
r.raise_for_status()  # fail fast on blocks, rate limits, or bad responses
data = r.json()

df = pd.DataFrame([{
    "ticker": "AAPL",
    "price": data["last"],
    "change_pct": data["changePercent"],
    "volume": data["volume"],
    "scraped_at": datetime.utcnow().isoformat(),
}])
df.to_csv("nasdaq_snapshot.csv", index=False)
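If the quote endpoint really returns JSON in the shape used above, the same request pattern scales to a small watchlist. The loop below is a minimal sketch that reuses the proxies, headers, and field names from the snippet (all of which are assumptions tied to the example endpoint) and spaces requests out with a short delay.

import time

def fetch_quote(ticker):
    # Reuses the proxies/headers defined above; endpoint and JSON field names are assumed.
    resp = requests.get(
        f"https://example-nasdaq-endpoint.com/api/quote?ticker={ticker}",
        headers=headers, proxies=proxies, timeout=20,
    )
    resp.raise_for_status()
    payload = resp.json()
    return {
        "ticker": ticker,
        "price": payload["last"],
        "change_pct": payload["changePercent"],
        "volume": payload["volume"],
        "scraped_at": datetime.utcnow().isoformat(),
    }

rows = []
for t in ["AAPL", "MSFT", "GOOGL"]:
    rows.append(fetch_quote(t))
    time.sleep(1)  # small pause between requests to avoid hammering the endpoint
pd.DataFrame(rows).to_csv("nasdaq_watchlist.csv", index=False)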
import asyncio
import pandas as pd
from datetime import datetime
from playwright.async_api import async_playwright

# Chromium does not accept credentials embedded in the proxy URL,
# so pass them to Playwright as separate fields.
PROXY = {"server": "http://HOST:PORT", "username": "username", "password": "password"}

async def scrape_quote(ticker):
    async with async_playwright() as p:
        browser = await p.chromium.launch(proxy=PROXY, headless=True)
        page = await browser.new_page()
        url = f"https://www.nasdaq.com/market-activity/stocks/{ticker.lower()}"
        await page.goto(url, wait_until="networkidle")
        await page.wait_for_selector("[data-testid='qsp-price']")
        quote = {
            "ticker": ticker,
            "price": await page.text_content("[data-testid='qsp-price']"),
            "change": await page.text_content("[data-testid='qsp-price-change']"),
            "volume": await page.text_content("[data-testid='qsp-volume']"),
            "scraped_at": datetime.utcnow().isoformat(),
        }
        await browser.close()
        return quote

async def main():
    df = pd.DataFrame([await scrape_quote(t) for t in ["AAPL", "MSFT", "GOOGL"]])
    df.to_csv("nasdaq_quotes.csv", index=False)

asyncio.run(main())
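The script above launches a fresh proxied browser for every ticker. A variant that shares one browser and fetches the pages concurrently with asyncio.gather is sketched below; the Nasdaq URL pattern and the qsp-* selectors are the same assumptions used above.

async def scrape_with_page(browser, ticker):
    # One page (tab) per ticker, all sharing a single proxied browser instance.
    page = await browser.new_page()
    await page.goto(
        f"https://www.nasdaq.com/market-activity/stocks/{ticker.lower()}",
        wait_until="networkidle",
    )
    await page.wait_for_selector("[data-testid='qsp-price']")
    quote = {
        "ticker": ticker,
        "price": await page.text_content("[data-testid='qsp-price']"),
        "scraped_at": datetime.utcnow().isoformat(),
    }
    await page.close()
    return quote

async def main_concurrent(tickers=("AAPL", "MSFT", "GOOGL")):
    async with async_playwright() as p:
        browser = await p.chromium.launch(proxy=PROXY, headless=True)
        rows = await asyncio.gather(*(scrape_with_page(browser, t) for t in tickers))
        await browser.close()
    pd.DataFrame(rows).to_csv("nasdaq_quotes.csv", index=False)

asyncio.run(main_concurrent())  # run this instead of main() above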
# Standard proxy string:
http://username:password@HOST:PORT

# Sticky session with US geo-targeting (session ID and country encoded in the username):
http://username=session-abc123-country-us:password@HOST:PORT
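The second string pins a sticky session to a US exit IP. How the session and country parameters are encoded in the username is provider-specific, so treat the exact format as an assumption to verify against your Ping Network dashboard; wiring it into requests is otherwise identical to the plain proxy string:

import requests

SESSION_PROXY = "http://username=session-abc123-country-us:password@HOST:PORT"
proxies = {"http": SESSION_PROXY, "https": SESSION_PROXY}

# Every request through this dict exits from the same US IP until the session expires.
r = requests.get(
    "https://example-nasdaq-endpoint.com/api/quote?ticker=AAPL",
    proxies=proxies, timeout=20,
)
print(r.status_code)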