restructure
scraper.py (new file, 181 lines)
@@ -0,0 +1,181 @@
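# Scrape the Giant Food in-store purchase history for one account into
# CSVs via the site's account API, reusing Firefox session cookies for
# authentication.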
import json
import time
from pathlib import Path

import browser_cookie3
import pandas as pd
from curl_cffi import requests


BASE = "https://giantfood.com"
ACCOUNT_PAGE = f"{BASE}/account/history/invoice/in-store"

USER_ID = "369513017"
LOYALTY = "440155630880"
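

# build_session: piggyback on an already-authenticated browser login.
# browser_cookie3 reads the giantfood.com cookies straight out of the
# local Firefox profile, so the script never handles credentials itself.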
def build_session():
    s = requests.Session()
    s.cookies.update(browser_cookie3.firefox(domain_name="giantfood.com"))
    s.headers.update({
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:148.0) Gecko/20100101 Firefox/148.0",
        "accept": "application/json, text/plain, */*",
        "accept-language": "en-US,en;q=0.9",
        "referer": ACCOUNT_PAGE,
    })
    return s
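

# safe_get: small retry wrapper around session.get (3 attempts, 3 s
# apart). curl_cffi's impersonate="firefox" matches a real Firefox
# TLS/HTTP fingerprint, presumably to get past the site's bot checks.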
def safe_get(session, url, **kwargs):
    last_response = None

    for attempt in range(3):
        try:
            r = session.get(
                url,
                impersonate="firefox",
                timeout=30,
                **kwargs,
            )
            last_response = r

            if r.status_code == 200:
                return r

            print(f"retry {attempt + 1}/3 status={r.status_code}")
        except Exception as e:
            print(f"retry {attempt + 1}/3 error={e}")

        time.sleep(3)

    if last_response is not None:
        last_response.raise_for_status()

    raise RuntimeError(f"failed to fetch {url}")
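

# get_history: pull the list of past in-store orders for this account,
# filtered to the loyalty card number.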
def get_history(session):
    url = f"{BASE}/api/v6.0/user/{USER_ID}/order/history"
    r = safe_get(
        session,
        url,
        params={
            "filter": "instore",
            "loyaltyNumber": LOYALTY,
        },
    )
    return r.json()
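

# get_order_detail: fetch the full line-item receipt for one order.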
def get_order_detail(session, order_id):
    url = f"{BASE}/api/v6.0/user/{USER_ID}/order/history/detail/{order_id}"
    r = safe_get(
        session,
        url,
        params={"isInStore": "true"},
    )
    return r.json()
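

# flatten_orders: turn the nested API payloads into two flat tables:
# one row per order (totals, savings, store) and one row per line item.
# serviceType only appears in the history payload, so it is looked up
# there by orderId.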
def flatten_orders(history, details):
    orders = []
    items = []

    history_lookup = {
        r["orderId"]: r
        for r in history.get("records", [])
    }

    for d in details:
        hist = history_lookup.get(d["orderId"], {})
        pup = d.get("pup", {})

        orders.append({
            "order_id": d["orderId"],
            "order_date": d.get("orderDate"),
            "delivery_date": d.get("deliveryDate"),
            "service_type": hist.get("serviceType"),
            "order_total": d.get("orderTotal"),
            "payment_method": d.get("paymentMethod"),
            "total_item_count": d.get("totalItemCount"),
            "total_savings": d.get("totalSavings"),
            "your_savings_total": d.get("yourSavingsTotal"),
            "coupons_discounts_total": d.get("couponsDiscountsTotal"),
            "store_name": pup.get("storeName"),
            "store_number": pup.get("aholdStoreNumber"),
            "store_address1": pup.get("storeAddress1"),
            "store_city": pup.get("storeCity"),
            "store_state": pup.get("storeState"),
            "store_zipcode": pup.get("storeZipcode"),
            "refund_order": d.get("refundOrder"),
            "ebt_order": d.get("ebtOrder"),
        })

        for i, item in enumerate(d.get("items", []), start=1):
            items.append({
                "order_id": d["orderId"],
                "order_date": d.get("orderDate"),
                "line_no": i,
                "pod_id": item.get("podId"),
                "item_name": item.get("itemName"),
                "upc": item.get("primUpcCd"),
                "category_id": item.get("categoryId"),
                "category": item.get("categoryDesc"),
                "qty": item.get("shipQy"),
                "unit": item.get("lbEachCd"),
                "unit_price": item.get("unitPrice"),
                "line_total": item.get("groceryAmount"),
                "picked_weight": item.get("totalPickedWeight"),
                "mvp_savings": item.get("mvpSavings"),
                "reward_savings": item.get("rewardSavings"),
                "coupon_savings": item.get("couponSavings"),
                "coupon_price": item.get("couponPrice"),
            })

    return pd.DataFrame(orders), pd.DataFrame(items)
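

# main: fetch everything, keep the raw JSON under giant_output/raw for
# later reprocessing, then write the flattened CSVs. The 1.5 s sleep
# between detail requests is a small politeness delay on the API.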
def main():
    outdir = Path("giant_output")
    rawdir = outdir / "raw"
    rawdir.mkdir(parents=True, exist_ok=True)

    session = build_session()

    print("fetching order history...")
    history = get_history(session)

    (rawdir / "history.json").write_text(
        json.dumps(history, indent=2),
        encoding="utf-8",
    )

    order_ids = [r["orderId"] for r in history.get("records", [])]
    print(f"{len(order_ids)} orders found")

    details = []
    for order_id in order_ids:
        print(f"fetching {order_id}")
        d = get_order_detail(session, order_id)
        details.append(d)

        (rawdir / f"{order_id}.json").write_text(
            json.dumps(d, indent=2),
            encoding="utf-8",
        )

        time.sleep(1.5)

    print("flattening data...")
    orders_df, items_df = flatten_orders(history, details)

    orders_df.to_csv(outdir / "orders.csv", index=False)
    items_df.to_csv(outdir / "items.csv", index=False)

    print("done")
    print(f"{len(orders_df)} orders written to {outdir / 'orders.csv'}")
    print(f"{len(items_df)} items written to {outdir / 'items.csv'}")


if __name__ == "__main__":
    main()