Build Giant item enricher
This commit is contained in:
426
enrich_giant.py
Normal file
426
enrich_giant.py
Normal file
@@ -0,0 +1,426 @@
|
||||
import csv
|
||||
import json
|
||||
import re
|
||||
from decimal import Decimal, InvalidOperation, ROUND_HALF_UP
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
|
||||
# Version tag stamped on every output row so downstream consumers can tell
# which parser produced a given file.
PARSER_VERSION = "giant-enrich-v1"
RETAILER = "giant"
DEFAULT_INPUT_DIR = Path("giant_output/raw")
DEFAULT_OUTPUT_CSV = Path("giant_output/items_enriched.csv")

# Canonical column order for the enriched CSV (used as DictWriter fieldnames).
OUTPUT_FIELDS = [
    "retailer",
    "order_id",
    "line_no",
    "observed_item_key",
    "order_date",
    "pod_id",
    "item_name",
    "upc",
    "category_id",
    "category",
    "qty",
    "unit",
    "unit_price",
    "line_total",
    "picked_weight",
    "mvp_savings",
    "reward_savings",
    "coupon_savings",
    "coupon_price",
    "image_url",
    "raw_order_path",
    "item_name_norm",
    "brand_guess",
    "variant",
    "size_value",
    "size_unit",
    "pack_qty",
    "measure_type",
    "is_store_brand",
    "is_fee",
    "price_per_each",
    "price_per_lb",
    "price_per_oz",
    "parse_version",
    "parse_notes",
]

# Leading name tokens that mark store-brand items; maps prefix -> brand guess.
STORE_BRAND_PREFIXES = {
    "SB": "SB",
    "NP": "NP",
}

# Receipt-style token expansions used to normalize item names.
# Identity entries (e.g. "APPLE": "APPLE") pin known-good tokens explicitly.
ABBREVIATIONS = {
    "APPLE": "APPLE",
    "APPLES": "APPLES",
    "APLE": "APPLE",
    "BASIL": "BASIL",
    "BLK": "BLACK",
    "BNLS": "BONELESS",
    "BRWN": "BROWN",
    "CARROTS": "CARROTS",
    "CHDR": "CHEDDAR",
    "CHICKEN": "CHICKEN",
    "CHOC": "CHOCOLATE",
    "CHS": "CHEESE",
    "CHSE": "CHEESE",
    "CHZ": "CHEESE",
    "CILANTRO": "CILANTRO",
    "CKI": "COOKIE",
    "CRSHD": "CRUSHED",
    "FLR": "FLOUR",
    "FRSH": "FRESH",
    "GALA": "GALA",
    "GRAHM": "GRAHAM",
    "HOT": "HOT",
    "HRSRDSH": "HORSERADISH",
    "IMP": "IMPORTED",
    "IQF": "IQF",
    "LENTILS": "LENTILS",
    "LG": "LARGE",
    "MLK": "MILK",
    "MSTRD": "MUSTARD",
    "ONION": "ONION",
    "ORG": "ORGANIC",
    "PEPPER": "PEPPER",
    "PEPPERS": "PEPPERS",
    "POT": "POTATO",
    "POTATO": "POTATO",
    "PPR": "PEPPER",
    "RICOTTA": "RICOTTA",
    "ROASTER": "ROASTER",
    "ROTINI": "ROTINI",
    "SCE": "SAUCE",
    "SLC": "SLICED",
    "SPINCH": "SPINACH",
    "SPNC": "SPINACH",
    "SPINACH": "SPINACH",
    "SQZ": "SQUEEZE",
    "SWT": "SWEET",
    "THYME": "THYME",
    "TOM": "TOMATO",
    "TOMS": "TOMATOES",
    "TRTL": "TORTILLA",
    "VEG": "VEGETABLE",
    "VINEGAR": "VINEGAR",
    "WHT": "WHITE",
    "WHOLE": "WHOLE",
    "YLW": "YELLOW",
    "YLWGLD": "YELLOW_GOLD",
}

# Line items matching any of these patterns are fees/adjustments, not products.
FEE_PATTERNS = [
    re.compile(r"\bBAG CHARGE\b"),
    re.compile(r"\bDISC AT TOTAL\b"),
]

# "<number><unit>" size token, e.g. "7.5Z" or "5 LB" (names are upper-cased first).
SIZE_RE = re.compile(r"(?<![A-Z0-9])(\d+(?:\.\d+)?)(?:\s*)(OZ|Z|LB|LBS|ML|L|FZ|FL OZ|QT|PT|GAL|GA)\b")
# Pack-count token, e.g. "6PK" or "12 CT".
PACK_RE = re.compile(r"(?<![A-Z0-9])(\d+(?:\.\d+)?)(?:\s*)(CT|PK|PKG|PACK)\b")
|
||||
|
||||
|
||||
def to_decimal(value):
    """Convert *value* to a Decimal, or return None for empty/unparseable input."""
    if value in (None, ""):
        return None

    try:
        return Decimal(str(value))
    except (InvalidOperation, ValueError):
        # Non-numeric text (or an unrepresentable value) is treated as missing.
        return None
|
||||
|
||||
|
||||
def format_decimal(value, places=4):
    """Render *value* rounded half-up to *places*, trailing zeros trimmed.

    Returns "" when *value* is None so the CSV cell stays blank.
    """
    if value is None:
        return ""

    exponent = Decimal("1").scaleb(-places)
    rounded = value.quantize(exponent, rounding=ROUND_HALF_UP)
    # normalize() strips trailing zeros; "f" formatting avoids E-notation.
    return format(rounded.normalize(), "f")
|
||||
|
||||
|
||||
def normalize_whitespace(value):
    """Collapse all whitespace runs to single spaces; falsy input becomes ''."""
    text = str(value) if value else ""
    return " ".join(text.split())
|
||||
|
||||
|
||||
def clean_item_name(name):
    """Upper-case an item name and strip receipt artifacts.

    Drops a leading '+', a leading 'PLU#<digits>' tag, and turns remaining
    '#' characters into spaces, re-collapsing whitespace at each step.
    """
    text = " ".join(str(name or "").split()).upper()
    text = re.sub(r"^\+", "", text)
    text = re.sub(r"^PLU#\d+\s*", "", text)
    text = text.replace("#", " ")
    return " ".join(text.split())
|
||||
|
||||
|
||||
def extract_store_brand_prefix(cleaned_name):
    """Return (prefix, brand) when the name starts with a store-brand code.

    A code matches only as the whole name or as a leading word; otherwise
    ('', '') is returned.
    """
    for code, brand in STORE_BRAND_PREFIXES.items():
        matches_exactly = cleaned_name == code
        if matches_exactly or cleaned_name.startswith(code + " "):
            return code, brand
    return "", ""
|
||||
|
||||
|
||||
def extract_image_url(item):
    """Pick the best available image URL from an item payload.

    Accepts either a size->url dict (largest size wins) or a bare URL string;
    anything else yields ''.
    """
    image = item.get("image")
    if isinstance(image, str):
        return image
    if isinstance(image, dict):
        for size_key in ("xlarge", "large", "medium", "small"):
            url = image.get(size_key)
            if url:
                return url
    return ""
|
||||
|
||||
|
||||
def parse_size_and_pack(cleaned_name):
    """Extract the last size token and last pack-count token from a name.

    Returns (size_value, size_unit, pack_qty) as normalized strings, with ''
    for anything not found. The *last* occurrence wins because trailing tokens
    are the most specific on receipt-style names.
    """
    size_value = ""
    size_unit = ""
    pack_qty = ""

    size_hits = SIZE_RE.findall(cleaned_name)
    if size_hits:
        raw_value, raw_unit = size_hits[-1]
        size_value = normalize_number(raw_value)
        size_unit = normalize_unit(raw_unit)

    pack_hits = PACK_RE.findall(cleaned_name)
    if pack_hits:
        pack_qty = normalize_number(pack_hits[-1][0])

    return size_value, size_unit, pack_qty
|
||||
|
||||
|
||||
def normalize_number(value):
    """Canonicalize a numeric string (trailing zeros trimmed); '' when invalid."""
    parsed = to_decimal(value)
    return "" if parsed is None else format(parsed.normalize(), "f")
|
||||
|
||||
|
||||
def normalize_unit(unit):
    """Map a raw size-unit token to its canonical lowercase form.

    Unknown tokens are passed through lower-cased rather than rejected.
    """
    canonical = {
        "Z": "oz",
        "OZ": "oz",
        "FZ": "fl_oz",
        "FL OZ": "fl_oz",
        "LB": "lb",
        "LBS": "lb",
        "ML": "ml",
        "L": "l",
        "QT": "qt",
        "PT": "pt",
        "GAL": "gal",
        "GA": "gal",
    }
    key = " ".join(str(unit or "").split()).upper()
    return canonical.get(key, key.lower())
|
||||
|
||||
|
||||
def strip_measure_tokens(cleaned_name):
    """Remove size tokens, then pack tokens, then re-collapse whitespace."""
    stripped = PACK_RE.sub(" ", SIZE_RE.sub(" ", cleaned_name))
    return normalize_whitespace(stripped)
|
||||
|
||||
|
||||
def expand_token(token):
    """Expand a known receipt abbreviation; unknown tokens pass through."""
    if token in ABBREVIATIONS:
        return ABBREVIATIONS[token]
    return token
|
||||
|
||||
|
||||
def normalize_item_name(cleaned_name):
    """Produce a normalized item name for matching/grouping.

    Strips a store-brand prefix and size/pack tokens, then expands each
    remaining word through the abbreviation table.
    """
    prefix, _ = extract_store_brand_prefix(cleaned_name)
    remainder = cleaned_name
    if prefix:
        remainder = normalize_whitespace(remainder[len(prefix):])

    remainder = strip_measure_tokens(remainder)
    expanded = [expand_token(word) for word in remainder.split()]
    joined = " ".join(word for word in expanded if word)
    return normalize_whitespace(joined)
|
||||
|
||||
|
||||
def guess_measure_type(item, size_unit, pack_qty):
    """Classify a line as weight/volume/count/each from its unit hints.

    Checks, in priority order: the retailer's lb/each code and picked weight,
    the size unit parsed from the name, any pack count, then quantity.
    Returns '' when nothing applies.
    """
    unit_code = normalize_whitespace(item.get("lbEachCd")).upper()
    weight = to_decimal(item.get("totalPickedWeight"))
    quantity = to_decimal(item.get("shipQy"))

    # A recorded picked weight implies a weighed item unless explicitly "EA".
    weighed = weight is not None and weight > 0 and unit_code != "EA"
    if unit_code == "LB" or weighed:
        return "weight"
    if size_unit in ("lb", "oz"):
        return "weight"
    if size_unit in ("ml", "l", "qt", "pt", "gal", "fl_oz"):
        return "volume"
    if pack_qty:
        return "count"
    if unit_code == "EA" or (quantity is not None and quantity > 0):
        return "each"
    return ""
|
||||
|
||||
|
||||
def is_fee_item(cleaned_name):
    """Return True when the cleaned name matches a known fee/charge pattern."""
    for pattern in FEE_PATTERNS:
        if pattern.search(cleaned_name):
            return True
    return False
|
||||
|
||||
|
||||
def derive_prices(item, measure_type, size_value="", size_unit="", pack_qty=""):
    """Derive per-each / per-lb / per-oz prices for one line item.

    Weighed items use the actual picked weight when present; otherwise the
    size token parsed from the name (times qty and pack count) is used as a
    fallback. Results are formatted strings, "" when not derivable.
    """
    qty = to_decimal(item.get("shipQy"))
    line_total = to_decimal(item.get("groceryAmount"))
    picked_weight = to_decimal(item.get("totalPickedWeight"))
    parsed_size = to_decimal(size_value)
    parsed_pack = to_decimal(pack_qty) or Decimal("1")  # missing pack count => 1

    price_per_each = ""
    price_per_lb = ""
    price_per_oz = ""

    # No line total means nothing can be derived.
    if line_total is None:
        return price_per_each, price_per_lb, price_per_oz

    # "each" and "count" items share identical per-unit math (was two
    # duplicated branches).
    if measure_type in ("each", "count") and qty not in (None, Decimal("0")):
        price_per_each = format_decimal(line_total / qty)

    # Preferred weight source: the actual picked weight recorded in lb.
    if measure_type == "weight" and picked_weight not in (None, Decimal("0")):
        per_lb = line_total / picked_weight
        price_per_lb = format_decimal(per_lb)
        price_per_oz = format_decimal(per_lb / Decimal("16"))  # 16 oz per lb
        return price_per_each, price_per_lb, price_per_oz

    # Packaged weight fallback: derive from the size token in the name.
    if measure_type == "weight" and parsed_size not in (None, Decimal("0")) and qty not in (None, Decimal("0")):
        total_units = qty * parsed_pack * parsed_size
        if size_unit == "lb":
            per_lb = line_total / total_units
            price_per_lb = format_decimal(per_lb)
            price_per_oz = format_decimal(per_lb / Decimal("16"))
        elif size_unit == "oz":
            per_oz = line_total / total_units
            price_per_oz = format_decimal(per_oz)
            price_per_lb = format_decimal(per_oz * Decimal("16"))

    return price_per_each, price_per_lb, price_per_oz
|
||||
|
||||
|
||||
def parse_item(order_id, order_date, raw_path, line_no, item):
    """Build one enriched output row from a single raw Giant line item.

    Combines verbatim pass-through fields from the payload with derived
    fields (normalized name, size/pack, measure type, unit prices, flags).
    """
    name_clean = clean_item_name(item.get("itemName", ""))
    size_value, size_unit, pack_qty = parse_size_and_pack(name_clean)
    prefix, brand_guess = extract_store_brand_prefix(name_clean)
    name_norm = normalize_item_name(name_clean)
    measure_type = guess_measure_type(item, size_unit, pack_qty)
    per_each, per_lb, per_oz = derive_prices(
        item,
        measure_type,
        size_value=size_value,
        size_unit=size_unit,
        pack_qty=pack_qty,
    )
    fee = is_fee_item(name_clean)

    # Machine-readable breadcrumbs about what the parser detected.
    notes = []
    if prefix:
        notes.append(f"store_brand_prefix={prefix}")
    if fee:
        notes.append("fee_item")
    if size_value and not size_unit:
        notes.append("size_without_unit")

    row = {
        "retailer": RETAILER,
        "order_id": str(order_id),
        "line_no": str(line_no),
        "observed_item_key": f"{RETAILER}:{order_id}:{line_no}",
        "order_date": normalize_whitespace(order_date),
        "image_url": extract_image_url(item),
        "raw_order_path": raw_path.as_posix(),
        "item_name_norm": name_norm,
        "brand_guess": brand_guess,
        "variant": "",
        "size_value": size_value,
        "size_unit": size_unit,
        "pack_qty": pack_qty,
        "measure_type": measure_type,
        "is_store_brand": "true" if prefix else "false",
        "is_fee": "true" if fee else "false",
        "price_per_each": per_each,
        "price_per_lb": per_lb,
        "price_per_oz": per_oz,
        "parse_version": PARSER_VERSION,
        "parse_notes": ";".join(notes),
    }

    # Raw payload fields copied through verbatim (stringified): output column
    # name -> source key in the raw order JSON.
    passthrough = {
        "pod_id": "podId",
        "item_name": "itemName",
        "upc": "primUpcCd",
        "category_id": "categoryId",
        "category": "categoryDesc",
        "qty": "shipQy",
        "unit": "lbEachCd",
        "unit_price": "unitPrice",
        "line_total": "groceryAmount",
        "picked_weight": "totalPickedWeight",
        "mvp_savings": "mvpSavings",
        "reward_savings": "rewardSavings",
        "coupon_savings": "couponSavings",
        "coupon_price": "couponPrice",
    }
    for field, source_key in passthrough.items():
        row[field] = stringify(item.get(source_key))

    return row
|
||||
|
||||
|
||||
def stringify(value):
    """Return str(value), rendering None as '' for blank CSV cells."""
    return "" if value is None else str(value)
|
||||
|
||||
|
||||
def iter_order_rows(raw_dir):
    """Yield one enriched row per line item across every raw order file.

    Skips history.json, which is an index file rather than an order.
    """
    for order_path in sorted(raw_dir.glob("*.json")):
        if order_path.name == "history.json":
            continue

        payload = json.loads(order_path.read_text(encoding="utf-8"))
        # Fall back to the filename stem when the payload lacks an order id.
        order_id = payload.get("orderId", order_path.stem)
        order_date = payload.get("orderDate", "")

        line_no = 0
        for raw_item in payload.get("items", []):
            line_no += 1
            yield parse_item(order_id, order_date, order_path, line_no, raw_item)
|
||||
|
||||
|
||||
def build_items_enriched(raw_dir):
    """Collect all item rows, ordered by (order_date, order_id, line_no)."""
    def sort_key(row):
        # line_no is stored as a string; compare numerically.
        return row["order_date"], row["order_id"], int(row["line_no"])

    return sorted(iter_order_rows(raw_dir), key=sort_key)
|
||||
|
||||
|
||||
def write_csv(path, rows):
    """Write rows to *path* with the canonical column order, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w", newline="", encoding="utf-8") as out:
        writer = csv.DictWriter(out, fieldnames=OUTPUT_FIELDS)
        writer.writeheader()
        for row in rows:
            writer.writerow(row)
|
||||
|
||||
|
||||
@click.command()
@click.option(
    "--input-dir",
    default=str(DEFAULT_INPUT_DIR),
    show_default=True,
    help="Directory containing Giant raw order json files.",
)
@click.option(
    "--output-csv",
    default=str(DEFAULT_OUTPUT_CSV),
    show_default=True,
    help="CSV path for enriched Giant item rows.",
)
def main(input_dir, output_csv):
    """CLI entry point: enrich all raw Giant order JSON into one CSV."""
    raw_dir = Path(input_dir)
    output_path = Path(output_csv)

    # Fail fast with a clean CLI error instead of a traceback from glob/read.
    if not raw_dir.exists():
        raise click.ClickException(f"input dir does not exist: {raw_dir}")

    rows = build_items_enriched(raw_dir)
    write_csv(output_path, rows)

    click.echo(f"wrote {len(rows)} rows to {output_path}")
|
||||
|
||||
|
||||
# Allow running as a script: `python enrich_giant.py [options]`.
if __name__ == "__main__":
    main()
|
||||
@@ -32,11 +32,11 @@
|
||||
- keep schema minimal but extensible
|
||||
|
||||
** evidence
|
||||
- commit:
|
||||
- commit: `42dbae1` on branch `cx`
|
||||
- tests: reviewed `giant_output/raw/history.json`, one sample raw order json, `giant_output/orders.csv`, `giant_output/items.csv`; documented schemas in `pm/data-model.org`
|
||||
- date: 2026-03-15
|
||||
|
||||
* [ ] t1.3: build giant parser/enricher from raw json (2-4 commits)
|
||||
* [X] t1.3: build giant parser/enricher from raw json (2-4 commits)
|
||||
** acceptance criteria
|
||||
- parser reads giant raw order json files
|
||||
- outputs `items_enriched.csv`
|
||||
@@ -55,8 +55,8 @@
|
||||
|
||||
** evidence
|
||||
- commit:
|
||||
- tests:
|
||||
- date:
|
||||
- tests: `./venv/bin/python -m unittest discover -s tests`; `./venv/bin/python enrich_giant.py`; verified `giant_output/items_enriched.csv` on real raw data
|
||||
- date: 2026-03-16
|
||||
|
||||
* [ ] t1.4: generate observed-product layer from enriched items (2-3 commits)
|
||||
|
||||
|
||||
190
tests/test_enrich_giant.py
Normal file
190
tests/test_enrich_giant.py
Normal file
@@ -0,0 +1,190 @@
|
||||
import csv
|
||||
import json
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import enrich_giant
|
||||
|
||||
|
||||
class EnrichGiantTests(unittest.TestCase):
    """Unit tests for enrich_giant: token parsing, row enrichment, CSV output."""

    def test_parse_size_and_pack_handles_pack_and_weight_tokens(self):
        """A name with both pack (6PK) and size (7.5Z) tokens yields both."""
        size_value, size_unit, pack_qty = enrich_giant.parse_size_and_pack(
            "COKE CHERRY 6PK 7.5Z"
        )

        self.assertEqual("7.5", size_value)
        self.assertEqual("oz", size_unit)
        self.assertEqual("6", pack_qty)

    def test_parse_item_marks_store_brand_fee_and_weight_prices(self):
        """Store-brand prefix, picked-weight pricing, and fee flagging."""
        # Weighed store-brand item: 3.98 total over 2 lb picked -> 1.99/lb.
        row = enrich_giant.parse_item(
            order_id="abc123",
            order_date="2026-03-01",
            raw_path=Path("raw/abc123.json"),
            line_no=1,
            item={
                "podId": 1,
                "shipQy": 1,
                "totalPickedWeight": 2,
                "unitPrice": 3.98,
                "itemName": "+SB GALA APPLE 5 LB",
                "lbEachCd": "LB",
                "groceryAmount": 3.98,
                "primUpcCd": "111",
                "mvpSavings": 0,
                "rewardSavings": 0,
                "couponSavings": 0,
                "couponPrice": 0,
                "categoryId": "1",
                "categoryDesc": "Grocery",
                "image": {"large": "https://example.test/apple.jpg"},
            },
        )

        self.assertEqual("SB", row["brand_guess"])
        self.assertEqual("GALA APPLE", row["item_name_norm"])
        self.assertEqual("5", row["size_value"])
        self.assertEqual("lb", row["size_unit"])
        self.assertEqual("weight", row["measure_type"])
        self.assertEqual("true", row["is_store_brand"])
        self.assertEqual("1.99", row["price_per_lb"])
        self.assertEqual("0.1244", row["price_per_oz"])
        self.assertEqual("https://example.test/apple.jpg", row["image_url"])

        # Fee line: matched by the BAG CHARGE pattern, name left unexpanded.
        fee_row = enrich_giant.parse_item(
            order_id="abc123",
            order_date="2026-03-01",
            raw_path=Path("raw/abc123.json"),
            line_no=2,
            item={
                "podId": 2,
                "shipQy": 1,
                "totalPickedWeight": 0,
                "unitPrice": 0.05,
                "itemName": "GL BAG CHARGE",
                "lbEachCd": "EA",
                "groceryAmount": 0.05,
                "primUpcCd": "",
                "mvpSavings": 0,
                "rewardSavings": 0,
                "couponSavings": 0,
                "couponPrice": 0,
                "categoryId": "1",
                "categoryDesc": "Grocery",
            },
        )

        self.assertEqual("true", fee_row["is_fee"])
        self.assertEqual("GL BAG CHARGE", fee_row["item_name_norm"])

    def test_parse_item_derives_packaged_weight_prices_from_size_tokens(self):
        """No picked weight: price derives from qty * pack * size (2*6*7.5 oz)."""
        row = enrich_giant.parse_item(
            order_id="abc123",
            order_date="2026-03-01",
            raw_path=Path("raw/abc123.json"),
            line_no=1,
            item={
                "podId": 1,
                "shipQy": 2,
                "totalPickedWeight": 0,
                "unitPrice": 3.0,
                "itemName": "PEPSI 6PK 7.5Z",
                "lbEachCd": "EA",
                "groceryAmount": 6.0,
                "primUpcCd": "111",
                "mvpSavings": 0,
                "rewardSavings": 0,
                "couponSavings": 0,
                "couponPrice": 0,
                "categoryId": "1",
                "categoryDesc": "Grocery",
            },
        )

        self.assertEqual("weight", row["measure_type"])
        self.assertEqual("6", row["pack_qty"])
        self.assertEqual("7.5", row["size_value"])
        self.assertEqual("0.0667", row["price_per_oz"])
        self.assertEqual("1.0667", row["price_per_lb"])

    def test_build_items_enriched_reads_raw_order_files_and_writes_csv(self):
        """End-to-end: history.json is skipped, rows sorted by date, CSV written."""
        with tempfile.TemporaryDirectory() as tmpdir:
            raw_dir = Path(tmpdir) / "raw"
            raw_dir.mkdir()
            # Index file must be ignored by the enricher.
            (raw_dir / "history.json").write_text("{}", encoding="utf-8")
            # Written out of date order to exercise sorting.
            (raw_dir / "order-2.json").write_text(
                json.dumps(
                    {
                        "orderId": "order-2",
                        "orderDate": "2026-03-02",
                        "items": [
                            {
                                "podId": 20,
                                "shipQy": 1,
                                "totalPickedWeight": 0,
                                "unitPrice": 2.99,
                                "itemName": "SB ROTINI 16Z",
                                "lbEachCd": "EA",
                                "groceryAmount": 2.99,
                                "primUpcCd": "222",
                                "mvpSavings": 0,
                                "rewardSavings": 0,
                                "couponSavings": 0,
                                "couponPrice": 0,
                                "categoryId": "1",
                                "categoryDesc": "Grocery",
                                "image": {"small": "https://example.test/rotini.jpg"},
                            }
                        ],
                    }
                ),
                encoding="utf-8",
            )
            (raw_dir / "order-1.json").write_text(
                json.dumps(
                    {
                        "orderId": "order-1",
                        "orderDate": "2026-03-01",
                        "items": [
                            {
                                "podId": 10,
                                "shipQy": 2,
                                "totalPickedWeight": 0,
                                "unitPrice": 1.5,
                                "itemName": "PEPSI 6PK 7.5Z",
                                "lbEachCd": "EA",
                                "groceryAmount": 3.0,
                                "primUpcCd": "111",
                                "mvpSavings": 0,
                                "rewardSavings": 0,
                                "couponSavings": 0,
                                "couponPrice": 0,
                                "categoryId": "1",
                                "categoryDesc": "Grocery",
                            }
                        ],
                    }
                ),
                encoding="utf-8",
            )

            rows = enrich_giant.build_items_enriched(raw_dir)
            output_csv = Path(tmpdir) / "items_enriched.csv"
            enrich_giant.write_csv(output_csv, rows)

            self.assertEqual(["order-1", "order-2"], [row["order_id"] for row in rows])
            self.assertEqual("PEPSI", rows[0]["item_name_norm"])
            self.assertEqual("6", rows[0]["pack_qty"])
            self.assertEqual("7.5", rows[0]["size_value"])
            self.assertEqual("true", rows[1]["is_store_brand"])

            with output_csv.open(newline="", encoding="utf-8") as handle:
                written_rows = list(csv.DictReader(handle))

            self.assertEqual(2, len(written_rows))
            self.assertEqual(enrich_giant.OUTPUT_FIELDS, list(written_rows[0].keys()))
|
||||
|
||||
|
||||
# Allow running this test module directly: `python tests/test_enrich_giant.py`.
if __name__ == "__main__":
    unittest.main()
|
||||
Reference in New Issue
Block a user