Streaming support for metadata

#17
by sal4ahm - opened
           meta_cat = category.replace("raw_review", "raw_meta")  # map a review config name to its matching metadata config
            print(f"\n📦 Loading metadata for: {meta_cat}")
            # NOTE(review): output path is keyed on `category`, not `meta_cat` — confirm this naming is intended
            meta_path = os.path.join(META_DIR, f"{task_name}__{category}.jsonl")
            # Stream the full metadata split lazily; trust_remote_code executes the dataset's loading script
            meta_dataset = load_dataset(
                "McAuley-Lab/Amazon-Reviews-2023",
                name=meta_cat,
                split="full",
                streaming=True,
                trust_remote_code=True
            )
            print(meta_dataset)
            # Persist the streamed records to JSONL, keeping only META_FIELDS
            save_meta_stream_to_jsonl(
                dataset=meta_dataset,
                out_path=meta_path,
                fields=META_FIELDS,
                max_samples=max_samples
            )
def save_meta_stream_to_jsonl(dataset, out_path, fields, max_samples=None):
    """Stream records from *dataset* and write selected *fields* to a JSONL file.

    Args:
        dataset: Iterable of dict-like records (e.g. a streaming HF dataset).
        out_path: Destination ``.jsonl`` path (overwritten if it exists).
        fields: Field names to copy from each record.
        max_samples: Optional cap on the number of records written.

    Returns:
        int: The number of records actually written.
    """
    count = 0
    with open(out_path, "w", encoding="utf-8") as f_out:
        for record in tqdm(dataset, desc=f"Saving to {out_path}"):
            flat = {}
            for key in fields:
                value = record.get(key)
                # Nested image/video structures are what trip the Arrow schema
                # cast downstream; drop them rather than serialize them.
                if key in ("images", "videos") and isinstance(value, (dict, list)):
                    continue
                flat[key] = value
            # "details" sometimes arrives JSON-encoded; decode it when possible.
            if isinstance(flat.get("details"), str):
                try:
                    flat["details"] = json.loads(flat["details"])
                except json.JSONDecodeError:
                    pass  # keep the raw string if it is not valid JSON
            f_out.write(json.dumps(flat) + "\n")
            count += 1
            if max_samples and count >= max_samples:
                break
    print(f"✅ Saved {count} metadata records to {out_path}")
    return count
Traceback (most recent call last):
  File "/home/jupyter/HyperAmazon/etl/ingest/load_hf_dataset.py", line 142, in <module>
    stream_and_save(task_name=args.task, config_path=args.config, save_meta=not args.no_meta)
  File "/home/jupyter/HyperAmazon/etl/ingest/load_hf_dataset.py", line 126, in stream_and_save
    save_meta_stream_to_jsonl(
  File "/home/jupyter/HyperAmazon/etl/ingest/load_hf_dataset.py", line 74, in save_meta_stream_to_jsonl
    for record in tqdm(dataset, desc=f"Saving to {out_path}"):
  File "/opt/conda/envs/hyperamazon/lib/python3.10/site-packages/tqdm/std.py", line 1181, in __iter__
    for obj in iterable:
  File "/opt/conda/envs/hyperamazon/lib/python3.10/site-packages/datasets/iterable_dataset.py", line 2270, in __iter__
    for key, example in ex_iterable:
  File "/opt/conda/envs/hyperamazon/lib/python3.10/site-packages/datasets/iterable_dataset.py", line 1856, in __iter__
    for key, pa_table in self._iter_arrow():
  File "/opt/conda/envs/hyperamazon/lib/python3.10/site-packages/datasets/iterable_dataset.py", line 1888, in _iter_arrow
    pa_table = cast_table_to_features(pa_table, self.features)
  File "/opt/conda/envs/hyperamazon/lib/python3.10/site-packages/datasets/table.py", line 2221, in cast_table_to_features
    return pa.Table.from_arrays(arrays, schema=features.arrow_schema)
  File "pyarrow/table.pxi", line 4893, in pyarrow.lib.Table.from_arrays
  File "pyarrow/table.pxi", line 1622, in pyarrow.lib._sanitize_arrays
  File "pyarrow/array.pxi", line 402, in pyarrow.lib.asarray
  File "pyarrow/table.pxi", line 593, in pyarrow.lib.ChunkedArray.cast
  File "/opt/conda/envs/hyperamazon/lib/python3.10/site-packages/pyarrow/compute.py", line 410, in cast
    return call_function("cast", [arr], options, memory_pool)
  File "pyarrow/_compute.pyx", line 612, in pyarrow._compute.call_function
  File "pyarrow/_compute.pyx", line 407, in pyarrow._compute.Function.call
  File "pyarrow/error.pxi", line 155, in pyarrow.lib.pyarrow_internal_check_status
  File "pyarrow/error.pxi", line 92, in pyarrow.lib.check_status
pyarrow.lib.ArrowNotImplementedError: Unsupported cast from list<item: struct<hi_res: string, large: string, thumb: string, variant: string>> to struct using function cast_struct

Sign up or log in to comment