"""Within Us AI: Upload helper for Genesis repos.

Requires:
    pip install datasets huggingface_hub pyarrow pandas

Usage:
    huggingface-cli login
    python scripts/hf_upload.py --repo_id YOURNAME/REPO --data_dir data
"""
| | from __future__ import annotations |
| | import argparse |
| | from pathlib import Path |
| | from datasets import load_dataset |
| |
|
def main() -> int:
    """Upload local train/validation splits to the Hugging Face Hub.

    Looks for Parquet shards (``train-*.parquet`` / ``validation-*.parquet``)
    in ``--data_dir`` first; if none exist, falls back to JSONL shards with
    the same naming scheme.

    Returns:
        0 on success (used as the process exit code).

    Raises:
        FileNotFoundError: if neither Parquet nor JSONL train shards exist.
    """
    ap = argparse.ArgumentParser(description="Upload a dataset to the Hugging Face Hub.")
    ap.add_argument("--repo_id", required=True, help="Target dataset repo, e.g. USER/NAME")
    ap.add_argument("--data_dir", default="data", help="Directory containing the split files")
    ap.add_argument("--private", action="store_true", help="Create/push the repo as private")
    args = ap.parse_args()

    d = Path(args.data_dir)

    # BUG FIX: load_dataset() raises when a data_files glob matches nothing,
    # so the original `len(ds["train"]) == 0` JSONL fallback was unreachable.
    # Probe the filesystem first and load whichever format actually exists.
    if list(d.glob("train-*.parquet")):
        ds = load_dataset(
            "parquet",
            data_files={
                "train": str(d / "train-*.parquet"),
                "validation": str(d / "validation-*.parquet"),
            },
        )
    elif list(d.glob("train-*.jsonl")):
        ds = load_dataset(
            "json",
            data_files={
                "train": str(d / "train-*.jsonl"),
                "validation": str(d / "validation-*.jsonl"),
            },
        )
    else:
        raise FileNotFoundError(
            f"No train-*.parquet or train-*.jsonl shards found in {d}"
        )

    ds.push_to_hub(args.repo_id, private=args.private)
    print(f"Uploaded to https://huggingface.co/datasets/{args.repo_id}")
    return 0
| |
|
if __name__ == "__main__":
    # SystemExit propagates main()'s integer return value as the process
    # exit code without printing a traceback.
    raise SystemExit(main())
| |
|