ellisbrown committed on
Commit
5e0f151
·
verified ·
1 Parent(s): 76c0bf4

Upload unpack.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. unpack.py +112 -0
unpack.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Download and unpack the objaverse_vida dataset from HuggingFace.
4
+
5
+ This script downloads the dataset and extracts all tar archives to restore
6
+ the original directory structure expected by downstream consumers.
7
+
8
+ Usage:
9
+ python unpack.py [target_directory]
10
+
11
+ Example:
12
+ python unpack.py ./objaverse_vida
13
+ python unpack.py /data/datasets/objaverse_vida
14
+ """
15
+
16
+ import os
17
+ import sys
18
+ import tarfile
19
+ from pathlib import Path
20
+
21
+
22
def _extract_tar(tar_path: Path, dest_dir: Path) -> None:
    """Safely extract *tar_path* into *dest_dir*, then delete the archive.

    Uses the stdlib "data" extraction filter when available (Python 3.12+,
    backported to 3.8.17/3.9.17/3.10.12/3.11.4) to reject path-traversal
    members in the downloaded archives and to avoid the tarfile
    DeprecationWarning emitted on 3.12+ when no filter is given.
    """
    # Older interpreters have no extraction filters; fall back to legacy mode.
    extract_kwargs = {"filter": "data"} if hasattr(tarfile, "data_filter") else {}
    with tarfile.open(tar_path) as tar:
        tar.extractall(dest_dir, **extract_kwargs)
    tar_path.unlink()  # Remove tar after extraction to reclaim disk space


def unpack_dataset(target_dir: str = "./objaverse_vida"):
    """Download and unpack the dataset to the target directory.

    Args:
        target_dir: Directory the dataset snapshot is downloaded into and
            unpacked under; created by ``snapshot_download`` if missing.

    Side effects:
        Downloads ~30GB from the HuggingFace Hub, extracts and then deletes
        the tar shards, and removes the now-redundant manifest.json.
        Exits the process with status 1 if huggingface_hub is not installed.
    """
    # Import here to give a helpful error message if not installed.
    try:
        from huggingface_hub import snapshot_download
    except ImportError:
        print("Error: huggingface_hub not installed.")
        print("Install with: pip install huggingface_hub[hf_transfer]")
        sys.exit(1)

    target = Path(target_dir).resolve()
    print(f"Target directory: {target}")

    # Enable hf_transfer for faster downloads (no-op if not available).
    os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

    # Download the dataset snapshot.
    print("\n[1/3] Downloading dataset from HuggingFace...")
    print(" (This may take a while for ~30GB)")
    snapshot_download(
        repo_id="spatial-training/objaverse_vida",
        repo_type="dataset",
        local_dir=str(target),
        # NOTE(review): deprecated/ignored by recent huggingface_hub releases;
        # kept for compatibility with older versions that still honor it.
        local_dir_use_symlinks=False
    )
    print(" Done!")

    # Unpack processed object shards (shard_*.tar) in place.
    processed_dir = target / "processed_2023_07_28"
    if processed_dir.exists():
        print("\n[2/3] Unpacking processed objects...")
        shards = sorted(processed_dir.glob("shard_*.tar"))
        total_shards = len(shards)
        for i, shard in enumerate(shards, 1):
            print(f" [{i}/{total_shards}] Extracting {shard.name}...")
            _extract_tar(shard, processed_dir)

        # Remove manifest (only described the packed shards; no longer needed).
        manifest = processed_dir / "manifest.json"
        if manifest.exists():
            manifest.unlink()
        print(" Done!")
    else:
        print("\n[2/3] Skipping processed objects (directory not found)")

    # Unpack per-split house archives into train/test/val subdirectories.
    houses_dir = target / "houses_2023_07_28"
    if houses_dir.exists():
        print("\n[3/3] Unpacking house files...")
        for split in ["train", "test", "val"]:
            tar_file = houses_dir / f"{split}_individual.tar"
            if tar_file.exists():
                print(f" Extracting {tar_file.name}...")
                split_dir = houses_dir / split
                split_dir.mkdir(exist_ok=True)
                _extract_tar(tar_file, split_dir)
        print(" Done!")
    else:
        print("\n[3/3] Skipping house files (directory not found)")

    # Summary
    print("\n" + "=" * 60)
    print("Dataset unpacked successfully!")
    print("=" * 60)
    print(f"\nLocation: {target}")
    print("\nStructure:")
    print(" processed_2023_07_28/ - ~40K 3D object directories")
    print(" houses_2023_07_28/ - train/test/val house layouts")
    print(" procthor_databases_2023_07_28/ - asset databases")
    print(" 0.json - sample house")
99
def main():
    """Parse the optional target-directory CLI argument and run the unpacker."""
    args = sys.argv[1:]
    if args and args[0] in ("-h", "--help"):
        print(__doc__)
        sys.exit(0)
    # First positional argument overrides the default target directory.
    target = args[0] if args else "./objaverse_vida"
    unpack_dataset(target)


if __name__ == "__main__":
    main()