| """ | |
| Simple HF Space checker without Unicode issues | |
| """ | |
| import os | |
| import sys | |
| # Fix Windows encoding | |
| if sys.platform == 'win32': | |
| import io | |
| sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8') | |
| from huggingface_hub import HfApi | |
| YOUR_USERNAME = "nocapdev" | |
| SPACE_NAME = "my-gradio-momask" | |
| TOKEN = os.getenv("HUGGINGFACE_TOKEN") | |
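
# Only read operations are performed below (runtime status + file listing),
# so a read-scoped token is sufficient; the script never writes to the Space.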
| print("=" * 80) | |
| print("HF SPACE STATUS CHECK") | |
| print("=" * 80) | |
| repo_id = f"{YOUR_USERNAME}/{SPACE_NAME}" | |
| if not TOKEN: | |
| print("\nWARNING: HUGGINGFACE_TOKEN not set") | |
| print(f"\nTo check your Space manually:") | |
| print(f"1. Visit: https://huggingface.co/spaces/{repo_id}") | |
| print(f"2. Click 'Logs' tab") | |
| print(f"3. Copy the last 50 lines") | |
| print(f"4. Look for ERROR messages") | |
| sys.exit(0) | |

try:
    api = HfApi(token=TOKEN)

    print(f"\nSpace: {repo_id}")
    print(f"URL: https://huggingface.co/spaces/{repo_id}")
    print(f"Logs: https://huggingface.co/spaces/{repo_id}/logs")

    print("\n" + "-" * 80)
    print("RUNTIME INFO")
    print("-" * 80)
    runtime = api.get_space_runtime(repo_id=repo_id)
    print(f"\nStatus: {runtime.stage}")

    hardware = str(runtime.hardware) if runtime.hardware else "cpu-basic"
    print(f"Hardware: {hardware}")

    # Status analysis
    print("\n" + "-" * 80)
    print("ANALYSIS")
    print("-" * 80)
    if runtime.stage == "RUNNING":
        print("\n[OK] Space is RUNNING")
    elif runtime.stage == "BUILDING":
        print("\n[WAIT] Space is BUILDING (wait 2-3 minutes)")
    elif runtime.stage == "STOPPED":
        print("\n[ERROR] Space STOPPED - may have crashed")
        print("Check logs for errors!")
    else:
        print(f"\n[WARNING] Unexpected status: {runtime.stage}")

    # Hardware check
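    # (hardware is a flavour string such as "cpu-basic", "cpu-upgrade", "t4-small",
    # or "a10g-small"; the exact set of flavours offered by Hugging Face may change)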
    if 'cpu' in hardware.lower():
        print("\n[SLOW] Using CPU (free tier)")
        print("   - Generation time: 15-30 minutes per prompt")
        print("   - This is NORMAL for the free tier")
        print("   - Solution: wait OR upgrade to GPU")
    else:
        print(f"\n[FAST] Using GPU: {hardware}")
        print("   - Generation time: 30-60 seconds per prompt")

    # Check files
    print("\n" + "-" * 80)
    print("FILES CHECK")
    print("-" * 80)
    files = api.list_repo_files(repo_id=repo_id, repo_type="space")

    # Critical files
    for f in ['app.py', 'requirements.txt', 'README.md']:
        if f in files:
            print(f"[OK] {f}")
        else:
            print(f"[MISSING] {f}")

    # Checkpoints
    checkpoint_files = [f for f in files if 'checkpoint' in f.lower() or f.endswith('.tar')]
    if checkpoint_files:
        print(f"\n[OK] Found {len(checkpoint_files)} checkpoint files")
    else:
        print("\n[WARNING] No checkpoint files found!")
        print("   Models may not be uploaded")
        print("   The app will fail to load")
| print("\n" + "=" * 80) | |
| print("NEXT STEPS") | |
| print("=" * 80) | |
| print(f"\n1. View logs at: https://huggingface.co/spaces/{repo_id}/logs") | |
| print("\n2. Look for:") | |
| print(" - 'Using device: cpu' or 'cuda'") | |
| print(" - 'Loading models...'") | |
| print(" - Any ERROR messages") | |
| print(" - 'Model checkpoints not found'") | |
| print("\n3. Copy the last 50 lines from logs") | |
| print(" Especially any lines with ERROR or Exception") | |
| print("\n4. Share those lines to get exact solution") | |
| print("\n" + "=" * 80) | |

except Exception as e:
    print(f"\nERROR: {e}")
    print("\nManual check:")
    print(f"Visit: https://huggingface.co/spaces/{repo_id}/logs")
    print("Copy the last 50 lines and share them")