shaun3141 committed
Commit d632650 · 1 Parent(s): 87f348f

Switch to Docker SDK to use custom Dockerfile with multi-stage caching

Files changed (2)
  1. README.md +2 -3
  2. requirements.txt +5 -37
README.md CHANGED
@@ -3,9 +3,8 @@ title: Caribbean Voices - OWSM v3.1 Platform
 emoji: 🎤
 colorFrom: purple
 colorTo: pink
-sdk: gradio
-sdk_version: 5.49.1
-app_file: app.py
+sdk: docker
+app_port: 7860
 pinned: false
 hardware: gpu-a10g-large
 license: mit
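With `sdk: docker`, Spaces builds the image from a Dockerfile in the repository instead of the managed Gradio runtime (which is why `sdk_version` and `app_file` are dropped), and `app_port` tells the Spaces proxy which container port to forward traffic to. The Dockerfile itself is not among the files changed in this commit, so the fragment below is only an illustrative sketch of the runtime end of such a Dockerfile; the base image, environment variables, and launch command are assumptions, not taken from this repo (dependency installation is sketched after the requirements.txt diff below).

# Illustrative runtime portion of a Docker Space; not the repo's actual Dockerfile.
FROM python:3.10-slim
WORKDIR /app
COPY . .
# Gradio reads these environment variables; the port must match app_port: 7860 above.
ENV GRADIO_SERVER_NAME=0.0.0.0 GRADIO_SERVER_PORT=7860
EXPOSE 7860
CMD ["python", "app.py"]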
requirements.txt CHANGED
@@ -1,45 +1,13 @@
-# Complete requirements file for Hugging Face Spaces
-# NOTE: HF Spaces may use their own Dockerfile, so this file must be self-contained
-# IMPORTANT: torch MUST be listed before flash-attn (flash-attn needs torch to build)
-
-# Core dependencies
-numpy>=1.24.0
-pandas>=2.0.0
-scikit-learn>=1.3.0
-
-# Audio processing
-librosa>=0.10.0
-soundfile>=0.12.0
-
-# PyTorch - MUST be installed before flash-attn
-# PyTorch 2.2+ with CUDA support (required for Flash Attention on A10G GPUs)
-torch>=2.2.0
-torchaudio>=2.2.0
-# torchcodec required for audio decoding in datasets library
-torchcodec>=0.1.0,<1.0.0
-
-# ML frameworks
-transformers>=4.30.0
-
-# Hugging Face ecosystem
-huggingface_hub>=0.20.0
-datasets>=2.14.0
-
-# UI
-gradio>=5.49.1
-
-# ESPnet dependencies
-espnet>=202301
-espnet_model_zoo>=0.1.0
-
-# Evaluation
+# Remaining dependencies not in requirements-base.txt or requirements-espnet.txt
+# These are installed after base and ESPnet for optimal caching
+# NOTE: torch is installed from requirements-base.txt, so flash-attn can build successfully
 jiwer>=3.0.0
 
 # Build dependencies for Flash Attention compilation
 packaging>=21.0
 ninja>=1.10.0
 
-# Flash Attention - MUST be installed AFTER torch
-# Optimized for NVIDIA A10G GPUs (Ampere architecture)
+# Flash Attention - optimized for NVIDIA A10G GPUs (Ampere architecture)
 # Required for best performance on HF Spaces with GPU
+# NOTE: torch must be installed first (from requirements-base.txt) for flash-attn to build
 flash-attn>=2.3.0
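The rewritten requirements.txt keeps only the dependencies not already covered by requirements-base.txt and requirements-espnet.txt, so the custom Dockerfile can install each file in its own layer: the heavy, rarely-changing PyTorch and ESPnet layers stay cached between rebuilds, and flash-attn builds in the final layer against the torch that the base layer already installed. Neither those two files nor the Dockerfile appear in this commit, so the layering below is only a sketch under those assumptions; the base image and package-manager commands are illustrative, not the repo's actual build.

# Illustrative dependency layering for Docker layer caching; not the repo's actual Dockerfile.
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04
RUN apt-get update && apt-get install -y --no-install-recommends \
        python3 python3-pip git && rm -rf /var/lib/apt/lists/*
WORKDIR /app

# Layer 1: torch and other slow-changing base deps; cached unless requirements-base.txt changes.
COPY requirements-base.txt .
RUN pip3 install --no-cache-dir -r requirements-base.txt

# Layer 2: ESPnet stack, also rarely edited.
COPY requirements-espnet.txt .
RUN pip3 install --no-cache-dir -r requirements-espnet.txt

# Layer 3: everything else, including flash-attn, which needs torch already present to compile.
COPY requirements.txt .
RUN pip3 install --no-cache-dir -r requirements.txt

# Copy application code last so code edits don't invalidate the cached pip layers.
COPY . .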