Commit 2897c5e (parent: 0bc21a0), committed by GitHub Actions

🚀 Deploy embedder from GitHub Actions - 2025-10-27 20:24:18

Files changed (3)
  1. Dockerfile +3 -1
  2. embedder.py +6 -1
  3. requirements.txt +2 -1
Dockerfile CHANGED
@@ -33,7 +33,9 @@ RUN pip install --no-cache-dir --upgrade pip && \
     pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu \
         torch torchvision && \
     pip install --no-cache-dir \
-        open_clip_torch
+        open_clip_torch && \
+    pip install --no-cache-dir \
+        git+https://github.com/apple/ml-mobileclip.git
 
 
 # Copy application code
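
The new layer only adds the ml-mobileclip package on top of the CPU-only torch wheels already being installed. Below is a minimal, hypothetical smoke test (not part of this commit) that could be run inside the built image to confirm the dependencies resolve; the reparameterize_model import path is the one used by embedder.py below, everything else is illustrative:

import torch
import open_clip
from mobileclip.modules.common.mobileone import reparameterize_model

# The Dockerfile installs CPU-only wheels, so CUDA is expected to be unavailable.
print("torch", torch.__version__, "cuda available:", torch.cuda.is_available())
print("open_clip import OK; reparameterize_model callable:", callable(reparameterize_model))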
embedder.py CHANGED
@@ -11,6 +11,7 @@ from fastapi.responses import JSONResponse
 from PIL import Image
 from pydantic import BaseModel, Field
 from open_clip import create_model_and_transforms
+from mobileclip.modules.common.mobileone import reparameterize_model
 
 
 # --- Configuration ---
@@ -18,7 +19,7 @@ MAX_IMAGE_SIZE_MB = 10
 MAX_BATCH_SIZE = 10
 ALLOWED_EXTENSIONS = {"jpg", "jpeg", "png", "webp"}
 MODEL_NAME = "MobileCLIP2-S2"
-PRETRAINED = "datacomp1b"
+PRETRAINED = "dfndr2b"
 
 
 # --- Pydantic Models ---
@@ -89,6 +90,10 @@ def load_model():
     model = model.to(device)
     model.eval()
 
+    # Reparameterize model for inference (required for MobileCLIP)
+    print("Reparameterizing model for inference...")
+    model = reparameterize_model(model)
+
     print(f"✓ Model loaded: {MODEL_NAME}")
     print(f"✓ Pretrained: {PRETRAINED}")
     print(f"✓ Device: {device}")
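
Taken together, the embedder.py changes amount to the load path sketched below. This is an illustrative reconstruction from the imports and constants shown in the diff, not the file's actual load_model(); the device selection is a simplifying assumption, and request handling is omitted:

import torch
from open_clip import create_model_and_transforms
from mobileclip.modules.common.mobileone import reparameterize_model

MODEL_NAME = "MobileCLIP2-S2"
PRETRAINED = "dfndr2b"  # replaces the previous "datacomp1b" tag

device = "cuda" if torch.cuda.is_available() else "cpu"  # assumed device pick
model, _, preprocess = create_model_and_transforms(MODEL_NAME, pretrained=PRETRAINED)
model = model.to(device)
model.eval()

# MobileCLIP's MobileOne-style blocks carry extra training-time branches;
# folding them into single convolutions (the MobileOne reparameterization
# step) before serving avoids paying that cost on every request, which is
# why the diff reparameterizes once at load time.
model = reparameterize_model(model)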
requirements.txt CHANGED
@@ -5,4 +5,5 @@ pillow
 numpy
 pydantic
 torch
-open_clip_torch
+open_clip_torch
+ml-mobileclip @ git+https://github.com/apple/ml-mobileclip.git