revert

- OminiControl/src/flux/lora_controller.py +0 -6
- app.py +1 -1

OminiControl/src/flux/lora_controller.py
CHANGED
@@ -7,12 +7,6 @@ class enable_lora:
         self.activated: bool = activated
         if activated:
             return
-
-        for lora_module in lora_modules:
-            for active_adapter in lora_module.active_adapters:
-                scaling = lora_module.scaling
-                if active_adapter not in scaling:
-                    scaling[active_adapter] = 1.0

         self.lora_modules: List[BaseTunerLayer] = [
             each for each in lora_modules if isinstance(each, BaseTunerLayer)
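Note: the block removed above had been seeding a default LoRA scale of 1.0 for any active adapter that did not yet have an entry in the layer's scaling dict. Below is a minimal, self-contained sketch of that behaviour, assuming standard peft tuner layers; the helper name ensure_default_scaling is hypothetical and not part of OminiControl.

# Sketch of the logic this revert removes: give every active adapter on a
# peft BaseTunerLayer a scaling factor of 1.0 if none is recorded yet.
# ensure_default_scaling is a hypothetical helper, not OminiControl API.
from typing import List

from peft.tuners.tuners_utils import BaseTunerLayer


def ensure_default_scaling(lora_modules: List[BaseTunerLayer]) -> None:
    for lora_module in lora_modules:
        if not isinstance(lora_module, BaseTunerLayer):
            continue  # skip plain modules that carry no LoRA adapters
        for active_adapter in lora_module.active_adapters:
            # `scaling` maps adapter name -> multiplicative LoRA scale
            scaling = lora_module.scaling
            if active_adapter not in scaling:
                scaling[active_adapter] = 1.0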

app.py
CHANGED
@@ -1,5 +1,4 @@
 import gradio as gr
-import spaces
 from ominicontrol import generate_image
 import os
 
@@ -195,6 +194,7 @@ def infer(
 
 
 if USE_ZERO_GPU:
+    import spaces
     infer = spaces.GPU(infer)
 
 if __name__ == "__main__":
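Note: the change above defers the spaces import so it only happens when the Space actually runs on ZeroGPU, where infer gets wrapped with spaces.GPU. Below is a minimal sketch of the restored wiring; reading USE_ZERO_GPU from an environment variable and the infer body are placeholders, and only spaces.GPU(infer) mirrors the actual diff.

# Sketch of the conditional ZeroGPU wiring this revert restores. The env-var
# check and the infer body are assumptions for illustration.
import os

USE_ZERO_GPU = os.environ.get("USE_ZERO_GPU", "0") == "1"


def infer(prompt: str) -> str:
    # Placeholder for the real image-generation entry point in app.py.
    return f"generated: {prompt}"


if USE_ZERO_GPU:
    # Deferred import: environments without the `spaces` package still run.
    import spaces

    infer = spaces.GPU(infer)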