mrbui1990 committed on
Commit
6c68c17
·
verified ·
1 Parent(s): 78e2465

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -4
app.py CHANGED
@@ -381,7 +381,7 @@ def encode_image(pil_image):
381
  dtype = torch.bfloat16
382
  device = "cuda" if torch.cuda.is_available() else "cpu"
383
 
384
- pipe = QwenImageEditPlusPipeline.from_pretrained("Qwen/Qwen-Image-Edit-2509",
385
  transformer= QwenImageTransformer2DModel.from_pretrained("linoyts/Qwen-Image-Edit-Rapid-AIO",
386
  subfolder='transformer',
387
  torch_dtype=dtype,
@@ -479,9 +479,6 @@ def infer(
479
  print(f"Calling pipeline with prompt: '{prompt}'")
480
  print(f"Negative Prompt: '{negative_prompt}'")
481
  print(f"Seed: {seed}, Steps: {num_inference_steps}, Guidance: {true_guidance_scale}, Size: {width}x{height}")
482
- if rewrite_prompt and len(pil_images) > 0:
483
- prompt = polish_prompt_hf(prompt, pil_images)
484
- print(f"Rewritten Prompt: {prompt}")
485
 
486
 
487
  # Generate the image
 
381
  dtype = torch.bfloat16
382
  device = "cuda" if torch.cuda.is_available() else "cpu"
383
 
384
+ pipe = QwenImageEditPlusPipeline.from_pretrained("Qwen/Qwen-Image-Edit-2511",
385
  transformer= QwenImageTransformer2DModel.from_pretrained("linoyts/Qwen-Image-Edit-Rapid-AIO",
386
  subfolder='transformer',
387
  torch_dtype=dtype,
 
479
  print(f"Calling pipeline with prompt: '{prompt}'")
480
  print(f"Negative Prompt: '{negative_prompt}'")
481
  print(f"Seed: {seed}, Steps: {num_inference_steps}, Guidance: {true_guidance_scale}, Size: {width}x{height}")
 
 
 
482
 
483
 
484
  # Generate the image