Skip to content

Commit d7dee3a

Browse files
authored
feat(diffusers): add support for Sana pipelines (#4603)
Signed-off-by: Ettore Di Giacinto <[email protected]>
1 parent b8d74e5 commit d7dee3a

File tree

1 file changed: +8 −1 lines changed

backend/python/diffusers/backend.py

+8-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
import grpc
1919

20-
from diffusers import StableDiffusion3Pipeline, StableDiffusionXLPipeline, StableDiffusionDepth2ImgPipeline, DPMSolverMultistepScheduler, StableDiffusionPipeline, DiffusionPipeline, \
20+
from diffusers import SanaPipeline, StableDiffusion3Pipeline, StableDiffusionXLPipeline, StableDiffusionDepth2ImgPipeline, DPMSolverMultistepScheduler, StableDiffusionPipeline, DiffusionPipeline, \
2121
EulerAncestralDiscreteScheduler, FluxPipeline, FluxTransformer2DModel
2222
from diffusers import StableDiffusionImg2ImgPipeline, AutoPipelineForText2Image, ControlNetModel, StableVideoDiffusionPipeline
2323
from diffusers.pipelines.stable_diffusion import safety_checker
@@ -275,6 +275,13 @@ def LoadModel(self, request, context):
275275

276276
if request.LowVRAM:
277277
self.pipe.enable_model_cpu_offload()
278+
elif request.PipelineType == "SanaPipeline":
279+
self.pipe = SanaPipeline.from_pretrained(
280+
request.Model,
281+
variant="bf16",
282+
torch_dtype=torch.bfloat16)
283+
self.pipe.vae.to(torch.bfloat16)
284+
self.pipe.text_encoder.to(torch.bfloat16)
278285

279286
if CLIPSKIP and request.CLIPSkip != 0:
280287
self.clip_skip = request.CLIPSkip

Comments (0)