INPaint
# !pip install diffusers transformers accelerate opencv-python
import cv2
import numpy as np
import torch
from PIL import Image

from diffusers import StableDiffusionXLControlNetInpaintPipeline, ControlNetModel, DDIMScheduler
from diffusers.utils import load_image

# Load the source image and resize it to the SDXL working resolution
init_image = load_image(
    "https://huggingface.co/datasets/diffusers/test-arrays/resolve/main/stable_diffusion_inpaint/boy.png"
)
init_image = init_image.resize((1024, 1024))

generator = torch.Generator(device="cpu").manual_seed(1)

# Load the inpainting mask (white = region to repaint) and resize to match
mask_image = load_image(
    "https://huggingface.co/datasets/diffusers/test-arrays/resolve/main/stable_diffusion_inpaint/boy_mask.png"
)
mask_image = mask_image.resize((1024, 1024))


def make_canny_condition(image):
    # Build a 3-channel Canny edge map to condition the ControlNet
    image = np.array(image)
    image = cv2.Canny(image, 100, 200)
    image = image[:, :, None]
    image = np.concatenate([image, image, image], axis=2)
    image = Image.fromarray(image)
    return image


control_image = make_canny_condition(init_image)

# Canny ControlNet for SDXL plus the inpainting pipeline, both in fp16
controlnet = ControlNetModel.from_pretrained(
    "diffusers/controlnet-canny-sdxl-1.0", torch_dtype=torch.float16
)
pipe = StableDiffusionXLControlNetInpaintPipeline.from_pretrained(
    "Chan-Y/Stable-Flash-Lightning", controlnet=controlnet, torch_dtype=torch.float16
)
pipe.enable_model_cpu_offload()

# Generate the inpainted image
image = pipe(
    "a handsome man with ray-ban sunglasses",
    num_inference_steps=20,
    generator=generator,
    eta=1.0,
    image=init_image,
    mask_image=mask_image,
    control_image=control_image,
).images[0]
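
# A minimal follow-up sketch: the pipeline returns a standard PIL image,
# so it can be saved or displayed directly (the file name below is an
# arbitrary example, not part of the original snippet).
image.save("inpainted_boy.png")
image.show()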