[tests] fix AssertionError: Torch not compiled with CUDA enabled (#10356)

Fix test failures on XPU by passing device=torch_device to enable_model_cpu_offload() instead of relying on its CUDA default.
faaany authored Dec 24, 2024
1 parent c0c1168 commit 023b0e0
Showing 8 changed files with 16 additions and 16 deletions.
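
The change is the same in every hunk below: pass the detected test device to enable_model_cpu_offload() instead of relying on its default, which assumes CUDA and therefore raises the AssertionError above on CUDA-less builds of PyTorch (for example XPU-only hosts). A minimal sketch of the resulting call pattern, assuming a diffusers version whose enable_model_cpu_offload() accepts a device argument; the checkpoint id is a placeholder:

import torch
from diffusers import StableDiffusionPipeline
from diffusers.utils.testing_utils import torch_device  # the accelerator detected for the test run

# Placeholder checkpoint id, for illustration only.
pipe = StableDiffusionPipeline.from_pretrained(
    "stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch.float16
)

# Before: pipe.enable_model_cpu_offload()  -> offload hooks implicitly target CUDA.
# After: route the hooks to whatever accelerator torch_device resolved to,
# e.g. "xpu" on an Intel GPU machine, so no CUDA call is ever issued.
pipe.enable_model_cpu_offload(device=torch_device)
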
4 changes: 2 additions & 2 deletions tests/single_file/single_file_testing_utils.py
@@ -378,14 +378,14 @@ def test_single_file_components_with_diffusers_config_local_files_only(
def test_single_file_format_inference_is_same_as_pretrained(self, expected_max_diff=1e-4):
sf_pipe = self.pipeline_class.from_single_file(self.ckpt_path, torch_dtype=torch.float16, safety_checker=None)
sf_pipe.unet.set_default_attn_processor()
-sf_pipe.enable_model_cpu_offload()
+sf_pipe.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs(torch_device)
image_single_file = sf_pipe(**inputs).images[0]

pipe = self.pipeline_class.from_pretrained(self.repo_id, torch_dtype=torch.float16, safety_checker=None)
pipe.unet.set_default_attn_processor()
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs(torch_device)
image = pipe(**inputs).images[0]
4 changes: 2 additions & 2 deletions
@@ -76,14 +76,14 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
pipe = self.pipeline_class.from_pretrained(self.repo_id, controlnet=controlnet)
pipe.unet.set_default_attn_processor()
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

pipe_sf = self.pipeline_class.from_single_file(
self.ckpt_path,
controlnet=controlnet,
)
pipe_sf.unet.set_default_attn_processor()
-pipe_sf.enable_model_cpu_offload()
+pipe_sf.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs(torch_device)
output = pipe(**inputs).images[0]
4 changes: 2 additions & 2 deletions
@@ -73,11 +73,11 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
pipe = self.pipeline_class.from_pretrained(self.repo_id, controlnet=controlnet, safety_checker=None)
pipe.unet.set_default_attn_processor()
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

pipe_sf = self.pipeline_class.from_single_file(self.ckpt_path, controlnet=controlnet, safety_checker=None)
pipe_sf.unet.set_default_attn_processor()
-pipe_sf.enable_model_cpu_offload()
+pipe_sf.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs()
output = pipe(**inputs).images[0]
4 changes: 2 additions & 2 deletions
@@ -67,14 +67,14 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
pipe = self.pipeline_class.from_pretrained(self.repo_id, controlnet=controlnet)
pipe.unet.set_default_attn_processor()
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

pipe_sf = self.pipeline_class.from_single_file(
self.ckpt_path,
controlnet=controlnet,
)
pipe_sf.unet.set_default_attn_processor()
-pipe_sf.enable_model_cpu_offload()
+pipe_sf.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs()
output = pipe(**inputs).images[0]
4 changes: 2 additions & 2 deletions
@@ -49,14 +49,14 @@ def test_single_file_format_inference_is_same_as_pretrained(self):

prompt = "a cat sitting on a park bench"
pipe = StableDiffusionUpscalePipeline.from_pretrained(self.repo_id)
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

generator = torch.Generator("cpu").manual_seed(0)
output = pipe(prompt=prompt, image=image, generator=generator, output_type="np", num_inference_steps=3)
image_from_pretrained = output.images[0]

pipe_from_single_file = StableDiffusionUpscalePipeline.from_single_file(self.ckpt_path)
-pipe_from_single_file.enable_model_cpu_offload()
+pipe_from_single_file.enable_model_cpu_offload(device=torch_device)

generator = torch.Generator("cpu").manual_seed(0)
output_from_single_file = pipe_from_single_file(
4 changes: 2 additions & 2 deletions
@@ -76,7 +76,7 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
torch_dtype=torch.float16,
safety_checker=None,
)
-pipe_single_file.enable_model_cpu_offload()
+pipe_single_file.enable_model_cpu_offload(device=torch_device)
pipe_single_file.set_progress_bar_config(disable=None)

inputs = self.get_inputs()
@@ -88,7 +88,7 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
torch_dtype=torch.float16,
safety_checker=None,
)
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs()
images = pipe(**inputs).images[0]
4 changes: 2 additions & 2 deletions
@@ -69,15 +69,15 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
self.ckpt_path, controlnet=controlnet, torch_dtype=torch.float16
)
pipe_single_file.unet.set_default_attn_processor()
-pipe_single_file.enable_model_cpu_offload()
+pipe_single_file.enable_model_cpu_offload(device=torch_device)
pipe_single_file.set_progress_bar_config(disable=None)

inputs = self.get_inputs(torch_device)
single_file_images = pipe_single_file(**inputs).images[0]

pipe = self.pipeline_class.from_pretrained(self.repo_id, controlnet=controlnet, torch_dtype=torch.float16)
pipe.unet.set_default_attn_processor()
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

inputs = self.get_inputs(torch_device)
images = pipe(**inputs).images[0]
4 changes: 2 additions & 2 deletions
@@ -85,7 +85,7 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
pipe = self.pipeline_class.from_pretrained(self.repo_id, torch_dtype=torch.float16)
pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)
pipe.unet.set_default_attn_processor()
-pipe.enable_model_cpu_offload()
+pipe.enable_model_cpu_offload(device=torch_device)

generator = torch.Generator(device="cpu").manual_seed(0)
image = pipe(
@@ -95,7 +95,7 @@ def test_single_file_format_inference_is_same_as_pretrained(self):
pipe_single_file = self.pipeline_class.from_single_file(self.ckpt_path, torch_dtype=torch.float16)
pipe_single_file.scheduler = DDIMScheduler.from_config(pipe_single_file.scheduler.config)
pipe_single_file.unet.set_default_attn_processor()
-pipe_single_file.enable_model_cpu_offload()
+pipe_single_file.enable_model_cpu_offload(device=torch_device)

generator = torch.Generator(device="cpu").manual_seed(0)
image_single_file = pipe_single_file(

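For context, torch_device in the diffusers test utilities resolves to whichever accelerator is available on the machine running the tests, which is why forwarding it above fixes the XPU runs. The helper below is a hypothetical sketch of that kind of resolution, not diffusers' actual implementation; the name pick_test_device and the exact probing order are assumptions:

import torch

def pick_test_device() -> str:
    # Probe for an accelerator; fall back to CPU if none is present.
    if torch.cuda.is_available():
        return "cuda"
    if hasattr(torch, "xpu") and torch.xpu.is_available():  # Intel GPUs
        return "xpu"
    if torch.backends.mps.is_available():  # Apple silicon
        return "mps"
    return "cpu"

# On an XPU-only machine this returns "xpu"; hard-coding "cuda" (the old
# enable_model_cpu_offload() default) is what raised
# "AssertionError: Torch not compiled with CUDA enabled".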