RuntimeError: expected scalar type Float but found Half
#15 opened by fshewl
Hi, I followed the code from the docs to generate an image:
import torch
from diffusers import StableDiffusion3Pipeline

# Load SD3 in half precision and move it to the GPU
base = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16
)
base = base.to("cuda")

negative_prompt = ""  # defined elsewhere in my script

def generate_image(prompt):
    image = base(
        prompt=prompt,
        negative_prompt=negative_prompt,
        num_inference_steps=28,
        guidance_scale=7.0,
    ).images[0]
    return image
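For context, this is roughly how the helper gets called; in the real server it runs inside do_POST with the prompt taken from the incoming request, so the prompt string and output path here are just placeholders:

# Placeholder standalone call; the actual server passes image_text from the POST body.
img = generate_image("a photo of an astronaut riding a horse")
img.save("output.png")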
But I got the following error. Does anyone know what the problem is?
Traceback (most recent call last):
File "/usr/lib/python3.10/socketserver.py", line 316, in _handle_request_noblock
self.process_request(request, client_address)
File "/usr/lib/python3.10/socketserver.py", line 347, in process_request
self.finish_request(request, client_address)
File "/usr/lib/python3.10/socketserver.py", line 360, in finish_request
self.RequestHandlerClass(request, client_address, self)
File "/usr/lib/python3.10/http/server.py", line 668, in __init__
super().__init__(*args, **kwargs)
File "/usr/lib/python3.10/socketserver.py", line 747, in __init__
self.handle()
File "/usr/lib/python3.10/http/server.py", line 433, in handle
self.handle_one_request()
File "/usr/lib/python3.10/http/server.py", line 421, in handle_one_request
method()
File "//server.py", line 83, in do_POST
img = generate_image(image_text) # You'll need a function to generate the image
File "//server.py", line 104, in generate_image
image = base(
File "/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py", line 778, in __call__
) = self.encode_prompt(
File "/usr/local/lib/python3.10/dist-packages/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py", line 403, in encode_prompt
t5_prompt_embed = self._get_t5_prompt_embeds(
File "/usr/local/lib/python3.10/dist-packages/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py", line 242, in _get_t5_prompt_embeds
prompt_embeds = self.text_encoder_3(text_input_ids.to(device))[0]
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/transformers/models/t5/modeling_t5.py", line 1972, in forward
encoder_outputs = self.encoder(
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/transformers/models/t5/modeling_t5.py", line 1107, in forward
layer_outputs = layer_module(
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/transformers/models/t5/modeling_t5.py", line 687, in forward
self_attention_outputs = self.layer[0](
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/transformers/models/t5/modeling_t5.py", line 593, in forward
normed_hidden_states = self.layer_norm(hidden_states)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/apex/normalization/fused_layer_norm.py", line 386, in forward
return fused_rms_norm_affine(input, self.weight, self.normalized_shape, self.eps)
File "/usr/local/lib/python3.10/dist-packages/apex/normalization/fused_layer_norm.py", line 189, in fused_rms_norm_affine
return FusedRMSNormAffineFunction.apply(*args)
File "/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py", line 539, in apply
return super().apply(*args, **kwargs) # type: ignore[misc]
File "/usr/local/lib/python3.10/dist-packages/apex/normalization/fused_layer_norm.py", line 69, in forward
output, invvar = fused_layer_norm_cuda.rms_forward_affine(
RuntimeError: expected scalar type Float but found Half
The traceback ends inside apex's fused RMSNorm kernel (fused_layer_norm.py), which the T5 text encoder picks up automatically when apex is installed; an older apex build rejects the fp16 activations here. Updating apex resolves the Float/Half mismatch:

pip install -U apex
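If updating apex is not an option, another way to sidestep the apex code path is to drop the T5 text encoder, which the diffusers SD3 docs describe for reducing memory use. A minimal sketch, with placeholder prompt and settings:

import torch
from diffusers import StableDiffusion3Pipeline

# Sketch: load SD3 without the T5 text encoder (and its tokenizer),
# so the apex FusedRMSNorm path that raised the error is never hit.
pipe = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium-diffusers",
    text_encoder_3=None,
    tokenizer_3=None,
    torch_dtype=torch.float16,
).to("cuda")

image = pipe(
    prompt="a photo of a cat",      # placeholder prompt
    negative_prompt="",
    num_inference_steps=28,
    guidance_scale=7.0,
).images[0]

Without T5, the pipeline relies only on the two CLIP text encoders, so very long prompts may lose some fidelity, but short prompts generally work fine.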
Thanks @jiangzeyinzi, it works for me.
fshewl changed discussion status to closed