Skip to content

Commit

Permalink
fix for float8_e5m2 freeze model
Browse files Browse the repository at this point in the history
  • Loading branch information
wkpark committed Sep 17, 2024
1 parent acf31be commit e063757
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions modules/sd_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -581,7 +581,7 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer

if found_unet_dtype in (torch.float16, torch.float32, torch.bfloat16):
model.half()
elif found_unet_dtype in (torch.float8_e4m3fn,):
elif found_unet_dtype in (torch.float8_e4m3fn, torch.float8_e5m2):
pass
else:
print("Failed to get a valid UNet dtype. ignore...")
Expand All @@ -608,7 +608,7 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer
if hasattr(module, 'fp16_bias'):
del module.fp16_bias

if found_unet_dtype not in (torch.float8_e4m3fn,) and check_fp8(model):
if found_unet_dtype not in (torch.float8_e4m3fn,torch.float8_e5m2) and check_fp8(model):
devices.fp8 = True

# do not convert vae, text_encoders.clip_l, clip_g, t5xxl
Expand Down

0 comments on commit e063757

Please sign in to comment.