use shared.hf_endpoint #16611

Merged · 1 commit · Oct 30, 2024
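For context, shared.hf_endpoint itself is not defined in this diff. A minimal sketch of what modules/shared.py presumably provides (an assumption, not taken from this PR) is that the value comes from the standard HF_ENDPOINT environment variable with huggingface.co as the default:

import os

# Sketch of the assumed definition in modules/shared.py (not part of this diff):
# hf_endpoint is read from the HF_ENDPOINT environment variable so that model
# downloads can be pointed at a Hugging Face mirror; huggingface.co is the default.
hf_endpoint = os.environ.get("HF_ENDPOINT", "https://huggingface.co")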
8 changes: 4 additions & 4 deletions modules/dat_model.py

@@ -1,7 +1,7 @@
 import os
 
 from modules import modelloader, errors
-from modules.shared import cmd_opts, opts
+from modules.shared import cmd_opts, opts, hf_endpoint
 from modules.upscaler import Upscaler, UpscalerData
 from modules.upscaler_utils import upscale_with_model
 
@@ -71,21 +71,21 @@ def get_dat_models(scaler):
     return [
         UpscalerData(
             name="DAT x2",
-            path="https://huggingface.co/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x2.pth",
+            path=f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x2.pth",
             scale=2,
             upscaler=scaler,
             sha256='7760aa96e4ee77e29d4f89c3a4486200042e019461fdb8aa286f49aa00b89b51',
         ),
         UpscalerData(
             name="DAT x3",
-            path="https://huggingface.co/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x3.pth",
+            path=f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x3.pth",
             scale=3,
             upscaler=scaler,
             sha256='581973e02c06f90d4eb90acf743ec9604f56f3c2c6f9e1e2c2b38ded1f80d197',
         ),
         UpscalerData(
             name="DAT x4",
-            path="https://huggingface.co/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x4.pth",
+            path=f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x4.pth",
             scale=4,
             upscaler=scaler,
             sha256='391a6ce69899dff5ea3214557e9d585608254579217169faf3d4c353caff049e',
6 changes: 3 additions & 3 deletions modules/models/sd3/sd3_cond.py

@@ -24,7 +24,7 @@ def __getitem__(self, key):
         return self.file.get_tensor(key)
 
 
-CLIPL_URL = "https://huggingface.co/AUTOMATIC/stable-diffusion-3-medium-text-encoders/resolve/main/clip_l.safetensors"
+CLIPL_URL = f"{shared.hf_endpoint}/AUTOMATIC/stable-diffusion-3-medium-text-encoders/resolve/main/clip_l.safetensors"
 CLIPL_CONFIG = {
     "hidden_act": "quick_gelu",
     "hidden_size": 768,
@@ -33,7 +33,7 @@ def __getitem__(self, key):
     "num_hidden_layers": 12,
 }
 
-CLIPG_URL = "https://huggingface.co/AUTOMATIC/stable-diffusion-3-medium-text-encoders/resolve/main/clip_g.safetensors"
+CLIPG_URL = f"{shared.hf_endpoint}/AUTOMATIC/stable-diffusion-3-medium-text-encoders/resolve/main/clip_g.safetensors"
 CLIPG_CONFIG = {
     "hidden_act": "gelu",
     "hidden_size": 1280,
@@ -43,7 +43,7 @@ def __getitem__(self, key):
     "textual_inversion_key": "clip_g",
 }
 
-T5_URL = "https://huggingface.co/AUTOMATIC/stable-diffusion-3-medium-text-encoders/resolve/main/t5xxl_fp16.safetensors"
+T5_URL = f"{shared.hf_endpoint}/AUTOMATIC/stable-diffusion-3-medium-text-encoders/resolve/main/t5xxl_fp16.safetensors"
 T5_CONFIG = {
     "d_ff": 10240,
     "d_model": 4096,
2 changes: 1 addition & 1 deletion modules/sd_disable_initialization.py

@@ -76,7 +76,7 @@ def transformers_modeling_utils_load_pretrained_model(*args, **kwargs):
         def transformers_utils_hub_get_file_from_cache(original, url, *args, **kwargs):
 
             # this file is always 404, prevent making request
-            if url == 'https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/added_tokens.json' or url == 'openai/clip-vit-large-patch14' and args[0] == 'added_tokens.json':
+            if url == f'{shared.hf_endpoint}/openai/clip-vit-large-patch14/resolve/main/added_tokens.json' or url == 'openai/clip-vit-large-patch14' and args[0] == 'added_tokens.json':
                 return None
 
             try:
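As a usage illustration (the mirror URL below is hypothetical, not part of this PR): once the hard-coded host is replaced, setting HF_ENDPOINT before launch redirects every download URL that is built from shared.hf_endpoint.

import os

# Hypothetical example: point HF_ENDPOINT at a mirror before modules.shared is imported.
os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"  # example mirror, for illustration only

hf_endpoint = os.environ.get("HF_ENDPOINT", "https://huggingface.co")
url = f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x2.pth"
print(url)
# -> https://hf-mirror.com/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x2.pth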