Skip to content

Commit

Permalink
Add ruff rule for empty line with trailing whitespace.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed Dec 28, 2024
1 parent d170292 commit b504bd6
Show file tree
Hide file tree
Showing 20 changed files with 106 additions and 106 deletions.
6 changes: 3 additions & 3 deletions api_server/services/terminal_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def get_terminal_size(self):
def update_size(self):
columns, lines = self.get_terminal_size()
changed = False

if columns != self.cols:
self.cols = columns
changed = True
Expand All @@ -48,9 +48,9 @@ def unsubscribe(self, client_id):
def send_messages(self, entries):
if not len(entries) or not len(self.subscriptions):
return

new_size = self.update_size()

for client_id in self.subscriptions.copy(): # prevent: Set changed size during iteration
if client_id not in self.server.sockets:
# Automatically unsub if the socket has disconnected
Expand Down
2 changes: 1 addition & 1 deletion app/model_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ def get_model_previews(self, filepath: str) -> list[str | BytesIO]:
safetensors_images = json.loads(safetensors_images)
for image in safetensors_images:
result.append(BytesIO(base64.b64decode(image)))

return result

def __exit__(self, exc_type, exc_value, traceback):
Expand Down
34 changes: 17 additions & 17 deletions comfy/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def __init__(self, strength_model=1.0, strength_clip=1.0):
@property
def strength_model(self):
    """Effective model-weight strength: the stored base value scaled by this hook's overall strength."""
    base = self._strength_model
    return base * self.strength

@property
def strength_clip(self):
    """Effective clip-weight strength: the stored base value scaled by this hook's overall strength."""
    base = self._strength_clip
    return base * self.strength
Expand All @@ -118,7 +118,7 @@ def add_hook_patches(self, model: 'ModelPatcher', model_options: dict, target: E
strength = self._strength_model
else:
strength = self._strength_clip

if self.need_weight_init:
key_map = {}
if target == EnumWeightTarget.Model:
Expand Down Expand Up @@ -151,7 +151,7 @@ class PatchHook(Hook):
def __init__(self):
    """Initialize as a Patch-type hook; the patches mapping starts unset and is filled in later."""
    # Tag this hook with the Patch variant of the project's hook-type enum.
    super().__init__(hook_type=EnumHookType.Patch)
    # NOTE(review): deliberately None rather than {} — presumably "no patches assigned yet"; confirm callers check for None.
    self.patches: dict = None

def clone(self, subtype: Callable=None):
if subtype is None:
subtype = type(self)
Expand All @@ -164,7 +164,7 @@ class ObjectPatchHook(Hook):
def __init__(self):
    """Initialize as an ObjectPatch-type hook; the object_patches mapping starts unset."""
    # Tag this hook with the ObjectPatch variant of the project's hook-type enum.
    super().__init__(hook_type=EnumHookType.ObjectPatch)
    # NOTE(review): None (not {}) appears to mean "no object patches assigned yet" — confirm against clone().
    self.object_patches: dict = None

def clone(self, subtype: Callable=None):
if subtype is None:
subtype = type(self)
Expand All @@ -179,7 +179,7 @@ def __init__(self, key: str=None, models: list['ModelPatcher']=None):
self.key = key
self.models = models
self.append_when_same = True

def clone(self, subtype: Callable=None):
if subtype is None:
subtype = type(self)
Expand Down Expand Up @@ -216,7 +216,7 @@ def clone(self, subtype: Callable=None):
c: WrapperHook = super().clone(subtype)
c.wrappers_dict = self.wrappers_dict
return c

def add_hook_patches(self, model: 'ModelPatcher', model_options: dict, target: EnumWeightTarget, registered: list[Hook]):
if not self.should_register(model, model_options, target, registered):
return False
Expand All @@ -230,15 +230,15 @@ def __init__(self, key: str=None, injections: list['PatcherInjection']=None):
super().__init__(hook_type=EnumHookType.SetInjections)
self.key = key
self.injections = injections

def clone(self, subtype: Callable=None):
    """Return a copy of this hook; subclasses pass their own type via *subtype* so the clone keeps the right class."""
    cloned_type = subtype if subtype is not None else type(self)
    c: SetInjectionsHook = super().clone(cloned_type)
    c.key = self.key
    # Copy the list when present so the clone's injections can diverge; preserve None/empty as-is.
    c.injections = self.injections.copy() if self.injections else self.injections
    return c

def add_hook_injections(self, model: 'ModelPatcher'):
    """Apply this hook's injections to *model*. Currently a stub — intentionally does nothing."""
    # TODO: add functionality
    pass
Expand All @@ -250,10 +250,10 @@ def __init__(self):
def add(self, hook: Hook):
    """Append *hook* to this group unless an equal hook is already present (no duplicates)."""
    already_present = hook in self.hooks
    if not already_present:
        self.hooks.append(hook)

def contains(self, hook: Hook):
    """Return True if an equal (or identical) hook is already tracked by this group."""
    # `is or ==` mirrors the identity-then-equality semantics of the `in` operator.
    return any(existing is hook or existing == hook for existing in self.hooks)

def clone(self):
c = HookGroup()
for hook in self.hooks:
Expand All @@ -266,7 +266,7 @@ def clone_and_combine(self, other: 'HookGroup'):
for hook in other.hooks:
c.add(hook.clone())
return c

def set_keyframes_on_hooks(self, hook_kf: 'HookKeyframeGroup'):
if hook_kf is None:
hook_kf = HookKeyframeGroup()
Expand Down Expand Up @@ -365,7 +365,7 @@ def __init__(self, strength: float, start_percent=0.0, guarantee_steps=1):
self.start_percent = float(start_percent)
self.start_t = 999999999.9
self.guarantee_steps = guarantee_steps

def clone(self):
c = HookKeyframe(strength=self.strength,
start_percent=self.start_percent, guarantee_steps=self.guarantee_steps)
Expand Down Expand Up @@ -395,7 +395,7 @@ def reset(self):
self._current_strength = None
self.curr_t = -1.
self._set_first_as_current()

def add(self, keyframe: HookKeyframe):
# add to end of list, then sort
self.keyframes.append(keyframe)
Expand All @@ -407,20 +407,20 @@ def _set_first_as_current(self):
self._current_keyframe = self.keyframes[0]
else:
self._current_keyframe = None

def has_index(self, index: int):
    """Return True if *index* is a valid (non-negative, in-range) position in the keyframe list."""
    return 0 <= index < len(self.keyframes)

def is_empty(self):
    """Return True when this group holds no keyframes."""
    return not self.keyframes

def clone(self):
    """Return a copy of this group whose keyframes are themselves clones of the originals."""
    c = HookKeyframeGroup()
    for keyframe in self.keyframes:
        c.keyframes.append(keyframe.clone())
    # Re-point the clone's current-keyframe tracker at its own first keyframe (or None if empty).
    c._set_first_as_current()
    return c

def initialize_timesteps(self, model: 'BaseModel'):
for keyframe in self.keyframes:
keyframe.start_t = model.model_sampling.percent_to_sigma(keyframe.start_percent)
Expand Down Expand Up @@ -565,7 +565,7 @@ def load_hook_lora_for_models(model: 'ModelPatcher', clip: 'CLIP', lora: dict[st
else:
k = ()
new_modelpatcher = None

if clip is not None:
new_clip = clip.clone()
k1 = new_clip.patcher.add_hook_patches(hook=hook, patches=loaded, strength_patch=strength_clip)
Expand Down
30 changes: 15 additions & 15 deletions comfy/model_patcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ def set_model_options_pre_cfg_function(model_options, pre_cfg_function, disable_

def create_model_options_clone(orig_model_options: dict):
    """Return a copy of *orig_model_options* with nested dicts duplicated (delegates to copy_nested_dicts)."""
    return comfy.patcher_extension.copy_nested_dicts(orig_model_options)

def create_hook_patches_clone(orig_hook_patches):
new_hook_patches = {}
for hook_ref in orig_hook_patches:
Expand Down Expand Up @@ -141,7 +141,7 @@ def __init__(self, model: 'ModelPatcher', skip_and_inject_on_exit_only=False):
self.was_injected = False
self.prev_skip_injection = False
self.skip_and_inject_on_exit_only = skip_and_inject_on_exit_only

def __enter__(self):
self.was_injected = False
self.prev_skip_injection = self.model.skip_injection
Expand All @@ -164,7 +164,7 @@ def __init__(self, initial: int, minimum=0):
self.value = initial
self.minimum = minimum
# TODO: add a safe limit besides 0

def use(self, weight: torch.Tensor):
weight_size = weight.nelement() * weight.element_size()
if self.is_useable(weight_size):
Expand Down Expand Up @@ -711,7 +711,7 @@ def partially_unload(self, device_to, memory_to_free=0):
else:
comfy.utils.set_attr_param(self.model, key, bk.weight)
self.backup.pop(key)

weight_key = "{}.weight".format(n)
bias_key = "{}.bias".format(n)
if move_weight:
Expand Down Expand Up @@ -789,15 +789,15 @@ def add_callback(self, call_type: str, callback: Callable):
def add_callback_with_key(self, call_type: str, key: str, callback: Callable):
    """Register *callback* under the given call type and key, creating nested buckets as needed."""
    by_key = self.callbacks.setdefault(call_type, {})
    by_key.setdefault(key, []).append(callback)

def remove_callbacks_with_key(self, call_type: str, key: str):
    """Drop every callback registered under call_type/key; missing type or key is a no-op."""
    # pop with a default mirrors the original's "check then pop" without raising on absence.
    self.callbacks.get(call_type, {}).pop(key, None)

def get_callbacks(self, call_type: str, key: str):
    """Return the callbacks registered under call_type/key, or an empty list if none exist."""
    registered = self.callbacks.get(call_type, {})
    return registered.get(key, [])

def get_all_callbacks(self, call_type: str):
c_list = []
for c in self.callbacks.get(call_type, {}).values():
Expand All @@ -810,7 +810,7 @@ def add_wrapper(self, wrapper_type: str, wrapper: Callable):
def add_wrapper_with_key(self, wrapper_type: str, key: str, wrapper: Callable):
    """Register *wrapper* under the given wrapper type and key, creating nested buckets as needed."""
    by_key = self.wrappers.setdefault(wrapper_type, {})
    by_key.setdefault(key, []).append(wrapper)

def remove_wrappers_with_key(self, wrapper_type: str, key: str):
w = self.wrappers.get(wrapper_type, {})
if key in w:
Expand All @@ -831,7 +831,7 @@ def set_attachments(self, key: str, attachment):
def remove_attachments(self, key: str):
    """Remove the attachment stored under *key*; a missing key is a no-op."""
    self.attachments.pop(key, None)

def get_attachment(self, key: str):
    """Return the attachment stored under *key*, or None when absent."""
    return self.attachments.get(key)

Expand All @@ -851,7 +851,7 @@ def remove_additional_models(self, key: str):

def get_additional_models_with_key(self, key: str):
    """Return the additional models registered under *key*, or an empty list if none."""
    models_for_key = self.additional_models.get(key, [])
    return models_for_key

def get_additional_models(self):
all_models = []
for models in self.additional_models.values():
Expand Down Expand Up @@ -906,7 +906,7 @@ def pre_run(self):
self.model.current_patcher = self
for callback in self.get_all_callbacks(CallbacksMP.ON_PRE_RUN):
callback(self)

def prepare_state(self, timestep):
    """Run every registered ON_PREPARE_STATE callback with this patcher and the current timestep."""
    for cb in self.get_all_callbacks(CallbacksMP.ON_PREPARE_STATE):
        cb(self, timestep)
Expand All @@ -918,7 +918,7 @@ def restore_hook_patches(self):

def set_hook_mode(self, hook_mode: comfy.hooks.EnumHookMode):
    """Set this patcher's hook mode. Plain setter; the mode's semantics live in comfy.hooks.EnumHookMode."""
    self.hook_mode = hook_mode

def prepare_hook_patches_current_keyframe(self, t: torch.Tensor, hook_group: comfy.hooks.HookGroup):
curr_t = t[0]
reset_current_hooks = False
Expand Down Expand Up @@ -975,7 +975,7 @@ def add_hook_patches(self, hook: comfy.hooks.WeightHook, patches, strength_patch
key = k[0]
if len(k) > 2:
function = k[2]

if key in model_sd:
p.add(k)
current_patches: list[tuple] = current_hook_patches.get(key, [])
Expand Down Expand Up @@ -1063,7 +1063,7 @@ def clear_cached_hook_weights(self):
def patch_hook_weight_to_device(self, hooks: comfy.hooks.HookGroup, combined_patches: dict, key: str, original_weights: dict, memory_counter: MemoryCounter):
if key not in combined_patches:
return

weight, set_func, convert_func = get_key_weight(self.model, key)
weight: torch.Tensor
if key not in self.hook_backup:
Expand Down Expand Up @@ -1098,7 +1098,7 @@ def patch_hook_weight_to_device(self, hooks: comfy.hooks.HookGroup, combined_pat
del temp_weight
del out_weight
del weight

def unpatch_hooks(self) -> None:
with self.use_ejected():
if len(self.hook_backup) == 0:
Expand All @@ -1107,7 +1107,7 @@ def unpatch_hooks(self) -> None:
keys = list(self.hook_backup.keys())
for k in keys:
comfy.utils.copy_to_param(self.model, k, self.hook_backup[k][0].to(device=self.hook_backup[k][1]))

self.hook_backup.clear()
self.current_hooks = None

Expand Down
6 changes: 3 additions & 3 deletions comfy/patcher_extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,12 +96,12 @@ def __init__(self, original: Callable, class_obj: object, wrappers: list[Callabl
self.wrappers = wrappers.copy()
self.idx = idx
self.is_last = idx == len(wrappers)

def __call__(self, *args, **kwargs):
    """Calls the next wrapper or original function, whichever is appropriate."""
    # Advance the chain by one and delegate execution to the new executor.
    return self._create_next_executor().execute(*args, **kwargs)

def execute(self, *args, **kwargs):
"""Used to initiate executor internally - DO NOT use this if you received executor in wrapper."""
args = list(args)
Expand All @@ -121,7 +121,7 @@ def _create_next_executor(self) -> 'WrapperExecutor':
@classmethod
def new_executor(cls, original: Callable, wrappers: list[Callable], idx=0):
    """Build an executor for a plain function (no bound class object)."""
    kwargs = dict(class_obj=None, wrappers=wrappers, idx=idx)
    return cls(original, **kwargs)

@classmethod
def new_class_executor(cls, original: Callable, class_obj: object, wrappers: list[Callable], idx=0):
    """Build an executor bound to *class_obj* (wrapping a method rather than a free function)."""
    chain_config = dict(wrappers=wrappers, idx=idx)
    return cls(original, class_obj, **chain_config)
Expand Down
2 changes: 1 addition & 1 deletion comfy_execution/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def add_node(self, node_unique_id, include_lazy=False, subgraph_nodes=None):
if (include_lazy or not is_lazy) and not self.is_cached(from_node_id):
node_ids.append(from_node_id)
links.append((from_node_id, from_socket, unique_id))

for link in links:
self.add_strong_link(*link)

Expand Down
Loading

0 comments on commit b504bd6

Please sign in to comment.