mirror of https://github.com/comfyanonymous/ComfyUI.git synced 2025-08-02 06:44:49 +08:00

Compare commits


11 Commits

Author SHA1 Message Date
Jedrzej Kosinski
65fcf9a26b Remove pyproject.toml changes - should be done in separate PR 2025-07-30 20:02:03 -07:00
Jedrzej Kosinski
040e062800 Removed nodes_v3_test.py 2025-07-30 19:51:25 -07:00
Jedrzej Kosinski
22cbb96c63 Merge pull request #9124 from comfyanonymous/v3-definition-wip
V3 update - fixed ComfyExtension, removed v3/v1 test nodes from loading
2025-07-30 19:46:33 -07:00
Jedrzej Kosinski
006a8981f7 Removed nodes_v1_test.py, removed both v1/v3 test nodes from being attempted to be loaded in nodes.py 2025-07-30 19:45:16 -07:00
Jedrzej Kosinski
f90beb73f8 Fix ComfyExtension registration not working 2025-07-30 19:42:44 -07:00
Jedrzej Kosinski
7a522e4b6f Merge pull request #9103 from guill/js/extension-api-example
`ComfyExtension` Example (PR to v3-definition)
2025-07-30 19:26:24 -07:00
Jedrzej Kosinski
1d72917fad Merge branch 'v3-definition' into js/extension-api-example 2025-07-30 19:25:24 -07:00
Jedrzej Kosinski
6405730e00 Merge pull request #9120 from comfyanonymous/v3-definition-wip
V3 update - move ExecutionBlocker to graph_utils.py to avoid CI torch import too soon
2025-07-30 15:11:23 -07:00
Jedrzej Kosinski
ccfa2a80ff Moved ExecutionBlocker to graph_utils.py, maintained backwards compatibility 2025-07-30 15:09:39 -07:00
Jedrzej Kosinski
babd7bbf00 Adding TYPE_CHECKING ifs into _io.py to try to clean up failing CI 2025-07-30 14:59:15 -07:00
Jacob Segal
e9a9762ca0 Create a ComfyExtension class for future growth 2025-07-29 16:44:53 -07:00
9 changed files with 111 additions and 427 deletions

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Type, TYPE_CHECKING
from comfy_api.internal import ComfyAPIBase
from comfy_api.internal.singleton import ProxiedSingleton
@@ -75,6 +76,19 @@ class ComfyAPI_latest(ComfyAPIBase):
execution: Execution
class ComfyExtension(ABC):
async def on_load(self) -> None:
"""
Called when an extension is loaded.
This should be used to initialize any global resources needed by the extension.
"""
@abstractmethod
async def get_node_list(self) -> list[type[io.ComfyNode]]:
"""
Returns a list of nodes that this extension provides.
"""
class Input:
Image = ImageInput
Audio = AudioInput
@@ -106,4 +120,5 @@ __all__ = [
"Input",
"InputImpl",
"Types",
"ComfyExtension",
]
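
The hunk above adds the `ComfyExtension` abstract base class to `comfy_api.latest`: `on_load()` is an optional async hook for global setup, and `get_node_list()` is the single required override that hands the extension's node classes to the loader. A minimal sketch of a subclass, assuming a hypothetical `MyNode` defined elsewhere as an `io.ComfyNode`:

```python
from comfy_api.latest import ComfyExtension, io
from my_pack.nodes import MyNode  # hypothetical io.ComfyNode subclass

class MyExtension(ComfyExtension):
    async def on_load(self) -> None:
        # optional: initialize any global resources the extension needs
        pass

    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        # required: return every node class this extension provides
        return [MyNode]
```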

View File

@@ -6,26 +6,27 @@ from abc import ABC, abstractmethod
from collections import Counter
from dataclasses import asdict, dataclass
from enum import Enum
from typing import Any, Callable, Literal, TypedDict, TypeVar
from typing import Any, Callable, Literal, TypedDict, TypeVar, TYPE_CHECKING
from typing_extensions import NotRequired, final
# used for type hinting
import torch
from spandrel import ImageModelDescriptor
from typing_extensions import NotRequired, final
from comfy.clip_vision import ClipVisionModel
from comfy.clip_vision import Output as ClipVisionOutput_
from comfy.controlnet import ControlNet
from comfy.hooks import HookGroup, HookKeyframeGroup
from comfy.model_patcher import ModelPatcher
from comfy.samplers import CFGGuider, Sampler
from comfy.sd import CLIP, VAE
from comfy.sd import StyleModel as StyleModel_
from comfy_api.input import VideoInput
if TYPE_CHECKING:
from spandrel import ImageModelDescriptor
from comfy.clip_vision import ClipVisionModel
from comfy.clip_vision import Output as ClipVisionOutput_
from comfy.controlnet import ControlNet
from comfy.hooks import HookGroup, HookKeyframeGroup
from comfy.model_patcher import ModelPatcher
from comfy.samplers import CFGGuider, Sampler
from comfy.sd import CLIP, VAE
from comfy.sd import StyleModel as StyleModel_
from comfy_api.input import VideoInput
from comfy_api.internal import (_ComfyNodeInternal, _NodeOutputInternal, classproperty, copy_class, first_real_override, is_class,
prune_dict, shallow_clone_class)
from comfy_api.latest._resources import Resources, ResourcesLocal
from comfy_execution.graph import ExecutionBlocker
from comfy_execution.graph_utils import ExecutionBlocker
# from comfy_extras.nodes_images import SVG as SVG_ # NOTE: needs to be moved before can be imported due to circular reference
@@ -543,7 +544,8 @@ class Conditioning(ComfyTypeIO):
@comfytype(io_type="SAMPLER")
class Sampler(ComfyTypeIO):
Type = Sampler
if TYPE_CHECKING:
Type = Sampler
@comfytype(io_type="SIGMAS")
class Sigmas(ComfyTypeIO):
@@ -555,44 +557,54 @@ class Noise(ComfyTypeIO):
@comfytype(io_type="GUIDER")
class Guider(ComfyTypeIO):
Type = CFGGuider
if TYPE_CHECKING:
Type = CFGGuider
@comfytype(io_type="CLIP")
class Clip(ComfyTypeIO):
Type = CLIP
if TYPE_CHECKING:
Type = CLIP
@comfytype(io_type="CONTROL_NET")
class ControlNet(ComfyTypeIO):
Type = ControlNet
if TYPE_CHECKING:
Type = ControlNet
@comfytype(io_type="VAE")
class Vae(ComfyTypeIO):
Type = VAE
if TYPE_CHECKING:
Type = VAE
@comfytype(io_type="MODEL")
class Model(ComfyTypeIO):
Type = ModelPatcher
if TYPE_CHECKING:
Type = ModelPatcher
@comfytype(io_type="CLIP_VISION")
class ClipVision(ComfyTypeIO):
Type = ClipVisionModel
if TYPE_CHECKING:
Type = ClipVisionModel
@comfytype(io_type="CLIP_VISION_OUTPUT")
class ClipVisionOutput(ComfyTypeIO):
Type = ClipVisionOutput_
if TYPE_CHECKING:
Type = ClipVisionOutput_
@comfytype(io_type="STYLE_MODEL")
class StyleModel(ComfyTypeIO):
Type = StyleModel_
if TYPE_CHECKING:
Type = StyleModel_
@comfytype(io_type="GLIGEN")
class Gligen(ComfyTypeIO):
'''ModelPatcher that wraps around a 'Gligen' model.'''
Type = ModelPatcher
if TYPE_CHECKING:
Type = ModelPatcher
@comfytype(io_type="UPSCALE_MODEL")
class UpscaleModel(ComfyTypeIO):
Type = ImageModelDescriptor
if TYPE_CHECKING:
Type = ImageModelDescriptor
@comfytype(io_type="AUDIO")
class Audio(ComfyTypeIO):
@@ -603,7 +615,8 @@ class Audio(ComfyTypeIO):
@comfytype(io_type="VIDEO")
class Video(ComfyTypeIO):
Type = VideoInput
if TYPE_CHECKING:
Type = VideoInput
@comfytype(io_type="SVG")
class SVG(ComfyTypeIO):
@@ -629,11 +642,13 @@ class Mesh(ComfyTypeIO):
@comfytype(io_type="HOOKS")
class Hooks(ComfyTypeIO):
Type = HookGroup
if TYPE_CHECKING:
Type = HookGroup
@comfytype(io_type="HOOK_KEYFRAMES")
class HookKeyframes(ComfyTypeIO):
Type = HookKeyframeGroup
if TYPE_CHECKING:
Type = HookKeyframeGroup
@comfytype(io_type="TIMESTEPS_RANGE")
class TimestepsRange(ComfyTypeIO):
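
The changes above follow the standard `typing.TYPE_CHECKING` idiom: the torch-dependent imports (`spandrel`, `comfy.sd`, `comfy.model_patcher`, and friends) and the `Type = ...` aliases that reference them only run under a static type checker, so importing the module at runtime no longer pulls torch in. A generic sketch of the idiom, with the module and class names invented for illustration:

```python
from __future__ import annotations  # keep annotations as strings at runtime
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # evaluated only by static type checkers, never at runtime
    from heavy_library import HeavyModel  # hypothetical torch-backed import

def load_model(path: str) -> HeavyModel:
    # defer the real import until the function is actually called
    from heavy_library import HeavyModel
    return HeavyModel(path)
```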

View File

@@ -6,7 +6,7 @@ from comfy_api.latest import (
)
from typing import Type, TYPE_CHECKING
from comfy_api.internal.async_to_sync import create_sync_class
from comfy_api.latest import io, ui #noqa: F401
from comfy_api.latest import io, ui, ComfyExtension #noqa: F401
class ComfyAPIAdapter_v0_0_2(ComfyAPI_latest):
@@ -41,4 +41,5 @@ __all__ = [
"Input",
"InputImpl",
"Types",
"ComfyExtension",
]

View File

@@ -4,9 +4,12 @@ from typing import Type, Literal
import nodes
import asyncio
import inspect
from comfy_execution.graph_utils import is_link
from comfy_execution.graph_utils import is_link, ExecutionBlocker
from comfy.comfy_types.node_typing import ComfyNodeABC, InputTypeDict, InputTypeOptions
# NOTE: ExecutionBlocker code got moved to graph_utils.py to prevent torch being imported too soon during unit tests
ExecutionBlocker = ExecutionBlocker
class DependencyCycleError(Exception):
pass
@@ -294,21 +297,3 @@ class ExecutionList(TopologicalSort):
del blocked_by[node_id]
to_remove = [node_id for node_id in blocked_by if len(blocked_by[node_id]) == 0]
return list(blocked_by.keys())
class ExecutionBlocker:
"""
Return this from a node and any users will be blocked with the given error message.
If the message is None, execution will be blocked silently instead.
Generally, you should avoid using this functionality unless absolutely necessary. Whenever it's
possible, a lazy input will be more efficient and have a better user experience.
This functionality is useful in two cases:
1. You want to conditionally prevent an output node from executing. (Particularly a built-in node
like SaveImage. For your own output nodes, I would recommend just adding a BOOL input and using
lazy evaluation to let it conditionally disable itself.)
2. You have a node with multiple possible outputs, some of which are invalid and should not be used.
(I would recommend not making nodes like this in the future -- instead, make multiple nodes with
different outputs. Unfortunately, there are several popular existing nodes using this pattern.)
"""
def __init__(self, message):
self.message = message

View File

@@ -137,3 +137,19 @@ def add_graph_prefix(graph, outputs, prefix):
return new_graph, tuple(new_outputs)
class ExecutionBlocker:
"""
Return this from a node and any users will be blocked with the given error message.
If the message is None, execution will be blocked silently instead.
Generally, you should avoid using this functionality unless absolutely necessary. Whenever it's
possible, a lazy input will be more efficient and have a better user experience.
This functionality is useful in two cases:
1. You want to conditionally prevent an output node from executing. (Particularly a built-in node
like SaveImage. For your own output nodes, I would recommend just adding a BOOL input and using
lazy evaluation to let it conditionally disable itself.)
2. You have a node with multiple possible outputs, some of which are invalid and should not be used.
(I would recommend not making nodes like this in the future -- instead, make multiple nodes with
different outputs. Unfortunately, there are several popular existing nodes using this pattern.)
"""
def __init__(self, message):
self.message = message

View File

@@ -1,77 +0,0 @@
import torch
from comfy.comfy_types.node_typing import ComfyNodeABC, IO
import asyncio
from comfy.utils import ProgressBar
import time
class TestNode(ComfyNodeABC):
@classmethod
def INPUT_TYPES(cls):
return {
"required": {
"image": (IO.IMAGE,),
"some_int": (IO.INT, {"display_name": "new_name",
"min": 0, "max": 127, "default": 42,
"tooltip": "My tooltip 😎", "display": "slider"}),
"combo": (IO.COMBO, {"options": ["a", "b", "c"], "tooltip": "This is a combo input"}),
"combo2": (IO.COMBO, {"options": ["a", "b", "c"], "multi_select": True, "tooltip": "This is a combo input"}),
},
"optional": {
"xyz": ("XYZ",),
"mask": (IO.MASK,),
}
}
RETURN_TYPES = (IO.INT, IO.IMAGE)
RETURN_NAMES = ("INT", "img🖼")
OUTPUT_TOOLTIPS = (None, "This is an image")
FUNCTION = "do_thing"
OUTPUT_NODE = True
CATEGORY = "v3 nodes"
def do_thing(self, image: torch.Tensor, some_int: int, combo: str, combo2: list[str], xyz=None, mask: torch.Tensor=None):
return (some_int, image)
class TestSleep(ComfyNodeABC):
@classmethod
def INPUT_TYPES(cls):
return {
"required": {
"value": (IO.ANY, {}),
"seconds": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 9999.0, "step": 0.01, "tooltip": "The amount of seconds to sleep."}),
},
"hidden": {
"unique_id": "UNIQUE_ID",
},
}
RETURN_TYPES = (IO.ANY,)
FUNCTION = "sleep"
CATEGORY = "_for_testing"
async def sleep(self, value, seconds, unique_id):
pbar = ProgressBar(seconds, node_id=unique_id)
start = time.time()
expiration = start + seconds
now = start
while now < expiration:
now = time.time()
pbar.update_absolute(now - start)
await asyncio.sleep(0.02)
return (value,)
NODE_CLASS_MAPPINGS = {
"V1TestNode1": TestNode,
"V1TestSleep": TestSleep,
}
NODE_DISPLAY_NAME_MAPPINGS = {
"V1TestNode1": "V1 Test Node",
"V1TestSleep": "V1 Test Sleep",
}

View File

@@ -1,283 +0,0 @@
import torch
import time
from comfy_api.latest import io, ui, _io
import logging # noqa
import comfy.utils
import asyncio
@io.comfytype(io_type="XYZ")
class XYZ(io.ComfyTypeIO):
Type = tuple[int,str]
class V3TestNode(io.ComfyNode):
# NOTE: this is here just to test that state is not leaking
def __init__(self):
super().__init__()
self.hahajkunless = ";)"
@classmethod
def define_schema(cls):
return io.Schema(
node_id="V3_01_TestNode1",
display_name="V3 Test Node",
category="v3 nodes",
description="This is a funky V3 node test.",
inputs=[
io.Image.Input("image", display_name="new_image"),
XYZ.Input("xyz", optional=True),
io.Custom("JKL").Input("jkl", optional=True),
io.Mask.Input("mask", display_name="mask haha", optional=True),
io.Int.Input("some_int", display_name="new_name", min=0, max=127, default=42,
tooltip="My tooltip 😎", display_mode=io.NumberDisplay.slider),
io.Combo.Input("combo", options=["a", "b", "c"], tooltip="This is a combo input"),
io.MultiCombo.Input("combo2", options=["a","b","c"]),
io.MultiType.Input(io.Int.Input("int_multitype", display_name="haha"), types=[io.Float]),
io.MultiType.Input("multitype", types=[io.Mask, io.Float, io.Int], optional=True),
# ComboInput("combo", image_upload=True, image_folder=FolderType.output,
# remote=RemoteOptions(
# route="/internal/files/output",
# refresh_button=True,
# ),
# tooltip="This is a combo input"),
# IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display=NumberDisplay.slider, ),
# ComboDynamicInput("mask", behavior=InputBehavior.optional),
# IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display=NumberDisplay.slider,
# dependent_inputs=[ComboDynamicInput("mask", behavior=InputBehavior.optional)],
# dependent_values=[lambda my_value: IO.STRING if my_value < 5 else IO.NUMBER],
# ),
# ["option1", "option2". "option3"]
# ComboDynamicInput["sdfgjhl", [ComboDynamicOptions("option1", [IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display=NumberDisplay.slider, ImageInput(), MaskInput(), String()]),
# CombyDynamicOptons("option2", [])
# ]]
],
outputs=[
io.Int.Output(),
io.Image.Output(display_name="img🖼", tooltip="This is an image"),
],
hidden=[
io.Hidden.prompt,
io.Hidden.auth_token_comfy_org,
io.Hidden.unique_id,
],
is_output_node=True,
)
@classmethod
def validate_inputs(cls, image: io.Image.Type, some_int: int, combo: io.Combo.Type, combo2: io.MultiCombo.Type, xyz: XYZ.Type=None, mask: io.Mask.Type=None, **kwargs):
if some_int < 0:
raise Exception("some_int must be greater than 0")
if combo == "c":
raise Exception("combo must be a or b")
return True
@classmethod
def execute(cls, image: io.Image.Type, some_int: int, combo: io.Combo.Type, combo2: io.MultiCombo.Type, xyz: XYZ.Type=None, mask: io.Mask.Type=None, **kwargs):
if hasattr(cls, "hahajkunless"):
raise Exception("The 'cls' variable leaked instance state between runs!")
if hasattr(cls, "doohickey"):
raise Exception("The 'cls' variable leaked state on class properties between runs!")
try:
cls.doohickey = "LOLJK"
except AttributeError:
pass
return io.NodeOutput(some_int, image, ui=ui.PreviewImage(image, cls=cls))
# class V3LoraLoader(io.ComfyNode):
# @classmethod
# def define_schema(cls):
# return io.Schema(
# node_id="V3_LoraLoader",
# display_name="V3 LoRA Loader",
# category="v3 nodes",
# description="LoRAs are used to modify diffusion and CLIP models, altering the way in which latents are denoised such as applying styles. Multiple LoRA nodes can be linked together.",
# inputs=[
# io.Model.Input("model", tooltip="The diffusion model the LoRA will be applied to."),
# io.Clip.Input("clip", tooltip="The CLIP model the LoRA will be applied to."),
# io.Combo.Input(
# "lora_name",
# options=folder_paths.get_filename_list("loras"),
# tooltip="The name of the LoRA."
# ),
# io.Float.Input(
# "strength_model",
# default=1.0,
# min=-100.0,
# max=100.0,
# step=0.01,
# tooltip="How strongly to modify the diffusion model. This value can be negative."
# ),
# io.Float.Input(
# "strength_clip",
# default=1.0,
# min=-100.0,
# max=100.0,
# step=0.01,
# tooltip="How strongly to modify the CLIP model. This value can be negative."
# ),
# ],
# outputs=[
# io.Model.Output(),
# io.Clip.Output(),
# ],
# )
# @classmethod
# def execute(cls, model: io.Model.Type, clip: io.Clip.Type, lora_name: str, strength_model: float, strength_clip: float, **kwargs):
# if strength_model == 0 and strength_clip == 0:
# return io.NodeOutput(model, clip)
# lora = cls.resources.get(resources.TorchDictFolderFilename("loras", lora_name))
# model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
# return io.NodeOutput(model_lora, clip_lora)
class NInputsTest(io.ComfyNode):
@classmethod
def define_schema(cls):
return io.Schema(
node_id="V3_NInputsTest",
display_name="V3 N Inputs Test",
inputs=[
_io.AutogrowDynamic.Input("nmock", template_input=io.Image.Input("image"), min=1, max=3),
_io.AutogrowDynamic.Input("nmock2", template_input=io.Int.Input("int"), optional=True, min=1, max=4),
],
outputs=[
io.Image.Output(),
],
)
@classmethod
def validate_inputs(cls, nmock, nmock2):
return True
@classmethod
def fingerprint_inputs(cls, nmock, nmock2):
return time.time()
@classmethod
def check_lazy_status(cls, **kwargs) -> list[str]:
need = [name for name in kwargs if kwargs[name] is None]
return need
@classmethod
def execute(cls, nmock, nmock2):
first_image = nmock[0]
all_images = []
for img in nmock:
if img.shape != first_image.shape:
img = img.movedim(-1,1)
img = comfy.utils.common_upscale(img, first_image.shape[2], first_image.shape[1], "lanczos", "center")
img = img.movedim(1,-1)
all_images.append(img)
combined_image = torch.cat(all_images, dim=0)
return io.NodeOutput(combined_image)
class V3TestSleep(io.ComfyNode):
@classmethod
def define_schema(cls):
return io.Schema(
node_id="V3_TestSleep",
display_name="V3 Test Sleep",
category="_for_testing",
description="Test async sleep functionality.",
inputs=[
io.AnyType.Input("value", display_name="Value"),
io.Float.Input("seconds", display_name="Seconds", default=1.0, min=0.0, max=9999.0, step=0.01, tooltip="The amount of seconds to sleep."),
],
outputs=[
io.AnyType.Output(),
],
hidden=[
io.Hidden.unique_id,
],
is_experimental=True,
)
@classmethod
async def execute(cls, value: io.AnyType.Type, seconds: io.Float.Type, **kwargs):
logging.info(f"V3TestSleep: {cls.hidden.unique_id}")
pbar = comfy.utils.ProgressBar(seconds, node_id=cls.hidden.unique_id)
start = time.time()
expiration = start + seconds
now = start
while now < expiration:
now = time.time()
pbar.update_absolute(now - start)
await asyncio.sleep(0.02)
return io.NodeOutput(value)
class V3DummyStart(io.ComfyNode):
@classmethod
def define_schema(cls):
return io.Schema(
node_id="V3_DummyStart",
display_name="V3 Dummy Start",
category="v3 nodes",
description="This is a dummy start node.",
inputs=[],
outputs=[
io.Custom("XYZ").Output(),
],
)
@classmethod
def execute(cls):
return io.NodeOutput(None)
class V3DummyEnd(io.ComfyNode):
COOL_VALUE = 123
@classmethod
def define_schema(cls):
return io.Schema(
node_id="V3_DummyEnd",
display_name="V3 Dummy End",
category="v3 nodes",
description="This is a dummy end node.",
inputs=[
io.Custom("XYZ").Input("xyz"),
],
outputs=[],
is_output_node=True,
)
@classmethod
def custom_action(cls):
return 456
@classmethod
def execute(cls, xyz: io.Custom("XYZ").Type):
logging.info(f"V3DummyEnd: {cls.COOL_VALUE}")
logging.info(f"V3DummyEnd: {cls.custom_action()}")
return
class V3DummyEndInherit(V3DummyEnd):
@classmethod
def define_schema(cls):
schema = super().define_schema()
schema.node_id = "V3_DummyEndInherit"
schema.display_name = "V3 Dummy End Inherit"
return schema
@classmethod
def execute(cls, xyz: io.Custom("XYZ").Type):
logging.info(f"V3DummyEndInherit: {cls.COOL_VALUE}")
return super().execute(xyz)
NODES_LIST: list[type[io.ComfyNode]] = [
V3TestNode,
# V3LoraLoader,
NInputsTest,
V3TestSleep,
V3DummyStart,
V3DummyEnd,
V3DummyEndInherit,
]

View File

@@ -6,6 +6,7 @@ import os
import sys
import json
import hashlib
import inspect
import traceback
import math
import time
@@ -29,7 +30,7 @@ import comfy.controlnet
from comfy.comfy_types import IO, ComfyNodeABC, InputTypeDict, FileLocator
from comfy_api.internal import register_versions, ComfyAPIWithVersion
from comfy_api.version_list import supported_versions
from comfy_api.latest import io
from comfy_api.latest import io, ComfyExtension
import comfy.clip_vision
@@ -2162,17 +2163,36 @@ async def load_custom_node(module_path: str, ignore=set(), module_parent="custom
if hasattr(module, "NODE_DISPLAY_NAME_MAPPINGS") and getattr(module, "NODE_DISPLAY_NAME_MAPPINGS") is not None:
NODE_DISPLAY_NAME_MAPPINGS.update(module.NODE_DISPLAY_NAME_MAPPINGS)
return True
# V3 node definition
elif getattr(module, "NODES_LIST", None) is not None:
for node_cls in module.NODES_LIST:
node_cls: io.ComfyNode
schema = node_cls.GET_SCHEMA()
if schema.node_id not in ignore:
NODE_CLASS_MAPPINGS[schema.node_id] = node_cls
node_cls.RELATIVE_PYTHON_MODULE = "{}.{}".format(module_parent, get_module_name(module_path))
if schema.display_name is not None:
NODE_DISPLAY_NAME_MAPPINGS[schema.node_id] = schema.display_name
return True
# V3 Extension Definition
elif hasattr(module, "comfy_entrypoint"):
entrypoint = getattr(module, "comfy_entrypoint")
if not callable(entrypoint):
logging.warning(f"comfy_entrypoint in {module_path} is not callable, skipping.")
return False
try:
if inspect.iscoroutinefunction(entrypoint):
extension = await entrypoint()
else:
extension = entrypoint()
if not isinstance(extension, ComfyExtension):
logging.warning(f"comfy_entrypoint in {module_path} did not return a ComfyExtension, skipping.")
return False
node_list = await extension.get_node_list()
if not isinstance(node_list, list):
logging.warning(f"comfy_entrypoint in {module_path} did not return a list of nodes, skipping.")
return False
for node_cls in node_list:
node_cls: io.ComfyNode
schema = node_cls.GET_SCHEMA()
if schema.node_id not in ignore:
NODE_CLASS_MAPPINGS[schema.node_id] = node_cls
node_cls.RELATIVE_PYTHON_MODULE = "{}.{}".format(module_parent, get_module_name(module_path))
if schema.display_name is not None:
NODE_DISPLAY_NAME_MAPPINGS[schema.node_id] = schema.display_name
return True
except Exception as e:
logging.warning(f"Error while calling comfy_entrypoint in {module_path}: {e}")
return False
else:
logging.warning(f"Skip {module_path} module for custom nodes due to the lack of NODE_CLASS_MAPPINGS or NODES_LIST (need one).")
return False
@@ -2300,8 +2320,6 @@ async def init_builtin_extra_nodes():
"nodes_camera_trajectory.py",
"nodes_edit_model.py",
"nodes_tcfg.py",
"nodes_v3_test.py", # TODO: remove
"nodes_v1_test.py", # TODO: remove
]
import_failed = []
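
With the new branch above, a custom node package no longer has to export `NODE_CLASS_MAPPINGS`: the loader looks for a module-level `comfy_entrypoint`, calls it (awaiting it if it is a coroutine function), checks that the result is a `ComfyExtension`, and registers each class from `get_node_list()` under its `schema.node_id`. A sketch of what such a package might export, with the package and node names invented for illustration:

```python
# hypothetical custom_nodes/my_pack/__init__.py
from comfy_api.latest import ComfyExtension, io

class MyPackExtension(ComfyExtension):
    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        from .my_nodes import MyNode  # hypothetical module providing a node
        return [MyNode]

async def comfy_entrypoint() -> ComfyExtension:
    # load_custom_node() awaits this because it is a coroutine function,
    # then registers every node class returned by get_node_list()
    return MyPackExtension()
```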

View File

@@ -12,8 +12,6 @@ documentation = "https://docs.comfy.org/"
[tool.ruff]
lint.select = [
"E", # pycodestyle errors
"I", # isort
"N805", # invalid-first-argument-name-for-method
"S307", # suspicious-eval-usage
"S102", # exec
@@ -23,8 +21,4 @@ lint.select = [
# See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
"F",
]
lint.ignore = ["E501"] # disable line-length checking
exclude = ["*.ipynb", "**/generated/*.pyi"]
[tool.ruff.lint.per-file-ignores]
"!comfy_extras/v3/*" = ["E", "I"] # enable these rules only for V3 nodes