Spaces:
Running
Fix ComfyUI workflow validation errors
Browse files
- Add missing UltralyticsDetectorProvider node (578) for face detection
- Fix FaceDetailer (581) parameters with correct types:
* steps: changed from "randomize" to 20 (INT)
* cfg: changed from 20 to 0.5 (FLOAT)
* sampler_name: changed from 0.5 to "euler" (STRING)
* scheduler: changed from "dpmpp_2m" to "simple" (valid option)
* denoise: changed from "simple" to 0.5 (FLOAT)
* sam_detection_hint: changed from 2.2 to "center-1" (valid option)
* sam_dilation: changed from "center-1" to 0 (INT)
* sam_mask_hint_use_negative: changed from 5 to "False" (valid option)
* drop_size: changed from "" to 10 (INT)
* cycle: changed from false to 1 (INT >= 1)
* bbox_detector: added connection to node 578
- Fix SeedVR2VideoUpscaler resolution: changed from 1 to 1080 (min 16)
- Fix ModelPatchLoader file path: controlnet/ → model_patches/
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
- app.py +1 -1
- simple_api_workflow.json +28 -20
|
@@ -56,7 +56,7 @@ def setup():
|
|
| 56 |
{"repo": "Comfy-Org/z_image_turbo", "file": "split_files/vae/ae.safetensors", "dest": "models/vae/ae.safetensors"},
|
| 57 |
{"repo": "Comfy-Org/z_image_turbo", "file": "split_files/diffusion_models/z_image_turbo_bf16.safetensors", "dest": "models/diffusion_models/z_image_turbo_bf16.safetensors"},
|
| 58 |
{"repo": "Comfy-Org/z_image_turbo", "file": "split_files/text_encoders/qwen_3_4b.safetensors", "dest": "models/text_encoders/qwen_3_4b.safetensors"},
|
| 59 |
-
{"repo": "alibaba-pai/Z-Image-Turbo-Fun-Controlnet-Union", "file": "Z-Image-Turbo-Fun-Controlnet-Union.safetensors", "dest": "models/controlnet/Z-Image-Turbo-Fun-Controlnet-Union.safetensors"},
|
| 60 |
{"repo": "deepghs/yolo-face", "file": "yolov8n-face/model.pt", "dest": "models/ultralytics/bbox/yolov8n-face.pt"},
|
| 61 |
# SAM model for FaceDetailer
|
| 62 |
{"repo": "YouLiXiya/YL-SAM", "file": "sam_vit_b_01ec64.pth", "dest": "models/sams/sam_vit_b_01ec64.pth"}
|
|
|
|
| 56 |
{"repo": "Comfy-Org/z_image_turbo", "file": "split_files/vae/ae.safetensors", "dest": "models/vae/ae.safetensors"},
|
| 57 |
{"repo": "Comfy-Org/z_image_turbo", "file": "split_files/diffusion_models/z_image_turbo_bf16.safetensors", "dest": "models/diffusion_models/z_image_turbo_bf16.safetensors"},
|
| 58 |
{"repo": "Comfy-Org/z_image_turbo", "file": "split_files/text_encoders/qwen_3_4b.safetensors", "dest": "models/text_encoders/qwen_3_4b.safetensors"},
|
| 59 |
+
{"repo": "alibaba-pai/Z-Image-Turbo-Fun-Controlnet-Union", "file": "Z-Image-Turbo-Fun-Controlnet-Union.safetensors", "dest": "models/model_patches/Z-Image-Turbo-Fun-Controlnet-Union.safetensors"},
|
| 60 |
{"repo": "deepghs/yolo-face", "file": "yolov8n-face/model.pt", "dest": "models/ultralytics/bbox/yolov8n-face.pt"},
|
| 61 |
# SAM model for FaceDetailer
|
| 62 |
{"repo": "YouLiXiya/YL-SAM", "file": "sam_vit_b_01ec64.pth", "dest": "models/sams/sam_vit_b_01ec64.pth"}
|
|
@@ -219,7 +219,7 @@
|
|
| 219 |
"color_correction": "fixed",
|
| 220 |
"batch_size": 2160,
|
| 221 |
"uniform_batch_size": 0,
|
| 222 |
-
"resolution": 1,
|
| 223 |
"max_resolution": false
|
| 224 |
}
|
| 225 |
},
|
|
@@ -310,6 +310,12 @@
|
|
| 310 |
"strength": 1.2
|
| 311 |
}
|
| 312 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 313 |
"581": {
|
| 314 |
"class_type": "FaceDetailer",
|
| 315 |
"inputs": {
|
|
@@ -337,32 +343,34 @@
|
|
| 337 |
"10",
|
| 338 |
0
|
| 339 |
],
|
|
|
|
|
|
|
|
|
|
|
|
|
| 340 |
"guide_size": 256,
|
| 341 |
"guide_size_for": true,
|
| 342 |
"max_size": 1024,
|
| 343 |
"seed": 869357132031987,
|
| 344 |
-
"steps": "randomize",
|
| 345 |
-
"cfg": 20,
|
| 346 |
-
"sampler_name": 0.5,
|
| 347 |
-
"scheduler": "dpmpp_2m",
|
| 348 |
-
"denoise": "simple",
|
| 349 |
"feather": 0.12,
|
| 350 |
"noise_mask": 8,
|
| 351 |
"force_inpaint": true,
|
| 352 |
-
"bbox_threshold":
|
| 353 |
-
"bbox_dilation":
|
| 354 |
-
"bbox_crop_factor":
|
| 355 |
-
"sam_detection_hint": 2.2,
|
| 356 |
-
"sam_dilation": "center-1",
|
| 357 |
-
"sam_threshold": 0,
|
| 358 |
-
"sam_bbox_expansion": 0
|
| 359 |
-
"sam_mask_hint_threshold": 0,
|
| 360 |
-
"
|
| 361 |
-
"
|
| 362 |
-
"
|
| 363 |
-
"
|
| 364 |
-
"wildcard": 1,
|
| 365 |
-
"cycle": false
|
| 366 |
}
|
| 367 |
},
|
| 368 |
"62": {
|
|
|
|
| 219 |
"color_correction": "fixed",
|
| 220 |
"batch_size": 2160,
|
| 221 |
"uniform_batch_size": 0,
|
| 222 |
+
"resolution": 1080,
|
| 223 |
"max_resolution": false
|
| 224 |
}
|
| 225 |
},
|
|
|
|
| 310 |
"strength": 1.2
|
| 311 |
}
|
| 312 |
},
|
| 313 |
+
"578": {
|
| 314 |
+
"class_type": "UltralyticsDetectorProvider",
|
| 315 |
+
"inputs": {
|
| 316 |
+
"model_name": "bbox/yolov8n-face.pt"
|
| 317 |
+
}
|
| 318 |
+
},
|
| 319 |
"581": {
|
| 320 |
"class_type": "FaceDetailer",
|
| 321 |
"inputs": {
|
|
|
|
| 343 |
"10",
|
| 344 |
0
|
| 345 |
],
|
| 346 |
+
"bbox_detector": [
|
| 347 |
+
"578",
|
| 348 |
+
0
|
| 349 |
+
],
|
| 350 |
"guide_size": 256,
|
| 351 |
"guide_size_for": true,
|
| 352 |
"max_size": 1024,
|
| 353 |
"seed": 869357132031987,
|
| 354 |
+
"steps": 20,
|
| 355 |
+
"cfg": 0.5,
|
| 356 |
+
"sampler_name": "euler",
|
| 357 |
+
"scheduler": "simple",
|
| 358 |
+
"denoise": 0.5,
|
| 359 |
"feather": 0.12,
|
| 360 |
"noise_mask": 8,
|
| 361 |
"force_inpaint": true,
|
| 362 |
+
"bbox_threshold": 0.5,
|
| 363 |
+
"bbox_dilation": 10,
|
| 364 |
+
"bbox_crop_factor": 3.0,
|
| 365 |
+
"sam_detection_hint": "center-1",
|
| 366 |
+
"sam_dilation": 0,
|
| 367 |
+
"sam_threshold": 0.93,
|
| 368 |
+
"sam_bbox_expansion": 0,
|
| 369 |
+
"sam_mask_hint_threshold": 0.7,
|
| 370 |
+
"sam_mask_hint_use_negative": "False",
|
| 371 |
+
"drop_size": 10,
|
| 372 |
+
"wildcard": "",
|
| 373 |
+
"cycle": 1
|
|
|
|
|
|
|
| 374 |
}
|
| 375 |
},
|
| 376 |
"62": {
|