Skip to content

Instantly share code, notes, and snippets.

@safa-dayo
Created November 26, 2024 03:23
Show Gist options
  • Save safa-dayo/f94b6f46bec780c9896ebf0e258b81d7 to your computer and use it in GitHub Desktop.
Save safa-dayo/f94b6f46bec780c9896ebf0e258b81d7 to your computer and use it in GitHub Desktop.
FluxのReduxで2枚の画像をミックスさせた画像を作るためのComfyUIワークフローファイル
{
"last_node_id": 47,
"last_link_id": 129,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [
588,
-1486
],
"size": [
210,
46
],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 9,
"type": "SaveImage",
"pos": [
602,
-1390
],
"size": [
1427.7078857421875,
1019.7511596679688
],
"flags": {
"pinned": true
},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": [
"ComfyUI"
],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [
259.50567626953125,
-779.6294555664062
],
"size": [
272.3617858886719,
124.53733825683594
],
"flags": {
"collapsed": false
},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [
24
],
"slot_index": 0,
"shape": 3
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null,
"shape": 3
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [
-116.49430847167969,
-1112.62939453125
],
"size": [
315,
58
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [
19
],
"shape": 3
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": [
"euler"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [
218.5056915283203,
-1114.62939453125
],
"size": [
315,
106
],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [
20
],
"shape": 3
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": [
"simple",
26,
1
]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [
-136,
-571
],
"size": [
222.3482666015625,
46
],
"flags": {
"collapsed": true
},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 126,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"links": [
30
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [
-117,
-999
],
"size": [
315,
82
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"links": [
37
],
"shape": 3
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [
510051283493939,
"randomize"
],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [
-124.23992156982422,
-1425.4127197265625
],
"size": [
317.4000244140625,
58
],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
122
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [
2.8000000000000003
],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [
208.38201904296875,
-1427.62939453125
],
"size": [
210,
83.42689514160156
],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": 112,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": 113,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
116
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1040,
1024,
1
]
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [
223.5056915283203,
-971.6294555664062
],
"size": [
315,
130
],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"link": 115,
"slot_index": 1,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": 114,
"slot_index": 2,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
54,
55
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [
1.15,
0.5,
1040,
1024
]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [
-120.23989868164062,
-1311.413330078125
],
"size": [
278.0505065917969,
146.62425231933594
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [
112,
115
],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
1040,
"fixed"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [
173.76014709472656,
-1307.413330078125
],
"size": [
243.00711059570312,
139.43699645996094
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [
113,
114
],
"slot_index": 0,
"widget": {
"name": "height"
}
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
1024,
"fixed"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [
480,
1344
],
"size": [
314.99755859375,
117.98363494873047
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 38,
"type": "CLIPVisionLoader",
"pos": [
-1405.0826416015625,
-1018.4903564453125
],
"size": [
370,
60
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"links": [
117,
127
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": [
"sigclip_vision_patch14_384.safetensors"
],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 39,
"type": "CLIPVisionEncode",
"pos": [
-747,
-575
],
"size": [
290,
50
],
"flags": {
"collapsed": true
},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 117
},
{
"name": "image",
"type": "IMAGE",
"link": 118
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [
120
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": []
},
{
"id": 41,
"type": "StyleModelApply",
"pos": [
-556,
-572
],
"size": [
320,
70
],
"flags": {
"collapsed": true
},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 122
},
{
"name": "style_model",
"type": "STYLE_MODEL",
"link": 119
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 120,
"shape": 7
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
124
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelApply"
},
"widgets_values": []
},
{
"id": 42,
"type": "StyleModelLoader",
"pos": [
-1408,
-904
],
"size": [
403.7030334472656,
79.98893737792969
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STYLE_MODEL",
"type": "STYLE_MODEL",
"links": [
119,
125
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelLoader"
},
"widgets_values": [
"flux1-redux-dev.safetensors"
],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 44,
"type": "StyleModelApply",
"pos": [
-371,
-573
],
"size": [
320,
70
],
"flags": {
"collapsed": true
},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 124
},
{
"name": "style_model",
"type": "STYLE_MODEL",
"link": 125
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 129,
"shape": 7
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
126
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelApply"
},
"widgets_values": []
},
{
"id": 46,
"type": "CLIPVisionEncode",
"pos": [
-929,
-577
],
"size": [
290,
50
],
"flags": {
"collapsed": true
},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 127
},
{
"name": "image",
"type": "IMAGE",
"link": 128
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [
129
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": []
},
{
"id": 28,
"type": "Note",
"pos": [
-1880,
-1860
],
"size": [
389.5084533691406,
271.7581787109375
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 47,
"type": "Note",
"pos": [
-1880,
-2460
],
"size": [
390.6860656738281,
326.7415771484375
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Update ComfyUI to the latest\nDownload `sigclip_vision_patch14_384.safetensors` into `ComfyUI/models/clip_vision`\nMake sure flux1-dev model is in `ComfyUI/models/unet` folder\nDownload the Redux Model into `ComfyUI/models/style_models`\n\nsigclip_vision_patch14_384\n\nhttps://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors\n\nRedux\n\nhttps://huggingface.co/black-forest-labs/FLUX.1-Redux-dev/resolve/main/flux1-redux-dev.safetensors"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 43,
"type": "Note",
"pos": [
-1880,
-2090
],
"size": [
392.7983703613281,
186.2510986328125
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"The redux model lets you prompt with images. It can be used with any Flux1 dev or schnell model workflow.\n\nYou can chain multiple \"Apply Style Model\" nodes if you want to mix multiple images together."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [
-1412.0826416015625,
-1418.4912109375
],
"size": [
315,
82
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
56
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": [
"flux1-dev.safetensors",
"default"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [
-1411.0826416015625,
-1288.4912109375
],
"size": [
315,
106
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
10
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 10,
"type": "VAELoader",
"pos": [
-1409.0826416015625,
-1131.490478515625
],
"size": [
311.81634521484375,
60.429901123046875
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [
12
],
"slot_index": 0,
"shape": 3
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": [
"ae.safetensors"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
-892,
-960
],
"size": [
691.6862182617188,
296.7965087890625
],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
41
],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"1 young woman, smile"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 45,
"type": "LoadImage",
"pos": [
-917,
-1427
],
"size": [
315,
314
],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
128
],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"example.png",
"image"
]
},
{
"id": 40,
"type": "LoadImage",
"pos": [
-512,
-1428
],
"size": [
315,
314
],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
118
]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"example.png",
"image"
]
}
],
"links": [
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
10,
11,
0,
6,
0,
"CLIP"
],
[
12,
10,
0,
8,
1,
"VAE"
],
[
19,
16,
0,
13,
2,
"SAMPLER"
],
[
20,
17,
0,
13,
3,
"SIGMAS"
],
[
24,
13,
0,
8,
0,
"LATENT"
],
[
30,
22,
0,
13,
1,
"GUIDER"
],
[
37,
25,
0,
13,
0,
"NOISE"
],
[
41,
6,
0,
26,
0,
"CONDITIONING"
],
[
54,
30,
0,
22,
0,
"MODEL"
],
[
55,
30,
0,
17,
0,
"MODEL"
],
[
56,
12,
0,
30,
0,
"MODEL"
],
[
112,
34,
0,
27,
0,
"INT"
],
[
113,
35,
0,
27,
1,
"INT"
],
[
114,
35,
0,
30,
2,
"INT"
],
[
115,
34,
0,
30,
1,
"INT"
],
[
116,
27,
0,
13,
4,
"LATENT"
],
[
117,
38,
0,
39,
0,
"CLIP_VISION"
],
[
118,
40,
0,
39,
1,
"IMAGE"
],
[
119,
42,
0,
41,
1,
"STYLE_MODEL"
],
[
120,
39,
0,
41,
2,
"CLIP_VISION_OUTPUT"
],
[
122,
26,
0,
41,
0,
"CONDITIONING"
],
[
124,
41,
0,
44,
0,
"CONDITIONING"
],
[
125,
42,
0,
44,
1,
"STYLE_MODEL"
],
[
126,
44,
0,
22,
1,
"CONDITIONING"
],
[
127,
38,
0,
46,
0,
"CLIP_VISION"
],
[
128,
45,
0,
46,
1,
"IMAGE"
],
[
129,
46,
0,
44,
2,
"CLIP_VISION_OUTPUT"
]
],
"groups": [
{
"id": 2,
"title": "Models",
"bounding": [
-1434.0643310546875,
-1512.511474609375,
460.077880859375,
706.5479125976562
],
"color": "#88A",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Style Images",
"bounding": [
-945.751708984375,
-1517.61669921875,
795.5681762695312,
459.0286560058594
],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "GenSettings",
"bounding": [
-134.91983032226562,
-1515.5543212890625,
713.0016479492188,
883.7788696289062
],
"color": "#b58b2a",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Prompt",
"bounding": [
-944.9072265625,
-1040.9501953125,
791.5633544921875,
412.6332702636719
],
"color": "#8A8",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.5131581182307067,
"offset": [
1012.2489003490022,
1635.3140942636417
]
},
"groupNodes": {},
"workspace_info": {
"id": "C6n70rfezeWlwXvzLXSSY"
}
},
"version": 0.4
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment