{
"id": "908d0bfb-e192-4627-9b57-147496e6e2dd",
"revision": 0,
"last_node_id": 52,
"last_link_id": 79,
"nodes": [
{
"id": 39,
"type": "VAELoader",
"pos": [
-320,
470
],
"size": [
270,
58
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [
58
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "VAELoader",
"models": [
{
"name": "ae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors",
"directory": "vae"
}
]
},
"widgets_values": [
"ae.safetensors"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 42,
"type": "ConditioningZeroOut",
"pos": [
10,
470
],
"size": [
200,
30
],
"flags": {
"collapsed": true
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 66
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
63
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
200,
470
],
"size": [
210,
46
],
"flags": {
"collapsed": true
},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 58
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
79
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 43,
"type": "MarkdownNote",
"pos": [
-910,
110
],
"size": [
560,
360
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"**Diffusion Model**\n\n- [flux1-dev.safetensors](https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev.safetensors)\n\n**Text Encoder**\n\n- [clip_l.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors)\n\n- [t5xxl_fp16.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors) or [t5xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors)\n\n**VAE**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors)\n\n\n```\nComfyUI/\n├── models/\n│ ├── diffusion_models/\n│ │ └── flux1-dev.safetensors \n│ ├── text_encoders/\n│ │ ├── clip_l.safetensors\n│ │ └── t5xxl_fp16.safetensors # or t5xxl_fp8_e4m3fn_scaled.safetensors\n│ └── vae/\n│ └── ae.safetensors\n```\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 40,
"type": "DualCLIPLoader",
"pos": [
-320,
290
],
"size": [
270,
130
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
64
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "DualCLIPLoader",
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors",
"directory": "text_encoders"
}
]
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp8_e4m3fn_scaled.safetensors",
"flux",
"default"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 38,
"type": "UNETLoader",
"pos": [
-309.27606201171875,
145.1256103515625
],
"size": [
270,
82
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
76
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "UNETLoader",
"models": [
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"flux.1-dev-SRPO-bf16.safetensors",
"default"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 31,
"type": "KSampler",
"pos": [
2.194284439086914,
531.5657958984375
],
"size": [
315,
262
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 72
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 63
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
52
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "KSampler"
},
"widgets_values": [
684878236885448,
"randomize",
20,
1,
"euler",
"simple",
1
]
},
{
"id": 48,
"type": "LoraLoaderModelOnly",
"pos": [
325.98583984375,
-37.134098052978516
],
"size": [
270,
82
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 71
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
72
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Chinese_idol_flex2_lora_v1_000004500.safetensors",
0.3
]
},
{
"id": 47,
"type": "LoraLoaderModelOnly",
"pos": [
4.519396781921387,
-29.22198486328125
],
"size": [
270,
82
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 77
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
71
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Flux_Xiang_lora_000005250.safetensors",
1
]
},
{
"id": 50,
"type": "LoraLoaderModelOnly",
"pos": [
-339.04840087890625,
-36.77546691894531
],
"size": [
270,
82
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 76
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
77
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"FLUX-dev-lora-add_details.safetensors",
0.5
]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [
-320,
630
],
"size": [
270,
120
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
51
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 45,
"type": "CLIPTextEncode",
"pos": [
-17.92325210571289,
161.27252197265625
],
"size": [
400,
200
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 64
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
66,
67
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"This photograph features a young Asian man with glasses with fair skin, short black hair, and brown eyes, smiling warmly at the camera. He is wearing a grey sleeveless hoodie with white drawstrings. The background reveals a rustic indoor setting with wooden beams, large windows, and potted green plants. Sunlight filters through the windows, casting a natural light on the subject. The image has a soft, warm tone, emphasizing the man's youthful and friendly appearance. The overall composition is casual and inviting."
]
},
{
"id": 52,
"type": "SaveImage",
"pos": [
481.1376647949219,
162.32000732421875
],
"size": [
640,
660
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 79
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40"
},
"widgets_values": [
"ComfyUI"
]
}
],
"links": [
[
51,
27,
0,
31,
3,
"LATENT"
],
[
52,
31,
0,
8,
0,
"LATENT"
],
[
58,
39,
0,
8,
1,
"VAE"
],
[
63,
42,
0,
31,
2,
"CONDITIONING"
],
[
64,
40,
0,
45,
0,
"CLIP"
],
[
66,
45,
0,
42,
0,
"CONDITIONING"
],
[
67,
45,
0,
31,
1,
"CONDITIONING"
],
[
71,
47,
0,
48,
0,
"MODEL"
],
[
72,
48,
0,
31,
0,
"MODEL"
],
[
76,
38,
0,
50,
0,
"MODEL"
],
[
77,
50,
0,
47,
0,
"MODEL"
],
[
79,
8,
0,
52,
0,
"IMAGE"
]
],
"groups": [
{
"id": 1,
"title": "Step 1 - Load Models Here",
"bounding": [
-330,
80,
300,
460
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Step 2 - Image Size",
"bounding": [
-330,
560,
300,
200
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Step 3 - Prompt",
"bounding": [
-10,
80,
360,
333.6000061035156
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.0725205354388132,
"offset": [
420.25571899631717,
129.75632303786102
]
},
"frontendVersion": "1.26.11",
"VHS_latentpreview": false,
"VHS_latentpreviewrate": 0,
"VHS_MetadataImage": true,
"VHS_KeepIntermediate": true
},
"version": 0.4
}