{
  "6": {
    "inputs": {
      "text": "the anime girl with massive fennec ears is wearing cargo pants while sitting on a log in the woods biting into a sandwitch beside a beautiful alpine lake",
      "clip": [
        "10",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Positive Prompt)"
    }
  },
  "7": {
    "inputs": {
      "text": "deformed, blurry, over saturation, bad anatomy, disfigured, poorly drawn face, mutation, mutated, extra_limb, ugly, poorly drawn hands, fused fingers, messy drawing, broken legs censor, censored, censor_bar",
      "clip": [
        "10",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Negative Prompt)"
    }
  },
  "8": {
    "inputs": {
      "samples": [
        "28",
        0
      ],
      "vae": [
        "13",
        0
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "9": {
    "inputs": {
      "filename_prefix": "ComfyUI",
      "images": [
        "8",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  },
  "10": {
    "inputs": {
      "clip_name": "qwen_2.5_vl_fp16.safetensors",
      "type": "omnigen2",
      "device": "default"
    },
    "class_type": "CLIPLoader",
    "_meta": {
      "title": "Load CLIP"
    }
  },
  "11": {
    "inputs": {
      "width": [
        "32",
        0
      ],
      "height": [
        "32",
        1
      ],
      "batch_size": 1
    },
    "class_type": "EmptySD3LatentImage",
    "_meta": {
      "title": "EmptySD3LatentImage"
    }
  },
  "12": {
    "inputs": {
      "unet_name": "omnigen2_fp16.safetensors",
      "weight_dtype": "default"
    },
    "class_type": "UNETLoader",
    "_meta": {
      "title": "Load Diffusion Model"
    }
  },
  "13": {
    "inputs": {
      "vae_name": "ae.safetensors"
    },
    "class_type": "VAELoader",
    "_meta": {
      "title": "Load VAE"
    }
  },
  "14": {
    "inputs": {
      "pixels": [
        "17",
        0
      ],
      "vae": [
        "13",
        0
      ]
    },
    "class_type": "VAEEncode",
    "_meta": {
      "title": "VAE Encode"
    }
  },
  "15": {
    "inputs": {
      "conditioning": [
        "6",
        0
      ],
      "latent": [
        "14",
        0
      ]
    },
    "class_type": "ReferenceLatent",
    "_meta": {
      "title": "ReferenceLatent"
    }
  },
  "16": {
    "inputs": {
      "image": "fennec_girl_sing.png"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Load Image"
    },
    "is_changed": [
      "8da6d35e6206689be1ac0d44e693a7eb13babffda52318c7a99b4f03cc8aa773"
    ]
  },
  "17": {
    "inputs": {
      "upscale_method": "area",
      "megapixels": 1.0,
      "image": [
        "16",
        0
      ]
    },
    "class_type": "ImageScaleToTotalPixels",
    "_meta": {
      "title": "Scale Image to Total Pixels"
    }
  },
  "20": {
    "inputs": {
      "sampler_name": "euler"
    },
    "class_type": "KSamplerSelect",
    "_meta": {
      "title": "KSamplerSelect"
    }
  },
  "21": {
    "inputs": {
      "noise_seed": 832350079790627
    },
    "class_type": "RandomNoise",
    "_meta": {
      "title": "RandomNoise"
    }
  },
  "23": {
    "inputs": {
      "scheduler": "simple",
      "steps": 20,
      "denoise": 1.0,
      "model": [
        "12",
        0
      ]
    },
    "class_type": "BasicScheduler",
    "_meta": {
      "title": "BasicScheduler"
    }
  },
  "27": {
    "inputs": {
      "cfg_conds": 5.0,
      "cfg_cond2_negative": 2.0,
      "model": [
        "12",
        0
      ],
      "cond1": [
        "15",
        0
      ],
      "cond2": [
        "29",
        0
      ],
      "negative": [
        "7",
        0
      ]
    },
    "class_type": "DualCFGGuider",
    "_meta": {
      "title": "DualCFGGuider"
    }
  },
  "28": {
    "inputs": {
      "noise": [
        "21",
        0
      ],
      "guider": [
        "27",
        0
      ],
      "sampler": [
        "20",
        0
      ],
      "sigmas": [
        "23",
        0
      ],
      "latent_image": [
        "11",
        0
      ]
    },
    "class_type": "SamplerCustomAdvanced",
    "_meta": {
      "title": "SamplerCustomAdvanced"
    }
  },
  "29": {
    "inputs": {
      "conditioning": [
        "7",
        0
      ],
      "latent": [
        "14",
        0
      ]
    },
    "class_type": "ReferenceLatent",
    "_meta": {
      "title": "ReferenceLatent"
    }
  },
  "32": {
    "inputs": {
      "image": [
        "17",
        0
      ]
    },
    "class_type": "GetImageSize",
    "_meta": {
      "title": "Get Image Size"
    }
  },
  "39": {
    "inputs": {
      "cfg": 5,
      "model": [
        "12",
        0
      ],
      "positive": [
        "15",
        0
      ],
      "negative": [
        "7",
        0
      ]
    },
    "class_type": "CFGGuider",
    "_meta": {
      "title": "CFGGuider"
    }
  }
}