seed

commit 1495fae433
parent fbdd030ae6
@@ -3,7 +3,7 @@ import { txt23dApiFormatJSON } from "../comfyJson/txt23d";
import axios from "axios";
import { selectNodeFromApiJSONbyID } from "../utils/editComfyJson";
import WebSocket from "ws";
import { seed } from "../seed";
import { globalMap } from "../seed";
import { txt2imgAPIformatJSON } from "../comfyJson/txt2img";
const baseUrl = "http://47.108.92.176:20000";
const baseWsUrl = "ws://47.108.92.176:20000";
@@ -20,11 +20,8 @@ const Txt23DHandler: RequestHandler<any, any> = async (ctx) => {
}
const requestBody = ctx.request.body;
const { prompt, url } = requestBody;
const inputNode = selectNodeFromApiJSONbyID(txt23dApiFormatJSON, "33");
const seedNode = selectNodeFromApiJSONbyID(txt23dApiFormatJSON, "10");
const formSeedNode = selectNodeFromApiJSONbyID(txt2imgAPIformatJSON, "10");
seedNode.inputs.seed = formSeedNode.inputs.seed;
inputNode.inputs.string = prompt;
const inputNode = selectNodeFromApiJSONbyID(txt23dApiFormatJSON, "6");
inputNode.inputs.image = globalMap.step1url;
try {
ctx.body = {
url: ctx.body = baseUrl + '/view?filename=' + await new Promise((resolve, reject) => {
@@ -47,7 +44,7 @@ const Txt23DHandler: RequestHandler<any, any> = async (ctx) => {
const { type, data } = JSON.parse(event.data);
if (type === "executed") {
console.log(data.output);
if (data.node === '13') {
if (data.node === '3') {
resolve(data.output.mesh[0].filename)
}
}
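For context, a minimal self-contained sketch of the pattern these hunks rely on: open a WebSocket to the ComfyUI server, wait for an "executed" message from a specific node, and turn the reported filename into a /view URL. Only the node ID "3" and the output shape (data.output.mesh[0].filename) are taken from the diff; the helper name waitForMeshFilename, the /ws path, and the rest are assumptions rather than the repository's actual code.

import WebSocket from "ws";

// Hypothetical helper: resolve with the mesh filename reported by `nodeId`
// once ComfyUI announces it as "executed" over the WebSocket.
function waitForMeshFilename(wsUrl: string, nodeId: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const socket = new WebSocket(wsUrl); // e.g. baseWsUrl + "/ws" (assumed path)
    socket.onmessage = (event) => {
      const { type, data } = JSON.parse(event.data.toString());
      if (type === "executed" && data.node === nodeId) {
        socket.close();
        resolve(data.output.mesh[0].filename); // output shape taken from the hunk above
      }
    };
    socket.onerror = (err) => reject(err);
  });
}

// Usage mirroring the handler: expose the result as a ComfyUI /view URL.
// const filename = await waitForMeshFilename(baseWsUrl + "/ws", "3");
// ctx.body = { url: baseUrl + "/view?filename=" + filename };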
@@ -4,14 +4,13 @@ import axios from "axios";
import { selectNodeFromApiJSONbyID } from "../utils/editComfyJson";
import WebSocket from "ws";
import { readJSONFile, saveJSONFile } from "../utils/jsonReader";
import { seed } from "../seed";
import { globalMap } from "../seed";
const baseUrl = "http://47.108.92.176:20000";
const baseWsUrl = "ws://47.108.92.176:20000";
// const baseUrl = "http://localhost:8188";
// const baseWsUrl = "ws://localhost:8188";
axios.defaults.baseURL = baseUrl;
const Txt2ImgHandler: RequestHandler<Txt2ImgRequest, any> = async (ctx, next) => {
console.log(ctx.method);
ctx.set('Access-Control-Allow-Origin', '*')
ctx.set('Access-Control-Allow-Headers', 'Content-Type,Content-Length,Authorization,Accept,X-Requested-With')
ctx.set('Access-Control-Allow-Methods', 'PUT,POST,GET,DELETE,OPTIONS')
@@ -56,7 +55,7 @@ const Txt2ImgHandler: RequestHandler<Txt2ImgRequest, any> = async (ctx, next) =>
if (data.node === '21') {
temp = data.output.text[0];
}
if (data.node === '94') {
if (data.node === '77') {
console.log(data.output);
const filePath = './data.json';

@@ -68,6 +67,7 @@ const Txt2ImgHandler: RequestHandler<Txt2ImgRequest, any> = async (ctx, next) =>
// Save back to the JSON file
saveJSONFile(filePath, data1);
})
globalMap.step1url = data.output.images[0].filename;
resolve({ prompt: temp, url: baseUrl + '/view?filename=' + data.output.images[0].filename });
}
} else if (type === "executing") {
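The Txt2Img hunks gather two node outputs before responding — the styled prompt text from node "21" and the saved image filename, now read from node "77" instead of "94" — and also stash that filename in globalMap.step1url for the later text-to-3D step. Below is a hedged sketch of that flow; the node IDs, the globalMap.step1url field, and the /view URL form come from the diff, while the function name waitForTxt2Img and the message shapes are assumptions.

import WebSocket from "ws";

// Module-level shared state, mirroring seed.ts after this commit.
const globalMap = { step1url: "" };

interface Txt2ImgResult { prompt: string; url: string; }

function waitForTxt2Img(wsUrl: string, baseUrl: string): Promise<Txt2ImgResult> {
  return new Promise((resolve, reject) => {
    const socket = new WebSocket(wsUrl);
    let promptText = ""; // filled from node "21" ("temp" in the handler)
    socket.onmessage = (event) => {
      const { type, data } = JSON.parse(event.data.toString());
      if (type !== "executed") return;
      if (data.node === "21") promptText = data.output.text[0];
      if (data.node === "77") {
        globalMap.step1url = data.output.images[0].filename; // hand-off to the 3D step
        socket.close();
        resolve({ prompt: promptText, url: baseUrl + "/view?filename=" + data.output.images[0].filename });
      }
    };
    socket.onerror = (err) => reject(err);
  });
}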
@@ -42,7 +42,7 @@ const UpscaleHandler: RequestHandler<any ,any> = async (ctx) => {
const { type, data } = JSON.parse(event.data);
if (type === "executed") {
console.log(data.output);
if (data.node === '47') {
if (data.node === '9') {

resolve(data.output.images[0].filename)
}
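All three handlers patch an API-format workflow object in place (via the project's selectNodeFromApiJSONbyID) before submitting it to the server. The sketch below illustrates that idea with a stand-in lookup helper; ComfyUI does expose a POST /prompt endpoint, but the exact payload this server expects and the node/input patched here ("3", seed) are illustrative assumptions, not code from this repository.

import axios from "axios";

// Shape of an API-format ComfyUI workflow: node ID -> { inputs, class_type }.
type ApiWorkflow = Record<string, { inputs: Record<string, unknown>; class_type: string }>;

// Stand-in for selectNodeFromApiJSONbyID: look a node up by its string ID.
function selectNode(workflow: ApiWorkflow, id: string) {
  const node = workflow[id];
  if (!node) throw new Error(`node ${id} not found in workflow`);
  return node;
}

// Patch an input, then queue the workflow on the ComfyUI server.
async function queuePatchedWorkflow(baseUrl: string, workflow: ApiWorkflow) {
  selectNode(workflow, "3").inputs.seed = Date.now(); // e.g. re-seed the sampler node
  const res = await axios.post(baseUrl + "/prompt", { prompt: workflow });
  return res.data; // typically includes a prompt_id to correlate WebSocket events
}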
@@ -1,327 +1,40 @@
const txt23dApiFormatJSON = {
"10": {
"3": {
"inputs": {
"seed": 374404110666628,
"steps": 8,
"cfg": 8,
"sampler_name": "ddim",
"scheduler": "normal",
"denoise": 1,
"preview_method": "auto",
"vae_decode": "true",
"model": [
"69",
0
],
"positive": [
"20",
1
],
"negative": [
"20",
2
],
"latent_image": [
"20",
3
],
"optional_vae": [
"20",
4
]
},
"class_type": "KSampler (Efficient)"
},
"11": {
"inputs": {
"switch_1": "On",
"lora_name_1": "form1024-xl\\form1024-xl-000006.safetensors",
"model_weight_1": 1,
"clip_weight_1": 1,
"switch_2": "On",
"lora_name_2": "Hyper-SD\\Hyper-SDXL-8steps-CFG-lora.safetensors",
"model_weight_2": 0.5,
"clip_weight_2": 1,
"switch_3": "Off",
"lora_name_3": "None",
"model_weight_3": 1,
"clip_weight_3": 1
},
"class_type": "CR LoRA Stack"
},
"13": {
"inputs": {
"preview3d": null,
"mesh": [
"14",
0
]
},
"class_type": "TripoSRViewer"
},
"14": {
"inputs": {
"geometry_resolution": 256,
"threshold": 10,
"model": [
"15",
0
],
"reference_image": [
"72",
0
],
"reference_mask": [
"72",
1
]
},
"class_type": "TripoSRSampler"
},
"15": {
"inputs": {
"model": "3D\\TripoSR.ckpt",
"chunk_size": 8192
},
"class_type": "TripoSRModelLoader"
},
"19": {
"inputs": {
"text_positive": [
"32",
0
],
"text_negative": "",
"style": "base",
"log_prompt": false,
"style_positive": true,
"style_negative": true
},
"class_type": "SDXLPromptStyler"
},
"20": {
"inputs": {
"ckpt_name": "sd_xl_base_1.0.safetensors",
"vae_name": "Baked VAE",
"clip_skip": -2,
"lora_name": "None",
"lora_model_strength": 1,
"lora_clip_strength": 1,
"positive": [
"21",
0
],
"negative": "CLIP_NEGATIVE",
"token_normalization": "none",
"weight_interpretation": "comfy",
"empty_latent_width": 1024,
"empty_latent_height": 1024,
"batch_size": 1,
"lora_stack": [
"mode": "image-to-3d",
"prompt": "",
"image": [
"11",
0
]
},
"class_type": "Efficient Loader"
"class_type": "TripoAPI_Zho"
},
"21": {
"5": {
"inputs": {
"text": [
"19",
"preview3d": null,
"mesh": [
"3",
0
]
},
"class_type": "ShowText|pysssss"
"class_type": "TripoGLBViewer_ZHO"
},
"22": {
"6": {
"inputs": {
"text": [
"19",
1
]
},
"class_type": "ShowText|pysssss"
},
"32": {
"inputs": {
"text": [
"85",
0
]
},
"class_type": "ShowText|pysssss"
},
"33": {
"inputs": {
"string": "留春xiao不住,费尽莺儿语"
},
"class_type": "Simple String"
},
"46": {
"inputs": {
"image": "329247709_425681693072936_3081671045029849121_nstp=dst-jpg_e3.jpg",
"image": "ComfyUI_00012_.png",
"upload": "image"
},
"class_type": "LoadImage"
},
"68": {
"11": {
"inputs": {
"image": [
"46",
"6",
0
]
},
"class_type": "CLIPVisionEncode"
},
"69": {
"inputs": {
"config": "SDXL, Attention Injection",
"weight": 1,
"model": [
"20",
0
]
},
"class_type": "LayeredDiffusionApply"
},
"70": {
"inputs": {
"sd_version": "SDXL",
"sub_batch_size": 16,
"samples": [
"10",
3
],
"images": [
"10",
5
]
},
"class_type": "LayeredDiffusionDecodeRGBA"
},
"72": {
"inputs": {
"sd_version": "SDXL",
"sub_batch_size": 16,
"samples": [
"10",
3
],
"images": [
"10",
5
]
},
"class_type": "LayeredDiffusionDecode"
},
"85": {
"inputs": {
"text": [
"86",
0
]
},
"class_type": "ShowText|pysssss"
},
"86": {
"inputs": {
"system_prompt": "你是一个懂宋词分析的古诗词专家,我会给你一句宋词。请你分析古诗中的意象,按照重要性排序,并参照文件列出只英文的意象prompt。注意只需要英文,请控制输出长度在256token以内。",
"user_prompt": [
"33",
0
],
"model_name": "glm-4",
"temperature": 0.7,
"is_memory": "disable",
"is_tools_in_sys_prompt": "disable",
"is_locked": "disable",
"main_brain": "enable",
"max_length": 2048,
"file_content": [
"89",
0
],
"base_url": "",
"api_key": "",
"imgbb_api_key": ""
},
"class_type": "LLM"
},
"87": {
"inputs": {
"path": "analyse.txt",
"is_enable": false,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"89": {
"inputs": {
"is_enable": true,
"file1": [
"87",
0
],
"file2": [
"90",
0
],
"file3": [
"91",
0
],
"file4": [
"92",
0
],
"file5": [
"93",
0
]
},
"class_type": "file_combine_plus"
},
"90": {
"inputs": {
"path": "yixiang.txt",
"is_enable": true,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"91": {
"inputs": {
"path": "cipai.txt",
"is_enable": true,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"92": {
"inputs": {
"path": "words.txt",
"is_enable": false,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"93": {
"inputs": {
"path": "sdrule.txt",
"is_enable": true,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"94": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"70",
0
]
},
"class_type": "SaveImage"
"class_type": "SplitImageWithAlpha"
}
}
File diff suppressed because it is too large
@@ -1,65 +1,16 @@
const upScaleAPIFormatJSON = {
"4": {
"3": {
"inputs": {
"ckpt_name": "xxmix9realisticsdxl_testV20.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"6": {
"inputs": {
"text": "coloured glaze, jade, glass, glasssculpture, transparent, translucent, Porcelain doll, Porcelain, Glazed Artwork, Ceramic, \n8k,ultra detailed, beautiful and aesthetic, masterpiece, best quality,",
"clip": [
"222",
0
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "nsfw, paintings, cartoon, anime, sketches, worst quality, low quality, normal quality, lowres, watermark, monochrome, grayscale, ugly, blurry, Tan skin, dark skin, black skin, skin spots, skin blemishes, age spot, glans, disabled, distorted, bad anatomy, morbid, malformation, amputation, bad proportions, twins, missing body, fused body, extra head, poorly drawn face, bad eyes, deformed eye, unclear eyes, cross-eyed, long neck, malformed limbs, extra limbs, extra arms, missing arms, bad tongue, strange fingers, mutated hands, missing hands, poorly drawn hands, extra hands, fused hands, connected hand, bad hands, wrong fingers, missing fingers, extra fingers, 4 fingers, 3 fingers, deformed hands, extra legs, bad legs, many legs, more than two legs, bad feet, wrong feet, extra feets,",
"clip": [
"222",
0
]
},
"class_type": "CLIPTextEncode"
},
"11": {
"inputs": {
"pixels": [
"218",
"seed": 0,
"steps": 30,
"cfg": 6.5,
"sampler_name": "dpmpp_2m",
"scheduler": "karras",
"denoise": 1,
"model": [
"15",
0
],
"vae": [
"4",
2
]
},
"class_type": "VAEEncode"
},
"12": {
"inputs": {
"image": "ComfyUI_00498_.png",
"upload": "image"
},
"class_type": "LoadImage"
},
"15": {
"inputs": {
"amount": 1,
"samples": [
"11",
0
]
},
"class_type": "RepeatLatentBatch"
},
"28": {
"inputs": {
"strength": 1,
"start_percent": 0,
"end_percent": 1,
"positive": [
"6",
0
@@ -68,222 +19,57 @@ const upScaleAPIFormatJSON = {
"7",
0
],
"control_net": [
"29",
0
],
"image": [
"30",
"latent_image": [
"5",
0
]
},
"class_type": "ACN_AdvancedControlNetApply"
"class_type": "KSampler"
},
"29": {
"4": {
"inputs": {
"control_net_name": "control-lora-canny-rank256.safetensors"
"ckpt_name": "juggernautXL_v9Rdphoto2Lightning.safetensors"
},
"class_type": "ControlNetLoaderAdvanced"
"class_type": "CheckpointLoaderSimple"
},
"30": {
"5": {
"inputs": {
"preprocessor": "CannyEdgePreprocessor",
"resolution": 1024,
"image": [
"218",
0
]
"width": 1024,
"height": 1024,
"batch_size": 1
},
"class_type": "AIO_Preprocessor"
"class_type": "EmptyLatentImage"
},
"34": {
"6": {
"inputs": {
"preset": "PLUS (high strength)",
"model": [
"94",
0
]
},
"class_type": "IPAdapterUnifiedLoader"
},
"47": {
"inputs": {
"filename_prefix": "image_",
"images": [
"250",
0
]
},
"class_type": "SaveImage"
},
"92": {
"inputs": {
"switch_1": "On",
"lora_name_1": "琉璃少女xl1V10.safetensors",
"model_weight_1": 1,
"clip_weight_1": 1,
"switch_2": "On",
"lora_name_2": "sdxl_glass.safetensors",
"model_weight_2": 0.8,
"clip_weight_2": 1,
"switch_3": "On",
"lora_name_3": "jade.safetensors",
"model_weight_3": 0.9,
"clip_weight_3": 1
},
"class_type": "CR LoRA Stack"
},
"94": {
"inputs": {
"model": [
"4",
"text": [
"18",
0
],
"clip": [
"4",
1
],
"lora_stack": [
"92",
0
]
},
"class_type": "CR Apply LoRA Stack"
"class_type": "CLIPTextEncode"
},
"97": {
"7": {
"inputs": {
"images": [
"30",
0
]
},
"class_type": "PreviewImage"
},
"98": {
"inputs": {
"weight_style": 0.8,
"weight_composition": 1,
"expand_style": false,
"combine_embeds": "concat",
"start_at": 0,
"end_at": 1,
"embeds_scaling": "V only",
"model": [
"34",
0
],
"ipadapter": [
"34",
"text": [
"17",
1
],
"image_style": [
"218",
0
],
"image_composition": [
"218",
0
],
"image_negative": [
"177",
0
]
},
"class_type": "IPAdapterStyleComposition"
},
"105": {
"inputs": {
"seed": 1064298728955951
},
"class_type": "Seed (rgthree)"
},
"108": {
"inputs": {
"strength": 0.65,
"start_percent": 0,
"end_percent": 0.9,
"positive": [
"28",
0
],
"negative": [
"28",
"clip": [
"4",
1
],
"control_net": [
"109",
0
],
"image": [
"110",
0
]
},
"class_type": "ACN_AdvancedControlNetApply"
"class_type": "CLIPTextEncode"
},
"109": {
"inputs": {
"control_net_name": "control-lora-depth-rank256.safetensors"
},
"class_type": "ControlNetLoaderAdvanced"
},
"110": {
"inputs": {
"preprocessor": "DepthAnythingPreprocessor",
"resolution": 1024,
"image": [
"218",
0
]
},
"class_type": "AIO_Preprocessor"
},
"111": {
"inputs": {
"images": [
"110",
0
]
},
"class_type": "PreviewImage"
},
"143": {
"inputs": {
"add_noise": "enable",
"noise_seed": [
"105",
0
],
"steps": 10,
"cfg": 2,
"sampler_name": "dpmpp_sde",
"scheduler": "karras",
"start_at_step": 3,
"end_at_step": 10,
"return_with_leftover_noise": "enable",
"model": [
"98",
0
],
"positive": [
"108",
0
],
"negative": [
"108",
1
],
"latent_image": [
"15",
0
]
},
"class_type": "KSamplerAdvanced"
},
"144": {
"8": {
"inputs": {
"samples": [
"143",
"3",
0
],
"vae": [
@@ -293,119 +79,74 @@ const upScaleAPIFormatJSON = {
},
"class_type": "VAEDecode"
},
"177": {
"9": {
"inputs": {
"type": "shuffle",
"strength": 0.85,
"blur": 2,
"image_optional": [
"218",
"filename_prefix": "IPAdapter",
"images": [
"8",
0
]
},
"class_type": "IPAdapterNoise"
"class_type": "SaveImage"
},
"218": {
"11": {
"inputs": {
"upscale_method": "lanczos",
"width": 1024,
"height": 0,
"crop": "disabled",
"image": [
"preset": "PLUS (high strength)",
"model": [
"4",
0
]
},
"class_type": "IPAdapterUnifiedLoader"
},
"12": {
"inputs": {
"image": "azalea,cuckoo4.png",
"upload": "image"
},
"class_type": "LoadImage"
},
"15": {
"inputs": {
"weight_style": 0.45,
"weight_composition": 1,
"expand_style": false,
"combine_embeds": "average",
"start_at": 0,
"end_at": 1,
"embeds_scaling": "V only",
"model": [
"11",
0
],
"ipadapter": [
"11",
1
],
"image_style": [
"16",
0
],
"image_composition": [
"12",
0
]
},
"class_type": "ImageScale"
"class_type": "IPAdapterStyleComposition"
},
"222": {
"16": {
"inputs": {
"stop_at_clip_layer": -2,
"clip": [
"94",
1
]
"image": "ART DIR Are.na-07.webp",
"upload": "image"
},
"class_type": "CLIPSetLastLayer"
"class_type": "LoadImage"
},
"226": {
"17": {
"inputs": {
"temperature": 0,
"hue": 0,
"brightness": 7,
"contrast": 10,
"saturation": 20,
"gamma": 1,
"image": [
"334",
0
]
},
"class_type": "ColorCorrect"
},
"250": {
"inputs": {
"black_level": 12,
"mid_level": 127.5,
"white_level": 255,
"image": [
"226",
0
]
},
"class_type": "Image Levels Adjustment"
},
"321": {
"inputs": {
"images": [
"250",
0
]
},
"class_type": "PreviewImage"
},
"331": {
"inputs": {
"upscale_model": [
"333",
"text_positive": [
"21",
0
],
"image": [
"144",
0
]
},
"class_type": "ImageUpscaleWithModel"
},
"333": {
"inputs": {
"model_name": "4x-UltraSharp.pth"
},
"class_type": "UpscaleModelLoader"
},
"334": {
"inputs": {
"upscale_method": "lanczos",
"scale_by": 0.5,
"image": [
"331",
0
]
},
"class_type": "ImageScaleBy"
},
"335": {
"inputs": {
"images": [
"144",
0
]
},
"class_type": "PreviewImage"
},
"405": {
"inputs": {
"text_positive": "",
"text_negative": "",
"style": "base",
"log_prompt": true,
@@ -413,9 +154,23 @@ const upScaleAPIFormatJSON = {
"style_negative": true
},
"class_type": "SDXLPromptStyler"
},
"18": {
"inputs": {
"text": [
"17",
0
]
},
"class_type": "ShowText|pysssss"
},
"21": {
"inputs": {
"positive": ""
},
"class_type": "easy positive"
}
}

export {
upScaleAPIFormatJSON
}
@@ -1,3 +1,2 @@
let seed = 0;
let strength = 1;
export { seed, strength };
const globalMap = {step1url: ""};
export { globalMap };
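The seed module now exports one mutable object instead of re-exported let bindings; every importer shares the same object reference, so a property written by the txt2img handler is visible to the txt23d handler. A short illustrative sketch (the property name step1url is from the diff; the function names are not):

const globalMap = { step1url: "" };
export { globalMap };

// Producer side (txt2img handler in this commit):
export function recordStep1(filename: string): void {
  globalMap.step1url = filename;
}

// Consumer side (txt23d handler in this commit):
export function step1ViewUrl(baseUrl: string): string {
  return baseUrl + "/view?filename=" + globalMap.step1url;
}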