This commit is contained in:
zjt
2024-05-21 21:59:30 +08:00
commit bfbed61780
21 changed files with 114397 additions and 0 deletions

55
src/biz/txt23d.ts Normal file

@@ -0,0 +1,55 @@
import { RequestHandler, Txt2ImgRequest } from "../type/request";
import { txt23dApiFormatJSON } from "../comfyJson/txt23d";
import axios from "axios";
import { selectNodeFromApiJSONbyID } from "../utils/editComfyJson";
import WebSocket from "ws";
// const baseUrl = "http://47.108.92.176:20000";
// const baseWsUrl = "ws://47.108.92.176:20000";
const baseUrl = "http://localhost:8188";
const baseWsUrl = "ws://localhost:8188";
axios.defaults.baseURL = baseUrl;
const Txt23DHandler: RequestHandler<Txt2ImgRequest, any> = async (ctx) => {
    ctx.set('Access-Control-Allow-Origin', '*')
    ctx.set('Access-Control-Allow-Headers', 'Content-Type,Content-Length,Authorization,Accept,X-Requested-With')
    ctx.set('Access-Control-Allow-Methods', 'PUT,POST,GET,DELETE,OPTIONS')
    if (ctx.method === 'OPTIONS') {
        ctx.status = 200;
        return;
    }
    const requestBody = ctx.request.body;
    const { prompt, loraDetail, loraModel } = requestBody;
    // Patch the prompt into node "33" (Simple String) of the text-to-3D workflow.
    const inputNode = selectNodeFromApiJSONbyID(txt23dApiFormatJSON, "33");
    inputNode.inputs.string = prompt;
    ctx.body = {
        // Wait for the workflow to finish, then return the ComfyUI /view URL of the mesh.
        url: baseUrl + '/view?filename=' + await new Promise((resolve, reject) => {
            const taskID = Math.random().toFixed(10);
            const ws = new WebSocket(`${baseWsUrl}/ws?clientId=${taskID}`);
            ws.onopen = () => {
                // Submit the workflow; reject if the HTTP request fails.
                axios.post("/prompt", {
                    client_id: taskID,
                    prompt: txt23dApiFormatJSON,
                    // extra_data: txt2imgAPIformatExtraData
                }).catch((error) => {
                    console.log(error);
                    reject(error);
                });
                ws.onmessage = (event) => {
                    if (typeof event.data === "string") {
                        const { type, data } = JSON.parse(event.data);
                        if (type === "executed") {
                            console.log(data.output);
                            // Node "13" (TripoSRViewer) emits the generated mesh file.
                            if (data.node === '13') {
                                ws.close();
                                resolve(data.output.mesh[0].filename);
                            }
                        }
                    }
                };
            };
        })
    }
}
export default Txt23DHandler;

64
src/biz/txt2img.ts Normal file

@@ -0,0 +1,64 @@
import { RequestHandler, Txt2ImgRequest } from "../type/request";
import { txt2imgAPIformatJSON, txt2imgAPIformatExtraData } from "../comfyJson/txt2img";
import axios from "axios";
import { selectNodeFromApiJSONbyID } from "../utils/editComfyJson";
import WebSocket from "ws";
// const baseUrl = "http://47.108.92.176:20000";
// const baseWsUrl = "ws://47.108.92.176:20000";
const baseUrl = "http://localhost:8188";
const baseWsUrl = "ws://localhost:8188";
axios.defaults.baseURL = baseUrl;
const Txt2ImgHandler: RequestHandler<Txt2ImgRequest, any> = async (ctx, next) => {
    console.log(ctx.method);
    ctx.set('Access-Control-Allow-Origin', '*')
    ctx.set('Access-Control-Allow-Headers', 'Content-Type,Content-Length,Authorization,Accept,X-Requested-With')
    ctx.set('Access-Control-Allow-Methods', 'PUT,POST,GET,DELETE,OPTIONS')
    if (ctx.method === 'OPTIONS') {
        ctx.status = 200;
        return;
    }
    const requestBody = ctx.request.body;
    const { prompt, loraDetail, loraModel } = requestBody;
    // Patch the prompt into node "33" (Simple String) of the text-to-image workflow.
    const inputNode = selectNodeFromApiJSONbyID(txt2imgAPIformatJSON, "33");
    inputNode.inputs.string = prompt;
    // Holds the LLM-expanded prompt emitted by node "21" before the image is saved.
    let expandedPrompt = "";
    ctx.body = {
        data: await new Promise((resolve, reject) => {
            const taskID = Math.random().toFixed(10);
            const ws = new WebSocket(`${baseWsUrl}/ws?clientId=${taskID}`);
            ws.onopen = () => {
                // Submit the workflow; reject if the HTTP request fails.
                axios.post("/prompt", {
                    client_id: taskID,
                    prompt: txt2imgAPIformatJSON,
                    // extra_data: txt2imgAPIformatExtraData
                }).catch((error) => {
                    console.log(error);
                    reject(error);
                });
                ws.onmessage = (event) => {
                    if (typeof event.data === "string") {
                        const { type, data } = JSON.parse(event.data);
                        if (type === "executed") {
                            // Node "21" (ShowText) carries the expanded/styled prompt.
                            if (data.node === '21') {
                                expandedPrompt = data.output.text[0];
                            }
                            // Node "94" (SaveImage) signals that the final image is ready.
                            if (data.node === '94') {
                                ws.close();
                                resolve({ prompt: expandedPrompt, url: baseUrl + '/view?filename=' + data.output.images[0].filename });
                            }
                        }
                    }
                };
            };
        })
    }
}
export default Txt2ImgHandler;
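For reference, the handlers in this commit (including UpscaleHandler below) only read a few fields from the ComfyUI WebSocket stream. A minimal sketch of the "executed" message shape they rely on follows; the field names are inferred from the handler code rather than from an official ComfyUI type definition, so treat them as assumptions.

// Assumed shape of the "executed" WebSocket message consumed by the handlers (sketch only).
type ComfyExecutedMessage = {
    type: "executed";
    data: {
        node: string;                            // id of the node that just finished, e.g. "94"
        output: {
            text?: string[];                     // ShowText output (node "21" in txt2img)
            images?: { filename: string }[];     // SaveImage output (nodes "94" and "47")
            mesh?: { filename: string }[];       // TripoSRViewer output (node "13" in txt23d)
        };
    };
};

Messages of any other type (queue, progress, and preview updates) are ignored by the handlers.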

55
src/biz/upScale.ts Normal file

@@ -0,0 +1,55 @@
import { RequestHandler, Txt2ImgRequest } from "../type/request";
import { upScaleAPIFormatJSON } from "../comfyJson/upscale";
import axios from "axios";
import { selectNodeFromApiJSONbyID } from "../utils/editComfyJson";
import WebSocket from "ws";
// const baseUrl = "http://47.108.92.176:20000";
// const baseWsUrl = "ws://47.108.92.176:20000";
const baseUrl = "http://localhost:8188";
const baseWsUrl = "ws://localhost:8188";
axios.defaults.baseURL = baseUrl;
const UpscaleHandler: RequestHandler<any, any> = async (ctx) => {
    ctx.set('Access-Control-Allow-Origin', '*')
    ctx.set('Access-Control-Allow-Headers', 'Content-Type,Content-Length,Authorization,Accept,X-Requested-With')
    ctx.set('Access-Control-Allow-Methods', 'PUT,POST,GET,DELETE,OPTIONS')
    if (ctx.method === 'OPTIONS') {
        ctx.status = 200;
        return;
    }
    const requestBody = ctx.request.body;
    const { prompt, url } = requestBody;
    // Patch the source image into node "12" (LoadImage) of the upscale workflow.
    const inputNode = selectNodeFromApiJSONbyID(upScaleAPIFormatJSON, "12");
    inputNode.inputs.image = url;
    ctx.body = {
        // Wait for the workflow to finish, then return the ComfyUI /view URL of the upscaled image.
        url: baseUrl + '/view?filename=' + await new Promise((resolve, reject) => {
            const taskID = Math.random().toFixed(10);
            const ws = new WebSocket(`${baseWsUrl}/ws?clientId=${taskID}`);
            ws.onopen = () => {
                // Submit the workflow; reject if the HTTP request fails.
                axios.post("/prompt", {
                    client_id: taskID,
                    prompt: upScaleAPIFormatJSON,
                    // extra_data: txt2imgAPIformatExtraData
                }).catch((error) => {
                    console.log(error);
                    reject(error);
                });
                ws.onmessage = (event) => {
                    if (typeof event.data === "string") {
                        const { type, data } = JSON.parse(event.data);
                        if (type === "executed") {
                            console.log(data.output);
                            // Node "47" (SaveImage) emits the upscaled image file.
                            if (data.node === '47') {
                                ws.close();
                                resolve(data.output.images[0].filename);
                            }
                        }
                    }
                };
            };
        })
    }
}
export default UpscaleHandler;
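All three handlers repeat the same submit-and-wait pattern: open a WebSocket to ComfyUI, POST the workflow to /prompt with the same clientId, and resolve once the target node reports an "executed" message. A minimal sketch of a shared helper that could factor this out; the name waitForNodeOutput and its signature are hypothetical and not part of this commit.

import axios from "axios";
import WebSocket from "ws";

// Hypothetical helper (not in this commit): submit a ComfyUI API-format workflow
// and resolve with the "executed" output of the given node id.
const waitForNodeOutput = (baseWsUrl: string, workflow: any, nodeId: string): Promise<any> =>
    new Promise((resolve, reject) => {
        const clientId = Math.random().toFixed(10);
        const ws = new WebSocket(`${baseWsUrl}/ws?clientId=${clientId}`);
        ws.onopen = () => {
            // axios.defaults.baseURL already points at the ComfyUI HTTP endpoint.
            axios.post("/prompt", { client_id: clientId, prompt: workflow }).catch(reject);
        };
        ws.onmessage = (event) => {
            if (typeof event.data !== "string") return;
            const { type, data } = JSON.parse(event.data);
            if (type === "executed" && data.node === nodeId) {
                ws.close();
                resolve(data.output);
            }
        };
        ws.onerror = (err) => reject(err);
    });

With such a helper, Txt23DHandler reduces to waiting for node "13" and reading output.mesh[0].filename, and UpscaleHandler to node "47" and output.images[0].filename; Txt2ImgHandler also captures the node "21" text along the way, so it would need a small extension.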

328
src/comfyJson/txt23d.ts Normal file

@@ -0,0 +1,328 @@
const txt23dApiFormatJSON = {
"10": {
"inputs": {
"seed": 374404110666628,
"steps": 8,
"cfg": 8,
"sampler_name": "ddim",
"scheduler": "normal",
"denoise": 1,
"preview_method": "auto",
"vae_decode": "true",
"model": [
"69",
0
],
"positive": [
"20",
1
],
"negative": [
"20",
2
],
"latent_image": [
"20",
3
],
"optional_vae": [
"20",
4
]
},
"class_type": "KSampler (Efficient)"
},
"11": {
"inputs": {
"switch_1": "On",
"lora_name_1": "form1024-xl\\form1024-xl-000006.safetensors",
"model_weight_1": 1,
"clip_weight_1": 1,
"switch_2": "On",
"lora_name_2": "Hyper-SD\\Hyper-SDXL-8steps-CFG-lora.safetensors",
"model_weight_2": 0.5,
"clip_weight_2": 1,
"switch_3": "Off",
"lora_name_3": "None",
"model_weight_3": 1,
"clip_weight_3": 1
},
"class_type": "CR LoRA Stack"
},
"13": {
"inputs": {
"preview3d": null,
"mesh": [
"14",
0
]
},
"class_type": "TripoSRViewer"
},
"14": {
"inputs": {
"geometry_resolution": 256,
"threshold": 10,
"model": [
"15",
0
],
"reference_image": [
"72",
0
],
"reference_mask": [
"72",
1
]
},
"class_type": "TripoSRSampler"
},
"15": {
"inputs": {
"model": "3D\\TripoSR.ckpt",
"chunk_size": 8192
},
"class_type": "TripoSRModelLoader"
},
"19": {
"inputs": {
"text_positive": [
"32",
0
],
"text_negative": "",
"style": "base",
"log_prompt": false,
"style_positive": true,
"style_negative": true
},
"class_type": "SDXLPromptStyler"
},
"20": {
"inputs": {
"ckpt_name": "sd_xl_base_1.0.safetensors",
"vae_name": "Baked VAE",
"clip_skip": -2,
"lora_name": "None",
"lora_model_strength": 1,
"lora_clip_strength": 1,
"positive": [
"21",
0
],
"negative": "CLIP_NEGATIVE",
"token_normalization": "none",
"weight_interpretation": "comfy",
"empty_latent_width": 1024,
"empty_latent_height": 1024,
"batch_size": 1,
"lora_stack": [
"11",
0
]
},
"class_type": "Efficient Loader"
},
"21": {
"inputs": {
"text": [
"19",
0
]
},
"class_type": "ShowText|pysssss"
},
"22": {
"inputs": {
"text": [
"19",
1
]
},
"class_type": "ShowText|pysssss"
},
"32": {
"inputs": {
"text": [
"85",
0
]
},
"class_type": "ShowText|pysssss"
},
"33": {
"inputs": {
"string": "留春xiao不住费尽莺儿语"
},
"class_type": "Simple String"
},
"46": {
"inputs": {
"image": "329247709_425681693072936_3081671045029849121_nstp=dst-jpg_e3.jpg",
"upload": "image"
},
"class_type": "LoadImage"
},
"68": {
"inputs": {
"image": [
"46",
0
]
},
"class_type": "CLIPVisionEncode"
},
"69": {
"inputs": {
"config": "SDXL, Attention Injection",
"weight": 1,
"model": [
"20",
0
]
},
"class_type": "LayeredDiffusionApply"
},
"70": {
"inputs": {
"sd_version": "SDXL",
"sub_batch_size": 16,
"samples": [
"10",
3
],
"images": [
"10",
5
]
},
"class_type": "LayeredDiffusionDecodeRGBA"
},
"72": {
"inputs": {
"sd_version": "SDXL",
"sub_batch_size": 16,
"samples": [
"10",
3
],
"images": [
"10",
5
]
},
"class_type": "LayeredDiffusionDecode"
},
"85": {
"inputs": {
"text": [
"86",
0
]
},
"class_type": "ShowText|pysssss"
},
"86": {
"inputs": {
"system_prompt": "你是一个懂宋词分析的古诗词专家我会给你一句宋词。请你分析古诗中的意象按照重要性排序并参照文件列出只英文的意象prompt。注意只需要英文请控制输出长度在256token以内。",
"user_prompt": [
"33",
0
],
"model_name": "glm-4",
"temperature": 0.7,
"is_memory": "disable",
"is_tools_in_sys_prompt": "disable",
"is_locked": "disable",
"main_brain": "enable",
"max_length": 2048,
"file_content": [
"89",
0
],
"base_url": "",
"api_key": "",
"imgbb_api_key": ""
},
"class_type": "LLM"
},
"87": {
"inputs": {
"path": "analyse.txt",
"is_enable": false,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"89": {
"inputs": {
"is_enable": true,
"file1": [
"87",
0
],
"file2": [
"90",
0
],
"file3": [
"91",
0
],
"file4": [
"92",
0
],
"file5": [
"93",
0
]
},
"class_type": "file_combine_plus"
},
"90": {
"inputs": {
"path": "yixiang.txt",
"is_enable": true,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"91": {
"inputs": {
"path": "cipai.txt",
"is_enable": true,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"92": {
"inputs": {
"path": "words.txt",
"is_enable": false,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"93": {
"inputs": {
"path": "sdrule.txt",
"is_enable": true,
"path_type": "Relative_Path"
},
"class_type": "load_file"
},
"94": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"70",
0
]
},
"class_type": "SaveImage"
}
}
export { txt23dApiFormatJSON };

3604
src/comfyJson/txt2img.ts Normal file

File diff suppressed because it is too large

421
src/comfyJson/upscale.ts Normal file

@@ -0,0 +1,421 @@
const upScaleAPIFormatJSON = {
"4": {
"inputs": {
"ckpt_name": "xxmix9realisticsdxl_testV20.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"6": {
"inputs": {
"text": "coloured glaze, jade, glass, glasssculpture, transparent, translucent, Porcelain doll, Porcelain, Glazed Artwork, Ceramic, \n8k,ultra detailed, beautiful and aesthetic, masterpiece, best quality,",
"clip": [
"222",
0
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "nsfw, paintings, cartoon, anime, sketches, worst quality, low quality, normal quality, lowres, watermark, monochrome, grayscale, ugly, blurry, Tan skin, dark skin, black skin, skin spots, skin blemishes, age spot, glans, disabled, distorted, bad anatomy, morbid, malformation, amputation, bad proportions, twins, missing body, fused body, extra head, poorly drawn face, bad eyes, deformed eye, unclear eyes, cross-eyed, long neck, malformed limbs, extra limbs, extra arms, missing arms, bad tongue, strange fingers, mutated hands, missing hands, poorly drawn hands, extra hands, fused hands, connected hand, bad hands, wrong fingers, missing fingers, extra fingers, 4 fingers, 3 fingers, deformed hands, extra legs, bad legs, many legs, more than two legs, bad feet, wrong feet, extra feets,",
"clip": [
"222",
0
]
},
"class_type": "CLIPTextEncode"
},
"11": {
"inputs": {
"pixels": [
"218",
0
],
"vae": [
"4",
2
]
},
"class_type": "VAEEncode"
},
"12": {
"inputs": {
"image": "ComfyUI_00498_.png",
"upload": "image"
},
"class_type": "LoadImage"
},
"15": {
"inputs": {
"amount": 1,
"samples": [
"11",
0
]
},
"class_type": "RepeatLatentBatch"
},
"28": {
"inputs": {
"strength": 1,
"start_percent": 0,
"end_percent": 1,
"positive": [
"6",
0
],
"negative": [
"7",
0
],
"control_net": [
"29",
0
],
"image": [
"30",
0
]
},
"class_type": "ACN_AdvancedControlNetApply"
},
"29": {
"inputs": {
"control_net_name": "control-lora-canny-rank256.safetensors"
},
"class_type": "ControlNetLoaderAdvanced"
},
"30": {
"inputs": {
"preprocessor": "CannyEdgePreprocessor",
"resolution": 1024,
"image": [
"218",
0
]
},
"class_type": "AIO_Preprocessor"
},
"34": {
"inputs": {
"preset": "PLUS (high strength)",
"model": [
"94",
0
]
},
"class_type": "IPAdapterUnifiedLoader"
},
"47": {
"inputs": {
"filename_prefix": "image_",
"images": [
"250",
0
]
},
"class_type": "SaveImage"
},
"92": {
"inputs": {
"switch_1": "On",
"lora_name_1": "琉璃少女xl1V10.safetensors",
"model_weight_1": 1,
"clip_weight_1": 1,
"switch_2": "On",
"lora_name_2": "sdxl_glass.safetensors",
"model_weight_2": 0.8,
"clip_weight_2": 1,
"switch_3": "On",
"lora_name_3": "jade.safetensors",
"model_weight_3": 0.9,
"clip_weight_3": 1
},
"class_type": "CR LoRA Stack"
},
"94": {
"inputs": {
"model": [
"4",
0
],
"clip": [
"4",
1
],
"lora_stack": [
"92",
0
]
},
"class_type": "CR Apply LoRA Stack"
},
"97": {
"inputs": {
"images": [
"30",
0
]
},
"class_type": "PreviewImage"
},
"98": {
"inputs": {
"weight_style": 0.8,
"weight_composition": 1,
"expand_style": false,
"combine_embeds": "concat",
"start_at": 0,
"end_at": 1,
"embeds_scaling": "V only",
"model": [
"34",
0
],
"ipadapter": [
"34",
1
],
"image_style": [
"218",
0
],
"image_composition": [
"218",
0
],
"image_negative": [
"177",
0
]
},
"class_type": "IPAdapterStyleComposition"
},
"105": {
"inputs": {
"seed": 1064298728955951
},
"class_type": "Seed (rgthree)"
},
"108": {
"inputs": {
"strength": 0.65,
"start_percent": 0,
"end_percent": 0.9,
"positive": [
"28",
0
],
"negative": [
"28",
1
],
"control_net": [
"109",
0
],
"image": [
"110",
0
]
},
"class_type": "ACN_AdvancedControlNetApply"
},
"109": {
"inputs": {
"control_net_name": "control-lora-depth-rank256.safetensors"
},
"class_type": "ControlNetLoaderAdvanced"
},
"110": {
"inputs": {
"preprocessor": "DepthAnythingPreprocessor",
"resolution": 1024,
"image": [
"218",
0
]
},
"class_type": "AIO_Preprocessor"
},
"111": {
"inputs": {
"images": [
"110",
0
]
},
"class_type": "PreviewImage"
},
"143": {
"inputs": {
"add_noise": "enable",
"noise_seed": [
"105",
0
],
"steps": 10,
"cfg": 2,
"sampler_name": "dpmpp_sde",
"scheduler": "karras",
"start_at_step": 3,
"end_at_step": 10,
"return_with_leftover_noise": "enable",
"model": [
"98",
0
],
"positive": [
"108",
0
],
"negative": [
"108",
1
],
"latent_image": [
"15",
0
]
},
"class_type": "KSamplerAdvanced"
},
"144": {
"inputs": {
"samples": [
"143",
0
],
"vae": [
"4",
2
]
},
"class_type": "VAEDecode"
},
"177": {
"inputs": {
"type": "shuffle",
"strength": 0.85,
"blur": 2,
"image_optional": [
"218",
0
]
},
"class_type": "IPAdapterNoise"
},
"218": {
"inputs": {
"upscale_method": "lanczos",
"width": 1024,
"height": 0,
"crop": "disabled",
"image": [
"12",
0
]
},
"class_type": "ImageScale"
},
"222": {
"inputs": {
"stop_at_clip_layer": -2,
"clip": [
"94",
1
]
},
"class_type": "CLIPSetLastLayer"
},
"226": {
"inputs": {
"temperature": 0,
"hue": 0,
"brightness": 7,
"contrast": 10,
"saturation": 20,
"gamma": 1,
"image": [
"334",
0
]
},
"class_type": "ColorCorrect"
},
"250": {
"inputs": {
"black_level": 12,
"mid_level": 127.5,
"white_level": 255,
"image": [
"226",
0
]
},
"class_type": "Image Levels Adjustment"
},
"321": {
"inputs": {
"images": [
"250",
0
]
},
"class_type": "PreviewImage"
},
"331": {
"inputs": {
"upscale_model": [
"333",
0
],
"image": [
"144",
0
]
},
"class_type": "ImageUpscaleWithModel"
},
"333": {
"inputs": {
"model_name": "4x-UltraSharp.pth"
},
"class_type": "UpscaleModelLoader"
},
"334": {
"inputs": {
"upscale_method": "lanczos",
"scale_by": 0.5,
"image": [
"331",
0
]
},
"class_type": "ImageScaleBy"
},
"335": {
"inputs": {
"images": [
"144",
0
]
},
"class_type": "PreviewImage"
},
"405": {
"inputs": {
"text_positive": "",
"text_negative": "",
"style": "base",
"log_prompt": true,
"style_positive": true,
"style_negative": true
},
"class_type": "SDXLPromptStyler"
}
}
export { upScaleAPIFormatJSON };

19
src/routers/handler.ts Normal file

@@ -0,0 +1,19 @@
import Txt23DHandler from "../biz/txt23d";
import Txt2ImgHandler from "../biz/txt2img";
import UpscaleHandler from "../biz/upScale";
import { RegistHandler } from "../utils/register";
// Home page route (not registered in InitHandler below)
const HelloworldHandler = async (ctx: any) => {
ctx.body = "helloworld";
}
const InitHandler = () => {
RegistHandler("post", "/txt2img", Txt2ImgHandler);
RegistHandler("options", "/txt2img", Txt2ImgHandler);
RegistHandler("post", "/txt23d", Txt23DHandler);
RegistHandler("options", "/txt23d", Txt23DHandler);
RegistHandler("options", "/upscale", UpscaleHandler);
RegistHandler("post", "/upscale", UpscaleHandler);
}
export default InitHandler;
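For reference, a sketch of how a client could call the routes registered above, assuming the server listens on port 3000 as configured in src/server.ts below, a runtime with a global fetch, and the request/response shapes defined by Txt2ImgRequest and Txt2ImgHandler; none of this client code is part of the commit.

// Hypothetical client call: generate an image from a prompt via the /txt2img route.
const callTxt2Img = async (prompt: string): Promise<string> => {
    const res = await fetch("http://localhost:3000/txt2img", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ prompt, loraDetail: 1, loraModel: 1 }),
    });
    const { data } = await res.json();   // { prompt, url } as resolved by Txt2ImgHandler
    return data.url;                     // ComfyUI /view URL of the generated image
};

The /txt23d and /upscale routes follow the same POST-with-JSON pattern and return { url } directly.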

3
src/routers/router.ts Normal file

@@ -0,0 +1,3 @@
import Router from "koa-router";
const router = new Router();
export default router;

17
src/server.ts Normal file

@@ -0,0 +1,17 @@
import Koa from "koa";
import router from "./routers/router";
import bodyParser from "koa-bodyparser";
import InitHandler from "./routers/handler";
// Create the Koa app and wire up the router
const app = new Koa();
app.use(bodyParser());
app.use(router.routes()).use(router.allowedMethods());
InitHandler();
console.log(router);
// Start the server
const PORT = 3000;
app.listen(PORT, () => {
console.log(`Server is running on http://localhost:${PORT}`);
});

8
src/type/request.ts Normal file

@@ -0,0 +1,8 @@
import Router from "koa-router";
import Koa, { ParameterizedContext } from "koa";

// A Koa request handler whose request body is typed as U and whose resolved value is T.
export type RequestHandler<U, T> = (
    ctx: ParameterizedContext<any, Router.IRouterParamContext<any, {}>, any> & {
        request: Koa.Request & { body: U }
    },
    next: () => Promise<any>
) => Promise<T>;
export type Txt2ImgRequest = {
prompt: string,
loraDetail: number,
loraModel: number
}

14
src/utils/editComfyJson.ts Normal file

@@ -0,0 +1,14 @@
// Look up a node by its title in a graph-format (UI) workflow JSON.
const selectNodeFromGraphByTitle = (graph: any, title: string): any => {
    for (const node of graph.nodes) {
        if (node.title === title) {
            return node;
        }
    }
    return null;
}
// Look up a node by its id in an API-format workflow JSON (a plain id-keyed object).
const selectNodeFromApiJSONbyID = (json: any, id: string): any => {
    return json[id];
}
export { selectNodeFromGraphByTitle, selectNodeFromApiJSONbyID }
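selectNodeFromGraphByTitle targets the graph (UI/workflow) JSON format, which keeps nodes in an array identified by title, and is not called by any handler in this commit; a minimal usage sketch with a hypothetical graph object follows.

// Hypothetical graph-format workflow object: nodes live in an array and carry titles.
const graph = { nodes: [{ title: "Simple String", inputs: { string: "old prompt" } }] };
const node = selectNodeFromGraphByTitle(graph, "Simple String");
if (node) {
    node.inputs.string = "new prompt";   // patch the found node in place
}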

7
src/utils/register.ts Normal file

@@ -0,0 +1,7 @@
import { RequestHandler } from "../type/request";
import router from "../routers/router";
// Register a handler on the shared koa-router instance for the given method and path.
const RegistHandler = (method: "get" | "post" | "options", url: string, handler: RequestHandler<any, any>) => {
router[method](url, handler as any);
}
export { RegistHandler };