Compare commits


8 Commits

Author   SHA1        Message                                                                    Date
Ra-Liz   0b93ccd0a5  chore'                                                                     2024-07-15 11:25:31 +08:00
Ra-Liz   cd62ab6c9d  added skinNbg ui                                                           2024-07-11 22:24:34 +08:00
zjt      38c01b93d3  Merge pull request 'raliz' (#1) from raliz into master (Reviewed-on: #1)  2024-07-03 10:57:35 +08:00
zjt      611cc184ca  voice                                                                      2024-07-03 10:53:52 +08:00
zjt      201283fd94  voice                                                                      2024-07-01 15:46:22 +08:00
zjt      01271e8ef3  voice                                                                      2024-07-01 15:44:33 +08:00
zjt      e1cbab4d48  canvas                                                                     2024-07-01 15:36:41 +08:00
         ed2869f179  refactor: updated helper page                                              2024-06-25 21:20:05 +08:00
14 changed files with 36216 additions and 3744 deletions


@@ -5,7 +5,12 @@ const nextConfig = {
},
eslint: {
ignoreDuringBuilds: true,
}
},
webpack: (config) => {
config.resolve.fallback = { fs: false };
return config;
},
};
export default nextConfig;

package-lock.json (generated, 1807 changed lines)

File diff suppressed because it is too large.


@@ -23,5 +23,10 @@
"postcss": "^8",
"tailwindcss": "^3.4.1",
"typescript": "^5"
},
"browser": {
"fs": false,
"os": false,
"path": false
}
}


@@ -2,6 +2,10 @@
@tailwind components;
@tailwind utilities;
* {
box-sizing: border-box;
}
:root {
--foreground-rgb: 0, 0, 0;
--background-start-rgb: 214, 219, 220;


@@ -1,222 +1,454 @@
"use client"
import dynamic from 'next/dynamic';
import { useEffect, useRef, useState } from 'react';
import '@/deps/live2d.min.js'
import useVoice2Txt from "@/hooks/useVoice2txt";
import useTxt2Voice from '@/hooks/useTxt2Voice';
import axios from 'axios';
import * as PIXI from 'pixi.js';
import { Live2DModel } from 'pixi-live2d-display/cubism2';
"use client";
import { useEffect, useRef, useState } from "react";
import "@/deps/cubism5.js";
import useVoice2Txt from "@/hooks/useVoice2txt";
import useTxt2Voice from "@/hooks/useTxt2Voice";
import axios from "axios";
import * as PIXI from "pixi.js";
import { Live2DModel } from "pixi-live2d-display/cubism4";
import { useSearchParams } from "next/navigation";
import useRequest from "@/hooks/use-openai-request";
import txt2Voice from "@/hooks/txt2VoiceAPI";
// fake list
const opList = [
{
id: 1,
title: "幸运转盘",
src: "http://picdown.jchysoft.com/uiIcon/xingyunzhuanpan.png",
},
{
id: 2,
title: "领券中心",
src: "http://picdown.jchysoft.com/uiIcon/lingjuanzhongxin.png",
}, // "bjt" typo in the asset name, kept on purpose
{
id: 3,
title: "0元抽",
src: "http://picdown.jchysoft.com/uiIcon/0yuanchou.png",
},
{
id: 4,
title: "欧宝赏",
src: "http://picdown.jchysoft.com/uiIcon/oubanshang.png",
}, // "bjt" typo in the asset name
];
//import "@/deps/live2dcubismcore.min.js"
export default function Home() {
function draggable(model: any) {
model.buttonMode = true;
model.on("pointerdown", (e: any) => {
model.dragging = true;
model._pointerX = e.data.global.x - model.x;
model._pointerY = e.data.global.y - model.y;
});
model.on("pointermove", (e: any) => {
if (model.dragging) {
model.position.x = e.data.global.x - model._pointerX;
model.position.y = e.data.global.y - model._pointerY;
}
});
model.on("pointerupoutside", () => (model.dragging = false));
model.on("pointerup", () => (model.dragging = false));
}
function addFrame(model: any) {
const foreground = PIXI.Sprite.from(PIXI.Texture.WHITE);
foreground.width = model.internalModel.width;
foreground.height = model.internalModel.height;
foreground.alpha = 0.2;
model.addChild(foreground);
checkbox("Model Frames", (checked: any) => (foreground.visible = checked));
}
function addHitAreaFrames(model: any) {
try {
//@ts-ignore
const hitAreaFrames = new PIXI.live2d.HitAreaFrames();
model.addChild(hitAreaFrames);
checkbox("Hit Area Frames", (checked: any) => (hitAreaFrames.visible = checked));
} catch (err) {
}
}
function checkbox(name: any, onChange: any) {
const id = name.replace(/\W/g, "").toLowerCase();
let checkbox = document.getElementById(id);
if (!checkbox) {
const p = document.createElement("p")!;
p.innerHTML = `<input type="checkbox" id="${id}"> <label for="${id}">${name}</label>`;
document!.getElementById("control")!.appendChild(p);
checkbox = p.firstChild as HTMLElement;
}
checkbox.addEventListener("change", () => {
//@ts-ignore
onChange(checkbox.checked);
});
//@ts-ignore
onChange(checkbox.checked);
}
const send = (inputText: string) => {
setResponse(inputText)
if (!inputText) return;
console.log(inputText)
let data = JSON.stringify({
"messages": [
{
"content": `回答用户的问题,尽可能简短。`,
"role": "system"
},
{
"content": inputText,
"role": "user"
}
],
"model": "deepseek-chat",
"frequency_penalty": 0,
"max_tokens": 2048,
"presence_penalty": 0,
"stop": null,
"stream": false,
"temperature": 1,
"top_p": 1
});
let config = {
method: 'post',
maxBodyLength: Infinity,
url: 'https://api.deepseek.com/chat/completions',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Authorization': 'Bearer sk-dd24ae704e8d4939aeed8f050d04d36b'
},
data: data
};
try {
axios(config)
.then((response) => {
console.log(`response`, response);
console.log(response.data);
typeof speak !== 'undefined' && speak(response.data.choices[0].message.content) || model!.motion('tap_body');;
setResponse(response.data.choices[0].message.content);
})
.catch((error) => {
setResponse(error!.toString());
console.log(error);
});
} catch (error) {
setResponse(error!.toString());
console.log(error);
}
}
const { start, end, text, isListening, error } = useVoice2Txt({
lang: 'cmn-Hans-CN',
continuous: false
});
const {
isSpeaking,
speak,
stop
} = useTxt2Voice();
const [inputText, setInputText] = useState("");
const isMouthOpen = useRef(false);
const [response, setResponse] = useState("");
useEffect(() => {
console.log(text, error)
if (!text) return;
send(text);
if (error) {
setResponse(error);
return;
}
}, [text, error]);
const [model, setModel] = useState<Live2DModel>();
useEffect(() => {
if (!isSpeaking) {
isMouthOpen.current = false;
}
}, [isSpeaking]);
useEffect(() => {
if (!isListening && !isSpeaking) { }
}, [isListening]);
useEffect(() => {
// expose PIXI to window so that this plugin is able to
// reference window.PIXI.Ticker to automatically update Live2D models
//@ts-ignore
typeof window !== 'undefined' && (window.PIXI = PIXI);
(async function () {
const app = new PIXI.Application({
view: document.getElementById('canvas') as HTMLCanvasElement,
backgroundAlpha: 1
});
const model = await Live2DModel.from('https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/shizuku/shizuku.model.json');
app.stage.addChild(model);
const scaleX = (innerWidth * 0.4) / model.width;
const scaleY = (innerHeight * 0.8) / model.height;
// fit the window
model.scale.set(0.3);
model.y = innerHeight * 0.1;
draggable(model);
addFrame(model);
addHitAreaFrames(model);
setModel(model);
model.on('hit', (hitAreas) => {
if (hitAreas.includes('body')) {
model.motion('tap_body');
model.motion('speak')
}
});
})();
}, [])
return (
<main>
{
typeof window !== 'undefined'
&& typeof window.Live2D !== 'undefined'
&& (<div className='flex w-full flex-col h-full items-center justify-center relative'>
<canvas className='w-full h-full' id="canvas"></canvas>
<div className='absolute right-[20vw] top-4 bg-white rounded w-[20vw] h-auto text-sm text-black'>{response ? response : "请输入文字和我聊天吧"}</div>
<div id="control"></div>
<button onClick={start}></button>
<button onClick={end}></button>
<input className='text-black' value={inputText} onChange={(e) => {
setInputText(e.target.value);
console.log(e.target.value)
}}></input>
<button onClick={() => {
send(inputText);
setInputText("");
}}></button>
</div>)
}
</main>
);
}
const [useVoice, setUseVoice] = useState(false);
const query = useSearchParams();
const characterId = query.get("id");
const token = query.get("token");
localStorage.setItem("token", token || "");
const { complete, completion: data, isLoading, abort } = useRequest();
const voice2txt = (txt: string) => {
fetch("sharkapiBaseUrl/voice/txt2voice", {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `${token}`,
},
body: JSON.stringify({
txt: txt,
}),
});
};
function draggable(model: any) {
model.buttonMode = true;
model.on("pointerdown", (e: any) => {
model.dragging = true;
model._pointerX = e.data.global.x - model.x;
model._pointerY = e.data.global.y - model.y;
});
model.on("pointermove", (e: any) => {
if (model.dragging) {
model.position.x = e.data.global.x - model._pointerX;
model.position.y = e.data.global.y - model._pointerY;
}
});
model.on("pointerupoutside", () => (model.dragging = false));
model.on("pointerup", () => (model.dragging = false));
}
function addFrame(model: any) {
const foreground = PIXI.Sprite.from(PIXI.Texture.WHITE);
foreground.width = model.internalModel.width;
foreground.height = model.internalModel.height;
foreground.alpha = 0.2;
model.addChild(foreground);
}
function addHitAreaFrames(model: any) {
try {
//@ts-ignore
const hitAreaFrames = new PIXI.live2d.HitAreaFrames();
model.addChild(hitAreaFrames);
checkbox(
"Hit Area Frames",
(checked: any) => (hitAreaFrames.visible = checked)
);
} catch (err) {}
}
function checkbox(name: any, onChange: any) {
const id = name.replace(/\W/g, "").toLowerCase();
let checkbox = document.getElementById(id);
if (!checkbox) {
const p = document.createElement("p")!;
p.innerHTML = `<input type="checkbox" id="${id}"> <label for="${id}">${name}</label>`;
document!.getElementById("control")!.appendChild(p);
checkbox = p.firstChild as HTMLElement;
}
checkbox.addEventListener("change", () => {
//@ts-ignore
onChange(checkbox.checked);
});
//@ts-ignore
onChange(checkbox.checked);
}
const send = (inputText: string) => {
setResponse(inputText);
if (!inputText) return;
complete(
1,
[],
[
{
content: inputText,
role: "user",
},
]
);
};
useEffect(() => {
(async () => {
if (data) {
setResponse(data);
if (typeof speak !== "undefined" && isLoading === false) {
const base64Voice =
"data:audio/mp3;base64," + (await txt2Voice(data, 4));
const audio = document.createElement("audio");
audio.src = base64Voice;
audio.play();
} else {
model!.motion("tap_body");
}
}
})();
}, [data, isLoading]);
const { start, end, text, isListening, error } = useVoice2Txt({
lang: "cmn-Hans-CN",
continuous: false,
});
const { isSpeaking, speak, stop } = useTxt2Voice();
const [inputText, setInputText] = useState("");
const isMouthOpen = useRef(false);
const [response, setResponse] = useState("来和我聊天吧~");
useEffect(() => {
console.log(text, error);
if (!text) return;
send(text);
// Commented out for now so the message bubble does not display errors
// if (error) {
// setResponse(error);
// return;
// }
}, [text, error]);
const [model, setModel] = useState<Live2DModel>();
useEffect(() => {
if (!isSpeaking) {
isMouthOpen.current = false;
}
}, [isSpeaking]);
useEffect(() => {
if (!isListening && !isSpeaking) {
}
}, [isListening]);
useEffect(() => {
// expose PIXI to window so that this plugin is able to
// reference window.PIXI.Ticker to automatically update Live2D models
//@ts-ignore
typeof window !== "undefined" && (window.PIXI = PIXI);
(async function () {
const app = new PIXI.Application({
view: document.getElementById("canvas") as HTMLCanvasElement,
backgroundAlpha: 0,
});
const model = await Live2DModel.from(
"https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/haru/haru_greeter_t03.model3.json"
);
app.stage.addChild(model);
const scaleX = (innerWidth * 0.4) / model.width;
const scaleY = (innerHeight * 0.8) / model.height;
// fit the window
model.scale.set(0.3);
model.y = innerHeight * 0.1;
draggable(model);
addFrame(model);
addHitAreaFrames(model);
setModel(model);
model.on("hit", (hitAreas) => {
if (hitAreas.includes("body")) {
model.motion("tap_body");
model.motion("speak");
}
});
console.log("ok");
})();
}, []);
// Skin / background switching
const [skinOpen, setSkinOpen] = useState(false);
const [bgOpen, setBgOpen] = useState(false);
return (
// TODO: background state
<main className="w-full h-full bg-[url('http://picdown.jchysoft.com/live2d/beijing/bj1%20%281%29.webp')] bg-center bg-cover">
{typeof window !== "undefined" &&
typeof window.Live2DCubismCore !== "undefined" && (
<div className="flex w-full flex-col h-full items-center justify-center relative text-white">
{/* live2d */}
{/* TODO: live2d model state */}
<canvas className="w-full " id="canvas"></canvas>
{/* Character info */}
<div className="absolute left-[4vw] top-8 bg-gradient-to-r from-start-color to-end-color rounded-full w-[36vw] h-auto text-sm flex flex-nowrap px-2 py-1 gap-2">
<div className="w-10 h-10 rounded-full bg-white"></div>
<div className="text-xl"></div>
{/* <div className="flex flex-col justify-evenly">
<div className="text-xs">亲密度</div>
</div> */}
</div>
{/* Message bubble */}
<div className="absolute left-1/2 -translate-x-1/2 top-[12vh] bg-black bg-opacity-30 rounded-2xl border-2 border-[#FCD161] w-[60vw] h-auto p-2 whitespace-normal break-words">
{response ? response : "请输入文字和我聊天吧"}
</div>
{/* Right-side options */}
<div className="absolute right-0 top-[30vh] w-[16vw] flex flex-col flex-nowrap items-center gap-10">
{opList.map((item) => (
<div
key={item.id}
className="bg-gradient-to-r from-end-color to-start-color rounded-l-full w-full h-auto text-sm flex flex-col flex-nowrap items-center p-1"
>
<div className="w-6 h-6 overflow-hidden">
<img
// The icon-to-whitespace ratio in this image is really off
className="w-full h-full scale-[2.5]"
src={item.src}
alt={item.title}
/>
</div>
<div className="text-xs">{item.title}</div>
</div>
))}
</div>
{/* Skin list */}
{skinOpen && (
<SkinOrBgList
type="skin"
list={fakeList}
onSelect={(type, selectedItem) => {
console.log("选中的新的皮肤", type, selectedItem);
}}
/>
)}
{/* Background list */}
{bgOpen && (
<SkinOrBgList
type="bg"
list={fakeList2}
onSelect={(type, selectedItem) => {
console.log("选中的新的背景", type, selectedItem);
}}
/>
)}
{/* Bottom: change skin & change background */}
<div className="absolute w-[90vw] bottom-14 left-1/2 -translate-x-1/2 flex flex-nowrap items-center justify-between">
<button
className="w-12 h-12 overflow-hidden"
onClick={() => {
console.log("换肤");
if (bgOpen && !skinOpen) {
setBgOpen(false);
}
setSkinOpen(!skinOpen);
}}
>
<img
className="w-full h-full scale-[1.5]"
src={`http://picdown.jchysoft.com/uiIcon/huanfu.png`}
alt="换肤"
/>
</button>
<button
className="w-12 h-12 overflow-hidden"
onClick={() => {
console.log("换背景");
if (skinOpen && !bgOpen) {
setSkinOpen(false);
}
setBgOpen(!bgOpen);
}}
>
<img
className="w-full h-full scale-[1.5]"
src={`http://picdown.jchysoft.com/uiIcon/huanbeijing.png`}
alt="换背景"
/>
</button>
</div>
{/* Bottom message input and send bar */}
<div className="absolute w-[90vw] h-10 bottom-2 left-1/2 -translate-x-1/2 bg-white rounded-xl flex flex-nowrap items-center justify-evenly gap-2 px-2 py-1">
<button
className="w-6 h-6 overflow-hidden"
onClick={() => {
// Toggle between voice and keyboard input
useVoice ? setUseVoice(false) : setUseVoice(true);
}}
>
<img
className="w-full h-full scale-[2.8]"
src={`http://picdown.jchysoft.com/uiIcon/${
useVoice ? "jianpan" : "yuyin"
}.png`}
alt="输入方式"
/>
</button>
{useVoice ? (
<button
onMouseDown={start}
onMouseUp={end}
onTouchStart={start}
onTouchEnd={end}
className="h-full bg-gray-300 flex-1 text-center rounded active:bg-gray-500 focus:outline-none select-none"
>
</button>
) : (
<input
className="h-full text-black rounded flex-1 bg-gray-200 pl-2"
value={inputText}
onChange={(e) => {
setInputText(e.target.value);
console.log(e.target.value);
}}
></input>
)}
<button
className="w-6 h-6 overflow-hidden"
onClick={() => {
send(inputText);
setInputText("");
}}
>
<img
className="w-full h-full scale-[2.5]"
src="http://picdown.jchysoft.com/uiIcon/fasong.png"
alt="发送按钮"
/>
</button>
</div>
</div>
)}
</main>
);
}
const fakeList = [
{ id: 1, src: "", name: "皮肤1", state: 1 },
{ id: 2, src: "", name: "皮肤2", state: 1 },
{ id: 3, src: "", name: "皮肤3", state: 0 },
{ id: 4, src: "", name: "皮肤4", state: 0 },
{ id: 5, src: "", name: "皮肤5", state: 0 },
{ id: 6, src: "", name: "皮肤6", state: 0 },
{ id: 7, src: "", name: "皮肤7", state: 0 },
{ id: 8, src: "", name: "皮肤8", state: 0 },
{ id: 9, src: "", name: "皮肤9", state: 0 },
{ id: 10, src: "", name: "皮肤10", state: 0 },
];
const fakeList2 = [
{ id: 1, src: "", name: "背景1", state: 1 },
{ id: 2, src: "", name: "背景2", state: 1 },
{ id: 3, src: "", name: "背景3", state: 0 },
{ id: 4, src: "", name: "背景4", state: 0 },
{ id: 5, src: "", name: "背景5", state: 0 },
{ id: 6, src: "", name: "背景6", state: 0 },
{ id: 7, src: "", name: "背景7", state: 0 },
{ id: 8, src: "", name: "背景8", state: 0 },
{ id: 9, src: "", name: "背景9", state: 0 },
{ id: 10, src: "", name: "背景10", state: 0 },
];
type listItem = { id: number; src: string; name: string; state: number };
export const SkinOrBgList = ({
type,
list,
onSelect,
}: {
type: "skin" | "bg";
list: listItem[];
onSelect: (type: "skin" | "bg", item: listItem) => void;
}) => {
function handleSelect(selectedItem: listItem) {
if (selectedItem.state == 0) {
console.log("未解锁,不可用");
// TODO: unlock flow
return;
}
onSelect(type, selectedItem);
}
return (
<div className="absolute left-1/2 -translate-x-1/2 bottom-24 mb-4 w-[90vw] h-32 bg-white bg-opacity-30 rounded-2xl py-4 px-6 flex flex-nowrap gap-2 overflow-x-auto">
{list.map((item) => (
<div
onClick={() => handleSelect(item)}
key={item.id}
className="h-full w-24 rounded-2xl border-4 border-dashed border-[#FCD161] bg-orange-100 bg-opacity-10 flex-shrink-0 relative overflow-hidden"
>
{item.name}
<img
className="absolute top-0 left-0 w-full h-full object-cover"
src="https://iconfont.alicdn.com/p/illus/preview_image/eZQFvSX6g8f1/70d91c51-e6b0-408c-84c5-50174d24a059.png"
alt={`${item.name}图片`}
/>
{item.state == 0 && (
<>
<div className="absolute top-0 left-0 w-full h-full bg-white bg-opacity-30"></div>
<img
className="absolute top-1/2 -translate-x-1/2 left-1/2 -translate-y-1/2 z-10 w-1/2 h-auto"
src="http://picdown.jchysoft.com/uiIcon/weijiesuo.png"
alt="未解锁"
/>
</>
)}
</div>
))}
</div>
);
};


@@ -17,7 +17,7 @@ export default function RootLayout({
<head>
{/* <script src="http://publicjs.supmiao.com/live2dcubismcore.min.js"></script> */}
</head>
<body>{children}</body>
<body className="w-screen h-screen">{children}</body>
</html>
);
}

src/consts/consts.ts (new file, 12 lines)

@@ -0,0 +1,12 @@
const apiBaseUrl = 'https://sharkai.data.vaala.tech/v1';
const imBaseUrl = 'http://localhost:3000/';
const chatBaseUrl = 'wss://chatserver.data.vaala.tech/chat/completions';
const ossUrl = 'http://localhost:3000/oss/';
const GlobalData = {
apiBaseUrl,
imBaseUrl,
chatBaseUrl,
ossUrl,
}
export default GlobalData;

src/deps/cubism5.js (new file, 17882 lines)

File diff suppressed because it is too large.

File diff suppressed because one or more lines are too long

src/hooks/txt2VoiceAPI.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import GlobalData from "@/consts/consts";
export default async function txt2Voice(txt: string, person?: number) {
const token = localStorage.getItem('token');
const url = GlobalData.apiBaseUrl + '/voice/txt2voice';
// Send the POST request
const res = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': token || ""
},
body: JSON.stringify({
txt: txt,
person: person
})
});
const data = await res.json();
console.log(data)
return data.body.voiceInBase64 as string;
}


@@ -0,0 +1,109 @@
import { useEffect, useRef, useState } from "react";
import GlobalData from "../consts/consts";
type FailedReason = string
type AIRequest = () => {
isLoading: boolean;
completion: string | null;
error: string | null;
complete: (promptId: number, args: string[], messages?: any[]) => void;
abort: () => void;
}
const useRequest: AIRequest = () => {
// Data returned by the request
const completion = useRef<string>("");
const [data, setData] = useState<string>("");
// Error message returned by the request
const [error, setError] = useState<string | null>(null);
// Loading state of the request
const [isLoading, setLoading] = useState(false);
const openai = useRef<WebSocket | null>(null)
const abort = () => {
if (openai.current) {
openai.current.close();
openai.current = null;
setLoading(false);
}
}
async function complete(promptId: number, args: string[], messages?: any[]) {
const token = localStorage.getItem("token");
try {
setError("");
setData("");
completion.current = "";
if (messages) {
openai.current = new WebSocket(`${GlobalData.chatBaseUrl}?token=${token}&X-AI-Provider=sharkplus`);
openai.current.onopen = () => {
console.log('连接成功');
if (openai.current) {
openai.current.onmessage = (e) => {
if (e.data.toString().trim() != '"[DONE]"') {
const data = JSON.parse(e.data);
if (data.finish_reason) {
setLoading(false);
openai.current.close();
openai.current = null;
} else {
completion.current += data.choices[0]?.delta?.content || "";
setData(completion.current);
}
} else {
setLoading(false);
openai.current.close();
openai.current = null;
}
}
openai.current.send(JSON.stringify({
model: 'deepseek-chat-sharkplus',
stream: true,
temperature: 0.7,
messages: messages,
}))
}
}
} else {
openai.current = new WebSocket(`${GlobalData.chatBaseUrl}?token=${token}&X-AI-Provider=sharkplus`);
openai.current.onopen = () => {
console.log('连接成功');
if (openai.current) {
openai.current.onmessage = (e) => {
if (e.data.toString().trim() != '"[DONE]"') {
const data = JSON.parse(e.data);
if (data.finish_reason) {
setLoading(false);
openai.current.close();
openai.current = null;
} else {
completion.current += data.choices[0]?.delta?.content || "";
setData(completion.current);
}
} else {
setLoading(false);
openai.current.close();
openai.current = null;
}
}
openai.current.send(JSON.stringify({
model: 'deepseek-chat-sharkplus',
stream: true,
temperature: 0.7,
messages: [],
promptTemplateID: promptId,
args: args,
}))
}
}
}
setLoading(true);
} catch (err) {
setError(JSON.stringify(err));
}
}
return { complete, completion: data, error, isLoading, abort };
}
export default useRequest;


@@ -1,4 +1,4 @@
import { useEffect, useState } from "react";
import { useEffect, useState, useMemo } from "react";
type Options = {
lang?: 'cmn-Hans-CN' | 'en-US' | 'ja-JP',
continuous?: boolean,
@@ -22,15 +22,29 @@ function useVoice2Txt(options: Options): Voice2Txt {
const [isListening, setIsListening] = useState(false);
const [error, setError] = useState<string | null>(null);
//@ts-ignore
const recognition = new webkitSpeechRecognition() || new SpeechRecognition();
for (let key in options) {
recognition[key] = options[key];
}
if (typeof recognition === 'undefined') {
setError("浏览器不支持语音识别");
} else {
console.log(recognition);
}
const recognition = useMemo(() => new webkitSpeechRecognition() || new SpeechRecognition(), []);
useEffect(() => {
for (let key in options) {
recognition[key] = options[key];
}
if (typeof recognition === 'undefined') {
setError("浏览器不支持语音识别");
} else {
console.log(recognition);
}
recognition.onresult = function (event) {
setIsListening(false);
setText(event.results[0][0].transcript)
console.log("转换完成", event)
console.log(event.results[0][0].transcript)
}
//@ts-ignore
recognition.onerror = (e) => {
setError(e)
}
}, [])
function start() {
if (isListening) return;
setIsListening(true);
@@ -48,17 +62,6 @@ function useVoice2Txt(options: Options): Voice2Txt {
}
//@ts-ignore
// Calling recognition.stop() triggers this object's onresult event, and we pick up the transcription result here.
recognition.onresult = function (event) {
setIsListening(false);
setText(event.results[0][0].transcript)
console.log("转换完成", event)
console.log(event.results[0][0].transcript)
}
//@ts-ignore
recognition.onerror = (e) => {
setError(e)
}
return { text, start, end, isListening, error }
}


@@ -12,6 +12,11 @@ const config: Config = {
"gradient-radial": "radial-gradient(var(--tw-gradient-stops))",
"gradient-conic":
"conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))",
"gradient-to-r": "linear-gradient(to right, var(--tw-gradient-stops))",
},
colors: {
"start-color": "#FD7A61",
"end-color": "#FF934D",
},
},
},

yarn.lock (514 changed lines)

File diff suppressed because it is too large.