zjt 2024-07-03 10:53:52 +08:00
parent 201283fd94
commit 611cc184ca
9 changed files with 35540 additions and 1529 deletions

next.config.mjs

@@ -5,7 +5,12 @@ const nextConfig = {
},
eslint: {
ignoreDuringBuilds: true,
-  }
+  },
+  webpack: (config) => {
+    // stub out Node's fs module so browser bundles that reference it still resolve
+    config.resolve.fallback = { fs: false };
+    return config;
+  },
};
export default nextConfig;

package.json

@@ -23,5 +23,10 @@
"postcss": "^8",
"tailwindcss": "^3.4.1",
"typescript": "^5"
-  }
+  },
+  "browser": {
+    "fs": false,
+    "os": false,
+    "path": false
+  }
}
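Both changes serve the same goal: the new webpack `resolve.fallback` entry and the `browser` field stop the bundler from resolving Node core modules (`fs`, `os`, `path`) that the bundled Live2D dependencies reference but never use in the browser. A minimal sketch of the observable effect (the "why" is an assumption based on the dependencies touched in this commit):

// With the fallback/browser stubs in place, the client bundle compiles even
// though a dependency contains `require("fs")`; the import simply resolves
// to an empty module at runtime.
import * as fs from "fs";

console.log(fs); // {} in the browser bundle — a stub, not the Node API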

src/app/page.tsx

@@ -1,12 +1,15 @@
"use client";
import { useEffect, useRef, useState } from "react";
import "@/deps/live2d.min.js";
import "@/deps/cubism5.js";
import useVoice2Txt from "@/hooks/useVoice2txt";
import useTxt2Voice from "@/hooks/useTxt2Voice";
import axios from "axios";
import * as PIXI from "pixi.js";
import { Live2DModel } from "pixi-live2d-display/cubism2";
import { Live2DModel } from "pixi-live2d-display/cubism4";
import { useSearchParams } from "next/navigation";
+import useRequest from "@/hooks/use-openai-request";
+import txt2Voice from "@/hooks/txt2VoiceAPI";
// fake list
const opList = [
@@ -38,6 +41,8 @@ export default function Home() {
const query = useSearchParams();
const characterId = query.get("id");
const token = query.get("token");
localStorage.setItem("token", token || "");
const { complete, completion: data, isLoading, abort } = useRequest();
  const voice2txt = (txt: string) => {
    // "sharkapiBaseUrl" is a literal placeholder here, not a variable; the working
    // helper in src/hooks/txt2VoiceAPI.ts builds this URL from GlobalData.apiBaseUrl.
    fetch("sharkapiBaseUrl/voice/txt2voice", {
      method: "POST",
@@ -113,61 +118,27 @@ export default function Home() {
const send = (inputText: string) => {
setResponse(inputText);
if (!inputText) return;
-    console.log(inputText);
-    let data = JSON.stringify({
-      messages: [
-        {
-          content: `Answer the user's question as briefly as possible.`,
-          role: "system",
-        },
-        {
-          content: inputText,
-          role: "user",
-        },
-      ],
-      model: "deepseek-chat",
-      frequency_penalty: 0,
-      max_tokens: 2048,
-      presence_penalty: 0,
-      stop: null,
-      stream: false,
-      temperature: 1,
-      top_p: 1,
-    });
-    let config = {
-      method: "post",
-      maxBodyLength: Infinity,
-      url: "https://api.deepseek.com/chat/completions",
-      headers: {
-        "Content-Type": "application/json",
-        Accept: "application/json",
-        Authorization: "Bearer sk-dd24ae704e8d4939aeed8f050d04d36b",
-      },
-      data: data,
-    };
-    try {
-      axios(config)
-        .then((response) => {
-          console.log(`response`, response);
-          console.log(response.data);
-          if (typeof speak !== "undefined") {
-            setResponse(response.data.choices[0].message.content);
-            speak(response.data.choices[0].message.content);
-          } else {
-            model!.motion("tap_body");
-          }
-        })
-        .catch((error) => {
-          // setResponse(error!.toString());
-          console.log(error);
-        });
-    } catch (error) {
-      setResponse(error!.toString());
-      console.log(error);
-    }
+    complete(1, [], [{
+      content: inputText,
+      role: "user",
+    }]);
};
+  useEffect(() => {
+    (async () => {
+      if (data) {
+        setResponse(data);
+        if (typeof speak !== "undefined" && isLoading === false) {
+          // once streaming ends, synthesize the full reply (voice id 4) and play it
+          const base64Voice = "data:audio/mp3;base64," + await txt2Voice(data, 4);
+          const audio = document.createElement("audio");
+          audio.src = base64Voice;
+          audio.play();
+        } else {
+          model!.motion("tap_body");
+        }
+      }
+    })();
+  }, [data, isLoading]);
const { start, end, text, isListening, error } = useVoice2Txt({
lang: "cmn-Hans-CN",
@@ -212,7 +183,7 @@ export default function Home() {
});
const model = await Live2DModel.from(
"https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/shizuku/shizuku.model.json"
"https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/haru/haru_greeter_t03.model3.json"
);
app.stage.addChild(model);
@@ -241,7 +212,7 @@ export default function Home() {
return (
<main className="w-full h-full bg-blue-200">
{typeof window !== "undefined" &&
typeof window.Live2D !== "undefined" && (
typeof window.Live2DCubismCore !== "undefined" && (
<div className="flex w-full flex-col h-full items-center justify-center relative text-white">
{/* live2d */}
<canvas className="w-full " id="canvas"></canvas>
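Pieced together from the hunks above, the model-loading flow now looks roughly like this (a condensed sketch, not the page's literal code; the PIXI application setup is assumed, and the cubism4 build needs `window.Live2DCubismCore` — provided by the new cubism5.js dependency — before `Live2DModel.from()` runs, which is exactly what the render guard checks):

import * as PIXI from "pixi.js";
import { Live2DModel } from "pixi-live2d-display/cubism4";

async function mountModel(canvas: HTMLCanvasElement) {
  // hypothetical setup; the page builds its PIXI app on the #canvas element
  const app = new PIXI.Application({ view: canvas, resizeTo: window });
  const model = await Live2DModel.from(
    "https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/haru/haru_greeter_t03.model3.json"
  );
  app.stage.addChild(model);
  model.motion("tap_body"); // same motion group the page triggers elsewhere
}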

src/consts/consts.ts (new file, 12 lines)

@@ -0,0 +1,12 @@
const apiBaseUrl = 'https://sharkai.data.vaala.tech/v1';
const imBaseUrl = 'http://localhost:3000/';
const chatBaseUrl = 'wss://chatserver.data.vaala.tech/chat/completions';
const ossUrl = 'http://localhost:3000/oss/';
const GlobalData = {
apiBaseUrl,
imBaseUrl,
chatBaseUrl,
ossUrl,
}
export default GlobalData;

src/deps/cubism5.js (new file, 17882 lines)

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

src/hooks/txt2VoiceAPI.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import GlobalData from "@/consts/consts";
export default async function txt2Voice(txt: string, person?: number) {
const token = localStorage.getItem('token');
const url = GlobalData.apiBaseUrl + '/voice/txt2voice';
  // send the POST request
const res = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': token || ""
},
body: JSON.stringify({
txt: txt,
person: person
})
});
  const data = await res.json();
  console.log(data);
  // the backend wraps the payload as { body: { voiceInBase64: string } }
  return data.body.voiceInBase64 as string;
}
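For context, the way page.tsx consumes this helper suggests a usage pattern like the following (a sketch; the voice id 4 mirrors the call in the page's effect):

import txt2Voice from "@/hooks/txt2VoiceAPI";

async function speakReply(reply: string) {
  // returns raw base64 MP3 data; prepend a data URL header to play it
  const base64 = await txt2Voice(reply, 4);
  const audio = new Audio("data:audio/mp3;base64," + base64);
  await audio.play();
}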

src/hooks/use-openai-request.ts (new file, 109 lines)

@@ -0,0 +1,109 @@
import { useEffect, useRef, useState } from "react";
import GlobalData from "../consts/consts";
type FailedReason = string
type AIRequest = () => {
isLoading: boolean;
completion: string | null;
error: string | null;
complete: (promptId: number, args: string[], messages?: any[]) => void;
abort: () => void;
}
const useRequest: AIRequest = () => {
  // data streamed back by the request
  const completion = useRef<string>("");
  const [data, setData] = useState<string>("");
  // error message returned by the request
  const [error, setError] = useState<string | null>(null);
  // loading state of the request
  const [isLoading, setLoading] = useState(false);
const openai = useRef<WebSocket | null>(null)
const abort = () => {
if (openai.current) {
openai.current.close();
openai.current = null;
setLoading(false);
}
}
  async function complete(promptId: number, args: string[], messages?: any[]) {
    const token = localStorage.getItem("token");
    try {
      setError("");
      setData("");
      completion.current = "";
      // one shared socket; only the payload differs between plain-chat and template calls
      const ws = new WebSocket(`${GlobalData.chatBaseUrl}?token=${token}&X-AI-Provider=sharkplus`);
      openai.current = ws;
      ws.onopen = () => {
        console.log("connection established");
        ws.onmessage = (e) => {
          if (e.data.toString().trim() != '"[DONE]"') {
            const data = JSON.parse(e.data);
            if (data.finish_reason) {
              setLoading(false);
              ws.close();
              openai.current = null;
            } else {
              completion.current += data.choices[0]?.delta?.content || "";
              setData(completion.current);
            }
          } else {
            setLoading(false);
            ws.close();
            openai.current = null;
          }
        };
        // with explicit messages, send a plain chat payload; otherwise ask the
        // server to expand the prompt template identified by promptId
        ws.send(JSON.stringify(
          messages
            ? { model: "deepseek-chat-sharkplus", stream: true, temperature: 0.7, messages }
            : { model: "deepseek-chat-sharkplus", stream: true, temperature: 0.7, messages: [], promptTemplateID: promptId, args }
        ));
      };
      setLoading(true);
    } catch (err) {
      setError(JSON.stringify(err));
    }
  }
return { complete, completion: data, error, isLoading, abort };
}
export default useRequest;
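A minimal consumer of the hook, mirroring how page.tsx drives it (a sketch only; promptId 1 and the empty args array are the placeholder values the page passes):

import useRequest from "@/hooks/use-openai-request";

export function ChatBox() {
  const { complete, completion, isLoading, error, abort } = useRequest();
  // passing `messages` selects the plain-chat payload; omitting it would
  // make the hook send promptTemplateID/args instead
  const ask = (text: string) => complete(1, [], [{ content: text, role: "user" }]);
  return (
    <div>
      <button onClick={() => ask("hello")}>Send</button>
      {isLoading && <button onClick={abort}>Stop</button>}
      <p>{error ?? completion}</p>
    </div>
  );
}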

src/hooks/useVoice2txt.ts

@@ -1,4 +1,4 @@
-import { useEffect, useState } from "react";
+import { useEffect, useState, useMemo } from "react";
type Options = {
lang?: 'cmn-Hans-CN' | 'en-US' | 'ja-JP',
continuous?: boolean,
@@ -22,15 +22,29 @@ function useVoice2Txt(options: Options): Voice2Txt {
const [isListening, setIsListening] = useState(false);
const [error, setError] = useState<string | null>(null);
//@ts-ignore
-  const recognition = new webkitSpeechRecognition() || new SpeechRecognition();
-  for (let key in options) {
-    recognition[key] = options[key];
-  }
-  if (typeof recognition === 'undefined') {
-    setError("The browser does not support speech recognition");
-  } else {
-    console.log(recognition);
-  }
+  // `new webkitSpeechRecognition() || new SpeechRecognition()` throws before the
+  // fallback can apply when the first identifier is undefined, so resolve the
+  // constructor first and only then instantiate it
+  const recognition = useMemo(() => {
+    const Ctor = (window as any).webkitSpeechRecognition || (window as any).SpeechRecognition;
+    return Ctor ? new Ctor() : undefined;
+  }, []);
+  useEffect(() => {
+    if (typeof recognition === 'undefined') {
+      setError("The browser does not support speech recognition");
+      return;
+    }
+    for (let key in options) {
+      recognition[key] = options[key];
+    }
+    console.log(recognition);
+    recognition.onresult = function (event) {
+      setIsListening(false);
+      setText(event.results[0][0].transcript)
+      console.log("transcription finished", event)
+      console.log(event.results[0][0].transcript)
+    }
+    //@ts-ignore
+    recognition.onerror = (e) => {
+      setError(e)
+    }
+  }, [])
function start() {
if (isListening) return;
setIsListening(true);
@@ -48,17 +62,6 @@ function useVoice2Txt(options: Options): Voice2Txt {
}
-  //@ts-ignore
-  // calling recognition.stop() fires this onresult handler, where we pick up the transcription result
-  recognition.onresult = function (event) {
-    setIsListening(false);
-    setText(event.results[0][0].transcript)
-    console.log("transcription finished", event)
-    console.log(event.results[0][0].transcript)
-  }
-  //@ts-ignore
-  recognition.onerror = (e) => {
-    setError(e)
-  }
return { text, start, end, isListening, error }
}
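And a matching consumer sketch for the reworked hook above, using the same options page.tsx passes:

import useVoice2Txt from "@/hooks/useVoice2txt";

export function Dictation() {
  const { start, end, text, isListening, error } = useVoice2Txt({ lang: "cmn-Hans-CN" });
  return (
    <div>
      {/* end() stops recognition, which fires onresult and populates `text` */}
      <button onClick={isListening ? end : start}>{isListening ? "Stop" : "Start"}</button>
      <p>{error ?? text}</p>
    </div>
  );
}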