raliz #1
@@ -5,7 +5,12 @@ const nextConfig = {
   },
   eslint: {
     ignoreDuringBuilds: true,
-  }
+  },
+  webpack: (config) => {
+    config.resolve.fallback = { fs: false };
+
+    return config;
+  },
 };
 
 export default nextConfig;
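The webpack fallback above keeps the client bundle from trying to resolve Node's fs module, which the vendored Live2D scripts otherwise request. A minimal sketch of the resulting config shape, assuming os and path stubs are added as well to mirror the package.json "browser" field below (those two extra entries are an assumption, not part of this diff):

// Next.js config sketch (illustration, not the diff itself)
const nextConfig = {
  eslint: {
    ignoreDuringBuilds: true,
  },
  webpack: (config) => {
    // Stub Node built-ins so the browser bundle does not fail to resolve them.
    config.resolve.fallback = { ...config.resolve.fallback, fs: false, os: false, path: false };
    return config;
  },
};

export default nextConfig;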
@@ -23,5 +23,10 @@
     "postcss": "^8",
     "tailwindcss": "^3.4.1",
     "typescript": "^5"
+  },
+  "browser": {
+    "fs": false,
+    "os": false,
+    "path": false
   }
 }
@@ -1,12 +1,15 @@
 "use client";
 import { useEffect, useRef, useState } from "react";
-import "@/deps/live2d.min.js";
+import "@/deps/cubism5.js";
 import useVoice2Txt from "@/hooks/useVoice2txt";
 import useTxt2Voice from "@/hooks/useTxt2Voice";
 import axios from "axios";
 import * as PIXI from "pixi.js";
-import { Live2DModel } from "pixi-live2d-display/cubism2";
+import { Live2DModel } from "pixi-live2d-display/cubism4";
 import { useSearchParams } from "next/navigation";
+import useRequest from "@/hooks/use-openai-request";
+import txt2Voice from "@/hooks/txt2VoiceAPI";
+
 
 // fake list
 const opList = [
@@ -38,6 +41,8 @@ export default function Home() {
   const query = useSearchParams();
   const characterId = query.get("id");
   const token = query.get("token");
+  localStorage.setItem("token", token || "");
+  const { complete, completion: data, isLoading, abort } = useRequest();
   const voice2txt = (txt: string) => {
     fetch("sharkapiBaseUrl/voice/txt2voice", {
       method: "POST",
@@ -113,61 +118,27 @@ export default function Home() {
   const send = (inputText: string) => {
     setResponse(inputText);
     if (!inputText) return;
-    console.log(inputText);
-    let data = JSON.stringify({
-      messages: [
-        {
-          content: `回答用户的问题,尽可能简短。`,
-          role: "system",
-        },
-        {
+    complete(1, [], [{
       content: inputText,
       role: "user",
-        },
-      ],
-      model: "deepseek-chat",
-      frequency_penalty: 0,
-      max_tokens: 2048,
-      presence_penalty: 0,
-      stop: null,
-      stream: false,
-      temperature: 1,
-      top_p: 1,
-    });
-
-    let config = {
-      method: "post",
-      maxBodyLength: Infinity,
-      url: "https://api.deepseek.com/chat/completions",
-      headers: {
-        "Content-Type": "application/json",
-        Accept: "application/json",
-        Authorization: "Bearer sk-dd24ae704e8d4939aeed8f050d04d36b",
-      },
-      data: data,
+    }]);
   };
-    try {
-      axios(config)
-        .then((response) => {
-          console.log(`response`, response);
-          console.log(response.data);
-          if (typeof speak !== "undefined") {
-            setResponse(response.data.choices[0].message.content);
-            speak(response.data.choices[0].message.content);
+  useEffect(() => {
+    (async () => {
+      if (data) {
+        setResponse(data);
+        if (typeof speak !== "undefined" && isLoading === false) {
+          const base64Voice = "data:audio/mp3;base64," + await txt2Voice(data, 4);
+          const audio = document.createElement("audio");
+          audio.src = base64Voice;
+          audio.play();
         } else {
           model!.motion("tap_body");
         }
-        })
-        .catch((error) => {
-          // setResponse(error!.toString());
-          console.log(error);
-        });
-    } catch (error) {
-      setResponse(error!.toString());
-      console.log(error);
-    }
       }
-  };
+    })();
+  }, [data, isLoading]);


   const { start, end, text, isListening, error } = useVoice2Txt({
     lang: "cmn-Hans-CN",
@@ -212,7 +183,7 @@ export default function Home() {
       });
 
       const model = await Live2DModel.from(
-        "https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/shizuku/shizuku.model.json"
+        "https://cdn.jsdelivr.net/gh/guansss/pixi-live2d-display/test/assets/haru/haru_greeter_t03.model3.json"
       );
 
       app.stage.addChild(model);
@@ -241,7 +212,7 @@ export default function Home() {
   return (
     <main className="w-full h-full bg-blue-200">
       {typeof window !== "undefined" &&
-        typeof window.Live2D !== "undefined" && (
+        typeof window.Live2DCubismCore !== "undefined" && (
           <div className="flex w-full flex-col h-full items-center justify-center relative text-white">
             {/* live2d */}
             <canvas className="w-full " id="canvas"></canvas>
src/consts/consts.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
+const apiBaseUrl = 'https://sharkai.data.vaala.tech/v1';
+const imBaseUrl = 'http://localhost:3000/';
+const chatBaseUrl = 'wss://chatserver.data.vaala.tech/chat/completions';
+
+const ossUrl = 'http://localhost:3000/oss/';
+const GlobalData = {
+  apiBaseUrl,
+  imBaseUrl,
+  chatBaseUrl,
+  ossUrl,
+}
+export default GlobalData;
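A minimal usage sketch for the new consts module; the importing code is hypothetical:

// hypothetical consumer of GlobalData (browser only: reads localStorage)
import GlobalData from "@/consts/consts";

const ttsEndpoint = GlobalData.apiBaseUrl + "/voice/txt2voice";
const chatSocket = new WebSocket(GlobalData.chatBaseUrl + "?token=" + (localStorage.getItem("token") || ""));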
src/deps/cubism5.js (new file, 17882 lines)
File diff suppressed because it is too large

src/deps/live2dcubismcore.min.js (vendored, 17790 lines)
File diff suppressed because one or more lines are too long
src/hooks/txt2VoiceAPI.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
+import GlobalData from "@/consts/consts";
+
+export default async function txt2Voice(txt: string, person?: number) {
+  const token = localStorage.getItem('token');
+  const url = GlobalData.apiBaseUrl + '/voice/txt2voice';
+
+  // Send the POST request
+  const res = await fetch(url, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+      'Authorization': token || ""
+    },
+    body: JSON.stringify({
+      txt: txt,
+      person: person
+    })
+  });
+  const data = await res.json();
+  console.log(data)
+  return data.body.voiceInBase64 as string;
+}
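A minimal usage sketch for txt2Voice, mirroring how the page component consumes it in the hunks above; the speakText helper and the person id 4 are assumptions:

// hypothetical browser-only helper
import txt2Voice from "@/hooks/txt2VoiceAPI";

async function speakText(text: string) {
  // The endpoint returns base64-encoded MP3 data; wrap it in a data URL and play it.
  const base64Voice = "data:audio/mp3;base64," + (await txt2Voice(text, 4));
  const audio = new Audio(base64Voice);
  await audio.play();
}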
src/hooks/use-openai-request.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
+import { useEffect, useRef, useState } from "react";
+import GlobalData from "../consts/consts";
+type FailedReason = string
+type AIRequest = () => {
+  isLoading: boolean;
+  completion: string | null;
+  error: string | null;
+  complete: (promptId: number, args: string[], messages?: any[]) => void;
+  abort: () => void;
+}
+
+
+
+const useRequest: AIRequest = () => {
+  // Data returned by the request
+  const completion = useRef<string>("");
+  const [data, setData] = useState<string>("");
+  // Error message returned by the request
+  const [error, setError] = useState<string | null>(null);
+  // Loading state of the request
+  const [isLoading, setLoading] = useState(false);
+  const openai = useRef<WebSocket | null>(null)
+  const abort = () => {
+    if (openai.current) {
+      openai.current.close();
+      openai.current = null;
+      setLoading(false);
+    }
+  }
+  async function complete(promptId: number, args: string[], messages?: any[]) {
+    const token = localStorage.getItem("token");
+    try {
+      setError("");
+      setData("");
+      completion.current = "";
+      if (messages) {
+        openai.current = new WebSocket(`${GlobalData.chatBaseUrl}?token=${token}&X-AI-Provider=sharkplus`);
+        openai.current.onopen = () => {
+          console.log('连接成功');
+          if (openai.current) {
+            openai.current.onmessage = (e) => {
+              if (e.data.toString().trim() != '"[DONE]"') {
+                const data = JSON.parse(e.data);
+                if (data.finish_reason) {
+                  setLoading(false);
+                  openai.current.close();
+                  openai.current = null;
+                } else {
+                  completion.current += data.choices[0]?.delta?.content || "";
+                  setData(completion.current);
+                }
+              } else {
+                setLoading(false);
+                openai.current.close();
+                openai.current = null;
+              }
+            }
+            openai.current.send(JSON.stringify({
+              model: 'deepseek-chat-sharkplus',
+              stream: true,
+              temperature: 0.7,
+              messages: messages,
+            }))
+          }
+        }
+      } else {
+        openai.current = new WebSocket(`${GlobalData.chatBaseUrl}?token=${token}&X-AI-Provider=sharkplus`);
+        openai.current.onopen = () => {
+          console.log('连接成功');
+          if (openai.current) {
+            openai.current.onmessage = (e) => {
+              if (e.data.toString().trim() != '"[DONE]"') {
+                const data = JSON.parse(e.data);
+                if (data.finish_reason) {
+                  setLoading(false);
+                  openai.current.close();
+                  openai.current = null;
+                } else {
+                  completion.current += data.choices[0]?.delta?.content || "";
+                  setData(completion.current);
+                }
+              } else {
+                setLoading(false);
+                openai.current.close();
+                openai.current = null;
+              }
+            }
+            openai.current.send(JSON.stringify({
+              model: 'deepseek-chat-sharkplus',
+              stream: true,
+              temperature: 0.7,
+              messages: [],
+              promptTemplateID: promptId,
+              args: args,
+            }))
+          }
+        }
+      }
+
+      setLoading(true);
+    } catch (err) {
+      setError(JSON.stringify(err));
+    }
+  }
+
+  return { complete, completion: data, error, isLoading, abort };
+}
+
+export default useRequest;
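A minimal usage sketch for useRequest; the ChatDemo component and the message text are assumptions, while the complete(1, [], [...]) call shape follows the page hunks above:

// hypothetical client component
"use client";
import useRequest from "@/hooks/use-openai-request";

export default function ChatDemo() {
  const { complete, completion, isLoading, error, abort } = useRequest();

  // promptTemplateID 1, no template args, explicit chat messages
  const ask = () => complete(1, [], [{ role: "user", content: "Hello" }]);

  return (
    <div>
      <button onClick={ask} disabled={isLoading}>Ask</button>
      <button onClick={abort}>Stop</button>
      <p>{error || completion}</p>
    </div>
  );
}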
@@ -1,4 +1,4 @@
-import { useEffect, useState } from "react";
+import { useEffect, useState, useMemo } from "react";
 type Options = {
   lang?: 'cmn-Hans-CN' | 'en-US' | 'ja-JP',
   continuous?: boolean,
@@ -22,7 +22,8 @@ function useVoice2Txt(options: Options): Voice2Txt {
   const [isListening, setIsListening] = useState(false);
   const [error, setError] = useState<string | null>(null);
   //@ts-ignore
-  const recognition = new webkitSpeechRecognition() || new SpeechRecognition();
+  const recognition = useMemo(() => new webkitSpeechRecognition() || new SpeechRecognition(), []);
+  useEffect(() => {
   for (let key in options) {
     recognition[key] = options[key];
   }
@@ -31,6 +32,19 @@ function useVoice2Txt(options: Options): Voice2Txt {
   } else {
     console.log(recognition);
   }
+    recognition.onresult = function (event) {
+      setIsListening(false);
+      setText(event.results[0][0].transcript)
+      console.log("转换完成", event)
+      console.log(event.results[0][0].transcript)
+    }
+    //@ts-ignore
+    recognition.onerror = (e) => {
+      setError(e)
+    }
+
+  }, [])
+
   function start() {
     if (isListening) return;
     setIsListening(true);
@@ -48,17 +62,6 @@ function useVoice2Txt(options: Options): Voice2Txt {
   }
   //@ts-ignore
   // Calling recognition.stop() fires this object's onresult event; that is where we pick up the transcription result.
-  recognition.onresult = function (event) {
-    setIsListening(false);
-    setText(event.results[0][0].transcript)
-    console.log("转换完成", event)
-    console.log(event.results[0][0].transcript)
-  }
-  //@ts-ignore
-  recognition.onerror = (e) => {
-    setError(e)
-  }
-
   return { text, start, end, isListening, error }
 }
 
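The change above memoizes the recognizer and registers onresult/onerror once inside a useEffect, instead of recreating the recognizer and reattaching the handlers on every render. A minimal usage sketch of the hook; the MicButton component is hypothetical:

// hypothetical client component using the hook
"use client";
import useVoice2Txt from "@/hooks/useVoice2txt";

export default function MicButton() {
  const { start, end, text, isListening, error } = useVoice2Txt({ lang: "cmn-Hans-CN" });

  return (
    <div>
      <button onClick={isListening ? end : start}>{isListening ? "Stop" : "Speak"}</button>
      <p>{error ? String(error) : text}</p>
    </div>
  );
}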