Compare commits

..

12 Commits

21 changed files with 954 additions and 50 deletions

22
123.py Normal file
View File

@@ -0,0 +1,22 @@
import requests
import json
url = "https://api.mixrai.com/v1/chat/completions"
# 'xxx' is your API key; replace it with your own token
api_key = "xxx"
payload = json.dumps(
{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Hello!"}]}
)
headers = {
"Accept": "application/json",
"Authorization": api_key,
"User-Agent": "Apifox/1.0.0 (https://apifox.com)",
"Content-Type": "application/json",
}
response = requests.request("POST", url, headers=headers, data=payload)
print(response.text)
print("----下面是API Key----")
print(api_key)

25
2598290917api_test.py Normal file
View File

@@ -0,0 +1,25 @@
import requests
import json
url = "https://chatapi.midjourney-vip.cn/v1/chat/completions"
payload = json.dumps({
"model": "gpt-3.5-turbo",
"messages": [
{
"role": "user",
"content": "Hello!"
}
]
})
headers = {
'Accept': 'application/json',
# Replace the hard-coded key below with your own API key (token)
'Authorization': 'sk-QxlvoGgkYT1idnvP129595EdA4324330A84f441b58A7E478',
'User-Agent': 'Apifox/1.0.0 (https://apifox.com)',
'Content-Type': 'application/json'
}
response = requests.request("POST", url, headers=headers, data=payload)
print(response.text)

36
2818902639api_test(3).py Normal file
View File

@@ -0,0 +1,36 @@
import requests
import json
def send_request(session, data):
url = "https://api.mixrai.com/v1/chat/completions"
try:
headers = {
"Accept": "application/json",
"Authorization":"sk-QxlvoGgkYT1idnvP129595EdA4324330A84f441b58A7E478",
"User-Agent": "Apifox/1.0.0 (https://apifox.com)",
"Content-Type": "application/json",
}
response = session.post(url, json=data, headers=headers)
response.raise_for_status()
result = response.json()
response_text = result["choices"][0]["message"]["content"]
except requests.exceptions.RequestException as e:
print(f"请求失败: {e}")
response_text = "抱歉,我现在太忙了,休息一会,请稍后再试。"
return response_text
messages=[{"role": "system", "content": "你好"}]
data = {
"model": "gpt-3.5-turbo",
"messages": messages,
"temperature": 0.3,
"max_tokens": 2000,
"user": f"user_0"
}
session = requests.Session()
session.verify = False
print(send_request(session,data))
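# For reference: the indexing above assumes an OpenAI-style chat-completions response body.
# Illustrative shape only (placeholder values, not output captured from the endpoint above):
#   {"choices": [{"message": {"role": "assistant", "content": "..."}}], ...}
# so result["choices"][0]["message"]["content"] yields the assistant's reply text.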

View File

@@ -0,0 +1,6 @@
torch
modelscope
testresources
websockets==10.4
torchaudio
FunASR

Binary file not shown.

View File

@@ -26,7 +26,7 @@
},
"items": [],
"source": {
"automatic_player_status": true,
"automatic_player_status": false,
"automatic_player_url": "http://127.0.0.1:6000",
"liveRoom": {
"enabled": false,
@@ -34,7 +34,7 @@
},
"record": {
"device": "",
"enabled": true
"enabled": false
},
"wake_word": "\u5c0f\u6a44\u6984",
"wake_word_enabled": true,

View File

@@ -66,6 +66,22 @@ modules = {
}
def get_public_base():
"""
Plan A: return a backend base URL that the large-screen client can reach.
Prefer server.public_base from config.json;
otherwise fall back to cfg.fay_url:5000 (the original behavior).
"""
try:
config_util.load_config()
pb = config_util.config.get("server", {}).get("public_base", "")
if pb:
return pb.rstrip("/")
except Exception:
pass
return f"http://{cfg.fay_url}:5000"
# Large language model reply
def handle_chat_message(msg, username='User', observation=''):
text = ''
@@ -431,6 +447,15 @@ class FeiFei:
can_auto_play = False
self.speaking = True
# ✅ Plan A: push this audio clip's HTTP URL to the large-screen web client for playback
http_audio = f"{get_public_base()}/audio/{os.path.basename(file_url)}"
if wsa_server.get_web_instance().is_connected(interact.data.get("user")):
wsa_server.get_web_instance().add_cmd({
"Username": interact.data.get("user"),
"audioUrl": http_audio
})
# Push the audio to the remote device
MyThread(target=self.__send_remote_device_audio, args=[file_url, interact]).start()
@@ -454,8 +479,8 @@
threading.Timer(audio_length, self.send_play_end_msg, [interact]).start()
# Panel playback
if config_util.config["interact"]["playSound"]:
self.__play_sound(file_url, audio_length, interact)
# if config_util.config["interact"]["playSound"]:
# self.__play_sound(file_url, audio_length, interact)
except Exception as e:
print(e)

View File

@@ -16,11 +16,68 @@ import tempfile
import wave
from core import fay_core
from core import interact
# ===== New: sentence-head tolerance for the front wake word =====
import re
import unicodedata
# Attack time (seconds)
_ATTACK = 0.2
_ATTACK = 0.08 # ↓ Reduced: start capturing earlier so the first half of the wake word is not cut off
# Release time (seconds)
_RELEASE = 0.7
_RELEASE = 0.55 # ↓ Slightly shorter, so one utterance is not split into two segments
# ===== New: sentence-head normalization and matching for the front wake word =====
_PUNCS = ",。!?!?,.、:;“”\"'()[]【】<>《》-—…" # common Chinese punctuation marks
_FILLER_PREFIX = ("嗯", "呃", "啊", "哦", "哎", "欸", "那个", "就是", "然后") # common sentence-initial filler words (ASR often adds them)
def _norm_head(s: str) -> str:
"""只做句首容错:去不可见/空白/句首标点/句首语气词,不改变正文结构。"""
if not s:
return ""
s = unicodedata.normalize("NFKC", s).strip()
# Strip leading whitespace
s = re.sub(r"^\s+", "", s)
# Strip leading punctuation (may repeat)
s = re.sub(r"^[{}]+".format(re.escape(_PUNCS)), "", s)
# Strip common sentence-initial filler words (they may be stacked)
changed = True
while changed:
changed = False
for fp in _FILLER_PREFIX:
if s.startswith(fp):
s = s[len(fp):]
s = re.sub(r"^\s+", "", s)
s = re.sub(r"^[{}]+".format(re.escape(_PUNCS)), "", s)
changed = True
break
return s
def _front_wake_match(text: str, wake_words):
"""
Front wake-word matching (strictly at the start):
- the wake word must appear at the very beginning of the normalized text
- waking mid-sentence is not allowed
"""
t = _norm_head(text)
for w in wake_words:
w = w.strip()
if not w:
continue
# Allowed: the wake word may be followed by spaces / punctuation / filler particles
# e.g. "小橄榄,帮我..." "小橄榄啊 帮我..."
if t.startswith(w):
rest = t[len(w):] # strip the wake word to keep only the actual question
# Strip the punctuation / spaces / filler particles that immediately follow
rest = rest.lstrip(" \t\r\n" + _PUNCS)
rest = re.sub(r"^(啊|呀|呢|吧|哈|哎|诶|欸)\s*", "", rest)
rest = rest.lstrip(" \t\r\n" + _PUNCS)
return True, w, rest
return False, None, ""
class Recorder:
@@ -142,34 +199,44 @@ class Recorder:
self.timer = threading.Timer(60, self.reset_wakeup_status) # reset the timer to 60 seconds
self.timer.start()
# Front wake-word mode
elif cfg.config['source']['wake_word_type'] == 'front':
wake_word = cfg.config['source']['wake_word']
wake_word_list = wake_word.split(',')
wake_up = False
for word in wake_word_list:
if text.startswith(word):
wake_up_word = word
wake_up = True
break
if wake_up:
# Front wake-word mode (strictly at the start, with sentence-head tolerance)
elif cfg.config['source']['wake_word_type'] == 'front':
# Read the configured wake words (multiple words supported)
wake_word = cfg.config['source']['wake_word']
wake_word_list = [w.strip() for w in wake_word.split(',') if w.strip()]
matched, wake_up_word, question = _front_wake_match(text, wake_word_list)
if matched:
util.printInfo(1, self.username, "唤醒成功!")
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "唤醒成功!", "Username" : self.username , 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'})
wsa_server.get_web_instance().add_cmd({"panelMsg": "唤醒成功!", "Username": self.username,
'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "唤醒成功!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'}
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "唤醒成功!"},
'Username': self.username,
'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'}
wsa_server.get_instance().add_cmd(content)
# The utterance with the wake word removed
question = text#[len(wake_up_word):].lstrip()
self.on_speaking(question)
# After the front wake word is recognized, send the question with the wake word stripped
if question:
self.on_speaking(question)
else:
intt = interact.Interact("auto_play", 2, {'user': self.username, 'text': "在呢,你说?"})
self.__fay.on_interact(intt)
self.processing = False
else:
util.printInfo(1, self.username, "[!] 待唤醒!")
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "[!] 待唤醒!", "Username" : self.username , 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'})
wsa_server.get_web_instance().add_cmd({"panelMsg": "[!] 待唤醒!", "Username": self.username,
'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "[!] 待唤醒!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "[!] 待唤醒!"},
'Username': self.username,
'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
wsa_server.get_instance().add_cmd(content)
self.processing = False
# Non-wake mode
else:
@@ -220,12 +287,8 @@ class Recorder:
continue
# Whether capture is allowed; if not, discard the recording
can_listen = True
# Wake word not enabled: capture is not allowed while the panel or digital human is speaking
if cfg.config['source']['wake_word_enabled'] == False and self.__fay.speaking == True:
can_listen = False
# Common wake mode already activated: capture is not allowed while the panel or digital human is outputting sound
if cfg.config['source']['wake_word_enabled'] == True and cfg.config['source']['wake_word_type'] == 'common' and self.wakeup_matched == True and self.__fay.speaking == True:
if self.__fay.speaking == True:
# Whenever the digital human / panel is playing TTS, disable capture to avoid recognizing its own voice as user input
can_listen = False
if can_listen == False:# discard the recording
@@ -234,7 +297,7 @@
# Check whether the volume is high enough to activate capture
level = audioop.rms(data, 2)
if len(self.__history_data) >= 10:# keep audio captured before activation so nothing is lost
if len(self.__history_data) >= 20:# keep audio captured before activation so nothing is lost
self.__history_data.pop(0)
if len(self.__history_level) >= 500:
self.__history_level.pop(0)
@@ -242,11 +305,18 @@
self.__history_level.append(level)
percentage = level / self.__MAX_LEVEL
history_percentage = self.__get_history_percentage(30)
if history_percentage > self.__dynamic_threshold:
self.__dynamic_threshold += (history_percentage - self.__dynamic_threshold) * 0.0025
elif history_percentage < self.__dynamic_threshold:
self.__dynamic_threshold += (history_percentage - self.__dynamic_threshold) * 1
# ===== Improvement: smooth the threshold changes so sentence splitting does not cut off the wake word =====
up_alpha = 0.01 # environment getting noisier: raise the threshold slowly
down_alpha = 0.05 # environment getting quieter: do not drop it instantly either
if history_percentage > self.__dynamic_threshold:
self.__dynamic_threshold += (history_percentage - self.__dynamic_threshold) * up_alpha
else:
self.__dynamic_threshold += (history_percentage - self.__dynamic_threshold) * down_alpha
# Give the threshold a floor to prevent over-sensitivity
self.__dynamic_threshold = max(self.__dynamic_threshold, 0.02)
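# Worked numbers for the asymmetric smoothing above (values chosen only for illustration):
# with __dynamic_threshold = 0.10 and history_percentage = 0.20 (noisier), the threshold
# rises by (0.20 - 0.10) * 0.01 = 0.001 per pass; with history_percentage = 0.04 (quieter),
# it drops by (0.10 - 0.04) * 0.05 = 0.003 per pass, and the max(..., 0.02) floor keeps it
# from ever falling below 0.02.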
# Activate capture
if percentage > self.__dynamic_threshold:

BIN
fay.db

Binary file not shown.

View File

@@ -520,12 +520,19 @@ def setting():
@__app.route('/chat')
def chat():
return render_template('chat.html')
@__app.route('/chat1')
def chat1():
return render_template('chat1.html')
# HTTP endpoint for the output audio
@__app.route('/audio/<filename>')
def serve_audio(filename):
audio_file = os.path.join(os.getcwd(), "samples", filename)
if os.path.exists(audio_file):
return send_file(audio_file)
resp = send_file(audio_file)
# ✅ Recommended: keep the browser from caching stale audio (more reliable for large-screen looping / repeated playback)
resp.headers["Cache-Control"] = "no-store"
return resp
else:
return jsonify({'error': '文件未找到'}), 404

144
gui/static/css/chat1.css Normal file
View File

@@ -0,0 +1,144 @@
html, body {
margin: 0;
padding: 0;
height: 100%;
font-family: 'Orbitron', sans-serif;
background: linear-gradient(135deg, #0f2027, #203a43, #2c5364);
color: #fff;
}
#chat-widget {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
display: flex;
flex-direction: column;
z-index: 1000;
background: rgba(255, 255, 255, 0.05);
backdrop-filter: blur(10px);
border-radius: 15px;
margin: 20px;
box-shadow: 0 0 20px rgba(0, 255, 255, 0.2);
}
.top_info {
font-size: 14px;
font-family: 'Orbitron', sans-serif;
color: #00f0ff;
line-height: 48px;
text-align: left;
padding: 0 20px;
margin: 10px 20px 0 20px; /* align with chat-widget's padding */
background: rgba(255, 255, 255, 0.05);
border: 1px solid rgba(0, 255, 255, 0.2);
box-shadow: 0 0 10px rgba(0, 255, 255, 0.1);
border-radius: 10px;
backdrop-filter: blur(6px);
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.top_info_text {
font-size: 15px;
font-weight: bold;
color: #00ffff;
}
#chat-container {
display: flex;
flex-direction: column;
flex: 1;
overflow: hidden;
}
#messages {
flex: 1;
padding: 10px;
overflow-y: auto;
}
.message {
display: flex;
margin-bottom: 10px;
}
.message.receiver-message {
justify-content: flex-start;
padding-right: 30%;
margin-right: auto;
}
.message.sender-message {
justify-content: flex-end;
padding-left: 30%;
margin-left: auto;
}
.message-content {
flex: 0 1 auto;
}
.message-bubble {
border-radius: 10px;
padding: 12px 16px;
font-size: 15px;
background: rgba(255, 255, 255, 0.1);
color: #e0e0e0;
box-shadow: 0 0 8px rgba(0, 255, 255, 0.3);
word-break: break-word;
white-space: pre-wrap;
text-align: left;
}
.message.sender-message .message-bubble {
text-align: right;
background: linear-gradient(135deg, #00c6ff, #0072ff);
color: white;
box-shadow: 0 0 10px rgba(0, 114, 255, 0.7);
}
#status-indicator {
height: 20px;
text-align: center;
color: #aaa;
font-size: 14px;
}
#input-area {
display: flex;
align-items: center;
padding: 10px;
border-top: 1px solid rgba(255, 255, 255, 0.2);
background-color: rgba(255, 255, 255, 0.05);
backdrop-filter: blur(8px);
}
#input-area button {
background: none;
border: none;
cursor: pointer;
margin-right: 10px;
}
#input-area img {
width: 25px;
height: 25px;
filter: drop-shadow(0 0 5px #00ffff);
}
#message-input {
flex: 1;
padding: 8px;
border: 1px solid #00c6ff;
border-radius: 5px;
background-color: rgba(0, 0, 0, 0.4);
color: white;
}
#message-input:disabled {
background-color: #444;
}

Binary file not shown.

Size: 1.6 MiB

508
gui/static/js/chat1.js Normal file
View File

@@ -0,0 +1,508 @@
// chat1.js
class FayInterface {
constructor(baseWsUrl, baseApiUrl, vueInstance) {
this.baseWsUrl = baseWsUrl;
this.baseApiUrl = baseApiUrl;
this.websocket = null;
this.vueInstance = vueInstance;
}
connectWebSocket() {
if (this.websocket) {
this.websocket.onopen = null;
this.websocket.onmessage = null;
this.websocket.onclose = null;
this.websocket.onerror = null;
}
this.websocket = new WebSocket(this.baseWsUrl);
this.websocket.onopen = () => {
console.log('WebSocket connection opened');
};
this.websocket.onmessage = (event) => {
const data = JSON.parse(event.data);
this.handleIncomingMessage(data);
};
this.websocket.onclose = () => {
console.log('WebSocket connection closed. Attempting to reconnect...');
setTimeout(() => this.connectWebSocket(), 5000);
};
this.websocket.onerror = (error) => {
console.error('WebSocket error:', error);
};
}
async fetchData(url, options = {}) {
try {
const response = await fetch(url, options);
if (!response.ok) throw new Error(`HTTP error! Status: ${response.status}`);
return await response.json();
} catch (error) {
console.error('Error fetching data:', error);
return null;
}
}
getVoiceList() {
return this.fetchData(`${this.baseApiUrl}/api/get-voice-list`);
}
getAudioDeviceList() {
return this.fetchData(`${this.baseApiUrl}/api/get-audio-device-list`);
}
submitConfig(config) {
return this.fetchData(`${this.baseApiUrl}/api/submit`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ config })
});
}
controlEyes(state) {
return this.fetchData(`${this.baseApiUrl}/api/control-eyes`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ state })
});
}
startLive() {
return this.fetchData(`${this.baseApiUrl}/api/start-live`, {
method: 'POST'
});
}
stopLive() {
return this.fetchData(`${this.baseApiUrl}/api/stop-live`, {
method: 'POST'
});
}
getRunStatus() {
return this.fetchData(`${this.baseApiUrl}/api/get_run_status`, {
method: 'POST'
});
}
getMessageHistory(username) {
return new Promise((resolve, reject) => {
const url = `${this.baseApiUrl}/api/get-msg`;
const xhr = new XMLHttpRequest();
xhr.open("POST", url);
xhr.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
const send_data = `data=${encodeURIComponent(JSON.stringify({ username }))}`;
xhr.send(send_data);
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
try {
const data = JSON.parse(xhr.responseText);
if (data && data.list) {
const combinedList = data.list.flat();
resolve(combinedList);
} else {
resolve([]);
}
} catch (e) {
console.error('Error parsing response:', e);
reject(e);
}
} else {
reject(new Error(`Request failed with status ${xhr.status}`));
}
}
};
});
}
getUserList() {
return this.fetchData(`${this.baseApiUrl}/api/get-member-list`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' }
});
}
getData() {
return this.fetchData(`${this.baseApiUrl}/api/get-data`, {
method: 'POST',
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
});
}
getTime(){
const date = new Date();
const year = date.getFullYear();
const month = (date.getMonth() + 1).toString().padStart(2, '0'); // months are 0-based, so add 1
const day = date.getDate().toString().padStart(2, '0');
const hours = date.getHours().toString().padStart(2, '0');
const minutes = date.getMinutes().toString().padStart(2, '0');
const seconds = date.getSeconds().toString().padStart(2, '0');
const milliseconds = date.getMilliseconds().toString().padStart(3, '0');
const currentDateTimeWithMs = `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds}`;
return currentDateTimeWithMs
}
handleIncomingMessage(data) {
const vueInstance = this.vueInstance;
if (data.panelReply !== undefined) {
vueInstance.panelReply = data.panelReply.content;
// Send the message to the parent window, specifying a target origin (should be the parent page's domain)
if (window.parent) {
window.parent.postMessage(
{type: 'panelReply', data: data.panelReply.content},
'*' // the parent page's origin
);
}
}
// console.log('Incoming message:', data);
if (data.liveState !== undefined) {
vueInstance.liveState = data.liveState;
if (data.liveState === 1) {
vueInstance.configEditable = false;
} else if (data.liveState === 0) {
vueInstance.configEditable = true;
}
}
if (data.voiceList !== undefined) {
vueInstance.voiceList = data.voiceList.map(voice => ({
value: voice.id,
label: voice.name
}));
}
if (data.deviceList !== undefined) {
vueInstance.deviceList = data.deviceList.map(device => ({
value: device,
label: device
}));
}
if (data.panelMsg !== undefined) {
vueInstance.panelMsg = data.panelMsg;
}
if (data.robot) {
console.log(data.robot)
vueInstance.$set(vueInstance, 'robot', data.robot);
}
if (data.panelReply !== undefined) {
vueInstance.panelReply = data.panelReply.content;
const userExists = vueInstance.userList.some(user => user[1] === data.panelReply.username);
if (!userExists) {
vueInstance.userList.push([data.panelReply.uid, data.panelReply.username]);
}
if (vueInstance.selectedUser && data.panelReply.username === vueInstance.selectedUser[1]) {
vueInstance.messages.push({
id: data.panelReply.id,
username: data.panelReply.username,
content: data.panelReply.content,
type: data.panelReply.type,
timetext: this.getTime(),
is_adopted:0
});
vueInstance.$nextTick(() => {
const chatContainer = vueInstance.$el.querySelector('#messages');
if (chatContainer) {
chatContainer.scrollTop = chatContainer.scrollHeight;
}
});
}
}
if (data.is_connect !== undefined) {
vueInstance.isConnected = data.is_connect;
}
if (data.remote_audio_connect !== undefined) {
vueInstance.remoteAudioConnected = data.remote_audio_connect;
}
}
}
new Vue({
el: '#chat-app',
delimiters: ["[[", "]]"],
data() {
return {
messages: [],
newMessage: '',
fayService: null,
liveState: 0,
isConnected: false,
remoteAudioConnected: false,
userList: [],
selectedUser: null,
loading: false,
chatMessages: {},
panelMsg: '',
panelReply: '',
robot:'static/images/Normal.gif',
base_url: 'http://127.0.0.1:5000',
play_sound_enabled: false,
source_record_enabled: false,
// added later
/*isRecording: false,*/
status: '',// used to show "listening..." or "thinking..."
};
},
created() {
this.initFayService();
this.getData();
/*this.startLive();// start the chat service*/
// added later
this.initSocket();
},
methods: {
// Initialize the WebSocket connection
initSocket() {
const socket = io.connect('http://localhost:5000'); // replace with the actual WebSocket address
socket.on('connect', () => {
this.status = '已连接';
});
socket.on('message', (data) => {
this.addMessage(data);
});
socket.on('disconnect', () => {
this.status = '连接已断开';
});
},
// Toggle the recording state
/*toggleRecording() {
this.isRecording = !this.isRecording;
},*/
// Append a message to the message list
addMessage(message) {
message.timetext = new Date().toLocaleTimeString();
this.messages.push(message);
},
//
// original code from index.js
initFayService() {
this.fayService = new FayInterface('ws://127.0.0.1:10003', this.base_url, this);
this.fayService.connectWebSocket();
this.fayService.websocket.addEventListener('open', () => {
this.loadUserList();
});
},
sendMessage() {
let _this = this;
let text = _this.newMessage;
if (!text) {
alert('请输入内容');
return;
}
if (_this.selectedUser === 'others' && !_this.othersUser) {
alert('请输入自定义用户名');
return;
}
if (this.liveState != 1) {
alert('请先开启服务');
return;
}
let usernameToSend = _this.selectedUser === 'others' ? _this.othersUser : _this.selectedUser[1];
this.timer = setTimeout(() => {
let height = document.querySelector('#messages').scrollHeight;
document.querySelector('#messages').scrollTop = height;
}, 1000);// scroll to the bottom of the chat after 1 second
_this.newMessage = '';
let url = `${this.base_url}/api/send`;
let send_data = {
"msg": text,
"username": usernameToSend
};
let xhr = new XMLHttpRequest();
xhr.open("post", url);
xhr.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
xhr.send('data=' + encodeURIComponent(JSON.stringify(send_data)));
let executed = false;
xhr.onreadystatechange = async function () {
if (!executed && xhr.status === 200) {
executed = true;
}
};
},
getData() {
this.fayService.getRunStatus().then((data) => {
if (data) {
if(data.status){
this.liveState = 1;
this.configEditable = false;
}else{
this.liveState = 0;
this.configEditable = true;
}
}
});
this.fayService.getData().then((data) => {
if (data) {
this.updateConfigFromData(data.config);
}
});
},
updateConfigFromData(config) {
if (config.interact) {
this.play_sound_enabled = config.interact.playSound;
}
if (config.source && config.source.record) {
this.source_record_enabled = config.source.record.enabled;
}
},
saveConfig() {
let url = `${this.base_url}/api/submit`;
let send_data = {
"config": {
"source": {
"record": {
"enabled": this.source_record_enabled,
},
},
"interact": {
"playSound": this.play_sound_enabled,
}
}
};
let xhr = new XMLHttpRequest()
xhr.open("post", url)
xhr.setRequestHeader("Content-type", "application/x-www-form-urlencoded")
xhr.send('data=' + JSON.stringify(send_data))
let executed = false
xhr.onreadystatechange = async function () {
if (!executed && xhr.status === 200) {
try {
let data = await eval('(' + xhr.responseText + ')')
executed = true
} catch (e) {
}
}
}
},
changeRecord(){
if(this.source_record_enabled){
this.source_record_enabled = false
}else{
this.source_record_enabled = true
}
this.saveConfig()
},
changeSound(){
if(this.play_sound_enabled){
this.play_sound_enabled = false
}else{
this.play_sound_enabled = true
}
this.saveConfig()
},
loadUserList() {
this.fayService.getUserList().then((response) => {
if (response && response.list) {
if (response.list.length == 0){
info = [];
info[0] = 1;
info[1] = 'User';
this.userList.push(info)
this.selectUser(info);
}else{
this.userList = response.list;
this.selectUser(this.userList[0]);
}
}
});
},
selectUser(user) {
this.selectedUser = user;
this.fayService.websocket.send(JSON.stringify({ "Username": user[1] }));
this.loadMessageHistory(user[1], 'common');
},
startLive() {
this.liveState = 2 // persist this variable in the session so that refreshing the browser while liveState == 2 does not re-run this method
this.fayService.startLive().then(() => {
this.sendSuccessMsg('已开启!');
this.getData();
});
},
stopLive() {
this.fayService.stopLive().then(() => {
this.liveState = 3
this.sendSuccessMsg('已关闭!');
});
},
loadMessageHistory(username, type) {
this.fayService.getMessageHistory(username).then((response) => {
if (response) {
this.messages = response;
if(type == 'common'){
this.$nextTick(() => {
const chatContainer = this.$el.querySelector('#messages');
if (chatContainer) {
chatContainer.scrollTop = chatContainer.scrollHeight;
}
});
}
}
});
},
sendSuccessMsg(message) {
this.$notify({
title: '成功',
message,
type: 'success',
});
} ,
adoptText(id) {
// Call the adopt endpoint
this.fayService.fetchData(`${this.base_url}/api/adopt_msg`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ id }) // send the adopt request
})
.then((response) => {
if (response && response.status === 'success') {
// Handle a successful response
this.$notify({
title: '成功',
message: response.msg, // show the success message
type: 'success',
});
this.loadMessageHistory(this.selectedUser[1], 'adopt');
} else {
// Handle a failed response
this.$notify({
title: '失败',
message: response ? response.msg : '请求失败',
type: 'error',
});
}
})
.catch((error) => {
// Handle network or HTTP errors
this.$notify({
title: '错误',
message: error.message || '请求失败',
type: 'error',
});
});
}
,
}
});

View File

@@ -390,7 +390,7 @@ class FayInterface {
this.liveState = 2
this.fayService.startLive().then(() => {
this.sendSuccessMsg('已开启!');
this.getData();
this.fayService.getData();
});
},
stopLive() {

View File

@@ -17,7 +17,6 @@
<body>
<div id="chat-app">
<div id="chat-widget">
<button id="toggle-chat" @click="toggleChat">打开聊天</button>
<div id="chat-container" class="hidden">
<div id="messages">
<!-- Message bubbles are added here dynamically -->

64
gui/templates/chat1.html Normal file
View File

@@ -0,0 +1,64 @@
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>聊天窗口</title>
<link rel="icon" href="static/images/Normal.ico" type="image/x-icon">
<link href="https://fonts.googleapis.com/css2?family=Orbitron:wght@400;700&display=swap" rel="stylesheet">
<link rel="stylesheet" href="static/css/chat1.css">
<!-- Load the Socket.IO client library -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.5.4/socket.io.min.js" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<!-- Load Vue.js -->
<script src="static/js/vue.js"></script>
<!-- Load Element UI -->
<link rel="stylesheet" href="static/css/element/theme-chalk.css" />
<script src="static/js/element-ui.js"></script>
<!-- Load chat1.js -->
<script src="static/js/chat1.js" defer></script>
</head>
<body>
<div id="chat-app">
<div id="chat-widget">
<div class="top_info"><span class="top_info_text">消息:</span>[[panelMsg]]</div>
<!-- Chat box -->
<div id="chat-container">
<div id="messages">
<!-- Message bubbles are added here dynamically -->
<div v-for="(message, index) in messages" :key="index" class="message">
<div class="message receiver-message" v-if="message.type == 'fay'">
<div class="message-content">
<div class="message-bubble">[[message.content]]</div>
</div>
</div>
<div class="message sender-message" v-else>
<div class="message-content">
<div class="sender-message message-bubble">[[message.content]]</div>
</div>
</div>
</div>
</div>
<div id="status-indicator">[[ status ]]</div>
<!-- Input area -->
<div id="input-area">
<!-- Toggle button -->
<el-button v-if="liveState == 1" type="success" class="btn_close"
style="width: 90px; height: 32px; margin-left: 10px; background-color: #007aff;"
@click="stopLive()">关闭</el-button>
<el-button v-else type="primary"
style="width: 90px; height: 32px; margin-left: 10px; background-color: #007aff;"
@click="startLive()">开启</el-button>
<!-- Input box and send button -->
<input type="text" id="message-input" placeholder="请输入内容..." v-model="newMessage" @keyup.enter="sendMessage">
<button id="send-btn" @click="sendMessage">
<img src="static/images/send.png" alt="发送">
</button>
</div>
</div>
</div>
</div>
</body>
</html>

View File

@@ -139,10 +139,6 @@ if __name__ == '__main__':
# Start the HTTP server
flask_server.start()
# Listen for console input
util.log(1, '注册命令...')
MyThread(target=console_listener).start()
# Start the window in common mode
if config_util.start_mode == 'common':
app = QApplication(sys.argv)

2
qa.csv
View File

@@ -1,4 +1,4 @@
你好,你好,我是小橄榄!有什么我可以帮助你的吗
你好,你好我是小橄榄!有什么我可以帮助你的吗
我们现在在哪里,我们现在在冕宁元升农业的展览厅,可以参观游览我们先进的油橄榄产业园区哦!
介绍一下基地,元升集团油橄榄种植基地是中国目前最大的油橄榄种植庄园,目前整个庄园面积已接近30000亩。
介绍一下元升集团,2011年,冕宁元升农业董事长林春福跟随周恩来总理的脚步,在冕宁地区开启了油橄榄庄园打造之路。经过十年的发展,冕宁元升农业目前已成为国家林业局示范基地、国家林业重点龙头企业、四川省第一种植庄园、四川省脱贫标杆企业,获得各种荣誉奖项200余项。

1 你好,你好,我是小橄榄!有什么我可以帮助你的吗 你好 你好,我是小橄榄!有什么我可以帮助你的吗
2 我们现在在哪里,我们现在在冕宁元升农业的展览厅,可以参观游览我们先进的油橄榄产业园区哦! 我们现在在哪里 我们现在在冕宁元升农业的展览厅,可以参观游览我们先进的油橄榄产业园区哦!
3 介绍一下基地,元升集团油橄榄种植基地是中国目前最大的油橄榄种植庄园,目前整个庄园面积已接近30000亩。 介绍一下基地 元升集团油橄榄种植基地是中国目前最大的油橄榄种植庄园,目前整个庄园面积已接近30000亩。
4 介绍一下元升集团,2011年,冕宁元升农业董事长林春福跟随周恩来总理的脚步,在冕宁地区开启了油橄榄庄园打造之路。经过十年的发展,冕宁元升农业目前已成为国家林业局示范基地、国家林业重点龙头企业、四川省第一种植庄园、四川省脱贫标杆企业,获得各种荣誉奖项200余项。 介绍一下元升集团 2011年,冕宁元升农业董事长林春福跟随周恩来总理的脚步,在冕宁地区开启了油橄榄庄园打造之路。经过十年的发展,冕宁元升农业目前已成为国家林业局示范基地、国家林业重点龙头企业、四川省第一种植庄园、四川省脱贫标杆企业,获得各种荣誉奖项200余项。

View File

@@ -13,7 +13,7 @@ azure-cognitiveservices-speech
aliyun-python-sdk-core
simhash
pytz
gevent~=22.10.1
gevent
edge_tts
pydub
chromadb
@@ -28,3 +28,5 @@ langchain_openai
langgraph
langchain-community
bs4
flask_socketio
-r asr/funasr/requirements.txt

View File

@@ -52,11 +52,11 @@ lingju_api_authcode=
#gpt service key (choose one of the NLP options) https://openai.com/
#the free key only supports gpt 3.5; to use other models, you can purchase/apply for a key at https://api.zyai.online/register/?aff_code=MyCI
gpt_api_key=sk-4Spva89SGSikpacz3a70Dd081cA84c9a8dEd345f19C9BdFc
gpt_api_key=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
#gpt base url, e.g. https://api.openai.com/v1, https://rwkv.ai-creator.net/chntuned/v1, https://api.fastgpt.in/api/v1, https://api.moonshot.cn/v1
gpt_base_url=https://api.zyai.online/v1
gpt_base_url=https://openrouter.ai/api/v1
#gpt model engine, e.g. gpt-3.5-turbo, moonshot-v1-8k
gpt_model_engine=gpt-3.5-turbo
gpt_model_engine=qwen/qwen3-4b:free
#gpt (fastgpt) proxy (may be left empty; example: 127.0.0.1:7890)
proxy_config=
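For reference, a minimal sketch (not part of this diff) of how the three GPT settings above plug into the same chat-completions call pattern used by the test scripts at the top of this compare view; the placeholder key and the "Bearer" prefix are assumptions, not code from the repository:

import json
import requests

base_url = "https://openrouter.ai/api/v1"  # gpt_base_url
model = "qwen/qwen3-4b:free"               # gpt_model_engine
api_key = "sk-xxx"                         # gpt_api_key (placeholder, use your own)

# POST an OpenAI-compatible chat-completions request built from the settings above
resp = requests.post(
    f"{base_url}/chat/completions",
    headers={"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"},
    data=json.dumps({"model": model, "messages": [{"role": "user", "content": "Hello!"}]}),
)
print(resp.json()["choices"][0]["message"]["content"])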