新模型替换

This commit is contained in:
2025-11-16 22:53:11 +08:00
parent bc4670ad59
commit d56009ef86
16 changed files with 1595 additions and 72 deletions

View File

@@ -346,6 +346,11 @@
<version>${jsoup.version}</version> <version>${jsoup.version}</version>
</dependency> </dependency>
<dependency>
<groupId>io.github.jaredmdobson</groupId>
<artifactId>concentus</artifactId>
<version>1.0.2</version>
</dependency>
</dependencies> </dependencies>
</dependencyManagement> </dependencyManagement>

View File

@@ -86,6 +86,11 @@
<artifactId>spring-boot-starter-websocket</artifactId> <artifactId>spring-boot-starter-websocket</artifactId>
</dependency> </dependency>
<dependency>
<groupId>io.github.jaredmdobson</groupId>
<artifactId>concentus</artifactId>
</dependency>
</dependencies> </dependencies>

View File

@@ -0,0 +1,47 @@
package com.vetti.socket;
import javax.websocket.Session;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* 语音发送
*/
/**
 * Serialises outbound audio chunks over a single WebSocket session.
 *
 * <p>Chunks are queued and transmitted one at a time through the async remote;
 * the {@code sending} flag guarantees at most one sendBinary is in flight at
 * any moment, preserving chunk order. Safe for concurrent producers.
 */
public class AudioSender {

    /** Pending audio chunks, drained in FIFO order. */
    private final Queue<byte[]> queue = new ConcurrentLinkedQueue<>();

    /** True while an async send is in flight. */
    private final AtomicBoolean sending = new AtomicBoolean(false);

    /**
     * Enqueues a chunk and starts transmission if no send is in progress.
     *
     * @param session target WebSocket session
     * @param data    raw audio bytes to send
     */
    public void sendAudio(Session session, byte[] data) {
        queue.add(data);
        trySend(session);
    }

    /** Sends the next queued chunk unless a send is already in progress. */
    private void trySend(Session session) {
        if (!sending.compareAndSet(false, true)) {
            return; // a send is in flight; its completion callback continues draining
        }
        byte[] chunk = queue.poll();
        if (chunk == null) {
            sending.set(false);
            // BUGFIX (lost wakeup): a producer may have enqueued between the
            // poll() above and releasing the flag, in which case its own
            // trySend() bailed out at the CAS and the chunk would be stranded
            // until the next sendAudio(). Retry to pick it up.
            if (!queue.isEmpty()) {
                trySend(session);
            }
            return;
        }
        ByteBuffer buffer = ByteBuffer.wrap(chunk);
        session.getAsyncRemote().sendBinary(buffer, result -> {
            sending.set(false);
            if (!result.isOK()) {
                result.getException().printStackTrace();
            }
            // Continue with the next queued chunk, if any.
            trySend(session);
        });
    }
}

View File

@@ -19,15 +19,12 @@ import javax.websocket.*;
import javax.websocket.server.PathParam; import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint; import javax.websocket.server.ServerEndpoint;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.HashMap; import java.util.*;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
/** /**
* 语音面试 web处理器 * 语音面试 web处理器
*/ */
@@ -36,12 +33,6 @@ import java.util.concurrent.ConcurrentHashMap;
@Component @Component
public class ChatWebSocketHandler { public class ChatWebSocketHandler {
/**
* 评分标记
*/
private final String SCORE_FLAG = "Score:";
/** /**
* 缓存客户端流式解析的语音文本数据 * 缓存客户端流式解析的语音文本数据
*/ */
@@ -56,7 +47,10 @@ public class ChatWebSocketHandler {
* 缓存客户端,面试回答信息 * 缓存客户端,面试回答信息
*/ */
private final Map<String, String> cacheMsgMapData = new ConcurrentHashMap<>(); private final Map<String, String> cacheMsgMapData = new ConcurrentHashMap<>();
/**
* 缓存客户端,面试回答信息
*/
private final Map<String, String> cacheMsgMapData1 = new ConcurrentHashMap<>();
/** /**
* 缓存客户端,AI提问的问题结果信息 * 缓存客户端,AI提问的问题结果信息
*/ */
@@ -67,6 +61,11 @@ public class ChatWebSocketHandler {
*/ */
private final Map<String, Map<String, Integer>> cacheScoreResult = new ConcurrentHashMap<>(); private final Map<String, Map<String, Integer>> cacheScoreResult = new ConcurrentHashMap<>();
/**
* 缓存客户端,回答问题次数-回答5轮就自动停止当前问答,返回对应的评分
*/
private final Map<String,Long> cacheQuestionNum = new ConcurrentHashMap<>();
// 语音文件保存目录 // 语音文件保存目录
private static final String VOICE_STORAGE_DIR = "/voice_files/"; private static final String VOICE_STORAGE_DIR = "/voice_files/";
@@ -99,6 +98,8 @@ public class ChatWebSocketHandler {
cacheReplyFlag.put(session.getId(), "YES"); cacheReplyFlag.put(session.getId(), "YES");
//初始化面试回答数据记录 //初始化面试回答数据记录
cacheMsgMapData.put(session.getId(), ""); cacheMsgMapData.put(session.getId(), "");
//初始化面试回答数据记录
cacheMsgMapData1.put(session.getId(), "");
//初始化面试问题 //初始化面试问题
cacheQuestionResult.put(session.getId(), ""); cacheQuestionResult.put(session.getId(), "");
//初始化得分结果记录 //初始化得分结果记录
@@ -108,6 +109,8 @@ public class ChatWebSocketHandler {
scoreResultData.put("2-3", 0); scoreResultData.put("2-3", 0);
scoreResultData.put("2-5", 0); scoreResultData.put("2-5", 0);
cacheScoreResult.put(session.getId(), scoreResultData); cacheScoreResult.put(session.getId(), scoreResultData);
//初始化问答次数
cacheQuestionNum.put(session.getId(), 0L);
//发送初始化面试官语音流 //发送初始化面试官语音流
String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "opening.wav"; String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "opening.wav";
sendVoiceBuffer(openingPathUrl, session); sendVoiceBuffer(openingPathUrl, session);
@@ -158,55 +161,53 @@ public class ChatWebSocketHandler {
if (StrUtil.isNotEmpty(msgMapData)) { if (StrUtil.isNotEmpty(msgMapData)) {
List<Map> list = JSONUtil.toList(msgMapData, Map.class); List<Map> list = JSONUtil.toList(msgMapData, Map.class);
//获取最后一条数据记录 //获取最后一条数据记录
Map<String, String> mapEntity = new HashMap<>();
mapEntity.put("role", "user");
mapEntity.put("content", cacheResultText);
promptJson = JSONUtil.toJsonStr(list);
cacheMsgMapData.put(session.getId(), promptJson);
}
//记录新的数据
String msgMapData1 = cacheMsgMapData1.get(session.getId());
if (StrUtil.isNotEmpty(msgMapData1)) {
List<Map> list = JSONUtil.toList(msgMapData1, Map.class);
//获取最后一条数据记录
Map<String, String> mapEntity = list.get(list.size() - 1); Map<String, String> mapEntity = list.get(list.size() - 1);
//更新问题记录 //更新问题记录
String content = mapEntity.get("content"); String content = mapEntity.get("content");
mapEntity.put("content", StrUtil.format(content, cacheResultText)); mapEntity.put("content", StrUtil.format(content, cacheResultText));
promptJson = JSONUtil.toJsonStr(list); cacheMsgMapData1.put(session.getId(), JSONUtil.toJsonStr(list));
cacheMsgMapData.put(session.getId(), promptJson);
} }
//开始返回衔接语
String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
sendVoiceBuffer(openingPathUrl, session);
//开始使用模型进行追问 //验证是否结速
//把提问的文字发送给GPT Boolean isEndFlag = checkIsEnd(session);
ChatGPTClient chatGPTClient = SpringUtils.getBean(ChatGPTClient.class); if(isEndFlag){
log.info("AI提示词为:{}", promptJson); //开始返回衔接语
log.info("开始请求AI:{}",System.currentTimeMillis()/1000); String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
String resultMsg = chatGPTClient.handleAiChat(promptJson,"QA"); sendVoiceBuffer(openingPathUrl, session);
if(StrUtil.isNotEmpty(resultMsg)) { //开始使用模型进行追问
//开始解析返回结果 //把提问的文字发送给GPT
Map mapResultData = JSONUtil.toBean(resultMsg,Map.class); ChatGPTClient chatGPTClient = SpringUtils.getBean(ChatGPTClient.class);
//验证是否有追问问题返回,如果没有问题返回直接返回评分停止面试 log.info("AI提示词为:{}", promptJson);
Boolean isEndFlagFollow = checkInterviewIsEnd(resultMsg,session); log.info("开始请求AI:{}",System.currentTimeMillis()/1000);
if(isEndFlagFollow){ String resultMsg = chatGPTClient.handleAiChat(promptJson,"QA");
//获取评分 if(StrUtil.isNotEmpty(resultMsg)) {
//验证是否触发对应的规则 //直接返回问题
Boolean isEndFlag = getInterviewScore(resultMsg, session); //开始进行语音输出-流式持续输出
if(isEndFlag){ sendTTSBuffer(clientId, resultMsg, session);
log.info("面试回答符合条件规则,继续追问啦!!!!!"); // 实时输出内容
int resultNum = (int) (Math.random() * 2); try {
List<String> questions = JSONUtil.toList(mapResultData.get("follow_up_questions").toString(), String.class); //把文本也给前端返回去
String questionStr = questions.get(resultNum); Map<String, String> dataText = new HashMap<>();
if (StrUtil.isNotEmpty(questionStr)) { dataText.put("type", "question");
//开始进行语音输出-流式持续输出 dataText.put("content", resultMsg);
sendTTSBuffer(clientId, questionStr, session); log.info("提问的问题文本发送啦:{}",JSONUtil.toJsonStr(dataText));
// 实时输出内容 session.getBasicRemote().sendText(JSONUtil.toJsonStr(dataText));
try { } catch (Exception e) {
//把文本也给前端返回去 e.printStackTrace();
Map<String, String> dataText = new HashMap<>();
dataText.put("type", "question");
dataText.put("content", questionStr);
log.info("提问的问题文本发送啦:{}",JSONUtil.toJsonStr(dataText));
session.getBasicRemote().sendText(JSONUtil.toJsonStr(dataText));
} catch (Exception e) {
e.printStackTrace();
}
//开始对问题进行缓存
recordQuestion(questionStr,session);
}
} }
//开始对问题进行缓存
recordQuestion(resultMsg,session);
} }
} }
log.info("结束请求AI:{}",System.currentTimeMillis()/1000); log.info("结束请求AI:{}",System.currentTimeMillis()/1000);
@@ -283,11 +284,13 @@ public class ChatWebSocketHandler {
try { try {
//文件转换成文件流 //文件转换成文件流
ByteBuffer outByteBuffer = convertFileToByteBuffer(pathUrl); ByteBuffer outByteBuffer = convertFileToByteBuffer(pathUrl);
// sendInChunks(session, outByteBuffer, 2048);
//发送文件流数据 //发送文件流数据
session.getBasicRemote().sendBinary(outByteBuffer); session.getBasicRemote().sendBinary(outByteBuffer);
// 发送响应确认 // 发送响应确认
log.info("已经成功发送了语音流给前端:{}", DateUtil.now()); log.info("已经成功发送了语音流给前端:{}", DateUtil.now());
} catch (IOException e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
@@ -323,11 +326,47 @@ public class ChatWebSocketHandler {
String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav"; String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
sendVoiceBuffer(openingPathUrl, session); sendVoiceBuffer(openingPathUrl, session);
//初始化面试流程的提问 //初始化面试流程的提问
//先记录这个问题
List<Map<String, String>> list = new LinkedList(); List<Map<String, String>> list = new LinkedList();
Map<String, String> mapEntity = new HashMap<>(); Map<String, String> mapEntity = new HashMap<>();
mapEntity.put("role", "system"); mapEntity.put("role", "system");
mapEntity.put("content", "You are a construction industry interview expert. Evaluate candidate responses and provide scores (1-5) and follow-up questions when needed. Always respond in JSON format."); mapEntity.put("content", "You are an expert HR interviewer and behavioural assessment analyst. You conduct structured, unbiased interviews while maintaining a natural, warm, and conversational speaking style. You are calm, friendly, and professional. You never express personal opinions or emotions. You rely strictly on the candidate's spoken words, evidence, and globally accepted HR competency standards.\n" +
"\n" +
"Environment:\n" +
"You are running a live job interview on behalf of an employer. The candidate cannot see you. Your entire understanding comes from what they say aloud. You must guide them through the interview with clear questions, natural conversational pacing, and psychological safety. You operate under strict global HR compliance rules. All evaluations must be based only on job-relevant behaviours and never on assumptions.\n" +
"\n" +
"Tone:\n" +
"Your tone is warm, human, and conversational while remaining professional and precise. You sound like a real interviewer, not a script. Your phrasing is simple, clear, and spoken naturally. You acknowledge candidate responses with brief, human phrases such as \"Thank you,\" \"I understand,\" or \"That makes sense.\" You never overwhelm the candidate with long questions. You speak in Australian English.\n" +
"\n" +
"Goal:\n" +
"Your primary goal is to conduct a structured, fair, and evidence-based behavioural interview while maintaining a natural conversational flow.\n" +
"\n" +
"Process:\n" +
"1. Begin by welcoming the candidate and briefly explaining the competencies you will be assessing\n" +
"2. Guide the candidate through behavioural questions one at a time\n" +
"3. Encourage them to share real examples using STAR structure (Situation, Task, Action, Result)\n" +
"4. Ask probing follow-ups ONLY when necessary to clarify their personal role, actions, decisions, or results\n" +
"5. Focus strictly on what they describe, never speculating or assuming missing details\n" +
"6. Maintain a psychologically safe and supportive conversational environment\n" +
"\n" +
"Guardrails:\n" +
"- Stay strictly within HR compliance and anti-discrimination standards\n" +
"- Be specific and concrete about the candidate's stated actions, experience, and results\n" +
"- Do NOT discuss or evaluate protected characteristics (age, gender, ethnicity, religion, disability, health, family status)\n" +
"- Do NOT guess or infer details that were not stated\n" +
"- Do NOT ask leading questions or hypotheticals unless explicitly required for the role\n" +
"- Do NOT provide legal, medical, financial, or immigration advice\n" +
"- When speaking to the candidate, do NOT use bullet points, lists, or prefixes\n" +
"- Respond in natural conversational English, NOT in JSON format");
list.add(mapEntity); list.add(mapEntity);
//记录另外一个评分的提示词
List<Map<String, String>> list1 = new LinkedList();
Map<String, String> mapEntity1 = new HashMap<>();
mapEntity1.put("role", "system");
mapEntity1.put("content", "You are a construction industry interview expert. Evaluate candidate responses and provide scores (1-5) and follow-up questions when needed. Always respond in JSON format.");
list1.add(mapEntity1);
//获取预设问题-直接TTS转换返回语音结果 //获取预设问题-直接TTS转换返回语音结果
IHotakeProblemBaseInfoService problemBaseInfoService = SpringUtils.getBean(IHotakeProblemBaseInfoService.class); IHotakeProblemBaseInfoService problemBaseInfoService = SpringUtils.getBean(IHotakeProblemBaseInfoService.class);
HotakeProblemBaseInfo queryPro = new HotakeProblemBaseInfo(); HotakeProblemBaseInfo queryPro = new HotakeProblemBaseInfo();
@@ -341,10 +380,19 @@ public class ChatWebSocketHandler {
int random_index = (int) (Math.random() * qStrs.length); int random_index = (int) (Math.random() * qStrs.length);
//获取问题文本 //获取问题文本
String question = qStrs[random_index]; String question = qStrs[random_index];
//面试者提问问题了
Map<String, String> mapEntityQ = new HashMap<>(); Map<String, String> mapEntityQ = new HashMap<>();
mapEntityQ.put("role", "user"); mapEntityQ.put("role", "assistant");
mapEntityQ.put("content", "Question" + question + "\\nCandidate Answer{}"); mapEntityQ.put("content", question);
list.add(mapEntityQ); list.add(mapEntityQ);
//开始记录评分问题
Map<String, String> mapEntityQ1 = new HashMap<>();
mapEntityQ1.put("role", "user");
mapEntityQ1.put("content", "Question" + question + "\\nCandidate Answer{}");
list1.add(mapEntityQ1);
log.info("开始提问啦:{}",JSONUtil.toJsonStr(list)); log.info("开始提问啦:{}",JSONUtil.toJsonStr(list));
//直接对该问题进行转换处理返回语音流 //直接对该问题进行转换处理返回语音流
log.info("第一个问题为:{}",question); log.info("第一个问题为:{}",question);
@@ -364,6 +412,7 @@ public class ChatWebSocketHandler {
} }
//初始化记录提示词数据到-缓存中 //初始化记录提示词数据到-缓存中
cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list)); cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list));
cacheMsgMapData1.put(session.getId(), JSONUtil.toJsonStr(list));
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
log.error("面试流程初始化失败:{}", e.getMessage()); log.error("面试流程初始化失败:{}", e.getMessage());
@@ -526,13 +575,22 @@ public class ChatWebSocketHandler {
*/ */
private void recordQuestion(String questionResult,Session session) { private void recordQuestion(String questionResult,Session session) {
if (StrUtil.isNotEmpty(questionResult)) { if (StrUtil.isNotEmpty(questionResult)) {
//获取缓存记录 String msgMapData1 = cacheMsgMapData1.get(session.getId());
if (StrUtil.isNotEmpty(msgMapData1)) {
List<Map> list = JSONUtil.toList(msgMapData1, Map.class);
Map<String, String> mapEntity = new HashMap<>();
mapEntity.put("role", "user");
mapEntity.put("content", "Question" + questionResult + "\\nCandidate Answer{}");
list.add(mapEntity);
cacheMsgMapData1.put(session.getId(), JSONUtil.toJsonStr(list));
}
//评分获取缓存记录
String msgMapData = cacheMsgMapData.get(session.getId()); String msgMapData = cacheMsgMapData.get(session.getId());
if (StrUtil.isNotEmpty(msgMapData)) { if (StrUtil.isNotEmpty(msgMapData)) {
List<Map> list = JSONUtil.toList(msgMapData, Map.class); List<Map> list = JSONUtil.toList(msgMapData, Map.class);
Map<String, String> mapEntity = new HashMap<>(); Map<String, String> mapEntity = new HashMap<>();
mapEntity.put("role", "user"); mapEntity.put("role", "assistant");
mapEntity.put("content", "Question" + questionResult + "\\nCandidate Answer{}"); mapEntity.put("content", questionResult);
list.add(mapEntity); list.add(mapEntity);
cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list)); cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list));
} }
@@ -572,5 +630,51 @@ public class ChatWebSocketHandler {
return flag; return flag;
} }
/**
* 验证面试是否结束
* @param session
* @return
*/
private Boolean checkIsEnd(Session session){
Long replyNums = cacheQuestionNum.get(session.getId());
//回答次数大于等于5就直接结束面试
Boolean flag = true;
if(replyNums >= 5){
//获取问答评分记录
String promptJson = cacheMsgMapData1.get(session.getId());
//根据模型获取评分
ChatGPTClient chatGPTClient = SpringUtils.getBean(ChatGPTClient.class);
String resultMsg = chatGPTClient.handleAiChat(promptJson,"PF");
if(StrUtil.isNotEmpty(resultMsg)) {
//直接返回问题了
//开始解析返回结果
Map mapResultData = JSONUtil.toBean(resultMsg, Map.class);
//获取评分
Object scoreStr = mapResultData.get("score");
Object assessment = mapResultData.get("assessment");
//发送面试官结束语音流
String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "end.wav";
sendVoiceBuffer(openingPathUrl, session);
Map<String, String> resultEntity = new HashMap<>();
resultEntity.put("content", scoreStr +"\n"+assessment);
resultEntity.put("type", "score");
//返回评分结果
try {
log.info("返回最终的评分结果:{}",JSONUtil.toJsonStr(resultEntity));
session.getBasicRemote().sendText(JSONUtil.toJsonStr(resultEntity));
}catch (Exception e) {
e.printStackTrace();
}
}
flag = false;
}else{
cacheQuestionNum.put(session.getId(), replyNums+1);
}
return flag;
}
} }

View File

@@ -0,0 +1,598 @@
package com.vetti.socket;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.vetti.common.ai.elevenLabs.ElevenLabsClient;
import com.vetti.common.ai.gpt.ChatGPTClient;
import com.vetti.common.config.RuoYiConfig;
import com.vetti.common.utils.spring.SpringUtils;
import com.vetti.hotake.domain.HotakeProblemBaseInfo;
import com.vetti.hotake.service.IHotakeProblemBaseInfoService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.stereotype.Component;
import javax.websocket.*;
import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint;
import java.io.File;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* 语音面试 web处理器
*/
@Slf4j
@ServerEndpoint("/voice-websocket-old/{clientId}")
@Component
public class ChatWebSocketHandlerOld {
/**
 * Score marker prefix. NOTE(review): not referenced anywhere in the visible
 * part of this class — possibly dead code; confirm before removing.
 */
private final String SCORE_FLAG = "Score:";
/**
 * Per-client cache of streamed speech-to-text partial results, keyed by clientId.
 */
private final Map<String, String> cacheClientTts = new ConcurrentHashMap<>();
/**
 * Per-session flag marking the first Q&A turn right after the candidate's
 * self-introduction ("YES" = first turn), keyed by session id.
 */
private final Map<String, String> cacheReplyFlag = new ConcurrentHashMap<>();
/**
 * Per-session interview conversation history: a JSON array of role/content
 * maps used as the chat prompt, keyed by session id.
 */
private final Map<String, String> cacheMsgMapData = new ConcurrentHashMap<>();
/**
 * Per-session cache of the AI's latest question text, keyed by session id.
 */
private final Map<String, String> cacheQuestionResult = new ConcurrentHashMap<>();
/**
 * Per-session score-bucket counters ("0-1", "4-5", "2-3", "2-5") used by the
 * early-termination rules in handleScoreRecord, keyed by session id.
 */
private final Map<String, Map<String, Integer>> cacheScoreResult = new ConcurrentHashMap<>();
// Directory for uploaded voice files (relative to the RuoYi profile root)
private static final String VOICE_STORAGE_DIR = "/voice_files/";
// Directory for generated TTS result files
private static final String VOICE_STORAGE_RESULT_DIR = "/voice_result_files/";
// Directory holding the canned system voice clips (opening/good/end)
private static final String VOICE_SYSTEM_DIR = "/system_files/";
/**
 * Ensures the voice upload and TTS result directories exist under the
 * configured profile root. Unlike the original, a failed mkdirs() is logged
 * instead of being silently ignored.
 */
public ChatWebSocketHandlerOld() {
    File dir = new File(RuoYiConfig.getProfile() + VOICE_STORAGE_DIR);
    if (!dir.exists() && !dir.mkdirs()) {
        // mkdirs() returning false means the directory could not be created
        log.error("创建语音目录失败:{}", dir.getAbsolutePath());
    }
    File resultDir = new File(RuoYiConfig.getProfile() + VOICE_STORAGE_RESULT_DIR);
    if (!resultDir.exists() && !resultDir.mkdirs()) {
        log.error("创建语音目录失败:{}", resultDir.getAbsolutePath());
    }
}
/**
 * Invoked when a client connection is established: resets all per-session
 * caches and plays the interviewer's opening voice clip.
 */
@OnOpen
public void onOpen(Session session, @PathParam("clientId") String clientId) {
    log.info("WebSocket 链接已建立:{}", clientId);
    log.info("WebSocket session 链接已建立:{}", session.getId());
    String sessionId = session.getId();
    cacheClientTts.put(clientId, new String());
    // Mark this session as being in the first Q&A turn after self-introduction.
    cacheReplyFlag.put(sessionId, "YES");
    // Reset the conversation-history and last-question caches.
    cacheMsgMapData.put(sessionId, "");
    cacheQuestionResult.put(sessionId, "");
    // Fresh score-bucket counters for the early-termination rules.
    Map<String, Integer> buckets = new HashMap<>();
    buckets.put("0-1", 0);
    buckets.put("4-5", 0);
    buckets.put("2-3", 0);
    buckets.put("2-5", 0);
    cacheScoreResult.put(sessionId, buckets);
    // Play the interviewer's opening prompt to the client.
    String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "opening.wav";
    sendVoiceBuffer(openingPathUrl, session);
}
/**
 * Receives a text control message from the client.
 *
 * Expected JSON payload:
 * {
 *   "type": "start | done | end",
 *   "content": "..."
 * }
 *
 * "done": the candidate finished speaking. On the first turn a preset
 * question is asked directly (no model call); on later turns the answer is
 * folded into the conversation history and the model is asked for a
 * follow-up, or the interview is scored and ended. "end": terminate the
 * interview and return the final score.
 *
 * @param session  client session
 * @param message  raw JSON message
 * @param clientId user id from the endpoint path
 */
@OnMessage
public void onTextMessage(Session session, String message, @PathParam("clientId") String clientId) {
log.info("我是接收文本消息:{}", message);
try {
//process the text payload
if (StrUtil.isNotEmpty(message)) {
Map<String, String> mapResult = JSONUtil.toBean(JSONUtil.parseObj(message), Map.class);
String resultFlag = mapResult.get("type");
if ("done".equals(resultFlag)) {
//client finished speaking — merge the speech result
String startFlag = cacheReplyFlag.get(session.getId());
//speech ended; start parsing the candidate's answer
log.info("开始文本处理,客户端ID为:{}", clientId);
String cacheResultText = mapResult.get("content");
log.info("开始文本处理,面试者回答信息为:{}", cacheResultText);
if (StrUtil.isEmpty(cacheResultText)) {
cacheResultText = "";
}
//first-turn logic (right after the self-introduction)
if ("YES".equals(startFlag)) {
//no model call — ask the candidate a preset question directly
initializationQuestion(clientId, session);
//clear the first-turn marker so later turns follow the normal Q&A flow
cacheReplyFlag.put(session.getId(), "");
} else {
//normal flow: build a follow-up from the candidate's answer
//fetch the candidate's answer context
//load the cached conversation history
String promptJson = "";
String msgMapData = cacheMsgMapData.get(session.getId());
if (StrUtil.isNotEmpty(msgMapData)) {
List<Map> list = JSONUtil.toList(msgMapData, Map.class);
//take the last turn (the "Question ... Candidate Answer{}" template)
Map<String, String> mapEntity = list.get(list.size() - 1);
//fill the {} placeholder with the candidate's answer
String content = mapEntity.get("content");
mapEntity.put("content", StrUtil.format(content, cacheResultText));
promptJson = JSONUtil.toJsonStr(list);
cacheMsgMapData.put(session.getId(), promptJson);
}
//play the canned transition clip while the model works
String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
sendVoiceBuffer(openingPathUrl, session);
//ask the model for a follow-up
//send the accumulated prompt to GPT
ChatGPTClient chatGPTClient = SpringUtils.getBean(ChatGPTClient.class);
log.info("AI提示词为:{}", promptJson);
log.info("开始请求AI:{}",System.currentTimeMillis()/1000);
String resultMsg = chatGPTClient.handleAiChat(promptJson,"QA");
if(StrUtil.isNotEmpty(resultMsg)) {
//parse the model response
Map mapResultData = JSONUtil.toBean(resultMsg,Map.class);
//if no follow-up question came back, return the score and stop the interview
Boolean isEndFlagFollow = checkInterviewIsEnd(resultMsg,session);
if(isEndFlagFollow){
//get the score
//check whether a termination rule fired
Boolean isEndFlag = getInterviewScore(resultMsg, session);
if(isEndFlag){
log.info("面试回答符合条件规则,继续追问啦!!!!!");
int resultNum = (int) (Math.random() * 2);
List<String> questions = JSONUtil.toList(mapResultData.get("follow_up_questions").toString(), String.class);
String questionStr = questions.get(resultNum);
if (StrUtil.isNotEmpty(questionStr)) {
//stream the follow-up question back as TTS audio
sendTTSBuffer(clientId, questionStr, session);
//also push the question text to the front end
try {
//send the text frame to the front end as well
Map<String, String> dataText = new HashMap<>();
dataText.put("type", "question");
dataText.put("content", questionStr);
log.info("提问的问题文本发送啦:{}",JSONUtil.toJsonStr(dataText));
session.getBasicRemote().sendText(JSONUtil.toJsonStr(dataText));
} catch (Exception e) {
e.printStackTrace();
}
//record the question into the conversation history
recordQuestion(questionStr,session);
}
}
}
}
log.info("结束请求AI:{}",System.currentTimeMillis()/1000);
}
} else if ("end".equals(resultFlag)) {
log.info("面试结束啦!!!!!");
handleInterviewEnd(clientId,session,"");
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
// Receives binary (audio stream) frames from the client.
// NOTE(review): currently a no-op beyond logging — audio appears to be
// transcribed client-side and delivered via text messages; confirm intended.
@OnMessage
public void onBinaryMessage(Session session, @PathParam("clientId") String clientId, ByteBuffer byteBuffer) {
log.info("我是接受二进制流的-客户端ID为:{}", clientId);
}
/**
 * Invoked when the connection closes; releases the session's cached state.
 */
@OnClose
public void onClose(Session session, CloseReason reason) {
    // Use the class logger instead of System.out for consistency.
    log.info("WebSocket连接已关闭: {}, 原因: {}", session.getId(), reason.getReasonPhrase());
    // BUGFIX: the original called cacheScoreResult.put(id, null), which throws
    // NullPointerException — ConcurrentHashMap does not permit null values.
    // remove() both drops the entry and avoids leaking empty placeholders.
    cacheReplyFlag.remove(session.getId());
    cacheMsgMapData.remove(session.getId());
    cacheQuestionResult.remove(session.getId());
    cacheScoreResult.remove(session.getId());
    // NOTE(review): cacheClientTts is keyed by clientId, which is not
    // available here, so its entry is never released — confirm intended.
}
/**
 * Invoked on a transport error (e.g. the page was closed); releases the
 * session's cached state.
 */
@OnError
public void onError(Session session, Throwable throwable) {
    // Log the actual throwable instead of discarding it via System.err.
    log.error("WebSocket发生错误: 页面关闭,链接断开了", throwable);
    if (session != null) {
        // BUGFIX: remove() instead of put(..., null) — ConcurrentHashMap
        // rejects null values with NullPointerException.
        cacheReplyFlag.remove(session.getId());
        cacheMsgMapData.remove(session.getId());
        cacheQuestionResult.remove(session.getId());
        cacheScoreResult.remove(session.getId());
    }
}
/**
 * Reads a file fully into a ByteBuffer.
 *
 * @param fileUrl absolute file path
 * @return buffer wrapping the file bytes, or {@code null} when the file could
 *         not be read — callers must handle the null case
 */
private ByteBuffer convertFileToByteBuffer(String fileUrl) {
    File file = new File(fileUrl);
    try {
        return ByteBuffer.wrap(FileUtils.readFileToByteArray(file));
    } catch (Exception e) {
        // Log with the offending path instead of only dumping the stack.
        log.error("读取语音文件失败:{}", fileUrl, e);
    }
    return null;
}
/**
 * Sends a voice file to the client as a single binary WebSocket frame.
 *
 * @param pathUrl absolute path of the audio file
 * @param session client session
 */
private void sendVoiceBuffer(String pathUrl, Session session) {
    try {
        // Convert the file into a byte buffer.
        ByteBuffer outByteBuffer = convertFileToByteBuffer(pathUrl);
        // BUGFIX: convertFileToByteBuffer returns null on read failure;
        // guard instead of passing null to sendBinary (which would throw NPE
        // and be swallowed by the catch below).
        if (outByteBuffer == null) {
            log.error("语音文件读取失败,取消发送:{}", pathUrl);
            return;
        }
        // Send the binary frame.
        session.getBasicRemote().sendBinary(outByteBuffer);
        log.info("已经成功发送了语音流给前端:{}", DateUtil.now());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Converts text to speech via ElevenLabs and streams the generated audio file
 * to the client.
 *
 * @param clientId user id, used to name the generated file
 * @param content  text to synthesise
 * @param session  client session
 */
private void sendTTSBuffer(String clientId, String content, Session session) {
    String resultFileName = clientId + "_" + System.currentTimeMillis() + ".wav";
    String resultPathUrl = RuoYiConfig.getProfile() + VOICE_STORAGE_RESULT_DIR + resultFileName;
    ElevenLabsClient elevenLabsClient = SpringUtils.getBean(ElevenLabsClient.class);
    elevenLabsClient.handleTextToVoice(content, resultPathUrl);
    // Send first, then log: the original logged "success" before actually
    // sending, which made the log misleading when the send failed.
    sendVoiceBuffer(resultPathUrl, session);
    log.info("发送语音流成功啦!!!!!!!");
}
/**
 * First-turn handling: after the candidate's self-introduction, pick a random
 * preset question for this user (no model call), send it as TTS audio plus a
 * text frame, and seed the conversation-history cache with the system prompt
 * and the question template.
 *
 * @param clientId user id, used to look up the user's preset question bank
 * @param session  client session
 */
private void initializationQuestion(String clientId, Session session) {
    try {
        log.info("开始获取到clientid :{}",clientId);
        //acknowledge the self-introduction right away
        //play the canned "good" transition clip
        String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
        sendVoiceBuffer(openingPathUrl, session);
        //build the initial prompt: system role first
        List<Map<String, String>> list = new LinkedList();
        Map<String, String> mapEntity = new HashMap<>();
        mapEntity.put("role", "system");
        mapEntity.put("content", "You are a construction industry interview expert. Evaluate candidate responses and provide scores (1-5) and follow-up questions when needed. Always respond in JSON format.");
        list.add(mapEntity);
        //load this user's preset questions, then TTS the chosen one back
        IHotakeProblemBaseInfoService problemBaseInfoService = SpringUtils.getBean(IHotakeProblemBaseInfoService.class);
        HotakeProblemBaseInfo queryPro = new HotakeProblemBaseInfo();
        queryPro.setUserId(Long.valueOf(clientId));
        List<HotakeProblemBaseInfo> baseInfoList = problemBaseInfoService.selectHotakeProblemBaseInfoList(queryPro);
        log.info("准备进行第一个问题的提问:{}",JSONUtil.toJsonStr(baseInfoList));
        if (CollectionUtil.isNotEmpty(baseInfoList)) {
            HotakeProblemBaseInfo baseInfo = baseInfoList.get(0);
            if (StrUtil.isNotEmpty(baseInfo.getContents())) {
                //questions are stored "#AA#"-separated; pick one at random
                String[] qStrs = baseInfo.getContents().split("#AA#");
                int random_index = (int) (Math.random() * qStrs.length);
                //the question text
                String question = qStrs[random_index];
                Map<String, String> mapEntityQ = new HashMap<>();
                mapEntityQ.put("role", "user");
                //the {} placeholder is later filled with the candidate's answer
                mapEntityQ.put("content", "Question" + question + "\\nCandidate Answer{}");
                list.add(mapEntityQ);
                log.info("开始提问啦:{}",JSONUtil.toJsonStr(list));
                //convert the question to speech and stream it back
                log.info("第一个问题为:{}",question);
                sendTTSBuffer(clientId, question, session);
                //also send the question text
                try {
                    //push the text frame to the front end as well
                    Map<String, String> dataText = new HashMap<>();
                    dataText.put("type", "question");
                    dataText.put("content", question);
                    log.info("提问的问题文本发送啦:{}",JSONUtil.toJsonStr(dataText));
                    session.getBasicRemote().sendText(JSONUtil.toJsonStr(dataText));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        //seed the conversation-history cache for this session
        cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list));
    } catch (Exception e) {
        e.printStackTrace();
        log.error("面试流程初始化失败:{}", e.getMessage());
    }
}
/**
 * Finishes the interview: plays the closing clip, rewrites the cached
 * conversation into a scoring prompt, asks the model for a final
 * score/assessment, and pushes the result to the client as a "score" frame.
 *
 * @param clientId user id — NOTE(review): currently unused in this method
 * @param session  client session
 * @param position candidate's position; when non-empty it is prefixed to
 *                 every user turn so the model scores in context
 */
private void handleInterviewEnd(String clientId,Session session,String position) {
    //provisional business logic
    //play the interviewer's closing voice clip
    String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "end.wav";
    sendVoiceBuffer(openingPathUrl, session);
    //return the textual score
    //build the scoring prompt from the conversation
    //load the cached history
    String msgMapData = cacheMsgMapData.get(session.getId());
    String promptJson = "";
    if (StrUtil.isNotEmpty(msgMapData)) {
        List<Map> list = JSONUtil.toList(msgMapData, Map.class);
        //take the first turn (the system prompt)
        Map<String, String> mapEntity = list.get(0);
        //overwrite it with the scoring instructions
        mapEntity.put("role", "system");
        mapEntity.put("content", "You are a construction industry interview expert. Evaluate candidate responses and provide scores (1-5) and follow-up questions when needed. Always respond in JSON format.");
        //prefix each candidate answer with the position, when provided
        if (StrUtil.isNotEmpty(position)) {
            for (Map map : list) {
                if ("user".equals(map.get("role").toString())) {
                    map.put("content", "Position: " + position + "\\n" + map.get("content"));
                }
            }
        }
        promptJson = JSONUtil.toJsonStr(list);
        //the interview is over — clear the Q&A history
        cacheMsgMapData.put(session.getId(), "");
    }
    log.info("结束AI提示词为:{}", promptJson);
    ChatGPTClient gptClient = SpringUtils.getBean(ChatGPTClient.class);
    String resultMsg = gptClient.handleAiChat(promptJson, "QA");
    //parse the model response
    Map mapResultData = JSONUtil.toBean(resultMsg,Map.class);
    //extract the score and assessment
    Object scoreStr = mapResultData.get("score");
    Object assessment = mapResultData.get("assessment");
    Map<String, String> resultEntity = new HashMap<>();
    resultEntity.put("content", scoreStr +"\n"+assessment);
    resultEntity.put("type", "score");
    try{
        //return the final score payload
        log.info("返回最终的评分结构:{}",JSONUtil.toJsonStr(resultEntity));
        session.getBasicRemote().sendText(JSONUtil.toJsonStr(resultEntity));
    }catch (Exception e){
        e.printStackTrace();
    }
}
/**
 * Updates the per-session score-bucket counters and decides whether the
 * interview must stop early.
 *
 * Termination rules (bucket, threshold):
 *   1. score in [0,1] more than 1 time  -> stop immediately
 *   2. score in [4,5] more than 3 times -> stop immediately
 *   3. score in [2,3] more than 3 times -> stop immediately
 *   4. score in [2,5] more than 4 times -> stop immediately
 * A score in [2,5] increments both its narrow bucket and the "2-5" bucket.
 *
 * @param content raw score, e.g. "3/5" — the part before '/' is the value
 * @param session client session
 * @return false when a rule fired and the interview should end immediately
 */
private Boolean handleScoreRecord(Object content, Session session) {
    Map<String, Integer> scoreRecordMap = cacheScoreResult.get(session.getId());
    log.info("获取评分结果:{}", content);
    // Defensive: no counters cached for this session (e.g. already cleaned
    // up) or no score supplied — keep going instead of throwing NPE.
    if (scoreRecordMap == null || ObjectUtil.isEmpty(content)) {
        return true;
    }
    // The score arrives as "n/5"; the first segment is the numeric value.
    String[] strs = content.toString().split("/");
    log.info("获取的数据为:{}", strs[0]);
    BigDecimal score;
    try {
        score = new BigDecimal(strs[0].trim());
    } catch (NumberFormatException e) {
        // Malformed score from the model — skip this round instead of letting
        // the exception crash the message handler.
        log.error("评分解析失败:{}", content);
        return true;
    }
    // Bucket 1: 0 <= score <= 1
    if (BigDecimal.ZERO.compareTo(score) <= 0 && BigDecimal.ONE.compareTo(score) >= 0) {
        Integer n1 = scoreRecordMap.get("0-1") + 1;
        scoreRecordMap.put("0-1", n1);
        if (n1 > 1) {
            return false;
        }
    }
    // Bucket 2: 4 <= score <= 5
    if (new BigDecimal(4).compareTo(score) <= 0 && new BigDecimal(5).compareTo(score) >= 0) {
        Integer n1 = scoreRecordMap.get("4-5") + 1;
        scoreRecordMap.put("4-5", n1);
        if (n1 > 3) {
            return false;
        }
    }
    // Bucket 3: 2 <= score <= 3
    if (new BigDecimal(2).compareTo(score) <= 0 && new BigDecimal(3).compareTo(score) >= 0) {
        Integer n1 = scoreRecordMap.get("2-3") + 1;
        scoreRecordMap.put("2-3", n1);
        if (n1 > 3) {
            return false;
        }
    }
    // Bucket 4: 2 <= score <= 5 (overlaps buckets 2 and 3 by design)
    if (new BigDecimal(2).compareTo(score) <= 0 && new BigDecimal(5).compareTo(score) >= 0) {
        Integer n1 = scoreRecordMap.get("2-5") + 1;
        scoreRecordMap.put("2-5", n1);
        if (n1 > 4) {
            return false;
        }
    }
    return true;
}
/**
 * Applies the score-bucket termination rules to the model's response and,
 * when a rule says the interview must end, plays the closing clip and pushes
 * the final score text frame to the client.
 *
 * @param resultMsg Q&A model response JSON containing "score"/"assessment"
 * @param session   client session
 * @return false when the interview ended and the score was returned
 */
private Boolean getInterviewScore(String resultMsg, Session session) {
    Map parsed = JSONUtil.toBean(resultMsg, Map.class);
    Object scoreStr = parsed.get("score");
    Object assessment = parsed.get("assessment");
    // Count this score against the per-session termination rules.
    Boolean keepGoing = handleScoreRecord(scoreStr, session);
    try {
        if (!keepGoing) {
            // A rule fired: play the interviewer's closing clip...
            sendVoiceBuffer(RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "end.wav", session);
            // ...and deliver the final score/assessment as a text frame.
            Map<String, String> payload = new HashMap<>();
            payload.put("content", scoreStr + "\n" + assessment);
            payload.put("type", "score");
            log.info("返回最终的评分结果:{}", JSONUtil.toJsonStr(payload));
            session.getBasicRemote().sendText(JSONUtil.toJsonStr(payload));
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return keepGoing;
}
/**
* 记录问题
* @param questionResult
* @param session
*/
private void recordQuestion(String questionResult, Session session) {
    if (StrUtil.isEmpty(questionResult)) {
        return;
    }
    // Append the newly asked question to this session's cached chat history;
    // the candidate's answer is filled into the "{}" placeholder later.
    String msgMapData = cacheMsgMapData.get(session.getId());
    if (StrUtil.isNotEmpty(msgMapData)) {
        List<Map> history = JSONUtil.toList(msgMapData, Map.class);
        Map<String, String> entry = new HashMap<>();
        entry.put("role", "user");
        entry.put("content", "Question" + questionResult + "\\nCandidate Answer{}");
        history.add(entry);
        cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(history));
    }
}
/**
* 验证面试是否结束,不继续追问了
* @param resultMsg
* @param session
* @return
*/
private Boolean checkInterviewIsEnd(String resultMsg, Session session){
    Map mapResultData = JSONUtil.toBean(resultMsg, Map.class);
    // Score and written assessment returned by the QA model.
    Object scoreStr = mapResultData.get("score");
    Object assessment = mapResultData.get("assessment");
    Object followUpNeeded = mapResultData.get("follow_up_needed");
    // Fix: parse only AFTER the presence check - the old code called
    // Boolean.valueOf(followUpNeeded.toString()) first and threw an NPE whenever
    // the model omitted "follow_up_needed". A missing flag now means "no follow-up",
    // so the interview ends gracefully with the final score.
    Boolean flag = ObjectUtil.isNotEmpty(followUpNeeded) && Boolean.parseBoolean(followUpNeeded.toString());
    try {
        // No follow-up requested: play the closing clip and return the score.
        if (!flag) {
            String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "end.wav";
            sendVoiceBuffer(openingPathUrl, session);
            Map<String, String> resultEntity = new HashMap<>();
            resultEntity.put("content", scoreStr + "\n" + assessment);
            resultEntity.put("type", "score");
            log.info("返回最终的评分结果:{}", JSONUtil.toJsonStr(resultEntity));
            session.getBasicRemote().sendText(JSONUtil.toJsonStr(resultEntity));
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return flag;
}
/**
* 发送小块语音流
* @param session
* @param buffer
* @param chunkSize
*/
/**
 * Splits the buffer into {@code chunkSize}-byte pieces and sends each as a
 * binary WebSocket frame. The caller's buffer position is left untouched.
 *
 * NOTE(review): getAsyncRemote() may reject a new send while the previous one
 * is still in flight; the synchronized block serializes only the submission,
 * not the completion - confirm the container tolerates queued async sends.
 */
public void sendInChunks(Session session, ByteBuffer buffer, int chunkSize) {
    // Independent view so reads here do not move the caller's position.
    ByteBuffer view = buffer.slice();
    byte[] audioData = new byte[view.remaining()];
    view.get(audioData);
    for (int offset = 0; offset < audioData.length; offset += chunkSize) {
        int end = Math.min(offset + chunkSize, audioData.length);
        byte[] chunk = Arrays.copyOfRange(audioData, offset, end);
        synchronized (session) {
            session.getAsyncRemote().sendBinary(ByteBuffer.wrap(chunk));
        }
    }
}
}

View File

@@ -0,0 +1,596 @@
package com.vetti.socket;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.vetti.common.ai.elevenLabs.ElevenLabsClient;
import com.vetti.common.ai.gpt.ChatGPTClient;
import com.vetti.common.config.RuoYiConfig;
import com.vetti.common.utils.spring.SpringUtils;
import com.vetti.hotake.domain.HotakeProblemBaseInfo;
import com.vetti.hotake.service.IHotakeProblemBaseInfoService;
import com.vetti.socket.util.Pcm16ToOpusRealtime;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.stereotype.Component;
import javax.websocket.*;
import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint;
import java.io.File;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Voice interview WebSocket endpoint that streams Opus-encoded audio to the
 * browser. A session walks through: opening clip -> candidate
 * self-introduction -> first canned question -> AI-driven follow-up rounds ->
 * final score.
 *
 * NOTE(review): the container usually instantiates one endpoint per
 * connection; the caches below are keyed by session/client id so this works
 * either way, but confirm the intended instancing model.
 */
@Slf4j
@ServerEndpoint("/voice-websocket-opus/{clientId}")
@Component
public class ChatWebSocketOpusHandler {

    /** Streamed speech-to-text fragments, keyed by client id. */
    private final Map<String, String> cacheClientTts = new ConcurrentHashMap<>();
    /** "YES" while the next "done" is the first round after the self-introduction, keyed by session id. */
    private final Map<String, String> cacheReplyFlag = new ConcurrentHashMap<>();
    /** JSON-serialized chat history (role/content entries) per session id. */
    private final Map<String, String> cacheMsgMapData = new ConcurrentHashMap<>();
    /** Last question asked by the AI, per session id. */
    private final Map<String, String> cacheQuestionResult = new ConcurrentHashMap<>();
    /** Per-session counters of how many times each score bucket was hit. */
    private final Map<String, Map<String, Integer>> cacheScoreResult = new ConcurrentHashMap<>();

    /** Directory for uploaded voice files. */
    private static final String VOICE_STORAGE_DIR = "/voice_files/";
    /** Directory for generated TTS result files. */
    private static final String VOICE_STORAGE_RESULT_DIR = "/voice_result_files/";
    /** Directory holding the canned interviewer clips (opening/good/end). */
    private static final String VOICE_SYSTEM_DIR = "/system_files/";
    /** System prompt shared by every QA-model request (previously duplicated inline). */
    private static final String SYSTEM_PROMPT = "You are a construction industry interview expert. Evaluate candidate responses and provide scores (1-5) and follow-up questions when needed. Always respond in JSON format.";

    public ChatWebSocketOpusHandler() {
        // Make sure both storage directories exist before any session starts.
        File dir = new File(RuoYiConfig.getProfile() + VOICE_STORAGE_DIR);
        if (!dir.exists()) {
            dir.mkdirs();
        }
        File resultDir = new File(RuoYiConfig.getProfile() + VOICE_STORAGE_RESULT_DIR);
        if (!resultDir.exists()) {
            resultDir.mkdirs();
        }
    }

    // Connection established: initialize all per-session state.
    @OnOpen
    public void onOpen(Session session, @PathParam("clientId") String clientId) {
        log.info("WebSocket 链接已建立:{}", clientId);
        log.info("WebSocket session 链接已建立:{}", session.getId());
        cacheClientTts.put(clientId, "");
        // The first "done" message is the candidate's self-introduction.
        cacheReplyFlag.put(session.getId(), "YES");
        cacheMsgMapData.put(session.getId(), "");
        cacheQuestionResult.put(session.getId(), "");
        // Reset the score-bucket counters for this session.
        Map<String, Integer> scoreResultData = new HashMap<>();
        scoreResultData.put("0-1", 0);
        scoreResultData.put("4-5", 0);
        scoreResultData.put("2-3", 0);
        scoreResultData.put("2-5", 0);
        cacheScoreResult.put(session.getId(), scoreResultData);
        // Greet the candidate with the interviewer's opening clip.
        String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "opening.wav";
        sendVoiceBuffer(openingPathUrl, session);
    }

    /**
     * Receives control/text messages from the client, e.g.
     * {"type": "start | done | end", "content": "..."}.
     * "done" finishes the current answer and triggers the next question,
     * "end" finishes the interview and returns the final score.
     */
    @OnMessage
    public void onTextMessage(Session session, String message, @PathParam("clientId") String clientId) {
        log.info("我是接收文本消息:{}", message);
        try {
            if (StrUtil.isEmpty(message)) {
                return;
            }
            Map<String, String> mapResult = JSONUtil.toBean(JSONUtil.parseObj(message), Map.class);
            String resultFlag = mapResult.get("type");
            if ("done".equals(resultFlag)) {
                String startFlag = cacheReplyFlag.get(session.getId());
                log.info("开始文本处理,客户端ID为:{}", clientId);
                String cacheResultText = mapResult.get("content");
                log.info("开始文本处理,面试者回答信息为:{}", cacheResultText);
                if (StrUtil.isEmpty(cacheResultText)) {
                    cacheResultText = "";
                }
                if ("YES".equals(startFlag)) {
                    // First round: ask a canned question directly, no model involved.
                    initializationQuestion(clientId, session);
                    // From now on every "done" goes through the normal QA loop.
                    cacheReplyFlag.put(session.getId(), "");
                } else {
                    handleFollowUpRound(clientId, session, cacheResultText);
                }
            } else if ("end".equals(resultFlag)) {
                log.info("面试结束啦!!!!!");
                handleInterviewEnd(clientId, session, "");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * One normal QA round: merges the candidate's answer into the cached
     * history, asks the model for an evaluation, and either continues with a
     * follow-up question or finishes with the final score.
     */
    private void handleFollowUpRound(String clientId, Session session, String cacheResultText) throws Exception {
        // Fill the answer into the "{}" placeholder of the last history entry.
        String promptJson = "";
        String msgMapData = cacheMsgMapData.get(session.getId());
        if (StrUtil.isNotEmpty(msgMapData)) {
            List<Map> list = JSONUtil.toList(msgMapData, Map.class);
            Map<String, String> mapEntity = list.get(list.size() - 1);
            String content = mapEntity.get("content");
            mapEntity.put("content", StrUtil.format(content, cacheResultText));
            promptJson = JSONUtil.toJsonStr(list);
            cacheMsgMapData.put(session.getId(), promptJson);
        }
        // Short acknowledgement clip while the model is thinking.
        String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
        sendVoiceBuffer(openingPathUrl, session);
        ChatGPTClient chatGPTClient = SpringUtils.getBean(ChatGPTClient.class);
        log.info("AI提示词为:{}", promptJson);
        log.info("开始请求AI:{}", System.currentTimeMillis() / 1000);
        String resultMsg = chatGPTClient.handleAiChat(promptJson, "QA");
        if (StrUtil.isNotEmpty(resultMsg)) {
            Map mapResultData = JSONUtil.toBean(resultMsg, Map.class);
            // Stop when the model does not want a follow-up, or when a
            // score-bucket rule was triggered; both helpers already send the
            // closing clip and the final score in that case.
            if (checkInterviewIsEnd(resultMsg, session) && getInterviewScore(resultMsg, session)) {
                log.info("面试回答符合条件规则,继续追问啦!!!!!");
                Object followUps = mapResultData.get("follow_up_questions");
                // Fix: guard against a missing follow-up list (used to NPE).
                if (ObjectUtil.isNotEmpty(followUps)) {
                    List<String> questions = JSONUtil.toList(followUps.toString(), String.class);
                    if (CollectionUtil.isNotEmpty(questions)) {
                        // Fix: bound the random index by the actual list size (the
                        // old hard-coded 0..1 range could overflow a 1-element list).
                        int resultNum = (int) (Math.random() * questions.size());
                        String questionStr = questions.get(resultNum);
                        if (StrUtil.isNotEmpty(questionStr)) {
                            // Speak the follow-up question ...
                            sendTTSBuffer(clientId, questionStr, session);
                            // ... and push its text to the client as well.
                            try {
                                Map<String, String> dataText = new HashMap<>();
                                dataText.put("type", "question");
                                dataText.put("content", questionStr);
                                log.info("提问的问题文本发送啦:{}", JSONUtil.toJsonStr(dataText));
                                session.getBasicRemote().sendText(JSONUtil.toJsonStr(dataText));
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                            // Remember the question so the next answer can be matched to it.
                            recordQuestion(questionStr, session);
                        }
                    }
                }
            }
        }
        log.info("结束请求AI:{}", System.currentTimeMillis() / 1000);
    }

    // Binary audio frames from the client (currently only logged).
    @OnMessage
    public void onBinaryMessage(Session session, @PathParam("clientId") String clientId, ByteBuffer byteBuffer) {
        log.info("我是接受二进制流的-客户端ID为:{}", clientId);
    }

    // Connection closed: drop the per-session state.
    @OnClose
    public void onClose(Session session, CloseReason reason) {
        System.out.println("WebSocket连接已关闭: " + session.getId() + ", 原因: " + reason.getReasonPhrase());
        cleanupSession(session);
    }

    // Error (e.g. the page was closed): drop the per-session state as well.
    @OnError
    public void onError(Session session, Throwable throwable) {
        System.err.println("WebSocket发生错误: 页面关闭,链接断开了");
        if (session != null) {
            cleanupSession(session);
        }
    }

    /**
     * Removes every cache entry of the given session.
     * Fix: the old code did cacheScoreResult.put(id, null), which always throws
     * an NPE because ConcurrentHashMap rejects null values; remove() is correct
     * and also stops the maps from growing forever.
     * NOTE(review): cacheClientTts is keyed by clientId, which is not available
     * here, so those entries are never evicted - confirm whether that leak matters.
     */
    private void cleanupSession(Session session) {
        cacheReplyFlag.remove(session.getId());
        cacheMsgMapData.remove(session.getId());
        cacheQuestionResult.remove(session.getId());
        cacheScoreResult.remove(session.getId());
    }

    /**
     * Reads a file fully into a ByteBuffer.
     *
     * @param fileUrl absolute file path
     * @return the file content, or null when reading fails
     */
    private ByteBuffer convertFileToByteBuffer(String fileUrl) {
        File file = new File(fileUrl);
        try {
            return ByteBuffer.wrap(FileUtils.readFileToByteArray(file));
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Opus-encodes an audio file and streams every frame to the client.
     *
     * Fix: the previous code encoded only the first 20 ms frame of the file.
     * The whole payload now goes through the streaming encoder and every
     * resulting frame is sent, with the zero-padded tail flushed at the end.
     * NOTE(review): the .wav header bytes are encoded as audio too - confirm
     * the files are raw PCM, or skip the 44-byte RIFF header here.
     *
     * @param pathUrl audio file path (16 kHz mono 16-bit PCM expected)
     * @param session client session
     */
    private void sendVoiceBuffer(String pathUrl, Session session) {
        try {
            ByteBuffer outByteBuffer = convertFileToByteBuffer(pathUrl);
            // Fix: a missing/unreadable file used to NPE here.
            if (outByteBuffer == null) {
                log.error("语音文件读取失败:{}", pathUrl);
                return;
            }
            byte[] bytes = new byte[outByteBuffer.remaining()];
            outByteBuffer.get(bytes);
            Pcm16ToOpusRealtime realtime = new Pcm16ToOpusRealtime(16000);
            for (byte[] frame : realtime.encodeStream(bytes)) {
                session.getBasicRemote().sendBinary(ByteBuffer.wrap(frame));
            }
            byte[] tail = realtime.flush();
            if (tail != null) {
                session.getBasicRemote().sendBinary(ByteBuffer.wrap(tail));
            }
            log.info("已经成功发送了语音流给前端:{}", DateUtil.now());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Converts text to speech via ElevenLabs and streams the result audio.
     *
     * @param clientId user id (used in the generated file name)
     * @param content  text to speak
     * @param session  client session
     */
    private void sendTTSBuffer(String clientId, String content, Session session) {
        String resultFileName = clientId + "_" + System.currentTimeMillis() + ".wav";
        String resultPathUrl = RuoYiConfig.getProfile() + VOICE_STORAGE_RESULT_DIR + resultFileName;
        ElevenLabsClient elevenLabsClient = SpringUtils.getBean(ElevenLabsClient.class);
        elevenLabsClient.handleTextToVoice(content, resultPathUrl);
        log.info("发送语音流成功啦!!!!!!!");
        sendVoiceBuffer(resultPathUrl, session);
    }

    /**
     * Asks the first question: picked at random from the candidate's
     * pre-generated question set (no big-model call), and seeds the chat history.
     */
    private void initializationQuestion(String clientId, Session session) {
        try {
            log.info("开始获取到clientid :{}", clientId);
            // Acknowledge the self-introduction right away.
            String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "good.wav";
            sendVoiceBuffer(openingPathUrl, session);
            // Seed the prompt history with the system role (fixed raw LinkedList type).
            List<Map<String, String>> list = new LinkedList<>();
            Map<String, String> mapEntity = new HashMap<>();
            mapEntity.put("role", "system");
            mapEntity.put("content", SYSTEM_PROMPT);
            list.add(mapEntity);
            // Load the pre-generated questions for this candidate.
            IHotakeProblemBaseInfoService problemBaseInfoService = SpringUtils.getBean(IHotakeProblemBaseInfoService.class);
            HotakeProblemBaseInfo queryPro = new HotakeProblemBaseInfo();
            queryPro.setUserId(Long.valueOf(clientId));
            List<HotakeProblemBaseInfo> baseInfoList = problemBaseInfoService.selectHotakeProblemBaseInfoList(queryPro);
            log.info("准备进行第一个问题的提问:{}", JSONUtil.toJsonStr(baseInfoList));
            if (CollectionUtil.isNotEmpty(baseInfoList)) {
                HotakeProblemBaseInfo baseInfo = baseInfoList.get(0);
                if (StrUtil.isNotEmpty(baseInfo.getContents())) {
                    // Questions are stored "#AA#"-separated; pick one at random.
                    String[] qStrs = baseInfo.getContents().split("#AA#");
                    int random_index = (int) (Math.random() * qStrs.length);
                    String question = qStrs[random_index];
                    Map<String, String> mapEntityQ = new HashMap<>();
                    mapEntityQ.put("role", "user");
                    mapEntityQ.put("content", "Question" + question + "\\nCandidate Answer{}");
                    list.add(mapEntityQ);
                    log.info("开始提问啦:{}", JSONUtil.toJsonStr(list));
                    log.info("第一个问题为:{}", question);
                    // Speak the question ...
                    sendTTSBuffer(clientId, question, session);
                    // ... and also push its text to the client.
                    try {
                        Map<String, String> dataText = new HashMap<>();
                        dataText.put("type", "question");
                        dataText.put("content", question);
                        log.info("提问的问题文本发送啦:{}", JSONUtil.toJsonStr(dataText));
                        session.getBasicRemote().sendText(JSONUtil.toJsonStr(dataText));
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
            // Persist the seeded history for the follow-up rounds.
            cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list));
        } catch (Exception e) {
            e.printStackTrace();
            log.error("面试流程初始化失败:{}", e.getMessage());
        }
    }

    /**
     * Finishes the interview: plays the closing clip, sends the whole history
     * to the model for a final evaluation and returns the score text.
     *
     * @param position candidate position, prefixed to every answer when present
     */
    private void handleInterviewEnd(String clientId, Session session, String position) {
        String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "end.wav";
        sendVoiceBuffer(openingPathUrl, session);
        String msgMapData = cacheMsgMapData.get(session.getId());
        String promptJson = "";
        if (StrUtil.isNotEmpty(msgMapData)) {
            List<Map> list = JSONUtil.toList(msgMapData, Map.class);
            // Re-assert the system prompt on the first history entry.
            Map<String, String> mapEntity = list.get(0);
            mapEntity.put("role", "system");
            mapEntity.put("content", SYSTEM_PROMPT);
            // Prefix every candidate answer with the position, if one was given.
            if (StrUtil.isNotEmpty(position)) {
                for (Map map : list) {
                    if ("user".equals(map.get("role").toString())) {
                        map.put("content", "Position: " + position + "\\n" + map.get("content"));
                    }
                }
            }
            promptJson = JSONUtil.toJsonStr(list);
            // The interview is over: clear the cached history.
            cacheMsgMapData.put(session.getId(), "");
        }
        log.info("结束AI提示词为:{}", promptJson);
        ChatGPTClient gptClient = SpringUtils.getBean(ChatGPTClient.class);
        String resultMsg = gptClient.handleAiChat(promptJson, "QA");
        Map mapResultData = JSONUtil.toBean(resultMsg, Map.class);
        Object scoreStr = mapResultData.get("score");
        Object assessment = mapResultData.get("assessment");
        Map<String, String> resultEntity = new HashMap<>();
        resultEntity.put("content", scoreStr + "\n" + assessment);
        resultEntity.put("type", "score");
        try {
            log.info("返回最终的评分结构:{}", JSONUtil.toJsonStr(resultEntity));
            session.getBasicRemote().sendText(JSONUtil.toJsonStr(resultEntity));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Records the score of the current answer and decides whether the interview
     * must stop. Termination rules:
     *  1. score in [0,1] more than once
     *  2. score in [4,5] more than 3 times
     *  3. score in [2,3] more than 3 times
     *  4. score in [2,5] more than 4 times
     *
     * @param content raw score field, formatted like "3/5"
     * @return false when the interview must stop immediately
     */
    private Boolean handleScoreRecord(Object content, Session session) {
        Map<String, Integer> scoreRecordMap = cacheScoreResult.get(session.getId());
        log.info("获取评分结果:{}", content);
        // No counters (session already cleaned up) or no score: keep going.
        if (scoreRecordMap == null || ObjectUtil.isEmpty(content)) {
            return true;
        }
        String[] strs = content.toString().split("/");
        log.info("获取的数据为:{}", strs[0]);
        BigDecimal score;
        try {
            score = new BigDecimal(strs[0].trim());
        } catch (NumberFormatException e) {
            // Malformed score from the model: skip this round instead of crashing.
            log.warn("评分解析失败:{}", content);
            return true;
        }
        if (isBetween(score, 0, 1) && countScore(scoreRecordMap, "0-1") > 1) {
            return false;
        }
        if (isBetween(score, 4, 5) && countScore(scoreRecordMap, "4-5") > 3) {
            return false;
        }
        if (isBetween(score, 2, 3) && countScore(scoreRecordMap, "2-3") > 3) {
            return false;
        }
        // [4,5] and [2,3] scores also fall into this bucket - matches the
        // original counting behavior.
        if (isBetween(score, 2, 5) && countScore(scoreRecordMap, "2-5") > 4) {
            return false;
        }
        return true;
    }

    /** Whether {@code score} lies in the closed interval [{@code lo}, {@code hi}]. */
    private static boolean isBetween(BigDecimal score, int lo, int hi) {
        return score.compareTo(BigDecimal.valueOf(lo)) >= 0 && score.compareTo(BigDecimal.valueOf(hi)) <= 0;
    }

    /** Increments and returns the per-session counter of the given score bucket. */
    private static int countScore(Map<String, Integer> scoreRecordMap, String key) {
        int n = scoreRecordMap.getOrDefault(key, 0) + 1;
        scoreRecordMap.put(key, n);
        return n;
    }

    /**
     * Records the returned score and, when a termination rule was triggered,
     * closes the interview by sending the final score.
     *
     * @return false when the interview must stop
     */
    private Boolean getInterviewScore(String resultMsg, Session session) {
        Map mapResultData = JSONUtil.toBean(resultMsg, Map.class);
        Object scoreStr = mapResultData.get("score");
        Object assessment = mapResultData.get("assessment");
        Boolean flag = handleScoreRecord(scoreStr, session);
        if (!flag) {
            sendScoreResult(session, scoreStr, assessment);
        }
        return flag;
    }

    /**
     * Appends the newly asked question to this session's chat history; the
     * candidate's answer is filled into the "{}" placeholder later.
     */
    private void recordQuestion(String questionResult, Session session) {
        if (StrUtil.isEmpty(questionResult)) {
            return;
        }
        String msgMapData = cacheMsgMapData.get(session.getId());
        if (StrUtil.isNotEmpty(msgMapData)) {
            List<Map> list = JSONUtil.toList(msgMapData, Map.class);
            Map<String, String> mapEntity = new HashMap<>();
            mapEntity.put("role", "user");
            mapEntity.put("content", "Question" + questionResult + "\\nCandidate Answer{}");
            list.add(mapEntity);
            cacheMsgMapData.put(session.getId(), JSONUtil.toJsonStr(list));
        }
    }

    /**
     * Checks whether the model wants another follow-up question.
     * Fix: the flag is parsed only after the presence check - the old code threw
     * an NPE whenever "follow_up_needed" was missing. A missing flag now means
     * "no follow-up" and ends the interview with the final score.
     *
     * @return true when the interview should continue with a follow-up
     */
    private Boolean checkInterviewIsEnd(String resultMsg, Session session) {
        Map mapResultData = JSONUtil.toBean(resultMsg, Map.class);
        Object scoreStr = mapResultData.get("score");
        Object assessment = mapResultData.get("assessment");
        Object followUpNeeded = mapResultData.get("follow_up_needed");
        Boolean flag = ObjectUtil.isNotEmpty(followUpNeeded) && Boolean.parseBoolean(followUpNeeded.toString());
        if (!flag) {
            sendScoreResult(session, scoreStr, assessment);
        }
        return flag;
    }

    /**
     * Sends the closing audio plus the final "score" text frame to the client.
     * Extracted: this sequence was duplicated in getInterviewScore and
     * checkInterviewIsEnd.
     */
    private void sendScoreResult(Session session, Object scoreStr, Object assessment) {
        try {
            String openingPathUrl = RuoYiConfig.getProfile() + VOICE_SYSTEM_DIR + "end.wav";
            sendVoiceBuffer(openingPathUrl, session);
            Map<String, String> resultEntity = new HashMap<>();
            resultEntity.put("content", scoreStr + "\n" + assessment);
            resultEntity.put("type", "score");
            log.info("返回最终的评分结果:{}", JSONUtil.toJsonStr(resultEntity));
            session.getBasicRemote().sendText(JSONUtil.toJsonStr(resultEntity));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Splits the buffer into {@code chunkSize}-byte pieces and sends each as a
     * binary WebSocket frame; the caller's buffer position is left untouched.
     * NOTE(review): the synchronized block serializes only the submission of
     * async sends, not their completion - confirm the container tolerates
     * queued async sends, or switch to a queued sender.
     */
    public void sendInChunks(Session session, ByteBuffer buffer, int chunkSize) {
        ByteBuffer view = buffer.slice();
        byte[] audioData = new byte[view.remaining()];
        view.get(audioData);
        for (int offset = 0; offset < audioData.length; offset += chunkSize) {
            int end = Math.min(offset + chunkSize, audioData.length);
            byte[] chunk = Arrays.copyOfRange(audioData, offset, end);
            synchronized (session) {
                session.getAsyncRemote().sendBinary(ByteBuffer.wrap(chunk));
            }
        }
    }
}

View File

@@ -0,0 +1,58 @@
package com.vetti.socket;
import javax.websocket.*;
import java.net.URI;
// Standalone WebSocket client endpoint, used for manually exercising the voice endpoint.
@ClientEndpoint
public class MyWebSocketClient {

    /** Fired once the connection is up; greets the server immediately. */
    @OnOpen
    public void onOpen(Session session) {
        System.out.println("连接已建立Session ID: " + session.getId());
        try {
            session.getBasicRemote().sendText("Hello, Server!");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Fired for every text message pushed by the server. */
    @OnMessage
    public void onMessage(String message, Session session) {
        System.out.println("收到服务端消息: " + message);
    }

    /** Fired when either side closes the connection. */
    @OnClose
    public void onClose(Session session, CloseReason reason) {
        System.out.println("连接关闭,原因: " + reason.getReasonPhrase());
    }

    /** Fired on any transport or handler error. */
    @OnError
    public void onError(Session session, Throwable error) {
        System.err.println("发生错误: ");
        error.printStackTrace();
    }

    public static void main(String[] args) {
        // Target endpoint of the voice interview service.
        String serverUri = "ws://vetti.hotake.cn/prod-api/voice-websocket-opus/104";
        try {
            WebSocketContainer container = ContainerProvider.getWebSocketContainer();
            container.connectToServer(new MyWebSocketClient(), new URI(serverUri));
            // Keep the JVM alive for a minute so messages can flow.
            Thread.sleep(60000);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -0,0 +1,94 @@
package com.vetti.socket.util;
import io.github.jaredmdobson.concentus.OpusApplication;
import io.github.jaredmdobson.concentus.OpusDecoder;
import io.github.jaredmdobson.concentus.OpusEncoder;
import io.github.jaredmdobson.concentus.OpusException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
/**
 * Streaming PCM16 (mono, little-endian) <-> Opus transcoder built on Concentus.
 * Frames are fixed at 20 ms, i.e. {@code sampleRate / 50} samples
 * ({@code frameByte} bytes) per frame. Encoder and decoder keep internal
 * state, so an instance is NOT thread-safe and must not be shared across
 * sessions.
 */
public class Pcm16ToOpusRealtime {
    /** Frame duration in milliseconds. */
    private static final int FRAME_MS = 20;
    /** Samples per frame: sampleRate * 20 / 1000. */
    private final int frameSize;
    /** Bytes per frame: 2 bytes per 16-bit sample (e.g. 640 at 16 kHz). */
    private final int frameByte;
    private final OpusEncoder enc;
    private final OpusDecoder dec;

    /* -------------- encoder side -------------- */
    /** Carry-over buffer holding a partial frame between encodeStream() calls. */
    private final byte[] encBuf;
    /** Number of valid bytes currently buffered in {@link #encBuf}. */
    private int encPos = 0;

    /**
     * @param sampleRate PCM sample rate in Hz, mono; must be a rate Opus
     *                   supports (8000/12000/16000/24000/48000)
     */
    public Pcm16ToOpusRealtime(int sampleRate) throws OpusException {
        this.enc = new OpusEncoder(sampleRate, 1, OpusApplication.OPUS_APPLICATION_AUDIO);
        this.dec = new OpusDecoder(sampleRate, 1);
        this.frameSize = sampleRate * FRAME_MS / 1000;
        this.frameByte = frameSize * 2;
        this.encBuf = new byte[frameByte];
    }

    /**
     * Feeds an arbitrary-length chunk of 16-bit mono PCM and returns the 0..n
     * complete Opus frames that became available; a trailing partial frame is
     * buffered for the next call (or {@link #flush()}).
     */
    public List<byte[]> encodeStream(byte[] pcmIn) throws OpusException {
        List<byte[]> outFrames = new ArrayList<>();
        int off = 0;
        while (off < pcmIn.length) {
            int canCopy = Math.min(pcmIn.length - off, frameByte - encPos);
            System.arraycopy(pcmIn, off, encBuf, encPos, canCopy);
            encPos += canCopy;
            off += canCopy;
            if (encPos == frameByte) { // one full frame accumulated
                outFrames.add(encodeOneFrame(encBuf));
                encPos = 0; // reset the carry-over buffer
            }
        }
        return outFrames;
    }

    /** Encodes the buffered tail, zero-padded to a full frame; null when empty. */
    public byte[] flush() throws OpusException {
        if (encPos == 0) {
            return null; // nothing buffered
        }
        for (int i = encPos; i < frameByte; i++) {
            encBuf[i] = 0;
        }
        byte[] last = encodeOneFrame(encBuf);
        encPos = 0;
        return last;
    }

    /**
     * Encodes exactly one 20 ms PCM frame to Opus.
     * Fix: the parameter used to shadow the {@code encBuf} field, and a buffer
     * shorter than one frame crashed inside the encoder; short input is now
     * zero-padded, while bytes beyond one frame are ignored (as before).
     *
     * @param frame little-endian 16-bit mono PCM, ideally {@code frameByte} bytes
     * @return the Opus packet, trimmed to its actual length
     */
    public byte[] encodeOneFrame(byte[] frame) throws OpusException {
        short[] pcm;
        if (frame.length == frameByte) {
            pcm = byteArrToShortArr(frame);
        } else {
            byte[] exact = new byte[frameByte];
            System.arraycopy(frame, 0, exact, 0, Math.min(frame.length, frameByte));
            pcm = byteArrToShortArr(exact);
        }
        byte[] opus = new byte[400]; // generous upper bound for one Opus packet
        int len = enc.encode(pcm, 0, frameSize, opus, 0, opus.length);
        byte[] trim = new byte[len];
        System.arraycopy(opus, 0, trim, 0, len);
        return trim;
    }

    /* -------------- decoder side (one frame at a time) -------------- */

    /** Decodes a single Opus frame back to little-endian 16-bit mono PCM. */
    public byte[] decodeOneFrame(byte[] opusFrame) throws OpusException {
        short[] pcm = new short[frameSize];
        int samples = dec.decode(opusFrame, 0, opusFrame.length,
                pcm, 0, frameSize, false);
        return shortArrToByteArr(pcm, samples);
    }

    /* ====================================================================== */

    /** Little-endian byte pairs -> shorts; a trailing odd byte is dropped. */
    private static short[] byteArrToShortArr(byte[] b) {
        short[] s = new short[b.length / 2];
        ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(s);
        return s;
    }

    /** First {@code len} shorts -> little-endian bytes. */
    private static byte[] shortArrToByteArr(short[] s, int len) {
        byte[] b = new byte[len * 2];
        ByteBuffer.wrap(b).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().put(s, 0, len);
        return b;
    }
}

View File

@@ -56,11 +56,21 @@ public class CommonServiceImpl implements ICommonService {
// 2. 构建请求体(空 JSON 对象) // 2. 构建请求体(空 JSON 对象)
Map<String,Object> params = new HashMap<>(); Map<String,Object> params = new HashMap<>();
Map<String, String> bodyEntity = new HashMap<>(); Map<String, Object> bodyEntity = new HashMap<>();
bodyEntity.put("model",MODEL); bodyEntity.put("model",MODEL);
bodyEntity.put("prompt",prompt); bodyEntity.put("prompt",prompt);
bodyEntity.put("language",language); bodyEntity.put("language",language);
params.put("input_audio_transcription",bodyEntity); params.put("input_audio_transcription",bodyEntity);
Map<String, Object> bodyEntityConfig = new HashMap<>();
bodyEntityConfig.put("type","semantic_vad");
bodyEntityConfig.put("eagerness","high");
params.put("turn_detection",bodyEntityConfig);
// Map<String, Object> bodyEntityStream = new HashMap<>();
// bodyEntityStream.put("type","full");
// params.put("streaming",bodyEntityStream);
String requestBody = JSONUtil.toJsonStr(params); String requestBody = JSONUtil.toJsonStr(params);
// 3. 构建 HTTP 请求 // 3. 构建 HTTP 请求
HttpRequest request = HttpRequest.newBuilder() HttpRequest request = HttpRequest.newBuilder()

View File

@@ -159,7 +159,7 @@ elevenLabs:
# 语音转文本 # 语音转文本
whisper: whisper:
apiUrl: https://api.openai.com/v1/audio/transcriptions apiUrl: https://api.openai.com/v1/audio/transcriptions
model: whisper-1 model: gpt-4o-mini-transcribe
apiKey: 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 apiKey: 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
language: en language: en
apiClientTokenUrl: https://api.openai.com/v1/realtime/sessions apiClientTokenUrl: https://api.openai.com/v1/realtime/sessions
@@ -170,6 +170,7 @@ chatGpt:
apiKey: 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 apiKey: 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
apiUrl: https://api.openai.com/v1/chat/completions apiUrl: https://api.openai.com/v1/chat/completions
model: ft:gpt-3.5-turbo-0125:vetti:interview-unified:CaGyCXOr model: ft:gpt-3.5-turbo-0125:vetti:interview-unified:CaGyCXOr
modelQuestion: gpt-4o-mini
modelCV: ft:gpt-3.5-turbo-0125:vetti:vetti-resume-full:CYT0C8JG modelCV: ft:gpt-3.5-turbo-0125:vetti:vetti-resume-full:CYT0C8JG
role: system role: system

View File

@@ -79,6 +79,6 @@ xss:
# 过滤开关 # 过滤开关
enabled: true enabled: true
# 排除链接(多个用逗号分隔) # 排除链接(多个用逗号分隔)
excludes: /system/notice,/voice-websocket/* excludes: /system/notice,/voice-websocket/*,/voice-websocket-opus/*
# 匹配链接 # 匹配链接
urlPatterns: /system/*,/monitor/*,/tool/*,/voice-websocket/* urlPatterns: /system/*,/monitor/*,/tool/*,/voice-websocket/*,/voice-websocket-opus/*

View File

@@ -159,7 +159,7 @@ elevenLabs:
# 语音转文本 # 语音转文本
whisper: whisper:
apiUrl: https://api.openai.com/v1/audio/transcriptions apiUrl: https://api.openai.com/v1/audio/transcriptions
model: whisper-1 model: gpt-4o-mini-transcribe
apiKey: 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 apiKey: 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
language: en language: en
apiClientTokenUrl: https://api.openai.com/v1/realtime/sessions apiClientTokenUrl: https://api.openai.com/v1/realtime/sessions

View File

@@ -79,6 +79,6 @@ xss:
# 过滤开关 # 过滤开关
enabled: true enabled: true
# 排除链接(多个用逗号分隔) # 排除链接(多个用逗号分隔)
excludes: /system/notice,/voice-websocket/* excludes: /system/notice,/voice-websocket/*,/voice-websocket-opus/*
# 匹配链接 # 匹配链接
urlPatterns: /system/*,/monitor/*,/tool/*,/voice-websocket/* urlPatterns: /system/*,/monitor/*,/tool/*,/voice-websocket/*,/voice-websocket-opus/*

View File

@@ -101,7 +101,7 @@ public class ElevenLabsClient {
CloseableHttpClient httpClient = HttpClients.createDefault(); CloseableHttpClient httpClient = HttpClients.createDefault();
try { try {
// 使用第一个可用语音进行文本转语音(澳洲本地女声) // 使用第一个可用语音进行文本转语音(澳洲本地女声)
String firstVoiceId = "21m00Tcm4TlvDq8ikWAM"; String firstVoiceId = "LwSYl3oLKw4IEbIEei6q";
textToSpeech(inputText, firstVoiceId, outputFile,httpClient); textToSpeech(inputText, firstVoiceId, outputFile,httpClient);
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();

View File

@@ -33,6 +33,9 @@ public class ChatGPTClient {
@Value("${chatGpt.model}") @Value("${chatGpt.model}")
private String model; private String model;
@Value("${chatGpt.modelQuestion}")
private String modelQuestion;
@Value("${chatGpt.modelCV}") @Value("${chatGpt.modelCV}")
private String modelCV; private String modelCV;
@@ -60,9 +63,11 @@ public class ChatGPTClient {
if("CV".equals(type)){ if("CV".equals(type)){
resultText = sendMessage(promptText, modelCV,objectMapper,client,role); resultText = sendMessage(promptText, modelCV,objectMapper,client,role);
}else if("QA".equals(type)){ }else if("QA".equals(type)){
resultText = sendMessage(promptText, modelQuestion,objectMapper,client,role);
} else if("PF".equals(type)){
resultText = sendMessage(promptText, model,objectMapper,client,role); resultText = sendMessage(promptText, model,objectMapper,client,role);
} else { }else {
resultText = sendMessage(promptText, model,objectMapper,client,role); resultText = sendMessage(promptText, modelQuestion,objectMapper,client,role);
} }
} catch (IOException | InterruptedException e) { } catch (IOException | InterruptedException e) {

View File

@@ -113,7 +113,7 @@ public class SecurityConfig
permitAllUrl.getUrls().forEach(url -> requests.antMatchers(url).permitAll()); permitAllUrl.getUrls().forEach(url -> requests.antMatchers(url).permitAll());
// 对于登录login 注册register 验证码captchaImage 允许匿名访问 // 对于登录login 注册register 验证码captchaImage 允许匿名访问
requests.antMatchers("/login", "/register", "/captchaImage","/aiCommon/**", requests.antMatchers("/login", "/register", "/captchaImage","/aiCommon/**",
"/voice-websocket/**","/verification/email/send","/verification/email/verify","/verification/phone/send", "/voice-websocket/**","/voice-websocket-opus/**","/verification/email/send","/verification/email/verify","/verification/phone/send",
"/forgotPassword").permitAll() "/forgotPassword").permitAll()
// 静态资源,可匿名访问 // 静态资源,可匿名访问
.antMatchers(HttpMethod.GET, "/", "/*.html", "/**/*.html", "/**/*.css", "/**/*.js", "/profile/**").permitAll() .antMatchers(HttpMethod.GET, "/", "/*.html", "/**/*.html", "/**/*.css", "/**/*.js", "/profile/**").permitAll()