local
	
		
			
	
		
	
	
		
	
		
			All checks were successful
		
		
	
	
		
			
				
	
				Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 53s
				
			
		
		
	
	
				
					
				
			
		
			All checks were successful
		
		
	
	Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 53s
				
			This commit is contained in:
		
							parent
							
								
									16417c09a9
								
							
						
					
					
						commit
						ebd2f6fabb
					
				
							
								
								
									
										4
									
								
								.env
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								.env
									
									
									
									
									
								
							| @ -1,6 +1,10 @@ | ||||
| # LLM API Configuration | ||||
| LLM_API_URL=http://tianchat.zenithsafe.com:5001/v1 | ||||
| LLM_API_KEY=app-WVRIlrX75YnR3GsAKLlxiOEa | ||||
| 
 | ||||
| LLMOurApiUrl=https://ark.cn-beijing.volces.com/api/v3/bots/chat/completions | ||||
| LLMOurApiKey=e999a241-6bf3-4ee0-99a8-e4de9b617f28 | ||||
| 
 | ||||
| MiniMaxApiKey=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJHcm91cE5hbWUiOiLkuIrmtbfpopzpgJTnp5HmioDmnInpmZDlhazlj7giLCJVc2VyTmFtZSI6IuadqOmqpSIsIkFjY291bnQiOiIiLCJTdWJqZWN0SUQiOiIxNzI4NzEyMzI0OTc5NjI2ODM5IiwiUGhvbmUiOiIxMzM4MTU1OTYxOCIsIkdyb3VwSUQiOiIxNzI4NzEyMzI0OTcxMjM4MjMxIiwiUGFnZU5hbWUiOiIiLCJNYWlsIjoiIiwiQ3JlYXRlVGltZSI6IjIwMjUtMDYtMTYgMTY6Mjk6NTkiLCJUb2tlblR5cGUiOjEsImlzcyI6Im1pbmltYXgifQ.D_JF0-nO89NdMZCYq4ocEyqxtZ9SeEdtMvbeSkZTWspt0XfX2QpPAVh-DI3MCPZTeSmjNWLf4fA_Th2zpVrj4UxWMbGKBeLZWLulNpwAHGMUTdqenuih3daCDPCzs0duhlFyQnZgGcEOGQ476HL72N2klujP8BUy_vfAh_Zv0po-aujQa5RxardDSOsbs49NTPEw0SQEXwaJ5bVmiZ5s-ysJ9pZWSEiyJ6SX9z3JeZHKj9DxHdOw5roZR8izo54e4IoqyLlzEfhOMW7P15-ffDH3M6HGiEmeBaGRYGAIciELjZS19ONNMKsTj-wXNGWtKG-sjAB1uuqkkT5Ul9Dunw | ||||
| MiniMaxApiURL=https://api.minimaxi.com/v1/t2a_v2 | ||||
| APP_ID=1364994890450210816 | ||||
|  | ||||
							
								
								
									
										2
									
								
								main.go
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								main.go
									
									
									
									
									
								
							| @ -26,6 +26,8 @@ func main() { | ||||
| 		MiniMaxApiKey: os.Getenv("MiniMaxApiKey"), | ||||
| 		MiniMaxApiURL: os.Getenv("MiniMaxApiURL"), | ||||
| 		FILE_URL:      os.Getenv("FILE_URL"), | ||||
| 		LLMOurApiUrl:  os.Getenv("LLMOurApiUrl"), | ||||
| 		LLMOurApiKey:  os.Getenv("LLMOurApiKey"), | ||||
| 	}) | ||||
| 
 | ||||
| 	fmt.Println("config: ", llmService) | ||||
|  | ||||
| @ -24,6 +24,8 @@ type Config struct { | ||||
| 	MiniMaxApiKey string | ||||
| 	MiniMaxApiURL string | ||||
| 	FILE_URL      string | ||||
| 	LLMOurApiUrl  string | ||||
| 	LLMOurApiKey  string | ||||
| } | ||||
| 
 | ||||
| // LLMService handles communication with the LLM API | ||||
| @ -51,6 +53,7 @@ type RequestPayload struct { | ||||
| 	ConversationID string                 `json:"conversation_id"` | ||||
| 	Files          []interface{}          `json:"files"` | ||||
| 	Audio          string                 `json:"audio"` | ||||
| 	LlmType        string                 `json:"llm_type"` | ||||
| } | ||||
| 
 | ||||
| // VoiceSetting represents voice configuration | ||||
| @ -112,6 +115,18 @@ type SpeechResponse struct { | ||||
| 	BaseResp  BaseResponse `json:"base_resp"` | ||||
| } | ||||
| 
 | ||||
// LLMOurMessage is a single chat turn sent to the in-house ("ours") LLM
// endpoint, serialized into the OpenAI-style "messages" array.
type LLMOurMessage struct {
	Role    string `json:"role"`    // e.g. "user" — values come straight from caller-supplied data
	Content string `json:"content"` // message text
}

// LLMOurRequestPayload is the request body for the in-house streaming
// chat-completions API (used by CallLLMAPI when llm_type == "ours").
type LLMOurRequestPayload struct {
	Model         string                 `json:"model"`          // bot/model identifier
	Stream        bool                   `json:"stream"`         // always true in current usage
	StreamOptions map[string]interface{} `json:"stream_options"` // e.g. {"include_usage": true}
	Messages      []LLMOurMessage        `json:"messages"`       // conversation history, oldest first
}
| 
 | ||||
| // NewLLMService creates a new instance of LLMService | ||||
| func NewLLMService(config Config) *LLMService { | ||||
| 	return &LLMService{ | ||||
| @ -130,6 +145,7 @@ func (s *LLMService) CallLLMAPI(data map[string]interface{}) (interface{}, error | ||||
| 		ConversationID: getString(data, "conversation_id"), | ||||
| 		Files:          make([]interface{}, 0), | ||||
| 		Audio:          getString(data, "audio"), | ||||
| 		LlmType:        getString(data, "llm_type"), | ||||
| 	} | ||||
| 
 | ||||
| 	fmt.Printf("前端传来的数据:%+v\n", payload) | ||||
| @ -137,13 +153,52 @@ func (s *LLMService) CallLLMAPI(data map[string]interface{}) (interface{}, error | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("error marshaling payload: %v", err) | ||||
| 	} | ||||
| 	fmt.Println(s.config.LLMApiURL + "/chat-messages") | ||||
| 	req, err := http.NewRequest("POST", s.config.LLMApiURL+"/chat-messages", bytes.NewBuffer(jsonData)) | ||||
| 	// req, err := http.NewRequest("GET", "http://localhost:8080/stream-text", nil) | ||||
| 
 | ||||
| 	currentUrl := s.config.LLMApiURL + "/chat-messages" | ||||
| 	fmt.Println(currentUrl) | ||||
| 	req := &http.Request{} | ||||
| 	if payload.LlmType == "ours" { | ||||
| 		// 动态构造 messages | ||||
| 		var messages []LLMOurMessage | ||||
| 		if msgs, ok := data["messages"]; ok { | ||||
| 			if arr, ok := msgs.([]interface{}); ok { | ||||
| 				for _, m := range arr { | ||||
| 					if mMap, ok := m.(map[string]interface{}); ok { | ||||
| 						role, _ := mMap["role"].(string) | ||||
| 						content, _ := mMap["content"].(string) | ||||
| 						messages = append(messages, LLMOurMessage{Role: role, Content: content}) | ||||
| 					} | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 		// fallback: 如果没有 messages,则用 query 作为 user 消息 | ||||
| 		if len(messages) == 0 && payload.Query != "" { | ||||
| 			messages = append(messages, LLMOurMessage{Role: "user", Content: payload.Query}) | ||||
| 		} | ||||
| 		ourPayload := LLMOurRequestPayload{ | ||||
| 			Model:         "bot-20250522162100-44785", // 可根据 data 或配置传入 | ||||
| 			Stream:        true, | ||||
| 			StreamOptions: map[string]interface{}{"include_usage": true}, | ||||
| 			Messages:      messages, | ||||
| 		} | ||||
| 		jsonData, err = json.Marshal(ourPayload) | ||||
| 		if err != nil { | ||||
| 			return nil, fmt.Errorf("error marshaling ourPayload: %v", err) | ||||
| 		} | ||||
| 		currentUrl = s.config.LLMOurApiUrl | ||||
| 		req, err = http.NewRequest("POST", currentUrl, bytes.NewBuffer(jsonData)) | ||||
| 		if err != nil { | ||||
| 			return nil, fmt.Errorf("error creating request: %v", err) | ||||
| 		} | ||||
| 		req.Header.Set("Authorization", "Bearer "+s.config.LLMOurApiKey) | ||||
| 		req.Header.Set("Content-Type", "application/json") | ||||
| 		return s.handleStreamingResponseV2(req, data, payload.Audio) | ||||
| 	} | ||||
| 
 | ||||
| 	req, err = http.NewRequest("POST", currentUrl, bytes.NewBuffer(jsonData)) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("error creating request: %v", err) | ||||
| 	} | ||||
| 	req.Header.Set("Authorization", "Bearer "+s.config.LLMApiKey) | ||||
| 	req.Header.Set("Content-Type", "application/json") | ||||
| 
 | ||||
| @ -155,6 +210,134 @@ func (s *LLMService) CallLLMAPI(data map[string]interface{}) (interface{}, error | ||||
| 	return s.handleNonStreamingResponse(req) | ||||
| } | ||||
| 
 | ||||
| // processStreamSegment 处理流式文本分段、语音合成等逻辑,返回 new_message、audio、是否需要发送 | ||||
| func (s *LLMService) processStreamSegment(initialSessage *string, all_message *string, answer string, audio_type string) (string, string, bool) { | ||||
| 	// 定义标点符号map | ||||
| 	punctuations := map[string]bool{ | ||||
| 		",": true, ",": true, // 逗号 | ||||
| 		".": true, "。": true, // 句号 | ||||
| 		"!": true, "!": true, // 感叹号 | ||||
| 		"?": true, "?": true, // 问号 | ||||
| 		";": true, ";": true, // 分号 | ||||
| 		":": true, ":": true, // 冒号 | ||||
| 		"、": true, | ||||
| 	} | ||||
| 
 | ||||
| 	// 删除字符串前后的标点符号 | ||||
| 	trimPunctuation := func(s string) string { | ||||
| 		if len(s) > 0 { | ||||
| 			lastRune, size := utf8.DecodeLastRuneInString(s) | ||||
| 			if punctuations[string(lastRune)] { | ||||
| 				s = s[:len(s)-size] | ||||
| 			} | ||||
| 		} | ||||
| 		return s | ||||
| 	} | ||||
| 
 | ||||
| 	// 判断字符串是否包含标点符号 | ||||
| 	containsPunctuation := func(s string) bool { | ||||
| 		for _, char := range s { | ||||
| 			if punctuations[string(char)] { | ||||
| 				return true | ||||
| 			} | ||||
| 		} | ||||
| 		return false | ||||
| 	} | ||||
| 
 | ||||
| 	// 按标点符号分割文本 | ||||
| 	splitByPunctuation := func(s string) []string { | ||||
| 		var result []string | ||||
| 		var current string | ||||
| 		for _, char := range s { | ||||
| 			if punctuations[string(char)] { | ||||
| 				if current != "" { | ||||
| 					result = append(result, current+string(char)) | ||||
| 					current = "" | ||||
| 				} | ||||
| 			} else { | ||||
| 				current += string(char) | ||||
| 			} | ||||
| 		} | ||||
| 		if current != "" { | ||||
| 			result = append(result, current) | ||||
| 		} | ||||
| 		return result | ||||
| 	} | ||||
| 
 | ||||
| 	*initialSessage += answer | ||||
| 	*all_message += answer | ||||
| 	new_message := "" | ||||
| 	if containsPunctuation(*initialSessage) { | ||||
| 		segments := splitByPunctuation(*initialSessage) | ||||
| 		if len(segments) > 1 { | ||||
| 			format_message := strings.Join(segments[:len(segments)-1], "") | ||||
| 			if utf8.RuneCountInString(format_message) > 10 { | ||||
| 				*initialSessage = segments[len(segments)-1] | ||||
| 				new_message = strings.Join(segments[:len(segments)-1], "") | ||||
| 			} else { | ||||
| 				return "", "", false | ||||
| 			} | ||||
| 		} else { | ||||
| 			if utf8.RuneCountInString(*initialSessage) > 10 { | ||||
| 				new_message = *initialSessage | ||||
| 				*initialSessage = "" | ||||
| 			} else if utf8.RuneCountInString(*initialSessage) <= 10 && strings.HasSuffix(*initialSessage, "。") { | ||||
| 				new_message = *initialSessage | ||||
| 				*initialSessage = "" | ||||
| 			} else { | ||||
| 				return "", "", false | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if new_message == "" { | ||||
| 		return "", "", false | ||||
| 	} | ||||
| 	s_msg := strings.TrimSpace(new_message) | ||||
| 	new_message = trimPunctuation(s_msg) | ||||
| 
 | ||||
| 	audio := "" | ||||
| 	for i := 0; i < 1; i++ { | ||||
| 		speechResp, err := s.SynthesizeSpeech(new_message, audio_type) | ||||
| 		if err != nil { | ||||
| 			fmt.Printf("Error synthesizing speech: %v\n", err) | ||||
| 			break | ||||
| 		} | ||||
| 		audio = speechResp.Data.Audio | ||||
| 		if audio != "" { | ||||
| 			resp, err := http.Get(audio) | ||||
| 			if err != nil { | ||||
| 				fmt.Printf("Error downloading audio: %v\n", err) | ||||
| 			} else { | ||||
| 				defer resp.Body.Close() | ||||
| 				audioBytes, err := io.ReadAll(resp.Body) | ||||
| 				if err != nil { | ||||
| 					fmt.Printf("Error reading audio data: %v\n", err) | ||||
| 				} else { | ||||
| 					originalPath := fmt.Sprintf("audio/original_%d.wav", time.Now().UnixNano()) | ||||
| 					if err := os.WriteFile(originalPath, audioBytes, 0644); err != nil { | ||||
| 						fmt.Printf("Error saving original audio: %v\n", err) | ||||
| 					} | ||||
| 					audioBase64 := base64.StdEncoding.EncodeToString(audioBytes) | ||||
| 					trimmedAudio, err := s.TrimAudioSilence(audioBase64) | ||||
| 					if err != nil { | ||||
| 						fmt.Printf("Error trimming audio silence: %v\n", err) | ||||
| 					} else { | ||||
| 						audio_path := fmt.Sprintf("trimmed_%d.wav", time.Now().UnixNano()) | ||||
| 						outputPath := "audio/" + audio_path | ||||
| 						if err := s.SaveBase64AsWAV(trimmedAudio, outputPath); err != nil { | ||||
| 							fmt.Printf("Error saving trimmed WAV file: %v\n", err) | ||||
| 						} | ||||
| 						audio = s.config.FILE_URL + audio_path | ||||
| 					} | ||||
| 				} | ||||
| 			} | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
| 	return new_message, audio, true | ||||
| } | ||||
| 
 | ||||
| // handleStreamingResponse processes streaming responses | ||||
| func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]interface{}, audio_type string) (chan Message, error) { | ||||
| 	resp, err := s.client.Do(req) | ||||
| @ -374,6 +557,95 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string] | ||||
| 	return messageChan, nil | ||||
| } | ||||
| 
 | ||||
| // handleStreamingResponseV2 适配新流式返回格式 | ||||
| func (s *LLMService) handleStreamingResponseV2(req *http.Request, data map[string]interface{}, audio_type string) (chan Message, error) { | ||||
| 	resp, err := s.client.Do(req) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("error making request: %v", err) | ||||
| 	} | ||||
| 
 | ||||
| 	if resp.StatusCode != http.StatusOK { | ||||
| 		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode) | ||||
| 	} | ||||
| 
 | ||||
| 	messageChan := make(chan Message, 100) | ||||
| 	all_message := "" | ||||
| 	initialSessage := "" | ||||
| 	go func() { | ||||
| 		defer resp.Body.Close() | ||||
| 		defer close(messageChan) | ||||
| 		reader := bufio.NewReader(resp.Body) | ||||
| 		for { | ||||
| 			line, err := reader.ReadString('\n') | ||||
| 			if err != nil { | ||||
| 				if err == io.EOF { | ||||
| 					break | ||||
| 				} | ||||
| 				fmt.Printf("Error reading line: %v\n", err) | ||||
| 				continue | ||||
| 			} | ||||
| 
 | ||||
| 			line = strings.TrimSpace(line) | ||||
| 			if line == "" { | ||||
| 				continue | ||||
| 			} | ||||
| 
 | ||||
| 			// line = strings.TrimSpace(line) | ||||
| 			if strings.HasPrefix(line, "data:") { | ||||
| 				line = strings.TrimSpace(strings.TrimPrefix(line, "data:")) | ||||
| 			} | ||||
| 
 | ||||
| 			fmt.Println("line: ", line) | ||||
| 
 | ||||
| 			if line == "[DONE]" { | ||||
| 				messageChan <- Message{ | ||||
| 					Answer:         "", | ||||
| 					IsEnd:          true, | ||||
| 					ConversationID: getString(data, "conversation_id"), | ||||
| 					TaskID:         getString(data, "task_id"), | ||||
| 				} | ||||
| 				return | ||||
| 			} | ||||
| 
 | ||||
| 			var jsonData map[string]interface{} | ||||
| 			if err := json.Unmarshal([]byte(line), &jsonData); err != nil { | ||||
| 				fmt.Printf("Error unmarshaling JSON: %v\n", err) | ||||
| 				continue | ||||
| 			} | ||||
| 			choices, ok := jsonData["choices"].([]interface{}) | ||||
| 			if !ok || len(choices) == 0 { | ||||
| 				continue | ||||
| 			} | ||||
| 			choice, ok := choices[0].(map[string]interface{}) | ||||
| 			if !ok { | ||||
| 				continue | ||||
| 			} | ||||
| 			delta, ok := choice["delta"].(map[string]interface{}) | ||||
| 			if !ok { | ||||
| 				continue | ||||
| 			} | ||||
| 			content, _ := delta["content"].(string) | ||||
| 			if content == "" { | ||||
| 				continue | ||||
| 			} | ||||
| 
 | ||||
| 			new_message, audio, needSend := s.processStreamSegment(&initialSessage, &all_message, content, audio_type) | ||||
| 			if !needSend { | ||||
| 				continue | ||||
| 			} | ||||
| 			messageChan <- Message{ | ||||
| 				Answer:         new_message, | ||||
| 				IsEnd:          false, | ||||
| 				ConversationID: getString(data, "conversation_id"), | ||||
| 				TaskID:         getString(data, "task_id"), | ||||
| 				ClientID:       getString(data, "conversation_id"), | ||||
| 				AudioData:      audio, | ||||
| 			} | ||||
| 		} | ||||
| 	}() | ||||
| 	return messageChan, nil | ||||
| } | ||||
| 
 | ||||
| // handleNonStreamingResponse processes non-streaming responses | ||||
| func (s *LLMService) handleNonStreamingResponse(req *http.Request) (map[string]interface{}, error) { | ||||
| 	resp, err := s.client.Do(req) | ||||
|  | ||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user
	 Song367
						Song367