Compare commits: master...prod-yantu
37 commits
| SHA1 |
|---|
| 96a8076246 |
| 1b6a7541de |
| f3ac9f1675 |
| 6cdc1c76ac |
| a2994f3905 |
| 5b0e17be2e |
| de881414dc |
| 9822b9bfc9 |
| 53159ca24c |
| fe13751624 |
| 9aee63f624 |
| f299cf0145 |
| bbf7843a66 |
| dee9dd7b08 |
| 0781e8bd86 |
| 602bb1fce2 |
| a9230fcf81 |
| ebd2f6fabb |
| 16417c09a9 |
| b73c2d2412 |
| d52a8064db |
| f4b35f4596 |
| 7f89c584c0 |
| ef15f5894e |
| 99d6cb9f9f |
| 1534a0228e |
| 962b9d785a |
| 68919732db |
| 0c708da80d |
| 112a7dd70d |
| 927406b6c8 |
| 76ac16eb13 |
| 2dfb2cc519 |
| a4d2cfe3e1 |
| 1585a4491c |
| d35dfba895 |
| a632bc7332 |
.env (16 changed lines)
@@ -1,11 +1,15 @@
# LLM API Configuration
LLM_API_URL=http://tianchat.zenithsafe.com:5001/v1
LLM_API_KEY=app-k9WhnUvAPCVcSoPDEYVUxXgC
LLM_API_URL=http://101.133.149.116:8777/v1
LLM_API_KEY=app-7mh1IAGueaBodwdMflz8Omqv

LLMOurApiUrl=https://ark.cn-beijing.volces.com/api/v3/bots/chat/completions
LLMOurApiKey=e999a241-6bf3-4ee0-99a8-e4de9b617f28

MiniMaxApiKey=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJHcm91cE5hbWUiOiLkuIrmtbfpopzpgJTnp5HmioDmnInpmZDlhazlj7giLCJVc2VyTmFtZSI6IuadqOmqpSIsIkFjY291bnQiOiIiLCJTdWJqZWN0SUQiOiIxNzI4NzEyMzI0OTc5NjI2ODM5IiwiUGhvbmUiOiIxMzM4MTU1OTYxOCIsIkdyb3VwSUQiOiIxNzI4NzEyMzI0OTcxMjM4MjMxIiwiUGFnZU5hbWUiOiIiLCJNYWlsIjoiIiwiQ3JlYXRlVGltZSI6IjIwMjUtMDYtMTYgMTY6Mjk6NTkiLCJUb2tlblR5cGUiOjEsImlzcyI6Im1pbmltYXgifQ.D_JF0-nO89NdMZCYq4ocEyqxtZ9SeEdtMvbeSkZTWspt0XfX2QpPAVh-DI3MCPZTeSmjNWLf4fA_Th2zpVrj4UxWMbGKBeLZWLulNpwAHGMUTdqenuih3daCDPCzs0duhlFyQnZgGcEOGQ476HL72N2klujP8BUy_vfAh_Zv0po-aujQa5RxardDSOsbs49NTPEw0SQEXwaJ5bVmiZ5s-ysJ9pZWSEiyJ6SX9z3JeZHKj9DxHdOw5roZR8izo54e4IoqyLlzEfhOMW7P15-ffDH3M6HGiEmeBaGRYGAIciELjZS19ONNMKsTj-wXNGWtKG-sjAB1uuqkkT5Ul9Dunw
MiniMaxApiURL=https://api.minimaxi.com/v1/t2a_v2
APP_ID=1364994890450210816
APP_KEY=b4839cb2-cb81-4472-a2c1-2abf31e4bb27
APP_ID=1364966010532270080
APP_KEY=a72c98fa-cbe3-449e-b004-36523437bc5d
SIG_EXP=3600
FILE_URL=http://172.17.0.1:6200/
FILE_URL=http://14.103.114.237/gongzheng-backend/audio/
# Server Configuration
PORT=8080
PORT=80
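Aside: the hunk above replaces 11 lines with 15, so old and new values appear next to each other. For orientation, a minimal, hypothetical Go sketch of how these variables are consumed; it mirrors the `os.Getenv` calls added in main.go further down, but this particular `Config` literal is illustrative, not the project's actual definition.

```go
package main

import (
	"fmt"
	"os"
)

// Config mirrors (a subset of) the fields used in this change set.
// The real struct lives in the service code shown later in this diff.
type Config struct {
	LLMApiURL    string
	LLMApiKey    string
	LLMOurApiUrl string
	LLMOurApiKey string
	FileURL      string
	Port         string
}

func main() {
	// Each value comes straight from the environment, i.e. from .env
	// when the container is started with these variables exported.
	cfg := Config{
		LLMApiURL:    os.Getenv("LLM_API_URL"),
		LLMApiKey:    os.Getenv("LLM_API_KEY"),
		LLMOurApiUrl: os.Getenv("LLMOurApiUrl"),
		LLMOurApiKey: os.Getenv("LLMOurApiKey"),
		FileURL:      os.Getenv("FILE_URL"),
		Port:         os.Getenv("PORT"),
	}
	if cfg.Port == "" {
		cfg.Port = "80" // matches the new default in main.go
	}
	fmt.Printf("config loaded: %+v\n", cfg)
}
```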
.gitea/charts/Chart.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
apiVersion: v2
name: homeland
description: A Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
.gitea/charts/templates/NOTES.txt (new file, 22 lines)
@@ -0,0 +1,22 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
  {{- range .paths }}
  http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
  {{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
  export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "homeland.fullname" . }})
  export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
  echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
     NOTE: It may take a few minutes for the LoadBalancer IP to be available.
           You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "homeland.fullname" . }}'
  export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "homeland.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
  echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
  export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "homeland.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
  export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
  echo "Visit http://127.0.0.1:8080 to use your application"
  kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}
.gitea/charts/templates/_helpers.tpl (new file, 62 lines)
@@ -0,0 +1,62 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "homeland.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "homeland.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "homeland.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "homeland.labels" -}}
helm.sh/chart: {{ include "homeland.chart" . }}
{{ include "homeland.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "homeland.selectorLabels" -}}
app.kubernetes.io/name: {{ include "homeland.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "homeland.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "homeland.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
.gitea/charts/templates/deployment.yaml (new file, 61 lines)
@@ -0,0 +1,61 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "homeland.fullname" . }}
  labels:
    {{- include "homeland.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "homeland.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "homeland.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "homeland.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          ports:
            - name: http
              containerPort: {{ .Values.service.port }}
              protocol: TCP
          livenessProbe:
            httpGet:
              path: /
              port: http
          readinessProbe:
            httpGet:
              path: /
              port: http
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
.gitea/charts/templates/hpa.yaml (new file, 32 lines)
@@ -0,0 +1,32 @@
{{- if .Values.autoscaling.enabled }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: {{ include "homeland.fullname" . }}
  labels:
    {{- include "homeland.labels" . | nindent 4 }}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: {{ include "homeland.fullname" . }}
  minReplicas: {{ .Values.autoscaling.minReplicas }}
  maxReplicas: {{ .Values.autoscaling.maxReplicas }}
  metrics:
    {{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
    {{- end }}
    {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
    - type: Resource
      resource:
        name: memory
        target:
          type: Utilization
          averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
    {{- end }}
{{- end }}
.gitea/charts/templates/ingress.yaml (new file, 61 lines)
@@ -0,0 +1,61 @@
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "homeland.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
  {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
  {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
  {{- end }}
{{- end }}
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1beta1
{{- else -}}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
metadata:
  name: {{ $fullName }}
  labels:
    {{- include "homeland.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
  ingressClassName: {{ .Values.ingress.className }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }}
            pathType: {{ .pathType }}
            {{- end }}
            backend:
              {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }}
              service:
                name: {{ $fullName }}
                port:
                  number: {{ $svcPort }}
              {{- else }}
              serviceName: {{ $fullName }}
              servicePort: {{ $svcPort }}
              {{- end }}
          {{- end }}
    {{- end }}
{{- end }}
.gitea/charts/templates/service.yaml (new file, 15 lines)
@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
  name: {{ include "homeland.fullname" . }}
  labels:
    {{- include "homeland.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type }}
  ports:
    - port: {{ .Values.service.port }}
      targetPort: http
      protocol: TCP
      name: http
  selector:
    {{- include "homeland.selectorLabels" . | nindent 4 }}
.gitea/charts/templates/serviceaccount.yaml (new file, 12 lines)
@@ -0,0 +1,12 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ include "homeland.serviceAccountName" . }}
  labels:
    {{- include "homeland.labels" . | nindent 4 }}
  {{- with .Values.serviceAccount.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
{{- end }}
.gitea/charts/templates/tests/test-connection.yaml (new file, 15 lines)
@@ -0,0 +1,15 @@
apiVersion: v1
kind: Pod
metadata:
  name: "{{ include "homeland.fullname" . }}-test-connection"
  labels:
    {{- include "homeland.labels" . | nindent 4 }}
  annotations:
    "helm.sh/hook": test
spec:
  containers:
    - name: wget
      image: busybox
      command: ['wget']
      args: ['{{ include "homeland.fullname" . }}:{{ .Values.service.port }}']
  restartPolicy: Never
.gitea/charts/values.yaml (new file, 84 lines)
@@ -0,0 +1,84 @@
# Default values for homeland.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

replicaCount: 1

image:
  repository: 172.16.54.94:5000/homeland
  pullPolicy: IfNotPresent
  # Overrides the image tag whose default is the chart appVersion.
  tag: "29"

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: ""

podAnnotations: {}

podSecurityContext: {}
  # fsGroup: 2000

securityContext: {}
  # capabilities:
  #   drop:
  #   - ALL
  # readOnlyRootFilesystem: true
  # runAsNonRoot: true
  # runAsUser: 1000

service:
  type: ClusterIP
  port: 80

ingress:
  enabled: false
  className: ""
  annotations: {}
    # kubernetes.io/ingress.class: nginx
    # kubernetes.io/tls-acme: "true"
  hosts:
    - host: homeland.local
      paths:
        - path: /
          pathType: ImplementationSpecific
  tls: []
  #  - secretName: chart-example-tls
  #    hosts:
  #      - chart-example.local

resources: {}
  # We usually recommend not to specify default resources and to leave this as a conscious
  # choice for the user. This also increases chances charts run on environments with little
  # resources, such as Minikube. If you do want to specify resources, uncomment the following
  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi

autoscaling:
  enabled: false
  minReplicas: 1
  maxReplicas: 100
  targetCPUUtilizationPercentage: 80
  # targetMemoryUtilizationPercentage: 80

nodeSelector: {
  kubernetes.io/hostname: devsvr1
}

tolerations: []

affinity: {}
Gitea Actions workflow (file path not shown in this view):
@@ -3,7 +3,7 @@ run-name: ${{ gitea.actor }} is testing out Gitea Actions 🚀
on:
  push:
    branches:
      - 'prod'
      - 'prod-yantu'

env:
  BUILD: staging
@@ -11,7 +11,7 @@ env:

jobs:
  Explore-Gitea-Actions:
    runs-on: stream9
    runs-on: yantoo-ci
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ gitea.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
@@ -29,18 +29,13 @@ jobs:
      - name: Build and push
        uses: https://gitea.yantootech.com/neil/build-push-action@v6
        with:
          push: false
          tags: gong-zheng-api:${{ gitea.run_id }}
      - name: Run docker
          push: true
          tags: 14.103.114.237:30005/gongzheng-backend:${{ gitea.run_id }}
      - name: Install
        run: |
          pwd
          if [ "$(docker ps -q -f name=^gong-zheng-api$)" ]; then
            docker stop gong-zheng-api
          fi
          docker run -d --rm --name gong-zheng-api \
            -v /usr/share/fonts/opentype/noto:/usr/share/fonts \
            -v $(pwd)/audio:/app/audio \
            -p 6211:8080 \
            -p 6212:8000 \
            gong-zheng-api:${{ gitea.run_id }}
          helm upgrade --install gongzheng-backend ./.gitea/charts \
            --namespace gongzhengb \
            --create-namespace \
            --set image.repository=14.103.114.237:30005/gongzheng-backend \
            --set image.tag=${{ gitea.run_id }}
      - run: echo "🍏 This job's status is ${{ job.status }}."
Dockerfile (53 changed lines)
@@ -1,56 +1,31 @@
# Build the Go service
FROM golang:1.21-alpine AS go-builder

WORKDIR /app
WORKDIR /usr/src/app

# Install the required build tools
RUN apk add --no-cache gcc musl-dev

# Set the GOPROXY environment variable
ENV GOPROXY=https://goproxy.cn,direct

# Copy the Go project files
COPY . .

# Build the Go service
RUN go build -o main ./main.go

# Build the Python service
FROM python:3.11-slim
# Runtime stage
FROM alpine:latest

WORKDIR /app
WORKDIR /usr/src/app

# Install the required system dependencies
RUN apt-get update && apt-get install -y \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*
# Copy the compiled binary and config file from the build stage
COPY --from=go-builder /usr/src/app/main .
COPY --from=go-builder /usr/src/app/.env .

# Create the audio directory
RUN mkdir -p /app/audio
# Expose the port (adjust to match your API service port)
EXPOSE 8080

# Copy the Python file server
COPY file_server.py .

# Copy the compiled Go service from the go-builder stage
COPY --from=go-builder /app/main .

# Copy the config file (if any)
COPY --from=go-builder /app/config.yaml .

# Set environment variables
ENV PORT=8000
ENV GO_PORT=8080

# Create the startup script
RUN echo '#!/bin/bash\n\
# Start the Go service\n\
./main &\n\
# Start the Python file server\n\
python file_server.py -p $PORT\n\
' > /app/start.sh && chmod +x /app/start.sh

# Expose ports
EXPOSE 8000 8080

# Set the working directory
WORKDIR /app

# Start the services
CMD ["/app/start.sh"]
# Run the service
CMD ["./main"]
Docker Compose file (path not shown in this view):
@@ -4,11 +4,9 @@ services:
  app:
    build: .
    ports:
      - "8000:8000"  # Python file server port
      - "8080:8080"  # Go service port
    volumes:
      - ./audio:/app/audio  # mount the audio directory
    environment:
      - PORT=8000
      - GO_PORT=8080
    restart: unless-stopped
Python file server (file_server.py, per the Dockerfile above):
@@ -62,7 +62,6 @@ class FileHandler(http.server.SimpleHTTPRequestHandler):
            with open(file_path, 'rb') as f:
                self.send_response(200)
                self.send_header('Content-type', content_type)
                self.send_header('Content-Disposition', f'attachment; filename="{os.path.basename(file_path)}"')
                self.end_headers()
                self.wfile.write(f.read())
        except Exception as e:
main.go (22 changed lines)
@@ -1,6 +1,7 @@
package main

import (
    "fmt"
    "log"
    "os"
    "strconv"
@@ -25,8 +26,12 @@ func main() {
        MiniMaxApiKey: os.Getenv("MiniMaxApiKey"),
        MiniMaxApiURL: os.Getenv("MiniMaxApiURL"),
        FILE_URL:      os.Getenv("FILE_URL"),
        LLMOurApiUrl:  os.Getenv("LLMOurApiUrl"),
        LLMOurApiKey:  os.Getenv("LLMOurApiKey"),
    })

    fmt.Println("config: ", llmService)

    // Get token configuration from environment variables
    sigExp, err := strconv.Atoi(os.Getenv("SIG_EXP"))
    if err != nil {
@@ -69,9 +74,24 @@ func main() {
    router.POST("/speech/synthesize", llmHandler.SynthesizeSpeech)
    router.GET("/stream-text", llmHandler.StreamText)
    router.POST("/token", tokenHandler.GenerateToken)
    // Define routes
    router.GET("/", func(c *gin.Context) {
        c.JSON(200, gin.H{
            "status":  "ok",
            "message": "Service is healthy",
        })
    })
    // Define routes
    router.GET("/health", func(c *gin.Context) {
        c.JSON(200, gin.H{
            "status":  "ok",
            "message": "Service is healthy",
        })
    })

    // Serve static files
    router.Static("/static", "./static")
    router.Static("/audio", "./audio") // add access to the audio files

    // Get host and port from environment variables
    host := os.Getenv("HOST")
@@ -80,7 +100,7 @@ func main() {
    }
    port := os.Getenv("PORT")
    if port == "" {
        port = "8080"
        port = "80"
    }

    // Start server
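The new `/` and `/health` routes are what the chart's liveness and readiness probes (path `/`, port `http`) end up hitting. A small, hypothetical test sketch of that behaviour; the handler body is copied from the diff, while the test scaffolding is assumed rather than taken from the repository.

```go
package main

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
)

// newRouter registers the same health handlers that the diff adds to main.go.
func newRouter() *gin.Engine {
	gin.SetMode(gin.TestMode)
	r := gin.New()
	handler := func(c *gin.Context) {
		c.JSON(200, gin.H{"status": "ok", "message": "Service is healthy"})
	}
	r.GET("/", handler)
	r.GET("/health", handler)
	return r
}

func TestHealthEndpoints(t *testing.T) {
	r := newRouter()
	for _, path := range []string{"/", "/health"} {
		w := httptest.NewRecorder()
		req := httptest.NewRequest(http.MethodGet, path, nil)
		r.ServeHTTP(w, req)
		if w.Code != http.StatusOK {
			t.Fatalf("GET %s: expected 200, got %d", path, w.Code)
		}
	}
}
```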
LLM service source file (path not shown in this view):
@@ -24,6 +24,8 @@ type Config struct {
    MiniMaxApiKey string
    MiniMaxApiURL string
    FILE_URL      string
    LLMOurApiUrl  string
    LLMOurApiKey  string
}

// LLMService handles communication with the LLM API
@@ -51,6 +53,7 @@ type RequestPayload struct {
    ConversationID string        `json:"conversation_id"`
    Files          []interface{} `json:"files"`
    Audio          string        `json:"audio"`
    LlmType        string        `json:"llm_type"`
}

// VoiceSetting represents voice configuration
@@ -112,6 +115,18 @@ type SpeechResponse struct {
    BaseResp BaseResponse `json:"base_resp"`
}

type LLMOurMessage struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

type LLMOurRequestPayload struct {
    Model         string                 `json:"model"`
    Stream        bool                   `json:"stream"`
    StreamOptions map[string]interface{} `json:"stream_options"`
    Messages      []LLMOurMessage        `json:"messages"`
}

// NewLLMService creates a new instance of LLMService
func NewLLMService(config Config) *LLMService {
    return &LLMService{
@@ -130,6 +145,7 @@ func (s *LLMService) CallLLMAPI(data map[string]interface{}) (interface{}, error
        ConversationID: getString(data, "conversation_id"),
        Files:          make([]interface{}, 0),
        Audio:          getString(data, "audio"),
        LlmType:        getString(data, "llm_type"),
    }

    fmt.Printf("Data from the frontend: %+v\n", payload)
@@ -138,12 +154,51 @@ func (s *LLMService) CallLLMAPI(data map[string]interface{}) (interface{}, error
        return nil, fmt.Errorf("error marshaling payload: %v", err)
    }

    req, err := http.NewRequest("POST", s.config.LLMApiURL+"/chat-messages", bytes.NewBuffer(jsonData))
    // req, err := http.NewRequest("GET", "http://localhost:8080/stream-text", nil)
    currentUrl := s.config.LLMApiURL + "/chat-messages"
    fmt.Println(currentUrl)
    req := &http.Request{}
    if payload.LlmType == "ours" {
        // Build the messages dynamically
        var messages []LLMOurMessage
        if msgs, ok := data["messages"]; ok {
            if arr, ok := msgs.([]interface{}); ok {
                for _, m := range arr {
                    if mMap, ok := m.(map[string]interface{}); ok {
                        role, _ := mMap["role"].(string)
                        content, _ := mMap["content"].(string)
                        messages = append(messages, LLMOurMessage{Role: role, Content: content})
                    }
                }
            }
        }
        // fallback: if no messages were given, use query as the user message
        if len(messages) == 0 && payload.Query != "" {
            messages = append(messages, LLMOurMessage{Role: "user", Content: payload.Query})
        }
        ourPayload := LLMOurRequestPayload{
            Model:         "bot-20250522162100-44785", // could be taken from data or from config
            Stream:        true,
            StreamOptions: map[string]interface{}{"include_usage": true},
            Messages:      messages,
        }
        jsonData, err = json.Marshal(ourPayload)
        if err != nil {
            return nil, fmt.Errorf("error marshaling ourPayload: %v", err)
        }
        currentUrl = s.config.LLMOurApiUrl
        req, err = http.NewRequest("POST", currentUrl, bytes.NewBuffer(jsonData))
        if err != nil {
            return nil, fmt.Errorf("error creating request: %v", err)
        }
        req.Header.Set("Authorization", "Bearer "+s.config.LLMOurApiKey)
        req.Header.Set("Content-Type", "application/json")
        return s.handleStreamingResponseV2(req, data, payload.Audio)
    }

    req, err = http.NewRequest("POST", currentUrl, bytes.NewBuffer(jsonData))
    if err != nil {
        return nil, fmt.Errorf("error creating request: %v", err)
    }
    req.Header.Set("Authorization", "Bearer "+s.config.LLMApiKey)
    req.Header.Set("Content-Type", "application/json")
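To make the `ours` branch above concrete, a hypothetical standalone sketch of the request body it produces for the configured LLMOurApiUrl endpoint; the struct definitions are copied from this diff, while the sample message is a placeholder.

```go
package main

import (
	"encoding/json"
	"fmt"
)

type LLMOurMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type LLMOurRequestPayload struct {
	Model         string                 `json:"model"`
	Stream        bool                   `json:"stream"`
	StreamOptions map[string]interface{} `json:"stream_options"`
	Messages      []LLMOurMessage        `json:"messages"`
}

func main() {
	payload := LLMOurRequestPayload{
		Model:         "bot-20250522162100-44785",
		Stream:        true,
		StreamOptions: map[string]interface{}{"include_usage": true},
		Messages:      []LLMOurMessage{{Role: "user", Content: "你好"}},
	}
	body, _ := json.MarshalIndent(payload, "", "  ")
	// This JSON is what gets POSTed with the
	// "Authorization: Bearer <LLMOurApiKey>" header.
	fmt.Println(string(body))
}
```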
@@ -155,6 +210,135 @@ func (s *LLMService) CallLLMAPI(data map[string]interface{}) (interface{}, error
    return s.handleNonStreamingResponse(req)
}

// processStreamSegment handles streamed-text segmentation, speech synthesis, etc.; returns new_message, audio, and whether it should be sent
func (s *LLMService) processStreamSegment(initialSessage *string, all_message *string, answer string, audio_type string) (string, string, bool) {
    // Define the punctuation map
    punctuations := map[string]bool{
        ",": true, ",": true, // comma
        ".": true, "。": true, // period
        "!": true, "!": true, // exclamation mark
        "?": true, "?": true, // question mark
        ";": true, ";": true, // semicolon
        ":": true, ":": true, // colon
        "、": true,
    }

    // Strip punctuation from the ends of the string
    trimPunctuation := func(s string) string {
        if len(s) > 0 {
            lastRune, size := utf8.DecodeLastRuneInString(s)
            if punctuations[string(lastRune)] {
                s = s[:len(s)-size]
            }
        }
        return s
    }

    // Check whether the string contains punctuation
    containsPunctuation := func(s string) bool {
        for _, char := range s {
            if punctuations[string(char)] {
                return true
            }
        }
        return false
    }

    // Split the text at punctuation marks
    splitByPunctuation := func(s string) []string {
        var result []string
        var current string
        for _, char := range s {
            if punctuations[string(char)] {
                if current != "" {
                    result = append(result, current+string(char))
                    current = ""
                }
            } else {
                current += string(char)
            }
        }
        if current != "" {
            result = append(result, current)
        }
        return result
    }

    *initialSessage += answer
    *all_message += answer
    new_message := ""
    if containsPunctuation(*initialSessage) {
        segments := splitByPunctuation(*initialSessage)
        if len(segments) > 1 {
            format_message := strings.Join(segments[:len(segments)-1], "")
            if utf8.RuneCountInString(format_message) > 10 {
                *initialSessage = segments[len(segments)-1]
                new_message = strings.Join(segments[:len(segments)-1], "")
            } else {
                return "", "", false
            }
        } else {
            if utf8.RuneCountInString(*initialSessage) > 10 {
                new_message = *initialSessage
                *initialSessage = ""
            } else if utf8.RuneCountInString(*initialSessage) <= 10 && strings.HasSuffix(*initialSessage, "。") {
                new_message = *initialSessage
                *initialSessage = ""
            } else {
                return "", "", false
            }
        }
    }

    if new_message == "" {
        return "", "", false
    }
    s_msg := strings.TrimSpace(new_message)
    new_message = trimPunctuation(s_msg)

    audio := ""
    for i := 0; i < 1; i++ {
        speechResp, err := s.SynthesizeSpeech(new_message, audio_type)
        if err != nil {
            fmt.Printf("Error synthesizing speech: %v\n", err)
            break
        }
        fmt.Println("Audio triggered", speechResp)
        audio = speechResp.Data.Audio
        if audio != "" {
            resp, err := http.Get(audio)
            if err != nil {
                fmt.Printf("Error downloading audio: %v\n", err)
            } else {
                defer resp.Body.Close()
                audioBytes, err := io.ReadAll(resp.Body)
                if err != nil {
                    fmt.Printf("Error reading audio data: %v\n", err)
                } else {
                    originalPath := fmt.Sprintf("audio/original_%d.wav", time.Now().UnixNano())
                    if err := os.WriteFile(originalPath, audioBytes, 0644); err != nil {
                        fmt.Printf("Error saving original audio: %v\n", err)
                    }
                    audioBase64 := base64.StdEncoding.EncodeToString(audioBytes)
                    trimmedAudio, err := s.TrimAudioSilence(audioBase64)
                    if err != nil {
                        fmt.Printf("Error trimming audio silence: %v\n", err)
                    } else {
                        audio_path := fmt.Sprintf("trimmed_%d.wav", time.Now().UnixNano())
                        outputPath := "audio/" + audio_path
                        if err := s.SaveBase64AsWAV(trimmedAudio, outputPath); err != nil {
                            fmt.Printf("Error saving trimmed WAV file: %v\n", err)
                        }
                        audio = s.config.FILE_URL + audio_path
                    }
                }
            }
            break
        }
    }
    return s_msg, audio, true
}

// handleStreamingResponse processes streaming responses
func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]interface{}, audio_type string) (chan Message, error) {
    resp, err := s.client.Do(req)
@@ -167,6 +351,7 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
    }

    messageChan := make(chan Message, 100) // Buffered channel for better performance
    all_message := ""
    initialSessage := ""
    go func() {
        defer resp.Body.Close()
@@ -200,6 +385,7 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
            switch event {
            case "message":
                answer := getString(jsonData, "answer")
                fmt.Println("Source text:", answer)
                var audio string

                // Define the punctuation map
@@ -209,7 +395,7 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                    "!": true, "!": true, // exclamation mark
                    "?": true, "?": true, // question mark
                    ";": true, ";": true, // semicolon
                    ":": true, ":": true, // colon
                    ":": true, // colon
                    "、": true,
                }

@@ -256,6 +442,7 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                }
                new_message := ""
                initialSessage += answer
                all_message += answer
                if containsPunctuation(initialSessage) {
                    segments := splitByPunctuation(initialSessage)
                    // fmt.Printf("Original text: %s\n", initialSessage)
@@ -264,11 +451,32 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                    //     fmt.Printf("Segment %d: %s\n", i+1, segment)
                    // }
                    if len(segments) > 1 {

                        format_message := strings.Join(segments[:len(segments)-1], "")
                        // Check whether the text exceeds 15 characters
                        if utf8.RuneCountInString(format_message) > 15 {
                            initialSessage = segments[len(segments)-1]
                            // If it exceeds the limit, append it to new_message and clear initialSessage
                            new_message = strings.Join(segments[:len(segments)-1], "")
                            // initialSessage = ""
                        } else {
                            if containsPunctuation(format_message) && utf8.RuneCountInString(format_message) > 10 {

                                initialSessage = segments[len(segments)-1]
                                new_message = strings.Join(segments[:len(segments)-1], "")
                            } else {
                                continue
                            }
                        }
                    } else {

                        if utf8.RuneCountInString(initialSessage) > 15 {
                            new_message = initialSessage
                            initialSessage = ""
                        } else {
                            continue
                        }

                    }
                    // fmt.Printf("New message: %s\n", new_message)
                    // fmt.Printf("Remaining text: %s\n", initialSessage)
@@ -280,8 +488,8 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                s_msg := strings.TrimSpace(new_message)
                // Trim punctuation from the message
                new_message = trimPunctuation(s_msg)
                // fmt.Println("new_message", new_message)

                fmt.Println("new_message", new_message)
                // println(new_message)
                // Retry at most once
                for i := 0; i < 1; i++ {
                    speechResp, err := s.SynthesizeSpeech(new_message, audio_type)
@@ -303,7 +511,7 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                            fmt.Printf("Error reading audio data: %v\n", err)
                        } else {
                            // Save original audio first
                            originalPath := fmt.Sprintf("audio/original_%d.wav", time.Now().Unix())
                            originalPath := fmt.Sprintf("audio/original_%d.wav", time.Now().UnixNano())
                            if err := os.WriteFile(originalPath, audioBytes, 0644); err != nil {
                                fmt.Printf("Error saving original audio: %v\n", err)
                            }
@@ -315,7 +523,7 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                                fmt.Printf("Error trimming audio silence: %v\n", err)
                            } else {
                                // Save the trimmed audio as WAV file
                                audio_path := fmt.Sprintf("trimmed_%d.wav", time.Now().Unix())
                                audio_path := fmt.Sprintf("trimmed_%d.wav", time.Now().UnixNano())
                                outputPath := "audio/" + audio_path
                                if err := s.SaveBase64AsWAV(trimmedAudio, outputPath); err != nil {
                                    fmt.Printf("Error saving trimmed WAV file: %v\n", err)
@@ -326,12 +534,12 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                    }
                    break // exit once the audio has been obtained
                }
                fmt.Println("audio is empty, retry", speechResp)
                // fmt.Println("audio is empty, retry", speechResp)
                // time.Sleep(1 * time.Second)
                }

                fmt.Println("All messages:", all_message)
                messageChan <- Message{
                    Answer: new_message,
                    Answer: s_msg,
                    IsEnd:  false,
                    ConversationID: getString(jsonData, "conversation_id"),
                    TaskID:         getString(jsonData, "task_id"),
@@ -339,6 +547,96 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
                    AudioData: audio, // Update to use the correct path to audio data
                }
            case "message_end":
                // Before the stream ends, synthesize audio for the remaining text
                if initialSessage != "" {
                    // Generate the audio regardless of the text length
                    s_msg := strings.TrimSpace(initialSessage)
                    // Define the punctuation map
                    punctuations := map[string]bool{
                        ",": true, ",": true, // comma
                        ".": true, "。": true, // period
                        "!": true, "!": true, // exclamation mark
                        "?": true, "?": true, // question mark
                        ";": true, ";": true, // semicolon
                        ":": true, ":": true, // colon
                        "、": true,
                    }

                    // Strip punctuation from the ends of the string
                    trimPunctuation := func(s string) string {
                        if len(s) > 0 {
                            lastRune, size := utf8.DecodeLastRuneInString(s)
                            if punctuations[string(lastRune)] {
                                s = s[:len(s)-size]
                            }
                        }
                        return s
                    }

                    new_message := trimPunctuation(s_msg)
                    fmt.Println("Generating audio for the final text segment:", new_message)

                    // Synthesize the speech
                    var audio string
                    for i := 0; i < 1; i++ {
                        speechResp, err := s.SynthesizeSpeech(new_message, audio_type)
                        if err != nil {
                            fmt.Printf("Error synthesizing speech: %v\n", err)
                            break
                        }
                        fmt.Println("Speech:", speechResp)
                        audio = speechResp.Data.Audio
                        if audio != "" {
                            // Download and process the audio
                            resp, err := http.Get(audio)
                            if err != nil {
                                fmt.Printf("Error downloading audio: %v\n", err)
                            } else {
                                defer resp.Body.Close()
                                audioBytes, err := io.ReadAll(resp.Body)
                                if err != nil {
                                    fmt.Printf("Error reading audio data: %v\n", err)
                                } else {
                                    // Save the original audio
                                    originalPath := fmt.Sprintf("audio/original_%d.wav", time.Now().UnixNano())
                                    if err := os.WriteFile(originalPath, audioBytes, 0644); err != nil {
                                        fmt.Printf("Error saving original audio: %v\n", err)
                                    }

                                    // Trim the silence
                                    audioBase64 := base64.StdEncoding.EncodeToString(audioBytes)
                                    trimmedAudio, err := s.TrimAudioSilence(audioBase64)
                                    if err != nil {
                                        fmt.Printf("Error trimming audio silence: %v\n", err)
                                    } else {
                                        audio_path := fmt.Sprintf("trimmed_%d.wav", time.Now().UnixNano())
                                        outputPath := "audio/" + audio_path
                                        if err := s.SaveBase64AsWAV(trimmedAudio, outputPath); err != nil {
                                            fmt.Printf("Error saving trimmed WAV file: %v\n", err)
                                        }
                                        audio = s.config.FILE_URL + audio_path
                                    }
                                }
                            }
                            break
                        }
                    }

                    // Send the message for the final text segment
                    messageChan <- Message{
                        Answer: s_msg,
                        IsEnd:  false,
                        ConversationID: getString(jsonData, "conversation_id"),
                        TaskID:         getString(jsonData, "task_id"),
                        ClientID:       getString(data, "conversation_id"),
                        AudioData:      audio,
                    }

                    // Clear the remaining text
                    initialSessage = ""
                }

                // Send the end-of-stream message
                messageChan <- Message{
                    Answer: "",
                    IsEnd:  true,
@@ -353,6 +651,95 @@ func (s *LLMService) handleStreamingResponse(req *http.Request, data map[string]
    return messageChan, nil
}

// handleStreamingResponseV2 adapts the new streaming response format
func (s *LLMService) handleStreamingResponseV2(req *http.Request, data map[string]interface{}, audio_type string) (chan Message, error) {
    resp, err := s.client.Do(req)
    if err != nil {
        return nil, fmt.Errorf("error making request: %v", err)
    }

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
    }

    messageChan := make(chan Message, 100)
    all_message := ""
    initialSessage := ""
    go func() {
        defer resp.Body.Close()
        defer close(messageChan)
        reader := bufio.NewReader(resp.Body)
        for {
            line, err := reader.ReadString('\n')
            if err != nil {
                if err == io.EOF {
                    break
                }
                fmt.Printf("Error reading line: %v\n", err)
                continue
            }

            line = strings.TrimSpace(line)
            if line == "" {
                continue
            }

            // line = strings.TrimSpace(line)
            if strings.HasPrefix(line, "data:") {
                line = strings.TrimSpace(strings.TrimPrefix(line, "data:"))
            }

            // fmt.Println("line: ", line)

            if line == "[DONE]" {
                messageChan <- Message{
                    Answer: "",
                    IsEnd:  true,
                    ConversationID: getString(data, "conversation_id"),
                    TaskID:         getString(data, "task_id"),
                }
                return
            }

            var jsonData map[string]interface{}
            if err := json.Unmarshal([]byte(line), &jsonData); err != nil {
                fmt.Printf("Error unmarshaling JSON: %v\n", err)
                continue
            }
            choices, ok := jsonData["choices"].([]interface{})
            if !ok || len(choices) == 0 {
                continue
            }
            choice, ok := choices[0].(map[string]interface{})
            if !ok {
                continue
            }
            delta, ok := choice["delta"].(map[string]interface{})
            if !ok {
                continue
            }
            content, _ := delta["content"].(string)
            if content == "" {
                continue
            }

            new_message, audio, needSend := s.processStreamSegment(&initialSessage, &all_message, content, audio_type)
            if !needSend {
                continue
            }
            messageChan <- Message{
                Answer: new_message,
                IsEnd:  false,
                ConversationID: getString(data, "conversation_id"),
                TaskID:         getString(data, "task_id"),
                ClientID:       getString(data, "conversation_id"),
                AudioData:      audio,
            }
        }
    }()
    return messageChan, nil
}

// handleNonStreamingResponse processes non-streaming responses
func (s *LLMService) handleNonStreamingResponse(req *http.Request) (map[string]interface{}, error) {
    resp, err := s.client.Do(req)
@@ -424,7 +811,7 @@ func (s *LLMService) DeleteConversation(conversationID, user string) (map[string
// SynthesizeSpeech converts text to speech
func (s *LLMService) SynthesizeSpeech(text string, audio string) (*SpeechResponse, error) {
    payload := SpeechRequest{
        Model: "speech-02-turbo",
        Model: "speech-02-hd",
        Text:          text,
        Stream:        false,
        LanguageBoost: "auto",
@@ -433,8 +820,8 @@ func (s *LLMService) SynthesizeSpeech(text string, audio string) (*SpeechRespons
            VoiceID: audio,
            Speed:   1,
            Vol:     1,
            Pitch:   0,
            Emotion: "happy",
            Pitch:   -1,
            Emotion: "neutral",
        },
        AudioSetting: AudioSetting{
            SampleRate: 32000,
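handleStreamingResponseV2 consumes an OpenAI-style SSE stream (`data: {...}` lines ending in `data: [DONE]`). Below is a hypothetical, self-contained sketch of the per-line parsing it performs; the sample stream data is invented for illustration.

```go
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"strings"
)

// extractDelta pulls choices[0].delta.content out of one decoded SSE payload,
// the same traversal the V2 handler does with type assertions.
func extractDelta(raw map[string]interface{}) string {
	choices, ok := raw["choices"].([]interface{})
	if !ok || len(choices) == 0 {
		return ""
	}
	choice, ok := choices[0].(map[string]interface{})
	if !ok {
		return ""
	}
	delta, ok := choice["delta"].(map[string]interface{})
	if !ok {
		return ""
	}
	content, _ := delta["content"].(string)
	return content
}

func main() {
	stream := `data: {"choices":[{"delta":{"content":"你好"}}]}
data: {"choices":[{"delta":{"content":",很高兴见到你。"}}]}
data: [DONE]
`
	scanner := bufio.NewScanner(strings.NewReader(stream))
	for scanner.Scan() {
		line := strings.TrimSpace(strings.TrimPrefix(scanner.Text(), "data:"))
		if line == "" || line == "[DONE]" {
			continue
		}
		var raw map[string]interface{}
		if err := json.Unmarshal([]byte(line), &raw); err != nil {
			continue
		}
		fmt.Printf("delta: %q\n", extractDelta(raw))
	}
}
```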
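The segmentation rule in processStreamSegment is easier to see in isolation: deltas accumulate in a buffer, and once the buffered text contains punctuation and the completed segments exceed ten runes, everything up to the last punctuation mark is flushed for speech synthesis while the tail is kept for the next chunk. A hypothetical sketch of that idea (it mirrors the splitByPunctuation helper in the diff, but the driver loop and sample text are assumptions):

```go
package main

import (
	"fmt"
	"strings"
	"unicode/utf8"
)

var punctuation = map[rune]bool{
	',': true, ',': true, '.': true, '。': true, '!': true, '!': true,
	'?': true, '?': true, ';': true, ';': true, ':': true, ':': true, '、': true,
}

// splitByPunctuation mirrors the helper in the diff: each returned segment
// ends with the punctuation mark that closed it (except possibly the last).
func splitByPunctuation(s string) []string {
	var segs []string
	var cur strings.Builder
	for _, r := range s {
		cur.WriteRune(r)
		if punctuation[r] {
			segs = append(segs, cur.String())
			cur.Reset()
		}
	}
	if cur.Len() > 0 {
		segs = append(segs, cur.String())
	}
	return segs
}

func main() {
	buffer := ""
	for _, chunk := range []string{"您好,", "这里是智能公证咨询助手,", "请问有什么可以帮您?"} {
		buffer += chunk
		segs := splitByPunctuation(buffer)
		if len(segs) > 1 {
			done := strings.Join(segs[:len(segs)-1], "")
			// Flush only once the completed segments are long enough (>10 runes).
			if utf8.RuneCountInString(done) > 10 {
				fmt.Printf("flush for TTS: %q\n", done)
				buffer = segs[len(segs)-1]
			}
		}
	}
	fmt.Printf("tail kept for next chunk: %q\n", buffer)
}
```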