Commit 482bc9f5 authored by 健杭 徐

base backend

parent 6e279d4c
package handlers

import (
    "fmt"
    "net/http"
    "path/filepath"
    "strconv"

    "github.com/gin-gonic/gin"
    "gorm.io/gorm"

    "your-app/models"
    "your-app/services"
)

// db is assumed to be a package-level *gorm.DB handle initialized at
// application startup; the wiring is not part of this commit.
var db *gorm.DB
func SendMessage(c *gin.Context) {
    // Parse the conversation ID from the URL path.
    conversationID, err := strconv.ParseUint(c.Param("conversationId"), 10, 64)
    if err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid conversation id"})
        return
    }

    // Handle multimodal input: optional text plus an optional image upload.
    text := c.PostForm("text")
    file, _ := c.FormFile("image")

    var imageBase64 string
    if file != nil {
        // Save the uploaded image locally and convert it to Base64.
        dst := fmt.Sprintf("./uploads/%s", filepath.Base(file.Filename))
        if err := c.SaveUploadedFile(file, dst); err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }
        imageBase64 = services.ImageToBase64(dst)
    }

    // Persist the user message.
    userMsg := models.Message{
        ConversationID: uint(conversationID),
        Role:           "user",
        Type:           models.Text,
        Content:        text,
    }
    if imageBase64 != "" {
        userMsg.Type = models.Image
        userMsg.Content = imageBase64
    }
    db.Create(&userMsg)

    // Call the LLM service with the conversation context.
    llmResponse, err := services.CallLLMAPI(c.Param("conversationId"))
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": "LLM service call failed"})
        return
    }

    // Persist the assistant reply.
    aiMsg := models.Message{
        ConversationID: uint(conversationID),
        Role:           "assistant",
        Type:           models.Text,
        Content:        llmResponse,
    }
    db.Create(&aiMsg)

    c.JSON(http.StatusOK, gin.H{
        "userMessage": userMsg,
        "aiMessage":   aiMsg,
    })
}
\ No newline at end of file
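For context, a minimal sketch of how SendMessage might be registered on a Gin router; the route path, module path your-app, and port are assumptions and not part of this commit.

package main

import (
    "log"

    "github.com/gin-gonic/gin"

    "your-app/handlers"
)

func main() {
    r := gin.Default()
    // Route parameter name must match c.Param("conversationId") in the handler.
    r.POST("/conversations/:conversationId/messages", handlers.SendMessage)
    if err := r.Run(":8080"); err != nil {
        log.Fatal(err)
    }
}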
package models

import "time"

type MessageType string

const (
    Text  MessageType = "text"
    Image MessageType = "image"
)

type Message struct {
    ID             uint        `gorm:"primaryKey" json:"id"`
    ConversationID uint        `json:"conversation_id"`
    Role           string      `json:"role"` // "user" or "assistant"
    Type           MessageType `json:"type"`
    Content        string      `json:"content"` // text content or image Base64
    CreatedAt      time.Time   `json:"created_at"`
}
\ No newline at end of file
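Both the handlers and services packages reference a package-level *gorm.DB named db. A minimal sketch of how such a handle might be opened and migrated at startup; the SQLite driver and file name are assumptions, and how the handle reaches those packages is not shown in this commit.

package main

import (
    "gorm.io/driver/sqlite"
    "gorm.io/gorm"

    "your-app/models"
)

func initDB() (*gorm.DB, error) {
    // SQLite is only an example; any GORM driver would work here.
    db, err := gorm.Open(sqlite.Open("app.db"), &gorm.Config{})
    if err != nil {
        return nil, err
    }
    // Create or upgrade the messages table from the model definition.
    if err := db.AutoMigrate(&models.Message{}); err != nil {
        return nil, err
    }
    return db, nil
}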
package services

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "os"

    "gorm.io/gorm"

    "your-app/models"
)

// db is assumed to be a package-level *gorm.DB handle shared with the rest of
// the application; initialization is not part of this commit.
var db *gorm.DB
func CallLLMAPI(conversationID string) (string, error) {
    // Load the conversation context from the database.
    var messages []models.Message
    if err := db.Where("conversation_id = ?", conversationID).Find(&messages).Error; err != nil {
        return "", err
    }

    // Build a multimodal chat-completion request.
    payload := map[string]interface{}{
        "model":    "gpt-4-vision-preview",
        "messages": buildMessages(messages),
    }
    payloadBytes, err := json.Marshal(payload)
    if err != nil {
        return "", err
    }

    req, err := http.NewRequest("POST", "https://api.openai.com/v1/chat/completions", bytes.NewBuffer(payloadBytes))
    if err != nil {
        return "", err
    }
    req.Header.Set("Authorization", "Bearer "+os.Getenv("OPENAI_API_KEY"))
    req.Header.Set("Content-Type", "application/json")

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", err
    }

    // Parse the response and return the first choice's message content.
    var result struct {
        Choices []struct {
            Message struct {
                Content string `json:"content"`
            } `json:"message"`
        } `json:"choices"`
    }
    if err := json.Unmarshal(body, &result); err != nil {
        return "", err
    }
    if len(result.Choices) == 0 {
        return "", fmt.Errorf("LLM API returned no choices: %s", string(body))
    }
    return result.Choices[0].Message.Content, nil
}
// buildMessages converts stored messages into the multimodal content format
// expected by the chat completions API.
func buildMessages(messages []models.Message) []map[string]interface{} {
    var result []map[string]interface{}
    for _, msg := range messages {
        var content []interface{}
        if msg.Type == models.Image {
            // Image messages store the Base64 payload in Content.
            content = append(content, map[string]interface{}{
                "type": "image_url",
                "image_url": map[string]string{
                    "url": fmt.Sprintf("data:image/jpeg;base64,%s", msg.Content),
                },
            })
        } else {
            content = append(content, map[string]interface{}{
                "type": "text",
                "text": msg.Content,
            })
        }
        result = append(result, map[string]interface{}{
            "role":    msg.Role,
            "content": content,
        })
    }
    return result
}
\ No newline at end of file
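SendMessage also calls services.ImageToBase64, which is not included in this commit; a plausible minimal sketch, with the error-handling strategy (returning an empty string) as an assumption.

package services

import (
    "encoding/base64"
    "os"
)

// ImageToBase64 reads an image from disk and returns it Base64-encoded.
// Sketch only: the real helper is not part of this commit.
func ImageToBase64(path string) string {
    data, err := os.ReadFile(path)
    if err != nil {
        return ""
    }
    return base64.StdEncoding.EncodeToString(data)
}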