Compare commits
7 Commits: v0.5.10-al...v0.5.10-de
| Author | SHA1 | Date |
|---|---|---|
| | 5df751f5ca | |
| | b519170c06 | |
| | 3d9ad0024a | |
| | 67c64e71c8 | |
| | 97030e27f8 | |
| | 461f5dab56 | |
| | af378c59af | |
.github/workflows/docker-image-amd64.yml (vendored): 14 changes
```diff
@@ -9,6 +9,8 @@ on:
       name:
         description: 'reason'
         required: false
+permissions:
+  packages: write
 jobs:
   push_to_registries:
     name: Push Docker image to multiple registries
@@ -25,12 +27,6 @@ jobs:
           git describe --tags > VERSION
 
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
       - name: Log in to the Container registry
         uses: docker/login-action@v2
         with:
           registry: ghcr.io
@@ -41,9 +37,7 @@ jobs:
         id: meta
         uses: docker/metadata-action@v4
         with:
-          images: |
-            justsong/one-api
-            ghcr.io/${{ github.repository }}
+          images: ghcr.io/${{ github.repository }}
 
       - name: Build and push Docker images
         uses: docker/build-push-action@v3
@@ -51,4 +45,4 @@ jobs:
           context: .
           push: true
           tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
+          labels: ${{ steps.meta.outputs.labels }}
```
```diff
@@ -73,13 +73,7 @@ _✨ Access all large models through the standard OpenAI API format, ready to use out of the box ✨_
    + [x] [Zhipu ChatGLM series models](https://bigmodel.cn)
    + [x] [360 Zhinao](https://ai.360.cn)
    + [x] [Tencent Hunyuan](https://cloud.tencent.com/document/product/1729)
-2. Support for configuring mirrors and many third-party proxy services:
-   + [x] [OpenAI-SB](https://openai-sb.com)
-   + [x] [CloseAI](https://referer.shadowai.xyz/r/2412)
-   + [x] [API2D](https://api2d.com/r/197971)
-   + [x] [OhMyGPT](https://aigptx.top?aff=uFpUl2Kf)
-   + [x] [AI Proxy](https://aiproxy.io/?i=OneAPI) (invite code: `OneAPI`)
-   + [x] Custom channels: e.g. various third-party proxy services not listed above
+2. Support for configuring mirrors and many [third-party proxy services](https://iamazing.cn/page/openai-api-third-party-services).
 3. Support for accessing multiple channels via **load balancing**.
 4. Support for **stream mode**, which delivers a typewriter effect through streaming responses.
 5. Support for **multi-machine deployment**, [see here](#多机部署).
```
```diff
@@ -114,7 +114,7 @@ func requestOpenAI2Gemini(textRequest GeneralOpenAIRequest) *GeminiChatRequest {
 			Role: "model",
 			Parts: []GeminiPart{
 				{
-					Text: "ok",
+					Text: "Okay",
 				},
 			},
 		})
@@ -130,6 +130,16 @@ type GeminiChatResponse struct {
 	PromptFeedback GeminiChatPromptFeedback `json:"promptFeedback"`
 }
 
+func (g *GeminiChatResponse) GetResponseText() string {
+	if g == nil {
+		return ""
+	}
+	if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
+		return g.Candidates[0].Content.Parts[0].Text
+	}
+	return ""
+}
+
 type GeminiChatCandidate struct {
 	Content      GeminiChatContent `json:"content"`
 	FinishReason string            `json:"finishReason"`
```
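The added `GetResponseText` helper centralizes the nil and bounds checks that callers previously repeated at each access of `Candidates[0].Content.Parts[0]`. A minimal, self-contained sketch of the same pattern (simplified stand-in types, not the project's actual definitions) shows how the accessor degrades to an empty string instead of panicking on an empty response:

```go
package main

import "fmt"

// Simplified stand-ins for the response types; names here are illustrative only.
type part struct{ Text string }
type content struct{ Parts []part }
type candidate struct{ Content content }
type chatResponse struct{ Candidates []candidate }

// GetResponseText mirrors the guarded accessor: a nil receiver, an empty
// candidate list, or an empty parts list all yield "" rather than an index panic.
func (r *chatResponse) GetResponseText() string {
	if r == nil {
		return ""
	}
	if len(r.Candidates) > 0 && len(r.Candidates[0].Content.Parts) > 0 {
		return r.Candidates[0].Content.Parts[0].Text
	}
	return ""
}

func main() {
	var empty chatResponse
	fmt.Printf("empty: %q\n", empty.GetResponseText()) // empty: ""

	ok := chatResponse{Candidates: []candidate{
		{Content: content{Parts: []part{{Text: "hello"}}}},
	}}
	fmt.Printf("ok: %q\n", ok.GetResponseText()) // ok: "hello"
}
```

Centralizing the guard is what lets the later hunks in this compare drop their own length checks or unguarded indexing.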
```diff
@@ -158,10 +168,13 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse
 			Index: i,
 			Message: Message{
 				Role:    "assistant",
-				Content: candidate.Content.Parts[0].Text,
+				Content: "",
 			},
 			FinishReason: stopFinishReason,
 		}
+		if len(candidate.Content.Parts) > 0 {
+			choice.Message.Content = candidate.Content.Parts[0].Text
+		}
 		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
 	}
 	return &fullTextResponse
@@ -169,9 +182,7 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse
 
 func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) *ChatCompletionsStreamResponse {
 	var choice ChatCompletionsStreamResponseChoice
-	if len(geminiResponse.Candidates) > 0 && len(geminiResponse.Candidates[0].Content.Parts) > 0 {
-		choice.Delta.Content = geminiResponse.Candidates[0].Content.Parts[0].Text
-	}
+	choice.Delta.Content = geminiResponse.GetResponseText()
 	choice.FinishReason = &stopFinishReason
 	var response ChatCompletionsStreamResponse
 	response.Object = "chat.completion.chunk"
@@ -276,7 +287,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo
 		}, nil
 	}
 	fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
-	completionTokens := countTokenText(geminiResponse.Candidates[0].Content.Parts[0].Text, model)
+	completionTokens := countTokenText(geminiResponse.GetResponseText(), model)
 	usage := Usage{
 		PromptTokens:     promptTokens,
 		CompletionTokens: completionTokens,
```
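For context on the object that `streamResponseGeminiChat2OpenAI` assembles, here is a rough sketch of an OpenAI-style stream chunk built from simplified stand-in structs (field names are assumptions for illustration, not the project's exact types); the delta text is whatever the shared accessor extracted:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the stream response types touched in the diff.
type delta struct {
	Content string `json:"content,omitempty"`
}

type streamChoice struct {
	Delta        delta   `json:"delta"`
	FinishReason *string `json:"finish_reason"`
}

type streamResponse struct {
	Object  string         `json:"object"`
	Choices []streamChoice `json:"choices"`
}

func main() {
	stop := "stop"
	chunk := streamResponse{
		Object: "chat.completion.chunk",
		Choices: []streamChoice{
			{Delta: delta{Content: "Okay"}, FinishReason: &stop},
		},
	}
	b, _ := json.Marshal(chunk)
	fmt.Println(string(b))
	// {"object":"chat.completion.chunk","choices":[{"delta":{"content":"Okay"},"finish_reason":"stop"}]}
}
```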
```diff
@@ -58,6 +58,9 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 	if err != nil {
 		return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
 	}
+	if textRequest.MaxTokens < 0 || textRequest.MaxTokens > math.MaxInt32/2 {
+		return errorWrapper(errors.New("max_tokens is invalid"), "invalid_max_tokens", http.StatusBadRequest)
+	}
 	if relayMode == RelayModeModerations && textRequest.Model == "" {
 		textRequest.Model = "text-moderation-latest"
 	}
```
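The new guard in `relayTextHelper` rejects out-of-range `max_tokens` values before the request is forwarded upstream. A small isolated sketch of the same bounds check, with a pared-down request type standing in for the real one:

```go
package main

import (
	"errors"
	"fmt"
	"math"
)

// textRequest is a pared-down stand-in for the relayed request body.
type textRequest struct {
	MaxTokens int `json:"max_tokens"`
}

// validateMaxTokens applies the same bounds as the diff: negative values and
// values above math.MaxInt32/2 are rejected.
func validateMaxTokens(r textRequest) error {
	if r.MaxTokens < 0 || r.MaxTokens > math.MaxInt32/2 {
		return errors.New("max_tokens is invalid")
	}
	return nil
}

func main() {
	for _, v := range []int{0, 4096, -1, math.MaxInt32} {
		fmt.Println(v, "->", validateMaxTokens(textRequest{MaxTokens: v}))
	}
}
```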
```diff
@@ -6,14 +6,15 @@ import (
 	"gorm.io/gorm"
 	"one-api/common"
 	"strings"
+	"strconv"
 )
 
 // User if you add sensitive fields, don't forget to clean them in setupLogin function.
 // Otherwise, the sensitive information will be saved on local storage in plain text!
 type User struct {
 	Id          int    `json:"id"`
-	Username    string `json:"username" gorm:"unique;index" validate:"max=12"`
-	Password    string `json:"password" gorm:"not null;" validate:"min=8,max=20"`
+	Username    string `json:"username" gorm:"unique;index" validate:"max=30"`
+	Password    string `json:"password" gorm:"not null;" validate:"min=8,max=30"`
 	DisplayName string `json:"display_name" gorm:"index" validate:"max=20"`
 	Role        int    `json:"role" gorm:"type:int;default:1"`   // admin, common
 	Status      int    `json:"status" gorm:"type:int;default:1"` // enabled, disabled
```
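The `validate` tags raise the username limit from 12 to 30 characters and the password ceiling from 20 to 30. Assuming the tags are checked with a go-playground/validator style `Struct()` call (an assumption; this compare does not show where validation actually runs), the effect of the new limits looks like this:

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// User reproduces only the two fields whose limits changed in this diff.
type User struct {
	Username string `validate:"max=30"`
	Password string `validate:"min=8,max=30"`
}

func main() {
	v := validator.New()

	// A 25-character username: rejected by the old max=12, accepted by max=30.
	u := User{Username: "a-quite-long-user-name-25", Password: "longenoughpassword"}
	fmt.Println("validation error:", v.Struct(u)) // validation error: <nil>
}
```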
```diff
@@ -42,7 +43,12 @@ func GetAllUsers(startIdx int, num int) (users []*User, err error) {
 }
 
 func SearchUsers(keyword string) (users []*User, err error) {
-	err = DB.Omit("password").Where("id = ? or username LIKE ? or email LIKE ? or display_name LIKE ?", keyword, keyword+"%", keyword+"%", keyword+"%").Find(&users).Error
+	if uid, ok := strconv.Atoi(keyword); ok == nil {
+		err = DB.Omit("password").Where("id = ? or username LIKE ? or email LIKE ? or display_name LIKE ?", uid, keyword+"%", keyword+"%", keyword+"%").Find(&users).Error
+	} else {
+		err = DB.Omit("password").Where("username LIKE ? or email LIKE ? or display_name LIKE ?", keyword+"%", keyword+"%", keyword+"%").Find(&users).Error
+	}
+
 	return users, err
 }
```
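The reworked `SearchUsers` only includes the `id = ?` predicate when the keyword parses as an integer, so non-numeric keywords no longer compare a string against the numeric id column. A database-free sketch of the branching, using a hypothetical `buildSearchClause` helper in place of the GORM call:

```go
package main

import (
	"fmt"
	"strconv"
)

// buildSearchClause is a hypothetical helper that mirrors the branch in the
// diff: numeric keywords also match on id, everything else matches only the
// LIKE columns.
func buildSearchClause(keyword string) (clause string, args []interface{}) {
	like := keyword + "%"
	if uid, err := strconv.Atoi(keyword); err == nil {
		return "id = ? or username LIKE ? or email LIKE ? or display_name LIKE ?",
			[]interface{}{uid, like, like, like}
	}
	return "username LIKE ? or email LIKE ? or display_name LIKE ?",
		[]interface{}{like, like, like}
}

func main() {
	for _, kw := range []string{"42", "alice"} {
		clause, args := buildSearchClause(kw)
		fmt.Printf("%q -> %s %v\n", kw, clause, args)
	}
}
```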