Compare commits


17 Commits
0.5.1 ... 0.5.2

Author  SHA1  Message  Date
ckt1031  4b9756b257  feat: support chatbot ui  2023-07-17 15:35:02 +08:00
ckt1031  a6ae20ed54  fix: chatgptweb  2023-07-16 21:48:54 +08:00
ckt1031  617149d731  fix: custom models  2023-07-16 21:23:56 +08:00
ckt1031  edd2c4f6e9  fix: testing issue  2023-07-16 16:01:52 +08:00
ckt1031  481c4ebf49  fix: chatgptweb issue  2023-07-16 15:35:32 +08:00
ckt1031  203471d7a9  Merge remote-tracking branch 'upstream/main'  2023-07-16 13:12:45 +08:00
JustSong  4139a7036f  chore: make subscription api compatible with official api  2023-07-15 23:01:54 +08:00
JustSong  02da0b51f8  docs: update README  2023-07-15 19:07:38 +08:00
JustSong  35cfebee12  feat: retry on failed (close #112)  2023-07-15 19:06:51 +08:00
JustSong  0e088f7c3e  feat: support ChatGLM2 (close #274)  2023-07-15 17:07:05 +08:00
JustSong  f61d326721  revert: do not enable turnstile check on login  2023-07-15 16:06:01 +08:00
JustSong  74b06b643a  Merge branch 'main' of github.com:songquanpeng/one-api  2023-07-15 13:52:26 +08:00
JustSong  ccf7709e23  feat: support custom model now (close #276)  2023-07-15 13:51:46 +08:00
ckt  d592e2c8b8  feat: add turnstile for login form (#263)  2023-07-15 12:41:21 +08:00
ckt  b520b54625  feat: initial support of Dall-E (#148, #266)  2023-07-15 12:30:06 +08:00
        * feat: initial support of Dall-E
        * fix: fix N not timed
        Co-authored-by: JustSong <songquanpeng@foxmail.com>
        Co-authored-by: JustSong <39998050+songquanpeng@users.noreply.github.com>
玩牛牛  81c5901123  feat: add support for /v1/engines/text-embedding-ada-002/embeddings (#224, close #222)  2023-07-15 12:03:23 +08:00
JustSong  abc53cb208  feat: disable channel when account_deactivated received (close #271)  2023-07-15 11:49:58 +08:00
6 changed files with 239 additions and 109 deletions

View File

@@ -158,6 +158,7 @@ const (
// Reverse engineering for public projects
ChannelTypeChatGPTWeb = 14 // Chanzhaoyu/chatgpt-web
ChannelTypeChatbotUI = 15 // mckaywrigley/chatbot-ui
)
var ChannelBaseURLs = []string{
@@ -178,4 +179,5 @@ var ChannelBaseURLs = []string{
// Reverse engineering for public projects
"", // 14 // Chanzhaoyu/chatgpt-web
"", // 15 // mckaywrigley/chatbot-ui
}
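
As an aside, a minimal runnable sketch (not the project's actual lookup code; the map, index values, and example URL are assumptions) of how an empty ChannelBaseURLs entry such as slot 15 falls back to the channel's configured BaseURL, mirroring the testChannel branches further down:

package main

import "fmt"

// Illustrative only: a two-entry stand-in for common.ChannelBaseURLs.
// Index 15 corresponds to the new ChannelTypeChatbotUI constant above.
var channelBaseURLs = map[int]string{
	14: "", // Chanzhaoyu/chatgpt-web
	15: "", // mckaywrigley/chatbot-ui
}

// resolveBaseURL mirrors the pattern used in testChannel below:
// an empty table entry means "use the channel's own configured BaseURL".
func resolveBaseURL(channelType int, configuredBaseURL string) string {
	if url := channelBaseURLs[channelType]; url != "" {
		return url
	}
	return configuredBaseURL
}

func main() {
	// Hypothetical chatbot-ui deployment URL configured on the channel.
	fmt.Println(resolveBaseURL(15, "https://my-chatbot-ui.example.com"))
}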

View File

@@ -8,7 +8,8 @@ import (
)
func GetSubscription(c *gin.Context) {
var quota int
var remainQuota int
var usedQuota int
var err error
var expirationDate int64
@@ -18,10 +19,14 @@ func GetSubscription(c *gin.Context) {
expirationDate = token.ExpiredTime
if common.DisplayTokenStatEnabled {
quota = token.RemainQuota
tokenId := c.GetInt("token_id")
token, err = model.GetTokenById(tokenId)
remainQuota = token.RemainQuota
usedQuota = token.UsedQuota
} else {
userId := c.GetInt("id")
quota, err = model.GetUserQuota(userId)
remainQuota, err = model.GetUserQuota(userId)
usedQuota, err = model.GetUserUsedQuota(userId)
}
if err != nil {
openAIError := OpenAIError{
@@ -33,6 +38,7 @@ func GetSubscription(c *gin.Context) {
})
return
}
quota := remainQuota + usedQuota
amount := float64(quota)
if common.DisplayInCurrencyEnabled {
amount /= common.QuotaPerUnit
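
A quick runnable sketch of the new quota arithmetic with hypothetical numbers (QuotaPerUnit's real value is configurable; 500000 here is only an assumption): the reported total is remainQuota + usedQuota, optionally converted to a currency amount.

package main

import "fmt"

func main() {
	// Hypothetical values; quotaPerUnit stands in for the configurable common.QuotaPerUnit.
	const quotaPerUnit = 500000.0
	remainQuota, usedQuota := 400000, 100000

	quota := remainQuota + usedQuota // as in GetSubscription above
	amount := float64(quota)
	displayInCurrency := true
	if displayInCurrency {
		amount /= quotaPerUnit
	}
	fmt.Println(amount) // 1
}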

View File

@@ -18,6 +18,13 @@ import (
"github.com/gin-gonic/gin"
)
func formatFloat(input float64) float64 {
if input == float64(int64(input)) {
return input
}
return float64(int64(input*10)) / 10
}
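
Worth noting: formatFloat truncates to one decimal place rather than rounding, and passes whole numbers through unchanged. A self-contained check:

package main

import "fmt"

func formatFloat(input float64) float64 {
	if input == float64(int64(input)) {
		return input
	}
	return float64(int64(input*10)) / 10
}

func main() {
	fmt.Println(formatFloat(2))    // 2 — whole numbers pass through
	fmt.Println(formatFloat(0.75)) // 0.7 — truncated, not rounded to 0.8
	fmt.Println(formatFloat(0.96)) // 0.9
}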
func testChannel(channel *model.Channel, request ChatRequest) error {
switch channel.Type {
case common.ChannelTypeAzure:
@@ -32,6 +39,10 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
if channel.BaseURL != "" {
requestURL = channel.BaseURL
}
} else if channel.Type == common.ChannelTypeChatbotUI {
if channel.BaseURL != "" {
requestURL = channel.BaseURL
}
} else {
if channel.BaseURL != "" {
requestURL = channel.BaseURL
@@ -65,16 +76,49 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
}
// Construct JSON data without adding escape characters
map1 := map[string]string{
"prompt": prompt,
"systemMessage": systemMessage.Content,
"temperature": strconv.FormatFloat(request.Temperature, 'f', 2, 64),
"top_p": strconv.FormatFloat(request.TopP, 'f', 2, 64),
map1 := make(map[string]interface{})
map1["prompt"] = prompt
map1["systemMessage"] = systemMessage.Content
if request.Temperature != 0 {
map1["temperature"] = formatFloat(request.Temperature)
}
if request.TopP != 0 {
map1["top_p"] = formatFloat(request.TopP)
}
// Convert map to json string
jsonData, err = json.Marshal(map1)
} else if channel.Type == common.ChannelTypeChatbotUI {
// Get system message from Message json, Role == "system"
var systemMessage string
for _, message := range request.Messages {
if message.Role == "system" {
systemMessage = message.Content
break
}
}
// Construct JSON data without adding escape characters
map1 := make(map[string]interface{})
map1["prompt"] = systemMessage
map1["temperature"] = formatFloat(request.Temperature)
map1["key"] = ""
map1["messages"] = request.Messages
map1["model"] = map[string]interface{}{
"id": request.Model,
}
// Convert map to json string
jsonData, err = json.Marshal(map1)
// Print jsonData to console
log.Println(string(jsonData))
}
if err != nil {
return err
}
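
For reference, a self-contained sketch of the request body this chatbot-ui branch marshals, using hypothetical message values and an assumed Message struct with role/content JSON tags (the real type lives elsewhere in this package):

package main

import (
	"encoding/json"
	"fmt"
)

// Minimal stand-in for the package's Message type; the json tags are assumptions.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

func main() {
	messages := []Message{
		{Role: "system", Content: "You are a helpful assistant."},
		{Role: "user", Content: "Hello ChatGPT!"},
	}
	body := map[string]interface{}{
		"prompt":      "You are a helpful assistant.", // the extracted system message
		"temperature": 1.0,
		"key":         "",
		"messages":    messages,
		"model":       map[string]interface{}{"id": "gpt-3.5-turbo"},
	}
	jsonData, _ := json.Marshal(body)
	fmt.Println(string(jsonData))
	// Go sorts map keys when marshalling, so the output looks like:
	// {"key":"","messages":[...],"model":{"id":"gpt-3.5-turbo"},"prompt":"...","temperature":1}
}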
@@ -119,11 +163,11 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
return errors.New("error response: " + strconv.Itoa(resp.StatusCode))
}
var done = false
var streamResponseText = ""
if channel.Type != common.ChannelTypeChatGPTWeb {
scanner := bufio.NewScanner(resp.Body)
scanner := bufio.NewScanner(resp.Body)
if channel.Type != common.ChannelTypeChatGPTWeb && channel.Type != common.ChannelTypeChatbotUI {
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
@@ -139,12 +183,35 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
return 0, nil, nil
})
for scanner.Scan() {
data := scanner.Text()
if len(data) < 6 { // must be something wrong!
continue
}
for scanner.Scan() {
data := scanner.Text()
if len(data) < 6 { // must be something wrong!
continue
}
if channel.Type == common.ChannelTypeChatGPTWeb {
var chatResponse ChatGptWebChatResponse
err = json.Unmarshal([]byte(data), &chatResponse)
if err != nil {
// Print the response body as a string
buf := new(bytes.Buffer)
buf.ReadFrom(resp.Body)
common.SysError("error unmarshalling chat response: " + err.Error() + " " + buf.String())
return err
}
// if response role is assistant and contains delta, append the content to streamResponseText
if chatResponse.Role == "assistant" && chatResponse.Detail != nil {
for _, choice := range chatResponse.Detail.Choices {
streamResponseText += choice.Delta.Content
}
}
} else if channel.Type == common.ChannelTypeChatbotUI {
streamResponseText += data
} else if channel.Type != common.ChannelTypeChatGPTWeb {
// If the data contains an "event:" marker, strip it; it may appear as a prefix or further inside the data
if strings.HasPrefix(data, "event:") || strings.Contains(data, "event:") {
// Remove the "event:" marker, whether at the front or further in
@@ -181,38 +248,15 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
for _, choice := range streamResponse.Choices {
streamResponseText += choice.Delta.Content
}
} else {
done = true
break
}
}
} else if channel.Type == common.ChannelTypeChatGPTWeb {
scanner := bufio.NewScanner(resp.Body)
go func() {
for scanner.Scan() {
var chatResponse ChatGptWebChatResponse
err = json.Unmarshal(scanner.Bytes(), &chatResponse)
if err != nil {
log.Println("error unmarshal chat response: " + err.Error())
continue
}
// if response role is assistant and contains delta, append the content to streamResponseText
if chatResponse.Role == "assistant" && chatResponse.Detail != nil {
for _, choice := range chatResponse.Detail.Choices {
streamResponseText += choice.Delta.Content
}
}
}
}()
}
defer resp.Body.Close()
// Check if streaming is complete and streamResponseText is populated
if streamResponseText == "" || !done && channel.Type != common.ChannelTypeChatGPTWeb {
if streamResponseText == "" {
return errors.New("Streaming not complete")
}
@@ -226,7 +270,7 @@ func buildTestRequest() *ChatRequest {
}
testMessage := Message{
Role: "user",
Content: "say hi word only",
Content: "Hello ChatGPT!",
}
testRequest.Messages = append(testRequest.Messages, testMessage)
return testRequest

View File

@@ -13,6 +13,7 @@ import (
"one-api/model"
"strconv"
"strings"
"time"
"github.com/gin-gonic/gin"
)
@@ -122,6 +123,10 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
// remove /v1/chat/completions from request url
requestURL := strings.Split(requestURL, "/v1/chat/completions")[0]
fullRequestURL = fmt.Sprintf("%s%s", baseURL, requestURL)
} else if channelType == common.ChannelTypeChatbotUI {
// remove /v1/chat/completions from request url
requestURL := strings.Split(requestURL, "/v1/chat/completions")[0]
fullRequestURL = fmt.Sprintf("%s%s", baseURL, requestURL)
} else if channelType == common.ChannelTypePaLM {
err := relayPaLM(textRequest, c)
return err
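
The rewrite above only strips the OpenAI path so the relayed request hits the chatbot-ui (or chatgpt-web) deployment's root; assuming the incoming path is exactly /v1/chat/completions and a hypothetical base URL, the result is simply the channel's base URL:

package main

import (
	"fmt"
	"strings"
)

func main() {
	baseURL := "https://my-chatbot-ui.example.com" // hypothetical channel BaseURL
	requestURL := "/v1/chat/completions"           // path of the incoming relay request

	// Same rewrite as the chatgpt-web / chatbot-ui branches above.
	trimmed := strings.Split(requestURL, "/v1/chat/completions")[0]
	fullRequestURL := fmt.Sprintf("%s%s", baseURL, trimmed)
	fmt.Println(fullRequestURL) // https://my-chatbot-ui.example.com
}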
@@ -223,11 +228,57 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
}
// Construct JSON data without adding escape characters
map1 := map[string]string{
"prompt": prompt,
"systemMessage": systemMessage.Content,
"temperature": strconv.FormatFloat(reqBody.Temperature, 'f', 2, 64),
"top_p": strconv.FormatFloat(reqBody.TopP, 'f', 2, 64),
map1 := make(map[string]interface{})
map1["prompt"] = prompt + "\nResponse as assistant, but do not include the role in response."
map1["systemMessage"] = systemMessage.Content
if reqBody.Temperature != 0 {
map1["temperature"] = formatFloat(reqBody.Temperature)
}
if reqBody.TopP != 0 {
map1["top_p"] = formatFloat(reqBody.TopP)
}
// Convert map to json string
jsonData, err := json.Marshal(map1)
if err != nil {
return errorWrapper(err, "marshal_json_failed", http.StatusInternalServerError)
}
// Convert json string to io.Reader
requestBody = bytes.NewReader(jsonData)
} else if channelType == common.ChannelTypeChatbotUI {
// Get system message from Message json, Role == "system"
var reqBody ChatRequest
// Parse requestBody into systemMessage
err := json.NewDecoder(requestBody).Decode(&reqBody)
if err != nil {
return errorWrapper(err, "decode_request_body_failed", http.StatusInternalServerError)
}
// Get system message from Message json, Role == "system"
var systemMessage string
for _, message := range reqBody.Messages {
if message.Role == "system" {
systemMessage = message.Content
break
}
}
// Construct JSON data without adding escape characters
map1 := make(map[string]interface{})
map1["prompt"] = systemMessage
map1["temperature"] = formatFloat(reqBody.Temperature)
map1["key"] = ""
map1["messages"] = reqBody.Messages
map1["model"] = map[string]interface{}{
"id": reqBody.Model,
}
// Convert map to json string
@@ -344,20 +395,46 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
}
}()
if isStream {
if isStream || channelType == common.ChannelTypeChatGPTWeb || channelType == common.ChannelTypeChatbotUI {
dataChan := make(chan string)
stopChan := make(chan bool)
if channelType == common.ChannelTypeChatGPTWeb {
scanner := bufio.NewScanner(resp.Body)
go func() {
for scanner.Scan() {
var chatResponse ChatGptWebChatResponse
err = json.Unmarshal(scanner.Bytes(), &chatResponse)
scanner := bufio.NewScanner(resp.Body)
if channelType != common.ChannelTypeChatGPTWeb && channelType != common.ChannelTypeChatbotUI {
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := strings.Index(string(data), "\n"); i >= 0 {
return i + 2, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
})
}
go func() {
for scanner.Scan() {
data := scanner.Text()
if len(data) < 6 { // must be something wrong!
continue
}
if channelType == common.ChannelTypeChatGPTWeb {
var chatResponse ChatGptWebChatResponse
err = json.Unmarshal([]byte(data), &chatResponse)
if err != nil {
log.Println("error unmarshal chat response: " + err.Error())
continue
// Print the response body as a string
buf := new(bytes.Buffer)
buf.ReadFrom(resp.Body)
common.SysError("error unmarshalling chat response: " + err.Error() + " " + buf.String())
return
}
// if response role is assistant and contains delta, append the content to streamResponseText
@@ -387,33 +464,28 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
dataChan <- "data: " + string(jsonData)
}
}
}
stopChan <- true
}()
} else {
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := strings.Index(string(data), "\n"); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
})
go func() {
for scanner.Scan() {
data := scanner.Text()
if len(data) < 6 { // must be something wrong!
// common.SysError("invalid stream response: " + data)
continue
} else if channelType == common.ChannelTypeChatbotUI {
returnObj := map[string]interface{}{
"id": "chatcmpl-" + strconv.Itoa(int(time.Now().UnixNano())),
"object": "text_completion",
"created": time.Now().Unix(),
"model": textRequest.Model,
"choices": []map[string]interface{}{
// set finish_reason to null in json
{
"finish_reason": nil,
"index": 0,
"delta": map[string]interface{}{
"content": data,
},
},
},
}
jsonData, _ := json.Marshal(returnObj)
dataChan <- "data: " + string(jsonData)
} else {
// If the data contains an "event:" marker, strip it; it may appear as a prefix or further inside the data
if strings.HasPrefix(data, "event:") || strings.Contains(data, "event:") {
// Remove the "event:" marker, whether at the front or further in
@@ -463,10 +535,11 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
}
}
}
stopChan <- true
}()
}
}
stopChan <- true
}()
c.Writer.Header().Set("Content-Type", "text/event-stream")
c.Writer.Header().Set("Cache-Control", "no-cache")

View File

@@ -14,4 +14,5 @@ export const CHANNEL_OPTIONS = [
//
{ key: 14, text: 'Chanzhaoyu/chatgpt-web', value: 14, color: 'purple' },
{ key: 15, text: 'mckaywrigley/chatbot-ui', value: 15, color: 'orange' },
];

View File

@@ -51,7 +51,7 @@ const EditChannel = () => {
setInputs((inputs) => ({ ...inputs, [name]: value }));
};
const loadChannel = async () => {
const loadChannel = async (modelOptions) => {
let res = await API.get(`/api/channel/${channelId}`);
const { success, message, data } = res.data;
if (success) {
@@ -59,19 +59,19 @@ const EditChannel = () => {
data.models = [];
} else {
data.models = data.models.split(',');
// setTimeout(() => {
// let localModelOptions = [...modelOptions];
// data.models.forEach((model) => {
// if (!localModelOptions.find((option) => option.key === model)) {
// localModelOptions.push({
// key: model,
// text: model,
// value: model,
// });
// }
// });
// setModelOptions(localModelOptions);
// }, 1000);
setTimeout(() => {
let localModelOptions = [...modelOptions];
data.models.forEach((model) => {
if (!localModelOptions.find((option) => option.key === model)) {
localModelOptions.push({
key: model,
text: model,
value: model,
});
}
});
setModelOptions(localModelOptions);
}, 1000);
}
if (data.group === '') {
data.groups = [];
@@ -95,19 +95,23 @@ const EditChannel = () => {
const fetchModels = async () => {
try {
let res = await API.get(`/api/channel/models`);
setModelOptions(
res.data.data.map((model) => ({
key: model.id,
text: model.id,
value: model.id,
})),
);
setFullModels(res.data.data.map((model) => model.id));
setBasicModels(
res.data.data
.filter((model) => !model.id.startsWith('gpt-4'))
.map((model) => model.id),
);
const allModels = res.data.data.map((model) => ({
key: model.id,
text: model.id,
value: model.id,
}));
setModelOptions(allModels);
return allModels;
} catch (error) {
showError(error.message);
}
@@ -128,12 +132,12 @@ const EditChannel = () => {
}
};
useEffect(() => {
useEffect(async () => {
const models = await fetchModels();
await fetchGroups();
if (isEdit) {
loadChannel().then();
await loadChannel(models);
}
fetchModels().then();
fetchGroups().then();
}, []);
const submit = async () => {