Skip to content

Commit

Permalink
v0.6.2.4 开发分支,支持 openai max token 设置, 调整插件依赖
Browse files Browse the repository at this point in the history
  • Loading branch information
whyiyhw committed May 27, 2023
1 parent 3c78964 commit 020341c
Show file tree
Hide file tree
Showing 7 changed files with 31 additions and 10 deletions.
5 changes: 3 additions & 2 deletions chat/common/openai/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ func (c *ChatClient) ChatStream(req []ChatModelMessage, channel chan string) (st
continue
}
//估算长度
if NumTokensFromMessages(req[len(req)-i-1:], ChatModel) < (TotalToken - c.MaxToken) {
if NumTokensFromMessages(req[len(req)-i-1:], ChatModel) < (c.TotalToken - c.MaxToken) {
first = len(req) - i - 1
} else {
break
Expand All @@ -51,6 +51,7 @@ func (c *ChatClient) ChatStream(req []ChatModelMessage, channel chan string) (st
Content: message.Content,
})
}
fmt.Println("current", c.Model)
if _, ok := Models[c.Model]; !ok {
c.Model = ChatModel
}
Expand Down Expand Up @@ -117,7 +118,7 @@ func (c *ChatClient) Chat(req []ChatModelMessage) (string, error) {
continue
}
//估算长度
if NumTokensFromMessages(req[len(req)-i-1:], ChatModel) < (TotalToken - c.MaxToken) {
if NumTokensFromMessages(req[len(req)-i-1:], ChatModel) < (c.TotalToken - c.MaxToken) {
first = len(req) - i - 1
} else {
break
Expand Down
3 changes: 2 additions & 1 deletion chat/common/openai/context.go
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,8 @@ func (c *UserContext) doSummary(summary []ChatModelMessage) ([]ChatModelMessage,
logx.Info("summary_req_length", ": ", len([]rune(newPrompt)))

// 调用 openai api 进行 summary 简化到 100 字以内
summaryStr, err := c.Client.WithModel(TextModel).WithMaxToken(1500).WithTemperature(0.1).
sc := c
summaryStr, err := sc.Client.WithModel(TextModel).WithMaxToken(1500).WithTemperature(0).
Completion(newPrompt)

logx.Info("summary_reps", ": "+summaryStr)
Expand Down
8 changes: 8 additions & 0 deletions chat/common/openai/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -51,13 +51,15 @@ type ChatClient struct {
Model string `json:"model"`
BaseHost string `json:"base_host"`
MaxToken int `json:"max_token"`
TotalToken int `json:"total_token"`
Temperature float32 `json:"temperature"`
}

func NewChatClient(apiKey string) *ChatClient {
return &ChatClient{
APIKey: apiKey,
MaxToken: MaxToken,
TotalToken: TotalToken,
Temperature: float32(Temperature),
}
}
Expand Down Expand Up @@ -98,6 +100,12 @@ func (c *ChatClient) WithTemperature(temperature float32) *ChatClient {
return c
}

// WithTotalToken 设置总token数
func (c *ChatClient) WithTotalToken(totalToken int) *ChatClient {
c.TotalToken = totalToken
return c
}

func (c *ChatClient) WithHttpProxy(proxyUrl string) *ChatClient {
c.HttpProxy = proxyUrl
return c
Expand Down
5 changes: 4 additions & 1 deletion chat/service/chat/api/etc/chat-api.yaml.complete.bak
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ OpenAi: # openai配置
Host: "https://api.openai.com" # openai host (可选,使用cf进行反向代理时,修改可用)
Origin: "open_ai" # 默认为 调用 open_ai 也支持 azure , azure_ad (可选 默认为 open_ai)
Engine: "deployment_name" # engine = "deployment_name"(当 Origin 为 azure, azure_ad 时必填)
MaxToken: 2000 # 一次会话能响应内容的最大 token
TotalToken: 3900 # 一次对话 openai 能处理的最大 token 数量 gpt3:4096 /gpt4:8192 /gpt-4-32k:32768
Temperature: 0.8 # 对话的创造性,当其逼近于0时,其响应的结果更加死板,当其趋近于1时,其对话更富有跳跃与创造力

Proxy: # 代理配置 (可选)
Enable: false # 是否启用代理,默认为 false(可选)
Expand Down Expand Up @@ -75,7 +78,7 @@ Plugins:
Auth:
Type: "none"
API:
URL: "http://192.168.1.202:8885/chat"
URL: "http://192.168.1.202:8885/search"
- NameForHuman: "维基百科查询"
NameForModel: "wikipedia"
DescForHuman: "这个插件可以提供关于人、地点、公司、历史事件或其他主题的一般性问题。"
Expand Down
11 changes: 7 additions & 4 deletions chat/service/chat/api/internal/config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,13 @@ type Config struct {

// openai 配置
OpenAi struct {
Key string
Host string `json:"host,optional,default=https://api.openai.com"`
Origin string `json:"origin,optional,default=open_ai"`
Engine string `json:"engine,optional,default="`
Key string
Host string `json:"host,optional,default=https://api.openai.com"`
Origin string `json:"origin,optional,default=open_ai"`
Engine string `json:"engine,optional,default="`
MaxToken int `json:"max_token,optional,default=2000"`
TotalToken int `json:"total_token,optional,default=3900"`
Temperature float32 `json:"temperature,optional,default=0.8"`
}

// http proxy 设置
Expand Down
9 changes: 7 additions & 2 deletions chat/service/chat/api/internal/logic/chatlogic.go
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,11 @@ func (l *ChatLogic) Chat(req *types.ChatReq) (resp *types.ChatReply, err error)
WithModel(l.model).
WithBaseHost(l.baseHost).
WithOrigin(l.svcCtx.Config.OpenAi.Origin).
WithEngine(l.svcCtx.Config.OpenAi.Engine)
WithEngine(l.svcCtx.Config.OpenAi.Engine).
WithMaxToken(l.svcCtx.Config.OpenAi.MaxToken).
WithTemperature(l.svcCtx.Config.OpenAi.Temperature).
WithTotalToken(l.svcCtx.Config.OpenAi.TotalToken)

if l.svcCtx.Config.Proxy.Enable {
c = c.WithHttpProxy(l.svcCtx.Config.Proxy.Http).WithSocks5Proxy(l.svcCtx.Config.Proxy.Socket5)
}
Expand Down Expand Up @@ -185,7 +189,8 @@ func (l *ChatLogic) Chat(req *types.ChatReq) (resp *types.ChatReply, err error)
API: i2.API,
})
}
pluginInfo, err := c.WithMaxToken(1000).WithTemperature(0).
pc := c
pluginInfo, err := pc.WithMaxToken(1000).WithTemperature(0).
Completion(plugin.GetPluginPromptInfo(req.MSG, p))
if err == nil {
msg, ok := plugin.RunPlugin(pluginInfo, p)
Expand Down
Binary file modified plugins/search/requirements.txt
Binary file not shown.

0 comments on commit 020341c

Please sign in to comment.