Skip to content

Commit

Permalink
Merge pull request ConnectAI-E#194 from LufeiCheng/feat_ai_mode
Browse files Browse the repository at this point in the history
feat ConnectAI-E#181 add temperature params modify by choose ai mode
  • Loading branch information
Leizhenpeng authored Apr 14, 2023
2 parents 880fcf1 + 9b9a6cc commit c5afe23
Show file tree
Hide file tree
Showing 10 changed files with 168 additions and 8 deletions.
38 changes: 38 additions & 0 deletions code/handlers/card_ai_mode_action.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
package handlers

import (
"context"

"start-feishubot/services"
"start-feishubot/services/openai"

larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
)

// NewAIModeCardHandler returns a CardHandlerFunc that processes the
// AI-mode selection card action; any other card kind is passed to the
// next handler in the chain via ErrNextHandler.
func NewAIModeCardHandler(cardMsg CardMsg,
	m MessageHandler) CardHandlerFunc {
	return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
		if cardMsg.Kind != AIModeChooseKind {
			return nil, ErrNextHandler
		}
		newCard, err, done := CommonProcessAIMode(cardMsg, cardAction, m.sessionCache)
		if done {
			return newCard, err
		}
		return nil, nil
	}
}

// CommonProcessAIMode handles an AI-mode menu selection: it echoes the
// chosen mode back to the user and stores the corresponding temperature
// in the session cache. It always reports done=true so the handler
// chain stops here.
func CommonProcessAIMode(msg CardMsg, cardAction *larkcard.CardAction,
	cache services.SessionServiceCacheInterface) (interface{},
	error, bool) {
	option := cardAction.Action.Option
	// Look the option up explicitly: a missing key in AIModeMap would
	// otherwise yield the zero AIMode (temperature 0.0) and be stored
	// silently.
	aiMode, ok := openai.AIModeMap[option]
	if !ok {
		replyMsg(context.Background(), "已选择AI模式:"+option,
			&msg.MsgId)
		return nil, nil, true
	}
	replyMsg(context.Background(), "已选择AI模式:"+option,
		&msg.MsgId)
	cache.SetAIMode(msg.SessionId, aiMode)
	return nil, nil, true
}
1 change: 1 addition & 0 deletions code/handlers/card_common_action.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ func NewCardHandler(m MessageHandler) CardHandlerFunc {
NewPicModeChangeHandler,
NewRoleTagCardHandler,
NewRoleCardHandler,
NewAIModeCardHandler,
}

return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) {
Expand Down
12 changes: 12 additions & 0 deletions code/handlers/event_common_action.go
Original file line number Diff line number Diff line change
Expand Up @@ -153,3 +153,15 @@ func (*RoleListAction) Execute(a *ActionInfo) bool {
}
return true
}

// AIModeAction handles the AI-mode command ("/ai_mode" or "AI模式") by
// presenting the mode-selection card.
type AIModeAction struct {
}

// Execute sends the AI-mode list card when the parsed message matches
// the ai-mode command; returning false stops the action chain.
func (*AIModeAction) Execute(a *ActionInfo) bool {
	_, matched := utils.EitherCutPrefix(a.info.qParsed,
		"/ai_mode", "AI模式")
	if !matched {
		return true
	}
	SendAIModeListsCard(*a.ctx, a.info.sessionId, a.info.msgId, openai.AIModeStrs)
	return false
}
4 changes: 3 additions & 1 deletion code/handlers/event_msg_action.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ func (*MessageAction) Execute(a *ActionInfo) bool {
msg = append(msg, openai.Messages{
Role: "user", Content: a.info.qParsed,
})
completions, err := a.handler.gpt.Completions(msg)
// get ai mode as temperature
aiMode := a.handler.sessionCache.GetAIMode(*a.info.sessionId)
completions, err := a.handler.gpt.Completions(msg, aiMode)
if err != nil {
replyMsg(*a.ctx, fmt.Sprintf(
"🤖️:消息机器人摆烂了,请稍后再试~\n错误信息: %v", err), a.info.msgId)
Expand Down
1 change: 1 addition & 0 deletions code/handlers/handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ func (m MessageHandler) msgReceivedHandler(ctx context.Context, event *larkim.P2
&EmptyAction{}, //空消息处理
&ClearAction{}, //清除消息处理
&PicAction{}, //图片处理
&AIModeAction{}, //模式切换处理
&RoleListAction{}, //角色列表处理
&HelpAction{}, //帮助处理
&BalanceAction{}, //余额处理
Expand Down
38 changes: 38 additions & 0 deletions code/handlers/msg.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ var (
PicVarMoreKind = CardKind("pic_var_more") // 变量图片
RoleTagsChooseKind = CardKind("role_tags_choose") // 内置角色所属标签选择
RoleChooseKind = CardKind("role_choose") // 内置角色选择
AIModeChooseKind = CardKind("ai_mode_choose") // AI模式选择
)

var (
Expand Down Expand Up @@ -408,6 +409,32 @@ func withRoleBtn(sessionID *string, titles ...string) larkcard.
return actions
}

// withAIModeBtn builds a message-card element containing a drop-down
// menu whose options are the given AI-mode labels; the menu action
// payload carries the session id so the card handler can store the
// chosen mode.
func withAIModeBtn(sessionID *string, aiModeStrs []string) larkcard.MessageCardElement {
	options := make([]MenuOption, 0, len(aiModeStrs))
	for _, mode := range aiModeStrs {
		options = append(options, MenuOption{
			label: mode,
			value: mode,
		})
	}

	// NOTE(review): "msgId" is populated with the session id here —
	// confirm this is intentional; other cards appear to carry a real
	// message id.
	modeMenu := newMenu("选择模式",
		map[string]interface{}{
			"value":     "0",
			"kind":      AIModeChooseKind,
			"sessionId": *sessionID,
			"msgId":     *sessionID,
		},
		options...,
	)

	return larkcard.NewMessageCardAction().
		Actions([]larkcard.MessageCardActionElement{modeMenu}).
		Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
		Build()
}

func replyMsg(ctx context.Context, msg string, msgId *string) error {
msg, i := processMessage(msg)
if i != nil {
Expand Down Expand Up @@ -651,6 +678,8 @@ func sendHelpCard(ctx context.Context,
"sessionId": *sessionId,
}, larkcard.MessageCardButtonTypeDanger)),
withSplitLine(),
withMainMd("🤖 **AI模式选择** \n"+" 文本回复 *AI模式* 或 */ai_mode*"),
withSplitLine(),
withMainMd("🛖 **内置角色列表** \n"+" 文本回复 *角色列表* 或 */roles*"),
withSplitLine(),
withMainMd("🥷 **角色扮演模式**\n文本回复*角色扮演* 或 */system*+空格+角色信息"),
Expand Down Expand Up @@ -740,3 +769,12 @@ func SendRoleListCard(ctx context.Context,
withNote("提醒:选择内置场景,快速进入角色扮演模式。"))
replyCard(ctx, msgId, newCard)
}

// SendAIModeListsCard replies to msgId with a card letting the user
// pick one of the built-in AI modes listed in aiModeStrs.
func SendAIModeListsCard(ctx context.Context,
	sessionId *string, msgId *string, aiModeStrs []string) {
	// The card-construction error is deliberately discarded, matching
	// the other Send*Card helpers in this file.
	card, _ := newSendCard(
		withHeader("🤖 AI模式选择", larkcard.TemplateIndigo),
		withAIModeBtn(sessionId, aiModeStrs),
		withNote("提醒:选择内置模式,让AI更好的理解您的需求。"))
	replyCard(ctx, msgId, card)
}
34 changes: 28 additions & 6 deletions code/services/openai/gpt3.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,32 @@ import (
"github.com/pandodao/tokenizer-go"
)

// AIMode is the sampling temperature sent to the chat completion API;
// higher values produce more creative, less deterministic replies.
type AIMode float64

// Built-in AI modes, ordered from most deterministic to most creative.
const (
	Fresh      AIMode = 0.1
	Warmth     AIMode = 0.4
	Balance    AIMode = 0.7
	Creativity AIMode = 1.0
)

// AIModeMap maps the user-facing (Chinese) mode label to its
// temperature value.
var AIModeMap = map[string]AIMode{
	"清新": Fresh,
	"温暖": Warmth,
	"平衡": Balance,
	"创意": Creativity,
}

// AIModeStrs lists the mode labels in display order for the selection
// card (kept as a slice because map iteration order is random).
var AIModeStrs = []string{
	"清新",
	"温暖",
	"平衡",
	"创意",
}

const (
	maxTokens = 2000
	engine    = "gpt-3.5-turbo"
)

type Messages struct {
Expand Down Expand Up @@ -39,7 +61,7 @@ type ChatGPTRequestBody struct {
Model string `json:"model"`
Messages []Messages `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float32 `json:"temperature"`
Temperature AIMode `json:"temperature"`
TopP int `json:"top_p"`
FrequencyPenalty int `json:"frequency_penalty"`
PresencePenalty int `json:"presence_penalty"`
Expand All @@ -50,13 +72,13 @@ func (msg *Messages) CalculateTokenLength() int {
return tokenizer.MustCalToken(text)
}

func (gpt *ChatGPT) Completions(msg []Messages) (resp Messages,
func (gpt *ChatGPT) Completions(msg []Messages, aiMode AIMode) (resp Messages,
err error) {
requestBody := ChatGPTRequestBody{
Model: engine,
Messages: msg,
MaxTokens: maxTokens,
Temperature: temperature,
Temperature: aiMode,
TopP: 1,
FrequencyPenalty: 0,
PresencePenalty: 0,
Expand Down
2 changes: 1 addition & 1 deletion code/services/openai/gpt3_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ func TestCompletions(t *testing.T) {

gpt := NewChatGPT(*config)

resp, err := gpt.Completions(msgs)
resp, err := gpt.Completions(msgs, Balance)
if err != nil {
t.Errorf("TestCompletions failed with error: %v", err)
}
Expand Down
44 changes: 44 additions & 0 deletions code/services/sessionCache.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ type SessionMeta struct {
Mode SessionMode `json:"mode"`
Msg []openai.Messages `json:"msg,omitempty"`
PicSetting PicSetting `json:"pic_setting,omitempty"`
AIMode openai.AIMode `json:"ai_mode,omitempty"`
}

const (
Expand All @@ -34,17 +35,37 @@ const (
)

// SessionServiceCacheInterface abstracts the per-session state store:
// raw session metadata, conversation mode, message history, AI mode
// (temperature), picture resolution, and clearing a session.
type SessionServiceCacheInterface interface {
	Get(sessionId string) *SessionMeta
	Set(sessionId string, sessionMeta *SessionMeta)
	GetMsg(sessionId string) []openai.Messages
	SetMsg(sessionId string, msg []openai.Messages)
	SetMode(sessionId string, mode SessionMode)
	GetMode(sessionId string) SessionMode
	GetAIMode(sessionId string) openai.AIMode
	SetAIMode(sessionId string, aiMode openai.AIMode)
	SetPicResolution(sessionId string, resolution Resolution)
	GetPicResolution(sessionId string) string
	Clear(sessionId string)
}

var sessionServices *SessionService

// Get returns the cached SessionMeta for sessionId, or nil when the
// session has no cached state.
func (s *SessionService) Get(sessionId string) *SessionMeta {
	if cached, ok := s.cache.Get(sessionId); ok {
		return cached.(*SessionMeta)
	}
	return nil
}

// Set stores sessionMeta for sessionId with a 12-hour TTL.
func (s *SessionService) Set(sessionId string, sessionMeta *SessionMeta) {
	const ttl = 12 * time.Hour
	s.cache.Set(sessionId, sessionMeta, ttl)
}

func (s *SessionService) GetMode(sessionId string) SessionMode {
// Get the session mode from the cache.
sessionContext, ok := s.cache.Get(sessionId)
Expand All @@ -68,6 +89,29 @@ func (s *SessionService) SetMode(sessionId string, mode SessionMode) {
s.cache.Set(sessionId, sessionMeta, maxCacheTime)
}

// GetAIMode returns the AI mode stored for sessionId, defaulting to
// openai.Balance when the session has no cached state.
func (s *SessionService) GetAIMode(sessionId string) openai.AIMode {
	if cached, ok := s.cache.Get(sessionId); ok {
		return cached.(*SessionMeta).AIMode
	}
	return openai.Balance
}

// SetAIMode stores aiMode for sessionId, creating the session entry
// when absent; in either case the 12-hour TTL is refreshed.
func (s *SessionService) SetAIMode(sessionId string, aiMode openai.AIMode) {
	const ttl = 12 * time.Hour
	meta := &SessionMeta{AIMode: aiMode}
	if cached, ok := s.cache.Get(sessionId); ok {
		meta = cached.(*SessionMeta)
		meta.AIMode = aiMode
	}
	s.cache.Set(sessionId, meta, ttl)
}

func (s *SessionService) GetMsg(sessionId string) (msg []openai.Messages) {
sessionContext, ok := s.cache.Get(sessionId)
if !ok {
Expand Down
2 changes: 2 additions & 0 deletions readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,8 @@

🎭 角色扮演:支持场景模式,增添讨论乐趣和创意

🤖 AI模式:内置4种AI模式,感受AI的智慧与创意

🔄 上下文保留:回复对话框即可继续同一话题讨论

⏰ 自动结束:超时自动结束对话,支持清除讨论历史
Expand Down

0 comments on commit c5afe23

Please sign in to comment.