
Commit

1. Add request model parameters temperature and max tokens
2. Add token usage to the response
robinyeeh committed Dec 25, 2023
1 parent a47ed79 commit 73706d7
Showing 2 changed files with 81 additions and 6 deletions.
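For context, a minimal sketch of how a caller might set the two model parameters added by this commit; the package name, import path, and helper function here are placeholders, not part of the SDK or this change.

package example

// The import path below is hypothetical; substitute the SDK's real module path.
import "github.com/example/bailian/client"

// buildParameters sets the existing topK/seed/useRawPrompt options together with
// the newly added Temperature and MaxTokens; the values are illustrative only.
func buildParameters() *client.CompletionRequestModelParameter {
    p := &client.CompletionRequestModelParameter{}
    p.SetTopK(50)
    p.SetSeed(2222)
    p.SetTemperature(0.3) // sampling temperature, added in this commit
    p.SetMaxTokens(20)    // maximum response tokens, added in this commit
    p.SetUseRawPrompt(true)
    return p
}

The resulting parameter object is attached to a request with request.SetParameters(p), as the updated test below demonstrates.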
67 changes: 64 additions & 3 deletions client/completion.go
@@ -379,9 +379,11 @@ func (cr *CompletionRequest) GetDocTagIds() []int64 {
}

type CompletionRequestModelParameter struct {
- TopK         int32 `json:"TopK,omitempty"`
- Seed         int32 `json:"Seed,omitempty"`
- UseRawPrompt bool  `json:"UseRawPrompt,omitempty"`
+ TopK         int32   `json:"TopK,omitempty"`
+ Seed         int32   `json:"Seed,omitempty"`
+ UseRawPrompt bool    `json:"UseRawPrompt,omitempty"`
+ Temperature  float32 `json:"Temperature,omitempty"`
+ MaxTokens    int32   `json:"MaxTokens,omitempty"`
}

func (cp CompletionRequestModelParameter) String() string {
@@ -419,6 +421,24 @@ func (cp *CompletionRequestModelParameter) GetUseRawPrompt() bool {
return cp.UseRawPrompt
}

+ func (cp *CompletionRequestModelParameter) SetTemperature(v float32) *CompletionRequestModelParameter {
+ cp.Temperature = v
+ return cp
+ }
+
+ func (cp *CompletionRequestModelParameter) GetTemperature() float32 {
+ return cp.Temperature
+ }
+
+ func (cp *CompletionRequestModelParameter) SetMaxTokens(v int32) *CompletionRequestModelParameter {
+ cp.MaxTokens = v
+ return cp
+ }
+
+ func (cp *CompletionRequestModelParameter) GetMaxTokens() int32 {
+ return cp.MaxTokens
+ }
+
type CompletionResponseDataThought struct {
Thought *string `json:"Thought,omitempty"`
ActionType *string `json:"ActionType,omitempty"`
@@ -636,12 +656,44 @@ func (cr *CompletionResponseDataDocReference) GetBizId() string {
return *cr.BizId
}

+ type CompletionResponseDataUsage struct {
+ InputTokens  int32 `json:"InputTokens"`
+ OutputTokens int32 `json:"OutputTokens"`
+ }
+
+ func (cu CompletionResponseDataUsage) String() string {
+ return tea.Prettify(cu)
+ }
+
+ func (cu CompletionResponseDataUsage) GoString() string {
+ return cu.String()
+ }
+
+ func (cu *CompletionResponseDataUsage) SetInputTokens(v int32) *CompletionResponseDataUsage {
+ cu.InputTokens = v
+ return cu
+ }
+
+ func (cu *CompletionResponseDataUsage) GetInputTokens() int32 {
+ return cu.InputTokens
+ }
+
+ func (cu *CompletionResponseDataUsage) SetOutputTokens(v int32) *CompletionResponseDataUsage {
+ cu.OutputTokens = v
+ return cu
+ }
+
+ func (cu *CompletionResponseDataUsage) GetOutputTokens() int32 {
+ return cu.OutputTokens
+ }
+
type CompletionResponseData struct {
ResponseId *string `json:"ResponseId"`
SessionId *string `json:"SessionId,omitempty"`
Text *string `json:"Text,omitempty"`
Thoughts []*CompletionResponseDataThought `json:"Thoughts,omitempty"`
DocReferences []*CompletionResponseDataDocReference `json:"DocReferences,omitempty"`
+ Usage []*CompletionResponseDataUsage `json:"Usage,omitempty"`
}

func (cd CompletionResponseData) String() string {
@@ -706,6 +758,15 @@ func (cd *CompletionResponseData) GetDocReferences() []*CompletionResponseDataDocReference {
return cd.DocReferences
}

+ func (cd *CompletionResponseData) SetUsage(v []*CompletionResponseDataUsage) *CompletionResponseData {
+ cd.Usage = v
+ return cd
+ }
+
+ func (cd *CompletionResponseData) GetUsage() []*CompletionResponseDataUsage {
+ return cd.Usage
+ }
+
type CompletionResponse struct {
Success bool `json:"Success"`
Code *string `json:"Code,omitempty"`
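For the response side, a minimal sketch of reading the token usage added above; the function name and import path are illustrative, and the response value is assumed to come from an existing completion call.

package example

import (
    "fmt"

    "github.com/example/bailian/client" // hypothetical import path
)

// logTokenUsage prints the input/output token counts from the first Usage entry,
// mirroring the access pattern used in completion_test.go below.
func logTokenUsage(response *client.CompletionResponse) {
    usage := response.GetData().GetUsage()
    if len(usage) > 0 {
        fmt.Printf("inputTokens: %d, outputTokens: %d\n",
            usage[0].GetInputTokens(), usage[0].GetOutputTokens())
    }
}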
20 changes: 17 additions & 3 deletions client/completion_test.go
@@ -58,7 +58,11 @@ func TestCreateCompletion(t *testing.T) {
return
}

t.Logf("requestId: %s, text : %s\n", response.GetRequestId(), response.GetData().GetText())
t.Logf("requestId: %s, text : %s", response.GetRequestId(), response.GetData().GetText())
if response.GetData().GetUsage() != nil && len(response.GetData().GetUsage()) > 0 {
usage := response.GetData().GetUsage()[0]
t.Logf(", inputTokens: %d, outputTokens: %d\n", usage.GetInputTokens(), usage.GetOutputTokens())
}
}

func TestCreateStreamCompletion(t *testing.T) {
@@ -138,8 +142,18 @@ func TestCreateCompletionWithParams(t *testing.T) {
chatHistory := []*client.ChatQaMessage{message1, message2}
request.SetHistory(chatHistory)

- // Set the model parameters topK and seed
- modelParameter := &client.CompletionRequestModelParameter{TopK: 50, Seed: 2222, UseRawPrompt: true}
+ // Set the model parameters topK, seed, temperature and max tokens
+ modelParameter := &client.CompletionRequestModelParameter{}
+ // Set the model parameter topK
+ modelParameter.SetTopK(50)
+ // Set the model parameter seed
+ modelParameter.SetSeed(2222)
+ // Set the model parameter temperature
+ modelParameter.SetTemperature(0.3)
+ // Set the model parameter max tokens
+ modelParameter.SetMaxTokens(20)
+ // Whether to use the raw prompt
+ modelParameter.SetUseRawPrompt(true)
request.SetParameters(modelParameter)

// Set the document tag IDs (tagId); once set, document retrieval only recalls from the documents associated with the given tagIds
