Create
Create
client.Chat.Completions.New(ctx, body) (*CreateChatCompletionResponse, error)
POST /chat/completions
Generate a chat completion for the given messages using the specified model.
Parameters
body ChatCompletionNewParams — fields: Messages (required), Model (required), MaxCompletionTokens, RepetitionPenalty, ResponseFormat, Temperature, ToolChoice, Tools, TopK, TopP, User
Returns
CreateChatCompletionResponse — fields: CompletionMessage (CompletionMessage), ID (string), Metrics (array)
package main
import (
"context"
"fmt"
"github.com/stainless-sdks/-go"
"github.com/stainless-sdks/-go/option"
)
// main demonstrates a minimal chat-completion request with the llamaapi SDK:
// it constructs a client from an API key option, sends one user message to
// the POST /chat/completions endpoint, and prints the response ID.
// The call panics on any transport or API error, which is acceptable only
// for example code.
func main() {
	client := llamaapi.NewClient(
		option.WithAPIKey("My API Key"),
	)
	createChatCompletionResponse, err := client.Chat.Completions.New(context.TODO(), llamaapi.ChatCompletionNewParams{
		// gofmt -s: the element type is implied by the slice literal, so
		// repeating llamaapi.MessageUnionParam inside the braces is redundant.
		Messages: []llamaapi.MessageUnionParam{{
			OfUser: &llamaapi.UserMessageParam{
				Content: llamaapi.UserMessageContentUnionParam{
					OfString: llamaapi.String("string"),
				},
				Role: llamaapi.UserMessageRoleUser,
			},
		}},
		Model: "model",
	})
	if err != nil {
		panic(err.Error())
	}
	fmt.Printf("%+v\n", createChatCompletionResponse.ID)
}
200 Example
{
"completion_message": {
"role": "assistant",
"content": "string",
"stop_reason": "stop",
"tool_calls": [
{
"id": "id",
"function": {
"arguments": "arguments",
"name": "name"
}
}
]
},
"id": "id",
"metrics": [
{
"metric": "metric",
"value": 0,
"unit": "unit"
}
]
}