Add ToolReferenceChunk, ToolFileChunk, BuiltInConnector enum, ReferenceID union type (int|string), GuardrailConfig with v1/v2 moderation, ConnectorTool for custom connectors, and guardrails field on chat/agents/conversation requests. Add AudioTranscriptionRealtime and AudioSpeech to ModelCapabilities. Move GuardrailConfig from agents/ to chat/ as shared base type. Remove bundled OpenAPI spec; SDK now tracks upstream Python SDK. BREAKING: ReferenceChunk.ReferenceIDs changed from []int to []ReferenceID. Use IntRef(n) / StringRef(s) constructors.
58 lines
2.1 KiB
Go
58 lines
2.1 KiB
Go
package chat
|
|
|
|
import "encoding/json"
|
|
|
|
// PromptMode controls the reasoning behavior of the model.
type PromptMode string

const (
	// PromptModeReasoning selects the model's reasoning behavior
	// (the only mode currently defined; wire value "reasoning").
	PromptModeReasoning PromptMode = "reasoning"
)
|
|
|
|
// Prediction provides expected completion content for optimization.
type Prediction struct {
	// Type is the prediction discriminator sent on the wire as "type".
	// NOTE(review): the accepted values are not visible here — presumably
	// a fixed string such as "content"; confirm against the API spec.
	Type string `json:"type"`
	// Content is the expected completion text.
	Content string `json:"content"`
}
|
|
|
|
// CompletionRequest represents a chat completion request.
//
// Optional scalar fields use pointer types so that the zero value can be
// distinguished from "not set" and omitted via omitempty.
type CompletionRequest struct {
	// Model is the identifier of the model to use. Required.
	Model string `json:"model"`
	// Messages is the conversation history. It is tagged json:"-" here
	// because MarshalJSON emits it explicitly (as required "messages",
	// without omitempty) alongside the synthetic "stream" field.
	Messages []Message `json:"-"`
	// Temperature controls sampling randomness; nil means server default.
	Temperature *float64 `json:"temperature,omitempty"`
	// TopP is the nucleus-sampling probability mass; nil means server default.
	TopP *float64 `json:"top_p,omitempty"`
	// MaxTokens caps the number of tokens generated.
	MaxTokens *int `json:"max_tokens,omitempty"`
	// Stop lists sequences at which generation halts.
	Stop []string `json:"stop,omitempty"`
	// RandomSeed makes sampling deterministic when set.
	RandomSeed *int `json:"random_seed,omitempty"`
	// ResponseFormat constrains the output format (e.g. JSON mode).
	ResponseFormat *ResponseFormat `json:"response_format,omitempty"`
	// Tools declares the tools the model may call.
	Tools []Tool `json:"tools,omitempty"`
	// ToolChoice controls whether/which tool the model must use.
	ToolChoice *ToolChoice `json:"tool_choice,omitempty"`
	// PresencePenalty penalizes tokens already present in the output.
	PresencePenalty *float64 `json:"presence_penalty,omitempty"`
	// FrequencyPenalty penalizes tokens by their output frequency.
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
	// N is the number of completions to generate.
	N *int `json:"n,omitempty"`
	// SafePrompt injects the provider's safety prompt when true.
	// NOTE(review): plain bool with omitempty — an explicit false is
	// indistinguishable from unset, unlike the pointer fields above.
	// Changing to *bool would be a breaking API change; left as-is.
	SafePrompt bool `json:"safe_prompt,omitempty"`
	// ParallelToolCalls toggles parallel tool invocation.
	ParallelToolCalls *bool `json:"parallel_tool_calls,omitempty"`
	// Metadata carries arbitrary key/value pairs attached to the request.
	Metadata map[string]any `json:"metadata,omitempty"`
	// Prediction supplies expected completion content for optimization.
	Prediction *Prediction `json:"prediction,omitempty"`
	// PromptMode selects the model's reasoning behavior.
	PromptMode *PromptMode `json:"prompt_mode,omitempty"`
	// Guardrails configures moderation guardrails for the request.
	Guardrails []GuardrailConfig `json:"guardrails,omitempty"`
	// stream is set internally (via EnableStream) and serialized as
	// "stream" by MarshalJSON; unexported so consumers cannot set it.
	stream bool
}
|
|
|
|
// EnableStream is used by the mistral package to enable streaming on requests.
|
|
// It is not intended for direct use by consumers.
|
|
func (r *CompletionRequest) EnableStream() { r.stream = true }
|
|
|
|
func (r *CompletionRequest) MarshalJSON() ([]byte, error) {
|
|
type Alias CompletionRequest
|
|
return json.Marshal(&struct {
|
|
Messages []Message `json:"messages"`
|
|
Stream bool `json:"stream"`
|
|
*Alias
|
|
}{
|
|
Messages: r.Messages,
|
|
Stream: r.stream,
|
|
Alias: (*Alias)(r),
|
|
})
|
|
}
|