1. Add README, LICENSE (MIT), .gitignore, Makefile, CHANGELOG
2. Add Version constant and User-Agent header to all requests
3. Rename SetStream to EnableStream (narrower API surface)
4. Fix FinishReason in CompletionStreamChoice to use typed *FinishReason
5. Type conversation entry Content as chat.Content instead of json.RawMessage
6. Graceful unknown type handling — UnknownEntry, UnknownEvent, UnknownChunk, UnknownMessage, UnknownAgentTool all return data instead of erroring on unrecognized discriminator values
7. Type agent tools with AgentTool sealed interface + UnmarshalAgentTool
8. Add pagination params to ListConversations and ListLibraries
9. Move openapi.yaml to docs/openapi.yaml
57 lines
2.0 KiB
Go
package chat
|
|
|
|
import "encoding/json"
|
|
|
|
// PromptMode controls the reasoning behavior of the model.
type PromptMode string

// PromptModeReasoning is the "reasoning" prompt mode.
const PromptModeReasoning PromptMode = "reasoning"
|
|
|
|
// Prediction provides expected completion content for optimization.
type Prediction struct {
	// Type is the prediction discriminator sent as "type".
	Type string `json:"type"`
	// Content is the predicted completion text.
	Content string `json:"content"`
}
|
|
|
|
// CompletionRequest represents a chat completion request.
|
|
type CompletionRequest struct {
|
|
Model string `json:"model"`
|
|
Messages []Message `json:"-"`
|
|
Temperature *float64 `json:"temperature,omitempty"`
|
|
TopP *float64 `json:"top_p,omitempty"`
|
|
MaxTokens *int `json:"max_tokens,omitempty"`
|
|
Stop []string `json:"stop,omitempty"`
|
|
RandomSeed *int `json:"random_seed,omitempty"`
|
|
ResponseFormat *ResponseFormat `json:"response_format,omitempty"`
|
|
Tools []Tool `json:"tools,omitempty"`
|
|
ToolChoice *ToolChoice `json:"tool_choice,omitempty"`
|
|
PresencePenalty *float64 `json:"presence_penalty,omitempty"`
|
|
FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
|
|
N *int `json:"n,omitempty"`
|
|
SafePrompt bool `json:"safe_prompt,omitempty"`
|
|
ParallelToolCalls *bool `json:"parallel_tool_calls,omitempty"`
|
|
Metadata map[string]any `json:"metadata,omitempty"`
|
|
Prediction *Prediction `json:"prediction,omitempty"`
|
|
PromptMode *PromptMode `json:"prompt_mode,omitempty"`
|
|
stream bool
|
|
}
|
|
|
|
// EnableStream is used by the mistral package to enable streaming on requests.
|
|
// It is not intended for direct use by consumers.
|
|
func (r *CompletionRequest) EnableStream() { r.stream = true }
|
|
|
|
func (r *CompletionRequest) MarshalJSON() ([]byte, error) {
|
|
type Alias CompletionRequest
|
|
return json.Marshal(&struct {
|
|
Messages []Message `json:"messages"`
|
|
Stream bool `json:"stream"`
|
|
*Alias
|
|
}{
|
|
Messages: r.Messages,
|
|
Stream: r.stream,
|
|
Alias: (*Alias)(r),
|
|
})
|
|
}
|