Class: OmniAI::Anthropic::Chat
- Inherits:
-
Chat
- Object
- Chat
- OmniAI::Anthropic::Chat
show all
- Defined in:
- lib/omniai/anthropic/chat.rb,
lib/omniai/anthropic/chat/stream.rb,
lib/omniai/anthropic/chat/url_serializer.rb,
lib/omniai/anthropic/chat/file_serializer.rb,
lib/omniai/anthropic/chat/text_serializer.rb,
lib/omniai/anthropic/chat/tool_serializer.rb,
lib/omniai/anthropic/chat/choice_serializer.rb,
lib/omniai/anthropic/chat/content_serializer.rb,
lib/omniai/anthropic/chat/message_serializer.rb,
lib/omniai/anthropic/chat/function_serializer.rb,
lib/omniai/anthropic/chat/response_serializer.rb,
lib/omniai/anthropic/chat/thinking_serializer.rb,
lib/omniai/anthropic/chat/tool_call_serializer.rb,
lib/omniai/anthropic/chat/tool_call_result_serializer.rb
Overview
An Anthropic chat implementation.
Usage:
completion = OmniAI::Anthropic::Chat.process!(client: client) do |prompt|
prompt.system('You are an expert in the field of AI.')
prompt.user('What are the biggest risks of AI?')
end
completion.text
Defined Under Namespace
Modules: ChoiceSerializer, ContentSerializer, FileSerializer, FunctionSerializer, MessageSerializer, Model, ResponseSerializer, TextSerializer, ThinkingSerializer, ToolCallResultSerializer, ToolCallSerializer, ToolSerializer, URLSerializer
Classes: Stream
Constant Summary
collapse
- DEFAULT_MODEL =
Model::CLAUDE_SONNET
- CONTEXT =
Context.build do |context|
  # Registers the Anthropic-specific converters used when translating OmniAI
  # messages/tools/responses to and from the Anthropic API wire format.
  # Serializers produce request payload fragments; deserializers parse
  # response fragments back into OmniAI objects.
  context.serializers[:tool] = ToolSerializer.method(:serialize)
  # :file and :url content only needs serializing (request direction).
  context.serializers[:file] = FileSerializer.method(:serialize)
  context.serializers[:url] = URLSerializer.method(:serialize)
  context.serializers[:choice] = ChoiceSerializer.method(:serialize)
  context.deserializers[:choice] = ChoiceSerializer.method(:deserialize)
  context.serializers[:tool_call] = ToolCallSerializer.method(:serialize)
  context.deserializers[:tool_call] = ToolCallSerializer.method(:deserialize)
  context.serializers[:tool_call_result] = ToolCallResultSerializer.method(:serialize)
  context.deserializers[:tool_call_result] = ToolCallResultSerializer.method(:deserialize)
  context.serializers[:function] = FunctionSerializer.method(:serialize)
  context.deserializers[:function] = FunctionSerializer.method(:deserialize)
  context.serializers[:message] = MessageSerializer.method(:serialize)
  context.deserializers[:message] = MessageSerializer.method(:deserialize)
  # :content and :response are only parsed from API responses, never emitted.
  context.deserializers[:content] = ContentSerializer.method(:deserialize)
  context.deserializers[:response] = ResponseSerializer.method(:deserialize)
  context.serializers[:thinking] = ThinkingSerializer.method(:serialize)
  context.deserializers[:thinking] = ThinkingSerializer.method(:deserialize)
end
Instance Method Summary
collapse
Instance Method Details
#messages ⇒ Array<Hash>
125
126
127
128
|
# File 'lib/omniai/anthropic/chat.rb', line 125
# Serializes every non-system message for the request body (system prompts
# are sent separately via #system).
#
# @return [Array<Hash>]
def messages
  @prompt.messages
    .reject(&:system?)
    .map { |message| message.serialize(context:) }
end
|
#path ⇒ String
140
141
142
|
# File 'lib/omniai/anthropic/chat.rb', line 140
# @return [String] the versioned Anthropic messages endpoint path
def path
  format("/%s/messages", Client::VERSION)
end
|
#payload ⇒ Hash
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
|
# File 'lib/omniai/anthropic/chat.rb', line 85
# Builds the request body for the Anthropic messages API, layering
# per-request values on top of the globally configured chat options.
#
# @return [Hash]
def payload
  overrides = {
    model: @model,
    messages:,
    system:,
    stream: stream? || nil,
    # temperature is dropped (nil, then compacted away) whenever extended
    # thinking is enabled.
    temperature: thinking_config ? nil : @temperature,
    tools: tools_payload,
    thinking: thinking_config,
  }
  data = OmniAI::Anthropic.config.chat_options.merge(overrides).compact
  # With thinking enabled, max_tokens is forced above the thinking budget.
  data[:max_tokens] = thinking_max_tokens if thinking_config
  data
end
|
#system ⇒ String?
131
132
133
134
135
136
137
|
# File 'lib/omniai/anthropic/chat.rb', line 131
# Combines all textual system prompts (plus optional formatting
# instructions) into a single string, since Anthropic takes the system
# prompt as a separate top-level field.
#
# @return [String, nil] nil when there is no system content
def system
  sections = @prompt.messages
    .select { |message| message.system? && message.text? }
    .map(&:text)
  sections << formatting if formatting?
  sections.join("\n\n") unless sections.empty?
end
|
#thinking_config ⇒ Hash?
Translates the unified thinking option to Anthropic's native format. Example: `thinking: { budget_tokens: 10000 }` becomes `{ type: "enabled", budget_tokens: 10000 }`
105
106
107
108
109
110
111
112
113
|
# File 'lib/omniai/anthropic/chat.rb', line 105
# Translates the unified `thinking` option into Anthropic's native format.
#
# `thinking: true` maps to `{ type: "enabled", budget_tokens: 10_000 }`;
# a hash is merged over the enabled default so callers can set their own
# budget. Any other value yields nil (thinking disabled).
#
# @return [Hash, nil]
def thinking_config
  option = @options[:thinking]
  if option.eql?(true)
    { type: "enabled", budget_tokens: 10_000 }
  elsif option.is_a?(Hash)
    { type: "enabled" }.merge(option)
  end
end
|
#thinking_max_tokens ⇒ Integer
Returns a max_tokens value, ensuring it is greater than budget_tokens when thinking is enabled.
117
118
119
120
121
122
|
# File 'lib/omniai/anthropic/chat.rb', line 117
# Returns a max_tokens value guaranteed to exceed the thinking budget, as
# Anthropic requires max_tokens > budget_tokens when extended thinking is
# enabled. Only meaningful when thinking_config is present.
#
# @return [Integer]
def thinking_max_tokens
  # `.to_i` guards against a thinking hash supplied without `budget_tokens`
  # (nil => 0); previously `nil + 8_000` raised NoMethodError.
  budget = thinking_config[:budget_tokens].to_i
  base = @options[:max_tokens] || OmniAI::Anthropic.config.chat_options[:max_tokens] || 0
  [base, budget + 8_000].max
end
|