Class: Anthropic::Resources::Messages
- Inherits:
-
Object
- Object
- Anthropic::Resources::Messages
- Defined in:
- lib/anthropic/resources/messages.rb,
lib/anthropic/resources/messages/batches.rb
Defined Under Namespace
Classes: Batches
Constant Summary collapse
- MODELS_TO_WARN_WITH_THINKING_ENABLED =
["claude-opus-4-6"].freeze
Instance Attribute Summary collapse
Instance Method Summary collapse
-
#count_tokens(messages:, model:, cache_control: nil, output_config: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, request_options: {}) ⇒ Anthropic::Models::MessageTokensCount
Some parameter documentation has been truncated; see Models::MessageCountTokensParams for more details.
-
#create(max_tokens:, messages:, model:, cache_control: nil, container: nil, inference_geo: nil, metadata: nil, output_config: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Models::Message
(also: #parse)
See #stream for streaming counterpart.
-
#stream(max_tokens:, messages:, model:, metadata: nil, output_config: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
See #create for non-streaming counterpart.
-
#stream_raw(max_tokens:, messages:, model:, cache_control: nil, container: nil, inference_geo: nil, metadata: nil, output_config: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::RawMessageStartEvent, Anthropic::Models::RawMessageDeltaEvent, Anthropic::Models::RawMessageStopEvent, Anthropic::Models::RawContentBlockStartEvent, Anthropic::Models::RawContentBlockDeltaEvent, Anthropic::Models::RawContentBlockStopEvent>
See #create for non-streaming counterpart.
Instance Attribute Details
#batches ⇒ Anthropic::Resources::Messages::Batches (readonly)
# File 'lib/anthropic/resources/messages.rb', lines 9-11

# Sub-resource for the Message Batches API.
#
# @return [Anthropic::Resources::Messages::Batches] (readonly)
def batches
  @batches
end
Instance Method Details
#count_tokens(messages:, model:, cache_control: nil, output_config: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, request_options: {}) ⇒ Anthropic::Models::MessageTokensCount
Some parameter documentation has been truncated; see Models::MessageCountTokensParams for more details.
Count the number of tokens in a Message.
The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
Learn more about token counting in our [user guide](docs.claude.com/en/docs/build-with-claude/token-counting)
# File 'lib/anthropic/resources/messages.rb', lines 284-294

# Count the number of tokens in a Message, including tools, images, and
# documents, without creating it.
#
# @param params [Hash] request parameters; see Models::MessageCountTokensParams
# @return [Anthropic::Models::MessageTokensCount]
def count_tokens(params)
  parsed, options = Anthropic::MessageCountTokensParams.dump_request(params)
  # Strip client-side input-schema model helpers from the tool definitions
  # before the request is serialized.
  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil)
  @client.request(
    method: :post,
    path: "v1/messages/count_tokens",
    body: parsed,
    model: Anthropic::MessageTokensCount,
    options: options
  )
end
#create(max_tokens:, messages:, model:, cache_control: nil, container: nil, inference_geo: nil, metadata: nil, output_config: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Models::Message Also known as: parse
See #stream for streaming counterpart.
Some parameter documentation has been truncated; see Models::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our [user guide](docs.claude.com/en/docs/initial-setup)
# File 'lib/anthropic/resources/messages.rb', lines 66-99

# Send a structured list of input messages with text and/or image content; the
# model generates the next message in the conversation. Non-streaming
# counterpart of #stream.
#
# @param params [Hash] request parameters; see Models::MessageCreateParams
# @return [Anthropic::Models::Message]
# @raise [ArgumentError] if `stream: true` was passed (use #stream instead)
def create(params)
  parsed, options = Anthropic::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end
  warn_thinking_enabled(parsed)
  # Collect tool/model schema helpers so the raw response can be parsed back
  # into the caller's structured types.
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil)
  unwrap = ->(raw) { Anthropic::Helpers::Messages.parse_input_schemas!(raw, tools:, models:) }
  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    # No per-request overrides and the client is on the default timeout:
    # derive a timeout from the requested max_tokens and model limits.
    model = parsed[:model]&.to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    options = {timeout: 600, **options}
  end
  @client.request(
    method: :post,
    path: "v1/messages",
    body: parsed,
    model: Anthropic::Message,
    unwrap: unwrap,
    options: options
  )
end
#stream(max_tokens:, messages:, model:, metadata: nil, output_config: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream
See #create for non-streaming counterpart.
Some parameter documentation has been truncated; see Models::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation with streaming.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our [user guide](docs.claude.com/en/docs/initial-setup)
# File 'lib/anthropic/resources/messages.rb', lines 151-173

# Streaming counterpart of #create: sends the same request with `stream: true`
# and wraps the raw SSE stream in a higher-level MessageStream.
#
# @param params [Hash] request parameters; see Models::MessageCreateParams
# @return [Anthropic::Streaming::MessageStream]
# @raise [ArgumentError] if `stream: false` was passed (use #create instead)
def stream(params)
  parsed, options = Anthropic::Models::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  warn_thinking_enabled(parsed)
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil)
  raw_stream = @client.request(
    method: :post,
    path: "v1/messages",
    headers: stream_headers("accept" => "text/event-stream"),
    body: parsed,
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Models::RawMessageStreamEvent,
    options: options
  )
  Anthropic::Streaming::MessageStream.new(raw_stream:, tools:, models:)
end
#stream_raw(max_tokens:, messages:, model:, cache_control: nil, container: nil, inference_geo: nil, metadata: nil, output_config: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::RawMessageStartEvent, Anthropic::Models::RawMessageDeltaEvent, Anthropic::Models::RawMessageStopEvent, Anthropic::Models::RawContentBlockStartEvent, Anthropic::Models::RawContentBlockDeltaEvent, Anthropic::Models::RawContentBlockStopEvent>
See #create for non-streaming counterpart.
Some parameter documentation has been truncated; see Models::MessageCreateParams for more details.
Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
The Messages API can be used for either single queries or stateless multi-turn conversations.
Learn more about the Messages API in our [user guide](docs.claude.com/en/docs/initial-setup)
# File 'lib/anthropic/resources/messages.rb', lines 230-248

# Low-level streaming counterpart of #create: returns the raw event stream
# without the MessageStream convenience wrapper.
#
# @param params [Hash] request parameters; see Models::MessageCreateParams
# @return [Anthropic::Internal::Stream<Anthropic::Models::RawMessageStreamEvent>]
# @raise [ArgumentError] if `stream: false` was passed (use #create instead)
def stream_raw(params)
  parsed, options = Anthropic::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil)
  @client.request(
    method: :post,
    path: "v1/messages",
    headers: stream_headers("accept" => "text/event-stream"),
    body: parsed,
    stream: Anthropic::Internal::Stream,
    model: Anthropic::RawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end