Class: Anthropic::Resources::Beta::Messages

Inherits:
Object
  • Object
show all
Defined in:
lib/anthropic/resources/beta/messages.rb,
lib/anthropic/resources/beta/messages/batches.rb

Defined Under Namespace

Classes: Batches

Instance Attribute Summary collapse

Instance Method Summary collapse

Instance Attribute Details

#batches ⇒ Anthropic::Resources::Beta::Messages::Batches (readonly)



8
9
10
# File 'lib/anthropic/resources/beta/messages.rb', line 8

# Reader for the message-batches sub-resource.
#
# @return [Anthropic::Resources::Beta::Messages::Batches] (readonly)
def batches = @batches

Instance Method Details

#count_tokens(messages:, model:, cache_control: nil, context_management: nil, mcp_servers: nil, output_config: nil, output_format: nil, speed: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount

Some parameter documentation has been truncated; see Models::Beta::MessageCountTokensParams for more details.

Count the number of tokens in a Message.

The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.

Learn more about token counting in our [user guide](docs.claude.com/en/docs/build-with-claude/token-counting)

Parameters:

Returns:

See Also:



355
356
357
358
359
360
361
362
363
364
365
366
367
368
# File 'lib/anthropic/resources/beta/messages.rb', line 355

# Count the number of tokens in a Message, including tools, images, and
# documents, without creating it.
#
# @param params [Hash] request parameters; see Models::Beta::MessageCountTokensParams
# @return [Anthropic::Models::Beta::BetaMessageTokensCount]
def count_tokens(params)
  parsed, options = Anthropic::Beta::MessageCountTokensParams.dump_request(params)
  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil, is_beta: true)

  # `:betas` travels as the `anthropic-beta` header rather than in the body.
  beta_keys = {betas: "anthropic-beta"}
  headers = parsed.slice(*beta_keys.keys).transform_keys(beta_keys)
  body = parsed.except(*beta_keys.keys)

  @client.request(
    method: :post,
    path: "v1/messages/count_tokens?beta=true",
    headers: headers,
    body: body,
    model: Anthropic::Beta::BetaMessageTokensCount,
    options: options
  )
end

#create(max_tokens:, messages:, model:, cache_control: nil, container: nil, context_management: nil, inference_geo: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, speed: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage

See #stream_raw for streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our [user guide](docs.claude.com/en/docs/initial-setup)

Parameters:

Returns:

See Also:



94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
# File 'lib/anthropic/resources/beta/messages.rb', line 94

# Send a structured list of input messages with text and/or image content,
# and the model will generate the next message in the conversation.
# See {#stream_raw} for the streaming counterpart.
#
# @param params [Hash] request parameters; see Models::Beta::MessageCreateParams
# @return [Anthropic::Models::Beta::BetaMessage]
# @raise [ArgumentError] if `stream: true` is passed; use `#stream` instead
def create(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    # Streaming responses must go through #stream / #stream_raw so the SSE body is handled.
    raise ArgumentError, "Please use `#stream` for the streaming use case."
  end

  warn_thinking_enabled(parsed)

  # Swap tool input schemas declared as model classes for raw JSON schemas;
  # the originals are captured so the response can be re-hydrated via `unwrap`.
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(
    parsed,
    strict: nil,
    is_beta: true
  )

  unwrap = ->(raw) { Anthropic::Helpers::Messages.parse_input_schemas!(raw, tools:, models:) }

  # With an unmodified client timeout and no per-request options, derive a
  # non-streaming timeout from max_tokens; otherwise default to 600s unless
  # the caller supplied a timeout of their own.
  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    model = parsed[:model]&.to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    options = {timeout: 600, **options}
  end

  # `:betas` travels as the `anthropic-beta` header rather than in the body.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessage,
    unwrap: unwrap,
    options: options
  )
end

#stream(max_tokens:, messages:, model:, cache_control: nil, container: nil, context_management: nil, inference_geo: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, speed: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream

See #create for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our [user guide](docs.claude.com/en/docs/initial-setup)

Parameters:

Returns:

See Also:



200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
# File 'lib/anthropic/resources/beta/messages.rb', line 200

# Send input messages and stream the response, wrapping the raw SSE stream in
# a higher-level MessageStream helper. See {#create} for the non-streaming
# counterpart and {#stream_raw} for the unwrapped event stream.
#
# @param params [Hash] request parameters; see Models::Beta::MessageCreateParams
# @return [Anthropic::Streaming::MessageStream]
# @raise [ArgumentError] if `stream: false` is passed; use `#create` instead
def stream(params)
  # Use the `Anthropic::Beta` params alias, consistent with #create and #stream_raw.
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    raise ArgumentError, "Please use `#create` for the non-streaming use case."
  end
  parsed.store(:stream, true)

  warn_thinking_enabled(parsed)

  # Swap tool input schemas declared as model classes for raw JSON schemas;
  # the originals are forwarded so streamed events can be re-hydrated.
  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(
    parsed,
    strict: nil,
    is_beta: true
  )

  # `:betas` travels as the `anthropic-beta` header rather than in the body.
  header_params = {betas: "anthropic-beta"}
  raw_stream = @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: stream_headers(
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    ).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
  Anthropic::Streaming::MessageStream.new(raw_stream:, tools:, models:)
end

#stream_raw(max_tokens:, messages:, model:, container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>

See #create for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our [user guide](docs.claude.com/en/docs/initial-setup)

Parameters:

Returns:

See Also:



284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
# File 'lib/anthropic/resources/beta/messages.rb', line 284

# Send input messages and stream the model's response as raw server-sent
# events. See {#create} for the non-streaming counterpart and {#stream} for
# the higher-level streaming helper.
#
# @param params [Hash] request parameters; see Models::Beta::MessageCreateParams
# @return [Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStreamEvent>]
# @raise [ArgumentError] if `stream: false` is passed; use `#create` instead
def stream_raw(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    raise ArgumentError, "Please use `#create` for the non-streaming use case."
  end
  parsed.store(:stream, true)

  warn_thinking_enabled(parsed)

  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil, is_beta: true)

  # `:betas` travels as the `anthropic-beta` header rather than in the body.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: stream_headers(
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    ).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end

#tool_runner(params) ⇒ Anthropic::Helpers::Tools::Runner



13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
# File 'lib/anthropic/resources/beta/messages.rb', line 13

# Build a tool-use runner that drives the agentic tool loop for Messages.
#
# @param params [Hash] create params plus runner-only keys
#   (`:max_iterations`, and the deprecated `:compaction_control`)
# @return [Anthropic::Helpers::Tools::Runner]
def tool_runner(params)
  params = params.to_h
  warn_thinking_enabled(params)

  # Runner-only options are stripped before params reach the API.
  max_iterations = params.delete(:max_iterations)
  compaction_control = params.delete(:compaction_control)

  # Accept either symbol or string keys when checking the deprecated flag.
  compaction_enabled = [:enabled, "enabled"].any? { |key| compaction_control&.dig(key) }
  if compaction_enabled
    warn(
      "[DEPRECATION] The 'compaction_control' parameter is deprecated and will be removed in a future version. " \
      "Use server-side compaction instead by passing edits: [{ type: 'compact_20260112' }] in the params passed to `tool_runner()`. " \
      "See https://platform.claude.com/docs/en/build-with-claude/compaction",
      category: :deprecated
    )
  end

  Anthropic::Helpers::Tools::Runner.new(@client, params:, max_iterations:, compaction_control:)
end