Class: OpenAI::Resources::Chat::Completions

Inherits:
Object
  • Object
show all
Defined in:
lib/openai/resources/chat/completions.rb,
lib/openai/resources/chat/completions/messages.rb

Defined Under Namespace

Classes: Messages

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(client:) ⇒ Completions

This method is part of a private API. You should avoid using this method if possible, as it may be removed or be changed in the future.

Returns a new instance of Completions.

Parameters:



405
406
407
408
# File 'lib/openai/resources/chat/completions.rb', line 405

# @api private — may be changed or removed without notice.
#
# Builds the Completions resource and its nested Messages sub-resource.
#
# @param client [OpenAI::Client] low-level client used to issue API requests
def initialize(client:)
  @client = client
  # Nested resource for operating on the messages of stored chat completions.
  @messages = OpenAI::Resources::Chat::Completions::Messages.new(client: @client)
end

Instance Attribute Details

#messagesOpenAI::Resources::Chat::Completions::Messages (readonly)



8
9
10
# File 'lib/openai/resources/chat/completions.rb', line 8

# Read-only accessor for the nested Messages sub-resource.
#
# @return [OpenAI::Resources::Chat::Completions::Messages]
def messages
  @messages
end

Instance Method Details

#create(messages: , model: , audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) ⇒ OpenAI::Models::Chat::ChatCompletion

See #stream_raw for streaming counterpart.

Some parameter documentation has been truncated; see Models::Chat::CompletionCreateParams for more details.

**Starting a new project?** We recommend trying [Responses](platform.openai.com/docs/api-reference/responses) to take advantage of the latest OpenAI platform features. Compare [Chat Completions with Responses](platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses).


Creates a model response for the given chat conversation. Learn more in the [text generation](platform.openai.com/docs/guides/text-generation), [vision](platform.openai.com/docs/guides/vision), and [audio](platform.openai.com/docs/guides/audio) guides.

Parameter support can differ depending on the model used to generate the response, particularly for newer reasoning models. Parameters that are only supported for reasoning models are noted below. For the current state of unsupported parameters in reasoning models, [refer to the reasoning guide](platform.openai.com/docs/guides/reasoning).

Parameters:

Returns:

See Also:



100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
# File 'lib/openai/resources/chat/completions.rb', line 100

# Creates a model response for the given chat conversation (non-streaming).
# See {#stream_raw} for the streaming counterpart.
#
# Structured-output support: when a `JsonSchemaConverter` is supplied as the
# `response_format` (or as/inside a tool definition), it is expanded into the
# wire-format JSON-schema payload before the request is sent, and the response
# is post-processed (`unwrap`) to attach a `:parsed` field holding the value
# coerced back into the converter's model.
#
# @param params [Hash] request parameters; see Models::Chat::CompletionCreateParams
# @raise [ArgumentError] if `stream: true` is passed — use {#stream_raw} instead
# @return [OpenAI::Models::Chat::ChatCompletion]
def create(params)
  parsed, options = OpenAI::Chat::CompletionCreateParams.dump_request(params)
  # Streaming requests need SSE handling; reject them here rather than
  # returning a non-streaming response the caller did not expect.
  if parsed[:stream]
    message = "Please use `#stream_raw` for the streaming use case."
    raise ArgumentError.new(message)
  end

  # rubocop:disable Layout/LineLength
  # `model` captures a response_format converter (if any); `tool_models` maps
  # tool name -> converter so tool-call arguments can be coerced on the way out.
  model = nil
  tool_models = {}
  case parsed
  in {response_format: OpenAI::StructuredOutput::JsonSchemaConverter => model}
    # A bare converter as response_format: expand to the full json_schema form.
    parsed.update(
      response_format: {
        type: :json_schema,
        json_schema: {
          strict: true,
          name: model.name.split("::").last,
          schema: model.to_json_schema
        }
      }
    )
  in {response_format: {type: :json_schema, json_schema: {schema: OpenAI::StructuredOutput::JsonSchemaConverter => model}}}
    # Already in json_schema form but with a converter as the schema: serialize it in place.
    parsed.dig(:response_format, :json_schema).store(:schema, model.to_json_schema)
  in {tools: Array => tools}
    # Tools may be converters (or contain converter parameters); normalize each.
    mapped = tools.map do |tool|
      case tool
      in OpenAI::StructuredOutput::JsonSchemaConverter
        name = tool.name.split("::").last
        tool_models.store(name, tool)
        {
          type: :function,
          function: {
            strict: true,
            name: name,
            parameters: tool.to_json_schema
          }
        }
      in {function: {parameters: OpenAI::StructuredOutput::JsonSchemaConverter => params}}
        # NOTE: `params` here shadows the method argument within this pattern branch.
        func = tool.fetch(:function)
        name = func[:name] ||= params.name.split("::").last
        tool_models.store(name, params)
        func.update(parameters: params.to_json_schema)
      else
      end
    end
    tools.replace(mapped)
  else
  end

  # Post-processes the raw response: coerces structured-output content and
  # tool-call arguments back into their converter models under `:parsed`.
  unwrap = ->(raw) do
    if model.is_a?(OpenAI::StructuredOutput::JsonSchemaConverter)
      raw[:choices]&.each do |choice|
        message = choice.fetch(:message)
        parsed = JSON.parse(message.fetch(:content), symbolize_names: true)
        coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed)
        message.store(:parsed, coerced)
      end
    end
    raw[:choices]&.each do |choice|
      choice.dig(:message, :tool_calls)&.each do |tool_call|
        func = tool_call.fetch(:function)
        # Skip tool calls we have no registered converter for.
        next if (model = tool_models[func.fetch(:name)]).nil?

        parsed = JSON.parse(func.fetch(:arguments), symbolize_names: true)
        coerced = OpenAI::Internal::Type::Converter.coerce(model, parsed)
        func.store(:parsed, coerced)
      end
    end

    raw
  end
  # rubocop:enable Layout/LineLength

  @client.request(
    method: :post,
    path: "chat/completions",
    body: parsed,
    unwrap: unwrap,
    model: OpenAI::Chat::ChatCompletion,
    options: options
  )
end

#delete(completion_id, request_options: {}) ⇒ OpenAI::Models::Chat::ChatCompletionDeleted

Delete a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` can be deleted.

Parameters:

  • completion_id (String)

    The ID of the chat completion to delete.

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



393
394
395
396
397
398
399
400
# File 'lib/openai/resources/chat/completions.rb', line 393

# Delete a stored chat completion. Only Chat Completions created with the
# `store` parameter set to `true` can be deleted.
#
# @param completion_id [String] ID of the chat completion to delete
# @param params [Hash] optional; may carry `:request_options`
# @return [OpenAI::Models::Chat::ChatCompletionDeleted]
def delete(completion_id, params = {})
  request_options = params[:request_options]
  @client.request(
    method: :delete,
    model: OpenAI::Chat::ChatCompletionDeleted,
    path: ["chat/completions/%1$s", completion_id],
    options: request_options
  )
end

#list(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) ⇒ OpenAI::Internal::CursorPage<OpenAI::Models::Chat::ChatCompletion>

Some parameter documentation has been truncated; see Models::Chat::CompletionListParams for more details.

List stored Chat Completions. Only Chat Completions that have been stored with the `store` parameter set to `true` will be returned.

Parameters:

  • after (String)

    Identifier for the last chat completion from the previous pagination request.

  • limit (Integer)

    Number of Chat Completions to retrieve.

  • metadata (Hash{Symbol=>String}, nil)

    A list of metadata keys to filter the Chat Completions by. Example:

  • model (String)

    The model used to generate the Chat Completions.

  • order (Symbol, OpenAI::Models::Chat::CompletionListParams::Order)

    Sort order for Chat Completions by timestamp. Use `asc` for ascending order or `

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



369
370
371
372
373
374
375
376
377
378
379
# File 'lib/openai/resources/chat/completions.rb', line 369

# List stored Chat Completions. Only completions stored with the `store`
# parameter set to `true` are returned.
#
# @param params [Hash] filter/pagination params; see Models::Chat::CompletionListParams
# @return [OpenAI::Internal::CursorPage<OpenAI::Models::Chat::ChatCompletion>]
def list(params = {})
  query, options = OpenAI::Chat::CompletionListParams.dump_request(params)
  @client.request(
    method: :get,
    query: query,
    path: "chat/completions",
    model: OpenAI::Chat::ChatCompletion,
    page: OpenAI::Internal::CursorPage,
    options: options
  )
end

#retrieve(completion_id, request_options: {}) ⇒ OpenAI::Models::Chat::ChatCompletion

Get a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` will be returned.

Parameters:

  • completion_id (String)

    The ID of the chat completion to retrieve.

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



308
309
310
311
312
313
314
315
# File 'lib/openai/resources/chat/completions.rb', line 308

# Get a stored chat completion. Only completions created with the `store`
# parameter set to `true` can be retrieved.
#
# @param completion_id [String] ID of the chat completion to retrieve
# @param params [Hash] optional; may carry `:request_options`
# @return [OpenAI::Models::Chat::ChatCompletion]
def retrieve(completion_id, params = {})
  request_options = params[:request_options]
  @client.request(
    method: :get,
    model: OpenAI::Chat::ChatCompletion,
    path: ["chat/completions/%1$s", completion_id],
    options: request_options
  )
end

#streamObject

Raises:

  • (NotImplementedError)


184
185
186
# File 'lib/openai/resources/chat/completions.rb', line 184

# Placeholder for the higher-level streaming helper API.
#
# @raise [NotImplementedError] always; use {#stream_raw} in the meantime
def stream
  # Idiomatic raise: pass class and message rather than constructing with .new.
  raise NotImplementedError, "higher level helpers are coming soon!"
end

#stream_raw(messages: , model: , audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) ⇒ OpenAI::Internal::Stream<OpenAI::Models::Chat::ChatCompletionChunk>

See #create for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Chat::CompletionCreateParams for more details.

**Starting a new project?** We recommend trying [Responses](platform.openai.com/docs/api-reference/responses) to take advantage of the latest OpenAI platform features. Compare [Chat Completions with Responses](platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses).


Creates a model response for the given chat conversation. Learn more in the [text generation](platform.openai.com/docs/guides/text-generation), [vision](platform.openai.com/docs/guides/vision), and [audio](platform.openai.com/docs/guides/audio) guides.

Parameter support can differ depending on the model used to generate the response, particularly for newer reasoning models. Parameters that are only supported for reasoning models are noted below. For the current state of unsupported parameters in reasoning models, [refer to the reasoning guide](platform.openai.com/docs/guides/reasoning).

Parameters:

Returns:

See Also:



278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
# File 'lib/openai/resources/chat/completions.rb', line 278

# Streaming variant of {#create}: sends the chat completion request with
# `stream: true` and returns a server-sent-events stream of chunks.
# See {#create} for the non-streaming counterpart.
#
# @param params [Hash] request parameters; see Models::Chat::CompletionCreateParams
# @raise [ArgumentError] if `stream: false` is explicitly passed — use {#create}
# @return [OpenAI::Internal::Stream<OpenAI::Models::Chat::ChatCompletionChunk>]
def stream_raw(params)
  parsed, options = OpenAI::Chat::CompletionCreateParams.dump_request(params)
  # An explicit `stream: false` means the caller wanted the blocking API.
  unless parsed.fetch(:stream, true)
    # Idiomatic raise: class plus message, not ArgumentError.new(...).
    raise ArgumentError, "Please use `#create` for the non-streaming use case."
  end
  # Force the flag on so the server responds with an event stream.
  parsed.store(:stream, true)
  @client.request(
    method: :post,
    path: "chat/completions",
    headers: {"accept" => "text/event-stream"},
    body: parsed,
    stream: OpenAI::Internal::Stream,
    model: OpenAI::Chat::ChatCompletionChunk,
    options: options
  )
end

#update(completion_id, metadata: , request_options: {}) ⇒ OpenAI::Models::Chat::ChatCompletion

Some parameter documentation has been truncated; see Models::Chat::CompletionUpdateParams for more details.

Modify a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` can be modified. Currently, the only supported modification is to update the `metadata` field.

Parameters:

  • completion_id (String)

    The ID of the chat completion to update.

  • metadata (Hash{Symbol=>String}, nil)

    Set of 16 key-value pairs that can be attached to an object. This can be

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



335
336
337
338
339
340
341
342
343
344
# File 'lib/openai/resources/chat/completions.rb', line 335

# Modify a stored chat completion. Only completions created with the `store`
# parameter set to `true` can be modified; the only supported modification is
# updating the `metadata` field.
#
# @param completion_id [String] ID of the chat completion to update
# @param params [Hash] must include `:metadata`; see Models::Chat::CompletionUpdateParams
# @return [OpenAI::Models::Chat::ChatCompletion]
def update(completion_id, params)
  body, options = OpenAI::Chat::CompletionUpdateParams.dump_request(params)
  @client.request(
    method: :post,
    body: body,
    path: ["chat/completions/%1$s", completion_id],
    model: OpenAI::Chat::ChatCompletion,
    options: options
  )
end