Class: OpenAI::Models::Chat::ChatCompletion::Choice

Inherits:
Internal::Type::BaseModel
Defined in:
lib/openai/models/chat/chat_completion.rb

Defined Under Namespace

Modules: FinishReason

Classes: Logprobs

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Methods inherited from Internal::Type::BaseModel

==, #==, #[], coerce, #deconstruct_keys, #deep_to_h, dump, fields, hash, #hash, inherited, inspect, #inspect, known_fields, optional, recursively_to_h, required, #to_h, #to_json, #to_s, to_sorbet_type, #to_yaml

Methods included from Internal::Type::Converter

#coerce, coerce, #dump, dump, #inspect, inspect, type_info

Methods included from Internal::Util::SorbetRuntimeSupport

#const_missing, #define_sorbet_constant!, #sorbet_constant_defined?, #to_sorbet_type, to_sorbet_type

Constructor Details

#initialize(finish_reason:, index:, logprobs:, message:) ⇒ Object

Some parameter documentation has been truncated, see OpenAI::Models::Chat::ChatCompletion::Choice for more details.

Parameters:

  • finish_reason (Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason): The reason the model stopped generating tokens.

  • index (Integer): The index of the choice in the list of choices.

  • logprobs (OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil): Log probability information for the choice.

  • message (OpenAI::Models::Chat::ChatCompletionMessage): A chat completion message generated by the model.

# File 'lib/openai/models/chat/chat_completion.rb', line 101

class Choice < OpenAI::Internal::Type::BaseModel
  # @!attribute finish_reason
  #   The reason the model stopped generating tokens. This will be `stop` if the model
  #   hit a natural stop point or a provided stop sequence, `length` if the maximum
  #   number of tokens specified in the request was reached, `content_filter` if
  #   content was omitted due to a flag from our content filters, `tool_calls` if the
  #   model called a tool, or `function_call` (deprecated) if the model called a
  #   function.
  #
  #   @return [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason]
  required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason }

  # @!attribute index
  #   The index of the choice in the list of choices.
  #
  #   @return [Integer]
  required :index, Integer

  # @!attribute logprobs
  #   Log probability information for the choice.
  #
  #   @return [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil]
  required :logprobs, -> { OpenAI::Chat::ChatCompletion::Choice::Logprobs }, nil?: true

  # @!attribute message
  #   A chat completion message generated by the model.
  #
  #   @return [OpenAI::Models::Chat::ChatCompletionMessage]
  required :message, -> { OpenAI::Chat::ChatCompletionMessage }

  # @!method initialize(finish_reason:, index:, logprobs:, message:)
  #   Some parameter documentation has been truncated, see
  #   {OpenAI::Models::Chat::ChatCompletion::Choice} for more details.
  #
  #   @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model
  #
  #   @param index [Integer] The index of the choice in the list of choices.
  #
  #   @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice.
  #
  #   @param message [OpenAI::Models::Chat::ChatCompletionMessage] A chat completion message generated by the model.

  # The reason the model stopped generating tokens. This will be `stop` if the model
  # hit a natural stop point or a provided stop sequence, `length` if the maximum
  # number of tokens specified in the request was reached, `content_filter` if
  # content was omitted due to a flag from our content filters, `tool_calls` if the
  # model called a tool, or `function_call` (deprecated) if the model called a
  # function.
  #
  # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason
  module FinishReason
    extend OpenAI::Internal::Type::Enum

    STOP = :stop
    LENGTH = :length
    TOOL_CALLS = :tool_calls
    CONTENT_FILTER = :content_filter
    FUNCTION_CALL = :function_call

    # @!method self.values
    #   @return [Array<Symbol>]
  end

  # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs
  class Logprobs < OpenAI::Internal::Type::BaseModel
    # @!attribute content
    #   A list of message content tokens with log probability information.
    #
    #   @return [Array<OpenAI::Models::Chat::ChatCompletionTokenLogprob>, nil]
    required :content,
             -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] },
             nil?: true

    # @!attribute refusal
    #   A list of message refusal tokens with log probability information.
    #
    #   @return [Array<OpenAI::Models::Chat::ChatCompletionTokenLogprob>, nil]
    required :refusal,
             -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] },
             nil?: true

    # @!method initialize(content:, refusal:)
    #   Log probability information for the choice.
    #
    #   @param content [Array<OpenAI::Models::Chat::ChatCompletionTokenLogprob>, nil] A list of message content tokens with log probability information.
    #
    #   @param refusal [Array<OpenAI::Models::Chat::ChatCompletionTokenLogprob>, nil] A list of message refusal tokens with log probability information.
  end
end
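
As a usage sketch (not part of the generated reference), the snippet below shows how a Choice is typically obtained and read. It assumes the official openai Ruby gem client (OpenAI::Client with client.chat.completions.create) and an OPENAI_API_KEY in the environment; the model name is illustrative only.

require "openai"

client = OpenAI::Client.new # reads ENV["OPENAI_API_KEY"] by default

completion = client.chat.completions.create(
  model: "gpt-4o-mini",                            # illustrative model name
  messages: [{role: "user", content: "Say hello"}]
)

choice = completion.choices.first # => OpenAI::Models::Chat::ChatCompletion::Choice
choice.index                      # 0 for the first (and usually only) choice
choice.finish_reason              # e.g. :stop
choice.message.content            # the generated assistant text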

Instance Attribute Details

#finish_reason ⇒ Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason

The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.

Returns:

  • (Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason)

# File 'lib/openai/models/chat/chat_completion.rb', line 111

required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason }
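
As a hedged illustration (assuming choice was obtained from a response as in the sketch above), the coerced value is a plain Symbol, so it can be branched on directly:

case choice.finish_reason
when :stop           then :done            # natural stop or stop sequence
when :length         then :truncated       # max_tokens limit reached
when :tool_calls     then :run_tools       # model requested tool calls
when :content_filter then :filtered        # content withheld by filters
when :function_call  then :legacy_function # deprecated function-calling path
end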

#index ⇒ Integer

The index of the choice in the list of choices.

Returns:

  • (Integer)


# File 'lib/openai/models/chat/chat_completion.rb', line 117

required :index, Integer

#logprobs ⇒ OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs?

Log probability information for the choice.

Returns:

  • (OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil)

# File 'lib/openai/models/chat/chat_completion.rb', line 123

required :logprobs, -> { OpenAI::Chat::ChatCompletion::Choice::Logprobs }, nil?: true
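
Illustrative only: logprobs is nil unless log probabilities were requested on the create call (e.g. logprobs: true), so guard before iterating:

if (lp = choice.logprobs)
  (lp.content || []).each do |token_logprob|
    # each element is an OpenAI::Models::Chat::ChatCompletionTokenLogprob
    printf("%-12s %f\n", token_logprob.token, token_logprob.logprob)
  end
end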

#message ⇒ OpenAI::Models::Chat::ChatCompletionMessage

A chat completion message generated by the model.

Returns:

  • (OpenAI::Models::Chat::ChatCompletionMessage)

# File 'lib/openai/models/chat/chat_completion.rb', line 129

required :message, -> { OpenAI::Chat::ChatCompletionMessage }
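
A small sketch of reading the generated message: content is nil when the model answered with tool calls instead of text, and tool_calls is nil otherwise.

msg = choice.message
if msg.content
  puts msg.content
elsif msg.tool_calls
  msg.tool_calls.each do |tool_call|
    # respond_to? guard because newer API versions also allow non-function tool calls
    puts tool_call.function.name if tool_call.respond_to?(:function)
  end
end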

Class Method Details

.values ⇒ Array<Symbol>

Returns:

  • (Array<Symbol>)


# File 'lib/openai/models/chat/chat_completion.rb', line 160
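
A quick sketch of the generated enum helper: values returns the known finish-reason symbols, which is handy for validation or exhaustive handling.

OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason.values
# => [:stop, :length, :tool_calls, :content_filter, :function_call]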