Module: RubyLLM::Providers::OpenAI::Capabilities
- Defined in:
- lib/ruby_llm/providers/openai/capabilities.rb
Overview
Provider-level capability checks and narrow registry fallbacks.
Constant Summary collapse
- MODEL_PATTERNS =
# Maps each model family symbol to a regex that classifies model id strings.
# Iteration order matters: model_family returns the first matching family,
# so narrower patterns (e.g. gpt-image-1-mini) precede broader ones
# (gpt-image-1).
MODEL_PATTERNS = {
  gpt_image15: /^gpt-image-1\.5/,
  gpt_image_mini: /^gpt-image-1-mini/,
  gpt_image: /^gpt-image-1(?:$|-)/,
  gpt41: /^gpt-4\.1(?!-(?:mini|nano))/,
  gpt41_mini: /^gpt-4\.1-mini/,
  gpt41_nano: /^gpt-4\.1-nano/,
  gpt4: /^gpt-4(?:-\d{6})?$/,
  gpt4_turbo: /^gpt-4(?:\.5)?-(?:\d{6}-)?(preview|turbo)/,
  gpt35_turbo: /^gpt-3\.5-turbo/,
  gpt4o: /^gpt-4o(?!-(?:mini|audio|realtime|transcribe|tts|search))/,
  gpt4o_audio: /^gpt-4o-(?:audio)/,
  gpt4o_mini: /^gpt-4o-mini(?!-(?:audio|realtime|transcribe|tts|search))/,
  gpt4o_mini_audio: /^gpt-4o-mini-audio/,
  gpt4o_mini_realtime: /^gpt-4o-mini-realtime/,
  gpt4o_mini_transcribe: /^gpt-4o-mini-transcribe/,
  gpt4o_mini_tts: /^gpt-4o-mini-tts/,
  gpt4o_realtime: /^gpt-4o-realtime/,
  gpt4o_search: /^gpt-4o-search/,
  gpt4o_transcribe: /^gpt-4o-transcribe/,
  gpt5: /^gpt-5(?!.*(?:mini|nano))/,
  gpt5_mini: /^gpt-5.*mini/,
  gpt5_nano: /^gpt-5.*nano/,
  o1: /^o1(?!-(?:mini|pro))/,
  o1_mini: /^o1-mini/,
  o1_pro: /^o1-pro/,
  o3_mini: /^o3-mini/,
  babbage: /^babbage/,
  davinci: /^davinci/,
  embedding3_large: /^text-embedding-3-large/,
  embedding3_small: /^text-embedding-3-small/,
  embedding_ada: /^text-embedding-ada/,
  tts1: /^tts-1(?!-hd)/,
  tts1_hd: /^tts-1-hd/,
  whisper: /^whisper/,
  moderation: /^(?:omni|text)-moderation/
}.freeze
- PRICES =
# Per-family pricing in USD per million tokens.
# Chat families use :input/:output (plus optional :cached_input); image
# families nest :text and :images sub-hashes; single-rate families
# (embeddings, TTS, whisper, moderation) use a lone :price key.
PRICES = {
  gpt_image: {
    text: { input: 5.0, cached_input: 1.25 },
    images: { input: 10.0, cached_input: 2.5, output: 40.0 }
  },
  gpt_image_mini: {
    text: { input: 2.0, cached_input: 0.2 },
    images: { input: 2.5, cached_input: 0.25, output: 8.0 }
  },
  gpt_image15: {
    text: { input: 5.0, cached_input: 1.25, output: 10.0 },
    images: { input: 8.0, cached_input: 2.0, output: 32.0 }
  },
  gpt5: { input: 1.25, output: 10.0, cached_input: 0.125 },
  gpt5_mini: { input: 0.25, output: 2.0, cached_input: 0.025 },
  gpt5_nano: { input: 0.05, output: 0.4, cached_input: 0.005 },
  gpt41: { input: 2.0, output: 8.0, cached_input: 0.5 },
  gpt41_mini: { input: 0.4, output: 1.6, cached_input: 0.1 },
  gpt41_nano: { input: 0.1, output: 0.4 },
  gpt4: { input: 10.0, output: 30.0 },
  gpt4_turbo: { input: 10.0, output: 30.0 },
  gpt35_turbo: { input: 0.5, output: 1.5 },
  gpt4o: { input: 2.5, output: 10.0 },
  gpt4o_audio: { input: 2.5, output: 10.0 },
  gpt4o_mini: { input: 0.15, output: 0.6 },
  gpt4o_mini_audio: { input: 0.15, output: 0.6 },
  gpt4o_mini_realtime: { input: 0.6, output: 2.4 },
  gpt4o_mini_transcribe: { input: 1.25, output: 5.0 },
  gpt4o_mini_tts: { input: 0.6, output: 12.0 },
  gpt4o_realtime: { input: 5.0, output: 20.0 },
  gpt4o_search: { input: 2.5, output: 10.0 },
  gpt4o_transcribe: { input: 2.5, output: 10.0 },
  o1: { input: 15.0, output: 60.0 },
  o1_mini: { input: 1.1, output: 4.4 },
  o1_pro: { input: 150.0, output: 600.0 },
  o3_mini: { input: 1.1, output: 4.4 },
  babbage: { input: 0.4, output: 0.4 },
  davinci: { input: 2.0, output: 2.0 },
  embedding3_large: { price: 0.13 },
  embedding3_small: { price: 0.02 },
  embedding_ada: { price: 0.10 },
  tts1: { price: 15.0 },
  tts1_hd: { price: 30.0 },
  whisper: { price: 0.006 },
  moderation: { price: 0.0 }
}.freeze
- NIL_LIMIT_FAMILIES =
# Families for which context_window_for and max_tokens_for return nil —
# token-limit metadata does not apply (image, TTS, transcription,
# embedding, and moderation models).
NIL_LIMIT_FAMILIES = %w[
  gpt_image
  gpt_image_mini
  gpt_image15
  gpt4o_mini_tts
  tts1
  tts1_hd
  whisper
  moderation
  embedding3_large
  embedding3_small
  embedding_ada
].freeze
Class Method Summary collapse
- .cached_input_price_for(model_id) ⇒ Object
- .context_window_for(model_id) ⇒ Object
- .critical_capabilities_for(model_id) ⇒ Object
- .family_prices(model_id) ⇒ Object
- .image_model?(model_id) ⇒ Boolean
- .image_pricing_for(model_id) ⇒ Object
- .input_price_for(model_id) ⇒ Object
- .max_tokens_for(model_id) ⇒ Object
- .model_family(model_id) ⇒ Object
- .output_price_for(model_id) ⇒ Object
- .price_for(model_id, key, fallback) ⇒ Object
- .pricing_for(model_id) ⇒ Object
- .supports_functions?(model_id) ⇒ Boolean
- .supports_structured_output?(model_id) ⇒ Boolean
- .supports_tool_choice?(_model_id) ⇒ Boolean
- .supports_tool_parallel_control?(_model_id) ⇒ Boolean
- .supports_vision?(model_id) ⇒ Boolean
Class Method Details
.cached_input_price_for(model_id) ⇒ Object
224 225 226 227 228 229 |
# Price (USD per million tokens) for cache-hit input tokens, or nil when
# the model's family has no cached-input rate.
#
# @param model_id [String] the model identifier, e.g. "gpt-4o"
# @return [Float, nil]
def cached_input_price_for(model_id)
  # Image families nest their text rates under a :text sub-hash.
  return family_prices(model_id).dig(:text, :cached_input) if image_model?(model_id)

  # Reuse family_prices rather than re-deriving the family symbol and
  # hitting PRICES directly — keeps the registry lookup in one place.
  family_prices(model_id)[:cached_input]
end
.context_window_for(model_id) ⇒ Object
117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 |
# Context window (in tokens) for the given model id, or nil for families
# where a token window does not apply (see NIL_LIMIT_FAMILIES).
# Unknown families fall back to a conservative 4,096.
#
# @param model_id [String]
# @return [Integer, nil]
def context_window_for(model_id)
  family = model_family(model_id)
  return nil if NIL_LIMIT_FAMILIES.include?(family)

  if %w[gpt41 gpt41_mini gpt41_nano].include?(family)
    1_047_576
  elsif %w[gpt5 gpt5_mini gpt5_nano gpt4_turbo gpt4o gpt4o_audio gpt4o_mini
           gpt4o_mini_audio gpt4o_mini_realtime gpt4o_realtime gpt4o_search
           gpt4o_transcribe o1_mini].include?(family)
    128_000
  elsif family == 'gpt4'
    8_192
  elsif family == 'gpt4o_mini_transcribe'
    16_000
  elsif %w[o1 o1_pro o3_mini].include?(family)
    200_000
  elsif family == 'gpt35_turbo'
    16_385
  else
    4_096
  end
end
.critical_capabilities_for(model_id) ⇒ Object
150 151 152 153 154 155 156 157 |
# Names of the capabilities this model supports, drawn from
# function_calling / structured_output / vision / reasoning.
#
# @param model_id [String]
# @return [Array<String>] possibly empty
def critical_capabilities_for(model_id)
  checks = [
    ['function_calling', supports_functions?(model_id)],
    ['structured_output', supports_structured_output?(model_id)],
    ['vision', supports_vision?(model_id)],
    # Reasoning is inferred from the raw id (o-series, gpt-5, codex).
    ['reasoning', model_id.match?(/o\d|gpt-5|codex/)]
  ]
  checks.select { |_name, supported| supported }.map(&:first)
end
.family_prices(model_id) ⇒ Object
261 262 263 264 |
# Pricing hash for the model's family, or an empty hash when the family
# is not present in PRICES.
#
# @param model_id [String]
# @return [Hash]
def family_prices(model_id)
  PRICES.fetch(model_family(model_id).to_sym, {})
end
.image_model?(model_id) ⇒ Object
231 232 233 |
# Whether the model belongs to one of the image-generation families,
# whose pricing nests :text and :images sub-hashes.
#
# @param model_id [String]
# @return [Boolean]
def image_model?(model_id)
  case model_family(model_id)
  when 'gpt_image', 'gpt_image_mini', 'gpt_image15' then true
  else false
  end
end
.image_pricing_for(model_id) ⇒ Object
235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 |
# Builds the pricing structure for an image-generation model, with
# separate text-token and image-token sections. Cache-read rates are
# included only when the family defines them.
#
# @param model_id [String]
# @return [Hash] { text_tokens: { standard: ... }, images: { standard: ... } }
def image_pricing_for(model_id)
  # Hoisted: the original looked up family_prices three times.
  prices = family_prices(model_id)

  text_pricing = { input_per_million: input_price_for(model_id) }
  cached_text_price = cached_input_price_for(model_id)
  text_pricing[:cache_read_input_per_million] = cached_text_price if cached_text_price

  image_pricing = {
    input_per_million: prices.dig(:images, :input),
    output_per_million: prices.dig(:images, :output)
  }
  cached_image_price = prices.dig(:images, :cached_input)
  image_pricing[:cache_read_input_per_million] = cached_image_price if cached_image_price

  {
    text_tokens: { standard: text_pricing },
    images: { standard: image_pricing }
  }
end
.input_price_for(model_id) ⇒ Object
212 213 214 215 216 |
# Input-token price (USD per million tokens) for the model, falling back
# to 0.50 for unknown families.
#
# @param model_id [String]
# @return [Float, nil]
def input_price_for(model_id)
  if image_model?(model_id)
    family_prices(model_id).dig(:text, :input)
  else
    price_for(model_id, :input, 0.50)
  end
end
.max_tokens_for(model_id) ⇒ Object
134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 |
# Maximum output tokens for the given model id, or nil for families
# where a token limit does not apply (see NIL_LIMIT_FAMILIES).
# Unknown families fall back to 16,384.
#
# @param model_id [String]
# @return [Integer, nil]
def max_tokens_for(model_id)
  family = model_family(model_id)
  return nil if NIL_LIMIT_FAMILIES.include?(family)

  if %w[gpt5 gpt5_mini gpt5_nano].include?(family)
    400_000
  elsif %w[gpt41 gpt41_mini gpt41_nano].include?(family)
    32_768
  elsif family == 'gpt4'
    8_192
  elsif family == 'gpt35_turbo'
    4_096
  elsif family == 'gpt4o_mini_transcribe'
    2_000
  elsif %w[o1 o1_pro o3_mini].include?(family)
    100_000
  elsif family == 'o1_mini'
    65_536
  else
    16_384
  end
end
.model_family(model_id) ⇒ Object
173 174 175 176 177 178 179 |
# Classifies a model id into a family name using MODEL_PATTERNS.
# The first matching pattern wins; unmatched ids map to 'other'.
#
# @param model_id [String]
# @return [String] family name, e.g. "gpt4o" or "other"
def model_family(model_id)
  family, = MODEL_PATTERNS.find { |_name, pattern| model_id.match?(pattern) }
  family ? family.to_s : 'other'
end
.output_price_for(model_id) ⇒ Object
218 219 220 221 222 |
# Output-token price (USD per million tokens) for the model, falling back
# to 1.50 for unknown families.
#
# @param model_id [String]
# @return [Float, nil]
def output_price_for(model_id)
  if image_model?(model_id)
    family_prices(model_id).dig(:text, :output)
  else
    price_for(model_id, :output, 1.50)
  end
end
.price_for(model_id, key, fallback) ⇒ Object
255 256 257 258 259 |
# Looks up a price for the model's family: the requested key first, then
# the family's flat :price rate, then the supplied fallback.
#
# @param model_id [String]
# @param key [Symbol] :input or :output
# @param fallback [Float] default when the family has no rate
# @return [Float]
def price_for(model_id, key, fallback)
  prices = family_prices(model_id)
  # NOTE: the previous `prices = { key => fallback } if prices.empty?`
  # guard was dead code — an empty hash already falls through to
  # `fallback` via the || chain below.
  prices[key] || prices[:price] || fallback
end
.pricing_for(model_id) ⇒ Object
159 160 161 162 163 164 165 166 167 168 169 170 171 |
# Full pricing structure for a model. Image models get a dedicated
# text/images layout (see image_pricing_for); everything else gets a
# single text_tokens section with optional cache-read pricing.
#
# @param model_id [String]
# @return [Hash]
def pricing_for(model_id)
  return image_pricing_for(model_id) if image_model?(model_id)

  cached_price = cached_input_price_for(model_id)
  standard = {
    input_per_million: input_price_for(model_id),
    output_per_million: output_price_for(model_id)
  }
  standard[:cache_read_input_per_million] = cached_price if cached_price

  { text_tokens: { standard: standard } }
end
.supports_functions?(model_id) ⇒ Object
192 193 194 195 196 197 198 199 200 |
# Whether the model's family supports function (tool) calling.
#
# @param model_id [String]
# @return [Boolean]
def supports_functions?(model_id)
  %w[gpt5 gpt5_mini gpt5_nano gpt41 gpt41_mini gpt41_nano gpt4 gpt4_turbo
     gpt4o gpt4o_mini o1 o1_pro o3_mini].include?(model_family(model_id))
end
.supports_structured_output?(model_id) ⇒ Object
202 203 204 205 206 207 208 209 210 |
# Whether the model's family supports structured output (JSON schema).
# Narrower than supports_functions?: excludes gpt4 and gpt4_turbo.
#
# @param model_id [String]
# @return [Boolean]
def supports_structured_output?(model_id)
  %w[gpt5 gpt5_mini gpt5_nano gpt41 gpt41_mini gpt41_nano
     gpt4o gpt4o_mini o1 o1_pro o3_mini].include?(model_family(model_id))
end
.supports_tool_choice?(_model_id) ⇒ Boolean
109 110 111 |
# Whether an explicit tool_choice directive is accepted.
# Unconditionally true for this provider — the model id is ignored.
#
# @param _model_id [String] unused
# @return [Boolean] always true
def supports_tool_choice?(_model_id)
  true
end
.supports_tool_parallel_control?(_model_id) ⇒ Boolean
113 114 115 |
# Whether parallel tool-call control is supported.
# Unconditionally true for this provider — the model id is ignored.
#
# @param _model_id [String] unused
# @return [Boolean] always true
def supports_tool_parallel_control?(_model_id)
  true
end
.supports_vision?(model_id) ⇒ Object
181 182 183 184 185 186 187 188 189 190 |
# Whether the model's family accepts image input (vision).
# Includes the image-generation families and omni/text moderation.
#
# @param model_id [String]
# @return [Boolean]
def supports_vision?(model_id)
  %w[gpt_image gpt_image_mini gpt_image15 gpt5 gpt5_mini gpt5_nano
     gpt41 gpt41_mini gpt41_nano gpt4 gpt4_turbo gpt4o gpt4o_mini
     o1 o1_pro moderation gpt4o_search].include?(model_family(model_id))
end